Merge branch 'for-current' of https://github.com/PeterHuewe/linux-tpmdd into for...
[sfrench/cifs-2.6.git] / drivers / gpu / drm / radeon / evergreen.c
1 /*
2  * Copyright 2010 Advanced Micro Devices, Inc.
3  *
4  * Permission is hereby granted, free of charge, to any person obtaining a
5  * copy of this software and associated documentation files (the "Software"),
6  * to deal in the Software without restriction, including without limitation
7  * the rights to use, copy, modify, merge, publish, distribute, sublicense,
8  * and/or sell copies of the Software, and to permit persons to whom the
9  * Software is furnished to do so, subject to the following conditions:
10  *
11  * The above copyright notice and this permission notice shall be included in
12  * all copies or substantial portions of the Software.
13  *
14  * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
15  * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
16  * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
17  * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
18  * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
19  * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
20  * OTHER DEALINGS IN THE SOFTWARE.
21  *
22  * Authors: Alex Deucher
23  */
24 #include <linux/firmware.h>
25 #include <linux/slab.h>
26 #include <drm/drmP.h>
27 #include "radeon.h"
28 #include "radeon_asic.h"
29 #include "radeon_audio.h"
30 #include <drm/radeon_drm.h>
31 #include "evergreend.h"
32 #include "atom.h"
33 #include "avivod.h"
34 #include "evergreen_reg.h"
35 #include "evergreen_blit_shaders.h"
36 #include "radeon_ucode.h"
37
/*
 * MMIO base offsets of the six display controller (CRTC) register blocks,
 * indexed by CRTC number.  Per-CRTC registers are accessed as
 * base_register + crtc_offsets[crtc].
 */
static const u32 crtc_offsets[6] =
{
	EVERGREEN_CRTC0_REGISTER_OFFSET,
	EVERGREEN_CRTC1_REGISTER_OFFSET,
	EVERGREEN_CRTC2_REGISTER_OFFSET,
	EVERGREEN_CRTC3_REGISTER_OFFSET,
	EVERGREEN_CRTC4_REGISTER_OFFSET,
	EVERGREEN_CRTC5_REGISTER_OFFSET
};
47
48 #include "clearstate_evergreen.h"
49
/*
 * Flat list of GFX register offsets for the Sumo-family RLC (run list
 * controller) save/restore buffer.
 * NOTE(review): presumably the RLC microcode saves and restores these
 * registers around power gating — confirm against the rlc init code that
 * consumes this table.
 */
static const u32 sumo_rlc_save_restore_register_list[] =
{
	0x98fc,
	0x9830,
	0x9834,
	0x9838,
	0x9870,
	0x9874,
	0x8a14,
	0x8b24,
	0x8bcc,
	0x8b10,
	0x8d00,
	0x8d04,
	0x8c00,
	0x8c04,
	0x8c08,
	0x8c0c,
	0x8d8c,
	0x8c20,
	0x8c24,
	0x8c28,
	0x8c18,
	0x8c1c,
	0x8cf0,
	0x8e2c,
	0x8e38,
	0x8c30,
	0x9508,
	0x9688,
	0x9608,
	0x960c,
	0x9610,
	0x9614,
	0x88c4,
	0x88d4,
	0xa008,
	0x900c,
	0x9100,
	0x913c,
	0x98f8,
	0x98f4,
	0x9b7c,
	0x3f8c,
	0x8950,
	0x8954,
	0x8a18,
	0x8b28,
	0x9144,
	0x9148,
	0x914c,
	0x3f90,
	0x3f94,
	0x915c,
	0x9160,
	0x9178,
	0x917c,
	0x9180,
	0x918c,
	0x9190,
	0x9194,
	0x9198,
	0x919c,
	0x91a8,
	0x91ac,
	0x91b0,
	0x91b4,
	0x91b8,
	0x91c4,
	0x91c8,
	0x91cc,
	0x91d0,
	0x91d4,
	0x91e0,
	0x91e4,
	0x91ec,
	0x91f0,
	0x91f4,
	0x9200,
	0x9204,
	0x929c,
	0x9150,
	0x802c,
};
134
/* Forward declarations for this file. */
static void evergreen_gpu_init(struct radeon_device *rdev);
void evergreen_fini(struct radeon_device *rdev);
void evergreen_pcie_gen2_enable(struct radeon_device *rdev);
void evergreen_program_aspm(struct radeon_device *rdev);
/* Helpers implemented in other ASIC files (cayman/cik/si/rv770). */
extern void cayman_cp_int_cntl_setup(struct radeon_device *rdev,
				     int ring, u32 cp_int_cntl);
extern void cayman_vm_decode_fault(struct radeon_device *rdev,
				   u32 status, u32 addr);
void cik_init_cp_pg_table(struct radeon_device *rdev);

/* Clear-state buffer (CSB) helpers shared with the SI/CIK code paths. */
extern u32 si_get_csb_size(struct radeon_device *rdev);
extern void si_get_csb_buffer(struct radeon_device *rdev, volatile u32 *buffer);
extern u32 cik_get_csb_size(struct radeon_device *rdev);
extern void cik_get_csb_buffer(struct radeon_device *rdev, volatile u32 *buffer);
extern void rv770_set_clk_bypass_mode(struct radeon_device *rdev);
150
/*
 * Evergreen "golden" register settings: { register offset, bitmask, value }
 * triples.  NOTE(review): presumably applied during ASIC init via
 * radeon_program_register_sequence() (masked read-modify-write) — confirm
 * against the *_init_golden_registers() caller.  Values come from AMD and
 * must not be altered.
 */
static const u32 evergreen_golden_registers[] =
{
	0x3f90, 0xffff0000, 0xff000000,
	0x9148, 0xffff0000, 0xff000000,
	0x3f94, 0xffff0000, 0xff000000,
	0x914c, 0xffff0000, 0xff000000,
	0x9b7c, 0xffffffff, 0x00000000,
	0x8a14, 0xffffffff, 0x00000007,
	0x8b10, 0xffffffff, 0x00000000,
	0x960c, 0xffffffff, 0x54763210,
	0x88c4, 0xffffffff, 0x000000c2,
	0x88d4, 0xffffffff, 0x00000010,
	0x8974, 0xffffffff, 0x00000000,
	0xc78, 0x00000080, 0x00000080,
	0x5eb4, 0xffffffff, 0x00000002,
	0x5e78, 0xffffffff, 0x001000f0,
	0x6104, 0x01000300, 0x00000000,
	0x5bc0, 0x00300000, 0x00000000,
	0x7030, 0xffffffff, 0x00000011,
	0x7c30, 0xffffffff, 0x00000011,
	0x10830, 0xffffffff, 0x00000011,
	0x11430, 0xffffffff, 0x00000011,
	0x12030, 0xffffffff, 0x00000011,
	0x12c30, 0xffffffff, 0x00000011,
	0xd02c, 0xffffffff, 0x08421000,
	0x240c, 0xffffffff, 0x00000380,
	0x8b24, 0xffffffff, 0x00ff0fff,
	0x28a4c, 0x06000000, 0x06000000,
	0x10c, 0x00000001, 0x00000001,
	0x8d00, 0xffffffff, 0x100e4848,
	0x8d04, 0xffffffff, 0x00164745,
	0x8c00, 0xffffffff, 0xe4000003,
	0x8c04, 0xffffffff, 0x40600060,
	0x8c08, 0xffffffff, 0x001c001c,
	0x8cf0, 0xffffffff, 0x08e00620,
	0x8c20, 0xffffffff, 0x00800080,
	0x8c24, 0xffffffff, 0x00800080,
	0x8c18, 0xffffffff, 0x20202078,
	0x8c1c, 0xffffffff, 0x00001010,
	0x28350, 0xffffffff, 0x00000000,
	0xa008, 0xffffffff, 0x00010000,
	0x5c4, 0xffffffff, 0x00000001,
	0x9508, 0xffffffff, 0x00000002,
	0x913c, 0x0000000f, 0x0000000a
};
196
/*
 * Second Evergreen "golden" register table: { register offset, bitmask,
 * value } triples, all cleared to zero.  NOTE(review): presumably applied
 * with radeon_program_register_sequence() like the first table — confirm.
 */
static const u32 evergreen_golden_registers2[] =
{
	0x2f4c, 0xffffffff, 0x00000000,
	0x54f4, 0xffffffff, 0x00000000,
	0x54f0, 0xffffffff, 0x00000000,
	0x5498, 0xffffffff, 0x00000000,
	0x549c, 0xffffffff, 0x00000000,
	0x5494, 0xffffffff, 0x00000000,
	0x53cc, 0xffffffff, 0x00000000,
	0x53c8, 0xffffffff, 0x00000000,
	0x53c4, 0xffffffff, 0x00000000,
	0x53c0, 0xffffffff, 0x00000000,
	0x53bc, 0xffffffff, 0x00000000,
	0x53b8, 0xffffffff, 0x00000000,
	0x53b4, 0xffffffff, 0x00000000,
	0x53b0, 0xffffffff, 0x00000000
};
214
/*
 * Cypress clock-gating init sequence: { register offset, bitmask, value }
 * triples.  NOTE(review): "mgcg" presumably stands for medium-grain clock
 * gating; the repeated writes to 0x802c interleaved with the 0x915c-0x929c
 * runs look like a bank/index select followed by per-bank programming —
 * confirm against AMD register documentation before relying on this.
 */
static const u32 cypress_mgcg_init[] =
{
	0x802c, 0xffffffff, 0xc0000000,
	0x5448, 0xffffffff, 0x00000100,
	0x55e4, 0xffffffff, 0x00000100,
	0x160c, 0xffffffff, 0x00000100,
	0x5644, 0xffffffff, 0x00000100,
	0xc164, 0xffffffff, 0x00000100,
	0x8a18, 0xffffffff, 0x00000100,
	0x897c, 0xffffffff, 0x06000100,
	0x8b28, 0xffffffff, 0x00000100,
	0x9144, 0xffffffff, 0x00000100,
	0x9a60, 0xffffffff, 0x00000100,
	0x9868, 0xffffffff, 0x00000100,
	0x8d58, 0xffffffff, 0x00000100,
	0x9510, 0xffffffff, 0x00000100,
	0x949c, 0xffffffff, 0x00000100,
	0x9654, 0xffffffff, 0x00000100,
	0x9030, 0xffffffff, 0x00000100,
	0x9034, 0xffffffff, 0x00000100,
	0x9038, 0xffffffff, 0x00000100,
	0x903c, 0xffffffff, 0x00000100,
	0x9040, 0xffffffff, 0x00000100,
	0xa200, 0xffffffff, 0x00000100,
	0xa204, 0xffffffff, 0x00000100,
	0xa208, 0xffffffff, 0x00000100,
	0xa20c, 0xffffffff, 0x00000100,
	0x971c, 0xffffffff, 0x00000100,
	0x977c, 0xffffffff, 0x00000100,
	0x3f80, 0xffffffff, 0x00000100,
	0xa210, 0xffffffff, 0x00000100,
	0xa214, 0xffffffff, 0x00000100,
	0x4d8, 0xffffffff, 0x00000100,
	0x9784, 0xffffffff, 0x00000100,
	0x9698, 0xffffffff, 0x00000100,
	0x4d4, 0xffffffff, 0x00000200,
	0x30cc, 0xffffffff, 0x00000100,
	0xd0c0, 0xffffffff, 0xff000100,
	0x802c, 0xffffffff, 0x40000000,
	0x915c, 0xffffffff, 0x00010000,
	0x9160, 0xffffffff, 0x00030002,
	0x9178, 0xffffffff, 0x00070000,
	0x917c, 0xffffffff, 0x00030002,
	0x9180, 0xffffffff, 0x00050004,
	0x918c, 0xffffffff, 0x00010006,
	0x9190, 0xffffffff, 0x00090008,
	0x9194, 0xffffffff, 0x00070000,
	0x9198, 0xffffffff, 0x00030002,
	0x919c, 0xffffffff, 0x00050004,
	0x91a8, 0xffffffff, 0x00010006,
	0x91ac, 0xffffffff, 0x00090008,
	0x91b0, 0xffffffff, 0x00070000,
	0x91b4, 0xffffffff, 0x00030002,
	0x91b8, 0xffffffff, 0x00050004,
	0x91c4, 0xffffffff, 0x00010006,
	0x91c8, 0xffffffff, 0x00090008,
	0x91cc, 0xffffffff, 0x00070000,
	0x91d0, 0xffffffff, 0x00030002,
	0x91d4, 0xffffffff, 0x00050004,
	0x91e0, 0xffffffff, 0x00010006,
	0x91e4, 0xffffffff, 0x00090008,
	0x91e8, 0xffffffff, 0x00000000,
	0x91ec, 0xffffffff, 0x00070000,
	0x91f0, 0xffffffff, 0x00030002,
	0x91f4, 0xffffffff, 0x00050004,
	0x9200, 0xffffffff, 0x00010006,
	0x9204, 0xffffffff, 0x00090008,
	0x9208, 0xffffffff, 0x00070000,
	0x920c, 0xffffffff, 0x00030002,
	0x9210, 0xffffffff, 0x00050004,
	0x921c, 0xffffffff, 0x00010006,
	0x9220, 0xffffffff, 0x00090008,
	0x9224, 0xffffffff, 0x00070000,
	0x9228, 0xffffffff, 0x00030002,
	0x922c, 0xffffffff, 0x00050004,
	0x9238, 0xffffffff, 0x00010006,
	0x923c, 0xffffffff, 0x00090008,
	0x9240, 0xffffffff, 0x00070000,
	0x9244, 0xffffffff, 0x00030002,
	0x9248, 0xffffffff, 0x00050004,
	0x9254, 0xffffffff, 0x00010006,
	0x9258, 0xffffffff, 0x00090008,
	0x925c, 0xffffffff, 0x00070000,
	0x9260, 0xffffffff, 0x00030002,
	0x9264, 0xffffffff, 0x00050004,
	0x9270, 0xffffffff, 0x00010006,
	0x9274, 0xffffffff, 0x00090008,
	0x9278, 0xffffffff, 0x00070000,
	0x927c, 0xffffffff, 0x00030002,
	0x9280, 0xffffffff, 0x00050004,
	0x928c, 0xffffffff, 0x00010006,
	0x9290, 0xffffffff, 0x00090008,
	0x9294, 0xffffffff, 0x00000000,
	0x929c, 0xffffffff, 0x00000001,
	0x802c, 0xffffffff, 0x40010000,
	0x915c, 0xffffffff, 0x00010000,
	0x9160, 0xffffffff, 0x00030002,
	0x9178, 0xffffffff, 0x00070000,
	0x917c, 0xffffffff, 0x00030002,
	0x9180, 0xffffffff, 0x00050004,
	0x918c, 0xffffffff, 0x00010006,
	0x9190, 0xffffffff, 0x00090008,
	0x9194, 0xffffffff, 0x00070000,
	0x9198, 0xffffffff, 0x00030002,
	0x919c, 0xffffffff, 0x00050004,
	0x91a8, 0xffffffff, 0x00010006,
	0x91ac, 0xffffffff, 0x00090008,
	0x91b0, 0xffffffff, 0x00070000,
	0x91b4, 0xffffffff, 0x00030002,
	0x91b8, 0xffffffff, 0x00050004,
	0x91c4, 0xffffffff, 0x00010006,
	0x91c8, 0xffffffff, 0x00090008,
	0x91cc, 0xffffffff, 0x00070000,
	0x91d0, 0xffffffff, 0x00030002,
	0x91d4, 0xffffffff, 0x00050004,
	0x91e0, 0xffffffff, 0x00010006,
	0x91e4, 0xffffffff, 0x00090008,
	0x91e8, 0xffffffff, 0x00000000,
	0x91ec, 0xffffffff, 0x00070000,
	0x91f0, 0xffffffff, 0x00030002,
	0x91f4, 0xffffffff, 0x00050004,
	0x9200, 0xffffffff, 0x00010006,
	0x9204, 0xffffffff, 0x00090008,
	0x9208, 0xffffffff, 0x00070000,
	0x920c, 0xffffffff, 0x00030002,
	0x9210, 0xffffffff, 0x00050004,
	0x921c, 0xffffffff, 0x00010006,
	0x9220, 0xffffffff, 0x00090008,
	0x9224, 0xffffffff, 0x00070000,
	0x9228, 0xffffffff, 0x00030002,
	0x922c, 0xffffffff, 0x00050004,
	0x9238, 0xffffffff, 0x00010006,
	0x923c, 0xffffffff, 0x00090008,
	0x9240, 0xffffffff, 0x00070000,
	0x9244, 0xffffffff, 0x00030002,
	0x9248, 0xffffffff, 0x00050004,
	0x9254, 0xffffffff, 0x00010006,
	0x9258, 0xffffffff, 0x00090008,
	0x925c, 0xffffffff, 0x00070000,
	0x9260, 0xffffffff, 0x00030002,
	0x9264, 0xffffffff, 0x00050004,
	0x9270, 0xffffffff, 0x00010006,
	0x9274, 0xffffffff, 0x00090008,
	0x9278, 0xffffffff, 0x00070000,
	0x927c, 0xffffffff, 0x00030002,
	0x9280, 0xffffffff, 0x00050004,
	0x928c, 0xffffffff, 0x00010006,
	0x9290, 0xffffffff, 0x00090008,
	0x9294, 0xffffffff, 0x00000000,
	0x929c, 0xffffffff, 0x00000001,
	0x802c, 0xffffffff, 0xc0000000
};
367
/*
 * Redwood clock-gating init sequence: { register offset, bitmask, value }
 * triples.  Shorter variant of the Cypress table (fewer 0x9208-0x9290
 * entries, single 0x802c bank pass).  NOTE(review): presumably applied via
 * radeon_program_register_sequence() — confirm against the caller.
 */
static const u32 redwood_mgcg_init[] =
{
	0x802c, 0xffffffff, 0xc0000000,
	0x5448, 0xffffffff, 0x00000100,
	0x55e4, 0xffffffff, 0x00000100,
	0x160c, 0xffffffff, 0x00000100,
	0x5644, 0xffffffff, 0x00000100,
	0xc164, 0xffffffff, 0x00000100,
	0x8a18, 0xffffffff, 0x00000100,
	0x897c, 0xffffffff, 0x06000100,
	0x8b28, 0xffffffff, 0x00000100,
	0x9144, 0xffffffff, 0x00000100,
	0x9a60, 0xffffffff, 0x00000100,
	0x9868, 0xffffffff, 0x00000100,
	0x8d58, 0xffffffff, 0x00000100,
	0x9510, 0xffffffff, 0x00000100,
	0x949c, 0xffffffff, 0x00000100,
	0x9654, 0xffffffff, 0x00000100,
	0x9030, 0xffffffff, 0x00000100,
	0x9034, 0xffffffff, 0x00000100,
	0x9038, 0xffffffff, 0x00000100,
	0x903c, 0xffffffff, 0x00000100,
	0x9040, 0xffffffff, 0x00000100,
	0xa200, 0xffffffff, 0x00000100,
	0xa204, 0xffffffff, 0x00000100,
	0xa208, 0xffffffff, 0x00000100,
	0xa20c, 0xffffffff, 0x00000100,
	0x971c, 0xffffffff, 0x00000100,
	0x977c, 0xffffffff, 0x00000100,
	0x3f80, 0xffffffff, 0x00000100,
	0xa210, 0xffffffff, 0x00000100,
	0xa214, 0xffffffff, 0x00000100,
	0x4d8, 0xffffffff, 0x00000100,
	0x9784, 0xffffffff, 0x00000100,
	0x9698, 0xffffffff, 0x00000100,
	0x4d4, 0xffffffff, 0x00000200,
	0x30cc, 0xffffffff, 0x00000100,
	0xd0c0, 0xffffffff, 0xff000100,
	0x802c, 0xffffffff, 0x40000000,
	0x915c, 0xffffffff, 0x00010000,
	0x9160, 0xffffffff, 0x00030002,
	0x9178, 0xffffffff, 0x00070000,
	0x917c, 0xffffffff, 0x00030002,
	0x9180, 0xffffffff, 0x00050004,
	0x918c, 0xffffffff, 0x00010006,
	0x9190, 0xffffffff, 0x00090008,
	0x9194, 0xffffffff, 0x00070000,
	0x9198, 0xffffffff, 0x00030002,
	0x919c, 0xffffffff, 0x00050004,
	0x91a8, 0xffffffff, 0x00010006,
	0x91ac, 0xffffffff, 0x00090008,
	0x91b0, 0xffffffff, 0x00070000,
	0x91b4, 0xffffffff, 0x00030002,
	0x91b8, 0xffffffff, 0x00050004,
	0x91c4, 0xffffffff, 0x00010006,
	0x91c8, 0xffffffff, 0x00090008,
	0x91cc, 0xffffffff, 0x00070000,
	0x91d0, 0xffffffff, 0x00030002,
	0x91d4, 0xffffffff, 0x00050004,
	0x91e0, 0xffffffff, 0x00010006,
	0x91e4, 0xffffffff, 0x00090008,
	0x91e8, 0xffffffff, 0x00000000,
	0x91ec, 0xffffffff, 0x00070000,
	0x91f0, 0xffffffff, 0x00030002,
	0x91f4, 0xffffffff, 0x00050004,
	0x9200, 0xffffffff, 0x00010006,
	0x9204, 0xffffffff, 0x00090008,
	0x9294, 0xffffffff, 0x00000000,
	0x929c, 0xffffffff, 0x00000001,
	0x802c, 0xffffffff, 0xc0000000
};
439
/*
 * Cedar "golden" register settings: { register offset, bitmask, value }
 * triples.  Mostly mirrors evergreen_golden_registers with Cedar-specific
 * values (e.g. 0x88d4, 0x8cf0) and fewer display-related entries.
 * NOTE(review): presumably applied via radeon_program_register_sequence()
 * — confirm against the caller.
 */
static const u32 cedar_golden_registers[] =
{
	0x3f90, 0xffff0000, 0xff000000,
	0x9148, 0xffff0000, 0xff000000,
	0x3f94, 0xffff0000, 0xff000000,
	0x914c, 0xffff0000, 0xff000000,
	0x9b7c, 0xffffffff, 0x00000000,
	0x8a14, 0xffffffff, 0x00000007,
	0x8b10, 0xffffffff, 0x00000000,
	0x960c, 0xffffffff, 0x54763210,
	0x88c4, 0xffffffff, 0x000000c2,
	0x88d4, 0xffffffff, 0x00000000,
	0x8974, 0xffffffff, 0x00000000,
	0xc78, 0x00000080, 0x00000080,
	0x5eb4, 0xffffffff, 0x00000002,
	0x5e78, 0xffffffff, 0x001000f0,
	0x6104, 0x01000300, 0x00000000,
	0x5bc0, 0x00300000, 0x00000000,
	0x7030, 0xffffffff, 0x00000011,
	0x7c30, 0xffffffff, 0x00000011,
	0x10830, 0xffffffff, 0x00000011,
	0x11430, 0xffffffff, 0x00000011,
	0xd02c, 0xffffffff, 0x08421000,
	0x240c, 0xffffffff, 0x00000380,
	0x8b24, 0xffffffff, 0x00ff0fff,
	0x28a4c, 0x06000000, 0x06000000,
	0x10c, 0x00000001, 0x00000001,
	0x8d00, 0xffffffff, 0x100e4848,
	0x8d04, 0xffffffff, 0x00164745,
	0x8c00, 0xffffffff, 0xe4000003,
	0x8c04, 0xffffffff, 0x40600060,
	0x8c08, 0xffffffff, 0x001c001c,
	0x8cf0, 0xffffffff, 0x08e00410,
	0x8c20, 0xffffffff, 0x00800080,
	0x8c24, 0xffffffff, 0x00800080,
	0x8c18, 0xffffffff, 0x20202078,
	0x8c1c, 0xffffffff, 0x00001010,
	0x28350, 0xffffffff, 0x00000000,
	0xa008, 0xffffffff, 0x00010000,
	0x5c4, 0xffffffff, 0x00000001,
	0x9508, 0xffffffff, 0x00000002
};
482
/*
 * Cedar clock-gating init sequence: { register offset, bitmask, value }
 * triples.  Smallest mgcg variant in this file (single short 0x915c-0x91ac
 * run).  NOTE(review): presumably applied via
 * radeon_program_register_sequence() — confirm against the caller.
 */
static const u32 cedar_mgcg_init[] =
{
	0x802c, 0xffffffff, 0xc0000000,
	0x5448, 0xffffffff, 0x00000100,
	0x55e4, 0xffffffff, 0x00000100,
	0x160c, 0xffffffff, 0x00000100,
	0x5644, 0xffffffff, 0x00000100,
	0xc164, 0xffffffff, 0x00000100,
	0x8a18, 0xffffffff, 0x00000100,
	0x897c, 0xffffffff, 0x06000100,
	0x8b28, 0xffffffff, 0x00000100,
	0x9144, 0xffffffff, 0x00000100,
	0x9a60, 0xffffffff, 0x00000100,
	0x9868, 0xffffffff, 0x00000100,
	0x8d58, 0xffffffff, 0x00000100,
	0x9510, 0xffffffff, 0x00000100,
	0x949c, 0xffffffff, 0x00000100,
	0x9654, 0xffffffff, 0x00000100,
	0x9030, 0xffffffff, 0x00000100,
	0x9034, 0xffffffff, 0x00000100,
	0x9038, 0xffffffff, 0x00000100,
	0x903c, 0xffffffff, 0x00000100,
	0x9040, 0xffffffff, 0x00000100,
	0xa200, 0xffffffff, 0x00000100,
	0xa204, 0xffffffff, 0x00000100,
	0xa208, 0xffffffff, 0x00000100,
	0xa20c, 0xffffffff, 0x00000100,
	0x971c, 0xffffffff, 0x00000100,
	0x977c, 0xffffffff, 0x00000100,
	0x3f80, 0xffffffff, 0x00000100,
	0xa210, 0xffffffff, 0x00000100,
	0xa214, 0xffffffff, 0x00000100,
	0x4d8, 0xffffffff, 0x00000100,
	0x9784, 0xffffffff, 0x00000100,
	0x9698, 0xffffffff, 0x00000100,
	0x4d4, 0xffffffff, 0x00000200,
	0x30cc, 0xffffffff, 0x00000100,
	0xd0c0, 0xffffffff, 0xff000100,
	0x802c, 0xffffffff, 0x40000000,
	0x915c, 0xffffffff, 0x00010000,
	0x9178, 0xffffffff, 0x00050000,
	0x917c, 0xffffffff, 0x00030002,
	0x918c, 0xffffffff, 0x00010004,
	0x9190, 0xffffffff, 0x00070006,
	0x9194, 0xffffffff, 0x00050000,
	0x9198, 0xffffffff, 0x00030002,
	0x91a8, 0xffffffff, 0x00010004,
	0x91ac, 0xffffffff, 0x00070006,
	0x91e8, 0xffffffff, 0x00000000,
	0x9294, 0xffffffff, 0x00000000,
	0x929c, 0xffffffff, 0x00000001,
	0x802c, 0xffffffff, 0xc0000000
};
536
/*
 * Juniper clock-gating init sequence: { register offset, bitmask, value }
 * triples.  Unlike the Cypress table, some of the 0x00000100 enable writes
 * (0x977c, 0x3f80, 0xa210...) appear after the per-bank run here; order is
 * deliberate hardware-init sequencing — do not reorder.
 * NOTE(review): presumably applied via radeon_program_register_sequence()
 * — confirm against the caller.
 */
static const u32 juniper_mgcg_init[] =
{
	0x802c, 0xffffffff, 0xc0000000,
	0x5448, 0xffffffff, 0x00000100,
	0x55e4, 0xffffffff, 0x00000100,
	0x160c, 0xffffffff, 0x00000100,
	0x5644, 0xffffffff, 0x00000100,
	0xc164, 0xffffffff, 0x00000100,
	0x8a18, 0xffffffff, 0x00000100,
	0x897c, 0xffffffff, 0x06000100,
	0x8b28, 0xffffffff, 0x00000100,
	0x9144, 0xffffffff, 0x00000100,
	0x9a60, 0xffffffff, 0x00000100,
	0x9868, 0xffffffff, 0x00000100,
	0x8d58, 0xffffffff, 0x00000100,
	0x9510, 0xffffffff, 0x00000100,
	0x949c, 0xffffffff, 0x00000100,
	0x9654, 0xffffffff, 0x00000100,
	0x9030, 0xffffffff, 0x00000100,
	0x9034, 0xffffffff, 0x00000100,
	0x9038, 0xffffffff, 0x00000100,
	0x903c, 0xffffffff, 0x00000100,
	0x9040, 0xffffffff, 0x00000100,
	0xa200, 0xffffffff, 0x00000100,
	0xa204, 0xffffffff, 0x00000100,
	0xa208, 0xffffffff, 0x00000100,
	0xa20c, 0xffffffff, 0x00000100,
	0x971c, 0xffffffff, 0x00000100,
	0xd0c0, 0xffffffff, 0xff000100,
	0x802c, 0xffffffff, 0x40000000,
	0x915c, 0xffffffff, 0x00010000,
	0x9160, 0xffffffff, 0x00030002,
	0x9178, 0xffffffff, 0x00070000,
	0x917c, 0xffffffff, 0x00030002,
	0x9180, 0xffffffff, 0x00050004,
	0x918c, 0xffffffff, 0x00010006,
	0x9190, 0xffffffff, 0x00090008,
	0x9194, 0xffffffff, 0x00070000,
	0x9198, 0xffffffff, 0x00030002,
	0x919c, 0xffffffff, 0x00050004,
	0x91a8, 0xffffffff, 0x00010006,
	0x91ac, 0xffffffff, 0x00090008,
	0x91b0, 0xffffffff, 0x00070000,
	0x91b4, 0xffffffff, 0x00030002,
	0x91b8, 0xffffffff, 0x00050004,
	0x91c4, 0xffffffff, 0x00010006,
	0x91c8, 0xffffffff, 0x00090008,
	0x91cc, 0xffffffff, 0x00070000,
	0x91d0, 0xffffffff, 0x00030002,
	0x91d4, 0xffffffff, 0x00050004,
	0x91e0, 0xffffffff, 0x00010006,
	0x91e4, 0xffffffff, 0x00090008,
	0x91e8, 0xffffffff, 0x00000000,
	0x91ec, 0xffffffff, 0x00070000,
	0x91f0, 0xffffffff, 0x00030002,
	0x91f4, 0xffffffff, 0x00050004,
	0x9200, 0xffffffff, 0x00010006,
	0x9204, 0xffffffff, 0x00090008,
	0x9208, 0xffffffff, 0x00070000,
	0x920c, 0xffffffff, 0x00030002,
	0x9210, 0xffffffff, 0x00050004,
	0x921c, 0xffffffff, 0x00010006,
	0x9220, 0xffffffff, 0x00090008,
	0x9224, 0xffffffff, 0x00070000,
	0x9228, 0xffffffff, 0x00030002,
	0x922c, 0xffffffff, 0x00050004,
	0x9238, 0xffffffff, 0x00010006,
	0x923c, 0xffffffff, 0x00090008,
	0x9240, 0xffffffff, 0x00070000,
	0x9244, 0xffffffff, 0x00030002,
	0x9248, 0xffffffff, 0x00050004,
	0x9254, 0xffffffff, 0x00010006,
	0x9258, 0xffffffff, 0x00090008,
	0x925c, 0xffffffff, 0x00070000,
	0x9260, 0xffffffff, 0x00030002,
	0x9264, 0xffffffff, 0x00050004,
	0x9270, 0xffffffff, 0x00010006,
	0x9274, 0xffffffff, 0x00090008,
	0x9278, 0xffffffff, 0x00070000,
	0x927c, 0xffffffff, 0x00030002,
	0x9280, 0xffffffff, 0x00050004,
	0x928c, 0xffffffff, 0x00010006,
	0x9290, 0xffffffff, 0x00090008,
	0x9294, 0xffffffff, 0x00000000,
	0x929c, 0xffffffff, 0x00000001,
	0x802c, 0xffffffff, 0xc0000000,
	0x977c, 0xffffffff, 0x00000100,
	0x3f80, 0xffffffff, 0x00000100,
	0xa210, 0xffffffff, 0x00000100,
	0xa214, 0xffffffff, 0x00000100,
	0x4d8, 0xffffffff, 0x00000100,
	0x9784, 0xffffffff, 0x00000100,
	0x9698, 0xffffffff, 0x00000100,
	0x4d4, 0xffffffff, 0x00000200,
	0x30cc, 0xffffffff, 0x00000100,
	0x802c, 0xffffffff, 0xc0000000
};
634
/*
 * SuperSumo (Sumo APU variant) "golden" register settings:
 * { register offset, bitmask, value } triples.
 * NOTE(review): presumably applied via radeon_program_register_sequence()
 * — confirm against the caller.  Values come from AMD; do not alter.
 */
static const u32 supersumo_golden_registers[] =
{
	0x5eb4, 0xffffffff, 0x00000002,
	0x5c4, 0xffffffff, 0x00000001,
	0x7030, 0xffffffff, 0x00000011,
	0x7c30, 0xffffffff, 0x00000011,
	0x6104, 0x01000300, 0x00000000,
	0x5bc0, 0x00300000, 0x00000000,
	0x8c04, 0xffffffff, 0x40600060,
	0x8c08, 0xffffffff, 0x001c001c,
	0x8c20, 0xffffffff, 0x00800080,
	0x8c24, 0xffffffff, 0x00800080,
	0x8c18, 0xffffffff, 0x20202078,
	0x8c1c, 0xffffffff, 0x00001010,
	0x918c, 0xffffffff, 0x00010006,
	0x91a8, 0xffffffff, 0x00010006,
	0x91c4, 0xffffffff, 0x00010006,
	0x91e0, 0xffffffff, 0x00010006,
	0x9200, 0xffffffff, 0x00010006,
	0x9150, 0xffffffff, 0x6e944040,
	0x917c, 0xffffffff, 0x00030002,
	0x9180, 0xffffffff, 0x00050004,
	0x9198, 0xffffffff, 0x00030002,
	0x919c, 0xffffffff, 0x00050004,
	0x91b4, 0xffffffff, 0x00030002,
	0x91b8, 0xffffffff, 0x00050004,
	0x91d0, 0xffffffff, 0x00030002,
	0x91d4, 0xffffffff, 0x00050004,
	0x91f0, 0xffffffff, 0x00030002,
	0x91f4, 0xffffffff, 0x00050004,
	0x915c, 0xffffffff, 0x00010000,
	0x9160, 0xffffffff, 0x00030002,
	0x3f90, 0xffff0000, 0xff000000,
	0x9178, 0xffffffff, 0x00070000,
	0x9194, 0xffffffff, 0x00070000,
	0x91b0, 0xffffffff, 0x00070000,
	0x91cc, 0xffffffff, 0x00070000,
	0x91ec, 0xffffffff, 0x00070000,
	0x9148, 0xffff0000, 0xff000000,
	0x9190, 0xffffffff, 0x00090008,
	0x91ac, 0xffffffff, 0x00090008,
	0x91c8, 0xffffffff, 0x00090008,
	0x91e4, 0xffffffff, 0x00090008,
	0x9204, 0xffffffff, 0x00090008,
	0x3f94, 0xffff0000, 0xff000000,
	0x914c, 0xffff0000, 0xff000000,
	0x929c, 0xffffffff, 0x00000001,
	0x8a18, 0xffffffff, 0x00000100,
	0x8b28, 0xffffffff, 0x00000100,
	0x9144, 0xffffffff, 0x00000100,
	0x5644, 0xffffffff, 0x00000100,
	0x9b7c, 0xffffffff, 0x00000000,
	0x8030, 0xffffffff, 0x0000100a,
	0x8a14, 0xffffffff, 0x00000007,
	0x8b24, 0xffffffff, 0x00ff0fff,
	0x8b10, 0xffffffff, 0x00000000,
	0x28a4c, 0x06000000, 0x06000000,
	0x4d8, 0xffffffff, 0x00000100,
	0x913c, 0xffff000f, 0x0100000a,
	0x960c, 0xffffffff, 0x54763210,
	0x88c4, 0xffffffff, 0x000000c2,
	0x88d4, 0xffffffff, 0x00000010,
	0x8974, 0xffffffff, 0x00000000,
	0xc78, 0x00000080, 0x00000080,
	0x5e78, 0xffffffff, 0x001000f0,
	0xd02c, 0xffffffff, 0x08421000,
	0xa008, 0xffffffff, 0x00010000,
	0x8d00, 0xffffffff, 0x100e4848,
	0x8d04, 0xffffffff, 0x00164745,
	0x8c00, 0xffffffff, 0xe4000003,
	0x8cf0, 0x1fffffff, 0x08e00620,
	0x28350, 0xffffffff, 0x00000000,
	0x9508, 0xffffffff, 0x00000002
};
709
/*
 * Sumo-specific "golden" register overrides: { register offset, bitmask,
 * value } triples applied in addition to the SuperSumo table.
 * NOTE(review): "in addition" is presumed from the small size and
 * overlapping offsets — confirm against the *_init_golden_registers()
 * caller.
 */
static const u32 sumo_golden_registers[] =
{
	0x900c, 0x00ffffff, 0x0017071f,
	0x8c18, 0xffffffff, 0x10101060,
	0x8c1c, 0xffffffff, 0x00001010,
	0x8c30, 0x0000000f, 0x00000005,
	0x9688, 0x0000000f, 0x00000007
};
718
/*
 * Wrestler (Ontario/Zacate APU) "golden" register settings:
 * { register offset, bitmask, value } triples.
 * NOTE(review): presumably applied via radeon_program_register_sequence()
 * — confirm against the caller.  Values come from AMD; do not alter.
 */
static const u32 wrestler_golden_registers[] =
{
	0x5eb4, 0xffffffff, 0x00000002,
	0x5c4, 0xffffffff, 0x00000001,
	0x7030, 0xffffffff, 0x00000011,
	0x7c30, 0xffffffff, 0x00000011,
	0x6104, 0x01000300, 0x00000000,
	0x5bc0, 0x00300000, 0x00000000,
	0x918c, 0xffffffff, 0x00010006,
	0x91a8, 0xffffffff, 0x00010006,
	0x9150, 0xffffffff, 0x6e944040,
	0x917c, 0xffffffff, 0x00030002,
	0x9198, 0xffffffff, 0x00030002,
	0x915c, 0xffffffff, 0x00010000,
	0x3f90, 0xffff0000, 0xff000000,
	0x9178, 0xffffffff, 0x00070000,
	0x9194, 0xffffffff, 0x00070000,
	0x9148, 0xffff0000, 0xff000000,
	0x9190, 0xffffffff, 0x00090008,
	0x91ac, 0xffffffff, 0x00090008,
	0x3f94, 0xffff0000, 0xff000000,
	0x914c, 0xffff0000, 0xff000000,
	0x929c, 0xffffffff, 0x00000001,
	0x8a18, 0xffffffff, 0x00000100,
	0x8b28, 0xffffffff, 0x00000100,
	0x9144, 0xffffffff, 0x00000100,
	0x9b7c, 0xffffffff, 0x00000000,
	0x8030, 0xffffffff, 0x0000100a,
	0x8a14, 0xffffffff, 0x00000001,
	0x8b24, 0xffffffff, 0x00ff0fff,
	0x8b10, 0xffffffff, 0x00000000,
	0x28a4c, 0x06000000, 0x06000000,
	0x4d8, 0xffffffff, 0x00000100,
	0x913c, 0xffff000f, 0x0100000a,
	0x960c, 0xffffffff, 0x54763210,
	0x88c4, 0xffffffff, 0x000000c2,
	0x88d4, 0xffffffff, 0x00000010,
	0x8974, 0xffffffff, 0x00000000,
	0xc78, 0x00000080, 0x00000080,
	0x5e78, 0xffffffff, 0x001000f0,
	0xd02c, 0xffffffff, 0x08421000,
	0xa008, 0xffffffff, 0x00010000,
	0x8d00, 0xffffffff, 0x100e4848,
	0x8d04, 0xffffffff, 0x00164745,
	0x8c00, 0xffffffff, 0xe4000003,
	0x8cf0, 0x1fffffff, 0x08e00410,
	0x28350, 0xffffffff, 0x00000000,
	0x9508, 0xffffffff, 0x00000002,
	0x900c, 0xffffffff, 0x0017071f,
	0x8c18, 0xffffffff, 0x10101060,
	0x8c1c, 0xffffffff, 0x00001010
};
771
/*
 * Barts (Northern Islands) "golden" register settings: { register offset,
 * bitmask, value } triples.  Note these use narrower masks than the
 * Evergreen tables (read-modify-write of selected bits only).
 * NOTE(review): presumably applied via radeon_program_register_sequence()
 * — confirm against the caller.  Values come from AMD; do not alter.
 */
static const u32 barts_golden_registers[] =
{
	0x5eb4, 0xffffffff, 0x00000002,
	0x5e78, 0x8f311ff1, 0x001000f0,
	0x3f90, 0xffff0000, 0xff000000,
	0x9148, 0xffff0000, 0xff000000,
	0x3f94, 0xffff0000, 0xff000000,
	0x914c, 0xffff0000, 0xff000000,
	0xc78, 0x00000080, 0x00000080,
	0xbd4, 0x70073777, 0x00010001,
	0xd02c, 0xbfffff1f, 0x08421000,
	0xd0b8, 0x03773777, 0x02011003,
	0x5bc0, 0x00200000, 0x50100000,
	0x98f8, 0x33773777, 0x02011003,
	0x98fc, 0xffffffff, 0x76543210,
	0x7030, 0x31000311, 0x00000011,
	0x2f48, 0x00000007, 0x02011003,
	0x6b28, 0x00000010, 0x00000012,
	0x7728, 0x00000010, 0x00000012,
	0x10328, 0x00000010, 0x00000012,
	0x10f28, 0x00000010, 0x00000012,
	0x11b28, 0x00000010, 0x00000012,
	0x12728, 0x00000010, 0x00000012,
	0x240c, 0x000007ff, 0x00000380,
	0x8a14, 0xf000001f, 0x00000007,
	0x8b24, 0x3fff3fff, 0x00ff0fff,
	0x8b10, 0x0000ff0f, 0x00000000,
	0x28a4c, 0x07ffffff, 0x06000000,
	0x10c, 0x00000001, 0x00010003,
	0xa02c, 0xffffffff, 0x0000009b,
	0x913c, 0x0000000f, 0x0100000a,
	0x8d00, 0xffff7f7f, 0x100e4848,
	0x8d04, 0x00ffffff, 0x00164745,
	0x8c00, 0xfffc0003, 0xe4000003,
	0x8c04, 0xf8ff00ff, 0x40600060,
	0x8c08, 0x00ff00ff, 0x001c001c,
	0x8cf0, 0x1fff1fff, 0x08e00620,
	0x8c20, 0x0fff0fff, 0x00800080,
	0x8c24, 0x0fff0fff, 0x00800080,
	0x8c18, 0xffffffff, 0x20202078,
	0x8c1c, 0x0000ffff, 0x00001010,
	0x28350, 0x00000f01, 0x00000000,
	0x9508, 0x3700001f, 0x00000002,
	0x960c, 0xffffffff, 0x54763210,
	0x88c4, 0x001f3ae3, 0x000000c2,
	0x88d4, 0x0000001f, 0x00000010,
	0x8974, 0xffffffff, 0x00000000
};
820
/* Golden register settings for Turks, consumed by
 * evergreen_init_golden_registers() via radeon_program_register_sequence().
 * NOTE(review): layout appears to be (register offset, mask, value) triples —
 * confirm against radeon_program_register_sequence(). Values come from AMD;
 * do not hand-edit.
 */
static const u32 turks_golden_registers[] =
{
	0x5eb4, 0xffffffff, 0x00000002,
	0x5e78, 0x8f311ff1, 0x001000f0,
	0x8c8, 0x00003000, 0x00001070,
	0x8cc, 0x000fffff, 0x00040035,
	0x3f90, 0xffff0000, 0xfff00000,
	0x9148, 0xffff0000, 0xfff00000,
	0x3f94, 0xffff0000, 0xfff00000,
	0x914c, 0xffff0000, 0xfff00000,
	0xc78, 0x00000080, 0x00000080,
	0xbd4, 0x00073007, 0x00010002,
	0xd02c, 0xbfffff1f, 0x08421000,
	0xd0b8, 0x03773777, 0x02010002,
	0x5bc0, 0x00200000, 0x50100000,
	0x98f8, 0x33773777, 0x00010002,
	0x98fc, 0xffffffff, 0x33221100,
	0x7030, 0x31000311, 0x00000011,
	0x2f48, 0x33773777, 0x00010002,
	0x6b28, 0x00000010, 0x00000012,
	0x7728, 0x00000010, 0x00000012,
	0x10328, 0x00000010, 0x00000012,
	0x10f28, 0x00000010, 0x00000012,
	0x11b28, 0x00000010, 0x00000012,
	0x12728, 0x00000010, 0x00000012,
	0x240c, 0x000007ff, 0x00000380,
	0x8a14, 0xf000001f, 0x00000007,
	0x8b24, 0x3fff3fff, 0x00ff0fff,
	0x8b10, 0x0000ff0f, 0x00000000,
	0x28a4c, 0x07ffffff, 0x06000000,
	0x10c, 0x00000001, 0x00010003,
	0xa02c, 0xffffffff, 0x0000009b,
	0x913c, 0x0000000f, 0x0100000a,
	0x8d00, 0xffff7f7f, 0x100e4848,
	0x8d04, 0x00ffffff, 0x00164745,
	0x8c00, 0xfffc0003, 0xe4000003,
	0x8c04, 0xf8ff00ff, 0x40600060,
	0x8c08, 0x00ff00ff, 0x001c001c,
	0x8cf0, 0x1fff1fff, 0x08e00410,
	0x8c20, 0x0fff0fff, 0x00800080,
	0x8c24, 0x0fff0fff, 0x00800080,
	0x8c18, 0xffffffff, 0x20202078,
	0x8c1c, 0x0000ffff, 0x00001010,
	0x28350, 0x00000f01, 0x00000000,
	0x9508, 0x3700001f, 0x00000002,
	0x960c, 0xffffffff, 0x54763210,
	0x88c4, 0x001f3ae3, 0x000000c2,
	0x88d4, 0x0000001f, 0x00000010,
	0x8974, 0xffffffff, 0x00000000
};
871
/* Golden register settings for Caicos, consumed by
 * evergreen_init_golden_registers() via radeon_program_register_sequence().
 * NOTE(review): layout appears to be (register offset, mask, value) triples —
 * confirm against radeon_program_register_sequence(). Values come from AMD;
 * do not hand-edit.
 */
static const u32 caicos_golden_registers[] =
{
	0x5eb4, 0xffffffff, 0x00000002,
	0x5e78, 0x8f311ff1, 0x001000f0,
	0x8c8, 0x00003420, 0x00001450,
	0x8cc, 0x000fffff, 0x00040035,
	0x3f90, 0xffff0000, 0xfffc0000,
	0x9148, 0xffff0000, 0xfffc0000,
	0x3f94, 0xffff0000, 0xfffc0000,
	0x914c, 0xffff0000, 0xfffc0000,
	0xc78, 0x00000080, 0x00000080,
	0xbd4, 0x00073007, 0x00010001,
	0xd02c, 0xbfffff1f, 0x08421000,
	0xd0b8, 0x03773777, 0x02010001,
	0x5bc0, 0x00200000, 0x50100000,
	0x98f8, 0x33773777, 0x02010001,
	0x98fc, 0xffffffff, 0x33221100,
	0x7030, 0x31000311, 0x00000011,
	0x2f48, 0x33773777, 0x02010001,
	0x6b28, 0x00000010, 0x00000012,
	0x7728, 0x00000010, 0x00000012,
	0x10328, 0x00000010, 0x00000012,
	0x10f28, 0x00000010, 0x00000012,
	0x11b28, 0x00000010, 0x00000012,
	0x12728, 0x00000010, 0x00000012,
	0x240c, 0x000007ff, 0x00000380,
	0x8a14, 0xf000001f, 0x00000001,
	0x8b24, 0x3fff3fff, 0x00ff0fff,
	0x8b10, 0x0000ff0f, 0x00000000,
	0x28a4c, 0x07ffffff, 0x06000000,
	0x10c, 0x00000001, 0x00010003,
	0xa02c, 0xffffffff, 0x0000009b,
	0x913c, 0x0000000f, 0x0100000a,
	0x8d00, 0xffff7f7f, 0x100e4848,
	0x8d04, 0x00ffffff, 0x00164745,
	0x8c00, 0xfffc0003, 0xe4000003,
	0x8c04, 0xf8ff00ff, 0x40600060,
	0x8c08, 0x00ff00ff, 0x001c001c,
	0x8cf0, 0x1fff1fff, 0x08e00410,
	0x8c20, 0x0fff0fff, 0x00800080,
	0x8c24, 0x0fff0fff, 0x00800080,
	0x8c18, 0xffffffff, 0x20202078,
	0x8c1c, 0x0000ffff, 0x00001010,
	0x28350, 0x00000f01, 0x00000000,
	0x9508, 0x3700001f, 0x00000002,
	0x960c, 0xffffffff, 0x54763210,
	0x88c4, 0x001f3ae3, 0x000000c2,
	0x88d4, 0x0000001f, 0x00000010,
	0x8974, 0xffffffff, 0x00000000
};
922
923 static void evergreen_init_golden_registers(struct radeon_device *rdev)
924 {
925         switch (rdev->family) {
926         case CHIP_CYPRESS:
927         case CHIP_HEMLOCK:
928                 radeon_program_register_sequence(rdev,
929                                                  evergreen_golden_registers,
930                                                  (const u32)ARRAY_SIZE(evergreen_golden_registers));
931                 radeon_program_register_sequence(rdev,
932                                                  evergreen_golden_registers2,
933                                                  (const u32)ARRAY_SIZE(evergreen_golden_registers2));
934                 radeon_program_register_sequence(rdev,
935                                                  cypress_mgcg_init,
936                                                  (const u32)ARRAY_SIZE(cypress_mgcg_init));
937                 break;
938         case CHIP_JUNIPER:
939                 radeon_program_register_sequence(rdev,
940                                                  evergreen_golden_registers,
941                                                  (const u32)ARRAY_SIZE(evergreen_golden_registers));
942                 radeon_program_register_sequence(rdev,
943                                                  evergreen_golden_registers2,
944                                                  (const u32)ARRAY_SIZE(evergreen_golden_registers2));
945                 radeon_program_register_sequence(rdev,
946                                                  juniper_mgcg_init,
947                                                  (const u32)ARRAY_SIZE(juniper_mgcg_init));
948                 break;
949         case CHIP_REDWOOD:
950                 radeon_program_register_sequence(rdev,
951                                                  evergreen_golden_registers,
952                                                  (const u32)ARRAY_SIZE(evergreen_golden_registers));
953                 radeon_program_register_sequence(rdev,
954                                                  evergreen_golden_registers2,
955                                                  (const u32)ARRAY_SIZE(evergreen_golden_registers2));
956                 radeon_program_register_sequence(rdev,
957                                                  redwood_mgcg_init,
958                                                  (const u32)ARRAY_SIZE(redwood_mgcg_init));
959                 break;
960         case CHIP_CEDAR:
961                 radeon_program_register_sequence(rdev,
962                                                  cedar_golden_registers,
963                                                  (const u32)ARRAY_SIZE(cedar_golden_registers));
964                 radeon_program_register_sequence(rdev,
965                                                  evergreen_golden_registers2,
966                                                  (const u32)ARRAY_SIZE(evergreen_golden_registers2));
967                 radeon_program_register_sequence(rdev,
968                                                  cedar_mgcg_init,
969                                                  (const u32)ARRAY_SIZE(cedar_mgcg_init));
970                 break;
971         case CHIP_PALM:
972                 radeon_program_register_sequence(rdev,
973                                                  wrestler_golden_registers,
974                                                  (const u32)ARRAY_SIZE(wrestler_golden_registers));
975                 break;
976         case CHIP_SUMO:
977                 radeon_program_register_sequence(rdev,
978                                                  supersumo_golden_registers,
979                                                  (const u32)ARRAY_SIZE(supersumo_golden_registers));
980                 break;
981         case CHIP_SUMO2:
982                 radeon_program_register_sequence(rdev,
983                                                  supersumo_golden_registers,
984                                                  (const u32)ARRAY_SIZE(supersumo_golden_registers));
985                 radeon_program_register_sequence(rdev,
986                                                  sumo_golden_registers,
987                                                  (const u32)ARRAY_SIZE(sumo_golden_registers));
988                 break;
989         case CHIP_BARTS:
990                 radeon_program_register_sequence(rdev,
991                                                  barts_golden_registers,
992                                                  (const u32)ARRAY_SIZE(barts_golden_registers));
993                 break;
994         case CHIP_TURKS:
995                 radeon_program_register_sequence(rdev,
996                                                  turks_golden_registers,
997                                                  (const u32)ARRAY_SIZE(turks_golden_registers));
998                 break;
999         case CHIP_CAICOS:
1000                 radeon_program_register_sequence(rdev,
1001                                                  caicos_golden_registers,
1002                                                  (const u32)ARRAY_SIZE(caicos_golden_registers));
1003                 break;
1004         default:
1005                 break;
1006         }
1007 }
1008
1009 void evergreen_tiling_fields(unsigned tiling_flags, unsigned *bankw,
1010                              unsigned *bankh, unsigned *mtaspect,
1011                              unsigned *tile_split)
1012 {
1013         *bankw = (tiling_flags >> RADEON_TILING_EG_BANKW_SHIFT) & RADEON_TILING_EG_BANKW_MASK;
1014         *bankh = (tiling_flags >> RADEON_TILING_EG_BANKH_SHIFT) & RADEON_TILING_EG_BANKH_MASK;
1015         *mtaspect = (tiling_flags >> RADEON_TILING_EG_MACRO_TILE_ASPECT_SHIFT) & RADEON_TILING_EG_MACRO_TILE_ASPECT_MASK;
1016         *tile_split = (tiling_flags >> RADEON_TILING_EG_TILE_SPLIT_SHIFT) & RADEON_TILING_EG_TILE_SPLIT_MASK;
1017         switch (*bankw) {
1018         default:
1019         case 1: *bankw = EVERGREEN_ADDR_SURF_BANK_WIDTH_1; break;
1020         case 2: *bankw = EVERGREEN_ADDR_SURF_BANK_WIDTH_2; break;
1021         case 4: *bankw = EVERGREEN_ADDR_SURF_BANK_WIDTH_4; break;
1022         case 8: *bankw = EVERGREEN_ADDR_SURF_BANK_WIDTH_8; break;
1023         }
1024         switch (*bankh) {
1025         default:
1026         case 1: *bankh = EVERGREEN_ADDR_SURF_BANK_HEIGHT_1; break;
1027         case 2: *bankh = EVERGREEN_ADDR_SURF_BANK_HEIGHT_2; break;
1028         case 4: *bankh = EVERGREEN_ADDR_SURF_BANK_HEIGHT_4; break;
1029         case 8: *bankh = EVERGREEN_ADDR_SURF_BANK_HEIGHT_8; break;
1030         }
1031         switch (*mtaspect) {
1032         default:
1033         case 1: *mtaspect = EVERGREEN_ADDR_SURF_MACRO_TILE_ASPECT_1; break;
1034         case 2: *mtaspect = EVERGREEN_ADDR_SURF_MACRO_TILE_ASPECT_2; break;
1035         case 4: *mtaspect = EVERGREEN_ADDR_SURF_MACRO_TILE_ASPECT_4; break;
1036         case 8: *mtaspect = EVERGREEN_ADDR_SURF_MACRO_TILE_ASPECT_8; break;
1037         }
1038 }
1039
1040 static int sumo_set_uvd_clock(struct radeon_device *rdev, u32 clock,
1041                               u32 cntl_reg, u32 status_reg)
1042 {
1043         int r, i;
1044         struct atom_clock_dividers dividers;
1045
1046         r = radeon_atom_get_clock_dividers(rdev, COMPUTE_ENGINE_PLL_PARAM,
1047                                            clock, false, &dividers);
1048         if (r)
1049                 return r;
1050
1051         WREG32_P(cntl_reg, dividers.post_div, ~(DCLK_DIR_CNTL_EN|DCLK_DIVIDER_MASK));
1052
1053         for (i = 0; i < 100; i++) {
1054                 if (RREG32(status_reg) & DCLK_STATUS)
1055                         break;
1056                 mdelay(10);
1057         }
1058         if (i == 100)
1059                 return -ETIMEDOUT;
1060
1061         return 0;
1062 }
1063
1064 int sumo_set_uvd_clocks(struct radeon_device *rdev, u32 vclk, u32 dclk)
1065 {
1066         int r = 0;
1067         u32 cg_scratch = RREG32(CG_SCRATCH1);
1068
1069         r = sumo_set_uvd_clock(rdev, vclk, CG_VCLK_CNTL, CG_VCLK_STATUS);
1070         if (r)
1071                 goto done;
1072         cg_scratch &= 0xffff0000;
1073         cg_scratch |= vclk / 100; /* Mhz */
1074
1075         r = sumo_set_uvd_clock(rdev, dclk, CG_DCLK_CNTL, CG_DCLK_STATUS);
1076         if (r)
1077                 goto done;
1078         cg_scratch &= 0x0000ffff;
1079         cg_scratch |= (dclk / 100) << 16; /* Mhz */
1080
1081 done:
1082         WREG32(CG_SCRATCH1, cg_scratch);
1083
1084         return r;
1085 }
1086
/**
 * evergreen_set_uvd_clocks - program UVD VCLK and DCLK through the UPLL
 *
 * @rdev: radeon_device pointer
 * @vclk: requested UVD video clock (0 leaves the PLL bypassed and asleep)
 * @dclk: requested UVD decoder clock (0 leaves the PLL bypassed and asleep)
 *
 * Switches the UVD clocks to bypass, recomputes and programs the UPLL
 * dividers, waits for the PLL to settle, then switches VCLK/DCLK back
 * to the PLL outputs.
 * Returns 0 on success, negative error code on failure.
 */
int evergreen_set_uvd_clocks(struct radeon_device *rdev, u32 vclk, u32 dclk)
{
	/* divider outputs; filled in by radeon_uvd_calc_upll_dividers() */
	unsigned fb_div = 0, vclk_div = 0, dclk_div = 0;
	int r;

	/* bypass vclk and dclk with bclk */
	WREG32_P(CG_UPLL_FUNC_CNTL_2,
		VCLK_SRC_SEL(1) | DCLK_SRC_SEL(1),
		~(VCLK_SRC_SEL_MASK | DCLK_SRC_SEL_MASK));

	/* put PLL in bypass mode */
	WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_BYPASS_EN_MASK, ~UPLL_BYPASS_EN_MASK);

	if (!vclk || !dclk) {
		/* keep the Bypass mode, put PLL to sleep */
		WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_SLEEP_MASK, ~UPLL_SLEEP_MASK);
		return 0;
	}

	/* NOTE(review): the numeric limits passed here come from AMD and are
	 * not documented in this file — confirm against
	 * radeon_uvd_calc_upll_dividers() before changing. */
	r = radeon_uvd_calc_upll_dividers(rdev, vclk, dclk, 125000, 250000,
					  16384, 0x03FFFFFF, 0, 128, 5,
					  &fb_div, &vclk_div, &dclk_div);
	if (r)
		return r;

	/* set VCO_MODE to 1 */
	WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_VCO_MODE_MASK, ~UPLL_VCO_MODE_MASK);

	/* toggle UPLL_SLEEP to 1 then back to 0 */
	WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_SLEEP_MASK, ~UPLL_SLEEP_MASK);
	WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_SLEEP_MASK);

	/* deassert UPLL_RESET */
	WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_RESET_MASK);

	mdelay(1);

	r = radeon_uvd_send_upll_ctlreq(rdev, CG_UPLL_FUNC_CNTL);
	if (r)
		return r;

	/* assert UPLL_RESET again */
	WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_RESET_MASK, ~UPLL_RESET_MASK);

	/* disable spread spectrum. */
	WREG32_P(CG_UPLL_SPREAD_SPECTRUM, 0, ~SSEN_MASK);

	/* set feedback divider */
	WREG32_P(CG_UPLL_FUNC_CNTL_3, UPLL_FB_DIV(fb_div), ~UPLL_FB_DIV_MASK);

	/* set ref divider to 0 */
	WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_REF_DIV_MASK);

	/* NOTE(review): ISPARE9 selection by fb_div threshold is an AMD
	 * magic value; semantics are not documented here */
	if (fb_div < 307200)
		WREG32_P(CG_UPLL_FUNC_CNTL_4, 0, ~UPLL_SPARE_ISPARE9);
	else
		WREG32_P(CG_UPLL_FUNC_CNTL_4, UPLL_SPARE_ISPARE9, ~UPLL_SPARE_ISPARE9);

	/* set PDIV_A and PDIV_B */
	WREG32_P(CG_UPLL_FUNC_CNTL_2,
		UPLL_PDIV_A(vclk_div) | UPLL_PDIV_B(dclk_div),
		~(UPLL_PDIV_A_MASK | UPLL_PDIV_B_MASK));

	/* give the PLL some time to settle */
	mdelay(15);

	/* deassert PLL_RESET */
	WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_RESET_MASK);

	mdelay(15);

	/* switch from bypass mode to normal mode */
	WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_BYPASS_EN_MASK);

	r = radeon_uvd_send_upll_ctlreq(rdev, CG_UPLL_FUNC_CNTL);
	if (r)
		return r;

	/* switch VCLK and DCLK selection */
	WREG32_P(CG_UPLL_FUNC_CNTL_2,
		VCLK_SRC_SEL(2) | DCLK_SRC_SEL(2),
		~(VCLK_SRC_SEL_MASK | DCLK_SRC_SEL_MASK));

	mdelay(100);

	return 0;
}
1175
1176 void evergreen_fix_pci_max_read_req_size(struct radeon_device *rdev)
1177 {
1178         int readrq;
1179         u16 v;
1180
1181         readrq = pcie_get_readrq(rdev->pdev);
1182         v = ffs(readrq) - 8;
1183         /* if bios or OS sets MAX_READ_REQUEST_SIZE to an invalid value, fix it
1184          * to avoid hangs or perfomance issues
1185          */
1186         if ((v == 0) || (v == 6) || (v == 7))
1187                 pcie_set_readrq(rdev->pdev, 512);
1188 }
1189
1190 void dce4_program_fmt(struct drm_encoder *encoder)
1191 {
1192         struct drm_device *dev = encoder->dev;
1193         struct radeon_device *rdev = dev->dev_private;
1194         struct radeon_encoder *radeon_encoder = to_radeon_encoder(encoder);
1195         struct radeon_crtc *radeon_crtc = to_radeon_crtc(encoder->crtc);
1196         struct drm_connector *connector = radeon_get_connector_for_encoder(encoder);
1197         int bpc = 0;
1198         u32 tmp = 0;
1199         enum radeon_connector_dither dither = RADEON_FMT_DITHER_DISABLE;
1200
1201         if (connector) {
1202                 struct radeon_connector *radeon_connector = to_radeon_connector(connector);
1203                 bpc = radeon_get_monitor_bpc(connector);
1204                 dither = radeon_connector->dither;
1205         }
1206
1207         /* LVDS/eDP FMT is set up by atom */
1208         if (radeon_encoder->devices & ATOM_DEVICE_LCD_SUPPORT)
1209                 return;
1210
1211         /* not needed for analog */
1212         if ((radeon_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC1) ||
1213             (radeon_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC2))
1214                 return;
1215
1216         if (bpc == 0)
1217                 return;
1218
1219         switch (bpc) {
1220         case 6:
1221                 if (dither == RADEON_FMT_DITHER_ENABLE)
1222                         /* XXX sort out optimal dither settings */
1223                         tmp |= (FMT_FRAME_RANDOM_ENABLE | FMT_HIGHPASS_RANDOM_ENABLE |
1224                                 FMT_SPATIAL_DITHER_EN);
1225                 else
1226                         tmp |= FMT_TRUNCATE_EN;
1227                 break;
1228         case 8:
1229                 if (dither == RADEON_FMT_DITHER_ENABLE)
1230                         /* XXX sort out optimal dither settings */
1231                         tmp |= (FMT_FRAME_RANDOM_ENABLE | FMT_HIGHPASS_RANDOM_ENABLE |
1232                                 FMT_RGB_RANDOM_ENABLE |
1233                                 FMT_SPATIAL_DITHER_EN | FMT_SPATIAL_DITHER_DEPTH);
1234                 else
1235                         tmp |= (FMT_TRUNCATE_EN | FMT_TRUNCATE_DEPTH);
1236                 break;
1237         case 10:
1238         default:
1239                 /* not needed */
1240                 break;
1241         }
1242
1243         WREG32(FMT_BIT_DEPTH_CONTROL + radeon_crtc->crtc_offset, tmp);
1244 }
1245
1246 static bool dce4_is_in_vblank(struct radeon_device *rdev, int crtc)
1247 {
1248         if (RREG32(EVERGREEN_CRTC_STATUS + crtc_offsets[crtc]) & EVERGREEN_CRTC_V_BLANK)
1249                 return true;
1250         else
1251                 return false;
1252 }
1253
1254 static bool dce4_is_counter_moving(struct radeon_device *rdev, int crtc)
1255 {
1256         u32 pos1, pos2;
1257
1258         pos1 = RREG32(EVERGREEN_CRTC_STATUS_POSITION + crtc_offsets[crtc]);
1259         pos2 = RREG32(EVERGREEN_CRTC_STATUS_POSITION + crtc_offsets[crtc]);
1260
1261         if (pos1 != pos2)
1262                 return true;
1263         else
1264                 return false;
1265 }
1266
1267 /**
1268  * dce4_wait_for_vblank - vblank wait asic callback.
1269  *
1270  * @rdev: radeon_device pointer
1271  * @crtc: crtc to wait for vblank on
1272  *
1273  * Wait for vblank on the requested crtc (evergreen+).
1274  */
1275 void dce4_wait_for_vblank(struct radeon_device *rdev, int crtc)
1276 {
1277         unsigned i = 0;
1278
1279         if (crtc >= rdev->num_crtc)
1280                 return;
1281
1282         if (!(RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[crtc]) & EVERGREEN_CRTC_MASTER_EN))
1283                 return;
1284
1285         /* depending on when we hit vblank, we may be close to active; if so,
1286          * wait for another frame.
1287          */
1288         while (dce4_is_in_vblank(rdev, crtc)) {
1289                 if (i++ % 100 == 0) {
1290                         if (!dce4_is_counter_moving(rdev, crtc))
1291                                 break;
1292                 }
1293         }
1294
1295         while (!dce4_is_in_vblank(rdev, crtc)) {
1296                 if (i++ % 100 == 0) {
1297                         if (!dce4_is_counter_moving(rdev, crtc))
1298                                 break;
1299                 }
1300         }
1301 }
1302
1303 /**
1304  * evergreen_page_flip - pageflip callback.
1305  *
1306  * @rdev: radeon_device pointer
1307  * @crtc_id: crtc to cleanup pageflip on
1308  * @crtc_base: new address of the crtc (GPU MC address)
1309  *
1310  * Does the actual pageflip (evergreen+).
1311  * During vblank we take the crtc lock and wait for the update_pending
1312  * bit to go high, when it does, we release the lock, and allow the
1313  * double buffered update to take place.
1314  * Returns the current update pending status.
1315  */
1316 void evergreen_page_flip(struct radeon_device *rdev, int crtc_id, u64 crtc_base)
1317 {
1318         struct radeon_crtc *radeon_crtc = rdev->mode_info.crtcs[crtc_id];
1319         u32 tmp = RREG32(EVERGREEN_GRPH_UPDATE + radeon_crtc->crtc_offset);
1320         int i;
1321
1322         /* Lock the graphics update lock */
1323         tmp |= EVERGREEN_GRPH_UPDATE_LOCK;
1324         WREG32(EVERGREEN_GRPH_UPDATE + radeon_crtc->crtc_offset, tmp);
1325
1326         /* update the scanout addresses */
1327         WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS_HIGH + radeon_crtc->crtc_offset,
1328                upper_32_bits(crtc_base));
1329         WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS + radeon_crtc->crtc_offset,
1330                (u32)crtc_base);
1331
1332         WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + radeon_crtc->crtc_offset,
1333                upper_32_bits(crtc_base));
1334         WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + radeon_crtc->crtc_offset,
1335                (u32)crtc_base);
1336
1337         /* Wait for update_pending to go high. */
1338         for (i = 0; i < rdev->usec_timeout; i++) {
1339                 if (RREG32(EVERGREEN_GRPH_UPDATE + radeon_crtc->crtc_offset) & EVERGREEN_GRPH_SURFACE_UPDATE_PENDING)
1340                         break;
1341                 udelay(1);
1342         }
1343         DRM_DEBUG("Update pending now high. Unlocking vupdate_lock.\n");
1344
1345         /* Unlock the lock, so double-buffering can take place inside vblank */
1346         tmp &= ~EVERGREEN_GRPH_UPDATE_LOCK;
1347         WREG32(EVERGREEN_GRPH_UPDATE + radeon_crtc->crtc_offset, tmp);
1348 }
1349
1350 /**
1351  * evergreen_page_flip_pending - check if page flip is still pending
1352  *
1353  * @rdev: radeon_device pointer
1354  * @crtc_id: crtc to check
1355  *
1356  * Returns the current update pending status.
1357  */
1358 bool evergreen_page_flip_pending(struct radeon_device *rdev, int crtc_id)
1359 {
1360         struct radeon_crtc *radeon_crtc = rdev->mode_info.crtcs[crtc_id];
1361
1362         /* Return current update_pending status: */
1363         return !!(RREG32(EVERGREEN_GRPH_UPDATE + radeon_crtc->crtc_offset) &
1364                 EVERGREEN_GRPH_SURFACE_UPDATE_PENDING);
1365 }
1366
/* get temperature in millidegrees */
int evergreen_get_temp(struct radeon_device *rdev)
{
	u32 temp, toffset;
	int actual_temp = 0;

	if (rdev->family == CHIP_JUNIPER) {
		/* Juniper applies a fused calibration offset to the raw ADC
		 * reading; bit 8 of toffset acts as its sign bit */
		toffset = (RREG32(CG_THERMAL_CTRL) & TOFFSET_MASK) >>
			TOFFSET_SHIFT;
		temp = (RREG32(CG_TS0_STATUS) & TS0_ADC_DOUT_MASK) >>
			TS0_ADC_DOUT_SHIFT;

		if (toffset & 0x100)
			actual_temp = temp / 2 - (0x200 - toffset);
		else
			actual_temp = temp / 2 + toffset;

		/* degrees C -> millidegrees C */
		actual_temp = actual_temp * 1000;

	} else {
		temp = (RREG32(CG_MULT_THERMAL_STATUS) & ASIC_T_MASK) >>
			ASIC_T_SHIFT;

		/* clamp the extreme readings, sign-extend mid-range
		 * negative values (NOTE(review): exact hardware field
		 * semantics inferred from the bit tests below) */
		if (temp & 0x400)
			actual_temp = -256;
		else if (temp & 0x200)
			actual_temp = 255;
		else if (temp & 0x100) {
			actual_temp = temp & 0x1ff;
			actual_temp |= ~0x1ff;	/* sign-extend the 9-bit value */
		} else
			actual_temp = temp & 0xff;

		/* reading is in half-degree units -> millidegrees C */
		actual_temp = (actual_temp * 1000) / 2;
	}

	return actual_temp;
}
1405
1406 int sumo_get_temp(struct radeon_device *rdev)
1407 {
1408         u32 temp = RREG32(CG_THERMAL_STATUS) & 0xff;
1409         int actual_temp = temp - 49;
1410
1411         return actual_temp * 1000;
1412 }
1413
1414 /**
1415  * sumo_pm_init_profile - Initialize power profiles callback.
1416  *
1417  * @rdev: radeon_device pointer
1418  *
1419  * Initialize the power states used in profile mode
1420  * (sumo, trinity, SI).
1421  * Used for profile mode only.
1422  */
1423 void sumo_pm_init_profile(struct radeon_device *rdev)
1424 {
1425         int idx;
1426
1427         /* default */
1428         rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_ps_idx = rdev->pm.default_power_state_index;
1429         rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_ps_idx = rdev->pm.default_power_state_index;
1430         rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_cm_idx = 0;
1431         rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_cm_idx = 0;
1432
1433         /* low,mid sh/mh */
1434         if (rdev->flags & RADEON_IS_MOBILITY)
1435                 idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_BATTERY, 0);
1436         else
1437                 idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_PERFORMANCE, 0);
1438
1439         rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_ps_idx = idx;
1440         rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_ps_idx = idx;
1441         rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_cm_idx = 0;
1442         rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_cm_idx = 0;
1443
1444         rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_ps_idx = idx;
1445         rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_ps_idx = idx;
1446         rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_cm_idx = 0;
1447         rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_cm_idx = 0;
1448
1449         rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_ps_idx = idx;
1450         rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_ps_idx = idx;
1451         rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_cm_idx = 0;
1452         rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_cm_idx = 0;
1453
1454         rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_ps_idx = idx;
1455         rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_ps_idx = idx;
1456         rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_cm_idx = 0;
1457         rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_cm_idx = 0;
1458
1459         /* high sh/mh */
1460         idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_PERFORMANCE, 0);
1461         rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_ps_idx = idx;
1462         rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_ps_idx = idx;
1463         rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_cm_idx = 0;
1464         rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_cm_idx =
1465                 rdev->pm.power_state[idx].num_clock_modes - 1;
1466
1467         rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_ps_idx = idx;
1468         rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_ps_idx = idx;
1469         rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_cm_idx = 0;
1470         rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_cm_idx =
1471                 rdev->pm.power_state[idx].num_clock_modes - 1;
1472 }
1473
1474 /**
1475  * btc_pm_init_profile - Initialize power profiles callback.
1476  *
1477  * @rdev: radeon_device pointer
1478  *
1479  * Initialize the power states used in profile mode
1480  * (BTC, cayman).
1481  * Used for profile mode only.
1482  */
1483 void btc_pm_init_profile(struct radeon_device *rdev)
1484 {
1485         int idx;
1486
1487         /* default */
1488         rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_ps_idx = rdev->pm.default_power_state_index;
1489         rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_ps_idx = rdev->pm.default_power_state_index;
1490         rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_cm_idx = 0;
1491         rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_cm_idx = 2;
1492         /* starting with BTC, there is one state that is used for both
1493          * MH and SH.  Difference is that we always use the high clock index for
1494          * mclk.
1495          */
1496         if (rdev->flags & RADEON_IS_MOBILITY)
1497                 idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_BATTERY, 0);
1498         else
1499                 idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_PERFORMANCE, 0);
1500         /* low sh */
1501         rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_ps_idx = idx;
1502         rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_ps_idx = idx;
1503         rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_cm_idx = 0;
1504         rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_cm_idx = 0;
1505         /* mid sh */
1506         rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_ps_idx = idx;
1507         rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_ps_idx = idx;
1508         rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_cm_idx = 0;
1509         rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_cm_idx = 1;
1510         /* high sh */
1511         rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_ps_idx = idx;
1512         rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_ps_idx = idx;
1513         rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_cm_idx = 0;
1514         rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_cm_idx = 2;
1515         /* low mh */
1516         rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_ps_idx = idx;
1517         rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_ps_idx = idx;
1518         rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_cm_idx = 0;
1519         rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_cm_idx = 0;
1520         /* mid mh */
1521         rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_ps_idx = idx;
1522         rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_ps_idx = idx;
1523         rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_cm_idx = 0;
1524         rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_cm_idx = 1;
1525         /* high mh */
1526         rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_ps_idx = idx;
1527         rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_ps_idx = idx;
1528         rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_cm_idx = 0;
1529         rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_cm_idx = 2;
1530 }
1531
1532 /**
1533  * evergreen_pm_misc - set additional pm hw parameters callback.
1534  *
1535  * @rdev: radeon_device pointer
1536  *
1537  * Set non-clock parameters associated with a power state
1538  * (voltage, etc.) (evergreen+).
1539  */
void evergreen_pm_misc(struct radeon_device *rdev)
{
	int req_ps_idx = rdev->pm.requested_power_state_index;
	int req_cm_idx = rdev->pm.requested_clock_mode_index;
	struct radeon_power_state *ps = &rdev->pm.power_state[req_ps_idx];
	struct radeon_voltage *voltage = &ps->clock_info[req_cm_idx].voltage;

	if (voltage->type == VOLTAGE_SW) {
		/* 0xff0x are flags rather than an actual voltage */
		if ((voltage->voltage & 0xff00) == 0xff00)
			return;
		/* only touch the regulator when vddc actually changes */
		if (voltage->voltage && (voltage->voltage != rdev->pm.current_vddc)) {
			radeon_atom_set_voltage(rdev, voltage->voltage, SET_VOLTAGE_TYPE_ASIC_VDDC);
			rdev->pm.current_vddc = voltage->voltage;
			DRM_DEBUG("Setting: vddc: %d\n", voltage->voltage);
		}

		/* starting with BTC, there is one state that is used for both
		 * MH and SH.  Difference is that we always use the high clock index for
		 * mclk and vddci.
		 */
		if ((rdev->pm.pm_method == PM_METHOD_PROFILE) &&
		    (rdev->family >= CHIP_BARTS) &&
		    rdev->pm.active_crtc_count &&
		    ((rdev->pm.profile_index == PM_PROFILE_MID_MH_IDX) ||
		     (rdev->pm.profile_index == PM_PROFILE_LOW_MH_IDX)))
			voltage = &rdev->pm.power_state[req_ps_idx].
				clock_info[rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_cm_idx].voltage;

		/* 0xff0x are flags rather than an actual voltage */
		if ((voltage->vddci & 0xff00) == 0xff00)
			return;
		/* only touch the regulator when vddci actually changes */
		if (voltage->vddci && (voltage->vddci != rdev->pm.current_vddci)) {
			radeon_atom_set_voltage(rdev, voltage->vddci, SET_VOLTAGE_TYPE_ASIC_VDDCI);
			rdev->pm.current_vddci = voltage->vddci;
			DRM_DEBUG("Setting: vddci: %d\n", voltage->vddci);
		}
	}
}
1579
1580 /**
1581  * evergreen_pm_prepare - pre-power state change callback.
1582  *
1583  * @rdev: radeon_device pointer
1584  *
1585  * Prepare for a power state change (evergreen+).
1586  */
1587 void evergreen_pm_prepare(struct radeon_device *rdev)
1588 {
1589         struct drm_device *ddev = rdev->ddev;
1590         struct drm_crtc *crtc;
1591         struct radeon_crtc *radeon_crtc;
1592         u32 tmp;
1593
1594         /* disable any active CRTCs */
1595         list_for_each_entry(crtc, &ddev->mode_config.crtc_list, head) {
1596                 radeon_crtc = to_radeon_crtc(crtc);
1597                 if (radeon_crtc->enabled) {
1598                         tmp = RREG32(EVERGREEN_CRTC_CONTROL + radeon_crtc->crtc_offset);
1599                         tmp |= EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE;
1600                         WREG32(EVERGREEN_CRTC_CONTROL + radeon_crtc->crtc_offset, tmp);
1601                 }
1602         }
1603 }
1604
1605 /**
1606  * evergreen_pm_finish - post-power state change callback.
1607  *
1608  * @rdev: radeon_device pointer
1609  *
1610  * Clean up after a power state change (evergreen+).
1611  */
1612 void evergreen_pm_finish(struct radeon_device *rdev)
1613 {
1614         struct drm_device *ddev = rdev->ddev;
1615         struct drm_crtc *crtc;
1616         struct radeon_crtc *radeon_crtc;
1617         u32 tmp;
1618
1619         /* enable any active CRTCs */
1620         list_for_each_entry(crtc, &ddev->mode_config.crtc_list, head) {
1621                 radeon_crtc = to_radeon_crtc(crtc);
1622                 if (radeon_crtc->enabled) {
1623                         tmp = RREG32(EVERGREEN_CRTC_CONTROL + radeon_crtc->crtc_offset);
1624                         tmp &= ~EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE;
1625                         WREG32(EVERGREEN_CRTC_CONTROL + radeon_crtc->crtc_offset, tmp);
1626                 }
1627         }
1628 }
1629
1630 /**
1631  * evergreen_hpd_sense - hpd sense callback.
1632  *
1633  * @rdev: radeon_device pointer
1634  * @hpd: hpd (hotplug detect) pin
1635  *
1636  * Checks if a digital monitor is connected (evergreen+).
1637  * Returns true if connected, false if not connected.
1638  */
1639 bool evergreen_hpd_sense(struct radeon_device *rdev, enum radeon_hpd_id hpd)
1640 {
1641         bool connected = false;
1642
1643         switch (hpd) {
1644         case RADEON_HPD_1:
1645                 if (RREG32(DC_HPD1_INT_STATUS) & DC_HPDx_SENSE)
1646                         connected = true;
1647                 break;
1648         case RADEON_HPD_2:
1649                 if (RREG32(DC_HPD2_INT_STATUS) & DC_HPDx_SENSE)
1650                         connected = true;
1651                 break;
1652         case RADEON_HPD_3:
1653                 if (RREG32(DC_HPD3_INT_STATUS) & DC_HPDx_SENSE)
1654                         connected = true;
1655                 break;
1656         case RADEON_HPD_4:
1657                 if (RREG32(DC_HPD4_INT_STATUS) & DC_HPDx_SENSE)
1658                         connected = true;
1659                 break;
1660         case RADEON_HPD_5:
1661                 if (RREG32(DC_HPD5_INT_STATUS) & DC_HPDx_SENSE)
1662                         connected = true;
1663                 break;
1664         case RADEON_HPD_6:
1665                 if (RREG32(DC_HPD6_INT_STATUS) & DC_HPDx_SENSE)
1666                         connected = true;
1667                 break;
1668         default:
1669                 break;
1670         }
1671
1672         return connected;
1673 }
1674
1675 /**
1676  * evergreen_hpd_set_polarity - hpd set polarity callback.
1677  *
1678  * @rdev: radeon_device pointer
1679  * @hpd: hpd (hotplug detect) pin
1680  *
1681  * Set the polarity of the hpd pin (evergreen+).
1682  */
1683 void evergreen_hpd_set_polarity(struct radeon_device *rdev,
1684                                 enum radeon_hpd_id hpd)
1685 {
1686         u32 tmp;
1687         bool connected = evergreen_hpd_sense(rdev, hpd);
1688
1689         switch (hpd) {
1690         case RADEON_HPD_1:
1691                 tmp = RREG32(DC_HPD1_INT_CONTROL);
1692                 if (connected)
1693                         tmp &= ~DC_HPDx_INT_POLARITY;
1694                 else
1695                         tmp |= DC_HPDx_INT_POLARITY;
1696                 WREG32(DC_HPD1_INT_CONTROL, tmp);
1697                 break;
1698         case RADEON_HPD_2:
1699                 tmp = RREG32(DC_HPD2_INT_CONTROL);
1700                 if (connected)
1701                         tmp &= ~DC_HPDx_INT_POLARITY;
1702                 else
1703                         tmp |= DC_HPDx_INT_POLARITY;
1704                 WREG32(DC_HPD2_INT_CONTROL, tmp);
1705                 break;
1706         case RADEON_HPD_3:
1707                 tmp = RREG32(DC_HPD3_INT_CONTROL);
1708                 if (connected)
1709                         tmp &= ~DC_HPDx_INT_POLARITY;
1710                 else
1711                         tmp |= DC_HPDx_INT_POLARITY;
1712                 WREG32(DC_HPD3_INT_CONTROL, tmp);
1713                 break;
1714         case RADEON_HPD_4:
1715                 tmp = RREG32(DC_HPD4_INT_CONTROL);
1716                 if (connected)
1717                         tmp &= ~DC_HPDx_INT_POLARITY;
1718                 else
1719                         tmp |= DC_HPDx_INT_POLARITY;
1720                 WREG32(DC_HPD4_INT_CONTROL, tmp);
1721                 break;
1722         case RADEON_HPD_5:
1723                 tmp = RREG32(DC_HPD5_INT_CONTROL);
1724                 if (connected)
1725                         tmp &= ~DC_HPDx_INT_POLARITY;
1726                 else
1727                         tmp |= DC_HPDx_INT_POLARITY;
1728                 WREG32(DC_HPD5_INT_CONTROL, tmp);
1729                         break;
1730         case RADEON_HPD_6:
1731                 tmp = RREG32(DC_HPD6_INT_CONTROL);
1732                 if (connected)
1733                         tmp &= ~DC_HPDx_INT_POLARITY;
1734                 else
1735                         tmp |= DC_HPDx_INT_POLARITY;
1736                 WREG32(DC_HPD6_INT_CONTROL, tmp);
1737                 break;
1738         default:
1739                 break;
1740         }
1741 }
1742
1743 /**
1744  * evergreen_hpd_init - hpd setup callback.
1745  *
1746  * @rdev: radeon_device pointer
1747  *
1748  * Setup the hpd pins used by the card (evergreen+).
1749  * Enable the pin, set the polarity, and enable the hpd interrupts.
1750  */
1751 void evergreen_hpd_init(struct radeon_device *rdev)
1752 {
1753         struct drm_device *dev = rdev->ddev;
1754         struct drm_connector *connector;
1755         unsigned enabled = 0;
1756         u32 tmp = DC_HPDx_CONNECTION_TIMER(0x9c4) |
1757                 DC_HPDx_RX_INT_TIMER(0xfa) | DC_HPDx_EN;
1758
1759         list_for_each_entry(connector, &dev->mode_config.connector_list, head) {
1760                 struct radeon_connector *radeon_connector = to_radeon_connector(connector);
1761
1762                 if (connector->connector_type == DRM_MODE_CONNECTOR_eDP ||
1763                     connector->connector_type == DRM_MODE_CONNECTOR_LVDS) {
1764                         /* don't try to enable hpd on eDP or LVDS avoid breaking the
1765                          * aux dp channel on imac and help (but not completely fix)
1766                          * https://bugzilla.redhat.com/show_bug.cgi?id=726143
1767                          * also avoid interrupt storms during dpms.
1768                          */
1769                         continue;
1770                 }
1771                 switch (radeon_connector->hpd.hpd) {
1772                 case RADEON_HPD_1:
1773                         WREG32(DC_HPD1_CONTROL, tmp);
1774                         break;
1775                 case RADEON_HPD_2:
1776                         WREG32(DC_HPD2_CONTROL, tmp);
1777                         break;
1778                 case RADEON_HPD_3:
1779                         WREG32(DC_HPD3_CONTROL, tmp);
1780                         break;
1781                 case RADEON_HPD_4:
1782                         WREG32(DC_HPD4_CONTROL, tmp);
1783                         break;
1784                 case RADEON_HPD_5:
1785                         WREG32(DC_HPD5_CONTROL, tmp);
1786                         break;
1787                 case RADEON_HPD_6:
1788                         WREG32(DC_HPD6_CONTROL, tmp);
1789                         break;
1790                 default:
1791                         break;
1792                 }
1793                 radeon_hpd_set_polarity(rdev, radeon_connector->hpd.hpd);
1794                 enabled |= 1 << radeon_connector->hpd.hpd;
1795         }
1796         radeon_irq_kms_enable_hpd(rdev, enabled);
1797 }
1798
1799 /**
1800  * evergreen_hpd_fini - hpd tear down callback.
1801  *
1802  * @rdev: radeon_device pointer
1803  *
1804  * Tear down the hpd pins used by the card (evergreen+).
1805  * Disable the hpd interrupts.
1806  */
1807 void evergreen_hpd_fini(struct radeon_device *rdev)
1808 {
1809         struct drm_device *dev = rdev->ddev;
1810         struct drm_connector *connector;
1811         unsigned disabled = 0;
1812
1813         list_for_each_entry(connector, &dev->mode_config.connector_list, head) {
1814                 struct radeon_connector *radeon_connector = to_radeon_connector(connector);
1815                 switch (radeon_connector->hpd.hpd) {
1816                 case RADEON_HPD_1:
1817                         WREG32(DC_HPD1_CONTROL, 0);
1818                         break;
1819                 case RADEON_HPD_2:
1820                         WREG32(DC_HPD2_CONTROL, 0);
1821                         break;
1822                 case RADEON_HPD_3:
1823                         WREG32(DC_HPD3_CONTROL, 0);
1824                         break;
1825                 case RADEON_HPD_4:
1826                         WREG32(DC_HPD4_CONTROL, 0);
1827                         break;
1828                 case RADEON_HPD_5:
1829                         WREG32(DC_HPD5_CONTROL, 0);
1830                         break;
1831                 case RADEON_HPD_6:
1832                         WREG32(DC_HPD6_CONTROL, 0);
1833                         break;
1834                 default:
1835                         break;
1836                 }
1837                 disabled |= 1 << radeon_connector->hpd.hpd;
1838         }
1839         radeon_irq_kms_disable_hpd(rdev, disabled);
1840 }
1841
1842 /* watermark setup */
1843
/* Program the line buffer split for a crtc and return the number of
 * line buffer entries (in pixels) allocated to it; 0 if the crtc is
 * disabled.
 */
static u32 evergreen_line_buffer_adjust(struct radeon_device *rdev,
					struct radeon_crtc *radeon_crtc,
					struct drm_display_mode *mode,
					struct drm_display_mode *other_mode)
{
	u32 tmp, buffer_alloc, i;
	u32 pipe_offset = radeon_crtc->crtc_id * 0x20;
	/*
	 * Line Buffer Setup
	 * There are 3 line buffers, each one shared by 2 display controllers.
	 * DC_LB_MEMORY_SPLIT controls how that line buffer is shared between
	 * the display controllers.  The partitioning is done via one of four
	 * preset allocations specified in bits 2:0:
	 * first display controller
	 *  0 - first half of lb (3840 * 2)
	 *  1 - first 3/4 of lb (5760 * 2)
	 *  2 - whole lb (7680 * 2), other crtc must be disabled
	 *  3 - first 1/4 of lb (1920 * 2)
	 * second display controller
	 *  4 - second half of lb (3840 * 2)
	 *  5 - second 3/4 of lb (5760 * 2)
	 *  6 - whole lb (7680 * 2), other crtc must be disabled
	 *  7 - last 1/4 of lb (1920 * 2)
	 */
	/* this can get tricky if we have two large displays on a paired group
	 * of crtcs.  Ideally for multiple large displays we'd assign them to
	 * non-linked crtcs for maximum line buffer allocation.
	 */
	if (radeon_crtc->base.enabled && mode) {
		if (other_mode) {
			/* the paired crtc is also active: split the lb */
			tmp = 0; /* 1/2 */
			buffer_alloc = 1;
		} else {
			tmp = 2; /* whole */
			buffer_alloc = 2;
		}
	} else {
		/* crtc disabled: no lb needed */
		tmp = 0;
		buffer_alloc = 0;
	}

	/* second controller of the pair uses second half of the lb */
	if (radeon_crtc->crtc_id % 2)
		tmp += 4;
	WREG32(DC_LB_MEMORY_SPLIT + radeon_crtc->crtc_offset, tmp);

	if (ASIC_IS_DCE41(rdev) || ASIC_IS_DCE5(rdev)) {
		/* request the DMIF buffer allocation and poll for the
		 * hardware to acknowledge completion (bounded busy-wait)
		 */
		WREG32(PIPE0_DMIF_BUFFER_CONTROL + pipe_offset,
		       DMIF_BUFFERS_ALLOCATED(buffer_alloc));
		for (i = 0; i < rdev->usec_timeout; i++) {
			if (RREG32(PIPE0_DMIF_BUFFER_CONTROL + pipe_offset) &
			    DMIF_BUFFERS_ALLOCATED_COMPLETED)
				break;
			udelay(1);
		}
	}

	if (radeon_crtc->base.enabled && mode) {
		/* translate the split preset into the lb size in pixels;
		 * DCE5 parts have larger line buffers than DCE4
		 */
		switch (tmp) {
		case 0:
		case 4:
		default:
			if (ASIC_IS_DCE5(rdev))
				return 4096 * 2;
			else
				return 3840 * 2;
		case 1:
		case 5:
			if (ASIC_IS_DCE5(rdev))
				return 6144 * 2;
			else
				return 5760 * 2;
		case 2:
		case 6:
			if (ASIC_IS_DCE5(rdev))
				return 8192 * 2;
			else
				return 7680 * 2;
		case 3:
		case 7:
			if (ASIC_IS_DCE5(rdev))
				return 2048 * 2;
			else
				return 1920 * 2;
		}
	}

	/* controller not enabled, so no lb used */
	return 0;
}
1934
1935 u32 evergreen_get_number_of_dram_channels(struct radeon_device *rdev)
1936 {
1937         u32 tmp = RREG32(MC_SHARED_CHMAP);
1938
1939         switch ((tmp & NOOFCHAN_MASK) >> NOOFCHAN_SHIFT) {
1940         case 0:
1941         default:
1942                 return 1;
1943         case 1:
1944                 return 2;
1945         case 2:
1946                 return 4;
1947         case 3:
1948                 return 8;
1949         }
1950 }
1951
/* Input parameters for the evergreen display watermark calculations
 * (filled in by evergreen_program_watermarks for one crtc).
 */
struct evergreen_wm_params {
	u32 dram_channels; /* number of dram channels */
	u32 yclk;          /* bandwidth per dram data pin in kHz */
	u32 sclk;          /* engine clock in kHz */
	u32 disp_clk;      /* display clock in kHz */
	u32 src_width;     /* viewport width */
	u32 active_time;   /* active display time in ns */
	u32 blank_time;    /* blank time in ns */
	bool interlaced;    /* mode is interlaced */
	fixed20_12 vsc;    /* vertical scale ratio */
	u32 num_heads;     /* number of active crtcs */
	u32 bytes_per_pixel; /* bytes per pixel display + overlay */
	u32 lb_size;       /* line buffer allocated to pipe */
	u32 vtaps;         /* vertical scaler taps */
};
1967
1968 static u32 evergreen_dram_bandwidth(struct evergreen_wm_params *wm)
1969 {
1970         /* Calculate DRAM Bandwidth and the part allocated to display. */
1971         fixed20_12 dram_efficiency; /* 0.7 */
1972         fixed20_12 yclk, dram_channels, bandwidth;
1973         fixed20_12 a;
1974
1975         a.full = dfixed_const(1000);
1976         yclk.full = dfixed_const(wm->yclk);
1977         yclk.full = dfixed_div(yclk, a);
1978         dram_channels.full = dfixed_const(wm->dram_channels * 4);
1979         a.full = dfixed_const(10);
1980         dram_efficiency.full = dfixed_const(7);
1981         dram_efficiency.full = dfixed_div(dram_efficiency, a);
1982         bandwidth.full = dfixed_mul(dram_channels, yclk);
1983         bandwidth.full = dfixed_mul(bandwidth, dram_efficiency);
1984
1985         return dfixed_trunc(bandwidth);
1986 }
1987
1988 static u32 evergreen_dram_bandwidth_for_display(struct evergreen_wm_params *wm)
1989 {
1990         /* Calculate DRAM Bandwidth and the part allocated to display. */
1991         fixed20_12 disp_dram_allocation; /* 0.3 to 0.7 */
1992         fixed20_12 yclk, dram_channels, bandwidth;
1993         fixed20_12 a;
1994
1995         a.full = dfixed_const(1000);
1996         yclk.full = dfixed_const(wm->yclk);
1997         yclk.full = dfixed_div(yclk, a);
1998         dram_channels.full = dfixed_const(wm->dram_channels * 4);
1999         a.full = dfixed_const(10);
2000         disp_dram_allocation.full = dfixed_const(3); /* XXX worse case value 0.3 */
2001         disp_dram_allocation.full = dfixed_div(disp_dram_allocation, a);
2002         bandwidth.full = dfixed_mul(dram_channels, yclk);
2003         bandwidth.full = dfixed_mul(bandwidth, disp_dram_allocation);
2004
2005         return dfixed_trunc(bandwidth);
2006 }
2007
2008 static u32 evergreen_data_return_bandwidth(struct evergreen_wm_params *wm)
2009 {
2010         /* Calculate the display Data return Bandwidth */
2011         fixed20_12 return_efficiency; /* 0.8 */
2012         fixed20_12 sclk, bandwidth;
2013         fixed20_12 a;
2014
2015         a.full = dfixed_const(1000);
2016         sclk.full = dfixed_const(wm->sclk);
2017         sclk.full = dfixed_div(sclk, a);
2018         a.full = dfixed_const(10);
2019         return_efficiency.full = dfixed_const(8);
2020         return_efficiency.full = dfixed_div(return_efficiency, a);
2021         a.full = dfixed_const(32);
2022         bandwidth.full = dfixed_mul(a, sclk);
2023         bandwidth.full = dfixed_mul(bandwidth, return_efficiency);
2024
2025         return dfixed_trunc(bandwidth);
2026 }
2027
2028 static u32 evergreen_dmif_request_bandwidth(struct evergreen_wm_params *wm)
2029 {
2030         /* Calculate the DMIF Request Bandwidth */
2031         fixed20_12 disp_clk_request_efficiency; /* 0.8 */
2032         fixed20_12 disp_clk, bandwidth;
2033         fixed20_12 a;
2034
2035         a.full = dfixed_const(1000);
2036         disp_clk.full = dfixed_const(wm->disp_clk);
2037         disp_clk.full = dfixed_div(disp_clk, a);
2038         a.full = dfixed_const(10);
2039         disp_clk_request_efficiency.full = dfixed_const(8);
2040         disp_clk_request_efficiency.full = dfixed_div(disp_clk_request_efficiency, a);
2041         a.full = dfixed_const(32);
2042         bandwidth.full = dfixed_mul(a, disp_clk);
2043         bandwidth.full = dfixed_mul(bandwidth, disp_clk_request_efficiency);
2044
2045         return dfixed_trunc(bandwidth);
2046 }
2047
2048 static u32 evergreen_available_bandwidth(struct evergreen_wm_params *wm)
2049 {
2050         /* Calculate the Available bandwidth. Display can use this temporarily but not in average. */
2051         u32 dram_bandwidth = evergreen_dram_bandwidth(wm);
2052         u32 data_return_bandwidth = evergreen_data_return_bandwidth(wm);
2053         u32 dmif_req_bandwidth = evergreen_dmif_request_bandwidth(wm);
2054
2055         return min(dram_bandwidth, min(data_return_bandwidth, dmif_req_bandwidth));
2056 }
2057
2058 static u32 evergreen_average_bandwidth(struct evergreen_wm_params *wm)
2059 {
2060         /* Calculate the display mode Average Bandwidth
2061          * DisplayMode should contain the source and destination dimensions,
2062          * timing, etc.
2063          */
2064         fixed20_12 bpp;
2065         fixed20_12 line_time;
2066         fixed20_12 src_width;
2067         fixed20_12 bandwidth;
2068         fixed20_12 a;
2069
2070         a.full = dfixed_const(1000);
2071         line_time.full = dfixed_const(wm->active_time + wm->blank_time);
2072         line_time.full = dfixed_div(line_time, a);
2073         bpp.full = dfixed_const(wm->bytes_per_pixel);
2074         src_width.full = dfixed_const(wm->src_width);
2075         bandwidth.full = dfixed_mul(src_width, bpp);
2076         bandwidth.full = dfixed_mul(bandwidth, wm->vsc);
2077         bandwidth.full = dfixed_div(bandwidth, line_time);
2078
2079         return dfixed_trunc(bandwidth);
2080 }
2081
static u32 evergreen_latency_watermark(struct evergreen_wm_params *wm)
{
	/* First calculate the latency in ns */
	u32 mc_latency = 2000; /* 2000 ns. */
	u32 available_bandwidth = evergreen_available_bandwidth(wm);
	/* worst-case time for one 512-byte chunk (x8, x1000 for ns) */
	u32 worst_chunk_return_time = (512 * 8 * 1000) / available_bandwidth;
	u32 cursor_line_pair_return_time = (128 * 4 * 1000) / available_bandwidth;
	u32 dc_latency = 40000000 / wm->disp_clk; /* dc pipe latency */
	/* time spent waiting for the other heads' outstanding requests */
	u32 other_heads_data_return_time = ((wm->num_heads + 1) * worst_chunk_return_time) +
		(wm->num_heads * cursor_line_pair_return_time);
	u32 latency = mc_latency + other_heads_data_return_time + dc_latency;
	u32 max_src_lines_per_dst_line, lb_fill_bw, line_fill_time;
	fixed20_12 a, b, c;

	if (wm->num_heads == 0)
		return 0;

	a.full = dfixed_const(2);
	b.full = dfixed_const(1);
	/* heavy downscaling, many vtaps, or interlaced output requires 4
	 * source lines per destination line; otherwise 2 suffice
	 */
	if ((wm->vsc.full > a.full) ||
	    ((wm->vsc.full > b.full) && (wm->vtaps >= 3)) ||
	    (wm->vtaps >= 5) ||
	    ((wm->vsc.full >= a.full) && wm->interlaced))
		max_src_lines_per_dst_line = 4;
	else
		max_src_lines_per_dst_line = 2;

	/* lb fill rate = min(this head's share of available bandwidth,
	 * disp_clk * bytes_per_pixel)
	 */
	a.full = dfixed_const(available_bandwidth);
	b.full = dfixed_const(wm->num_heads);
	a.full = dfixed_div(a, b);

	b.full = dfixed_const(1000);
	c.full = dfixed_const(wm->disp_clk);
	b.full = dfixed_div(c, b);
	c.full = dfixed_const(wm->bytes_per_pixel);
	b.full = dfixed_mul(b, c);

	lb_fill_bw = min(dfixed_trunc(a), dfixed_trunc(b));

	/* time to fill one destination line's worth of source data */
	a.full = dfixed_const(max_src_lines_per_dst_line * wm->src_width * wm->bytes_per_pixel);
	b.full = dfixed_const(1000);
	c.full = dfixed_const(lb_fill_bw);
	b.full = dfixed_div(c, b);
	a.full = dfixed_div(a, b);
	line_fill_time = dfixed_trunc(a);

	/* if the line fills within the active period the latency alone is
	 * the watermark; otherwise add the overrun
	 */
	if (line_fill_time < wm->active_time)
		return latency;
	else
		return latency + (line_fill_time - wm->active_time);

}
2134
2135 static bool evergreen_average_bandwidth_vs_dram_bandwidth_for_display(struct evergreen_wm_params *wm)
2136 {
2137         if (evergreen_average_bandwidth(wm) <=
2138             (evergreen_dram_bandwidth_for_display(wm) / wm->num_heads))
2139                 return true;
2140         else
2141                 return false;
2142 };
2143
2144 static bool evergreen_average_bandwidth_vs_available_bandwidth(struct evergreen_wm_params *wm)
2145 {
2146         if (evergreen_average_bandwidth(wm) <=
2147             (evergreen_available_bandwidth(wm) / wm->num_heads))
2148                 return true;
2149         else
2150                 return false;
2151 };
2152
2153 static bool evergreen_check_latency_hiding(struct evergreen_wm_params *wm)
2154 {
2155         u32 lb_partitions = wm->lb_size / wm->src_width;
2156         u32 line_time = wm->active_time + wm->blank_time;
2157         u32 latency_tolerant_lines;
2158         u32 latency_hiding;
2159         fixed20_12 a;
2160
2161         a.full = dfixed_const(1);
2162         if (wm->vsc.full > a.full)
2163                 latency_tolerant_lines = 1;
2164         else {
2165                 if (lb_partitions <= (wm->vtaps + 1))
2166                         latency_tolerant_lines = 1;
2167                 else
2168                         latency_tolerant_lines = 2;
2169         }
2170
2171         latency_hiding = (latency_tolerant_lines * line_time + wm->blank_time);
2172
2173         if (evergreen_latency_watermark(wm) <= latency_hiding)
2174                 return true;
2175         else
2176                 return false;
2177 }
2178
2179 static void evergreen_program_watermarks(struct radeon_device *rdev,
2180                                          struct radeon_crtc *radeon_crtc,
2181                                          u32 lb_size, u32 num_heads)
2182 {
2183         struct drm_display_mode *mode = &radeon_crtc->base.mode;
2184         struct evergreen_wm_params wm_low, wm_high;
2185         u32 dram_channels;
2186         u32 pixel_period;
2187         u32 line_time = 0;
2188         u32 latency_watermark_a = 0, latency_watermark_b = 0;
2189         u32 priority_a_mark = 0, priority_b_mark = 0;
2190         u32 priority_a_cnt = PRIORITY_OFF;
2191         u32 priority_b_cnt = PRIORITY_OFF;
2192         u32 pipe_offset = radeon_crtc->crtc_id * 16;
2193         u32 tmp, arb_control3;
2194         fixed20_12 a, b, c;
2195
2196         if (radeon_crtc->base.enabled && num_heads && mode) {
2197                 pixel_period = 1000000 / (u32)mode->clock;
2198                 line_time = min((u32)mode->crtc_htotal * pixel_period, (u32)65535);
2199                 priority_a_cnt = 0;
2200                 priority_b_cnt = 0;
2201                 dram_channels = evergreen_get_number_of_dram_channels(rdev);
2202
2203                 /* watermark for high clocks */
2204                 if ((rdev->pm.pm_method == PM_METHOD_DPM) && rdev->pm.dpm_enabled) {
2205                         wm_high.yclk =
2206                                 radeon_dpm_get_mclk(rdev, false) * 10;
2207                         wm_high.sclk =
2208                                 radeon_dpm_get_sclk(rdev, false) * 10;
2209                 } else {
2210                         wm_high.yclk = rdev->pm.current_mclk * 10;
2211                         wm_high.sclk = rdev->pm.current_sclk * 10;
2212                 }
2213
2214                 wm_high.disp_clk = mode->clock;
2215                 wm_high.src_width = mode->crtc_hdisplay;
2216                 wm_high.active_time = mode->crtc_hdisplay * pixel_period;
2217                 wm_high.blank_time = line_time - wm_high.active_time;
2218                 wm_high.interlaced = false;
2219                 if (mode->flags & DRM_MODE_FLAG_INTERLACE)
2220                         wm_high.interlaced = true;
2221                 wm_high.vsc = radeon_crtc->vsc;
2222                 wm_high.vtaps = 1;
2223                 if (radeon_crtc->rmx_type != RMX_OFF)
2224                         wm_high.vtaps = 2;
2225                 wm_high.bytes_per_pixel = 4; /* XXX: get this from fb config */
2226                 wm_high.lb_size = lb_size;
2227                 wm_high.dram_channels = dram_channels;
2228                 wm_high.num_heads = num_heads;
2229
2230                 /* watermark for low clocks */
2231                 if ((rdev->pm.pm_method == PM_METHOD_DPM) && rdev->pm.dpm_enabled) {
2232                         wm_low.yclk =
2233                                 radeon_dpm_get_mclk(rdev, true) * 10;
2234                         wm_low.sclk =
2235                                 radeon_dpm_get_sclk(rdev, true) * 10;
2236                 } else {
2237                         wm_low.yclk = rdev->pm.current_mclk * 10;
2238                         wm_low.sclk = rdev->pm.current_sclk * 10;
2239                 }
2240
2241                 wm_low.disp_clk = mode->clock;
2242                 wm_low.src_width = mode->crtc_hdisplay;
2243                 wm_low.active_time = mode->crtc_hdisplay * pixel_period;
2244                 wm_low.blank_time = line_time - wm_low.active_time;
2245                 wm_low.interlaced = false;
2246                 if (mode->flags & DRM_MODE_FLAG_INTERLACE)
2247                         wm_low.interlaced = true;
2248                 wm_low.vsc = radeon_crtc->vsc;
2249                 wm_low.vtaps = 1;
2250                 if (radeon_crtc->rmx_type != RMX_OFF)
2251                         wm_low.vtaps = 2;
2252                 wm_low.bytes_per_pixel = 4; /* XXX: get this from fb config */
2253                 wm_low.lb_size = lb_size;
2254                 wm_low.dram_channels = dram_channels;
2255                 wm_low.num_heads = num_heads;
2256
2257                 /* set for high clocks */
2258                 latency_watermark_a = min(evergreen_latency_watermark(&wm_high), (u32)65535);
2259                 /* set for low clocks */
2260                 latency_watermark_b = min(evergreen_latency_watermark(&wm_low), (u32)65535);
2261
2262                 /* possibly force display priority to high */
2263                 /* should really do this at mode validation time... */
2264                 if (!evergreen_average_bandwidth_vs_dram_bandwidth_for_display(&wm_high) ||
2265                     !evergreen_average_bandwidth_vs_available_bandwidth(&wm_high) ||
2266                     !evergreen_check_latency_hiding(&wm_high) ||
2267                     (rdev->disp_priority == 2)) {
2268                         DRM_DEBUG_KMS("force priority a to high\n");
2269                         priority_a_cnt |= PRIORITY_ALWAYS_ON;
2270                 }
2271                 if (!evergreen_average_bandwidth_vs_dram_bandwidth_for_display(&wm_low) ||
2272                     !evergreen_average_bandwidth_vs_available_bandwidth(&wm_low) ||
2273                     !evergreen_check_latency_hiding(&wm_low) ||
2274                     (rdev->disp_priority == 2)) {
2275                         DRM_DEBUG_KMS("force priority b to high\n");
2276                         priority_b_cnt |= PRIORITY_ALWAYS_ON;
2277                 }
2278
2279                 a.full = dfixed_const(1000);
2280                 b.full = dfixed_const(mode->clock);
2281                 b.full = dfixed_div(b, a);
2282                 c.full = dfixed_const(latency_watermark_a);
2283                 c.full = dfixed_mul(c, b);
2284                 c.full = dfixed_mul(c, radeon_crtc->hsc);
2285                 c.full = dfixed_div(c, a);
2286                 a.full = dfixed_const(16);
2287                 c.full = dfixed_div(c, a);
2288                 priority_a_mark = dfixed_trunc(c);
2289                 priority_a_cnt |= priority_a_mark & PRIORITY_MARK_MASK;
2290
2291                 a.full = dfixed_const(1000);
2292                 b.full = dfixed_const(mode->clock);
2293                 b.full = dfixed_div(b, a);
2294                 c.full = dfixed_const(latency_watermark_b);
2295                 c.full = dfixed_mul(c, b);
2296                 c.full = dfixed_mul(c, radeon_crtc->hsc);
2297                 c.full = dfixed_div(c, a);
2298                 a.full = dfixed_const(16);
2299                 c.full = dfixed_div(c, a);
2300                 priority_b_mark = dfixed_trunc(c);
2301                 priority_b_cnt |= priority_b_mark & PRIORITY_MARK_MASK;
2302         }
2303
2304         /* select wm A */
2305         arb_control3 = RREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset);
2306         tmp = arb_control3;
2307         tmp &= ~LATENCY_WATERMARK_MASK(3);
2308         tmp |= LATENCY_WATERMARK_MASK(1);
2309         WREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset, tmp);
2310         WREG32(PIPE0_LATENCY_CONTROL + pipe_offset,
2311                (LATENCY_LOW_WATERMARK(latency_watermark_a) |
2312                 LATENCY_HIGH_WATERMARK(line_time)));
2313         /* select wm B */
2314         tmp = RREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset);
2315         tmp &= ~LATENCY_WATERMARK_MASK(3);
2316         tmp |= LATENCY_WATERMARK_MASK(2);
2317         WREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset, tmp);
2318         WREG32(PIPE0_LATENCY_CONTROL + pipe_offset,
2319                (LATENCY_LOW_WATERMARK(latency_watermark_b) |
2320                 LATENCY_HIGH_WATERMARK(line_time)));
2321         /* restore original selection */
2322         WREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset, arb_control3);
2323
2324         /* write the priority marks */
2325         WREG32(PRIORITY_A_CNT + radeon_crtc->crtc_offset, priority_a_cnt);
2326         WREG32(PRIORITY_B_CNT + radeon_crtc->crtc_offset, priority_b_cnt);
2327
2328         /* save values for DPM */
2329         radeon_crtc->line_time = line_time;
2330         radeon_crtc->wm_high = latency_watermark_a;
2331         radeon_crtc->wm_low = latency_watermark_b;
2332 }
2333
2334 /**
2335  * evergreen_bandwidth_update - update display watermarks callback.
2336  *
2337  * @rdev: radeon_device pointer
2338  *
2339  * Update the display watermarks based on the requested mode(s)
2340  * (evergreen+).
2341  */
2342 void evergreen_bandwidth_update(struct radeon_device *rdev)
2343 {
2344         struct drm_display_mode *mode0 = NULL;
2345         struct drm_display_mode *mode1 = NULL;
2346         u32 num_heads = 0, lb_size;
2347         int i;
2348
2349         if (!rdev->mode_info.mode_config_initialized)
2350                 return;
2351
2352         radeon_update_display_priority(rdev);
2353
2354         for (i = 0; i < rdev->num_crtc; i++) {
2355                 if (rdev->mode_info.crtcs[i]->base.enabled)
2356                         num_heads++;
2357         }
2358         for (i = 0; i < rdev->num_crtc; i += 2) {
2359                 mode0 = &rdev->mode_info.crtcs[i]->base.mode;
2360                 mode1 = &rdev->mode_info.crtcs[i+1]->base.mode;
2361                 lb_size = evergreen_line_buffer_adjust(rdev, rdev->mode_info.crtcs[i], mode0, mode1);
2362                 evergreen_program_watermarks(rdev, rdev->mode_info.crtcs[i], lb_size, num_heads);
2363                 lb_size = evergreen_line_buffer_adjust(rdev, rdev->mode_info.crtcs[i+1], mode1, mode0);
2364                 evergreen_program_watermarks(rdev, rdev->mode_info.crtcs[i+1], lb_size, num_heads);
2365         }
2366 }
2367
2368 /**
2369  * evergreen_mc_wait_for_idle - wait for MC idle callback.
2370  *
2371  * @rdev: radeon_device pointer
2372  *
2373  * Wait for the MC (memory controller) to be idle.
2374  * (evergreen+).
2375  * Returns 0 if the MC is idle, -1 if not.
2376  */
2377 int evergreen_mc_wait_for_idle(struct radeon_device *rdev)
2378 {
2379         unsigned i;
2380         u32 tmp;
2381
2382         for (i = 0; i < rdev->usec_timeout; i++) {
2383                 /* read MC_STATUS */
2384                 tmp = RREG32(SRBM_STATUS) & 0x1F00;
2385                 if (!tmp)
2386                         return 0;
2387                 udelay(1);
2388         }
2389         return -1;
2390 }
2391
2392 /*
2393  * GART
2394  */
2395 void evergreen_pcie_gart_tlb_flush(struct radeon_device *rdev)
2396 {
2397         unsigned i;
2398         u32 tmp;
2399
2400         WREG32(HDP_MEM_COHERENCY_FLUSH_CNTL, 0x1);
2401
2402         WREG32(VM_CONTEXT0_REQUEST_RESPONSE, REQUEST_TYPE(1));
2403         for (i = 0; i < rdev->usec_timeout; i++) {
2404                 /* read MC_STATUS */
2405                 tmp = RREG32(VM_CONTEXT0_REQUEST_RESPONSE);
2406                 tmp = (tmp & RESPONSE_TYPE_MASK) >> RESPONSE_TYPE_SHIFT;
2407                 if (tmp == 2) {
2408                         printk(KERN_WARNING "[drm] r600 flush TLB failed\n");
2409                         return;
2410                 }
2411                 if (tmp) {
2412                         return;
2413                 }
2414                 udelay(1);
2415         }
2416 }
2417
/**
 * evergreen_pcie_gart_enable - program and enable the PCIE GART
 *
 * @rdev: radeon_device pointer
 *
 * Pins the GART page table in VRAM, programs the VM L2 cache and the
 * MC L1 TLB control registers, points VM context0 at the GTT aperture
 * backed by the GART table, and flushes the TLB.
 * Returns 0 on success, negative error code on failure.
 */
static int evergreen_pcie_gart_enable(struct radeon_device *rdev)
{
	u32 tmp;
	int r;

	if (rdev->gart.robj == NULL) {
		dev_err(rdev->dev, "No VRAM object for PCIE GART.\n");
		return -EINVAL;
	}
	r = radeon_gart_table_vram_pin(rdev);
	if (r)
		return r;
	/* Setup L2 cache */
	WREG32(VM_L2_CNTL, ENABLE_L2_CACHE | ENABLE_L2_FRAGMENT_PROCESSING |
				ENABLE_L2_PTE_CACHE_LRU_UPDATE_BY_WRITE |
				EFFECTIVE_L2_QUEUE_SIZE(7));
	WREG32(VM_L2_CNTL2, 0);
	WREG32(VM_L2_CNTL3, BANK_SELECT(0) | CACHE_UPDATE_MODE(2));
	/* Setup TLB control */
	tmp = ENABLE_L1_TLB | ENABLE_L1_FRAGMENT_PROCESSING |
		SYSTEM_ACCESS_MODE_NOT_IN_SYS |
		SYSTEM_APERTURE_UNMAPPED_ACCESS_PASS_THRU |
		EFFECTIVE_L1_TLB_SIZE(5) | EFFECTIVE_L1_QUEUE_SIZE(5);
	if (rdev->flags & RADEON_IS_IGP) {
		/* fusion (IGP) parts use different MD L1 TLB register offsets */
		WREG32(FUS_MC_VM_MD_L1_TLB0_CNTL, tmp);
		WREG32(FUS_MC_VM_MD_L1_TLB1_CNTL, tmp);
		WREG32(FUS_MC_VM_MD_L1_TLB2_CNTL, tmp);
	} else {
		WREG32(MC_VM_MD_L1_TLB0_CNTL, tmp);
		WREG32(MC_VM_MD_L1_TLB1_CNTL, tmp);
		WREG32(MC_VM_MD_L1_TLB2_CNTL, tmp);
		if ((rdev->family == CHIP_JUNIPER) ||
		    (rdev->family == CHIP_CYPRESS) ||
		    (rdev->family == CHIP_HEMLOCK) ||
		    (rdev->family == CHIP_BARTS))
			/* these asics have a fourth MD L1 TLB to program */
			WREG32(MC_VM_MD_L1_TLB3_CNTL, tmp);
	}
	WREG32(MC_VM_MB_L1_TLB0_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB1_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB2_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB3_CNTL, tmp);
	/* VM context0 maps the GTT range, with the page table in VRAM;
	 * addresses are programmed in units of 4K pages (>> 12).
	 */
	WREG32(VM_CONTEXT0_PAGE_TABLE_START_ADDR, rdev->mc.gtt_start >> 12);
	WREG32(VM_CONTEXT0_PAGE_TABLE_END_ADDR, rdev->mc.gtt_end >> 12);
	WREG32(VM_CONTEXT0_PAGE_TABLE_BASE_ADDR, rdev->gart.table_addr >> 12);
	WREG32(VM_CONTEXT0_CNTL, ENABLE_CONTEXT | PAGE_TABLE_DEPTH(0) |
				RANGE_PROTECTION_FAULT_ENABLE_DEFAULT);
	/* range faults are redirected to the dummy page */
	WREG32(VM_CONTEXT0_PROTECTION_FAULT_DEFAULT_ADDR,
			(u32)(rdev->dummy_page.addr >> 12));
	/* context1 stays disabled here */
	WREG32(VM_CONTEXT1_CNTL, 0);

	evergreen_pcie_gart_tlb_flush(rdev);
	DRM_INFO("PCIE GART of %uM enabled (table at 0x%016llX).\n",
		 (unsigned)(rdev->mc.gtt_size >> 20),
		 (unsigned long long)rdev->gart.table_addr);
	rdev->gart.ready = true;
	return 0;
}
2475
/**
 * evergreen_pcie_gart_disable - shut the PCIE GART down
 *
 * @rdev: radeon_device pointer
 *
 * Disables both VM contexts, reprograms the L2/L1 TLB controls without
 * their enable bits, and unpins the GART page table from VRAM.
 */
static void evergreen_pcie_gart_disable(struct radeon_device *rdev)
{
	u32 tmp;

	/* Disable all tables */
	WREG32(VM_CONTEXT0_CNTL, 0);
	WREG32(VM_CONTEXT1_CNTL, 0);

	/* Setup L2 cache */
	WREG32(VM_L2_CNTL, ENABLE_L2_FRAGMENT_PROCESSING |
				EFFECTIVE_L2_QUEUE_SIZE(7));
	WREG32(VM_L2_CNTL2, 0);
	WREG32(VM_L2_CNTL3, BANK_SELECT(0) | CACHE_UPDATE_MODE(2));
	/* Setup TLB control: size fields only, no ENABLE_L1_TLB bit */
	tmp = EFFECTIVE_L1_TLB_SIZE(5) | EFFECTIVE_L1_QUEUE_SIZE(5);
	WREG32(MC_VM_MD_L1_TLB0_CNTL, tmp);
	WREG32(MC_VM_MD_L1_TLB1_CNTL, tmp);
	WREG32(MC_VM_MD_L1_TLB2_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB0_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB1_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB2_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB3_CNTL, tmp);
	radeon_gart_table_vram_unpin(rdev);
}
2500
/**
 * evergreen_pcie_gart_fini - final PCIE GART teardown
 *
 * @rdev: radeon_device pointer
 *
 * Ordering matters: the hardware is disabled first so nothing still
 * references the page table when it is freed, then driver GART state
 * is torn down.
 */
static void evergreen_pcie_gart_fini(struct radeon_device *rdev)
{
	evergreen_pcie_gart_disable(rdev);
	radeon_gart_table_vram_free(rdev);
	radeon_gart_fini(rdev);
}
2507
2508
/**
 * evergreen_agp_enable - set up the MC VM blocks for AGP operation
 *
 * @rdev: radeon_device pointer
 *
 * Programs the same L2 cache / L1 TLB setup as the PCIE GART path but
 * leaves both VM contexts disabled — presumably so accesses pass
 * through the aperture untranslated on AGP boards (NOTE(review):
 * confirm against the AGP path callers).
 */
static void evergreen_agp_enable(struct radeon_device *rdev)
{
	u32 tmp;

	/* Setup L2 cache */
	WREG32(VM_L2_CNTL, ENABLE_L2_CACHE | ENABLE_L2_FRAGMENT_PROCESSING |
				ENABLE_L2_PTE_CACHE_LRU_UPDATE_BY_WRITE |
				EFFECTIVE_L2_QUEUE_SIZE(7));
	WREG32(VM_L2_CNTL2, 0);
	WREG32(VM_L2_CNTL3, BANK_SELECT(0) | CACHE_UPDATE_MODE(2));
	/* Setup TLB control */
	tmp = ENABLE_L1_TLB | ENABLE_L1_FRAGMENT_PROCESSING |
		SYSTEM_ACCESS_MODE_NOT_IN_SYS |
		SYSTEM_APERTURE_UNMAPPED_ACCESS_PASS_THRU |
		EFFECTIVE_L1_TLB_SIZE(5) | EFFECTIVE_L1_QUEUE_SIZE(5);
	WREG32(MC_VM_MD_L1_TLB0_CNTL, tmp);
	WREG32(MC_VM_MD_L1_TLB1_CNTL, tmp);
	WREG32(MC_VM_MD_L1_TLB2_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB0_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB1_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB2_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB3_CNTL, tmp);
	/* no page table: both VM contexts stay disabled */
	WREG32(VM_CONTEXT0_CNTL, 0);
	WREG32(VM_CONTEXT1_CNTL, 0);
}
2534
/**
 * evergreen_mc_stop - stop display access to the MC
 *
 * @rdev: radeon_device pointer
 * @save: state to preserve across the stop/resume pair
 *
 * Saves VGA state, blanks every enabled display controller, waits for
 * the MC to go idle, puts the MC into blackout and locks the
 * double-buffered display registers, so the MC can be safely
 * reprogrammed.  State is restored by evergreen_mc_resume().
 */
void evergreen_mc_stop(struct radeon_device *rdev, struct evergreen_mc_save *save)
{
	u32 crtc_enabled, tmp, frame_count, blackout;
	int i, j;

	if (!ASIC_IS_NODCE(rdev)) {
		save->vga_render_control = RREG32(VGA_RENDER_CONTROL);
		save->vga_hdp_control = RREG32(VGA_HDP_CONTROL);

		/* disable VGA render */
		WREG32(VGA_RENDER_CONTROL, 0);
	}
	/* blank the display controllers */
	for (i = 0; i < rdev->num_crtc; i++) {
		crtc_enabled = RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]) & EVERGREEN_CRTC_MASTER_EN;
		if (crtc_enabled) {
			save->crtc_enabled[i] = true;
			if (ASIC_IS_DCE6(rdev)) {
				/* DCE6: blank via CRTC_BLANK_CONTROL */
				tmp = RREG32(EVERGREEN_CRTC_BLANK_CONTROL + crtc_offsets[i]);
				if (!(tmp & EVERGREEN_CRTC_BLANK_DATA_EN)) {
					/* only blank if not already blanked; take effect at vblank */
					radeon_wait_for_vblank(rdev, i);
					WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 1);
					tmp |= EVERGREEN_CRTC_BLANK_DATA_EN;
					WREG32(EVERGREEN_CRTC_BLANK_CONTROL + crtc_offsets[i], tmp);
					WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 0);
				}
			} else {
				/* pre-DCE6: stop display read requests instead */
				tmp = RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]);
				if (!(tmp & EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE)) {
					radeon_wait_for_vblank(rdev, i);
					WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 1);
					tmp |= EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE;
					WREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i], tmp);
					WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 0);
				}
			}
			/* wait for the next frame */
			frame_count = radeon_get_vblank_counter(rdev, i);
			for (j = 0; j < rdev->usec_timeout; j++) {
				if (radeon_get_vblank_counter(rdev, i) != frame_count)
					break;
				udelay(1);
			}

			/* XXX this is a hack to avoid strange behavior with EFI on certain systems */
			WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 1);
			tmp = RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]);
			tmp &= ~EVERGREEN_CRTC_MASTER_EN;
			WREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i], tmp);
			WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 0);
			/* crtc was fully disabled above, so resume must not re-enable it */
			save->crtc_enabled[i] = false;
			/* ***** */
		} else {
			save->crtc_enabled[i] = false;
		}
	}

	radeon_mc_wait_for_idle(rdev);

	blackout = RREG32(MC_SHARED_BLACKOUT_CNTL);
	if ((blackout & BLACKOUT_MODE_MASK) != 1) {
		/* Block CPU access */
		WREG32(BIF_FB_EN, 0);
		/* blackout the MC */
		blackout &= ~BLACKOUT_MODE_MASK;
		WREG32(MC_SHARED_BLACKOUT_CNTL, blackout | 1);
	}
	/* wait for the MC to settle */
	udelay(100);

	/* lock double buffered regs */
	for (i = 0; i < rdev->num_crtc; i++) {
		if (save->crtc_enabled[i]) {
			tmp = RREG32(EVERGREEN_GRPH_UPDATE + crtc_offsets[i]);
			if (!(tmp & EVERGREEN_GRPH_UPDATE_LOCK)) {
				tmp |= EVERGREEN_GRPH_UPDATE_LOCK;
				WREG32(EVERGREEN_GRPH_UPDATE + crtc_offsets[i], tmp);
			}
			tmp = RREG32(EVERGREEN_MASTER_UPDATE_LOCK + crtc_offsets[i]);
			if (!(tmp & 1)) {
				tmp |= 1;
				WREG32(EVERGREEN_MASTER_UPDATE_LOCK + crtc_offsets[i], tmp);
			}
		}
	}
}
2621
/**
 * evergreen_mc_resume - restore display access to the MC
 *
 * @rdev: radeon_device pointer
 * @save: state saved by evergreen_mc_stop()
 *
 * Repoints the crtc surface bases at the (possibly relocated) start of
 * VRAM, unlocks the double-buffered registers, lifts the MC blackout,
 * unblanks the crtcs that were enabled and restores VGA state.
 */
void evergreen_mc_resume(struct radeon_device *rdev, struct evergreen_mc_save *save)
{
	u32 tmp, frame_count;
	int i, j;

	/* update crtc base addresses */
	for (i = 0; i < rdev->num_crtc; i++) {
		WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + crtc_offsets[i],
		       upper_32_bits(rdev->mc.vram_start));
		WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS_HIGH + crtc_offsets[i],
		       upper_32_bits(rdev->mc.vram_start));
		WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + crtc_offsets[i],
		       (u32)rdev->mc.vram_start);
		WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS + crtc_offsets[i],
		       (u32)rdev->mc.vram_start);
	}

	if (!ASIC_IS_NODCE(rdev)) {
		WREG32(EVERGREEN_VGA_MEMORY_BASE_ADDRESS_HIGH, upper_32_bits(rdev->mc.vram_start));
		WREG32(EVERGREEN_VGA_MEMORY_BASE_ADDRESS, (u32)rdev->mc.vram_start);
	}

	/* unlock regs and wait for update */
	for (i = 0; i < rdev->num_crtc; i++) {
		if (save->crtc_enabled[i]) {
			/* force MASTER_UPDATE_MODE to 3 if it is not already */
			tmp = RREG32(EVERGREEN_MASTER_UPDATE_MODE + crtc_offsets[i]);
			if ((tmp & 0x7) != 3) {
				tmp &= ~0x7;
				tmp |= 0x3;
				WREG32(EVERGREEN_MASTER_UPDATE_MODE + crtc_offsets[i], tmp);
			}
			tmp = RREG32(EVERGREEN_GRPH_UPDATE + crtc_offsets[i]);
			if (tmp & EVERGREEN_GRPH_UPDATE_LOCK) {
				tmp &= ~EVERGREEN_GRPH_UPDATE_LOCK;
				WREG32(EVERGREEN_GRPH_UPDATE + crtc_offsets[i], tmp);
			}
			tmp = RREG32(EVERGREEN_MASTER_UPDATE_LOCK + crtc_offsets[i]);
			if (tmp & 1) {
				tmp &= ~1;
				WREG32(EVERGREEN_MASTER_UPDATE_LOCK + crtc_offsets[i], tmp);
			}
			/* wait for the pending surface update to land */
			for (j = 0; j < rdev->usec_timeout; j++) {
				tmp = RREG32(EVERGREEN_GRPH_UPDATE + crtc_offsets[i]);
				if ((tmp & EVERGREEN_GRPH_SURFACE_UPDATE_PENDING) == 0)
					break;
				udelay(1);
			}
		}
	}

	/* unblackout the MC */
	tmp = RREG32(MC_SHARED_BLACKOUT_CNTL);
	tmp &= ~BLACKOUT_MODE_MASK;
	WREG32(MC_SHARED_BLACKOUT_CNTL, tmp);
	/* allow CPU access */
	WREG32(BIF_FB_EN, FB_READ_EN | FB_WRITE_EN);

	for (i = 0; i < rdev->num_crtc; i++) {
		if (save->crtc_enabled[i]) {
			if (ASIC_IS_DCE6(rdev)) {
				/* DCE6: unblank via CRTC_BLANK_CONTROL */
				tmp = RREG32(EVERGREEN_CRTC_BLANK_CONTROL + crtc_offsets[i]);
				tmp &= ~EVERGREEN_CRTC_BLANK_DATA_EN;
				WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 1);
				WREG32(EVERGREEN_CRTC_BLANK_CONTROL + crtc_offsets[i], tmp);
				WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 0);
			} else {
				/* pre-DCE6: re-enable display read requests */
				tmp = RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]);
				tmp &= ~EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE;
				WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 1);
				WREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i], tmp);
				WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 0);
			}
			/* wait for the next frame */
			frame_count = radeon_get_vblank_counter(rdev, i);
			for (j = 0; j < rdev->usec_timeout; j++) {
				if (radeon_get_vblank_counter(rdev, i) != frame_count)
					break;
				udelay(1);
			}
		}
	}
	if (!ASIC_IS_NODCE(rdev)) {
		/* Unlock vga access */
		WREG32(VGA_HDP_CONTROL, save->vga_hdp_control);
		mdelay(1);
		WREG32(VGA_RENDER_CONTROL, save->vga_render_control);
	}
}
2710
/**
 * evergreen_mc_program - program the MC FB/AGP/system apertures
 *
 * @rdev: radeon_device pointer
 *
 * Stops display access via evergreen_mc_stop(), programs the system,
 * FB and AGP aperture locations and the HDP non-surface window while
 * the MC is idle, then resumes display access.
 */
void evergreen_mc_program(struct radeon_device *rdev)
{
	struct evergreen_mc_save save;
	u32 tmp;
	int i, j;

	/* Initialize HDP */
	for (i = 0, j = 0; i < 32; i++, j += 0x18) {
		WREG32((0x2c14 + j), 0x00000000);
		WREG32((0x2c18 + j), 0x00000000);
		WREG32((0x2c1c + j), 0x00000000);
		WREG32((0x2c20 + j), 0x00000000);
		WREG32((0x2c24 + j), 0x00000000);
	}
	WREG32(HDP_REG_COHERENCY_FLUSH_CNTL, 0);

	evergreen_mc_stop(rdev, &save);
	if (evergreen_mc_wait_for_idle(rdev)) {
		dev_warn(rdev->dev, "Wait for MC idle timedout !\n");
	}
	/* Lockout access through VGA aperture*/
	WREG32(VGA_HDP_CONTROL, VGA_MEMORY_DISABLE);
	/* Update configuration */
	if (rdev->flags & RADEON_IS_AGP) {
		/* the system aperture must cover both VRAM and the AGP
		 * aperture, whichever order they appear in
		 */
		if (rdev->mc.vram_start < rdev->mc.gtt_start) {
			/* VRAM before AGP */
			WREG32(MC_VM_SYSTEM_APERTURE_LOW_ADDR,
				rdev->mc.vram_start >> 12);
			WREG32(MC_VM_SYSTEM_APERTURE_HIGH_ADDR,
				rdev->mc.gtt_end >> 12);
		} else {
			/* VRAM after AGP */
			WREG32(MC_VM_SYSTEM_APERTURE_LOW_ADDR,
				rdev->mc.gtt_start >> 12);
			WREG32(MC_VM_SYSTEM_APERTURE_HIGH_ADDR,
				rdev->mc.vram_end >> 12);
		}
	} else {
		WREG32(MC_VM_SYSTEM_APERTURE_LOW_ADDR,
			rdev->mc.vram_start >> 12);
		WREG32(MC_VM_SYSTEM_APERTURE_HIGH_ADDR,
			rdev->mc.vram_end >> 12);
	}
	WREG32(MC_VM_SYSTEM_APERTURE_DEFAULT_ADDR, rdev->vram_scratch.gpu_addr >> 12);
	/* llano/ontario only */
	if ((rdev->family == CHIP_PALM) ||
	    (rdev->family == CHIP_SUMO) ||
	    (rdev->family == CHIP_SUMO2)) {
		tmp = RREG32(MC_FUS_VM_FB_OFFSET) & 0x000FFFFF;
		tmp |= ((rdev->mc.vram_end >> 20) & 0xF) << 24;
		tmp |= ((rdev->mc.vram_start >> 20) & 0xF) << 20;
		WREG32(MC_FUS_VM_FB_OFFSET, tmp);
	}
	/* FB location is packed as 16-bit start/end in 16MB units */
	tmp = ((rdev->mc.vram_end >> 24) & 0xFFFF) << 16;
	tmp |= ((rdev->mc.vram_start >> 24) & 0xFFFF);
	WREG32(MC_VM_FB_LOCATION, tmp);
	WREG32(HDP_NONSURFACE_BASE, (rdev->mc.vram_start >> 8));
	WREG32(HDP_NONSURFACE_INFO, (2 << 7) | (1 << 30));
	WREG32(HDP_NONSURFACE_SIZE, 0x3FFFFFFF);
	if (rdev->flags & RADEON_IS_AGP) {
		WREG32(MC_VM_AGP_TOP, rdev->mc.gtt_end >> 16);
		WREG32(MC_VM_AGP_BOT, rdev->mc.gtt_start >> 16);
		WREG32(MC_VM_AGP_BASE, rdev->mc.agp_base >> 22);
	} else {
		/* no AGP: park the aperture with BOT > TOP */
		WREG32(MC_VM_AGP_BASE, 0);
		WREG32(MC_VM_AGP_TOP, 0x0FFFFFFF);
		WREG32(MC_VM_AGP_BOT, 0x0FFFFFFF);
	}
	if (evergreen_mc_wait_for_idle(rdev)) {
		dev_warn(rdev->dev, "Wait for MC idle timedout !\n");
	}
	evergreen_mc_resume(rdev, &save);
	/* we need to own VRAM, so turn off the VGA renderer here
	 * to stop it overwriting our objects */
	rv515_vga_render_disable(rdev);
}
2787
2788 /*
2789  * CP.
2790  */
/**
 * evergreen_ring_ib_execute - emit an indirect buffer on the GFX ring
 *
 * @rdev: radeon_device pointer
 * @ib: indirect buffer to schedule
 *
 * Writes the PM4 packets that switch the CP into DX10/11 mode,
 * record the upcoming rptr value, and kick off execution of @ib.
 * The dword counts in the next_rptr math must match the packets
 * actually emitted below.
 */
void evergreen_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib)
{
	struct radeon_ring *ring = &rdev->ring[ib->ring];
	u32 next_rptr;

	/* set to DX10/11 mode */
	radeon_ring_write(ring, PACKET3(PACKET3_MODE_CONTROL, 0));
	radeon_ring_write(ring, 1);

	if (ring->rptr_save_reg) {
		/* 3 dwords for this SET_CONFIG_REG + 4 for the IB packet below */
		next_rptr = ring->wptr + 3 + 4;
		radeon_ring_write(ring, PACKET3(PACKET3_SET_CONFIG_REG, 1));
		radeon_ring_write(ring, ((ring->rptr_save_reg -
					  PACKET3_SET_CONFIG_REG_START) >> 2));
		radeon_ring_write(ring, next_rptr);
	} else if (rdev->wb.enabled) {
		/* 5 dwords for this MEM_WRITE + 4 for the IB packet below */
		next_rptr = ring->wptr + 5 + 4;
		radeon_ring_write(ring, PACKET3(PACKET3_MEM_WRITE, 3));
		radeon_ring_write(ring, ring->next_rptr_gpu_addr & 0xfffffffc);
		radeon_ring_write(ring, (upper_32_bits(ring->next_rptr_gpu_addr) & 0xff) | (1 << 18));
		radeon_ring_write(ring, next_rptr);
		radeon_ring_write(ring, 0);
	}

	radeon_ring_write(ring, PACKET3(PACKET3_INDIRECT_BUFFER, 2));
	radeon_ring_write(ring,
#ifdef __BIG_ENDIAN
			  /* request the CP byte-swap the IB contents */
			  (2 << 0) |
#endif
			  (ib->gpu_addr & 0xFFFFFFFC));
	radeon_ring_write(ring, upper_32_bits(ib->gpu_addr) & 0xFF);
	radeon_ring_write(ring, ib->length_dw);
}
2824
2825
/**
 * evergreen_cp_load_microcode - upload the CP microcode
 *
 * @rdev: radeon_device pointer
 *
 * Stops the CP and writes the PFP and ME firmware images (requested
 * earlier into rdev->pfp_fw / rdev->me_fw) into the CP ucode RAMs.
 * Returns 0 on success, -EINVAL if the firmware is not loaded.
 */
static int evergreen_cp_load_microcode(struct radeon_device *rdev)
{
	const __be32 *fw_data;
	int i;

	if (!rdev->me_fw || !rdev->pfp_fw)
		return -EINVAL;

	r700_cp_stop(rdev);
	WREG32(CP_RB_CNTL,
#ifdef __BIG_ENDIAN
	       BUF_SWAP_32BIT |
#endif
	       RB_NO_UPDATE | RB_BLKSZ(15) | RB_BUFSZ(3));

	/* firmware words are big endian; byteswap on the way in */
	fw_data = (const __be32 *)rdev->pfp_fw->data;
	WREG32(CP_PFP_UCODE_ADDR, 0);
	for (i = 0; i < EVERGREEN_PFP_UCODE_SIZE; i++)
		WREG32(CP_PFP_UCODE_DATA, be32_to_cpup(fw_data++));
	WREG32(CP_PFP_UCODE_ADDR, 0);

	fw_data = (const __be32 *)rdev->me_fw->data;
	WREG32(CP_ME_RAM_WADDR, 0);
	for (i = 0; i < EVERGREEN_PM4_UCODE_SIZE; i++)
		WREG32(CP_ME_RAM_DATA, be32_to_cpup(fw_data++));

	/* reset the ucode address registers after the upload */
	WREG32(CP_PFP_UCODE_ADDR, 0);
	WREG32(CP_ME_RAM_WADDR, 0);
	WREG32(CP_ME_RAM_RADDR, 0);
	return 0;
}
2857
/**
 * evergreen_cp_start - initialize the CP micro engine
 *
 * @rdev: radeon_device pointer
 *
 * Emits the ME_INITIALIZE packet, un-halts the CP, then emits the
 * default golden register state plus a small fixed tail of state
 * packets.  The exact packet stream and dword counts are what the
 * CP microcode expects — do not reorder.
 * Returns 0 on success, negative error code on failure.
 */
static int evergreen_cp_start(struct radeon_device *rdev)
{
	struct radeon_ring *ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
	int r, i;
	uint32_t cp_me;

	/* 7 dwords: ME_INITIALIZE header + 6 payload dwords */
	r = radeon_ring_lock(rdev, ring, 7);
	if (r) {
		DRM_ERROR("radeon: cp failed to lock ring (%d).\n", r);
		return r;
	}
	radeon_ring_write(ring, PACKET3(PACKET3_ME_INITIALIZE, 5));
	radeon_ring_write(ring, 0x1);
	radeon_ring_write(ring, 0x0);
	radeon_ring_write(ring, rdev->config.evergreen.max_hw_contexts - 1);
	radeon_ring_write(ring, PACKET3_ME_INITIALIZE_DEVICE_ID(1));
	radeon_ring_write(ring, 0);
	radeon_ring_write(ring, 0);
	radeon_ring_unlock_commit(rdev, ring, false);

	/* un-halt the micro engine */
	cp_me = 0xff;
	WREG32(CP_ME_CNTL, cp_me);

	/* default state plus 19 dwords of fixed packets below */
	r = radeon_ring_lock(rdev, ring, evergreen_default_size + 19);
	if (r) {
		DRM_ERROR("radeon: cp failed to lock ring (%d).\n", r);
		return r;
	}

	/* setup clear context state */
	radeon_ring_write(ring, PACKET3(PACKET3_PREAMBLE_CNTL, 0));
	radeon_ring_write(ring, PACKET3_PREAMBLE_BEGIN_CLEAR_STATE);

	for (i = 0; i < evergreen_default_size; i++)
		radeon_ring_write(ring, evergreen_default_state[i]);

	radeon_ring_write(ring, PACKET3(PACKET3_PREAMBLE_CNTL, 0));
	radeon_ring_write(ring, PACKET3_PREAMBLE_END_CLEAR_STATE);

	/* set clear context state */
	radeon_ring_write(ring, PACKET3(PACKET3_CLEAR_STATE, 0));
	radeon_ring_write(ring, 0);

	/* SQ_VTX_BASE_VTX_LOC */
	radeon_ring_write(ring, 0xc0026f00);
	radeon_ring_write(ring, 0x00000000);
	radeon_ring_write(ring, 0x00000000);
	radeon_ring_write(ring, 0x00000000);

	/* Clear consts */
	radeon_ring_write(ring, 0xc0036f00);
	radeon_ring_write(ring, 0x00000bc4);
	radeon_ring_write(ring, 0xffffffff);
	radeon_ring_write(ring, 0xffffffff);
	radeon_ring_write(ring, 0xffffffff);

	radeon_ring_write(ring, 0xc0026900);
	radeon_ring_write(ring, 0x00000316);
	radeon_ring_write(ring, 0x0000000e); /* VGT_VERTEX_REUSE_BLOCK_CNTL */
	radeon_ring_write(ring, 0x00000010); /*  */

	radeon_ring_unlock_commit(rdev, ring, false);

	return 0;
}
2923
/**
 * evergreen_cp_resume - reset and start the gfx command processor (CP)
 * @rdev: radeon_device pointer
 *
 * Soft-resets the CP (and the blocks that must be reset along with it),
 * programs the GFX ring buffer registers (size, base, read/write pointers,
 * writeback addresses), starts the CP via evergreen_cp_start() and runs a
 * ring test.
 *
 * Returns 0 on success, or the negative error code from the ring test.
 */
static int evergreen_cp_resume(struct radeon_device *rdev)
{
	struct radeon_ring *ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
	u32 tmp;
	u32 rb_bufsz;
	int r;

	/* Reset cp; if cp is reset, then PA, SH, VGT also need to be reset */
	WREG32(GRBM_SOFT_RESET, (SOFT_RESET_CP |
				 SOFT_RESET_PA |
				 SOFT_RESET_SH |
				 SOFT_RESET_VGT |
				 SOFT_RESET_SPI |
				 SOFT_RESET_SX));
	RREG32(GRBM_SOFT_RESET); /* read back to post the reset write */
	mdelay(15);
	WREG32(GRBM_SOFT_RESET, 0); /* release reset */
	RREG32(GRBM_SOFT_RESET);

	/* Set ring buffer size: log2 of the ring size in qwords (bytes / 8) */
	rb_bufsz = order_base_2(ring->ring_size / 8);
	tmp = (order_base_2(RADEON_GPU_PAGE_SIZE/8) << 8) | rb_bufsz;
#ifdef __BIG_ENDIAN
	/* swap ring buffer dwords on big-endian hosts */
	tmp |= BUF_SWAP_32BIT;
#endif
	WREG32(CP_RB_CNTL, tmp);
	WREG32(CP_SEM_WAIT_TIMER, 0x0);
	WREG32(CP_SEM_INCOMPLETE_TIMER_CNTL, 0x0);

	/* Set the write pointer delay */
	WREG32(CP_RB_WPTR_DELAY, 0);

	/* Initialize the ring buffer's read and write pointers.
	 * RB_RPTR_WR_ENA temporarily allows the host to force rptr to 0. */
	WREG32(CP_RB_CNTL, tmp | RB_RPTR_WR_ENA);
	WREG32(CP_RB_RPTR_WR, 0);
	ring->wptr = 0;
	WREG32(CP_RB_WPTR, ring->wptr);

	/* set the wb address whether it's enabled or not */
	WREG32(CP_RB_RPTR_ADDR,
	       ((rdev->wb.gpu_addr + RADEON_WB_CP_RPTR_OFFSET) & 0xFFFFFFFC));
	WREG32(CP_RB_RPTR_ADDR_HI, upper_32_bits(rdev->wb.gpu_addr + RADEON_WB_CP_RPTR_OFFSET) & 0xFF);
	WREG32(SCRATCH_ADDR, ((rdev->wb.gpu_addr + RADEON_WB_SCRATCH_OFFSET) >> 8) & 0xFFFFFFFF);

	if (rdev->wb.enabled)
		WREG32(SCRATCH_UMSK, 0xff);
	else {
		/* no writeback: stop the CP from updating the rptr copy */
		tmp |= RB_NO_UPDATE;
		WREG32(SCRATCH_UMSK, 0);
	}

	mdelay(1);
	/* rewrite CNTL without RB_RPTR_WR_ENA, committing the final config */
	WREG32(CP_RB_CNTL, tmp);

	WREG32(CP_RB_BASE, ring->gpu_addr >> 8); /* base is in units of 256 bytes */
	WREG32(CP_DEBUG, (1 << 27) | (1 << 28));

	evergreen_cp_start(rdev);
	ring->ready = true;
	r = radeon_ring_test(rdev, RADEON_RING_TYPE_GFX_INDEX, ring);
	if (r) {
		ring->ready = false;
		return r;
	}
	return 0;
}
2990
2991 /*
2992  * Core functions
2993  */
/**
 * evergreen_gpu_init - one-time setup of the 3D engine
 * @rdev: radeon_device pointer
 *
 * Fills in rdev->config.evergreen with per-ASIC limits (pipes, SIMDs,
 * backends, GPR/thread/stack budgets, export and FIFO sizes), then
 * programs the corresponding hardware registers: tiling/addressing
 * config, render-backend and SIMD enable maps, SX/SC/SQ/VGT defaults,
 * and clears the CB state.  The register write sequence follows the
 * hardware bring-up order and must not be reordered.
 */
static void evergreen_gpu_init(struct radeon_device *rdev)
{
	u32 gb_addr_config;
	u32 mc_shared_chmap, mc_arb_ramcfg;
	u32 sx_debug_1;
	u32 smx_dc_ctl0;
	u32 sq_config;
	u32 sq_lds_resource_mgmt;
	u32 sq_gpr_resource_mgmt_1;
	u32 sq_gpr_resource_mgmt_2;
	u32 sq_gpr_resource_mgmt_3;
	u32 sq_thread_resource_mgmt;
	u32 sq_thread_resource_mgmt_2;
	u32 sq_stack_resource_mgmt_1;
	u32 sq_stack_resource_mgmt_2;
	u32 sq_stack_resource_mgmt_3;
	u32 vgt_cache_invalidation;
	u32 hdp_host_path_cntl, tmp;
	u32 disabled_rb_mask;
	int i, j, ps_thread_count;

	/* Per-ASIC configuration: shader engines, pipes, SIMDs, backends,
	 * GPR/thread/stack limits, export buffer and SC FIFO sizes, and the
	 * golden GB_ADDR_CONFIG value for each chip family. */
	switch (rdev->family) {
	case CHIP_CYPRESS:
	case CHIP_HEMLOCK:
		rdev->config.evergreen.num_ses = 2;
		rdev->config.evergreen.max_pipes = 4;
		rdev->config.evergreen.max_tile_pipes = 8;
		rdev->config.evergreen.max_simds = 10;
		rdev->config.evergreen.max_backends = 4 * rdev->config.evergreen.num_ses;
		rdev->config.evergreen.max_gprs = 256;
		rdev->config.evergreen.max_threads = 248;
		rdev->config.evergreen.max_gs_threads = 32;
		rdev->config.evergreen.max_stack_entries = 512;
		rdev->config.evergreen.sx_num_of_sets = 4;
		rdev->config.evergreen.sx_max_export_size = 256;
		rdev->config.evergreen.sx_max_export_pos_size = 64;
		rdev->config.evergreen.sx_max_export_smx_size = 192;
		rdev->config.evergreen.max_hw_contexts = 8;
		rdev->config.evergreen.sq_num_cf_insts = 2;

		rdev->config.evergreen.sc_prim_fifo_size = 0x100;
		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
		gb_addr_config = CYPRESS_GB_ADDR_CONFIG_GOLDEN;
		break;
	case CHIP_JUNIPER:
		rdev->config.evergreen.num_ses = 1;
		rdev->config.evergreen.max_pipes = 4;
		rdev->config.evergreen.max_tile_pipes = 4;
		rdev->config.evergreen.max_simds = 10;
		rdev->config.evergreen.max_backends = 4 * rdev->config.evergreen.num_ses;
		rdev->config.evergreen.max_gprs = 256;
		rdev->config.evergreen.max_threads = 248;
		rdev->config.evergreen.max_gs_threads = 32;
		rdev->config.evergreen.max_stack_entries = 512;
		rdev->config.evergreen.sx_num_of_sets = 4;
		rdev->config.evergreen.sx_max_export_size = 256;
		rdev->config.evergreen.sx_max_export_pos_size = 64;
		rdev->config.evergreen.sx_max_export_smx_size = 192;
		rdev->config.evergreen.max_hw_contexts = 8;
		rdev->config.evergreen.sq_num_cf_insts = 2;

		rdev->config.evergreen.sc_prim_fifo_size = 0x100;
		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
		gb_addr_config = JUNIPER_GB_ADDR_CONFIG_GOLDEN;
		break;
	case CHIP_REDWOOD:
		rdev->config.evergreen.num_ses = 1;
		rdev->config.evergreen.max_pipes = 4;
		rdev->config.evergreen.max_tile_pipes = 4;
		rdev->config.evergreen.max_simds = 5;
		rdev->config.evergreen.max_backends = 2 * rdev->config.evergreen.num_ses;
		rdev->config.evergreen.max_gprs = 256;
		rdev->config.evergreen.max_threads = 248;
		rdev->config.evergreen.max_gs_threads = 32;
		rdev->config.evergreen.max_stack_entries = 256;
		rdev->config.evergreen.sx_num_of_sets = 4;
		rdev->config.evergreen.sx_max_export_size = 256;
		rdev->config.evergreen.sx_max_export_pos_size = 64;
		rdev->config.evergreen.sx_max_export_smx_size = 192;
		rdev->config.evergreen.max_hw_contexts = 8;
		rdev->config.evergreen.sq_num_cf_insts = 2;

		rdev->config.evergreen.sc_prim_fifo_size = 0x100;
		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
		gb_addr_config = REDWOOD_GB_ADDR_CONFIG_GOLDEN;
		break;
	case CHIP_CEDAR:
	default:
		rdev->config.evergreen.num_ses = 1;
		rdev->config.evergreen.max_pipes = 2;
		rdev->config.evergreen.max_tile_pipes = 2;
		rdev->config.evergreen.max_simds = 2;
		rdev->config.evergreen.max_backends = 1 * rdev->config.evergreen.num_ses;
		rdev->config.evergreen.max_gprs = 256;
		rdev->config.evergreen.max_threads = 192;
		rdev->config.evergreen.max_gs_threads = 16;
		rdev->config.evergreen.max_stack_entries = 256;
		rdev->config.evergreen.sx_num_of_sets = 4;
		rdev->config.evergreen.sx_max_export_size = 128;
		rdev->config.evergreen.sx_max_export_pos_size = 32;
		rdev->config.evergreen.sx_max_export_smx_size = 96;
		rdev->config.evergreen.max_hw_contexts = 4;
		rdev->config.evergreen.sq_num_cf_insts = 1;

		rdev->config.evergreen.sc_prim_fifo_size = 0x40;
		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
		gb_addr_config = CEDAR_GB_ADDR_CONFIG_GOLDEN;
		break;
	case CHIP_PALM:
		rdev->config.evergreen.num_ses = 1;
		rdev->config.evergreen.max_pipes = 2;
		rdev->config.evergreen.max_tile_pipes = 2;
		rdev->config.evergreen.max_simds = 2;
		rdev->config.evergreen.max_backends = 1 * rdev->config.evergreen.num_ses;
		rdev->config.evergreen.max_gprs = 256;
		rdev->config.evergreen.max_threads = 192;
		rdev->config.evergreen.max_gs_threads = 16;
		rdev->config.evergreen.max_stack_entries = 256;
		rdev->config.evergreen.sx_num_of_sets = 4;
		rdev->config.evergreen.sx_max_export_size = 128;
		rdev->config.evergreen.sx_max_export_pos_size = 32;
		rdev->config.evergreen.sx_max_export_smx_size = 96;
		rdev->config.evergreen.max_hw_contexts = 4;
		rdev->config.evergreen.sq_num_cf_insts = 1;

		rdev->config.evergreen.sc_prim_fifo_size = 0x40;
		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
		gb_addr_config = CEDAR_GB_ADDR_CONFIG_GOLDEN;
		break;
	case CHIP_SUMO:
		rdev->config.evergreen.num_ses = 1;
		rdev->config.evergreen.max_pipes = 4;
		rdev->config.evergreen.max_tile_pipes = 4;
		/* SUMO variants differ in SIMD count; select by PCI device id */
		if (rdev->pdev->device == 0x9648)
			rdev->config.evergreen.max_simds = 3;
		else if ((rdev->pdev->device == 0x9647) ||
			 (rdev->pdev->device == 0x964a))
			rdev->config.evergreen.max_simds = 4;
		else
			rdev->config.evergreen.max_simds = 5;
		rdev->config.evergreen.max_backends = 2 * rdev->config.evergreen.num_ses;
		rdev->config.evergreen.max_gprs = 256;
		rdev->config.evergreen.max_threads = 248;
		rdev->config.evergreen.max_gs_threads = 32;
		rdev->config.evergreen.max_stack_entries = 256;
		rdev->config.evergreen.sx_num_of_sets = 4;
		rdev->config.evergreen.sx_max_export_size = 256;
		rdev->config.evergreen.sx_max_export_pos_size = 64;
		rdev->config.evergreen.sx_max_export_smx_size = 192;
		rdev->config.evergreen.max_hw_contexts = 8;
		rdev->config.evergreen.sq_num_cf_insts = 2;

		rdev->config.evergreen.sc_prim_fifo_size = 0x40;
		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
		gb_addr_config = SUMO_GB_ADDR_CONFIG_GOLDEN;
		break;
	case CHIP_SUMO2:
		rdev->config.evergreen.num_ses = 1;
		rdev->config.evergreen.max_pipes = 4;
		rdev->config.evergreen.max_tile_pipes = 4;
		rdev->config.evergreen.max_simds = 2;
		rdev->config.evergreen.max_backends = 1 * rdev->config.evergreen.num_ses;
		rdev->config.evergreen.max_gprs = 256;
		rdev->config.evergreen.max_threads = 248;
		rdev->config.evergreen.max_gs_threads = 32;
		rdev->config.evergreen.max_stack_entries = 512;
		rdev->config.evergreen.sx_num_of_sets = 4;
		rdev->config.evergreen.sx_max_export_size = 256;
		rdev->config.evergreen.sx_max_export_pos_size = 64;
		rdev->config.evergreen.sx_max_export_smx_size = 192;
		rdev->config.evergreen.max_hw_contexts = 4;
		rdev->config.evergreen.sq_num_cf_insts = 2;

		rdev->config.evergreen.sc_prim_fifo_size = 0x40;
		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
		gb_addr_config = SUMO2_GB_ADDR_CONFIG_GOLDEN;
		break;
	case CHIP_BARTS:
		rdev->config.evergreen.num_ses = 2;
		rdev->config.evergreen.max_pipes = 4;
		rdev->config.evergreen.max_tile_pipes = 8;
		rdev->config.evergreen.max_simds = 7;
		rdev->config.evergreen.max_backends = 4 * rdev->config.evergreen.num_ses;
		rdev->config.evergreen.max_gprs = 256;
		rdev->config.evergreen.max_threads = 248;
		rdev->config.evergreen.max_gs_threads = 32;
		rdev->config.evergreen.max_stack_entries = 512;
		rdev->config.evergreen.sx_num_of_sets = 4;
		rdev->config.evergreen.sx_max_export_size = 256;
		rdev->config.evergreen.sx_max_export_pos_size = 64;
		rdev->config.evergreen.sx_max_export_smx_size = 192;
		rdev->config.evergreen.max_hw_contexts = 8;
		rdev->config.evergreen.sq_num_cf_insts = 2;

		rdev->config.evergreen.sc_prim_fifo_size = 0x100;
		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
		gb_addr_config = BARTS_GB_ADDR_CONFIG_GOLDEN;
		break;
	case CHIP_TURKS:
		rdev->config.evergreen.num_ses = 1;
		rdev->config.evergreen.max_pipes = 4;
		rdev->config.evergreen.max_tile_pipes = 4;
		rdev->config.evergreen.max_simds = 6;
		rdev->config.evergreen.max_backends = 2 * rdev->config.evergreen.num_ses;
		rdev->config.evergreen.max_gprs = 256;
		rdev->config.evergreen.max_threads = 248;
		rdev->config.evergreen.max_gs_threads = 32;
		rdev->config.evergreen.max_stack_entries = 256;
		rdev->config.evergreen.sx_num_of_sets = 4;
		rdev->config.evergreen.sx_max_export_size = 256;
		rdev->config.evergreen.sx_max_export_pos_size = 64;
		rdev->config.evergreen.sx_max_export_smx_size = 192;
		rdev->config.evergreen.max_hw_contexts = 8;
		rdev->config.evergreen.sq_num_cf_insts = 2;

		rdev->config.evergreen.sc_prim_fifo_size = 0x100;
		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
		gb_addr_config = TURKS_GB_ADDR_CONFIG_GOLDEN;
		break;
	case CHIP_CAICOS:
		rdev->config.evergreen.num_ses = 1;
		rdev->config.evergreen.max_pipes = 2;
		rdev->config.evergreen.max_tile_pipes = 2;
		rdev->config.evergreen.max_simds = 2;
		rdev->config.evergreen.max_backends = 1 * rdev->config.evergreen.num_ses;
		rdev->config.evergreen.max_gprs = 256;
		rdev->config.evergreen.max_threads = 192;
		rdev->config.evergreen.max_gs_threads = 16;
		rdev->config.evergreen.max_stack_entries = 256;
		rdev->config.evergreen.sx_num_of_sets = 4;
		rdev->config.evergreen.sx_max_export_size = 128;
		rdev->config.evergreen.sx_max_export_pos_size = 32;
		rdev->config.evergreen.sx_max_export_smx_size = 96;
		rdev->config.evergreen.max_hw_contexts = 4;
		rdev->config.evergreen.sq_num_cf_insts = 1;

		rdev->config.evergreen.sc_prim_fifo_size = 0x40;
		rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
		rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
		gb_addr_config = CAICOS_GB_ADDR_CONFIG_GOLDEN;
		break;
	}

	/* Initialize HDP: zero five registers in each of 32 register groups
	 * (groups are 0x18 bytes apart) */
	for (i = 0, j = 0; i < 32; i++, j += 0x18) {
		WREG32((0x2c14 + j), 0x00000000);
		WREG32((0x2c18 + j), 0x00000000);
		WREG32((0x2c1c + j), 0x00000000);
		WREG32((0x2c20 + j), 0x00000000);
		WREG32((0x2c24 + j), 0x00000000);
	}

	WREG32(GRBM_CNTL, GRBM_READ_TIMEOUT(0xff));
	WREG32(SRBM_INT_CNTL, 0x1);
	WREG32(SRBM_INT_ACK, 0x1);

	evergreen_fix_pci_max_read_req_size(rdev);

	/* NOTE(review): mc_shared_chmap is read here but never used below */
	mc_shared_chmap = RREG32(MC_SHARED_CHMAP);
	/* fusion (IGP) parts expose the MC arbiter config at a different register */
	if ((rdev->family == CHIP_PALM) ||
	    (rdev->family == CHIP_SUMO) ||
	    (rdev->family == CHIP_SUMO2))
		mc_arb_ramcfg = RREG32(FUS_MC_ARB_RAMCFG);
	else
		mc_arb_ramcfg = RREG32(MC_ARB_RAMCFG);

	/* setup tiling info dword.  gb_addr_config is not adequate since it does
	 * not have bank info, so create a custom tiling dword.
	 * bits 3:0   num_pipes
	 * bits 7:4   num_banks
	 * bits 11:8  group_size
	 * bits 15:12 row_size
	 */
	rdev->config.evergreen.tile_config = 0;
	switch (rdev->config.evergreen.max_tile_pipes) {
	case 1:
	default:
		rdev->config.evergreen.tile_config |= (0 << 0);
		break;
	case 2:
		rdev->config.evergreen.tile_config |= (1 << 0);
		break;
	case 4:
		rdev->config.evergreen.tile_config |= (2 << 0);
		break;
	case 8:
		rdev->config.evergreen.tile_config |= (3 << 0);
		break;
	}
	/* num banks is 8 on all fusion asics. 0 = 4, 1 = 8, 2 = 16 */
	if (rdev->flags & RADEON_IS_IGP)
		rdev->config.evergreen.tile_config |= 1 << 4;
	else {
		switch ((mc_arb_ramcfg & NOOFBANK_MASK) >> NOOFBANK_SHIFT) {
		case 0: /* four banks */
			rdev->config.evergreen.tile_config |= 0 << 4;
			break;
		case 1: /* eight banks */
			rdev->config.evergreen.tile_config |= 1 << 4;
			break;
		case 2: /* sixteen banks */
		default:
			rdev->config.evergreen.tile_config |= 2 << 4;
			break;
		}
	}
	rdev->config.evergreen.tile_config |= 0 << 8;
	/* row_size comes from GB_ADDR_CONFIG bits 29:28 */
	rdev->config.evergreen.tile_config |=
		((gb_addr_config & 0x30000000) >> 28) << 12;

	/* Determine which render backends are disabled.  On first-generation
	 * evergreen (CEDAR..HEMLOCK) read the efuse straps; on later parts
	 * build the mask by reading CC_RB_BACKEND_DISABLE per shader engine. */
	if ((rdev->family >= CHIP_CEDAR) && (rdev->family <= CHIP_HEMLOCK)) {
		u32 efuse_straps_4;
		u32 efuse_straps_3;

		efuse_straps_4 = RREG32_RCU(0x204);
		efuse_straps_3 = RREG32_RCU(0x203);
		tmp = (((efuse_straps_4 & 0xf) << 4) |
		      ((efuse_straps_3 & 0xf0000000) >> 28));
	} else {
		tmp = 0;
		for (i = (rdev->config.evergreen.num_ses - 1); i >= 0; i--) {
			u32 rb_disable_bitmap;

			WREG32(GRBM_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_INDEX(i));
			WREG32(RLC_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_INDEX(i));
			rb_disable_bitmap = (RREG32(CC_RB_BACKEND_DISABLE) & 0x00ff0000) >> 16;
			tmp <<= 4;
			tmp |= rb_disable_bitmap;
		}
	}
	/* enabled rb are just the one not disabled :) */
	disabled_rb_mask = tmp;
	tmp = 0;
	for (i = 0; i < rdev->config.evergreen.max_backends; i++)
		tmp |= (1 << i);
	/* if all the backends are disabled, fix it up here */
	if ((disabled_rb_mask & tmp) == tmp) {
		for (i = 0; i < rdev->config.evergreen.max_backends; i++)
			disabled_rb_mask &= ~(1 << i);
	}

	/* Count active SIMDs across all shader engines: accumulate the
	 * per-SE disable bitmaps, then popcount the inverse. */
	for (i = 0; i < rdev->config.evergreen.num_ses; i++) {
		u32 simd_disable_bitmap;

		WREG32(GRBM_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_INDEX(i));
		WREG32(RLC_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_INDEX(i));
		simd_disable_bitmap = (RREG32(CC_GC_SHADER_PIPE_CONFIG) & 0xffff0000) >> 16;
		/* mark SIMDs beyond max_simds as disabled too */
		simd_disable_bitmap |= 0xffffffff << rdev->config.evergreen.max_simds;
		tmp <<= 16;
		tmp |= simd_disable_bitmap;
	}
	rdev->config.evergreen.active_simds = hweight32(~tmp);

	/* restore broadcast addressing for subsequent register writes */
	WREG32(GRBM_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_BROADCAST_WRITES);
	WREG32(RLC_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_BROADCAST_WRITES);

	/* mirror the addressing config into all the blocks that need it */
	WREG32(GB_ADDR_CONFIG, gb_addr_config);
	WREG32(DMIF_ADDR_CONFIG, gb_addr_config);
	WREG32(HDP_ADDR_CONFIG, gb_addr_config);
	WREG32(DMA_TILING_CONFIG, gb_addr_config);
	WREG32(UVD_UDEC_ADDR_CONFIG, gb_addr_config);
	WREG32(UVD_UDEC_DB_ADDR_CONFIG, gb_addr_config);
	WREG32(UVD_UDEC_DBW_ADDR_CONFIG, gb_addr_config);

	/* Build the backend map.  Single-backend IGPs are handled directly;
	 * everything else goes through the generic r6xx remapper. */
	if ((rdev->config.evergreen.max_backends == 1) &&
	    (rdev->flags & RADEON_IS_IGP)) {
		if ((disabled_rb_mask & 3) == 1) {
			/* RB0 disabled, RB1 enabled */
			tmp = 0x11111111;
		} else {
			/* RB1 disabled, RB0 enabled */
			tmp = 0x00000000;
		}
	} else {
		tmp = gb_addr_config & NUM_PIPES_MASK;
		tmp = r6xx_remap_render_backend(rdev, tmp, rdev->config.evergreen.max_backends,
						EVERGREEN_MAX_BACKENDS, disabled_rb_mask);
	}
	WREG32(GB_BACKEND_MAP, tmp);

	/* enable all texture cache channels */
	WREG32(CGTS_SYS_TCC_DISABLE, 0);
	WREG32(CGTS_TCC_DISABLE, 0);
	WREG32(CGTS_USER_SYS_TCC_DISABLE, 0);
	WREG32(CGTS_USER_TCC_DISABLE, 0);

	/* set HW defaults for 3D engine */
	WREG32(CP_QUEUE_THRESHOLDS, (ROQ_IB1_START(0x16) |
				     ROQ_IB2_START(0x2b)));

	WREG32(CP_MEQ_THRESHOLDS, STQ_SPLIT(0x30));

	WREG32(TA_CNTL_AUX, (DISABLE_CUBE_ANISO |
			     SYNC_GRADIENT |
			     SYNC_WALKER |
			     SYNC_ALIGNER));

	sx_debug_1 = RREG32(SX_DEBUG_1);
	sx_debug_1 |= ENABLE_NEW_SMX_ADDRESS;
	WREG32(SX_DEBUG_1, sx_debug_1);


	smx_dc_ctl0 = RREG32(SMX_DC_CTL0);
	smx_dc_ctl0 &= ~NUMBER_OF_SETS(0x1ff);
	smx_dc_ctl0 |= NUMBER_OF_SETS(rdev->config.evergreen.sx_num_of_sets);
	WREG32(SMX_DC_CTL0, smx_dc_ctl0);

	if (rdev->family <= CHIP_SUMO2)
		WREG32(SMX_SAR_CTL0, 0x00010000);

	/* export buffer sizes are programmed in units of 4, minus one */
	WREG32(SX_EXPORT_BUFFER_SIZES, (COLOR_BUFFER_SIZE((rdev->config.evergreen.sx_max_export_size / 4) - 1) |
					POSITION_BUFFER_SIZE((rdev->config.evergreen.sx_max_export_pos_size / 4) - 1) |
					SMX_BUFFER_SIZE((rdev->config.evergreen.sx_max_export_smx_size / 4) - 1)));

	WREG32(PA_SC_FIFO_SIZE, (SC_PRIM_FIFO_SIZE(rdev->config.evergreen.sc_prim_fifo_size) |
				 SC_HIZ_TILE_FIFO_SIZE(rdev->config.evergreen.sc_hiz_tile_fifo_size) |
				 SC_EARLYZ_TILE_FIFO_SIZE(rdev->config.evergreen.sc_earlyz_tile_fifo_size)));

	WREG32(VGT_NUM_INSTANCES, 1);
	WREG32(SPI_CONFIG_CNTL, 0);
	WREG32(SPI_CONFIG_CNTL_1, VTX_DONE_DELAY(4));
	WREG32(CP_PERFMON_CNTL, 0);

	WREG32(SQ_MS_FIFO_SIZES, (CACHE_FIFO_SIZE(16 * rdev->config.evergreen.sq_num_cf_insts) |
				  FETCH_FIFO_HIWATER(0x4) |
				  DONE_FIFO_HIWATER(0xe0) |
				  ALU_UPDATE_FIFO_HIWATER(0x8)));

	/* shader stage arbitration priorities: PS highest, ES lowest */
	sq_config = RREG32(SQ_CONFIG);
	sq_config &= ~(PS_PRIO(3) |
		       VS_PRIO(3) |
		       GS_PRIO(3) |
		       ES_PRIO(3));
	sq_config |= (VC_ENABLE |
		      EXPORT_SRC_C |
		      PS_PRIO(0) |
		      VS_PRIO(1) |
		      GS_PRIO(2) |
		      ES_PRIO(3));

	switch (rdev->family) {
	case CHIP_CEDAR:
	case CHIP_PALM:
	case CHIP_SUMO:
	case CHIP_SUMO2:
	case CHIP_CAICOS:
		/* no vertex cache */
		sq_config &= ~VC_ENABLE;
		break;
	default:
		break;
	}

	sq_lds_resource_mgmt = RREG32(SQ_LDS_RESOURCE_MGMT);

	/* Split the GPR pool (minus 4*2 clause-temp GPRs) among shader
	 * stages: PS 12/32, VS 6/32, GS/ES 4/32 each, HS/LS 3/32 each. */
	sq_gpr_resource_mgmt_1 = NUM_PS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2))* 12 / 32);
	sq_gpr_resource_mgmt_1 |= NUM_VS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 6 / 32);
	sq_gpr_resource_mgmt_1 |= NUM_CLAUSE_TEMP_GPRS(4);
	sq_gpr_resource_mgmt_2 = NUM_GS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 4 / 32);
	sq_gpr_resource_mgmt_2 |= NUM_ES_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 4 / 32);
	sq_gpr_resource_mgmt_3 = NUM_HS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 3 / 32);
	sq_gpr_resource_mgmt_3 |= NUM_LS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 3 / 32);

	switch (rdev->family) {
	case CHIP_CEDAR:
	case CHIP_PALM:
	case CHIP_SUMO:
	case CHIP_SUMO2:
		ps_thread_count = 96;
		break;
	default:
		ps_thread_count = 128;
		break;
	}

	/* remaining threads are shared evenly (in multiples of 8) among the
	 * other five shader stages */
	sq_thread_resource_mgmt = NUM_PS_THREADS(ps_thread_count);
	sq_thread_resource_mgmt |= NUM_VS_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
	sq_thread_resource_mgmt |= NUM_GS_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
	sq_thread_resource_mgmt |= NUM_ES_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
	sq_thread_resource_mgmt_2 = NUM_HS_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
	sq_thread_resource_mgmt_2 |= NUM_LS_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);

	/* stack entries are split evenly (1/6 each) among the six stages */
	sq_stack_resource_mgmt_1 = NUM_PS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
	sq_stack_resource_mgmt_1 |= NUM_VS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
	sq_stack_resource_mgmt_2 = NUM_GS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
	sq_stack_resource_mgmt_2 |= NUM_ES_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
	sq_stack_resource_mgmt_3 = NUM_HS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
	sq_stack_resource_mgmt_3 |= NUM_LS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);

	WREG32(SQ_CONFIG, sq_config);
	WREG32(SQ_GPR_RESOURCE_MGMT_1, sq_gpr_resource_mgmt_1);
	WREG32(SQ_GPR_RESOURCE_MGMT_2, sq_gpr_resource_mgmt_2);
	WREG32(SQ_GPR_RESOURCE_MGMT_3, sq_gpr_resource_mgmt_3);
	WREG32(SQ_THREAD_RESOURCE_MGMT, sq_thread_resource_mgmt);
	WREG32(SQ_THREAD_RESOURCE_MGMT_2, sq_thread_resource_mgmt_2);
	WREG32(SQ_STACK_RESOURCE_MGMT_1, sq_stack_resource_mgmt_1);
	WREG32(SQ_STACK_RESOURCE_MGMT_2, sq_stack_resource_mgmt_2);
	WREG32(SQ_STACK_RESOURCE_MGMT_3, sq_stack_resource_mgmt_3);
	WREG32(SQ_DYN_GPR_CNTL_PS_FLUSH_REQ, 0);
	WREG32(SQ_LDS_RESOURCE_MGMT, sq_lds_resource_mgmt);

	WREG32(PA_SC_FORCE_EOV_MAX_CNTS, (FORCE_EOV_MAX_CLK_CNT(4095) |
					  FORCE_EOV_MAX_REZ_CNT(255)));

	/* parts without a vertex cache only invalidate the texture cache */
	switch (rdev->family) {
	case CHIP_CEDAR:
	case CHIP_PALM:
	case CHIP_SUMO:
	case CHIP_SUMO2:
	case CHIP_CAICOS:
		vgt_cache_invalidation = CACHE_INVALIDATION(TC_ONLY);
		break;
	default:
		vgt_cache_invalidation = CACHE_INVALIDATION(VC_AND_TC);
		break;
	}
	vgt_cache_invalidation |= AUTO_INVLD_EN(ES_AND_GS_AUTO);
	WREG32(VGT_CACHE_INVALIDATION, vgt_cache_invalidation);

	WREG32(VGT_GS_VERTEX_REUSE, 16);
	WREG32(PA_SU_LINE_STIPPLE_VALUE, 0);
	WREG32(PA_SC_LINE_STIPPLE_STATE, 0);

	WREG32(VGT_VERTEX_REUSE_BLOCK_CNTL, 14);
	WREG32(VGT_OUT_DEALLOC_CNTL, 16);

	/* reset CB performance counter selects */
	WREG32(CB_PERF_CTR0_SEL_0, 0);
	WREG32(CB_PERF_CTR0_SEL_1, 0);
	WREG32(CB_PERF_CTR1_SEL_0, 0);
	WREG32(CB_PERF_CTR1_SEL_1, 0);
	WREG32(CB_PERF_CTR2_SEL_0, 0);
	WREG32(CB_PERF_CTR2_SEL_1, 0);
	WREG32(CB_PERF_CTR3_SEL_0, 0);
	WREG32(CB_PERF_CTR3_SEL_1, 0);

	/* clear render buffer base addresses */
	WREG32(CB_COLOR0_BASE, 0);
	WREG32(CB_COLOR1_BASE, 0);
	WREG32(CB_COLOR2_BASE, 0);
	WREG32(CB_COLOR3_BASE, 0);
	WREG32(CB_COLOR4_BASE, 0);
	WREG32(CB_COLOR5_BASE, 0);
	WREG32(CB_COLOR6_BASE, 0);
	WREG32(CB_COLOR7_BASE, 0);
	WREG32(CB_COLOR8_BASE, 0);
	WREG32(CB_COLOR9_BASE, 0);
	WREG32(CB_COLOR10_BASE, 0);
	WREG32(CB_COLOR11_BASE, 0);

	/* set the shader const cache sizes to 0 */
	for (i = SQ_ALU_CONST_BUFFER_SIZE_PS_0; i < 0x28200; i += 4)
		WREG32(i, 0);
	for (i = SQ_ALU_CONST_BUFFER_SIZE_HS_0; i < 0x29000; i += 4)
		WREG32(i, 0);

	tmp = RREG32(HDP_MISC_CNTL);
	tmp |= HDP_FLUSH_INVALIDATE_CACHE;
	WREG32(HDP_MISC_CNTL, tmp);

	/* read-modify-write to post/latch the host path control value */
	hdp_host_path_cntl = RREG32(HDP_HOST_PATH_CNTL);
	WREG32(HDP_HOST_PATH_CNTL, hdp_host_path_cntl);

	WREG32(PA_CL_ENHANCE, CLIP_VTX_REORDER_ENA | NUM_CLIP_SEQ(3));

	/* give the hardware time to settle before returning */
	udelay(50);

}
3569
3570 int evergreen_mc_init(struct radeon_device *rdev)
3571 {
3572         u32 tmp;
3573         int chansize, numchan;
3574
3575         /* Get VRAM informations */
3576         rdev->mc.vram_is_ddr = true;
3577         if ((rdev->family == CHIP_PALM) ||
3578             (rdev->family == CHIP_SUMO) ||
3579             (rdev->family == CHIP_SUMO2))
3580                 tmp = RREG32(FUS_MC_ARB_RAMCFG);
3581         else
3582                 tmp = RREG32(MC_ARB_RAMCFG);
3583         if (tmp & CHANSIZE_OVERRIDE) {
3584                 chansize = 16;
3585         } else if (tmp & CHANSIZE_MASK) {
3586                 chansize = 64;
3587         } else {
3588                 chansize = 32;
3589         }
3590         tmp = RREG32(MC_SHARED_CHMAP);
3591         switch ((tmp & NOOFCHAN_MASK) >> NOOFCHAN_SHIFT) {
3592         case 0:
3593         default:
3594                 numchan = 1;
3595                 break;
3596         case 1:
3597                 numchan = 2;
3598                 break;
3599         case 2:
3600                 numchan = 4;
3601                 break;
3602         case 3:
3603                 numchan = 8;
3604                 break;
3605         }
3606         rdev->mc.vram_width = numchan * chansize;
3607         /* Could aper size report 0 ? */
3608         rdev->mc.aper_base = pci_resource_start(rdev->pdev, 0);
3609         rdev->mc.aper_size = pci_resource_len(rdev->pdev, 0);
3610         /* Setup GPU memory space */
3611         if ((rdev->family == CHIP_PALM) ||
3612             (rdev->family == CHIP_SUMO) ||
3613             (rdev->family == CHIP_SUMO2)) {
3614                 /* size in bytes on fusion */
3615                 rdev->mc.mc_vram_size = RREG32(CONFIG_MEMSIZE);
3616                 rdev->mc.real_vram_size = RREG32(CONFIG_MEMSIZE);
3617         } else {
3618                 /* size in MB on evergreen/cayman/tn */
3619                 rdev->mc.mc_vram_size = RREG32(CONFIG_MEMSIZE) * 1024ULL * 1024ULL;
3620                 rdev->mc.real_vram_size = RREG32(CONFIG_MEMSIZE) * 1024ULL * 1024ULL;
3621         }
3622         rdev->mc.visible_vram_size = rdev->mc.aper_size;
3623         r700_vram_gtt_location(rdev, &rdev->mc);
3624         radeon_update_bandwidth_info(rdev);
3625
3626         return 0;
3627 }
3628
/**
 * evergreen_print_gpu_status_regs - dump GPU status registers
 *
 * @rdev: radeon_device pointer
 *
 * Logs the GRBM/SRBM, CP and DMA status registers to aid lockup
 * debugging; called before and after a soft reset.
 */
void evergreen_print_gpu_status_regs(struct radeon_device *rdev)
{
	dev_info(rdev->dev, "  GRBM_STATUS               = 0x%08X\n",
		RREG32(GRBM_STATUS));
	dev_info(rdev->dev, "  GRBM_STATUS_SE0           = 0x%08X\n",
		RREG32(GRBM_STATUS_SE0));
	dev_info(rdev->dev, "  GRBM_STATUS_SE1           = 0x%08X\n",
		RREG32(GRBM_STATUS_SE1));
	dev_info(rdev->dev, "  SRBM_STATUS               = 0x%08X\n",
		RREG32(SRBM_STATUS));
	dev_info(rdev->dev, "  SRBM_STATUS2              = 0x%08X\n",
		RREG32(SRBM_STATUS2));
	dev_info(rdev->dev, "  R_008674_CP_STALLED_STAT1 = 0x%08X\n",
		RREG32(CP_STALLED_STAT1));
	dev_info(rdev->dev, "  R_008678_CP_STALLED_STAT2 = 0x%08X\n",
		RREG32(CP_STALLED_STAT2));
	dev_info(rdev->dev, "  R_00867C_CP_BUSY_STAT     = 0x%08X\n",
		RREG32(CP_BUSY_STAT));
	dev_info(rdev->dev, "  R_008680_CP_STAT          = 0x%08X\n",
		RREG32(CP_STAT));
	dev_info(rdev->dev, "  R_00D034_DMA_STATUS_REG   = 0x%08X\n",
		RREG32(DMA_STATUS_REG));
	/* cayman and newer have a second DMA engine at a 0x800 offset */
	if (rdev->family >= CHIP_CAYMAN) {
		dev_info(rdev->dev, "  R_00D834_DMA_STATUS_REG   = 0x%08X\n",
			 RREG32(DMA_STATUS_REG + 0x800));
	}
}
3656
3657 bool evergreen_is_display_hung(struct radeon_device *rdev)
3658 {
3659         u32 crtc_hung = 0;
3660         u32 crtc_status[6];
3661         u32 i, j, tmp;
3662
3663         for (i = 0; i < rdev->num_crtc; i++) {
3664                 if (RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]) & EVERGREEN_CRTC_MASTER_EN) {
3665                         crtc_status[i] = RREG32(EVERGREEN_CRTC_STATUS_HV_COUNT + crtc_offsets[i]);
3666                         crtc_hung |= (1 << i);
3667                 }
3668         }
3669
3670         for (j = 0; j < 10; j++) {
3671                 for (i = 0; i < rdev->num_crtc; i++) {
3672                         if (crtc_hung & (1 << i)) {
3673                                 tmp = RREG32(EVERGREEN_CRTC_STATUS_HV_COUNT + crtc_offsets[i]);
3674                                 if (tmp != crtc_status[i])
3675                                         crtc_hung &= ~(1 << i);
3676                         }
3677                 }
3678                 if (crtc_hung == 0)
3679                         return false;
3680                 udelay(100);
3681         }
3682
3683         return true;
3684 }
3685
/**
 * evergreen_gpu_check_soft_reset - check which blocks are busy
 *
 * @rdev: radeon_device pointer
 *
 * Reads the GRBM/SRBM/DMA/VM status registers and translates any busy
 * bits into a mask of RADEON_RESET_* flags describing which blocks
 * need to be soft reset.  Returns 0 when the GPU appears idle.
 */
u32 evergreen_gpu_check_soft_reset(struct radeon_device *rdev)
{
	u32 reset_mask = 0;
	u32 tmp;

	/* GRBM_STATUS */
	tmp = RREG32(GRBM_STATUS);
	if (tmp & (PA_BUSY | SC_BUSY |
		   SH_BUSY | SX_BUSY |
		   TA_BUSY | VGT_BUSY |
		   DB_BUSY | CB_BUSY |
		   SPI_BUSY | VGT_BUSY_NO_DMA))
		reset_mask |= RADEON_RESET_GFX;

	if (tmp & (CF_RQ_PENDING | PF_RQ_PENDING |
		   CP_BUSY | CP_COHERENCY_BUSY))
		reset_mask |= RADEON_RESET_CP;

	/* event engine busy implies several blocks need resetting */
	if (tmp & GRBM_EE_BUSY)
		reset_mask |= RADEON_RESET_GRBM | RADEON_RESET_GFX | RADEON_RESET_CP;

	/* DMA_STATUS_REG */
	tmp = RREG32(DMA_STATUS_REG);
	if (!(tmp & DMA_IDLE))
		reset_mask |= RADEON_RESET_DMA;

	/* SRBM_STATUS2 */
	tmp = RREG32(SRBM_STATUS2);
	if (tmp & DMA_BUSY)
		reset_mask |= RADEON_RESET_DMA;

	/* SRBM_STATUS */
	tmp = RREG32(SRBM_STATUS);
	if (tmp & (RLC_RQ_PENDING | RLC_BUSY))
		reset_mask |= RADEON_RESET_RLC;

	if (tmp & IH_BUSY)
		reset_mask |= RADEON_RESET_IH;

	if (tmp & SEM_BUSY)
		reset_mask |= RADEON_RESET_SEM;

	if (tmp & GRBM_RQ_PENDING)
		reset_mask |= RADEON_RESET_GRBM;

	if (tmp & VMC_BUSY)
		reset_mask |= RADEON_RESET_VMC;

	if (tmp & (MCB_BUSY | MCB_NON_DISPLAY_BUSY |
		   MCC_BUSY | MCD_BUSY))
		reset_mask |= RADEON_RESET_MC;

	if (evergreen_is_display_hung(rdev))
		reset_mask |= RADEON_RESET_DISPLAY;

	/* VM_L2_STATUS */
	tmp = RREG32(VM_L2_STATUS);
	if (tmp & L2_BUSY)
		reset_mask |= RADEON_RESET_VMC;

	/* Skip MC reset as it's mostly likely not hung, just busy */
	if (reset_mask & RADEON_RESET_MC) {
		DRM_DEBUG("MC busy: 0x%08X, clearing.\n", reset_mask);
		reset_mask &= ~RADEON_RESET_MC;
	}

	return reset_mask;
}
3754
/**
 * evergreen_gpu_soft_reset - soft reset the blocks in @reset_mask
 *
 * @rdev: radeon_device pointer
 * @reset_mask: mask of RADEON_RESET_* flags selecting which blocks to reset
 *
 * Halts the CP (and DMA if requested), stops MC access, pulses the
 * corresponding GRBM/SRBM soft reset bits, then restores MC access.
 * No-op when @reset_mask is 0.
 */
static void evergreen_gpu_soft_reset(struct radeon_device *rdev, u32 reset_mask)
{
	struct evergreen_mc_save save;
	u32 grbm_soft_reset = 0, srbm_soft_reset = 0;
	u32 tmp;

	if (reset_mask == 0)
		return;

	dev_info(rdev->dev, "GPU softreset: 0x%08X\n", reset_mask);

	evergreen_print_gpu_status_regs(rdev);

	/* Disable CP parsing/prefetching */
	WREG32(CP_ME_CNTL, CP_ME_HALT | CP_PFP_HALT);

	if (reset_mask & RADEON_RESET_DMA) {
		/* Disable DMA */
		tmp = RREG32(DMA_RB_CNTL);
		tmp &= ~DMA_RB_ENABLE;
		WREG32(DMA_RB_CNTL, tmp);
	}

	udelay(50);

	/* stop memory traffic before touching the reset bits */
	evergreen_mc_stop(rdev, &save);
	if (evergreen_mc_wait_for_idle(rdev)) {
		dev_warn(rdev->dev, "Wait for MC idle timedout !\n");
	}

	/* translate the reset mask into GRBM/SRBM soft reset bits */
	if (reset_mask & (RADEON_RESET_GFX | RADEON_RESET_COMPUTE)) {
		grbm_soft_reset |= SOFT_RESET_DB |
			SOFT_RESET_CB |
			SOFT_RESET_PA |
			SOFT_RESET_SC |
			SOFT_RESET_SPI |
			SOFT_RESET_SX |
			SOFT_RESET_SH |
			SOFT_RESET_TC |
			SOFT_RESET_TA |
			SOFT_RESET_VC |
			SOFT_RESET_VGT;
	}

	if (reset_mask & RADEON_RESET_CP) {
		grbm_soft_reset |= SOFT_RESET_CP |
			SOFT_RESET_VGT;

		srbm_soft_reset |= SOFT_RESET_GRBM;
	}

	if (reset_mask & RADEON_RESET_DMA)
		srbm_soft_reset |= SOFT_RESET_DMA;

	if (reset_mask & RADEON_RESET_DISPLAY)
		srbm_soft_reset |= SOFT_RESET_DC;

	if (reset_mask & RADEON_RESET_RLC)
		srbm_soft_reset |= SOFT_RESET_RLC;

	if (reset_mask & RADEON_RESET_SEM)
		srbm_soft_reset |= SOFT_RESET_SEM;

	if (reset_mask & RADEON_RESET_IH)
		srbm_soft_reset |= SOFT_RESET_IH;

	if (reset_mask & RADEON_RESET_GRBM)
		srbm_soft_reset |= SOFT_RESET_GRBM;

	if (reset_mask & RADEON_RESET_VMC)
		srbm_soft_reset |= SOFT_RESET_VMC;

	/* IGPs never get an MC soft reset */
	if (!(rdev->flags & RADEON_IS_IGP)) {
		if (reset_mask & RADEON_RESET_MC)
			srbm_soft_reset |= SOFT_RESET_MC;
	}

	/* pulse the GRBM reset bits: set, delay, clear; the readbacks
	 * presumably post the writes before the delay — standard MMIO
	 * practice in this driver
	 */
	if (grbm_soft_reset) {
		tmp = RREG32(GRBM_SOFT_RESET);
		tmp |= grbm_soft_reset;
		dev_info(rdev->dev, "GRBM_SOFT_RESET=0x%08X\n", tmp);
		WREG32(GRBM_SOFT_RESET, tmp);
		tmp = RREG32(GRBM_SOFT_RESET);

		udelay(50);

		tmp &= ~grbm_soft_reset;
		WREG32(GRBM_SOFT_RESET, tmp);
		tmp = RREG32(GRBM_SOFT_RESET);
	}

	/* same set/delay/clear pulse for the SRBM reset bits */
	if (srbm_soft_reset) {
		tmp = RREG32(SRBM_SOFT_RESET);
		tmp |= srbm_soft_reset;
		dev_info(rdev->dev, "SRBM_SOFT_RESET=0x%08X\n", tmp);
		WREG32(SRBM_SOFT_RESET, tmp);
		tmp = RREG32(SRBM_SOFT_RESET);

		udelay(50);

		tmp &= ~srbm_soft_reset;
		WREG32(SRBM_SOFT_RESET, tmp);
		tmp = RREG32(SRBM_SOFT_RESET);
	}

	/* Wait a little for things to settle down */
	udelay(50);

	evergreen_mc_resume(rdev, &save);
	udelay(50);

	evergreen_print_gpu_status_regs(rdev);
}
3868
/**
 * evergreen_gpu_pci_config_reset - reset the asic via the pci config space
 *
 * @rdev: radeon_device pointer
 *
 * Heavier reset used when a soft reset is not enough: halts the CP,
 * DMA and RLC, bypasses the clocks, disables bus mastering and MC
 * access, then resets the chip through pci config space and waits for
 * it to come back (CONFIG_MEMSIZE reads 0xffffffff while in reset).
 */
void evergreen_gpu_pci_config_reset(struct radeon_device *rdev)
{
	struct evergreen_mc_save save;
	u32 tmp, i;

	dev_info(rdev->dev, "GPU pci config reset\n");

	/* disable dpm? */

	/* Disable CP parsing/prefetching */
	WREG32(CP_ME_CNTL, CP_ME_HALT | CP_PFP_HALT);
	udelay(50);
	/* Disable DMA */
	tmp = RREG32(DMA_RB_CNTL);
	tmp &= ~DMA_RB_ENABLE;
	WREG32(DMA_RB_CNTL, tmp);
	/* XXX other engines? */

	/* halt the rlc */
	r600_rlc_stop(rdev);

	udelay(50);

	/* set mclk/sclk to bypass */
	rv770_set_clk_bypass_mode(rdev);
	/* disable BM */
	pci_clear_master(rdev->pdev);
	/* disable mem access */
	evergreen_mc_stop(rdev, &save);
	if (evergreen_mc_wait_for_idle(rdev)) {
		dev_warn(rdev->dev, "Wait for MC idle timed out !\n");
	}
	/* reset */
	radeon_pci_config_reset(rdev);
	/* wait for asic to come out of reset */
	for (i = 0; i < rdev->usec_timeout; i++) {
		if (RREG32(CONFIG_MEMSIZE) != 0xffffffff)
			break;
		udelay(1);
	}
}
3910
3911 int evergreen_asic_reset(struct radeon_device *rdev)
3912 {
3913         u32 reset_mask;
3914
3915         reset_mask = evergreen_gpu_check_soft_reset(rdev);
3916
3917         if (reset_mask)
3918                 r600_set_bios_scratch_engine_hung(rdev, true);
3919
3920         /* try soft reset */
3921         evergreen_gpu_soft_reset(rdev, reset_mask);
3922
3923         reset_mask = evergreen_gpu_check_soft_reset(rdev);
3924
3925         /* try pci config reset */
3926         if (reset_mask && radeon_hard_reset)
3927                 evergreen_gpu_pci_config_reset(rdev);
3928
3929         reset_mask = evergreen_gpu_check_soft_reset(rdev);
3930
3931         if (!reset_mask)
3932                 r600_set_bios_scratch_engine_hung(rdev, false);
3933
3934         return 0;
3935 }
3936
3937 /**
3938  * evergreen_gfx_is_lockup - Check if the GFX engine is locked up
3939  *
3940  * @rdev: radeon_device pointer
3941  * @ring: radeon_ring structure holding ring information
3942  *
3943  * Check if the GFX engine is locked up.
3944  * Returns true if the engine appears to be locked up, false if not.
3945  */
3946 bool evergreen_gfx_is_lockup(struct radeon_device *rdev, struct radeon_ring *ring)
3947 {
3948         u32 reset_mask = evergreen_gpu_check_soft_reset(rdev);
3949
3950         if (!(reset_mask & (RADEON_RESET_GFX |
3951                             RADEON_RESET_COMPUTE |
3952                             RADEON_RESET_CP))) {
3953                 radeon_ring_lockup_update(rdev, ring);
3954                 return false;
3955         }
3956         return radeon_ring_test_lockup(rdev, ring);
3957 }
3958
3959 /*
3960  * RLC
3961  */
3962 #define RLC_SAVE_RESTORE_LIST_END_MARKER    0x00000000
3963 #define RLC_CLEAR_STATE_END_MARKER          0x00000001
3964
3965 void sumo_rlc_fini(struct radeon_device *rdev)
3966 {
3967         int r;
3968
3969         /* save restore block */
3970         if (rdev->rlc.save_restore_obj) {
3971                 r = radeon_bo_reserve(rdev->rlc.save_restore_obj, false);
3972                 if (unlikely(r != 0))
3973                         dev_warn(rdev->dev, "(%d) reserve RLC sr bo failed\n", r);
3974                 radeon_bo_unpin(rdev->rlc.save_restore_obj);
3975                 radeon_bo_unreserve(rdev->rlc.save_restore_obj);
3976
3977                 radeon_bo_unref(&rdev->rlc.save_restore_obj);
3978                 rdev->rlc.save_restore_obj = NULL;
3979         }
3980
3981         /* clear state block */
3982         if (rdev->rlc.clear_state_obj) {
3983                 r = radeon_bo_reserve(rdev->rlc.clear_state_obj, false);
3984                 if (unlikely(r != 0))
3985                         dev_warn(rdev->dev, "(%d) reserve RLC c bo failed\n", r);
3986                 radeon_bo_unpin(rdev->rlc.clear_state_obj);
3987                 radeon_bo_unreserve(rdev->rlc.clear_state_obj);
3988
3989                 radeon_bo_unref(&rdev->rlc.clear_state_obj);
3990                 rdev->rlc.clear_state_obj = NULL;
3991         }
3992
3993         /* clear state block */
3994         if (rdev->rlc.cp_table_obj) {
3995                 r = radeon_bo_reserve(rdev->rlc.cp_table_obj, false);
3996                 if (unlikely(r != 0))
3997                         dev_warn(rdev->dev, "(%d) reserve RLC cp table bo failed\n", r);
3998                 radeon_bo_unpin(rdev->rlc.cp_table_obj);
3999                 radeon_bo_unreserve(rdev->rlc.cp_table_obj);
4000
4001                 radeon_bo_unref(&rdev->rlc.cp_table_obj);
4002                 rdev->rlc.cp_table_obj = NULL;
4003         }
4004 }
4005
4006 #define CP_ME_TABLE_SIZE    96
4007
/**
 * sumo_rlc_init - allocate and fill the RLC buffer objects
 *
 * @rdev: radeon_device pointer
 *
 * Creates, pins, maps and fills the VRAM buffer objects consumed by
 * the RLC ucode: the register save/restore list, the clear state
 * block and (when rlc.cp_table_size is set) the CP power-gating
 * table.  The layouts written differ per family (evergreen/NI vs SI
 * vs CIK), selected by rdev->family checks below.
 * Returns 0 on success or a negative error code; on failure all
 * partially created objects are torn down via sumo_rlc_fini().
 */
int sumo_rlc_init(struct radeon_device *rdev)
{
	const u32 *src_ptr;
	volatile u32 *dst_ptr;
	u32 dws, data, i, j, k, reg_num;
	u32 reg_list_num, reg_list_hdr_blk_index, reg_list_blk_index = 0;
	u64 reg_list_mc_addr;
	const struct cs_section_def *cs_data;
	int r;

	src_ptr = rdev->rlc.reg_list;
	dws = rdev->rlc.reg_list_size;
	/* CIK parts need extra room in the save/restore buffer */
	if (rdev->family >= CHIP_BONAIRE) {
		dws += (5 * 16) + 48 + 48 + 64;
	}
	cs_data = rdev->rlc.cs_data;

	if (src_ptr) {
		/* save restore block */
		if (rdev->rlc.save_restore_obj == NULL) {
			r = radeon_bo_create(rdev, dws * 4, PAGE_SIZE, true,
					     RADEON_GEM_DOMAIN_VRAM, 0, NULL,
					     NULL, &rdev->rlc.save_restore_obj);
			if (r) {
				dev_warn(rdev->dev, "(%d) create RLC sr bo failed\n", r);
				return r;
			}
		}

		r = radeon_bo_reserve(rdev->rlc.save_restore_obj, false);
		if (unlikely(r != 0)) {
			sumo_rlc_fini(rdev);
			return r;
		}
		r = radeon_bo_pin(rdev->rlc.save_restore_obj, RADEON_GEM_DOMAIN_VRAM,
				  &rdev->rlc.save_restore_gpu_addr);
		if (r) {
			radeon_bo_unreserve(rdev->rlc.save_restore_obj);
			dev_warn(rdev->dev, "(%d) pin RLC sr bo failed\n", r);
			sumo_rlc_fini(rdev);
			return r;
		}

		r = radeon_bo_kmap(rdev->rlc.save_restore_obj, (void **)&rdev->rlc.sr_ptr);
		if (r) {
			dev_warn(rdev->dev, "(%d) map RLC sr bo failed\n", r);
			sumo_rlc_fini(rdev);
			return r;
		}
		/* write the sr buffer */
		dst_ptr = rdev->rlc.sr_ptr;
		if (rdev->family >= CHIP_TAHITI) {
			/* SI: plain little-endian copy of the register list */
			for (i = 0; i < rdev->rlc.reg_list_size; i++)
				dst_ptr[i] = cpu_to_le32(src_ptr[i]);
		} else {
			/* ON/LN/TN */
			/* format:
			 * dw0: (reg2 << 16) | reg1
			 * dw1: reg1 save space
			 * dw2: reg2 save space
			 */
			for (i = 0; i < dws; i++) {
				/* pack two dword-offsets per header entry;
				 * i advances twice per iteration
				 */
				data = src_ptr[i] >> 2;
				i++;
				if (i < dws)
					data |= (src_ptr[i] >> 2) << 16;
				j = (((i - 1) * 3) / 2);
				dst_ptr[j] = cpu_to_le32(data);
			}
			j = ((i * 3) / 2);
			dst_ptr[j] = cpu_to_le32(RLC_SAVE_RESTORE_LIST_END_MARKER);
		}
		radeon_bo_kunmap(rdev->rlc.save_restore_obj);
		radeon_bo_unreserve(rdev->rlc.save_restore_obj);
	}

	if (cs_data) {
		/* clear state block: size computation is per-family */
		if (rdev->family >= CHIP_BONAIRE) {
			rdev->rlc.clear_state_size = dws = cik_get_csb_size(rdev);
		} else if (rdev->family >= CHIP_TAHITI) {
			/* SI keeps a 256-byte header in front of the CSB */
			rdev->rlc.clear_state_size = si_get_csb_size(rdev);
			dws = rdev->rlc.clear_state_size + (256 / 4);
		} else {
			/* evergreen/NI: count registers across all sections,
			 * plus 3 header dwords per list and 2 trailer dwords
			 */
			reg_list_num = 0;
			dws = 0;
			for (i = 0; cs_data[i].section != NULL; i++) {
				for (j = 0; cs_data[i].section[j].extent != NULL; j++) {
					reg_list_num++;
					dws += cs_data[i].section[j].reg_count;
				}
			}
			reg_list_blk_index = (3 * reg_list_num + 2);
			dws += reg_list_blk_index;
			rdev->rlc.clear_state_size = dws;
		}

		if (rdev->rlc.clear_state_obj == NULL) {
			r = radeon_bo_create(rdev, dws * 4, PAGE_SIZE, true,
					     RADEON_GEM_DOMAIN_VRAM, 0, NULL,
					     NULL, &rdev->rlc.clear_state_obj);
			if (r) {
				dev_warn(rdev->dev, "(%d) create RLC c bo failed\n", r);
				sumo_rlc_fini(rdev);
				return r;
			}
		}
		r = radeon_bo_reserve(rdev->rlc.clear_state_obj, false);
		if (unlikely(r != 0)) {
			sumo_rlc_fini(rdev);
			return r;
		}
		r = radeon_bo_pin(rdev->rlc.clear_state_obj, RADEON_GEM_DOMAIN_VRAM,
				  &rdev->rlc.clear_state_gpu_addr);
		if (r) {
			radeon_bo_unreserve(rdev->rlc.clear_state_obj);
			dev_warn(rdev->dev, "(%d) pin RLC c bo failed\n", r);
			sumo_rlc_fini(rdev);
			return r;
		}

		r = radeon_bo_kmap(rdev->rlc.clear_state_obj, (void **)&rdev->rlc.cs_ptr);
		if (r) {
			dev_warn(rdev->dev, "(%d) map RLC c bo failed\n", r);
			sumo_rlc_fini(rdev);
			return r;
		}
		/* set up the cs buffer */
		dst_ptr = rdev->rlc.cs_ptr;
		if (rdev->family >= CHIP_BONAIRE) {
			cik_get_csb_buffer(rdev, dst_ptr);
		} else if (rdev->family >= CHIP_TAHITI) {
			/* SI: header holds the GPU address and size of the
			 * CSB that follows at offset 256
			 */
			reg_list_mc_addr = rdev->rlc.clear_state_gpu_addr + 256;
			dst_ptr[0] = cpu_to_le32(upper_32_bits(reg_list_mc_addr));
			dst_ptr[1] = cpu_to_le32(lower_32_bits(reg_list_mc_addr));
			dst_ptr[2] = cpu_to_le32(rdev->rlc.clear_state_size);
			si_get_csb_buffer(rdev, &dst_ptr[(256/4)]);
		} else {
			/* evergreen/NI: header entries first, register data
			 * payload after, addressed by reg_list_mc_addr
			 */
			reg_list_hdr_blk_index = 0;
			reg_list_mc_addr = rdev->rlc.clear_state_gpu_addr + (reg_list_blk_index * 4);
			data = upper_32_bits(reg_list_mc_addr);
			dst_ptr[reg_list_hdr_blk_index] = cpu_to_le32(data);
			reg_list_hdr_blk_index++;
			for (i = 0; cs_data[i].section != NULL; i++) {
				for (j = 0; cs_data[i].section[j].extent != NULL; j++) {
					reg_num = cs_data[i].section[j].reg_count;
					/* header: payload address low bits */
					data = reg_list_mc_addr & 0xffffffff;
					dst_ptr[reg_list_hdr_blk_index] = cpu_to_le32(data);
					reg_list_hdr_blk_index++;

					/* header: byte offset of the first register */
					data = (cs_data[i].section[j].reg_index * 4) & 0xffffffff;
					dst_ptr[reg_list_hdr_blk_index] = cpu_to_le32(data);
					reg_list_hdr_blk_index++;

					/* header: flags | payload size in bytes */
					data = 0x08000000 | (reg_num * 4);
					dst_ptr[reg_list_hdr_blk_index] = cpu_to_le32(data);
					reg_list_hdr_blk_index++;

					/* payload: the register values themselves */
					for (k = 0; k < reg_num; k++) {
						data = cs_data[i].section[j].extent[k];
						dst_ptr[reg_list_blk_index + k] = cpu_to_le32(data);
					}
					reg_list_mc_addr += reg_num * 4;
					reg_list_blk_index += reg_num;
				}
			}
			dst_ptr[reg_list_hdr_blk_index] = cpu_to_le32(RLC_CLEAR_STATE_END_MARKER);
		}
		radeon_bo_kunmap(rdev->rlc.clear_state_obj);
		radeon_bo_unreserve(rdev->rlc.clear_state_obj);
	}

	/* CP power-gating table, only when the asic code set a size */
	if (rdev->rlc.cp_table_size) {
		if (rdev->rlc.cp_table_obj == NULL) {
			r = radeon_bo_create(rdev, rdev->rlc.cp_table_size,
					     PAGE_SIZE, true,
					     RADEON_GEM_DOMAIN_VRAM, 0, NULL,
					     NULL, &rdev->rlc.cp_table_obj);
			if (r) {
				dev_warn(rdev->dev, "(%d) create RLC cp table bo failed\n", r);
				sumo_rlc_fini(rdev);
				return r;
			}
		}

		r = radeon_bo_reserve(rdev->rlc.cp_table_obj, false);
		if (unlikely(r != 0)) {
			dev_warn(rdev->dev, "(%d) reserve RLC cp table bo failed\n", r);
			sumo_rlc_fini(rdev);
			return r;
		}
		r = radeon_bo_pin(rdev->rlc.cp_table_obj, RADEON_GEM_DOMAIN_VRAM,
				  &rdev->rlc.cp_table_gpu_addr);
		if (r) {
			radeon_bo_unreserve(rdev->rlc.cp_table_obj);
			dev_warn(rdev->dev, "(%d) pin RLC cp_table bo failed\n", r);
			sumo_rlc_fini(rdev);
			return r;
		}
		r = radeon_bo_kmap(rdev->rlc.cp_table_obj, (void **)&rdev->rlc.cp_table_ptr);
		if (r) {
			dev_warn(rdev->dev, "(%d) map RLC cp table bo failed\n", r);
			sumo_rlc_fini(rdev);
			return r;
		}

		cik_init_cp_pg_table(rdev);

		radeon_bo_kunmap(rdev->rlc.cp_table_obj);
		radeon_bo_unreserve(rdev->rlc.cp_table_obj);

	}

	return 0;
}
4224
4225 static void evergreen_rlc_start(struct radeon_device *rdev)
4226 {
4227         u32 mask = RLC_ENABLE;
4228
4229         if (rdev->flags & RADEON_IS_IGP) {
4230                 mask |= GFX_POWER_GATING_ENABLE | GFX_POWER_GATING_SRC;
4231         }
4232
4233         WREG32(RLC_CNTL, mask);
4234 }
4235
/**
 * evergreen_rlc_resume - set up and start the RLC
 *
 * @rdev: radeon_device pointer
 *
 * Stops the RLC, programs the per-family setup registers (including
 * the save/restore and clear state base addresses on IGPs), uploads
 * the RLC microcode (stored big-endian in rdev->rlc_fw) and starts
 * the RLC.
 * Returns 0 on success, -EINVAL if no RLC firmware is loaded.
 */
int evergreen_rlc_resume(struct radeon_device *rdev)
{
	u32 i;
	const __be32 *fw_data;

	if (!rdev->rlc_fw)
		return -EINVAL;

	r600_rlc_stop(rdev);

	WREG32(RLC_HB_CNTL, 0);

	if (rdev->flags & RADEON_IS_IGP) {
		if (rdev->family == CHIP_ARUBA) {
			u32 always_on_bitmap =
				3 | (3 << (16 * rdev->config.cayman.max_shader_engines));
			/* find out the number of active simds */
			u32 tmp = (RREG32(CC_GC_SHADER_PIPE_CONFIG) & 0xffff0000) >> 16;
			tmp |= 0xffffffff << rdev->config.cayman.max_simds_per_se;
			tmp = hweight32(~tmp);
			/* only program load balancing when all simds are active */
			if (tmp == rdev->config.cayman.max_simds_per_se) {
				WREG32(TN_RLC_LB_ALWAYS_ACTIVE_SIMD_MASK, always_on_bitmap);
				WREG32(TN_RLC_LB_PARAMS, 0x00601004);
				WREG32(TN_RLC_LB_INIT_SIMD_MASK, 0xffffffff);
				WREG32(TN_RLC_LB_CNTR_INIT, 0x00000000);
				WREG32(TN_RLC_LB_CNTR_MAX, 0x00002000);
			}
		} else {
			WREG32(RLC_HB_WPTR_LSB_ADDR, 0);
			WREG32(RLC_HB_WPTR_MSB_ADDR, 0);
		}
		/* point the RLC at the buffers set up by sumo_rlc_init() */
		WREG32(TN_RLC_SAVE_AND_RESTORE_BASE, rdev->rlc.save_restore_gpu_addr >> 8);
		WREG32(TN_RLC_CLEAR_STATE_RESTORE_BASE, rdev->rlc.clear_state_gpu_addr >> 8);
	} else {
		WREG32(RLC_HB_BASE, 0);
		WREG32(RLC_HB_RPTR, 0);
		WREG32(RLC_HB_WPTR, 0);
		WREG32(RLC_HB_WPTR_LSB_ADDR, 0);
		WREG32(RLC_HB_WPTR_MSB_ADDR, 0);
	}
	WREG32(RLC_MC_CNTL, 0);
	WREG32(RLC_UCODE_CNTL, 0);

	/* upload the ucode; size depends on family */
	fw_data = (const __be32 *)rdev->rlc_fw->data;
	if (rdev->family >= CHIP_ARUBA) {
		for (i = 0; i < ARUBA_RLC_UCODE_SIZE; i++) {
			WREG32(RLC_UCODE_ADDR, i);
			WREG32(RLC_UCODE_DATA, be32_to_cpup(fw_data++));
		}
	} else if (rdev->family >= CHIP_CAYMAN) {
		for (i = 0; i < CAYMAN_RLC_UCODE_SIZE; i++) {
			WREG32(RLC_UCODE_ADDR, i);
			WREG32(RLC_UCODE_DATA, be32_to_cpup(fw_data++));
		}
	} else {
		for (i = 0; i < EVERGREEN_RLC_UCODE_SIZE; i++) {
			WREG32(RLC_UCODE_ADDR, i);
			WREG32(RLC_UCODE_DATA, be32_to_cpup(fw_data++));
		}
	}
	WREG32(RLC_UCODE_ADDR, 0);

	evergreen_rlc_start(rdev);

	return 0;
}
4302
4303 /* Interrupts */
4304
4305 u32 evergreen_get_vblank_counter(struct radeon_device *rdev, int crtc)
4306 {
4307         if (crtc >= rdev->num_crtc)
4308                 return 0;
4309         else
4310                 return RREG32(CRTC_STATUS_FRAME_COUNT + crtc_offsets[crtc]);
4311 }
4312
/**
 * evergreen_disable_interrupt_state - mask every interrupt source
 *
 * Turns off the per-source interrupt enables (CP rings, DMA engines,
 * GRBM/SRBM, per-CRTC vblank and pageflip, DAC autodetect, HPD pins)
 * without touching the IH ring itself.  The HPD registers are
 * read-modify-written so the configured pin polarity is preserved.
 */
void evergreen_disable_interrupt_state(struct radeon_device *rdev)
{
	u32 tmp;

	if (rdev->family >= CHIP_CAYMAN) {
		/* cayman+ has three CP rings and a second DMA engine;
		 * the context busy/empty enables are deliberately left set */
		cayman_cp_int_cntl_setup(rdev, 0,
					 CNTX_BUSY_INT_ENABLE | CNTX_EMPTY_INT_ENABLE);
		cayman_cp_int_cntl_setup(rdev, 1, 0);
		cayman_cp_int_cntl_setup(rdev, 2, 0);
		tmp = RREG32(CAYMAN_DMA1_CNTL) & ~TRAP_ENABLE;
		WREG32(CAYMAN_DMA1_CNTL, tmp);
	} else
		WREG32(CP_INT_CNTL, CNTX_BUSY_INT_ENABLE | CNTX_EMPTY_INT_ENABLE);
	/* clear only the DMA trap enable, keep the rest of the control reg */
	tmp = RREG32(DMA_CNTL) & ~TRAP_ENABLE;
	WREG32(DMA_CNTL, tmp);
	WREG32(GRBM_INT_CNTL, 0);
	WREG32(SRBM_INT_CNTL, 0);
	/* per-CRTC vblank/vline interrupt masks; CRTCs beyond the first two
	 * only exist on parts reporting num_crtc >= 4 / >= 6 */
	WREG32(INT_MASK + EVERGREEN_CRTC0_REGISTER_OFFSET, 0);
	WREG32(INT_MASK + EVERGREEN_CRTC1_REGISTER_OFFSET, 0);
	if (rdev->num_crtc >= 4) {
		WREG32(INT_MASK + EVERGREEN_CRTC2_REGISTER_OFFSET, 0);
		WREG32(INT_MASK + EVERGREEN_CRTC3_REGISTER_OFFSET, 0);
	}
	if (rdev->num_crtc >= 6) {
		WREG32(INT_MASK + EVERGREEN_CRTC4_REGISTER_OFFSET, 0);
		WREG32(INT_MASK + EVERGREEN_CRTC5_REGISTER_OFFSET, 0);
	}

	/* per-CRTC pageflip (GRPH) interrupt controls */
	WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET, 0);
	WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET, 0);
	if (rdev->num_crtc >= 4) {
		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET, 0);
		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET, 0);
	}
	if (rdev->num_crtc >= 6) {
		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET, 0);
		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET, 0);
	}

	/* only one DAC on DCE5 */
	if (!ASIC_IS_DCE5(rdev))
		WREG32(DACA_AUTODETECT_INT_CONTROL, 0);
	WREG32(DACB_AUTODETECT_INT_CONTROL, 0);

	/* keep only the polarity bit of each HPD control register */
	tmp = RREG32(DC_HPD1_INT_CONTROL) & DC_HPDx_INT_POLARITY;
	WREG32(DC_HPD1_INT_CONTROL, tmp);
	tmp = RREG32(DC_HPD2_INT_CONTROL) & DC_HPDx_INT_POLARITY;
	WREG32(DC_HPD2_INT_CONTROL, tmp);
	tmp = RREG32(DC_HPD3_INT_CONTROL) & DC_HPDx_INT_POLARITY;
	WREG32(DC_HPD3_INT_CONTROL, tmp);
	tmp = RREG32(DC_HPD4_INT_CONTROL) & DC_HPDx_INT_POLARITY;
	WREG32(DC_HPD4_INT_CONTROL, tmp);
	tmp = RREG32(DC_HPD5_INT_CONTROL) & DC_HPDx_INT_POLARITY;
	WREG32(DC_HPD5_INT_CONTROL, tmp);
	tmp = RREG32(DC_HPD6_INT_CONTROL) & DC_HPDx_INT_POLARITY;
	WREG32(DC_HPD6_INT_CONTROL, tmp);

}
4371
/**
 * evergreen_irq_set - program hardware interrupt enables from driver state
 *
 * Builds enable masks for every interrupt source tracked in rdev->irq
 * (CP rings, DMA engines, vblank/pageflip, hot-plug detect, HDMI audio
 * format triggers, thermal) and writes them to the hardware in one pass.
 *
 * Returns 0 on success, -EINVAL if no IRQ handler has been installed.
 */
int evergreen_irq_set(struct radeon_device *rdev)
{
	u32 cp_int_cntl = CNTX_BUSY_INT_ENABLE | CNTX_EMPTY_INT_ENABLE;
	u32 cp_int_cntl1 = 0, cp_int_cntl2 = 0;
	u32 crtc1 = 0, crtc2 = 0, crtc3 = 0, crtc4 = 0, crtc5 = 0, crtc6 = 0;
	u32 hpd1, hpd2, hpd3, hpd4, hpd5, hpd6;
	u32 grbm_int_cntl = 0;
	u32 afmt1 = 0, afmt2 = 0, afmt3 = 0, afmt4 = 0, afmt5 = 0, afmt6 = 0;
	u32 dma_cntl, dma_cntl1 = 0;
	u32 thermal_int = 0;

	if (!rdev->irq.installed) {
		WARN(1, "Can't enable IRQ/MSI because no handler is installed\n");
		return -EINVAL;
	}
	/* don't enable anything if the ih is disabled */
	if (!rdev->ih.enabled) {
		r600_disable_interrupts(rdev);
		/* force the active interrupt state to all disabled */
		evergreen_disable_interrupt_state(rdev);
		return 0;
	}

	/* read-modify-write: clear only the enable bit so the configured
	 * HPD polarity (and other control bits) are preserved */
	hpd1 = RREG32(DC_HPD1_INT_CONTROL) & ~DC_HPDx_INT_EN;
	hpd2 = RREG32(DC_HPD2_INT_CONTROL) & ~DC_HPDx_INT_EN;
	hpd3 = RREG32(DC_HPD3_INT_CONTROL) & ~DC_HPDx_INT_EN;
	hpd4 = RREG32(DC_HPD4_INT_CONTROL) & ~DC_HPDx_INT_EN;
	hpd5 = RREG32(DC_HPD5_INT_CONTROL) & ~DC_HPDx_INT_EN;
	hpd6 = RREG32(DC_HPD6_INT_CONTROL) & ~DC_HPDx_INT_EN;
	/* ARUBA (TN) exposes the thermal interrupt via a different register */
	if (rdev->family == CHIP_ARUBA)
		thermal_int = RREG32(TN_CG_THERMAL_INT_CTRL) &
			~(THERM_INT_MASK_HIGH | THERM_INT_MASK_LOW);
	else
		thermal_int = RREG32(CG_THERMAL_INT) &
			~(THERM_INT_MASK_HIGH | THERM_INT_MASK_LOW);

	/* HDMI audio format triggers, one per CRTC block */
	afmt1 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
	afmt2 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
	afmt3 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
	afmt4 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
	afmt5 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
	afmt6 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;

	dma_cntl = RREG32(DMA_CNTL) & ~TRAP_ENABLE;

	if (rdev->family >= CHIP_CAYMAN) {
		/* enable CP interrupts on all rings */
		if (atomic_read(&rdev->irq.ring_int[RADEON_RING_TYPE_GFX_INDEX])) {
			DRM_DEBUG("evergreen_irq_set: sw int gfx\n");
			cp_int_cntl |= TIME_STAMP_INT_ENABLE;
		}
		if (atomic_read(&rdev->irq.ring_int[CAYMAN_RING_TYPE_CP1_INDEX])) {
			DRM_DEBUG("evergreen_irq_set: sw int cp1\n");
			cp_int_cntl1 |= TIME_STAMP_INT_ENABLE;
		}
		if (atomic_read(&rdev->irq.ring_int[CAYMAN_RING_TYPE_CP2_INDEX])) {
			DRM_DEBUG("evergreen_irq_set: sw int cp2\n");
			cp_int_cntl2 |= TIME_STAMP_INT_ENABLE;
		}
	} else {
		if (atomic_read(&rdev->irq.ring_int[RADEON_RING_TYPE_GFX_INDEX])) {
			DRM_DEBUG("evergreen_irq_set: sw int gfx\n");
			cp_int_cntl |= RB_INT_ENABLE;
			cp_int_cntl |= TIME_STAMP_INT_ENABLE;
		}
	}

	if (atomic_read(&rdev->irq.ring_int[R600_RING_TYPE_DMA_INDEX])) {
		DRM_DEBUG("r600_irq_set: sw int dma\n");
		dma_cntl |= TRAP_ENABLE;
	}

	if (rdev->family >= CHIP_CAYMAN) {
		/* second DMA engine only exists on cayman and newer */
		dma_cntl1 = RREG32(CAYMAN_DMA1_CNTL) & ~TRAP_ENABLE;
		if (atomic_read(&rdev->irq.ring_int[CAYMAN_RING_TYPE_DMA1_INDEX])) {
			DRM_DEBUG("r600_irq_set: sw int dma1\n");
			dma_cntl1 |= TRAP_ENABLE;
		}
	}

	if (rdev->irq.dpm_thermal) {
		DRM_DEBUG("dpm thermal\n");
		thermal_int |= THERM_INT_MASK_HIGH | THERM_INT_MASK_LOW;
	}

	/* vblank is wanted if either the legacy vblank interrupt is enabled
	 * or a page flip is pending on that crtc */
	if (rdev->irq.crtc_vblank_int[0] ||
	    atomic_read(&rdev->irq.pflip[0])) {
		DRM_DEBUG("evergreen_irq_set: vblank 0\n");
		crtc1 |= VBLANK_INT_MASK;
	}
	if (rdev->irq.crtc_vblank_int[1] ||
	    atomic_read(&rdev->irq.pflip[1])) {
		DRM_DEBUG("evergreen_irq_set: vblank 1\n");
		crtc2 |= VBLANK_INT_MASK;
	}
	if (rdev->irq.crtc_vblank_int[2] ||
	    atomic_read(&rdev->irq.pflip[2])) {
		DRM_DEBUG("evergreen_irq_set: vblank 2\n");
		crtc3 |= VBLANK_INT_MASK;
	}
	if (rdev->irq.crtc_vblank_int[3] ||
	    atomic_read(&rdev->irq.pflip[3])) {
		DRM_DEBUG("evergreen_irq_set: vblank 3\n");
		crtc4 |= VBLANK_INT_MASK;
	}
	if (rdev->irq.crtc_vblank_int[4] ||
	    atomic_read(&rdev->irq.pflip[4])) {
		DRM_DEBUG("evergreen_irq_set: vblank 4\n");
		crtc5 |= VBLANK_INT_MASK;
	}
	if (rdev->irq.crtc_vblank_int[5] ||
	    atomic_read(&rdev->irq.pflip[5])) {
		DRM_DEBUG("evergreen_irq_set: vblank 5\n");
		crtc6 |= VBLANK_INT_MASK;
	}
	/* hot-plug detect enables */
	if (rdev->irq.hpd[0]) {
		DRM_DEBUG("evergreen_irq_set: hpd 1\n");
		hpd1 |= DC_HPDx_INT_EN;
	}
	if (rdev->irq.hpd[1]) {
		DRM_DEBUG("evergreen_irq_set: hpd 2\n");
		hpd2 |= DC_HPDx_INT_EN;
	}
	if (rdev->irq.hpd[2]) {
		DRM_DEBUG("evergreen_irq_set: hpd 3\n");
		hpd3 |= DC_HPDx_INT_EN;
	}
	if (rdev->irq.hpd[3]) {
		DRM_DEBUG("evergreen_irq_set: hpd 4\n");
		hpd4 |= DC_HPDx_INT_EN;
	}
	if (rdev->irq.hpd[4]) {
		DRM_DEBUG("evergreen_irq_set: hpd 5\n");
		hpd5 |= DC_HPDx_INT_EN;
	}
	if (rdev->irq.hpd[5]) {
		DRM_DEBUG("evergreen_irq_set: hpd 6\n");
		hpd6 |= DC_HPDx_INT_EN;
	}
	/* HDMI audio enables */
	if (rdev->irq.afmt[0]) {
		DRM_DEBUG("evergreen_irq_set: hdmi 0\n");
		afmt1 |= AFMT_AZ_FORMAT_WTRIG_MASK;
	}
	if (rdev->irq.afmt[1]) {
		DRM_DEBUG("evergreen_irq_set: hdmi 1\n");
		afmt2 |= AFMT_AZ_FORMAT_WTRIG_MASK;
	}
	if (rdev->irq.afmt[2]) {
		DRM_DEBUG("evergreen_irq_set: hdmi 2\n");
		afmt3 |= AFMT_AZ_FORMAT_WTRIG_MASK;
	}
	if (rdev->irq.afmt[3]) {
		DRM_DEBUG("evergreen_irq_set: hdmi 3\n");
		afmt4 |= AFMT_AZ_FORMAT_WTRIG_MASK;
	}
	if (rdev->irq.afmt[4]) {
		DRM_DEBUG("evergreen_irq_set: hdmi 4\n");
		afmt5 |= AFMT_AZ_FORMAT_WTRIG_MASK;
	}
	if (rdev->irq.afmt[5]) {
		DRM_DEBUG("evergreen_irq_set: hdmi 5\n");
		afmt6 |= AFMT_AZ_FORMAT_WTRIG_MASK;
	}

	/* all masks built; now commit them to the hardware */
	if (rdev->family >= CHIP_CAYMAN) {
		cayman_cp_int_cntl_setup(rdev, 0, cp_int_cntl);
		cayman_cp_int_cntl_setup(rdev, 1, cp_int_cntl1);
		cayman_cp_int_cntl_setup(rdev, 2, cp_int_cntl2);
	} else
		WREG32(CP_INT_CNTL, cp_int_cntl);

	WREG32(DMA_CNTL, dma_cntl);

	if (rdev->family >= CHIP_CAYMAN)
		WREG32(CAYMAN_DMA1_CNTL, dma_cntl1);

	WREG32(GRBM_INT_CNTL, grbm_int_cntl);

	WREG32(INT_MASK + EVERGREEN_CRTC0_REGISTER_OFFSET, crtc1);
	WREG32(INT_MASK + EVERGREEN_CRTC1_REGISTER_OFFSET, crtc2);
	if (rdev->num_crtc >= 4) {
		WREG32(INT_MASK + EVERGREEN_CRTC2_REGISTER_OFFSET, crtc3);
		WREG32(INT_MASK + EVERGREEN_CRTC3_REGISTER_OFFSET, crtc4);
	}
	if (rdev->num_crtc >= 6) {
		WREG32(INT_MASK + EVERGREEN_CRTC4_REGISTER_OFFSET, crtc5);
		WREG32(INT_MASK + EVERGREEN_CRTC5_REGISTER_OFFSET, crtc6);
	}

	/* pageflip interrupts stay unconditionally enabled per CRTC */
	WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET,
	       GRPH_PFLIP_INT_MASK);
	WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET,
	       GRPH_PFLIP_INT_MASK);
	if (rdev->num_crtc >= 4) {
		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET,
		       GRPH_PFLIP_INT_MASK);
		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET,
		       GRPH_PFLIP_INT_MASK);
	}
	if (rdev->num_crtc >= 6) {
		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET,
		       GRPH_PFLIP_INT_MASK);
		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET,
		       GRPH_PFLIP_INT_MASK);
	}

	WREG32(DC_HPD1_INT_CONTROL, hpd1);
	WREG32(DC_HPD2_INT_CONTROL, hpd2);
	WREG32(DC_HPD3_INT_CONTROL, hpd3);
	WREG32(DC_HPD4_INT_CONTROL, hpd4);
	WREG32(DC_HPD5_INT_CONTROL, hpd5);
	WREG32(DC_HPD6_INT_CONTROL, hpd6);
	if (rdev->family == CHIP_ARUBA)
		WREG32(TN_CG_THERMAL_INT_CTRL, thermal_int);
	else
		WREG32(CG_THERMAL_INT, thermal_int);

	WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET, afmt1);
	WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET, afmt2);
	WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET, afmt3);
	WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET, afmt4);
	WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET, afmt5);
	WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET, afmt6);

	/* posting read */
	RREG32(SRBM_STATUS);

	return 0;
}
4601
4602 static void evergreen_irq_ack(struct radeon_device *rdev)
4603 {
4604         u32 tmp;
4605
4606         rdev->irq.stat_regs.evergreen.disp_int = RREG32(DISP_INTERRUPT_STATUS);
4607         rdev->irq.stat_regs.evergreen.disp_int_cont = RREG32(DISP_INTERRUPT_STATUS_CONTINUE);
4608         rdev->irq.stat_regs.evergreen.disp_int_cont2 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE2);
4609         rdev->irq.stat_regs.evergreen.disp_int_cont3 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE3);
4610         rdev->irq.stat_regs.evergreen.disp_int_cont4 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE4);
4611         rdev->irq.stat_regs.evergreen.disp_int_cont5 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE5);
4612         rdev->irq.stat_regs.evergreen.d1grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET);
4613         rdev->irq.stat_regs.evergreen.d2grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET);
4614         if (rdev->num_crtc >= 4) {
4615                 rdev->irq.stat_regs.evergreen.d3grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET);
4616                 rdev->irq.stat_regs.evergreen.d4grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET);
4617         }
4618         if (rdev->num_crtc >= 6) {
4619                 rdev->irq.stat_regs.evergreen.d5grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET);
4620                 rdev->irq.stat_regs.evergreen.d6grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET);
4621         }
4622
4623         rdev->irq.stat_regs.evergreen.afmt_status1 = RREG32(AFMT_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET);
4624         rdev->irq.stat_regs.evergreen.afmt_status2 = RREG32(AFMT_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET);
4625         rdev->irq.stat_regs.evergreen.afmt_status3 = RREG32(AFMT_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET);
4626         rdev->irq.stat_regs.evergreen.afmt_status4 = RREG32(AFMT_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET);
4627         rdev->irq.stat_regs.evergreen.afmt_status5 = RREG32(AFMT_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET);
4628         rdev->irq.stat_regs.evergreen.afmt_status6 = RREG32(AFMT_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET);
4629
4630         if (rdev->irq.stat_regs.evergreen.d1grph_int & GRPH_PFLIP_INT_OCCURRED)
4631                 WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
4632         if (rdev->irq.stat_regs.evergreen.d2grph_int & GRPH_PFLIP_INT_OCCURRED)
4633                 WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
4634         if (rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VBLANK_INTERRUPT)
4635                 WREG32(VBLANK_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET, VBLANK_ACK);
4636         if (rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VLINE_INTERRUPT)
4637                 WREG32(VLINE_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET, VLINE_ACK);
4638         if (rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VBLANK_INTERRUPT)
4639                 WREG32(VBLANK_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET, VBLANK_ACK);
4640         if (rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VLINE_INTERRUPT)
4641                 WREG32(VLINE_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET, VLINE_ACK);
4642
4643         if (rdev->num_crtc >= 4) {
4644                 if (rdev->irq.stat_regs.evergreen.d3grph_int & GRPH_PFLIP_INT_OCCURRED)
4645                         WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
4646                 if (rdev->irq.stat_regs.evergreen.d4grph_int & GRPH_PFLIP_INT_OCCURRED)
4647                         WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
4648                 if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VBLANK_INTERRUPT)
4649                         WREG32(VBLANK_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET, VBLANK_ACK);
4650                 if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VLINE_INTERRUPT)
4651                         WREG32(VLINE_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET, VLINE_ACK);
4652                 if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VBLANK_INTERRUPT)
4653                         WREG32(VBLANK_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET, VBLANK_ACK);
4654                 if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VLINE_INTERRUPT)
4655                         WREG32(VLINE_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET, VLINE_ACK);
4656         }
4657
4658         if (rdev->num_crtc >= 6) {
4659                 if (rdev->irq.stat_regs.evergreen.d5grph_int & GRPH_PFLIP_INT_OCCURRED)
4660                         WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
4661                 if (rdev->irq.stat_regs.evergreen.d6grph_int & GRPH_PFLIP_INT_OCCURRED)
4662                         WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
4663                 if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VBLANK_INTERRUPT)
4664                         WREG32(VBLANK_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET, VBLANK_ACK);
4665                 if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VLINE_INTERRUPT)
4666                         WREG32(VLINE_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET, VLINE_ACK);
4667                 if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VBLANK_INTERRUPT)
4668                         WREG32(VBLANK_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET, VBLANK_ACK);
4669                 if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VLINE_INTERRUPT)
4670                         WREG32(VLINE_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET, VLINE_ACK);
4671         }
4672
4673         if (rdev->irq.stat_regs.evergreen.disp_int & DC_HPD1_INTERRUPT) {
4674                 tmp = RREG32(DC_HPD1_INT_CONTROL);
4675                 tmp |= DC_HPDx_INT_ACK;
4676                 WREG32(DC_HPD1_INT_CONTROL, tmp);
4677         }
4678         if (rdev->irq.stat_regs.evergreen.disp_int_cont & DC_HPD2_INTERRUPT) {
4679                 tmp = RREG32(DC_HPD2_INT_CONTROL);
4680                 tmp |= DC_HPDx_INT_ACK;
4681                 WREG32(DC_HPD2_INT_CONTROL, tmp);
4682         }
4683         if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & DC_HPD3_INTERRUPT) {
4684                 tmp = RREG32(DC_HPD3_INT_CONTROL);
4685                 tmp |= DC_HPDx_INT_ACK;
4686                 WREG32(DC_HPD3_INT_CONTROL, tmp);
4687         }
4688         if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & DC_HPD4_INTERRUPT) {
4689                 tmp = RREG32(DC_HPD4_INT_CONTROL);
4690                 tmp |= DC_HPDx_INT_ACK;
4691                 WREG32(DC_HPD4_INT_CONTROL, tmp);
4692         }
4693         if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & DC_HPD5_INTERRUPT) {
4694                 tmp = RREG32(DC_HPD5_INT_CONTROL);
4695                 tmp |= DC_HPDx_INT_ACK;
4696                 WREG32(DC_HPD5_INT_CONTROL, tmp);
4697         }
4698         if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & DC_HPD6_INTERRUPT) {
4699                 tmp = RREG32(DC_HPD5_INT_CONTROL);
4700                 tmp |= DC_HPDx_INT_ACK;
4701                 WREG32(DC_HPD6_INT_CONTROL, tmp);
4702         }
4703         if (rdev->irq.stat_regs.evergreen.afmt_status1 & AFMT_AZ_FORMAT_WTRIG) {
4704                 tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET);
4705                 tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
4706                 WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET, tmp);
4707         }
4708         if (rdev->irq.stat_regs.evergreen.afmt_status2 & AFMT_AZ_FORMAT_WTRIG) {
4709                 tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET);
4710                 tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
4711                 WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET, tmp);
4712         }
4713         if (rdev->irq.stat_regs.evergreen.afmt_status3 & AFMT_AZ_FORMAT_WTRIG) {
4714                 tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET);
4715                 tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
4716                 WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET, tmp);
4717         }
4718         if (rdev->irq.stat_regs.evergreen.afmt_status4 & AFMT_AZ_FORMAT_WTRIG) {
4719                 tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET);
4720                 tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
4721                 WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET, tmp);
4722         }
4723         if (rdev->irq.stat_regs.evergreen.afmt_status5 & AFMT_AZ_FORMAT_WTRIG) {
4724                 tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET);
4725                 tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
4726                 WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET, tmp);
4727         }
4728         if (rdev->irq.stat_regs.evergreen.afmt_status6 & AFMT_AZ_FORMAT_WTRIG) {
4729                 tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET);
4730                 tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
4731                 WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET, tmp);
4732         }
4733 }
4734
/* Fully disable interrupt delivery: stop the IH, give in-flight
 * interrupts a moment to land, ack anything still latched, then force
 * all per-source enables off so nothing re-asserts.
 */
static void evergreen_irq_disable(struct radeon_device *rdev)
{
	r600_disable_interrupts(rdev);
	/* Wait and acknowledge irq */
	mdelay(1);
	evergreen_irq_ack(rdev);
	evergreen_disable_interrupt_state(rdev);
}
4743
/* Quiesce interrupts and stop the RLC ahead of device suspend. */
void evergreen_irq_suspend(struct radeon_device *rdev)
{
	evergreen_irq_disable(rdev);
	r600_rlc_stop(rdev);
}
4749
4750 static u32 evergreen_get_ih_wptr(struct radeon_device *rdev)
4751 {
4752         u32 wptr, tmp;
4753
4754         if (rdev->wb.enabled)
4755                 wptr = le32_to_cpu(rdev->wb.wb[R600_WB_IH_WPTR_OFFSET/4]);
4756         else
4757                 wptr = RREG32(IH_RB_WPTR);
4758
4759         if (wptr & RB_OVERFLOW) {
4760                 wptr &= ~RB_OVERFLOW;
4761                 /* When a ring buffer overflow happen start parsing interrupt
4762                  * from the last not overwritten vector (wptr + 16). Hopefully
4763                  * this should allow us to catchup.
4764                  */
4765                 dev_warn(rdev->dev, "IH ring buffer overflow (0x%08X, 0x%08X, 0x%08X)\n",
4766                          wptr, rdev->ih.rptr, (wptr + 16) & rdev->ih.ptr_mask);
4767                 rdev->ih.rptr = (wptr + 16) & rdev->ih.ptr_mask;
4768                 tmp = RREG32(IH_RB_CNTL);
4769                 tmp |= IH_WPTR_OVERFLOW_CLEAR;
4770                 WREG32(IH_RB_CNTL, tmp);
4771         }
4772         return (wptr & rdev->ih.ptr_mask);
4773 }
4774
4775 int evergreen_irq_process(struct radeon_device *rdev)
4776 {
4777         u32 wptr;
4778         u32 rptr;
4779         u32 src_id, src_data;
4780         u32 ring_index;
4781         bool queue_hotplug = false;
4782         bool queue_hdmi = false;
4783         bool queue_thermal = false;
4784         u32 status, addr;
4785
4786         if (!rdev->ih.enabled || rdev->shutdown)
4787                 return IRQ_NONE;
4788
4789         wptr = evergreen_get_ih_wptr(rdev);
4790
4791 restart_ih:
4792         /* is somebody else already processing irqs? */
4793         if (atomic_xchg(&rdev->ih.lock, 1))
4794                 return IRQ_NONE;
4795
4796         rptr = rdev->ih.rptr;
4797         DRM_DEBUG("r600_irq_process start: rptr %d, wptr %d\n", rptr, wptr);
4798
4799         /* Order reading of wptr vs. reading of IH ring data */
4800         rmb();
4801
4802         /* display interrupts */
4803         evergreen_irq_ack(rdev);
4804
4805         while (rptr != wptr) {
4806                 /* wptr/rptr are in bytes! */
4807                 ring_index = rptr / 4;
4808                 src_id =  le32_to_cpu(rdev->ih.ring[ring_index]) & 0xff;
4809                 src_data = le32_to_cpu(rdev->ih.ring[ring_index + 1]) & 0xfffffff;
4810
4811                 switch (src_id) {
4812                 case 1: /* D1 vblank/vline */
4813                         switch (src_data) {
4814                         case 0: /* D1 vblank */
4815                                 if (rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VBLANK_INTERRUPT) {
4816                                         if (rdev->irq.crtc_vblank_int[0]) {
4817                                                 drm_handle_vblank(rdev->ddev, 0);
4818                                                 rdev->pm.vblank_sync = true;
4819                                                 wake_up(&rdev->irq.vblank_queue);
4820                                         }
4821                                         if (atomic_read(&rdev->irq.pflip[0]))
4822                                                 radeon_crtc_handle_vblank(rdev, 0);
4823                                         rdev->irq.stat_regs.evergreen.disp_int &= ~LB_D1_VBLANK_INTERRUPT;
4824                                         DRM_DEBUG("IH: D1 vblank\n");
4825                                 }
4826                                 break;
4827                         case 1: /* D1 vline */
4828                                 if (rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VLINE_INTERRUPT) {
4829                                         rdev->irq.stat_regs.evergreen.disp_int &= ~LB_D1_VLINE_INTERRUPT;
4830                                         DRM_DEBUG("IH: D1 vline\n");
4831                                 }
4832                                 break;
4833                         default:
4834                                 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
4835                                 break;
4836                         }
4837                         break;
4838                 case 2: /* D2 vblank/vline */
4839                         switch (src_data) {
4840                         case 0: /* D2 vblank */
4841                                 if (rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VBLANK_INTERRUPT) {
4842                                         if (rdev->irq.crtc_vblank_int[1]) {
4843                                                 drm_handle_vblank(rdev->ddev, 1);
4844                                                 rdev->pm.vblank_sync = true;
4845                                                 wake_up(&rdev->irq.vblank_queue);
4846                                         }
4847                                         if (atomic_read(&rdev->irq.pflip[1]))
4848                                                 radeon_crtc_handle_vblank(rdev, 1);
4849                                         rdev->irq.stat_regs.evergreen.disp_int_cont &= ~LB_D2_VBLANK_INTERRUPT;
4850                                         DRM_DEBUG("IH: D2 vblank\n");
4851                                 }
4852                                 break;
4853                         case 1: /* D2 vline */
4854                                 if (rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VLINE_INTERRUPT) {
4855                                         rdev->irq.stat_regs.evergreen.disp_int_cont &= ~LB_D2_VLINE_INTERRUPT;
4856                                         DRM_DEBUG("IH: D2 vline\n");
4857                                 }
4858                                 break;
4859                         default:
4860                                 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
4861                                 break;
4862                         }
4863                         break;
4864                 case 3: /* D3 vblank/vline */
4865                         switch (src_data) {
4866                         case 0: /* D3 vblank */
4867                                 if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VBLANK_INTERRUPT) {
4868                                         if (rdev->irq.crtc_vblank_int[2]) {
4869                                                 drm_handle_vblank(rdev->ddev, 2);
4870                                                 rdev->pm.vblank_sync = true;
4871                                                 wake_up(&rdev->irq.vblank_queue);
4872                                         }
4873                                         if (atomic_read(&rdev->irq.pflip[2]))
4874                                                 radeon_crtc_handle_vblank(rdev, 2);
4875                                         rdev->irq.stat_regs.evergreen.disp_int_cont2 &= ~LB_D3_VBLANK_INTERRUPT;
4876                                         DRM_DEBUG("IH: D3 vblank\n");
4877                                 }
4878                                 break;
4879                         case 1: /* D3 vline */
4880                                 if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VLINE_INTERRUPT) {
4881                                         rdev->irq.stat_regs.evergreen.disp_int_cont2 &= ~LB_D3_VLINE_INTERRUPT;
4882                                         DRM_DEBUG("IH: D3 vline\n");
4883                                 }
4884                                 break;
4885                         default:
4886                                 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
4887                                 break;
4888                         }
4889                         break;
4890                 case 4: /* D4 vblank/vline */
4891                         switch (src_data) {
4892                         case 0: /* D4 vblank */
4893                                 if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VBLANK_INTERRUPT) {
4894                                         if (rdev->irq.crtc_vblank_int[3]) {
4895                                                 drm_handle_vblank(rdev->ddev, 3);
4896                                                 rdev->pm.vblank_sync = true;
4897                                                 wake_up(&rdev->irq.vblank_queue);
4898                                         }
4899                                         if (atomic_read(&rdev->irq.pflip[3]))
4900                                                 radeon_crtc_handle_vblank(rdev, 3);
4901                                         rdev->irq.stat_regs.evergreen.disp_int_cont3 &= ~LB_D4_VBLANK_INTERRUPT;
4902                                         DRM_DEBUG("IH: D4 vblank\n");
4903                                 }
4904                                 break;
4905                         case 1: /* D4 vline */
4906                                 if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VLINE_INTERRUPT) {
4907                                         rdev->irq.stat_regs.evergreen.disp_int_cont3 &= ~LB_D4_VLINE_INTERRUPT;
4908                                         DRM_DEBUG("IH: D4 vline\n");
4909                                 }
4910                                 break;
4911                         default:
4912                                 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
4913                                 break;
4914                         }
4915                         break;
4916                 case 5: /* D5 vblank/vline */
4917                         switch (src_data) {
4918                         case 0: /* D5 vblank */
4919                                 if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VBLANK_INTERRUPT) {
4920                                         if (rdev->irq.crtc_vblank_int[4]) {
4921                                                 drm_handle_vblank(rdev->ddev, 4);
4922                                                 rdev->pm.vblank_sync = true;
4923                                                 wake_up(&rdev->irq.vblank_queue);
4924                                         }
4925                                         if (atomic_read(&rdev->irq.pflip[4]))
4926                                                 radeon_crtc_handle_vblank(rdev, 4);
4927                                         rdev->irq.stat_regs.evergreen.disp_int_cont4 &= ~LB_D5_VBLANK_INTERRUPT;
4928                                         DRM_DEBUG("IH: D5 vblank\n");
4929                                 }
4930                                 break;
4931                         case 1: /* D5 vline */
4932                                 if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VLINE_INTERRUPT) {
4933                                         rdev->irq.stat_regs.evergreen.disp_int_cont4 &= ~LB_D5_VLINE_INTERRUPT;
4934                                         DRM_DEBUG("IH: D5 vline\n");
4935                                 }
4936                                 break;
4937                         default:
4938                                 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
4939                                 break;
4940                         }
4941                         break;
4942                 case 6: /* D6 vblank/vline */
4943                         switch (src_data) {
4944                         case 0: /* D6 vblank */
4945                                 if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VBLANK_INTERRUPT) {
4946                                         if (rdev->irq.crtc_vblank_int[5]) {
4947                                                 drm_handle_vblank(rdev->ddev, 5);
4948                                                 rdev->pm.vblank_sync = true;
4949                                                 wake_up(&rdev->irq.vblank_queue);
4950                                         }
4951                                         if (atomic_read(&rdev->irq.pflip[5]))
4952                                                 radeon_crtc_handle_vblank(rdev, 5);
4953                                         rdev->irq.stat_regs.evergreen.disp_int_cont5 &= ~LB_D6_VBLANK_INTERRUPT;
4954                                         DRM_DEBUG("IH: D6 vblank\n");
4955                                 }
4956                                 break;
4957                         case 1: /* D6 vline */
4958                                 if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VLINE_INTERRUPT) {
4959                                         rdev->irq.stat_regs.evergreen.disp_int_cont5 &= ~LB_D6_VLINE_INTERRUPT;
4960                                         DRM_DEBUG("IH: D6 vline\n");
4961                                 }
4962                                 break;
4963                         default:
4964                                 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
4965                                 break;
4966                         }
4967                         break;
4968                 case 8: /* D1 page flip */
4969                 case 10: /* D2 page flip */
4970                 case 12: /* D3 page flip */
4971                 case 14: /* D4 page flip */
4972                 case 16: /* D5 page flip */
4973                 case 18: /* D6 page flip */
4974                         DRM_DEBUG("IH: D%d flip\n", ((src_id - 8) >> 1) + 1);
4975                         if (radeon_use_pflipirq > 0)
4976                                 radeon_crtc_handle_flip(rdev, (src_id - 8) >> 1);
4977                         break;
4978                 case 42: /* HPD hotplug */
4979                         switch (src_data) {
4980                         case 0:
4981                                 if (rdev->irq.stat_regs.evergreen.disp_int & DC_HPD1_INTERRUPT) {
4982                                         rdev->irq.stat_regs.evergreen.disp_int &= ~DC_HPD1_INTERRUPT;
4983                                         queue_hotplug = true;
4984                                         DRM_DEBUG("IH: HPD1\n");
4985                                 }
4986                                 break;
4987                         case 1:
4988                                 if (rdev->irq.stat_regs.evergreen.disp_int_cont & DC_HPD2_INTERRUPT) {
4989                                         rdev->irq.stat_regs.evergreen.disp_int_cont &= ~DC_HPD2_INTERRUPT;
4990                                         queue_hotplug = true;
4991                                         DRM_DEBUG("IH: HPD2\n");
4992                                 }
4993                                 break;
4994                         case 2:
4995                                 if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & DC_HPD3_INTERRUPT) {
4996                                         rdev->irq.stat_regs.evergreen.disp_int_cont2 &= ~DC_HPD3_INTERRUPT;
4997                                         queue_hotplug = true;
4998                                         DRM_DEBUG("IH: HPD3\n");
4999                                 }
5000                                 break;
5001                         case 3:
5002                                 if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & DC_HPD4_INTERRUPT) {
5003                                         rdev->irq.stat_regs.evergreen.disp_int_cont3 &= ~DC_HPD4_INTERRUPT;
5004                                         queue_hotplug = true;
5005                                         DRM_DEBUG("IH: HPD4\n");
5006                                 }
5007                                 break;
5008                         case 4:
5009                                 if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & DC_HPD5_INTERRUPT) {
5010                                         rdev->irq.stat_regs.evergreen.disp_int_cont4 &= ~DC_HPD5_INTERRUPT;
5011                                         queue_hotplug = true;
5012                                         DRM_DEBUG("IH: HPD5\n");
5013                                 }
5014                                 break;
5015                         case 5:
5016                                 if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & DC_HPD6_INTERRUPT) {
5017                                         rdev->irq.stat_regs.evergreen.disp_int_cont5 &= ~DC_HPD6_INTERRUPT;
5018                                         queue_hotplug = true;
5019                                         DRM_DEBUG("IH: HPD6\n");
5020                                 }
5021                                 break;
5022                         default:
5023                                 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
5024                                 break;
5025                         }
5026                         break;
5027                 case 44: /* hdmi */
5028                         switch (src_data) {
5029                         case 0:
5030                                 if (rdev->irq.stat_regs.evergreen.afmt_status1 & AFMT_AZ_FORMAT_WTRIG) {
5031                                         rdev->irq.stat_regs.evergreen.afmt_status1 &= ~AFMT_AZ_FORMAT_WTRIG;
5032                                         queue_hdmi = true;
5033                                         DRM_DEBUG("IH: HDMI0\n");
5034                                 }
5035                                 break;
5036                         case 1:
5037                                 if (rdev->irq.stat_regs.evergreen.afmt_status2 & AFMT_AZ_FORMAT_WTRIG) {
5038                                         rdev->irq.stat_regs.evergreen.afmt_status2 &= ~AFMT_AZ_FORMAT_WTRIG;
5039                                         queue_hdmi = true;
5040                                         DRM_DEBUG("IH: HDMI1\n");
5041                                 }
5042                                 break;
5043                         case 2:
5044                                 if (rdev->irq.stat_regs.evergreen.afmt_status3 & AFMT_AZ_FORMAT_WTRIG) {
5045                                         rdev->irq.stat_regs.evergreen.afmt_status3 &= ~AFMT_AZ_FORMAT_WTRIG;
5046                                         queue_hdmi = true;
5047                                         DRM_DEBUG("IH: HDMI2\n");
5048                                 }
5049                                 break;
5050                         case 3:
5051                                 if (rdev->irq.stat_regs.evergreen.afmt_status4 & AFMT_AZ_FORMAT_WTRIG) {
5052                                         rdev->irq.stat_regs.evergreen.afmt_status4 &= ~AFMT_AZ_FORMAT_WTRIG;
5053                                         queue_hdmi = true;
5054                                         DRM_DEBUG("IH: HDMI3\n");
5055                                 }
5056                                 break;
5057                         case 4:
5058                                 if (rdev->irq.stat_regs.evergreen.afmt_status5 & AFMT_AZ_FORMAT_WTRIG) {
5059                                         rdev->irq.stat_regs.evergreen.afmt_status5 &= ~AFMT_AZ_FORMAT_WTRIG;
5060                                         queue_hdmi = true;
5061                                         DRM_DEBUG("IH: HDMI4\n");
5062                                 }
5063                                 break;
5064                         case 5:
5065                                 if (rdev->irq.stat_regs.evergreen.afmt_status6 & AFMT_AZ_FORMAT_WTRIG) {
5066                                         rdev->irq.stat_regs.evergreen.afmt_status6 &= ~AFMT_AZ_FORMAT_WTRIG;
5067                                         queue_hdmi = true;
5068                                         DRM_DEBUG("IH: HDMI5\n");
5069                                 }
5070                                 break;
5071                         default:
5072                                 DRM_ERROR("Unhandled interrupt: %d %d\n", src_id, src_data);
5073                                 break;
5074                         }
5075                 case 96:
5076                         DRM_ERROR("SRBM_READ_ERROR: 0x%x\n", RREG32(SRBM_READ_ERROR));
5077                         WREG32(SRBM_INT_ACK, 0x1);
5078                         break;
5079                 case 124: /* UVD */
5080                         DRM_DEBUG("IH: UVD int: 0x%08x\n", src_data);
5081                         radeon_fence_process(rdev, R600_RING_TYPE_UVD_INDEX);
5082                         break;
5083                 case 146:
5084                 case 147:
5085                         addr = RREG32(VM_CONTEXT1_PROTECTION_FAULT_ADDR);
5086                         status = RREG32(VM_CONTEXT1_PROTECTION_FAULT_STATUS);
5087                         /* reset addr and status */
5088                         WREG32_P(VM_CONTEXT1_CNTL2, 1, ~1);
5089                         if (addr == 0x0 && status == 0x0)
5090                                 break;
5091                         dev_err(rdev->dev, "GPU fault detected: %d 0x%08x\n", src_id, src_data);
5092                         dev_err(rdev->dev, "  VM_CONTEXT1_PROTECTION_FAULT_ADDR   0x%08X\n",
5093                                 addr);
5094                         dev_err(rdev->dev, "  VM_CONTEXT1_PROTECTION_FAULT_STATUS 0x%08X\n",
5095                                 status);
5096                         cayman_vm_decode_fault(rdev, status, addr);
5097                         break;
5098                 case 176: /* CP_INT in ring buffer */
5099                 case 177: /* CP_INT in IB1 */
5100                 case 178: /* CP_INT in IB2 */
5101                         DRM_DEBUG("IH: CP int: 0x%08x\n", src_data);
5102                         radeon_fence_process(rdev, RADEON_RING_TYPE_GFX_INDEX);
5103                         break;
5104                 case 181: /* CP EOP event */
5105                         DRM_DEBUG("IH: CP EOP\n");
5106                         if (rdev->family >= CHIP_CAYMAN) {
5107                                 switch (src_data) {
5108                                 case 0:
5109                                         radeon_fence_process(rdev, RADEON_RING_TYPE_GFX_INDEX);
5110                                         break;
5111                                 case 1:
5112                                         radeon_fence_process(rdev, CAYMAN_RING_TYPE_CP1_INDEX);
5113                                         break;
5114                                 case 2:
5115                                         radeon_fence_process(rdev, CAYMAN_RING_TYPE_CP2_INDEX);
5116                                         break;
5117                                 }
5118                         } else
5119                                 radeon_fence_process(rdev, RADEON_RING_TYPE_GFX_INDEX);
5120                         break;
5121                 case 224: /* DMA trap event */
5122                         DRM_DEBUG("IH: DMA trap\n");
5123                         radeon_fence_process(rdev, R600_RING_TYPE_DMA_INDEX);
5124                         break;
5125                 case 230: /* thermal low to high */
5126                         DRM_DEBUG("IH: thermal low to high\n");
5127                         rdev->pm.dpm.thermal.high_to_low = false;
5128                         queue_thermal = true;
5129                         break;
5130                 case 231: /* thermal high to low */
5131                         DRM_DEBUG("IH: thermal high to low\n");
5132                         rdev->pm.dpm.thermal.high_to_low = true;
5133                         queue_thermal = true;
5134                         break;
5135                 case 233: /* GUI IDLE */
5136                         DRM_DEBUG("IH: GUI idle\n");
5137                         break;
5138                 case 244: /* DMA trap event */
5139                         if (rdev->family >= CHIP_CAYMAN) {
5140                                 DRM_DEBUG("IH: DMA1 trap\n");
5141                                 radeon_fence_process(rdev, CAYMAN_RING_TYPE_DMA1_INDEX);
5142                         }
5143                         break;
5144                 default:
5145                         DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
5146                         break;
5147                 }
5148
5149                 /* wptr/rptr are in bytes! */
5150                 rptr += 16;
5151                 rptr &= rdev->ih.ptr_mask;
5152                 WREG32(IH_RB_RPTR, rptr);
5153         }
5154         if (queue_hotplug)
5155                 schedule_work(&rdev->hotplug_work);
5156         if (queue_hdmi)
5157                 schedule_work(&rdev->audio_work);
5158         if (queue_thermal && rdev->pm.dpm_enabled)
5159                 schedule_work(&rdev->pm.dpm.thermal.work);
5160         rdev->ih.rptr = rptr;
5161         atomic_set(&rdev->ih.lock, 0);
5162
5163         /* make sure wptr hasn't changed while processing */
5164         wptr = evergreen_get_ih_wptr(rdev);
5165         if (wptr != rptr)
5166                 goto restart_ih;
5167
5168         return IRQ_HANDLED;
5169 }
5170
/**
 * evergreen_startup - program the asic and start the requested blocks
 *
 * @rdev: radeon_device pointer
 *
 * Bring the hw to an operational state: program the MC and GART, load
 * microcode, allocate the rlc/wb buffers, start the fence driver, IH,
 * CP/DMA/UVD rings, IB pool and audio.  Shared by the init and resume
 * paths.  The sequence is order dependent.
 *
 * Returns 0 on success, negative error code on failure.
 */
static int evergreen_startup(struct radeon_device *rdev)
{
	struct radeon_ring *ring;
	int r;

	/* enable pcie gen2 link */
	evergreen_pcie_gen2_enable(rdev);
	/* enable aspm */
	evergreen_program_aspm(rdev);

	/* scratch needs to be initialized before MC */
	r = r600_vram_scratch_init(rdev);
	if (r)
		return r;

	evergreen_mc_program(rdev);

	/* DCE5: load the MC ucode here only when dpm is disabled
	 * (presumably dpm handles it otherwise — NOTE(review): confirm)
	 */
	if (ASIC_IS_DCE5(rdev) && !rdev->pm.dpm_enabled) {
		r = ni_mc_load_microcode(rdev);
		if (r) {
			DRM_ERROR("Failed to load MC firmware!\n");
			return r;
		}
	}

	if (rdev->flags & RADEON_IS_AGP) {
		evergreen_agp_enable(rdev);
	} else {
		r = evergreen_pcie_gart_enable(rdev);
		if (r)
			return r;
	}
	evergreen_gpu_init(rdev);

	/* allocate rlc buffers (IGP parts only) */
	if (rdev->flags & RADEON_IS_IGP) {
		rdev->rlc.reg_list = sumo_rlc_save_restore_register_list;
		rdev->rlc.reg_list_size =
			(u32)ARRAY_SIZE(sumo_rlc_save_restore_register_list);
		rdev->rlc.cs_data = evergreen_cs_data;
		r = sumo_rlc_init(rdev);
		if (r) {
			DRM_ERROR("Failed to init rlc BOs!\n");
			return r;
		}
	}

	/* allocate wb buffer */
	r = radeon_wb_init(rdev);
	if (r)
		return r;

	r = radeon_fence_driver_start_ring(rdev, RADEON_RING_TYPE_GFX_INDEX);
	if (r) {
		dev_err(rdev->dev, "failed initializing CP fences (%d).\n", r);
		return r;
	}

	r = radeon_fence_driver_start_ring(rdev, R600_RING_TYPE_DMA_INDEX);
	if (r) {
		dev_err(rdev->dev, "failed initializing DMA fences (%d).\n", r);
		return r;
	}

	/* UVD is optional: on any failure the UVD ring is disabled below
	 * (ring_size = 0) rather than failing the whole startup.
	 */
	r = uvd_v2_2_resume(rdev);
	if (!r) {
		r = radeon_fence_driver_start_ring(rdev,
						   R600_RING_TYPE_UVD_INDEX);
		if (r)
			dev_err(rdev->dev, "UVD fences init error (%d).\n", r);
	}

	if (r)
		rdev->ring[R600_RING_TYPE_UVD_INDEX].ring_size = 0;

	/* Enable IRQ */
	if (!rdev->irq.installed) {
		r = radeon_irq_kms_init(rdev);
		if (r)
			return r;
	}

	r = r600_irq_init(rdev);
	if (r) {
		DRM_ERROR("radeon: IH init failed (%d).\n", r);
		radeon_irq_kms_fini(rdev);
		return r;
	}
	evergreen_irq_set(rdev);

	ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
	r = radeon_ring_init(rdev, ring, ring->ring_size, RADEON_WB_CP_RPTR_OFFSET,
			     RADEON_CP_PACKET2);
	if (r)
		return r;

	ring = &rdev->ring[R600_RING_TYPE_DMA_INDEX];
	r = radeon_ring_init(rdev, ring, ring->ring_size, R600_WB_DMA_RPTR_OFFSET,
			     DMA_PACKET(DMA_PACKET_NOP, 0, 0));
	if (r)
		return r;

	r = evergreen_cp_load_microcode(rdev);
	if (r)
		return r;
	r = evergreen_cp_resume(rdev);
	if (r)
		return r;
	r = r600_dma_resume(rdev);
	if (r)
		return r;

	/* ring_size == 0 means UVD resume failed above; skip it */
	ring = &rdev->ring[R600_RING_TYPE_UVD_INDEX];
	if (ring->ring_size) {
		r = radeon_ring_init(rdev, ring, ring->ring_size, 0,
				     RADEON_CP_PACKET2);
		if (!r)
			r = uvd_v1_0_init(rdev);

		if (r)
			DRM_ERROR("radeon: error initializing UVD (%d).\n", r);
	}

	r = radeon_ib_pool_init(rdev);
	if (r) {
		dev_err(rdev->dev, "IB initialization failed (%d).\n", r);
		return r;
	}

	r = radeon_audio_init(rdev);
	if (r) {
		DRM_ERROR("radeon: audio init failed\n");
		return r;
	}

	return 0;
}
5308
5309 int evergreen_resume(struct radeon_device *rdev)
5310 {
5311         int r;
5312
5313         /* reset the asic, the gfx blocks are often in a bad state
5314          * after the driver is unloaded or after a resume
5315          */
5316         if (radeon_asic_reset(rdev))
5317                 dev_warn(rdev->dev, "GPU reset failed !\n");
5318         /* Do not reset GPU before posting, on rv770 hw unlike on r500 hw,
5319          * posting will perform necessary task to bring back GPU into good
5320          * shape.
5321          */
5322         /* post card */
5323         atom_asic_init(rdev->mode_info.atom_context);
5324
5325         /* init golden registers */
5326         evergreen_init_golden_registers(rdev);
5327
5328         if (rdev->pm.pm_method == PM_METHOD_DPM)
5329                 radeon_pm_resume(rdev);
5330
5331         rdev->accel_working = true;
5332         r = evergreen_startup(rdev);
5333         if (r) {
5334                 DRM_ERROR("evergreen startup failed on resume\n");
5335                 rdev->accel_working = false;
5336                 return r;
5337         }
5338
5339         return r;
5340
5341 }
5342
/**
 * evergreen_suspend - quiesce the asic
 *
 * @rdev: radeon_device pointer
 *
 * Suspends power management and audio, shuts down UVD, stops the CP
 * and DMA engines, suspends interrupts, then disables writeback and
 * the GART.  The call order matters.  Always returns 0.
 */
int evergreen_suspend(struct radeon_device *rdev)
{
	radeon_pm_suspend(rdev);
	radeon_audio_fini(rdev);
	uvd_v1_0_fini(rdev);
	radeon_uvd_suspend(rdev);
	r700_cp_stop(rdev);
	r600_dma_stop(rdev);
	evergreen_irq_suspend(rdev);
	radeon_wb_disable(rdev);
	evergreen_pcie_gart_disable(rdev);

	return 0;
}
5357
/* The plan is to move initialization into this function and use
 * helper functions so that radeon_device_init does little more
 * than call asic-specific functions.  This should also allow us
 * to remove a bunch of callback functions like vram_info.
 */
/**
 * evergreen_init - asic specific driver and hw init
 *
 * @rdev: radeon_device pointer
 *
 * Set up asic specific driver state and program the hw to a functional
 * state: BIOS fetch/post, golden registers, clocks, fence driver, MC,
 * memory manager, microcode, rings, IH and finally evergreen_startup().
 * Called once at driver load.
 *
 * Returns 0 on success, negative error code on failure.
 */
int evergreen_init(struct radeon_device *rdev)
{
	int r;

	/* Read BIOS */
	if (!radeon_get_bios(rdev)) {
		if (ASIC_IS_AVIVO(rdev))
			return -EINVAL;
	}
	/* Must be an ATOMBIOS */
	if (!rdev->is_atom_bios) {
		dev_err(rdev->dev, "Expecting atombios for evergreen GPU\n");
		return -EINVAL;
	}
	r = radeon_atombios_init(rdev);
	if (r)
		return r;
	/* reset the asic, the gfx blocks are often in a bad state
	 * after the driver is unloaded or after a resume
	 */
	if (radeon_asic_reset(rdev))
		dev_warn(rdev->dev, "GPU reset failed !\n");
	/* Post card if necessary */
	if (!radeon_card_posted(rdev)) {
		if (!rdev->bios) {
			dev_err(rdev->dev, "Card not posted and no BIOS - ignoring\n");
			return -EINVAL;
		}
		DRM_INFO("GPU not posted. posting now...\n");
		atom_asic_init(rdev->mode_info.atom_context);
	}
	/* init golden registers */
	evergreen_init_golden_registers(rdev);
	/* Initialize scratch registers */
	r600_scratch_init(rdev);
	/* Initialize surface registers */
	radeon_surface_init(rdev);
	/* Initialize clocks */
	radeon_get_clock_info(rdev->ddev);
	/* Fence driver */
	r = radeon_fence_driver_init(rdev);
	if (r)
		return r;
	/* initialize AGP; AGP failure is non-fatal, fall back to PCI(E) */
	if (rdev->flags & RADEON_IS_AGP) {
		r = radeon_agp_init(rdev);
		if (r)
			radeon_agp_disable(rdev);
	}
	/* initialize memory controller */
	r = evergreen_mc_init(rdev);
	if (r)
		return r;
	/* Memory manager */
	r = radeon_bo_init(rdev);
	if (r)
		return r;

	/* fetch the me/pfp/rlc (and, on DCE5, MC) microcode if not
	 * already loaded
	 */
	if (ASIC_IS_DCE5(rdev)) {
		if (!rdev->me_fw || !rdev->pfp_fw || !rdev->rlc_fw || !rdev->mc_fw) {
			r = ni_init_microcode(rdev);
			if (r) {
				DRM_ERROR("Failed to load firmware!\n");
				return r;
			}
		}
	} else {
		if (!rdev->me_fw || !rdev->pfp_fw || !rdev->rlc_fw) {
			r = r600_init_microcode(rdev);
			if (r) {
				DRM_ERROR("Failed to load firmware!\n");
				return r;
			}
		}
	}

	/* Initialize power management */
	radeon_pm_init(rdev);

	rdev->ring[RADEON_RING_TYPE_GFX_INDEX].ring_obj = NULL;
	r600_ring_init(rdev, &rdev->ring[RADEON_RING_TYPE_GFX_INDEX], 1024 * 1024);

	rdev->ring[R600_RING_TYPE_DMA_INDEX].ring_obj = NULL;
	r600_ring_init(rdev, &rdev->ring[R600_RING_TYPE_DMA_INDEX], 64 * 1024);

	/* UVD is optional: only set up its ring when uvd init succeeds */
	r = radeon_uvd_init(rdev);
	if (!r) {
		rdev->ring[R600_RING_TYPE_UVD_INDEX].ring_obj = NULL;
		r600_ring_init(rdev, &rdev->ring[R600_RING_TYPE_UVD_INDEX],
			       4096);
	}

	rdev->ih.ring_obj = NULL;
	r600_ih_ring_init(rdev, 64 * 1024);

	r = r600_pcie_gart_init(rdev);
	if (r)
		return r;

	rdev->accel_working = true;
	r = evergreen_startup(rdev);
	if (r) {
		/* startup failure: tear the accel blocks back down but
		 * continue driver load with accel disabled
		 */
		dev_err(rdev->dev, "disabling GPU acceleration\n");
		r700_cp_fini(rdev);
		r600_dma_fini(rdev);
		r600_irq_fini(rdev);
		if (rdev->flags & RADEON_IS_IGP)
			sumo_rlc_fini(rdev);
		radeon_wb_fini(rdev);
		radeon_ib_pool_fini(rdev);
		radeon_irq_kms_fini(rdev);
		evergreen_pcie_gart_fini(rdev);
		rdev->accel_working = false;
	}

	/* Don't start up if the MC ucode is missing on BTC parts.
	 * The default clocks and voltages before the MC ucode
	 * is loaded are not sufficient for advanced operations.
	 */
	if (ASIC_IS_DCE5(rdev)) {
		if (!rdev->mc_fw && !(rdev->flags & RADEON_IS_IGP)) {
			DRM_ERROR("radeon: MC ucode required for NI+.\n");
			return -EINVAL;
		}
	}

	return 0;
}
5492
/**
 * evergreen_fini - asic specific driver and hw teardown
 *
 * @rdev: radeon_device pointer
 *
 * Tear down everything set up by evergreen_init()/evergreen_startup()
 * and free the BIOS copy.  Called once at driver unload.
 */
void evergreen_fini(struct radeon_device *rdev)
{
	radeon_pm_fini(rdev);
	radeon_audio_fini(rdev);
	r700_cp_fini(rdev);
	r600_dma_fini(rdev);
	r600_irq_fini(rdev);
	/* rlc buffers are only allocated on IGP parts (see startup) */
	if (rdev->flags & RADEON_IS_IGP)
		sumo_rlc_fini(rdev);
	radeon_wb_fini(rdev);
	radeon_ib_pool_fini(rdev);
	radeon_irq_kms_fini(rdev);
	uvd_v1_0_fini(rdev);
	radeon_uvd_fini(rdev);
	evergreen_pcie_gart_fini(rdev);
	r600_vram_scratch_fini(rdev);
	radeon_gem_fini(rdev);
	radeon_fence_driver_fini(rdev);
	radeon_agp_fini(rdev);
	radeon_bo_fini(rdev);
	radeon_atombios_fini(rdev);
	kfree(rdev->bios);
	rdev->bios = NULL;
}
5517
5518 void evergreen_pcie_gen2_enable(struct radeon_device *rdev)
5519 {
5520         u32 link_width_cntl, speed_cntl;
5521
5522         if (radeon_pcie_gen2 == 0)
5523                 return;
5524
5525         if (rdev->flags & RADEON_IS_IGP)
5526                 return;
5527
5528         if (!(rdev->flags & RADEON_IS_PCIE))
5529                 return;
5530
5531         /* x2 cards have a special sequence */
5532         if (ASIC_IS_X2(rdev))
5533                 return;
5534
5535         if ((rdev->pdev->bus->max_bus_speed != PCIE_SPEED_5_0GT) &&
5536                 (rdev->pdev->bus->max_bus_speed != PCIE_SPEED_8_0GT))
5537                 return;
5538
5539         speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
5540         if (speed_cntl & LC_CURRENT_DATA_RATE) {
5541                 DRM_INFO("PCIE gen 2 link speeds already enabled\n");
5542                 return;
5543         }
5544
5545         DRM_INFO("enabling PCIE gen 2 link speeds, disable with radeon.pcie_gen2=0\n");
5546
5547         if ((speed_cntl & LC_OTHER_SIDE_EVER_SENT_GEN2) ||
5548             (speed_cntl & LC_OTHER_SIDE_SUPPORTS_GEN2)) {
5549
5550                 link_width_cntl = RREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL);
5551                 link_width_cntl &= ~LC_UPCONFIGURE_DIS;
5552                 WREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL, link_width_cntl);
5553
5554                 speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
5555                 speed_cntl &= ~LC_TARGET_LINK_SPEED_OVERRIDE_EN;
5556                 WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, speed_cntl);
5557
5558                 speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
5559                 speed_cntl |= LC_CLR_FAILED_SPD_CHANGE_CNT;
5560                 WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, speed_cntl);
5561
5562                 speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
5563                 speed_cntl &= ~LC_CLR_FAILED_SPD_CHANGE_CNT;
5564                 WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, speed_cntl);
5565
5566                 speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
5567                 speed_cntl |= LC_GEN2_EN_STRAP;
5568                 WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, speed_cntl);
5569
5570         } else {
5571                 link_width_cntl = RREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL);
5572                 /* XXX: only disable it if gen1 bridge vendor == 0x111d or 0x1106 */
5573                 if (1)
5574                         link_width_cntl |= LC_UPCONFIGURE_DIS;
5575                 else
5576                         link_width_cntl &= ~LC_UPCONFIGURE_DIS;
5577                 WREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL, link_width_cntl);
5578         }
5579 }
5580
/**
 * evergreen_program_aspm - program PCIE ASPM (Active State Power Management)
 *
 * @rdev: radeon_device pointer
 *
 * Configures the PCIE link controller (LC) and PIF PHY registers so the
 * link may enter the ASPM L0s/L1 power-saving states.  Every register
 * update below is read-modify-write and is only written back when the
 * value actually changed, to avoid redundant register writes.  Bails out
 * early when ASPM is disabled via the radeon.aspm module parameter or
 * when the device is not on a PCIE bus.
 */
void evergreen_program_aspm(struct radeon_device *rdev)
{
	u32 data, orig;
	u32 pcie_lc_cntl, pcie_lc_cntl_old;
	bool disable_l0s, disable_l1 = false, disable_plloff_in_l1 = false;
	/* fusion_platform = true
	 * if the system is a fusion system
	 * (APU or DGPU in a fusion system).
	 * todo: check if the system is a fusion platform.
	 */
	bool fusion_platform = false;

	/* honor the radeon.aspm=0 module parameter */
	if (radeon_aspm == 0)
		return;

	/* ASPM only applies to PCIE links */
	if (!(rdev->flags & RADEON_IS_PCIE))
		return;

	/* L0s stays disabled on these families; NOTE(review): the reason is
	 * not visible here -- presumably a hw limitation on L0s entry/exit,
	 * confirm against the asic register documentation.
	 */
	switch (rdev->family) {
	case CHIP_CYPRESS:
	case CHIP_HEMLOCK:
	case CHIP_JUNIPER:
	case CHIP_REDWOOD:
	case CHIP_CEDAR:
	case CHIP_SUMO:
	case CHIP_SUMO2:
	case CHIP_PALM:
	case CHIP_ARUBA:
		disable_l0s = true;
		break;
	default:
		disable_l0s = false;
		break;
	}

	if (rdev->flags & RADEON_IS_IGP)
		fusion_platform = true; /* XXX also dGPUs in a fusion system */

	/* PIF pairing: clear MULTI_PIF on fusion platforms, set it on
	 * discrete boards (same setting applied to both PHY0 and PHY1).
	 */
	data = orig = RREG32_PIF_PHY0(PB0_PIF_PAIRING);
	if (fusion_platform)
		data &= ~MULTI_PIF;
	else
		data |= MULTI_PIF;
	if (data != orig)
		WREG32_PIF_PHY0(PB0_PIF_PAIRING, data);

	data = orig = RREG32_PIF_PHY1(PB1_PIF_PAIRING);
	if (fusion_platform)
		data &= ~MULTI_PIF;
	else
		data |= MULTI_PIF;
	if (data != orig)
		WREG32_PIF_PHY1(PB1_PIF_PAIRING, data);

	/* Build up the new LC_CNTL value locally; it is only written back
	 * at the very end of the function if it differs from the old value.
	 */
	pcie_lc_cntl = pcie_lc_cntl_old = RREG32_PCIE_PORT(PCIE_LC_CNTL);
	pcie_lc_cntl &= ~(LC_L0S_INACTIVITY_MASK | LC_L1_INACTIVITY_MASK);
	if (!disable_l0s) {
		/* L0s inactivity timeout: BARTS and newer use a different
		 * encoding than older evergreen parts.
		 */
		if (rdev->family >= CHIP_BARTS)
			pcie_lc_cntl |= LC_L0S_INACTIVITY(7);
		else
			pcie_lc_cntl |= LC_L0S_INACTIVITY(3);
	}

	if (!disable_l1) {
		/* L1 inactivity timeout, again family dependent */
		if (rdev->family >= CHIP_BARTS)
			pcie_lc_cntl |= LC_L1_INACTIVITY(7);
		else
			pcie_lc_cntl |= LC_L1_INACTIVITY(8);

		/* Allow the PLLs to power off while the link is in L1:
		 * program the PWRDOWN power states for both lanes of both
		 * PHYs (OFF and TXS2 states set to level 7).
		 */
		if (!disable_plloff_in_l1) {
			data = orig = RREG32_PIF_PHY0(PB0_PIF_PWRDOWN_0);
			data &= ~(PLL_POWER_STATE_IN_OFF_0_MASK | PLL_POWER_STATE_IN_TXS2_0_MASK);
			data |= PLL_POWER_STATE_IN_OFF_0(7) | PLL_POWER_STATE_IN_TXS2_0(7);
			if (data != orig)
				WREG32_PIF_PHY0(PB0_PIF_PWRDOWN_0, data);

			data = orig = RREG32_PIF_PHY0(PB0_PIF_PWRDOWN_1);
			data &= ~(PLL_POWER_STATE_IN_OFF_1_MASK | PLL_POWER_STATE_IN_TXS2_1_MASK);
			data |= PLL_POWER_STATE_IN_OFF_1(7) | PLL_POWER_STATE_IN_TXS2_1(7);
			if (data != orig)
				WREG32_PIF_PHY0(PB0_PIF_PWRDOWN_1, data);

			data = orig = RREG32_PIF_PHY1(PB1_PIF_PWRDOWN_0);
			data &= ~(PLL_POWER_STATE_IN_OFF_0_MASK | PLL_POWER_STATE_IN_TXS2_0_MASK);
			data |= PLL_POWER_STATE_IN_OFF_0(7) | PLL_POWER_STATE_IN_TXS2_0(7);
			if (data != orig)
				WREG32_PIF_PHY1(PB1_PIF_PWRDOWN_0, data);

			data = orig = RREG32_PIF_PHY1(PB1_PIF_PWRDOWN_1);
			data &= ~(PLL_POWER_STATE_IN_OFF_1_MASK | PLL_POWER_STATE_IN_TXS2_1_MASK);
			data |= PLL_POWER_STATE_IN_OFF_1(7) | PLL_POWER_STATE_IN_TXS2_1(7);
			if (data != orig)
				WREG32_PIF_PHY1(PB1_PIF_PWRDOWN_1, data);

			/* BARTS and newer additionally program the PLL
			 * ramp-up time for both lanes of both PHYs.
			 */
			if (rdev->family >= CHIP_BARTS) {
				data = orig = RREG32_PIF_PHY0(PB0_PIF_PWRDOWN_0);
				data &= ~PLL_RAMP_UP_TIME_0_MASK;
				data |= PLL_RAMP_UP_TIME_0(4);
				if (data != orig)
					WREG32_PIF_PHY0(PB0_PIF_PWRDOWN_0, data);

				data = orig = RREG32_PIF_PHY0(PB0_PIF_PWRDOWN_1);
				data &= ~PLL_RAMP_UP_TIME_1_MASK;
				data |= PLL_RAMP_UP_TIME_1(4);
				if (data != orig)
					WREG32_PIF_PHY0(PB0_PIF_PWRDOWN_1, data);

				data = orig = RREG32_PIF_PHY1(PB1_PIF_PWRDOWN_0);
				data &= ~PLL_RAMP_UP_TIME_0_MASK;
				data |= PLL_RAMP_UP_TIME_0(4);
				if (data != orig)
					WREG32_PIF_PHY1(PB1_PIF_PWRDOWN_0, data);

				data = orig = RREG32_PIF_PHY1(PB1_PIF_PWRDOWN_1);
				data &= ~PLL_RAMP_UP_TIME_1_MASK;
				data |= PLL_RAMP_UP_TIME_1(4);
				if (data != orig)
					WREG32_PIF_PHY1(PB1_PIF_PWRDOWN_1, data);
			}

			/* set the dynamic lane power-state policy */
			data = orig = RREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL);
			data &= ~LC_DYN_LANES_PWR_STATE_MASK;
			data |= LC_DYN_LANES_PWR_STATE(3);
			if (data != orig)
				WREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL, data);

			/* BARTS and newer: shorten the LS2 exit time on
			 * both PHYs.
			 */
			if (rdev->family >= CHIP_BARTS) {
				data = orig = RREG32_PIF_PHY0(PB0_PIF_CNTL);
				data &= ~LS2_EXIT_TIME_MASK;
				data |= LS2_EXIT_TIME(1);
				if (data != orig)
					WREG32_PIF_PHY0(PB0_PIF_CNTL, data);

				data = orig = RREG32_PIF_PHY1(PB1_PIF_CNTL);
				data &= ~LS2_EXIT_TIME_MASK;
				data |= LS2_EXIT_TIME(1);
				if (data != orig)
					WREG32_PIF_PHY1(PB1_PIF_CNTL, data);
			}
		}
	}

	/* evergreen parts only */
	if (rdev->family < CHIP_BARTS)
		pcie_lc_cntl |= LC_PMI_TO_L1_DIS;

	/* commit the accumulated LC_CNTL changes, if any */
	if (pcie_lc_cntl != pcie_lc_cntl_old)
		WREG32_PCIE_PORT(PCIE_LC_CNTL, pcie_lc_cntl);
}