1/*
2 * Copyright 2011 Advanced Micro Devices, Inc.
3 *
4 * Permission is hereby granted, free of charge, to any person obtaining a
5 * copy of this software and associated documentation files (the "Software"),
6 * to deal in the Software without restriction, including without limitation
7 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
8 * and/or sell copies of the Software, and to permit persons to whom the
9 * Software is furnished to do so, subject to the following conditions:
10 *
11 * The above copyright notice and this permission notice shall be included in
12 * all copies or substantial portions of the Software.
13 *
14 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
15 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
16 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
17 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
18 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
19 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
20 * OTHER DEALINGS IN THE SOFTWARE.
21 *
22 * Authors: Alex Deucher
23 */
24#include <linux/firmware.h>
25#include <linux/slab.h>
26#include <linux/module.h>
27#include <drm/drmP.h>
28#include "radeon.h"
29#include "radeon_asic.h"
30#include <drm/radeon_drm.h>
31#include "sid.h"
32#include "atom.h"
33#include "si_blit_shaders.h"
34#include "clearstate_si.h"
35#include "radeon_ucode.h"
36
37
/*
 * Firmware images required per SI ASIC: command processor (pfp/me/ce),
 * memory controller (mc/mc2), RLC, and SMC microcode.  Listing them with
 * MODULE_FIRMWARE() records the dependency in the module metadata so
 * userspace tooling (e.g. initramfs generators) can bundle the files.
 */
MODULE_FIRMWARE("radeon/TAHITI_pfp.bin");
MODULE_FIRMWARE("radeon/TAHITI_me.bin");
MODULE_FIRMWARE("radeon/TAHITI_ce.bin");
MODULE_FIRMWARE("radeon/TAHITI_mc.bin");
MODULE_FIRMWARE("radeon/TAHITI_mc2.bin");
MODULE_FIRMWARE("radeon/TAHITI_rlc.bin");
MODULE_FIRMWARE("radeon/TAHITI_smc.bin");
MODULE_FIRMWARE("radeon/PITCAIRN_pfp.bin");
MODULE_FIRMWARE("radeon/PITCAIRN_me.bin");
MODULE_FIRMWARE("radeon/PITCAIRN_ce.bin");
MODULE_FIRMWARE("radeon/PITCAIRN_mc.bin");
MODULE_FIRMWARE("radeon/PITCAIRN_mc2.bin");
MODULE_FIRMWARE("radeon/PITCAIRN_rlc.bin");
MODULE_FIRMWARE("radeon/PITCAIRN_smc.bin");
MODULE_FIRMWARE("radeon/VERDE_pfp.bin");
MODULE_FIRMWARE("radeon/VERDE_me.bin");
MODULE_FIRMWARE("radeon/VERDE_ce.bin");
MODULE_FIRMWARE("radeon/VERDE_mc.bin");
MODULE_FIRMWARE("radeon/VERDE_mc2.bin");
MODULE_FIRMWARE("radeon/VERDE_rlc.bin");
MODULE_FIRMWARE("radeon/VERDE_smc.bin");
MODULE_FIRMWARE("radeon/OLAND_pfp.bin");
MODULE_FIRMWARE("radeon/OLAND_me.bin");
MODULE_FIRMWARE("radeon/OLAND_ce.bin");
MODULE_FIRMWARE("radeon/OLAND_mc.bin");
MODULE_FIRMWARE("radeon/OLAND_mc2.bin");
MODULE_FIRMWARE("radeon/OLAND_rlc.bin");
MODULE_FIRMWARE("radeon/OLAND_smc.bin");
MODULE_FIRMWARE("radeon/HAINAN_pfp.bin");
MODULE_FIRMWARE("radeon/HAINAN_me.bin");
MODULE_FIRMWARE("radeon/HAINAN_ce.bin");
MODULE_FIRMWARE("radeon/HAINAN_mc.bin");
MODULE_FIRMWARE("radeon/HAINAN_mc2.bin");
MODULE_FIRMWARE("radeon/HAINAN_rlc.bin");
MODULE_FIRMWARE("radeon/HAINAN_smc.bin");
73
/* Forward declarations for SI-local helpers defined later in this file. */
static void si_pcie_gen3_enable(struct radeon_device *rdev);
static void si_program_aspm(struct radeon_device *rdev);
/*
 * Shared helpers implemented in other ASIC files (sumo, r600, evergreen)
 * that SI reuses; declared extern here rather than via a header.
 */
extern void sumo_rlc_fini(struct radeon_device *rdev);
extern int sumo_rlc_init(struct radeon_device *rdev);
extern int r600_ih_ring_alloc(struct radeon_device *rdev);
extern void r600_ih_ring_fini(struct radeon_device *rdev);
extern void evergreen_fix_pci_max_read_req_size(struct radeon_device *rdev);
extern void evergreen_mc_stop(struct radeon_device *rdev, struct evergreen_mc_save *save);
extern void evergreen_mc_resume(struct radeon_device *rdev, struct evergreen_mc_save *save);
extern u32 evergreen_get_number_of_dram_channels(struct radeon_device *rdev);
extern void evergreen_print_gpu_status_regs(struct radeon_device *rdev);
extern bool evergreen_is_display_hung(struct radeon_device *rdev);
static void si_enable_gui_idle_interrupt(struct radeon_device *rdev,
					 bool enable);
static void si_init_pg(struct radeon_device *rdev);
static void si_init_cg(struct radeon_device *rdev);
static void si_fini_pg(struct radeon_device *rdev);
static void si_fini_cg(struct radeon_device *rdev);
static void si_rlc_stop(struct radeon_device *rdev);
93
94static const u32 verde_rlc_save_restore_register_list[] =
95{
96 (0x8000 << 16) | (0x98f4 >> 2),
97 0x00000000,
98 (0x8040 << 16) | (0x98f4 >> 2),
99 0x00000000,
100 (0x8000 << 16) | (0xe80 >> 2),
101 0x00000000,
102 (0x8040 << 16) | (0xe80 >> 2),
103 0x00000000,
104 (0x8000 << 16) | (0x89bc >> 2),
105 0x00000000,
106 (0x8040 << 16) | (0x89bc >> 2),
107 0x00000000,
108 (0x8000 << 16) | (0x8c1c >> 2),
109 0x00000000,
110 (0x8040 << 16) | (0x8c1c >> 2),
111 0x00000000,
112 (0x9c00 << 16) | (0x98f0 >> 2),
113 0x00000000,
114 (0x9c00 << 16) | (0xe7c >> 2),
115 0x00000000,
116 (0x8000 << 16) | (0x9148 >> 2),
117 0x00000000,
118 (0x8040 << 16) | (0x9148 >> 2),
119 0x00000000,
120 (0x9c00 << 16) | (0x9150 >> 2),
121 0x00000000,
122 (0x9c00 << 16) | (0x897c >> 2),
123 0x00000000,
124 (0x9c00 << 16) | (0x8d8c >> 2),
125 0x00000000,
126 (0x9c00 << 16) | (0xac54 >> 2),
127 0X00000000,
128 0x3,
129 (0x9c00 << 16) | (0x98f8 >> 2),
130 0x00000000,
131 (0x9c00 << 16) | (0x9910 >> 2),
132 0x00000000,
133 (0x9c00 << 16) | (0x9914 >> 2),
134 0x00000000,
135 (0x9c00 << 16) | (0x9918 >> 2),
136 0x00000000,
137 (0x9c00 << 16) | (0x991c >> 2),
138 0x00000000,
139 (0x9c00 << 16) | (0x9920 >> 2),
140 0x00000000,
141 (0x9c00 << 16) | (0x9924 >> 2),
142 0x00000000,
143 (0x9c00 << 16) | (0x9928 >> 2),
144 0x00000000,
145 (0x9c00 << 16) | (0x992c >> 2),
146 0x00000000,
147 (0x9c00 << 16) | (0x9930 >> 2),
148 0x00000000,
149 (0x9c00 << 16) | (0x9934 >> 2),
150 0x00000000,
151 (0x9c00 << 16) | (0x9938 >> 2),
152 0x00000000,
153 (0x9c00 << 16) | (0x993c >> 2),
154 0x00000000,
155 (0x9c00 << 16) | (0x9940 >> 2),
156 0x00000000,
157 (0x9c00 << 16) | (0x9944 >> 2),
158 0x00000000,
159 (0x9c00 << 16) | (0x9948 >> 2),
160 0x00000000,
161 (0x9c00 << 16) | (0x994c >> 2),
162 0x00000000,
163 (0x9c00 << 16) | (0x9950 >> 2),
164 0x00000000,
165 (0x9c00 << 16) | (0x9954 >> 2),
166 0x00000000,
167 (0x9c00 << 16) | (0x9958 >> 2),
168 0x00000000,
169 (0x9c00 << 16) | (0x995c >> 2),
170 0x00000000,
171 (0x9c00 << 16) | (0x9960 >> 2),
172 0x00000000,
173 (0x9c00 << 16) | (0x9964 >> 2),
174 0x00000000,
175 (0x9c00 << 16) | (0x9968 >> 2),
176 0x00000000,
177 (0x9c00 << 16) | (0x996c >> 2),
178 0x00000000,
179 (0x9c00 << 16) | (0x9970 >> 2),
180 0x00000000,
181 (0x9c00 << 16) | (0x9974 >> 2),
182 0x00000000,
183 (0x9c00 << 16) | (0x9978 >> 2),
184 0x00000000,
185 (0x9c00 << 16) | (0x997c >> 2),
186 0x00000000,
187 (0x9c00 << 16) | (0x9980 >> 2),
188 0x00000000,
189 (0x9c00 << 16) | (0x9984 >> 2),
190 0x00000000,
191 (0x9c00 << 16) | (0x9988 >> 2),
192 0x00000000,
193 (0x9c00 << 16) | (0x998c >> 2),
194 0x00000000,
195 (0x9c00 << 16) | (0x8c00 >> 2),
196 0x00000000,
197 (0x9c00 << 16) | (0x8c14 >> 2),
198 0x00000000,
199 (0x9c00 << 16) | (0x8c04 >> 2),
200 0x00000000,
201 (0x9c00 << 16) | (0x8c08 >> 2),
202 0x00000000,
203 (0x8000 << 16) | (0x9b7c >> 2),
204 0x00000000,
205 (0x8040 << 16) | (0x9b7c >> 2),
206 0x00000000,
207 (0x8000 << 16) | (0xe84 >> 2),
208 0x00000000,
209 (0x8040 << 16) | (0xe84 >> 2),
210 0x00000000,
211 (0x8000 << 16) | (0x89c0 >> 2),
212 0x00000000,
213 (0x8040 << 16) | (0x89c0 >> 2),
214 0x00000000,
215 (0x8000 << 16) | (0x914c >> 2),
216 0x00000000,
217 (0x8040 << 16) | (0x914c >> 2),
218 0x00000000,
219 (0x8000 << 16) | (0x8c20 >> 2),
220 0x00000000,
221 (0x8040 << 16) | (0x8c20 >> 2),
222 0x00000000,
223 (0x8000 << 16) | (0x9354 >> 2),
224 0x00000000,
225 (0x8040 << 16) | (0x9354 >> 2),
226 0x00000000,
227 (0x9c00 << 16) | (0x9060 >> 2),
228 0x00000000,
229 (0x9c00 << 16) | (0x9364 >> 2),
230 0x00000000,
231 (0x9c00 << 16) | (0x9100 >> 2),
232 0x00000000,
233 (0x9c00 << 16) | (0x913c >> 2),
234 0x00000000,
235 (0x8000 << 16) | (0x90e0 >> 2),
236 0x00000000,
237 (0x8000 << 16) | (0x90e4 >> 2),
238 0x00000000,
239 (0x8000 << 16) | (0x90e8 >> 2),
240 0x00000000,
241 (0x8040 << 16) | (0x90e0 >> 2),
242 0x00000000,
243 (0x8040 << 16) | (0x90e4 >> 2),
244 0x00000000,
245 (0x8040 << 16) | (0x90e8 >> 2),
246 0x00000000,
247 (0x9c00 << 16) | (0x8bcc >> 2),
248 0x00000000,
249 (0x9c00 << 16) | (0x8b24 >> 2),
250 0x00000000,
251 (0x9c00 << 16) | (0x88c4 >> 2),
252 0x00000000,
253 (0x9c00 << 16) | (0x8e50 >> 2),
254 0x00000000,
255 (0x9c00 << 16) | (0x8c0c >> 2),
256 0x00000000,
257 (0x9c00 << 16) | (0x8e58 >> 2),
258 0x00000000,
259 (0x9c00 << 16) | (0x8e5c >> 2),
260 0x00000000,
261 (0x9c00 << 16) | (0x9508 >> 2),
262 0x00000000,
263 (0x9c00 << 16) | (0x950c >> 2),
264 0x00000000,
265 (0x9c00 << 16) | (0x9494 >> 2),
266 0x00000000,
267 (0x9c00 << 16) | (0xac0c >> 2),
268 0x00000000,
269 (0x9c00 << 16) | (0xac10 >> 2),
270 0x00000000,
271 (0x9c00 << 16) | (0xac14 >> 2),
272 0x00000000,
273 (0x9c00 << 16) | (0xae00 >> 2),
274 0x00000000,
275 (0x9c00 << 16) | (0xac08 >> 2),
276 0x00000000,
277 (0x9c00 << 16) | (0x88d4 >> 2),
278 0x00000000,
279 (0x9c00 << 16) | (0x88c8 >> 2),
280 0x00000000,
281 (0x9c00 << 16) | (0x88cc >> 2),
282 0x00000000,
283 (0x9c00 << 16) | (0x89b0 >> 2),
284 0x00000000,
285 (0x9c00 << 16) | (0x8b10 >> 2),
286 0x00000000,
287 (0x9c00 << 16) | (0x8a14 >> 2),
288 0x00000000,
289 (0x9c00 << 16) | (0x9830 >> 2),
290 0x00000000,
291 (0x9c00 << 16) | (0x9834 >> 2),
292 0x00000000,
293 (0x9c00 << 16) | (0x9838 >> 2),
294 0x00000000,
295 (0x9c00 << 16) | (0x9a10 >> 2),
296 0x00000000,
297 (0x8000 << 16) | (0x9870 >> 2),
298 0x00000000,
299 (0x8000 << 16) | (0x9874 >> 2),
300 0x00000000,
301 (0x8001 << 16) | (0x9870 >> 2),
302 0x00000000,
303 (0x8001 << 16) | (0x9874 >> 2),
304 0x00000000,
305 (0x8040 << 16) | (0x9870 >> 2),
306 0x00000000,
307 (0x8040 << 16) | (0x9874 >> 2),
308 0x00000000,
309 (0x8041 << 16) | (0x9870 >> 2),
310 0x00000000,
311 (0x8041 << 16) | (0x9874 >> 2),
312 0x00000000,
313 0x00000000
314};
315
/*
 * Tahiti "golden" RLC register settings, as {offset, mask, value}
 * triples.  Presumably applied as read-modify-write (clear the masked
 * bits, OR in value) by the golden-register programming helper —
 * TODO(review): confirm against the consumer of these tables.
 */
static const u32 tahiti_golden_rlc_registers[] =
{
	0xc424, 0xffffffff, 0x00601005,
	0xc47c, 0xffffffff, 0x10104040,
	0xc488, 0xffffffff, 0x0100000a,
	0xc314, 0xffffffff, 0x00000800,
	0xc30c, 0xffffffff, 0x800000f4,
	0xf4a8, 0xffffffff, 0x00000000
};
325
/* Tahiti golden register settings: {offset, mask, value} triples. */
static const u32 tahiti_golden_registers[] =
{
	0x9a10, 0x00010000, 0x00018208,
	0x9830, 0xffffffff, 0x00000000,
	0x9834, 0xf00fffff, 0x00000400,
	0x9838, 0x0002021c, 0x00020200,
	0xc78, 0x00000080, 0x00000000,
	0xd030, 0x000300c0, 0x00800040,
	0xd830, 0x000300c0, 0x00800040,
	0x5bb0, 0x000000f0, 0x00000070,
	0x5bc0, 0x00200000, 0x50100000,
	0x7030, 0x31000311, 0x00000011,
	0x277c, 0x00000003, 0x000007ff,
	0x240c, 0x000007ff, 0x00000000,
	0x8a14, 0xf000001f, 0x00000007,
	0x8b24, 0xffffffff, 0x00ffffff,
	0x8b10, 0x0000ff0f, 0x00000000,
	0x28a4c, 0x07ffffff, 0x4e000000,
	0x28350, 0x3f3f3fff, 0x2a00126a,
	0x30, 0x000000ff, 0x0040,
	0x34, 0x00000040, 0x00004040,
	0x9100, 0x07ffffff, 0x03000000,
	0x8e88, 0x01ff1f3f, 0x00000000,
	0x8e84, 0x01ff1f3f, 0x00000000,
	0x9060, 0x0000007f, 0x00000020,
	0x9508, 0x00010000, 0x00010000,
	0xac14, 0x00000200, 0x000002fb,
	0xac10, 0xffffffff, 0x0000543b,
	0xac0c, 0xffffffff, 0xa9210876,
	0x88d0, 0xffffffff, 0x000fff40,
	0x88d4, 0x0000001f, 0x00000010,
	0x1410, 0x20000000, 0x20fffed8,
	0x15c0, 0x000c0fc0, 0x000c0400
};
360
/* Additional Tahiti golden register settings: {offset, mask, value}. */
static const u32 tahiti_golden_registers2[] =
{
	0xc64, 0x00000001, 0x00000001
};
365
/* Pitcairn golden RLC register settings: {offset, mask, value} triples. */
static const u32 pitcairn_golden_rlc_registers[] =
{
	0xc424, 0xffffffff, 0x00601004,
	0xc47c, 0xffffffff, 0x10102020,
	0xc488, 0xffffffff, 0x01000020,
	0xc314, 0xffffffff, 0x00000800,
	0xc30c, 0xffffffff, 0x800000a4
};
374
/* Pitcairn golden register settings: {offset, mask, value} triples. */
static const u32 pitcairn_golden_registers[] =
{
	0x9a10, 0x00010000, 0x00018208,
	0x9830, 0xffffffff, 0x00000000,
	0x9834, 0xf00fffff, 0x00000400,
	0x9838, 0x0002021c, 0x00020200,
	0xc78, 0x00000080, 0x00000000,
	0xd030, 0x000300c0, 0x00800040,
	0xd830, 0x000300c0, 0x00800040,
	0x5bb0, 0x000000f0, 0x00000070,
	0x5bc0, 0x00200000, 0x50100000,
	0x7030, 0x31000311, 0x00000011,
	0x2ae4, 0x00073ffe, 0x000022a2,
	0x240c, 0x000007ff, 0x00000000,
	0x8a14, 0xf000001f, 0x00000007,
	0x8b24, 0xffffffff, 0x00ffffff,
	0x8b10, 0x0000ff0f, 0x00000000,
	0x28a4c, 0x07ffffff, 0x4e000000,
	0x28350, 0x3f3f3fff, 0x2a00126a,
	0x30, 0x000000ff, 0x0040,
	0x34, 0x00000040, 0x00004040,
	0x9100, 0x07ffffff, 0x03000000,
	0x9060, 0x0000007f, 0x00000020,
	0x9508, 0x00010000, 0x00010000,
	0xac14, 0x000003ff, 0x000000f7,
	0xac10, 0xffffffff, 0x00000000,
	0xac0c, 0xffffffff, 0x32761054,
	0x88d4, 0x0000001f, 0x00000010,
	0x15c0, 0x000c0fc0, 0x000c0400
};
405
/* Verde golden RLC register settings: {offset, mask, value} triples. */
static const u32 verde_golden_rlc_registers[] =
{
	0xc424, 0xffffffff, 0x033f1005,
	0xc47c, 0xffffffff, 0x10808020,
	0xc488, 0xffffffff, 0x00800008,
	0xc314, 0xffffffff, 0x00001000,
	0xc30c, 0xffffffff, 0x80010014
};
414
/*
 * Verde golden register settings: {offset, mask, value} triples.
 * NOTE(review): several entries are intentionally repeated (0xd030,
 * 0x2ae4, 0x240c, ...) — presumably one write per shader-engine/
 * instance; do not deduplicate without confirming.
 */
static const u32 verde_golden_registers[] =
{
	0x9a10, 0x00010000, 0x00018208,
	0x9830, 0xffffffff, 0x00000000,
	0x9834, 0xf00fffff, 0x00000400,
	0x9838, 0x0002021c, 0x00020200,
	0xc78, 0x00000080, 0x00000000,
	0xd030, 0x000300c0, 0x00800040,
	0xd030, 0x000300c0, 0x00800040,
	0xd830, 0x000300c0, 0x00800040,
	0xd830, 0x000300c0, 0x00800040,
	0x5bb0, 0x000000f0, 0x00000070,
	0x5bc0, 0x00200000, 0x50100000,
	0x7030, 0x31000311, 0x00000011,
	0x2ae4, 0x00073ffe, 0x000022a2,
	0x2ae4, 0x00073ffe, 0x000022a2,
	0x2ae4, 0x00073ffe, 0x000022a2,
	0x240c, 0x000007ff, 0x00000000,
	0x240c, 0x000007ff, 0x00000000,
	0x240c, 0x000007ff, 0x00000000,
	0x8a14, 0xf000001f, 0x00000007,
	0x8a14, 0xf000001f, 0x00000007,
	0x8a14, 0xf000001f, 0x00000007,
	0x8b24, 0xffffffff, 0x00ffffff,
	0x8b10, 0x0000ff0f, 0x00000000,
	0x28a4c, 0x07ffffff, 0x4e000000,
	0x28350, 0x3f3f3fff, 0x0000124a,
	0x28350, 0x3f3f3fff, 0x0000124a,
	0x28350, 0x3f3f3fff, 0x0000124a,
	0x30, 0x000000ff, 0x0040,
	0x34, 0x00000040, 0x00004040,
	0x9100, 0x07ffffff, 0x03000000,
	0x9100, 0x07ffffff, 0x03000000,
	0x8e88, 0x01ff1f3f, 0x00000000,
	0x8e88, 0x01ff1f3f, 0x00000000,
	0x8e88, 0x01ff1f3f, 0x00000000,
	0x8e84, 0x01ff1f3f, 0x00000000,
	0x8e84, 0x01ff1f3f, 0x00000000,
	0x8e84, 0x01ff1f3f, 0x00000000,
	0x9060, 0x0000007f, 0x00000020,
	0x9508, 0x00010000, 0x00010000,
	0xac14, 0x000003ff, 0x00000003,
	0xac14, 0x000003ff, 0x00000003,
	0xac14, 0x000003ff, 0x00000003,
	0xac10, 0xffffffff, 0x00000000,
	0xac10, 0xffffffff, 0x00000000,
	0xac10, 0xffffffff, 0x00000000,
	0xac0c, 0xffffffff, 0x00001032,
	0xac0c, 0xffffffff, 0x00001032,
	0xac0c, 0xffffffff, 0x00001032,
	0x88d4, 0x0000001f, 0x00000010,
	0x88d4, 0x0000001f, 0x00000010,
	0x88d4, 0x0000001f, 0x00000010,
	0x15c0, 0x000c0fc0, 0x000c0400
};
470
/* Oland golden RLC register settings: {offset, mask, value} triples. */
static const u32 oland_golden_rlc_registers[] =
{
	0xc424, 0xffffffff, 0x00601005,
	0xc47c, 0xffffffff, 0x10104040,
	0xc488, 0xffffffff, 0x0100000a,
	0xc314, 0xffffffff, 0x00000800,
	0xc30c, 0xffffffff, 0x800000f4
};
479
/* Oland golden register settings: {offset, mask, value} triples. */
static const u32 oland_golden_registers[] =
{
	0x9a10, 0x00010000, 0x00018208,
	0x9830, 0xffffffff, 0x00000000,
	0x9834, 0xf00fffff, 0x00000400,
	0x9838, 0x0002021c, 0x00020200,
	0xc78, 0x00000080, 0x00000000,
	0xd030, 0x000300c0, 0x00800040,
	0xd830, 0x000300c0, 0x00800040,
	0x5bb0, 0x000000f0, 0x00000070,
	0x5bc0, 0x00200000, 0x50100000,
	0x7030, 0x31000311, 0x00000011,
	0x2ae4, 0x00073ffe, 0x000022a2,
	0x240c, 0x000007ff, 0x00000000,
	0x8a14, 0xf000001f, 0x00000007,
	0x8b24, 0xffffffff, 0x00ffffff,
	0x8b10, 0x0000ff0f, 0x00000000,
	0x28a4c, 0x07ffffff, 0x4e000000,
	0x28350, 0x3f3f3fff, 0x00000082,
	0x30, 0x000000ff, 0x0040,
	0x34, 0x00000040, 0x00004040,
	0x9100, 0x07ffffff, 0x03000000,
	0x9060, 0x0000007f, 0x00000020,
	0x9508, 0x00010000, 0x00010000,
	0xac14, 0x000003ff, 0x000000f3,
	0xac10, 0xffffffff, 0x00000000,
	0xac0c, 0xffffffff, 0x00003210,
	0x88d4, 0x0000001f, 0x00000010,
	0x15c0, 0x000c0fc0, 0x000c0400
};
510
/* Hainan golden register settings: {offset, mask, value} triples. */
static const u32 hainan_golden_registers[] =
{
	0x9a10, 0x00010000, 0x00018208,
	0x9830, 0xffffffff, 0x00000000,
	0x9834, 0xf00fffff, 0x00000400,
	0x9838, 0x0002021c, 0x00020200,
	0xd0c0, 0xff000fff, 0x00000100,
	0xd030, 0x000300c0, 0x00800040,
	0xd8c0, 0xff000fff, 0x00000100,
	0xd830, 0x000300c0, 0x00800040,
	0x2ae4, 0x00073ffe, 0x000022a2,
	0x240c, 0x000007ff, 0x00000000,
	0x8a14, 0xf000001f, 0x00000007,
	0x8b24, 0xffffffff, 0x00ffffff,
	0x8b10, 0x0000ff0f, 0x00000000,
	0x28a4c, 0x07ffffff, 0x4e000000,
	0x28350, 0x3f3f3fff, 0x00000000,
	0x30, 0x000000ff, 0x0040,
	0x34, 0x00000040, 0x00004040,
	0x9100, 0x03e00000, 0x03600000,
	0x9060, 0x0000007f, 0x00000020,
	0x9508, 0x00010000, 0x00010000,
	0xac14, 0x000003ff, 0x000000f1,
	0xac10, 0xffffffff, 0x00000000,
	0xac0c, 0xffffffff, 0x00003210,
	0x88d4, 0x0000001f, 0x00000010,
	0x15c0, 0x000c0fc0, 0x000c0400
};
539
/* Additional Hainan golden register settings: {offset, mask, value}. */
static const u32 hainan_golden_registers2[] =
{
	0x98f8, 0xffffffff, 0x02010001
};
544
/*
 * Tahiti medium-grain / coarse-grain clock-gating (MGCG/CGCG) init
 * sequence: {offset, mask, value} triples.
 */
static const u32 tahiti_mgcg_cgcg_init[] =
{
	0xc400, 0xffffffff, 0xfffffffc,
	0x802c, 0xffffffff, 0xe0000000,
	0x9a60, 0xffffffff, 0x00000100,
	0x92a4, 0xffffffff, 0x00000100,
	0xc164, 0xffffffff, 0x00000100,
	0x9774, 0xffffffff, 0x00000100,
	0x8984, 0xffffffff, 0x06000100,
	0x8a18, 0xffffffff, 0x00000100,
	0x92a0, 0xffffffff, 0x00000100,
	0xc380, 0xffffffff, 0x00000100,
	0x8b28, 0xffffffff, 0x00000100,
	0x9144, 0xffffffff, 0x00000100,
	0x8d88, 0xffffffff, 0x00000100,
	0x8d8c, 0xffffffff, 0x00000100,
	0x9030, 0xffffffff, 0x00000100,
	0x9034, 0xffffffff, 0x00000100,
	0x9038, 0xffffffff, 0x00000100,
	0x903c, 0xffffffff, 0x00000100,
	0xad80, 0xffffffff, 0x00000100,
	0xac54, 0xffffffff, 0x00000100,
	0x897c, 0xffffffff, 0x06000100,
	0x9868, 0xffffffff, 0x00000100,
	0x9510, 0xffffffff, 0x00000100,
	0xaf04, 0xffffffff, 0x00000100,
	0xae04, 0xffffffff, 0x00000100,
	0x949c, 0xffffffff, 0x00000100,
	0x802c, 0xffffffff, 0xe0000000,
	0x9160, 0xffffffff, 0x00010000,
	0x9164, 0xffffffff, 0x00030002,
	0x9168, 0xffffffff, 0x00040007,
	0x916c, 0xffffffff, 0x00060005,
	0x9170, 0xffffffff, 0x00090008,
	0x9174, 0xffffffff, 0x00020001,
	0x9178, 0xffffffff, 0x00040003,
	0x917c, 0xffffffff, 0x00000007,
	0x9180, 0xffffffff, 0x00060005,
	0x9184, 0xffffffff, 0x00090008,
	0x9188, 0xffffffff, 0x00030002,
	0x918c, 0xffffffff, 0x00050004,
	0x9190, 0xffffffff, 0x00000008,
	0x9194, 0xffffffff, 0x00070006,
	0x9198, 0xffffffff, 0x000a0009,
	0x919c, 0xffffffff, 0x00040003,
	0x91a0, 0xffffffff, 0x00060005,
	0x91a4, 0xffffffff, 0x00000009,
	0x91a8, 0xffffffff, 0x00080007,
	0x91ac, 0xffffffff, 0x000b000a,
	0x91b0, 0xffffffff, 0x00050004,
	0x91b4, 0xffffffff, 0x00070006,
	0x91b8, 0xffffffff, 0x0008000b,
	0x91bc, 0xffffffff, 0x000a0009,
	0x91c0, 0xffffffff, 0x000d000c,
	0x91c4, 0xffffffff, 0x00060005,
	0x91c8, 0xffffffff, 0x00080007,
	0x91cc, 0xffffffff, 0x0000000b,
	0x91d0, 0xffffffff, 0x000a0009,
	0x91d4, 0xffffffff, 0x000d000c,
	0x91d8, 0xffffffff, 0x00070006,
	0x91dc, 0xffffffff, 0x00090008,
	0x91e0, 0xffffffff, 0x0000000c,
	0x91e4, 0xffffffff, 0x000b000a,
	0x91e8, 0xffffffff, 0x000e000d,
	0x91ec, 0xffffffff, 0x00080007,
	0x91f0, 0xffffffff, 0x000a0009,
	0x91f4, 0xffffffff, 0x0000000d,
	0x91f8, 0xffffffff, 0x000c000b,
	0x91fc, 0xffffffff, 0x000f000e,
	0x9200, 0xffffffff, 0x00090008,
	0x9204, 0xffffffff, 0x000b000a,
	0x9208, 0xffffffff, 0x000c000f,
	0x920c, 0xffffffff, 0x000e000d,
	0x9210, 0xffffffff, 0x00110010,
	0x9214, 0xffffffff, 0x000a0009,
	0x9218, 0xffffffff, 0x000c000b,
	0x921c, 0xffffffff, 0x0000000f,
	0x9220, 0xffffffff, 0x000e000d,
	0x9224, 0xffffffff, 0x00110010,
	0x9228, 0xffffffff, 0x000b000a,
	0x922c, 0xffffffff, 0x000d000c,
	0x9230, 0xffffffff, 0x00000010,
	0x9234, 0xffffffff, 0x000f000e,
	0x9238, 0xffffffff, 0x00120011,
	0x923c, 0xffffffff, 0x000c000b,
	0x9240, 0xffffffff, 0x000e000d,
	0x9244, 0xffffffff, 0x00000011,
	0x9248, 0xffffffff, 0x0010000f,
	0x924c, 0xffffffff, 0x00130012,
	0x9250, 0xffffffff, 0x000d000c,
	0x9254, 0xffffffff, 0x000f000e,
	0x9258, 0xffffffff, 0x00100013,
	0x925c, 0xffffffff, 0x00120011,
	0x9260, 0xffffffff, 0x00150014,
	0x9264, 0xffffffff, 0x000e000d,
	0x9268, 0xffffffff, 0x0010000f,
	0x926c, 0xffffffff, 0x00000013,
	0x9270, 0xffffffff, 0x00120011,
	0x9274, 0xffffffff, 0x00150014,
	0x9278, 0xffffffff, 0x000f000e,
	0x927c, 0xffffffff, 0x00110010,
	0x9280, 0xffffffff, 0x00000014,
	0x9284, 0xffffffff, 0x00130012,
	0x9288, 0xffffffff, 0x00160015,
	0x928c, 0xffffffff, 0x0010000f,
	0x9290, 0xffffffff, 0x00120011,
	0x9294, 0xffffffff, 0x00000015,
	0x9298, 0xffffffff, 0x00140013,
	0x929c, 0xffffffff, 0x00170016,
	0x9150, 0xffffffff, 0x96940200,
	0x8708, 0xffffffff, 0x00900100,
	0xc478, 0xffffffff, 0x00000080,
	0xc404, 0xffffffff, 0x0020003f,
	0x30, 0xffffffff, 0x0000001c,
	0x34, 0x000f0000, 0x000f0000,
	0x160c, 0xffffffff, 0x00000100,
	0x1024, 0xffffffff, 0x00000100,
	0x102c, 0x00000101, 0x00000000,
	0x20a8, 0xffffffff, 0x00000104,
	0x264c, 0x000c0000, 0x000c0000,
	0x2648, 0x000c0000, 0x000c0000,
	0x55e4, 0xff000fff, 0x00000100,
	0x55e8, 0x00000001, 0x00000001,
	0x2f50, 0x00000001, 0x00000001,
	0x30cc, 0xc0000fff, 0x00000104,
	0xc1e4, 0x00000001, 0x00000001,
	0xd0c0, 0xfffffff0, 0x00000100,
	0xd8c0, 0xfffffff0, 0x00000100
};
674
/*
 * Pitcairn MGCG/CGCG clock-gating init sequence:
 * {offset, mask, value} triples.
 */
static const u32 pitcairn_mgcg_cgcg_init[] =
{
	0xc400, 0xffffffff, 0xfffffffc,
	0x802c, 0xffffffff, 0xe0000000,
	0x9a60, 0xffffffff, 0x00000100,
	0x92a4, 0xffffffff, 0x00000100,
	0xc164, 0xffffffff, 0x00000100,
	0x9774, 0xffffffff, 0x00000100,
	0x8984, 0xffffffff, 0x06000100,
	0x8a18, 0xffffffff, 0x00000100,
	0x92a0, 0xffffffff, 0x00000100,
	0xc380, 0xffffffff, 0x00000100,
	0x8b28, 0xffffffff, 0x00000100,
	0x9144, 0xffffffff, 0x00000100,
	0x8d88, 0xffffffff, 0x00000100,
	0x8d8c, 0xffffffff, 0x00000100,
	0x9030, 0xffffffff, 0x00000100,
	0x9034, 0xffffffff, 0x00000100,
	0x9038, 0xffffffff, 0x00000100,
	0x903c, 0xffffffff, 0x00000100,
	0xad80, 0xffffffff, 0x00000100,
	0xac54, 0xffffffff, 0x00000100,
	0x897c, 0xffffffff, 0x06000100,
	0x9868, 0xffffffff, 0x00000100,
	0x9510, 0xffffffff, 0x00000100,
	0xaf04, 0xffffffff, 0x00000100,
	0xae04, 0xffffffff, 0x00000100,
	0x949c, 0xffffffff, 0x00000100,
	0x802c, 0xffffffff, 0xe0000000,
	0x9160, 0xffffffff, 0x00010000,
	0x9164, 0xffffffff, 0x00030002,
	0x9168, 0xffffffff, 0x00040007,
	0x916c, 0xffffffff, 0x00060005,
	0x9170, 0xffffffff, 0x00090008,
	0x9174, 0xffffffff, 0x00020001,
	0x9178, 0xffffffff, 0x00040003,
	0x917c, 0xffffffff, 0x00000007,
	0x9180, 0xffffffff, 0x00060005,
	0x9184, 0xffffffff, 0x00090008,
	0x9188, 0xffffffff, 0x00030002,
	0x918c, 0xffffffff, 0x00050004,
	0x9190, 0xffffffff, 0x00000008,
	0x9194, 0xffffffff, 0x00070006,
	0x9198, 0xffffffff, 0x000a0009,
	0x919c, 0xffffffff, 0x00040003,
	0x91a0, 0xffffffff, 0x00060005,
	0x91a4, 0xffffffff, 0x00000009,
	0x91a8, 0xffffffff, 0x00080007,
	0x91ac, 0xffffffff, 0x000b000a,
	0x91b0, 0xffffffff, 0x00050004,
	0x91b4, 0xffffffff, 0x00070006,
	0x91b8, 0xffffffff, 0x0008000b,
	0x91bc, 0xffffffff, 0x000a0009,
	0x91c0, 0xffffffff, 0x000d000c,
	0x9200, 0xffffffff, 0x00090008,
	0x9204, 0xffffffff, 0x000b000a,
	0x9208, 0xffffffff, 0x000c000f,
	0x920c, 0xffffffff, 0x000e000d,
	0x9210, 0xffffffff, 0x00110010,
	0x9214, 0xffffffff, 0x000a0009,
	0x9218, 0xffffffff, 0x000c000b,
	0x921c, 0xffffffff, 0x0000000f,
	0x9220, 0xffffffff, 0x000e000d,
	0x9224, 0xffffffff, 0x00110010,
	0x9228, 0xffffffff, 0x000b000a,
	0x922c, 0xffffffff, 0x000d000c,
	0x9230, 0xffffffff, 0x00000010,
	0x9234, 0xffffffff, 0x000f000e,
	0x9238, 0xffffffff, 0x00120011,
	0x923c, 0xffffffff, 0x000c000b,
	0x9240, 0xffffffff, 0x000e000d,
	0x9244, 0xffffffff, 0x00000011,
	0x9248, 0xffffffff, 0x0010000f,
	0x924c, 0xffffffff, 0x00130012,
	0x9250, 0xffffffff, 0x000d000c,
	0x9254, 0xffffffff, 0x000f000e,
	0x9258, 0xffffffff, 0x00100013,
	0x925c, 0xffffffff, 0x00120011,
	0x9260, 0xffffffff, 0x00150014,
	0x9150, 0xffffffff, 0x96940200,
	0x8708, 0xffffffff, 0x00900100,
	0xc478, 0xffffffff, 0x00000080,
	0xc404, 0xffffffff, 0x0020003f,
	0x30, 0xffffffff, 0x0000001c,
	0x34, 0x000f0000, 0x000f0000,
	0x160c, 0xffffffff, 0x00000100,
	0x1024, 0xffffffff, 0x00000100,
	0x102c, 0x00000101, 0x00000000,
	0x20a8, 0xffffffff, 0x00000104,
	0x55e4, 0xff000fff, 0x00000100,
	0x55e8, 0x00000001, 0x00000001,
	0x2f50, 0x00000001, 0x00000001,
	0x30cc, 0xc0000fff, 0x00000104,
	0xc1e4, 0x00000001, 0x00000001,
	0xd0c0, 0xfffffff0, 0x00000100,
	0xd8c0, 0xfffffff0, 0x00000100
};
772
/*
 * Verde MGCG/CGCG clock-gating init sequence:
 * {offset, mask, value} triples.
 */
static const u32 verde_mgcg_cgcg_init[] =
{
	0xc400, 0xffffffff, 0xfffffffc,
	0x802c, 0xffffffff, 0xe0000000,
	0x9a60, 0xffffffff, 0x00000100,
	0x92a4, 0xffffffff, 0x00000100,
	0xc164, 0xffffffff, 0x00000100,
	0x9774, 0xffffffff, 0x00000100,
	0x8984, 0xffffffff, 0x06000100,
	0x8a18, 0xffffffff, 0x00000100,
	0x92a0, 0xffffffff, 0x00000100,
	0xc380, 0xffffffff, 0x00000100,
	0x8b28, 0xffffffff, 0x00000100,
	0x9144, 0xffffffff, 0x00000100,
	0x8d88, 0xffffffff, 0x00000100,
	0x8d8c, 0xffffffff, 0x00000100,
	0x9030, 0xffffffff, 0x00000100,
	0x9034, 0xffffffff, 0x00000100,
	0x9038, 0xffffffff, 0x00000100,
	0x903c, 0xffffffff, 0x00000100,
	0xad80, 0xffffffff, 0x00000100,
	0xac54, 0xffffffff, 0x00000100,
	0x897c, 0xffffffff, 0x06000100,
	0x9868, 0xffffffff, 0x00000100,
	0x9510, 0xffffffff, 0x00000100,
	0xaf04, 0xffffffff, 0x00000100,
	0xae04, 0xffffffff, 0x00000100,
	0x949c, 0xffffffff, 0x00000100,
	0x802c, 0xffffffff, 0xe0000000,
	0x9160, 0xffffffff, 0x00010000,
	0x9164, 0xffffffff, 0x00030002,
	0x9168, 0xffffffff, 0x00040007,
	0x916c, 0xffffffff, 0x00060005,
	0x9170, 0xffffffff, 0x00090008,
	0x9174, 0xffffffff, 0x00020001,
	0x9178, 0xffffffff, 0x00040003,
	0x917c, 0xffffffff, 0x00000007,
	0x9180, 0xffffffff, 0x00060005,
	0x9184, 0xffffffff, 0x00090008,
	0x9188, 0xffffffff, 0x00030002,
	0x918c, 0xffffffff, 0x00050004,
	0x9190, 0xffffffff, 0x00000008,
	0x9194, 0xffffffff, 0x00070006,
	0x9198, 0xffffffff, 0x000a0009,
	0x919c, 0xffffffff, 0x00040003,
	0x91a0, 0xffffffff, 0x00060005,
	0x91a4, 0xffffffff, 0x00000009,
	0x91a8, 0xffffffff, 0x00080007,
	0x91ac, 0xffffffff, 0x000b000a,
	0x91b0, 0xffffffff, 0x00050004,
	0x91b4, 0xffffffff, 0x00070006,
	0x91b8, 0xffffffff, 0x0008000b,
	0x91bc, 0xffffffff, 0x000a0009,
	0x91c0, 0xffffffff, 0x000d000c,
	0x9200, 0xffffffff, 0x00090008,
	0x9204, 0xffffffff, 0x000b000a,
	0x9208, 0xffffffff, 0x000c000f,
	0x920c, 0xffffffff, 0x000e000d,
	0x9210, 0xffffffff, 0x00110010,
	0x9214, 0xffffffff, 0x000a0009,
	0x9218, 0xffffffff, 0x000c000b,
	0x921c, 0xffffffff, 0x0000000f,
	0x9220, 0xffffffff, 0x000e000d,
	0x9224, 0xffffffff, 0x00110010,
	0x9228, 0xffffffff, 0x000b000a,
	0x922c, 0xffffffff, 0x000d000c,
	0x9230, 0xffffffff, 0x00000010,
	0x9234, 0xffffffff, 0x000f000e,
	0x9238, 0xffffffff, 0x00120011,
	0x923c, 0xffffffff, 0x000c000b,
	0x9240, 0xffffffff, 0x000e000d,
	0x9244, 0xffffffff, 0x00000011,
	0x9248, 0xffffffff, 0x0010000f,
	0x924c, 0xffffffff, 0x00130012,
	0x9250, 0xffffffff, 0x000d000c,
	0x9254, 0xffffffff, 0x000f000e,
	0x9258, 0xffffffff, 0x00100013,
	0x925c, 0xffffffff, 0x00120011,
	0x9260, 0xffffffff, 0x00150014,
	0x9150, 0xffffffff, 0x96940200,
	0x8708, 0xffffffff, 0x00900100,
	0xc478, 0xffffffff, 0x00000080,
	0xc404, 0xffffffff, 0x0020003f,
	0x30, 0xffffffff, 0x0000001c,
	0x34, 0x000f0000, 0x000f0000,
	0x160c, 0xffffffff, 0x00000100,
	0x1024, 0xffffffff, 0x00000100,
	0x102c, 0x00000101, 0x00000000,
	0x20a8, 0xffffffff, 0x00000104,
	0x264c, 0x000c0000, 0x000c0000,
	0x2648, 0x000c0000, 0x000c0000,
	0x55e4, 0xff000fff, 0x00000100,
	0x55e8, 0x00000001, 0x00000001,
	0x2f50, 0x00000001, 0x00000001,
	0x30cc, 0xc0000fff, 0x00000104,
	0xc1e4, 0x00000001, 0x00000001,
	0xd0c0, 0xfffffff0, 0x00000100,
	0xd8c0, 0xfffffff0, 0x00000100
};
872
/*
 * Oland MGCG/CGCG clock-gating init sequence:
 * {offset, mask, value} triples.
 */
static const u32 oland_mgcg_cgcg_init[] =
{
	0xc400, 0xffffffff, 0xfffffffc,
	0x802c, 0xffffffff, 0xe0000000,
	0x9a60, 0xffffffff, 0x00000100,
	0x92a4, 0xffffffff, 0x00000100,
	0xc164, 0xffffffff, 0x00000100,
	0x9774, 0xffffffff, 0x00000100,
	0x8984, 0xffffffff, 0x06000100,
	0x8a18, 0xffffffff, 0x00000100,
	0x92a0, 0xffffffff, 0x00000100,
	0xc380, 0xffffffff, 0x00000100,
	0x8b28, 0xffffffff, 0x00000100,
	0x9144, 0xffffffff, 0x00000100,
	0x8d88, 0xffffffff, 0x00000100,
	0x8d8c, 0xffffffff, 0x00000100,
	0x9030, 0xffffffff, 0x00000100,
	0x9034, 0xffffffff, 0x00000100,
	0x9038, 0xffffffff, 0x00000100,
	0x903c, 0xffffffff, 0x00000100,
	0xad80, 0xffffffff, 0x00000100,
	0xac54, 0xffffffff, 0x00000100,
	0x897c, 0xffffffff, 0x06000100,
	0x9868, 0xffffffff, 0x00000100,
	0x9510, 0xffffffff, 0x00000100,
	0xaf04, 0xffffffff, 0x00000100,
	0xae04, 0xffffffff, 0x00000100,
	0x949c, 0xffffffff, 0x00000100,
	0x802c, 0xffffffff, 0xe0000000,
	0x9160, 0xffffffff, 0x00010000,
	0x9164, 0xffffffff, 0x00030002,
	0x9168, 0xffffffff, 0x00040007,
	0x916c, 0xffffffff, 0x00060005,
	0x9170, 0xffffffff, 0x00090008,
	0x9174, 0xffffffff, 0x00020001,
	0x9178, 0xffffffff, 0x00040003,
	0x917c, 0xffffffff, 0x00000007,
	0x9180, 0xffffffff, 0x00060005,
	0x9184, 0xffffffff, 0x00090008,
	0x9188, 0xffffffff, 0x00030002,
	0x918c, 0xffffffff, 0x00050004,
	0x9190, 0xffffffff, 0x00000008,
	0x9194, 0xffffffff, 0x00070006,
	0x9198, 0xffffffff, 0x000a0009,
	0x919c, 0xffffffff, 0x00040003,
	0x91a0, 0xffffffff, 0x00060005,
	0x91a4, 0xffffffff, 0x00000009,
	0x91a8, 0xffffffff, 0x00080007,
	0x91ac, 0xffffffff, 0x000b000a,
	0x91b0, 0xffffffff, 0x00050004,
	0x91b4, 0xffffffff, 0x00070006,
	0x91b8, 0xffffffff, 0x0008000b,
	0x91bc, 0xffffffff, 0x000a0009,
	0x91c0, 0xffffffff, 0x000d000c,
	0x91c4, 0xffffffff, 0x00060005,
	0x91c8, 0xffffffff, 0x00080007,
	0x91cc, 0xffffffff, 0x0000000b,
	0x91d0, 0xffffffff, 0x000a0009,
	0x91d4, 0xffffffff, 0x000d000c,
	0x9150, 0xffffffff, 0x96940200,
	0x8708, 0xffffffff, 0x00900100,
	0xc478, 0xffffffff, 0x00000080,
	0xc404, 0xffffffff, 0x0020003f,
	0x30, 0xffffffff, 0x0000001c,
	0x34, 0x000f0000, 0x000f0000,
	0x160c, 0xffffffff, 0x00000100,
	0x1024, 0xffffffff, 0x00000100,
	0x102c, 0x00000101, 0x00000000,
	0x20a8, 0xffffffff, 0x00000104,
	0x264c, 0x000c0000, 0x000c0000,
	0x2648, 0x000c0000, 0x000c0000,
	0x55e4, 0xff000fff, 0x00000100,
	0x55e8, 0x00000001, 0x00000001,
	0x2f50, 0x00000001, 0x00000001,
	0x30cc, 0xc0000fff, 0x00000104,
	0xc1e4, 0x00000001, 0x00000001,
	0xd0c0, 0xfffffff0, 0x00000100,
	0xd8c0, 0xfffffff0, 0x00000100
};
952
/*
 * Medium/coarse grain clockgating (MGCG/CGCG) init sequence for Hainan.
 * Triples of {register offset, write mask, value}, programmed via
 * radeon_program_register_sequence() from si_init_golden_registers().
 */
static const u32 hainan_mgcg_cgcg_init[] =
{
	0xc400, 0xffffffff, 0xfffffffc,
	0x802c, 0xffffffff, 0xe0000000,
	0x9a60, 0xffffffff, 0x00000100,
	0x92a4, 0xffffffff, 0x00000100,
	0xc164, 0xffffffff, 0x00000100,
	0x9774, 0xffffffff, 0x00000100,
	0x8984, 0xffffffff, 0x06000100,
	0x8a18, 0xffffffff, 0x00000100,
	0x92a0, 0xffffffff, 0x00000100,
	0xc380, 0xffffffff, 0x00000100,
	0x8b28, 0xffffffff, 0x00000100,
	0x9144, 0xffffffff, 0x00000100,
	0x8d88, 0xffffffff, 0x00000100,
	0x8d8c, 0xffffffff, 0x00000100,
	0x9030, 0xffffffff, 0x00000100,
	0x9034, 0xffffffff, 0x00000100,
	0x9038, 0xffffffff, 0x00000100,
	0x903c, 0xffffffff, 0x00000100,
	0xad80, 0xffffffff, 0x00000100,
	0xac54, 0xffffffff, 0x00000100,
	0x897c, 0xffffffff, 0x06000100,
	0x9868, 0xffffffff, 0x00000100,
	0x9510, 0xffffffff, 0x00000100,
	0xaf04, 0xffffffff, 0x00000100,
	0xae04, 0xffffffff, 0x00000100,
	0x949c, 0xffffffff, 0x00000100,
	0x802c, 0xffffffff, 0xe0000000,
	0x9160, 0xffffffff, 0x00010000,
	0x9164, 0xffffffff, 0x00030002,
	0x9168, 0xffffffff, 0x00040007,
	0x916c, 0xffffffff, 0x00060005,
	0x9170, 0xffffffff, 0x00090008,
	0x9174, 0xffffffff, 0x00020001,
	0x9178, 0xffffffff, 0x00040003,
	0x917c, 0xffffffff, 0x00000007,
	0x9180, 0xffffffff, 0x00060005,
	0x9184, 0xffffffff, 0x00090008,
	0x9188, 0xffffffff, 0x00030002,
	0x918c, 0xffffffff, 0x00050004,
	0x9190, 0xffffffff, 0x00000008,
	0x9194, 0xffffffff, 0x00070006,
	0x9198, 0xffffffff, 0x000a0009,
	0x919c, 0xffffffff, 0x00040003,
	0x91a0, 0xffffffff, 0x00060005,
	0x91a4, 0xffffffff, 0x00000009,
	0x91a8, 0xffffffff, 0x00080007,
	0x91ac, 0xffffffff, 0x000b000a,
	0x91b0, 0xffffffff, 0x00050004,
	0x91b4, 0xffffffff, 0x00070006,
	0x91b8, 0xffffffff, 0x0008000b,
	0x91bc, 0xffffffff, 0x000a0009,
	0x91c0, 0xffffffff, 0x000d000c,
	0x91c4, 0xffffffff, 0x00060005,
	0x91c8, 0xffffffff, 0x00080007,
	0x91cc, 0xffffffff, 0x0000000b,
	0x91d0, 0xffffffff, 0x000a0009,
	0x91d4, 0xffffffff, 0x000d000c,
	0x9150, 0xffffffff, 0x96940200,
	0x8708, 0xffffffff, 0x00900100,
	0xc478, 0xffffffff, 0x00000080,
	0xc404, 0xffffffff, 0x0020003f,
	0x30, 0xffffffff, 0x0000001c,
	0x34, 0x000f0000, 0x000f0000,
	0x160c, 0xffffffff, 0x00000100,
	0x1024, 0xffffffff, 0x00000100,
	0x20a8, 0xffffffff, 0x00000104,
	0x264c, 0x000c0000, 0x000c0000,
	0x2648, 0x000c0000, 0x000c0000,
	0x2f50, 0x00000001, 0x00000001,
	0x30cc, 0xc0000fff, 0x00000104,
	0xc1e4, 0x00000001, 0x00000001,
	0xd0c0, 0xfffffff0, 0x00000100,
	0xd8c0, 0xfffffff0, 0x00000100
};
1029
1030static u32 verde_pg_init[] =
1031{
1032 0x353c, 0xffffffff, 0x40000,
1033 0x3538, 0xffffffff, 0x200010ff,
1034 0x353c, 0xffffffff, 0x0,
1035 0x353c, 0xffffffff, 0x0,
1036 0x353c, 0xffffffff, 0x0,
1037 0x353c, 0xffffffff, 0x0,
1038 0x353c, 0xffffffff, 0x0,
1039 0x353c, 0xffffffff, 0x7007,
1040 0x3538, 0xffffffff, 0x300010ff,
1041 0x353c, 0xffffffff, 0x0,
1042 0x353c, 0xffffffff, 0x0,
1043 0x353c, 0xffffffff, 0x0,
1044 0x353c, 0xffffffff, 0x0,
1045 0x353c, 0xffffffff, 0x0,
1046 0x353c, 0xffffffff, 0x400000,
1047 0x3538, 0xffffffff, 0x100010ff,
1048 0x353c, 0xffffffff, 0x0,
1049 0x353c, 0xffffffff, 0x0,
1050 0x353c, 0xffffffff, 0x0,
1051 0x353c, 0xffffffff, 0x0,
1052 0x353c, 0xffffffff, 0x0,
1053 0x353c, 0xffffffff, 0x120200,
1054 0x3538, 0xffffffff, 0x500010ff,
1055 0x353c, 0xffffffff, 0x0,
1056 0x353c, 0xffffffff, 0x0,
1057 0x353c, 0xffffffff, 0x0,
1058 0x353c, 0xffffffff, 0x0,
1059 0x353c, 0xffffffff, 0x0,
1060 0x353c, 0xffffffff, 0x1e1e16,
1061 0x3538, 0xffffffff, 0x600010ff,
1062 0x353c, 0xffffffff, 0x0,
1063 0x353c, 0xffffffff, 0x0,
1064 0x353c, 0xffffffff, 0x0,
1065 0x353c, 0xffffffff, 0x0,
1066 0x353c, 0xffffffff, 0x0,
1067 0x353c, 0xffffffff, 0x171f1e,
1068 0x3538, 0xffffffff, 0x700010ff,
1069 0x353c, 0xffffffff, 0x0,
1070 0x353c, 0xffffffff, 0x0,
1071 0x353c, 0xffffffff, 0x0,
1072 0x353c, 0xffffffff, 0x0,
1073 0x353c, 0xffffffff, 0x0,
1074 0x353c, 0xffffffff, 0x0,
1075 0x3538, 0xffffffff, 0x9ff,
1076 0x3500, 0xffffffff, 0x0,
1077 0x3504, 0xffffffff, 0x10000800,
1078 0x3504, 0xffffffff, 0xf,
1079 0x3504, 0xffffffff, 0xf,
1080 0x3500, 0xffffffff, 0x4,
1081 0x3504, 0xffffffff, 0x1000051e,
1082 0x3504, 0xffffffff, 0xffff,
1083 0x3504, 0xffffffff, 0xffff,
1084 0x3500, 0xffffffff, 0x8,
1085 0x3504, 0xffffffff, 0x80500,
1086 0x3500, 0xffffffff, 0x12,
1087 0x3504, 0xffffffff, 0x9050c,
1088 0x3500, 0xffffffff, 0x1d,
1089 0x3504, 0xffffffff, 0xb052c,
1090 0x3500, 0xffffffff, 0x2a,
1091 0x3504, 0xffffffff, 0x1053e,
1092 0x3500, 0xffffffff, 0x2d,
1093 0x3504, 0xffffffff, 0x10546,
1094 0x3500, 0xffffffff, 0x30,
1095 0x3504, 0xffffffff, 0xa054e,
1096 0x3500, 0xffffffff, 0x3c,
1097 0x3504, 0xffffffff, 0x1055f,
1098 0x3500, 0xffffffff, 0x3f,
1099 0x3504, 0xffffffff, 0x10567,
1100 0x3500, 0xffffffff, 0x42,
1101 0x3504, 0xffffffff, 0x1056f,
1102 0x3500, 0xffffffff, 0x45,
1103 0x3504, 0xffffffff, 0x10572,
1104 0x3500, 0xffffffff, 0x48,
1105 0x3504, 0xffffffff, 0x20575,
1106 0x3500, 0xffffffff, 0x4c,
1107 0x3504, 0xffffffff, 0x190801,
1108 0x3500, 0xffffffff, 0x67,
1109 0x3504, 0xffffffff, 0x1082a,
1110 0x3500, 0xffffffff, 0x6a,
1111 0x3504, 0xffffffff, 0x1b082d,
1112 0x3500, 0xffffffff, 0x87,
1113 0x3504, 0xffffffff, 0x310851,
1114 0x3500, 0xffffffff, 0xba,
1115 0x3504, 0xffffffff, 0x891,
1116 0x3500, 0xffffffff, 0xbc,
1117 0x3504, 0xffffffff, 0x893,
1118 0x3500, 0xffffffff, 0xbe,
1119 0x3504, 0xffffffff, 0x20895,
1120 0x3500, 0xffffffff, 0xc2,
1121 0x3504, 0xffffffff, 0x20899,
1122 0x3500, 0xffffffff, 0xc6,
1123 0x3504, 0xffffffff, 0x2089d,
1124 0x3500, 0xffffffff, 0xca,
1125 0x3504, 0xffffffff, 0x8a1,
1126 0x3500, 0xffffffff, 0xcc,
1127 0x3504, 0xffffffff, 0x8a3,
1128 0x3500, 0xffffffff, 0xce,
1129 0x3504, 0xffffffff, 0x308a5,
1130 0x3500, 0xffffffff, 0xd3,
1131 0x3504, 0xffffffff, 0x6d08cd,
1132 0x3500, 0xffffffff, 0x142,
1133 0x3504, 0xffffffff, 0x2000095a,
1134 0x3504, 0xffffffff, 0x1,
1135 0x3500, 0xffffffff, 0x144,
1136 0x3504, 0xffffffff, 0x301f095b,
1137 0x3500, 0xffffffff, 0x165,
1138 0x3504, 0xffffffff, 0xc094d,
1139 0x3500, 0xffffffff, 0x173,
1140 0x3504, 0xffffffff, 0xf096d,
1141 0x3500, 0xffffffff, 0x184,
1142 0x3504, 0xffffffff, 0x15097f,
1143 0x3500, 0xffffffff, 0x19b,
1144 0x3504, 0xffffffff, 0xc0998,
1145 0x3500, 0xffffffff, 0x1a9,
1146 0x3504, 0xffffffff, 0x409a7,
1147 0x3500, 0xffffffff, 0x1af,
1148 0x3504, 0xffffffff, 0xcdc,
1149 0x3500, 0xffffffff, 0x1b1,
1150 0x3504, 0xffffffff, 0x800,
1151 0x3508, 0xffffffff, 0x6c9b2000,
1152 0x3510, 0xfc00, 0x2000,
1153 0x3544, 0xffffffff, 0xfc0,
1154 0x28d4, 0x00000100, 0x100
1155};
1156
1157static void si_init_golden_registers(struct radeon_device *rdev)
1158{
1159 switch (rdev->family) {
1160 case CHIP_TAHITI:
1161 radeon_program_register_sequence(rdev,
1162 tahiti_golden_registers,
1163 (const u32)ARRAY_SIZE(tahiti_golden_registers));
1164 radeon_program_register_sequence(rdev,
1165 tahiti_golden_rlc_registers,
1166 (const u32)ARRAY_SIZE(tahiti_golden_rlc_registers));
1167 radeon_program_register_sequence(rdev,
1168 tahiti_mgcg_cgcg_init,
1169 (const u32)ARRAY_SIZE(tahiti_mgcg_cgcg_init));
1170 radeon_program_register_sequence(rdev,
1171 tahiti_golden_registers2,
1172 (const u32)ARRAY_SIZE(tahiti_golden_registers2));
1173 break;
1174 case CHIP_PITCAIRN:
1175 radeon_program_register_sequence(rdev,
1176 pitcairn_golden_registers,
1177 (const u32)ARRAY_SIZE(pitcairn_golden_registers));
1178 radeon_program_register_sequence(rdev,
1179 pitcairn_golden_rlc_registers,
1180 (const u32)ARRAY_SIZE(pitcairn_golden_rlc_registers));
1181 radeon_program_register_sequence(rdev,
1182 pitcairn_mgcg_cgcg_init,
1183 (const u32)ARRAY_SIZE(pitcairn_mgcg_cgcg_init));
1184 break;
1185 case CHIP_VERDE:
1186 radeon_program_register_sequence(rdev,
1187 verde_golden_registers,
1188 (const u32)ARRAY_SIZE(verde_golden_registers));
1189 radeon_program_register_sequence(rdev,
1190 verde_golden_rlc_registers,
1191 (const u32)ARRAY_SIZE(verde_golden_rlc_registers));
1192 radeon_program_register_sequence(rdev,
1193 verde_mgcg_cgcg_init,
1194 (const u32)ARRAY_SIZE(verde_mgcg_cgcg_init));
1195 radeon_program_register_sequence(rdev,
1196 verde_pg_init,
1197 (const u32)ARRAY_SIZE(verde_pg_init));
1198 break;
1199 case CHIP_OLAND:
1200 radeon_program_register_sequence(rdev,
1201 oland_golden_registers,
1202 (const u32)ARRAY_SIZE(oland_golden_registers));
1203 radeon_program_register_sequence(rdev,
1204 oland_golden_rlc_registers,
1205 (const u32)ARRAY_SIZE(oland_golden_rlc_registers));
1206 radeon_program_register_sequence(rdev,
1207 oland_mgcg_cgcg_init,
1208 (const u32)ARRAY_SIZE(oland_mgcg_cgcg_init));
1209 break;
1210 case CHIP_HAINAN:
1211 radeon_program_register_sequence(rdev,
1212 hainan_golden_registers,
1213 (const u32)ARRAY_SIZE(hainan_golden_registers));
1214 radeon_program_register_sequence(rdev,
1215 hainan_golden_registers2,
1216 (const u32)ARRAY_SIZE(hainan_golden_registers2));
1217 radeon_program_register_sequence(rdev,
1218 hainan_mgcg_cgcg_init,
1219 (const u32)ARRAY_SIZE(hainan_mgcg_cgcg_init));
1220 break;
1221 default:
1222 break;
1223 }
1224}
1225
/* PCIE bus clock, used as a fallback reference by si_get_xclk();
 * presumably in the driver's 10 kHz clock units — TODO confirm. */
#define PCIE_BUS_CLK 10000
#define TCLK (PCIE_BUS_CLK / 10)
1228
1229/**
1230 * si_get_xclk - get the xclk
1231 *
1232 * @rdev: radeon_device pointer
1233 *
1234 * Returns the reference clock used by the gfx engine
1235 * (SI).
1236 */
1237u32 si_get_xclk(struct radeon_device *rdev)
1238{
1239 u32 reference_clock = rdev->clock.spll.reference_freq;
1240 u32 tmp;
1241
1242 tmp = RREG32(CG_CLKPIN_CNTL_2);
1243 if (tmp & MUX_TCLK_TO_XCLK)
1244 return TCLK;
1245
1246 tmp = RREG32(CG_CLKPIN_CNTL);
1247 if (tmp & XTALIN_DIVIDE)
1248 return reference_clock / 4;
1249
1250 return reference_clock;
1251}
1252
1253/* get temperature in millidegrees */
1254int si_get_temp(struct radeon_device *rdev)
1255{
1256 u32 temp;
1257 int actual_temp = 0;
1258
1259 temp = (RREG32(CG_MULT_THERMAL_STATUS) & CTF_TEMP_MASK) >>
1260 CTF_TEMP_SHIFT;
1261
1262 if (temp & 0x200)
1263 actual_temp = 255;
1264 else
1265 actual_temp = temp & 0x1ff;
1266
1267 actual_temp = (actual_temp * 1000);
1268
1269 return actual_temp;
1270}
1271
/* Number of {MC_SEQ_IO_DEBUG_INDEX, MC_SEQ_IO_DEBUG_DATA} pairs per table;
 * all five per-family tables below share this size. */
#define TAHITI_IO_MC_REGS_SIZE 36

/* MC IO debug index/data pairs for Tahiti, written by si_mc_load_microcode()
 * before uploading the MC ucode. */
static const u32 tahiti_io_mc_regs[TAHITI_IO_MC_REGS_SIZE][2] = {
	{0x0000006f, 0x03044000},
	{0x00000070, 0x0480c018},
	{0x00000071, 0x00000040},
	{0x00000072, 0x01000000},
	{0x00000074, 0x000000ff},
	{0x00000075, 0x00143400},
	{0x00000076, 0x08ec0800},
	{0x00000077, 0x040000cc},
	{0x00000079, 0x00000000},
	{0x0000007a, 0x21000409},
	{0x0000007c, 0x00000000},
	{0x0000007d, 0xe8000000},
	{0x0000007e, 0x044408a8},
	{0x0000007f, 0x00000003},
	{0x00000080, 0x00000000},
	{0x00000081, 0x01000000},
	{0x00000082, 0x02000000},
	{0x00000083, 0x00000000},
	{0x00000084, 0xe3f3e4f4},
	{0x00000085, 0x00052024},
	{0x00000087, 0x00000000},
	{0x00000088, 0x66036603},
	{0x00000089, 0x01000000},
	{0x0000008b, 0x1c0a0000},
	{0x0000008c, 0xff010000},
	{0x0000008e, 0xffffefff},
	{0x0000008f, 0xfff3efff},
	{0x00000090, 0xfff3efbf},
	{0x00000094, 0x00101101},
	{0x00000095, 0x00000fff},
	{0x00000096, 0x00116fff},
	{0x00000097, 0x60010000},
	{0x00000098, 0x10010000},
	{0x00000099, 0x00006000},
	{0x0000009a, 0x00001000},
	{0x0000009f, 0x00a77400}
};
1312
/* MC IO debug index/data pairs for Pitcairn, written by si_mc_load_microcode()
 * before uploading the MC ucode. */
static const u32 pitcairn_io_mc_regs[TAHITI_IO_MC_REGS_SIZE][2] = {
	{0x0000006f, 0x03044000},
	{0x00000070, 0x0480c018},
	{0x00000071, 0x00000040},
	{0x00000072, 0x01000000},
	{0x00000074, 0x000000ff},
	{0x00000075, 0x00143400},
	{0x00000076, 0x08ec0800},
	{0x00000077, 0x040000cc},
	{0x00000079, 0x00000000},
	{0x0000007a, 0x21000409},
	{0x0000007c, 0x00000000},
	{0x0000007d, 0xe8000000},
	{0x0000007e, 0x044408a8},
	{0x0000007f, 0x00000003},
	{0x00000080, 0x00000000},
	{0x00000081, 0x01000000},
	{0x00000082, 0x02000000},
	{0x00000083, 0x00000000},
	{0x00000084, 0xe3f3e4f4},
	{0x00000085, 0x00052024},
	{0x00000087, 0x00000000},
	{0x00000088, 0x66036603},
	{0x00000089, 0x01000000},
	{0x0000008b, 0x1c0a0000},
	{0x0000008c, 0xff010000},
	{0x0000008e, 0xffffefff},
	{0x0000008f, 0xfff3efff},
	{0x00000090, 0xfff3efbf},
	{0x00000094, 0x00101101},
	{0x00000095, 0x00000fff},
	{0x00000096, 0x00116fff},
	{0x00000097, 0x60010000},
	{0x00000098, 0x10010000},
	{0x00000099, 0x00006000},
	{0x0000009a, 0x00001000},
	{0x0000009f, 0x00a47400}
};
1351
/* MC IO debug index/data pairs for Verde, written by si_mc_load_microcode()
 * before uploading the MC ucode. */
static const u32 verde_io_mc_regs[TAHITI_IO_MC_REGS_SIZE][2] = {
	{0x0000006f, 0x03044000},
	{0x00000070, 0x0480c018},
	{0x00000071, 0x00000040},
	{0x00000072, 0x01000000},
	{0x00000074, 0x000000ff},
	{0x00000075, 0x00143400},
	{0x00000076, 0x08ec0800},
	{0x00000077, 0x040000cc},
	{0x00000079, 0x00000000},
	{0x0000007a, 0x21000409},
	{0x0000007c, 0x00000000},
	{0x0000007d, 0xe8000000},
	{0x0000007e, 0x044408a8},
	{0x0000007f, 0x00000003},
	{0x00000080, 0x00000000},
	{0x00000081, 0x01000000},
	{0x00000082, 0x02000000},
	{0x00000083, 0x00000000},
	{0x00000084, 0xe3f3e4f4},
	{0x00000085, 0x00052024},
	{0x00000087, 0x00000000},
	{0x00000088, 0x66036603},
	{0x00000089, 0x01000000},
	{0x0000008b, 0x1c0a0000},
	{0x0000008c, 0xff010000},
	{0x0000008e, 0xffffefff},
	{0x0000008f, 0xfff3efff},
	{0x00000090, 0xfff3efbf},
	{0x00000094, 0x00101101},
	{0x00000095, 0x00000fff},
	{0x00000096, 0x00116fff},
	{0x00000097, 0x60010000},
	{0x00000098, 0x10010000},
	{0x00000099, 0x00006000},
	{0x0000009a, 0x00001000},
	{0x0000009f, 0x00a37400}
};
1390
/* MC IO debug index/data pairs for Oland, written by si_mc_load_microcode()
 * before uploading the MC ucode. */
static const u32 oland_io_mc_regs[TAHITI_IO_MC_REGS_SIZE][2] = {
	{0x0000006f, 0x03044000},
	{0x00000070, 0x0480c018},
	{0x00000071, 0x00000040},
	{0x00000072, 0x01000000},
	{0x00000074, 0x000000ff},
	{0x00000075, 0x00143400},
	{0x00000076, 0x08ec0800},
	{0x00000077, 0x040000cc},
	{0x00000079, 0x00000000},
	{0x0000007a, 0x21000409},
	{0x0000007c, 0x00000000},
	{0x0000007d, 0xe8000000},
	{0x0000007e, 0x044408a8},
	{0x0000007f, 0x00000003},
	{0x00000080, 0x00000000},
	{0x00000081, 0x01000000},
	{0x00000082, 0x02000000},
	{0x00000083, 0x00000000},
	{0x00000084, 0xe3f3e4f4},
	{0x00000085, 0x00052024},
	{0x00000087, 0x00000000},
	{0x00000088, 0x66036603},
	{0x00000089, 0x01000000},
	{0x0000008b, 0x1c0a0000},
	{0x0000008c, 0xff010000},
	{0x0000008e, 0xffffefff},
	{0x0000008f, 0xfff3efff},
	{0x00000090, 0xfff3efbf},
	{0x00000094, 0x00101101},
	{0x00000095, 0x00000fff},
	{0x00000096, 0x00116fff},
	{0x00000097, 0x60010000},
	{0x00000098, 0x10010000},
	{0x00000099, 0x00006000},
	{0x0000009a, 0x00001000},
	{0x0000009f, 0x00a17730}
};
1429
/* MC IO debug index/data pairs for Hainan, written by si_mc_load_microcode()
 * before uploading the MC ucode. */
static const u32 hainan_io_mc_regs[TAHITI_IO_MC_REGS_SIZE][2] = {
	{0x0000006f, 0x03044000},
	{0x00000070, 0x0480c018},
	{0x00000071, 0x00000040},
	{0x00000072, 0x01000000},
	{0x00000074, 0x000000ff},
	{0x00000075, 0x00143400},
	{0x00000076, 0x08ec0800},
	{0x00000077, 0x040000cc},
	{0x00000079, 0x00000000},
	{0x0000007a, 0x21000409},
	{0x0000007c, 0x00000000},
	{0x0000007d, 0xe8000000},
	{0x0000007e, 0x044408a8},
	{0x0000007f, 0x00000003},
	{0x00000080, 0x00000000},
	{0x00000081, 0x01000000},
	{0x00000082, 0x02000000},
	{0x00000083, 0x00000000},
	{0x00000084, 0xe3f3e4f4},
	{0x00000085, 0x00052024},
	{0x00000087, 0x00000000},
	{0x00000088, 0x66036603},
	{0x00000089, 0x01000000},
	{0x0000008b, 0x1c0a0000},
	{0x0000008c, 0xff010000},
	{0x0000008e, 0xffffefff},
	{0x0000008f, 0xfff3efff},
	{0x00000090, 0xfff3efbf},
	{0x00000094, 0x00101101},
	{0x00000095, 0x00000fff},
	{0x00000096, 0x00116fff},
	{0x00000097, 0x60010000},
	{0x00000098, 0x10010000},
	{0x00000099, 0x00006000},
	{0x0000009a, 0x00001000},
	{0x0000009f, 0x00a07730}
};
1468
1469/* ucode loading */
1470int si_mc_load_microcode(struct radeon_device *rdev)
1471{
1472 const __be32 *fw_data;
1473 u32 running, blackout = 0;
1474 const u32 *io_mc_regs;
1475 int i, regs_size, ucode_size;
1476
1477 if (!rdev->mc_fw)
1478 return -EINVAL;
1479
1480 ucode_size = rdev->mc_fw->size / 4;
1481
1482 switch (rdev->family) {
1483 case CHIP_TAHITI:
1484 io_mc_regs = &tahiti_io_mc_regs[0][0];
1485 regs_size = TAHITI_IO_MC_REGS_SIZE;
1486 break;
1487 case CHIP_PITCAIRN:
1488 io_mc_regs = &pitcairn_io_mc_regs[0][0];
1489 regs_size = TAHITI_IO_MC_REGS_SIZE;
1490 break;
1491 case CHIP_VERDE:
1492 default:
1493 io_mc_regs = &verde_io_mc_regs[0][0];
1494 regs_size = TAHITI_IO_MC_REGS_SIZE;
1495 break;
1496 case CHIP_OLAND:
1497 io_mc_regs = &oland_io_mc_regs[0][0];
1498 regs_size = TAHITI_IO_MC_REGS_SIZE;
1499 break;
1500 case CHIP_HAINAN:
1501 io_mc_regs = &hainan_io_mc_regs[0][0];
1502 regs_size = TAHITI_IO_MC_REGS_SIZE;
1503 break;
1504 }
1505
1506 running = RREG32(MC_SEQ_SUP_CNTL) & RUN_MASK;
1507
1508 if (running == 0) {
1509 if (running) {
1510 blackout = RREG32(MC_SHARED_BLACKOUT_CNTL);
1511 WREG32(MC_SHARED_BLACKOUT_CNTL, blackout | 1);
1512 }
1513
1514 /* reset the engine and set to writable */
1515 WREG32(MC_SEQ_SUP_CNTL, 0x00000008);
1516 WREG32(MC_SEQ_SUP_CNTL, 0x00000010);
1517
1518 /* load mc io regs */
1519 for (i = 0; i < regs_size; i++) {
1520 WREG32(MC_SEQ_IO_DEBUG_INDEX, io_mc_regs[(i << 1)]);
1521 WREG32(MC_SEQ_IO_DEBUG_DATA, io_mc_regs[(i << 1) + 1]);
1522 }
1523 /* load the MC ucode */
1524 fw_data = (const __be32 *)rdev->mc_fw->data;
1525 for (i = 0; i < ucode_size; i++)
1526 WREG32(MC_SEQ_SUP_PGM, be32_to_cpup(fw_data++));
1527
1528 /* put the engine back into the active state */
1529 WREG32(MC_SEQ_SUP_CNTL, 0x00000008);
1530 WREG32(MC_SEQ_SUP_CNTL, 0x00000004);
1531 WREG32(MC_SEQ_SUP_CNTL, 0x00000001);
1532
1533 /* wait for training to complete */
1534 for (i = 0; i < rdev->usec_timeout; i++) {
1535 if (RREG32(MC_SEQ_TRAIN_WAKEUP_CNTL) & TRAIN_DONE_D0)
1536 break;
1537 udelay(1);
1538 }
1539 for (i = 0; i < rdev->usec_timeout; i++) {
1540 if (RREG32(MC_SEQ_TRAIN_WAKEUP_CNTL) & TRAIN_DONE_D1)
1541 break;
1542 udelay(1);
1543 }
1544
1545 if (running)
1546 WREG32(MC_SHARED_BLACKOUT_CNTL, blackout);
1547 }
1548
1549 return 0;
1550}
1551
1552static int si_init_microcode(struct radeon_device *rdev)
1553{
1554 const char *chip_name;
1555 const char *rlc_chip_name;
1556 size_t pfp_req_size, me_req_size, ce_req_size, rlc_req_size, mc_req_size;
1557 size_t smc_req_size, mc2_req_size;
1558 char fw_name[30];
1559 int err;
1560
1561 DRM_DEBUG("\n");
1562
1563 switch (rdev->family) {
1564 case CHIP_TAHITI:
1565 chip_name = "TAHITI";
1566 rlc_chip_name = "TAHITI";
1567 pfp_req_size = SI_PFP_UCODE_SIZE * 4;
1568 me_req_size = SI_PM4_UCODE_SIZE * 4;
1569 ce_req_size = SI_CE_UCODE_SIZE * 4;
1570 rlc_req_size = SI_RLC_UCODE_SIZE * 4;
1571 mc_req_size = SI_MC_UCODE_SIZE * 4;
1572 mc2_req_size = TAHITI_MC_UCODE_SIZE * 4;
1573#ifdef __NetBSD__ /* XXX ALIGN means something else. */
1574 smc_req_size = round_up(TAHITI_SMC_UCODE_SIZE, 4);
1575#else
1576 smc_req_size = ALIGN(TAHITI_SMC_UCODE_SIZE, 4);
1577#endif
1578 break;
1579 case CHIP_PITCAIRN:
1580 chip_name = "PITCAIRN";
1581 rlc_chip_name = "PITCAIRN";
1582 pfp_req_size = SI_PFP_UCODE_SIZE * 4;
1583 me_req_size = SI_PM4_UCODE_SIZE * 4;
1584 ce_req_size = SI_CE_UCODE_SIZE * 4;
1585 rlc_req_size = SI_RLC_UCODE_SIZE * 4;
1586 mc_req_size = SI_MC_UCODE_SIZE * 4;
1587 mc2_req_size = PITCAIRN_MC_UCODE_SIZE * 4;
1588#ifdef __NetBSD__ /* XXX ALIGN means something else. */
1589 smc_req_size = round_up(PITCAIRN_SMC_UCODE_SIZE, 4);
1590#else
1591 smc_req_size = ALIGN(PITCAIRN_SMC_UCODE_SIZE, 4);
1592#endif
1593 break;
1594 case CHIP_VERDE:
1595 chip_name = "VERDE";
1596 rlc_chip_name = "VERDE";
1597 pfp_req_size = SI_PFP_UCODE_SIZE * 4;
1598 me_req_size = SI_PM4_UCODE_SIZE * 4;
1599 ce_req_size = SI_CE_UCODE_SIZE * 4;
1600 rlc_req_size = SI_RLC_UCODE_SIZE * 4;
1601 mc_req_size = SI_MC_UCODE_SIZE * 4;
1602 mc2_req_size = VERDE_MC_UCODE_SIZE * 4;
1603#ifdef __NetBSD__ /* XXX ALIGN means something else. */
1604 smc_req_size = round_up(VERDE_SMC_UCODE_SIZE, 4);
1605#else
1606 smc_req_size = ALIGN(VERDE_SMC_UCODE_SIZE, 4);
1607#endif
1608 break;
1609 case CHIP_OLAND:
1610 chip_name = "OLAND";
1611 rlc_chip_name = "OLAND";
1612 pfp_req_size = SI_PFP_UCODE_SIZE * 4;
1613 me_req_size = SI_PM4_UCODE_SIZE * 4;
1614 ce_req_size = SI_CE_UCODE_SIZE * 4;
1615 rlc_req_size = SI_RLC_UCODE_SIZE * 4;
1616 mc_req_size = mc2_req_size = OLAND_MC_UCODE_SIZE * 4;
1617#ifdef __NetBSD__ /* XXX ALIGN means something else. */
1618 smc_req_size = round_up(OLAND_SMC_UCODE_SIZE, 4);
1619#else
1620 smc_req_size = ALIGN(OLAND_SMC_UCODE_SIZE, 4);
1621#endif
1622 break;
1623 case CHIP_HAINAN:
1624 chip_name = "HAINAN";
1625 rlc_chip_name = "HAINAN";
1626 pfp_req_size = SI_PFP_UCODE_SIZE * 4;
1627 me_req_size = SI_PM4_UCODE_SIZE * 4;
1628 ce_req_size = SI_CE_UCODE_SIZE * 4;
1629 rlc_req_size = SI_RLC_UCODE_SIZE * 4;
1630 mc_req_size = mc2_req_size = OLAND_MC_UCODE_SIZE * 4;
1631#ifdef __NetBSD__ /* XXX ALIGN means something else. */
1632 smc_req_size = round_up(HAINAN_SMC_UCODE_SIZE, 4);
1633#else
1634 smc_req_size = ALIGN(HAINAN_SMC_UCODE_SIZE, 4);
1635#endif
1636 break;
1637 default: BUG();
1638 }
1639
1640 DRM_INFO("Loading %s Microcode\n", chip_name);
1641
1642 snprintf(fw_name, sizeof(fw_name), "radeon/%s_pfp.bin", chip_name);
1643 err = request_firmware(&rdev->pfp_fw, fw_name, rdev->dev);
1644 if (err)
1645 goto out;
1646 if (rdev->pfp_fw->size != pfp_req_size) {
1647 printk(KERN_ERR
1648 "si_cp: Bogus length %zu in firmware \"%s\"\n",
1649 rdev->pfp_fw->size, fw_name);
1650 err = -EINVAL;
1651 goto out;
1652 }
1653
1654 snprintf(fw_name, sizeof(fw_name), "radeon/%s_me.bin", chip_name);
1655 err = request_firmware(&rdev->me_fw, fw_name, rdev->dev);
1656 if (err)
1657 goto out;
1658 if (rdev->me_fw->size != me_req_size) {
1659 printk(KERN_ERR
1660 "si_cp: Bogus length %zu in firmware \"%s\"\n",
1661 rdev->me_fw->size, fw_name);
1662 err = -EINVAL;
1663 }
1664
1665 snprintf(fw_name, sizeof(fw_name), "radeon/%s_ce.bin", chip_name);
1666 err = request_firmware(&rdev->ce_fw, fw_name, rdev->dev);
1667 if (err)
1668 goto out;
1669 if (rdev->ce_fw->size != ce_req_size) {
1670 printk(KERN_ERR
1671 "si_cp: Bogus length %zu in firmware \"%s\"\n",
1672 rdev->ce_fw->size, fw_name);
1673 err = -EINVAL;
1674 }
1675
1676 snprintf(fw_name, sizeof(fw_name), "radeon/%s_rlc.bin", rlc_chip_name);
1677 err = request_firmware(&rdev->rlc_fw, fw_name, rdev->dev);
1678 if (err)
1679 goto out;
1680 if (rdev->rlc_fw->size != rlc_req_size) {
1681 printk(KERN_ERR
1682 "si_rlc: Bogus length %zu in firmware \"%s\"\n",
1683 rdev->rlc_fw->size, fw_name);
1684 err = -EINVAL;
1685 }
1686
1687 snprintf(fw_name, sizeof(fw_name), "radeon/%s_mc2.bin", chip_name);
1688 err = request_firmware(&rdev->mc_fw, fw_name, rdev->dev);
1689 if (err) {
1690 snprintf(fw_name, sizeof(fw_name), "radeon/%s_mc.bin", chip_name);
1691 err = request_firmware(&rdev->mc_fw, fw_name, rdev->dev);
1692 if (err)
1693 goto out;
1694 }
1695 if ((rdev->mc_fw->size != mc_req_size) &&
1696 (rdev->mc_fw->size != mc2_req_size)) {
1697 printk(KERN_ERR
1698 "si_mc: Bogus length %zu in firmware \"%s\"\n",
1699 rdev->mc_fw->size, fw_name);
1700 err = -EINVAL;
1701 }
1702 DRM_INFO("%s: %zu bytes\n", fw_name, rdev->mc_fw->size);
1703
1704 snprintf(fw_name, sizeof(fw_name), "radeon/%s_smc.bin", chip_name);
1705 err = request_firmware(&rdev->smc_fw, fw_name, rdev->dev);
1706 if (err) {
1707 printk(KERN_ERR
1708 "smc: error loading firmware \"%s\"\n",
1709 fw_name);
1710 release_firmware(rdev->smc_fw);
1711 rdev->smc_fw = NULL;
1712 err = 0;
1713 } else if (rdev->smc_fw->size != smc_req_size) {
1714 printk(KERN_ERR
1715 "si_smc: Bogus length %zu in firmware \"%s\"\n",
1716 rdev->smc_fw->size, fw_name);
1717 err = -EINVAL;
1718 }
1719
1720out:
1721 if (err) {
1722 if (err != -EINVAL)
1723 printk(KERN_ERR
1724 "si_cp: Failed to load firmware \"%s\"\n",
1725 fw_name);
1726 release_firmware(rdev->pfp_fw);
1727 rdev->pfp_fw = NULL;
1728 release_firmware(rdev->me_fw);
1729 rdev->me_fw = NULL;
1730 release_firmware(rdev->ce_fw);
1731 rdev->ce_fw = NULL;
1732 release_firmware(rdev->rlc_fw);
1733 rdev->rlc_fw = NULL;
1734 release_firmware(rdev->mc_fw);
1735 rdev->mc_fw = NULL;
1736 release_firmware(rdev->smc_fw);
1737 rdev->smc_fw = NULL;
1738 }
1739 return err;
1740}
1741
1742/* watermark setup */
1743static u32 dce6_line_buffer_adjust(struct radeon_device *rdev,
1744 struct radeon_crtc *radeon_crtc,
1745 struct drm_display_mode *mode,
1746 struct drm_display_mode *other_mode)
1747{
1748 u32 tmp, buffer_alloc, i;
1749 u32 pipe_offset = radeon_crtc->crtc_id * 0x20;
1750 /*
1751 * Line Buffer Setup
1752 * There are 3 line buffers, each one shared by 2 display controllers.
1753 * DC_LB_MEMORY_SPLIT controls how that line buffer is shared between
1754 * the display controllers. The paritioning is done via one of four
1755 * preset allocations specified in bits 21:20:
1756 * 0 - half lb
1757 * 2 - whole lb, other crtc must be disabled
1758 */
1759 /* this can get tricky if we have two large displays on a paired group
1760 * of crtcs. Ideally for multiple large displays we'd assign them to
1761 * non-linked crtcs for maximum line buffer allocation.
1762 */
1763 if (radeon_crtc->base.enabled && mode) {
1764 if (other_mode) {
1765 tmp = 0; /* 1/2 */
1766 buffer_alloc = 1;
1767 } else {
1768 tmp = 2; /* whole */
1769 buffer_alloc = 2;
1770 }
1771 } else {
1772 tmp = 0;
1773 buffer_alloc = 0;
1774 }
1775
1776 WREG32(DC_LB_MEMORY_SPLIT + radeon_crtc->crtc_offset,
1777 DC_LB_MEMORY_CONFIG(tmp));
1778
1779 WREG32(PIPE0_DMIF_BUFFER_CONTROL + pipe_offset,
1780 DMIF_BUFFERS_ALLOCATED(buffer_alloc));
1781 for (i = 0; i < rdev->usec_timeout; i++) {
1782 if (RREG32(PIPE0_DMIF_BUFFER_CONTROL + pipe_offset) &
1783 DMIF_BUFFERS_ALLOCATED_COMPLETED)
1784 break;
1785 udelay(1);
1786 }
1787
1788 if (radeon_crtc->base.enabled && mode) {
1789 switch (tmp) {
1790 case 0:
1791 default:
1792 return 4096 * 2;
1793 case 2:
1794 return 8192 * 2;
1795 }
1796 }
1797
1798 /* controller not enabled, so no lb used */
1799 return 0;
1800}
1801
1802static u32 si_get_number_of_dram_channels(struct radeon_device *rdev)
1803{
1804 u32 tmp = RREG32(MC_SHARED_CHMAP);
1805
1806 switch ((tmp & NOOFCHAN_MASK) >> NOOFCHAN_SHIFT) {
1807 case 0:
1808 default:
1809 return 1;
1810 case 1:
1811 return 2;
1812 case 2:
1813 return 4;
1814 case 3:
1815 return 8;
1816 case 4:
1817 return 3;
1818 case 5:
1819 return 6;
1820 case 6:
1821 return 10;
1822 case 7:
1823 return 12;
1824 case 8:
1825 return 16;
1826 }
1827}
1828
/* Inputs for the DCE6 display watermark calculations below. */
struct dce6_wm_params {
	u32 dram_channels; /* number of dram channels */
	u32 yclk;          /* bandwidth per dram data pin in kHz */
	u32 sclk;          /* engine clock in kHz */
	u32 disp_clk;      /* display clock in kHz */
	u32 src_width;     /* viewport width */
	u32 active_time;   /* active display time in ns */
	u32 blank_time;    /* blank time in ns */
	bool interlaced;    /* mode is interlaced */
	fixed20_12 vsc;    /* vertical scale ratio */
	u32 num_heads;     /* number of active crtcs */
	u32 bytes_per_pixel; /* bytes per pixel display + overlay */
	u32 lb_size;       /* line buffer allocated to pipe */
	u32 vtaps;         /* vertical scaler taps */
};
1844
1845static u32 dce6_dram_bandwidth(struct dce6_wm_params *wm)
1846{
1847 /* Calculate raw DRAM Bandwidth */
1848 fixed20_12 dram_efficiency; /* 0.7 */
1849 fixed20_12 yclk, dram_channels, bandwidth;
1850 fixed20_12 a;
1851
1852 a.full = dfixed_const(1000);
1853 yclk.full = dfixed_const(wm->yclk);
1854 yclk.full = dfixed_div(yclk, a);
1855 dram_channels.full = dfixed_const(wm->dram_channels * 4);
1856 a.full = dfixed_const(10);
1857 dram_efficiency.full = dfixed_const(7);
1858 dram_efficiency.full = dfixed_div(dram_efficiency, a);
1859 bandwidth.full = dfixed_mul(dram_channels, yclk);
1860 bandwidth.full = dfixed_mul(bandwidth, dram_efficiency);
1861
1862 return dfixed_trunc(bandwidth);
1863}
1864
1865static u32 dce6_dram_bandwidth_for_display(struct dce6_wm_params *wm)
1866{
1867 /* Calculate DRAM Bandwidth and the part allocated to display. */
1868 fixed20_12 disp_dram_allocation; /* 0.3 to 0.7 */
1869 fixed20_12 yclk, dram_channels, bandwidth;
1870 fixed20_12 a;
1871
1872 a.full = dfixed_const(1000);
1873 yclk.full = dfixed_const(wm->yclk);
1874 yclk.full = dfixed_div(yclk, a);
1875 dram_channels.full = dfixed_const(wm->dram_channels * 4);
1876 a.full = dfixed_const(10);
1877 disp_dram_allocation.full = dfixed_const(3); /* XXX worse case value 0.3 */
1878 disp_dram_allocation.full = dfixed_div(disp_dram_allocation, a);
1879 bandwidth.full = dfixed_mul(dram_channels, yclk);
1880 bandwidth.full = dfixed_mul(bandwidth, disp_dram_allocation);
1881
1882 return dfixed_trunc(bandwidth);
1883}
1884
1885static u32 dce6_data_return_bandwidth(struct dce6_wm_params *wm)
1886{
1887 /* Calculate the display Data return Bandwidth */
1888 fixed20_12 return_efficiency; /* 0.8 */
1889 fixed20_12 sclk, bandwidth;
1890 fixed20_12 a;
1891
1892 a.full = dfixed_const(1000);
1893 sclk.full = dfixed_const(wm->sclk);
1894 sclk.full = dfixed_div(sclk, a);
1895 a.full = dfixed_const(10);
1896 return_efficiency.full = dfixed_const(8);
1897 return_efficiency.full = dfixed_div(return_efficiency, a);
1898 a.full = dfixed_const(32);
1899 bandwidth.full = dfixed_mul(a, sclk);
1900 bandwidth.full = dfixed_mul(bandwidth, return_efficiency);
1901
1902 return dfixed_trunc(bandwidth);
1903}
1904
/* DMIF request size in bytes; constant 32 for this hardware generation. */
static u32 dce6_get_dmif_bytes_per_request(struct dce6_wm_params *wm)
{
	return 32;
}
1909
1910static u32 dce6_dmif_request_bandwidth(struct dce6_wm_params *wm)
1911{
1912 /* Calculate the DMIF Request Bandwidth */
1913 fixed20_12 disp_clk_request_efficiency; /* 0.8 */
1914 fixed20_12 disp_clk, sclk, bandwidth;
1915 fixed20_12 a, b1, b2;
1916 u32 min_bandwidth;
1917
1918 a.full = dfixed_const(1000);
1919 disp_clk.full = dfixed_const(wm->disp_clk);
1920 disp_clk.full = dfixed_div(disp_clk, a);
1921 a.full = dfixed_const(dce6_get_dmif_bytes_per_request(wm) / 2);
1922 b1.full = dfixed_mul(a, disp_clk);
1923
1924 a.full = dfixed_const(1000);
1925 sclk.full = dfixed_const(wm->sclk);
1926 sclk.full = dfixed_div(sclk, a);
1927 a.full = dfixed_const(dce6_get_dmif_bytes_per_request(wm));
1928 b2.full = dfixed_mul(a, sclk);
1929
1930 a.full = dfixed_const(10);
1931 disp_clk_request_efficiency.full = dfixed_const(8);
1932 disp_clk_request_efficiency.full = dfixed_div(disp_clk_request_efficiency, a);
1933
1934 min_bandwidth = min(dfixed_trunc(b1), dfixed_trunc(b2));
1935
1936 a.full = dfixed_const(min_bandwidth);
1937 bandwidth.full = dfixed_mul(a, disp_clk_request_efficiency);
1938
1939 return dfixed_trunc(bandwidth);
1940}
1941
1942static u32 dce6_available_bandwidth(struct dce6_wm_params *wm)
1943{
1944 /* Calculate the Available bandwidth. Display can use this temporarily but not in average. */
1945 u32 dram_bandwidth = dce6_dram_bandwidth(wm);
1946 u32 data_return_bandwidth = dce6_data_return_bandwidth(wm);
1947 u32 dmif_req_bandwidth = dce6_dmif_request_bandwidth(wm);
1948
1949 return min(dram_bandwidth, min(data_return_bandwidth, dmif_req_bandwidth));
1950}
1951
1952static u32 dce6_average_bandwidth(struct dce6_wm_params *wm)
1953{
1954 /* Calculate the display mode Average Bandwidth
1955 * DisplayMode should contain the source and destination dimensions,
1956 * timing, etc.
1957 */
1958 fixed20_12 bpp;
1959 fixed20_12 line_time;
1960 fixed20_12 src_width;
1961 fixed20_12 bandwidth;
1962 fixed20_12 a;
1963
1964 a.full = dfixed_const(1000);
1965 line_time.full = dfixed_const(wm->active_time + wm->blank_time);
1966 line_time.full = dfixed_div(line_time, a);
1967 bpp.full = dfixed_const(wm->bytes_per_pixel);
1968 src_width.full = dfixed_const(wm->src_width);
1969 bandwidth.full = dfixed_mul(src_width, bpp);
1970 bandwidth.full = dfixed_mul(bandwidth, wm->vsc);
1971 bandwidth.full = dfixed_div(bandwidth, line_time);
1972
1973 return dfixed_trunc(bandwidth);
1974}
1975
/* Worst-case latency the line buffer must hide (ns), plus any extra time
 * needed to fill a line of the line buffer beyond the active period.
 */
static u32 dce6_latency_watermark(struct dce6_wm_params *wm)
{
	/* First calculate the latency in ns */
	u32 mc_latency = 2000; /* 2000 ns. */
	u32 available_bandwidth = dce6_available_bandwidth(wm);
	/* NOTE(review): these divisions execute before the num_heads == 0
	 * guard below; assumes available_bandwidth is non-zero - confirm. */
	u32 worst_chunk_return_time = (512 * 8 * 1000) / available_bandwidth;
	u32 cursor_line_pair_return_time = (128 * 4 * 1000) / available_bandwidth;
	u32 dc_latency = 40000000 / wm->disp_clk; /* dc pipe latency */
	/* time the other heads spend returning their chunks + cursor data */
	u32 other_heads_data_return_time = ((wm->num_heads + 1) * worst_chunk_return_time) +
		(wm->num_heads * cursor_line_pair_return_time);
	u32 latency = mc_latency + other_heads_data_return_time + dc_latency;
	u32 max_src_lines_per_dst_line, lb_fill_bw, line_fill_time;
	u32 tmp, dmif_size = 12288;
	fixed20_12 a, b, c;

	if (wm->num_heads == 0)
		return 0;

	/* heavily downscaled / multi-tap / interlaced modes fetch more
	 * source lines per destination line */
	a.full = dfixed_const(2);
	b.full = dfixed_const(1);
	if ((wm->vsc.full > a.full) ||
	    ((wm->vsc.full > b.full) && (wm->vtaps >= 3)) ||
	    (wm->vtaps >= 5) ||
	    ((wm->vsc.full >= a.full) && wm->interlaced))
		max_src_lines_per_dst_line = 4;
	else
		max_src_lines_per_dst_line = 2;

	/* a = this head's share of the available bandwidth */
	a.full = dfixed_const(available_bandwidth);
	b.full = dfixed_const(wm->num_heads);
	a.full = dfixed_div(a, b);

	/* b = dmif_size / ((mc_latency + 512) / disp_clk) */
	b.full = dfixed_const(mc_latency + 512);
	c.full = dfixed_const(wm->disp_clk);
	b.full = dfixed_div(b, c);

	c.full = dfixed_const(dmif_size);
	b.full = dfixed_div(c, b);

	tmp = min(dfixed_trunc(a), dfixed_trunc(b));

	/* b = pixel consumption rate: disp_clk / 1000 * bytes_per_pixel */
	b.full = dfixed_const(1000);
	c.full = dfixed_const(wm->disp_clk);
	b.full = dfixed_div(c, b);
	c.full = dfixed_const(wm->bytes_per_pixel);
	b.full = dfixed_mul(b, c);

	/* line buffer fill rate, bounded by both limits above */
	lb_fill_bw = min(tmp, dfixed_trunc(b));

	/* time to fill one destination line's worth of source data */
	a.full = dfixed_const(max_src_lines_per_dst_line * wm->src_width * wm->bytes_per_pixel);
	b.full = dfixed_const(1000);
	c.full = dfixed_const(lb_fill_bw);
	b.full = dfixed_div(c, b);
	a.full = dfixed_div(a, b);
	line_fill_time = dfixed_trunc(a);

	/* if the line fills within the active period, latency alone is the
	 * watermark; otherwise add the overshoot */
	if (line_fill_time < wm->active_time)
		return latency;
	else
		return latency + (line_fill_time - wm->active_time);

}
2038
2039static bool dce6_average_bandwidth_vs_dram_bandwidth_for_display(struct dce6_wm_params *wm)
2040{
2041 if (dce6_average_bandwidth(wm) <=
2042 (dce6_dram_bandwidth_for_display(wm) / wm->num_heads))
2043 return true;
2044 else
2045 return false;
2046};
2047
2048static bool dce6_average_bandwidth_vs_available_bandwidth(struct dce6_wm_params *wm)
2049{
2050 if (dce6_average_bandwidth(wm) <=
2051 (dce6_available_bandwidth(wm) / wm->num_heads))
2052 return true;
2053 else
2054 return false;
2055};
2056
2057static bool dce6_check_latency_hiding(struct dce6_wm_params *wm)
2058{
2059 u32 lb_partitions = wm->lb_size / wm->src_width;
2060 u32 line_time = wm->active_time + wm->blank_time;
2061 u32 latency_tolerant_lines;
2062 u32 latency_hiding;
2063 fixed20_12 a;
2064
2065 a.full = dfixed_const(1);
2066 if (wm->vsc.full > a.full)
2067 latency_tolerant_lines = 1;
2068 else {
2069 if (lb_partitions <= (wm->vtaps + 1))
2070 latency_tolerant_lines = 1;
2071 else
2072 latency_tolerant_lines = 2;
2073 }
2074
2075 latency_hiding = (latency_tolerant_lines * line_time + wm->blank_time);
2076
2077 if (dce6_latency_watermark(wm) <= latency_hiding)
2078 return true;
2079 else
2080 return false;
2081}
2082
/* Compute and program the latency watermarks (A = high clocks, B = low
 * clocks) and the priority marks for one crtc.
 * @lb_size: line buffer allocation for this crtc
 * @num_heads: number of active display heads
 */
static void dce6_program_watermarks(struct radeon_device *rdev,
					 struct radeon_crtc *radeon_crtc,
					 u32 lb_size, u32 num_heads)
{
	struct drm_display_mode *mode = &radeon_crtc->base.mode;
	struct dce6_wm_params wm_low, wm_high;
	u32 dram_channels;
	u32 pixel_period;
	u32 line_time = 0;
	u32 latency_watermark_a = 0, latency_watermark_b = 0;
	u32 priority_a_mark = 0, priority_b_mark = 0;
	u32 priority_a_cnt = PRIORITY_OFF;
	u32 priority_b_cnt = PRIORITY_OFF;
	u32 tmp, arb_control3;
	fixed20_12 a, b, c;

	if (radeon_crtc->base.enabled && num_heads && mode) {
		/* line time clamped to the 16-bit hardware field */
		pixel_period = 1000000 / (u32)mode->clock;
		line_time = min((u32)mode->crtc_htotal * pixel_period, (u32)65535);
		priority_a_cnt = 0;
		priority_b_cnt = 0;

		if (rdev->family == CHIP_ARUBA)
			dram_channels = evergreen_get_number_of_dram_channels(rdev);
		else
			dram_channels = si_get_number_of_dram_channels(rdev);

		/* watermark for high clocks */
		if ((rdev->pm.pm_method == PM_METHOD_DPM) && rdev->pm.dpm_enabled) {
			wm_high.yclk =
				radeon_dpm_get_mclk(rdev, false) * 10;
			wm_high.sclk =
				radeon_dpm_get_sclk(rdev, false) * 10;
		} else {
			wm_high.yclk = rdev->pm.current_mclk * 10;
			wm_high.sclk = rdev->pm.current_sclk * 10;
		}

		wm_high.disp_clk = mode->clock;
		wm_high.src_width = mode->crtc_hdisplay;
		wm_high.active_time = mode->crtc_hdisplay * pixel_period;
		wm_high.blank_time = line_time - wm_high.active_time;
		wm_high.interlaced = false;
		if (mode->flags & DRM_MODE_FLAG_INTERLACE)
			wm_high.interlaced = true;
		wm_high.vsc = radeon_crtc->vsc;
		wm_high.vtaps = 1;
		if (radeon_crtc->rmx_type != RMX_OFF)
			wm_high.vtaps = 2;
		wm_high.bytes_per_pixel = 4; /* XXX: get this from fb config */
		wm_high.lb_size = lb_size;
		wm_high.dram_channels = dram_channels;
		wm_high.num_heads = num_heads;

		/* watermark for low clocks */
		if ((rdev->pm.pm_method == PM_METHOD_DPM) && rdev->pm.dpm_enabled) {
			wm_low.yclk =
				radeon_dpm_get_mclk(rdev, true) * 10;
			wm_low.sclk =
				radeon_dpm_get_sclk(rdev, true) * 10;
		} else {
			wm_low.yclk = rdev->pm.current_mclk * 10;
			wm_low.sclk = rdev->pm.current_sclk * 10;
		}

		wm_low.disp_clk = mode->clock;
		wm_low.src_width = mode->crtc_hdisplay;
		wm_low.active_time = mode->crtc_hdisplay * pixel_period;
		wm_low.blank_time = line_time - wm_low.active_time;
		wm_low.interlaced = false;
		if (mode->flags & DRM_MODE_FLAG_INTERLACE)
			wm_low.interlaced = true;
		wm_low.vsc = radeon_crtc->vsc;
		wm_low.vtaps = 1;
		if (radeon_crtc->rmx_type != RMX_OFF)
			wm_low.vtaps = 2;
		wm_low.bytes_per_pixel = 4; /* XXX: get this from fb config */
		wm_low.lb_size = lb_size;
		wm_low.dram_channels = dram_channels;
		wm_low.num_heads = num_heads;

		/* set for high clocks */
		latency_watermark_a = min(dce6_latency_watermark(&wm_high), (u32)65535);
		/* set for low clocks */
		latency_watermark_b = min(dce6_latency_watermark(&wm_low), (u32)65535);

		/* possibly force display priority to high */
		/* should really do this at mode validation time... */
		if (!dce6_average_bandwidth_vs_dram_bandwidth_for_display(&wm_high) ||
		    !dce6_average_bandwidth_vs_available_bandwidth(&wm_high) ||
		    !dce6_check_latency_hiding(&wm_high) ||
		    (rdev->disp_priority == 2)) {
			DRM_DEBUG_KMS("force priority to high\n");
			priority_a_cnt |= PRIORITY_ALWAYS_ON;
			priority_b_cnt |= PRIORITY_ALWAYS_ON;
		}
		/* either clock set failing its checks forces both marks on */
		if (!dce6_average_bandwidth_vs_dram_bandwidth_for_display(&wm_low) ||
		    !dce6_average_bandwidth_vs_available_bandwidth(&wm_low) ||
		    !dce6_check_latency_hiding(&wm_low) ||
		    (rdev->disp_priority == 2)) {
			DRM_DEBUG_KMS("force priority to high\n");
			priority_a_cnt |= PRIORITY_ALWAYS_ON;
			priority_b_cnt |= PRIORITY_ALWAYS_ON;
		}

		/* priority mark A = watermark_a * clock/1000 * hsc / 1000 / 16 */
		a.full = dfixed_const(1000);
		b.full = dfixed_const(mode->clock);
		b.full = dfixed_div(b, a);
		c.full = dfixed_const(latency_watermark_a);
		c.full = dfixed_mul(c, b);
		c.full = dfixed_mul(c, radeon_crtc->hsc);
		c.full = dfixed_div(c, a);
		a.full = dfixed_const(16);
		c.full = dfixed_div(c, a);
		priority_a_mark = dfixed_trunc(c);
		priority_a_cnt |= priority_a_mark & PRIORITY_MARK_MASK;

		/* priority mark B, same formula with watermark_b */
		a.full = dfixed_const(1000);
		b.full = dfixed_const(mode->clock);
		b.full = dfixed_div(b, a);
		c.full = dfixed_const(latency_watermark_b);
		c.full = dfixed_mul(c, b);
		c.full = dfixed_mul(c, radeon_crtc->hsc);
		c.full = dfixed_div(c, a);
		a.full = dfixed_const(16);
		c.full = dfixed_div(c, a);
		priority_b_mark = dfixed_trunc(c);
		priority_b_cnt |= priority_b_mark & PRIORITY_MARK_MASK;
	}

	/* select wm A */
	arb_control3 = RREG32(DPG_PIPE_ARBITRATION_CONTROL3 + radeon_crtc->crtc_offset);
	tmp = arb_control3;
	tmp &= ~LATENCY_WATERMARK_MASK(3);
	tmp |= LATENCY_WATERMARK_MASK(1);
	WREG32(DPG_PIPE_ARBITRATION_CONTROL3 + radeon_crtc->crtc_offset, tmp);
	WREG32(DPG_PIPE_LATENCY_CONTROL + radeon_crtc->crtc_offset,
	       (LATENCY_LOW_WATERMARK(latency_watermark_a) |
		LATENCY_HIGH_WATERMARK(line_time)));
	/* select wm B */
	tmp = RREG32(DPG_PIPE_ARBITRATION_CONTROL3 + radeon_crtc->crtc_offset);
	tmp &= ~LATENCY_WATERMARK_MASK(3);
	tmp |= LATENCY_WATERMARK_MASK(2);
	WREG32(DPG_PIPE_ARBITRATION_CONTROL3 + radeon_crtc->crtc_offset, tmp);
	WREG32(DPG_PIPE_LATENCY_CONTROL + radeon_crtc->crtc_offset,
	       (LATENCY_LOW_WATERMARK(latency_watermark_b) |
		LATENCY_HIGH_WATERMARK(line_time)));
	/* restore original selection */
	WREG32(DPG_PIPE_ARBITRATION_CONTROL3 + radeon_crtc->crtc_offset, arb_control3);

	/* write the priority marks */
	WREG32(PRIORITY_A_CNT + radeon_crtc->crtc_offset, priority_a_cnt);
	WREG32(PRIORITY_B_CNT + radeon_crtc->crtc_offset, priority_b_cnt);

	/* save values for DPM */
	radeon_crtc->line_time = line_time;
	radeon_crtc->wm_high = latency_watermark_a;
	radeon_crtc->wm_low = latency_watermark_b;
}
2242
2243void dce6_bandwidth_update(struct radeon_device *rdev)
2244{
2245 struct drm_display_mode *mode0 = NULL;
2246 struct drm_display_mode *mode1 = NULL;
2247 u32 num_heads = 0, lb_size;
2248 int i;
2249
2250 radeon_update_display_priority(rdev);
2251
2252 for (i = 0; i < rdev->num_crtc; i++) {
2253 if (rdev->mode_info.crtcs[i]->base.enabled)
2254 num_heads++;
2255 }
2256 for (i = 0; i < rdev->num_crtc; i += 2) {
2257 mode0 = &rdev->mode_info.crtcs[i]->base.mode;
2258 mode1 = &rdev->mode_info.crtcs[i+1]->base.mode;
2259 lb_size = dce6_line_buffer_adjust(rdev, rdev->mode_info.crtcs[i], mode0, mode1);
2260 dce6_program_watermarks(rdev, rdev->mode_info.crtcs[i], lb_size, num_heads);
2261 lb_size = dce6_line_buffer_adjust(rdev, rdev->mode_info.crtcs[i+1], mode1, mode0);
2262 dce6_program_watermarks(rdev, rdev->mode_info.crtcs[i+1], lb_size, num_heads);
2263 }
2264}
2265
2266/*
2267 * Core functions
2268 */
/* Program the 32-entry GB_TILE_MODE table for the asic family and cache
 * each computed value in rdev->config.si.tile_mode_array[].
 */
static void si_tiling_mode_table_init(struct radeon_device *rdev)
{
	const u32 num_tile_mode_states = 32;
	u32 reg_offset, gb_tile_moden, split_equal_to_row_size;

	/* pick the tile split that matches the memory row size */
	switch (rdev->config.si.mem_row_size_in_kb) {
	case 1:
		split_equal_to_row_size = ADDR_SURF_TILE_SPLIT_1KB;
		break;
	case 2:
	default:
		split_equal_to_row_size = ADDR_SURF_TILE_SPLIT_2KB;
		break;
	case 4:
		split_equal_to_row_size = ADDR_SURF_TILE_SPLIT_4KB;
		break;
	}

	/* Tahiti/Pitcairn use the P8_32x32_8x16 pipe config throughout */
	if ((rdev->family == CHIP_TAHITI) ||
	    (rdev->family == CHIP_PITCAIRN)) {
		for (reg_offset = 0; reg_offset < num_tile_mode_states; reg_offset++) {
			switch (reg_offset) {
			case 0: /* non-AA compressed depth or any compressed stencil */
				gb_tile_moden = (ARRAY_MODE(ARRAY_2D_TILED_THIN1) |
						 MICRO_TILE_MODE(ADDR_SURF_DEPTH_MICRO_TILING) |
						 PIPE_CONFIG(ADDR_SURF_P8_32x32_8x16) |
						 TILE_SPLIT(ADDR_SURF_TILE_SPLIT_64B) |
						 NUM_BANKS(ADDR_SURF_16_BANK) |
						 BANK_WIDTH(ADDR_SURF_BANK_WIDTH_1) |
						 BANK_HEIGHT(ADDR_SURF_BANK_HEIGHT_4) |
						 MACRO_TILE_ASPECT(ADDR_SURF_MACRO_ASPECT_2));
				break;
			case 1: /* 2xAA/4xAA compressed depth only */
				gb_tile_moden = (ARRAY_MODE(ARRAY_2D_TILED_THIN1) |
						 MICRO_TILE_MODE(ADDR_SURF_DEPTH_MICRO_TILING) |
						 PIPE_CONFIG(ADDR_SURF_P8_32x32_8x16) |
						 TILE_SPLIT(ADDR_SURF_TILE_SPLIT_128B) |
						 NUM_BANKS(ADDR_SURF_16_BANK) |
						 BANK_WIDTH(ADDR_SURF_BANK_WIDTH_1) |
						 BANK_HEIGHT(ADDR_SURF_BANK_HEIGHT_4) |
						 MACRO_TILE_ASPECT(ADDR_SURF_MACRO_ASPECT_2));
				break;
			case 2: /* 8xAA compressed depth only */
				gb_tile_moden = (ARRAY_MODE(ARRAY_2D_TILED_THIN1) |
						 MICRO_TILE_MODE(ADDR_SURF_DEPTH_MICRO_TILING) |
						 PIPE_CONFIG(ADDR_SURF_P8_32x32_8x16) |
						 TILE_SPLIT(ADDR_SURF_TILE_SPLIT_256B) |
						 NUM_BANKS(ADDR_SURF_16_BANK) |
						 BANK_WIDTH(ADDR_SURF_BANK_WIDTH_1) |
						 BANK_HEIGHT(ADDR_SURF_BANK_HEIGHT_4) |
						 MACRO_TILE_ASPECT(ADDR_SURF_MACRO_ASPECT_2));
				break;
			case 3: /* 2xAA/4xAA compressed depth with stencil (for depth buffer) */
				gb_tile_moden = (ARRAY_MODE(ARRAY_2D_TILED_THIN1) |
						 MICRO_TILE_MODE(ADDR_SURF_DEPTH_MICRO_TILING) |
						 PIPE_CONFIG(ADDR_SURF_P8_32x32_8x16) |
						 TILE_SPLIT(ADDR_SURF_TILE_SPLIT_128B) |
						 NUM_BANKS(ADDR_SURF_16_BANK) |
						 BANK_WIDTH(ADDR_SURF_BANK_WIDTH_1) |
						 BANK_HEIGHT(ADDR_SURF_BANK_HEIGHT_4) |
						 MACRO_TILE_ASPECT(ADDR_SURF_MACRO_ASPECT_2));
				break;
			case 4: /* Maps w/ a dimension less than the 2D macro-tile dimensions (for mipmapped depth textures) */
				gb_tile_moden = (ARRAY_MODE(ARRAY_1D_TILED_THIN1) |
						 MICRO_TILE_MODE(ADDR_SURF_DEPTH_MICRO_TILING) |
						 PIPE_CONFIG(ADDR_SURF_P8_32x32_8x16) |
						 TILE_SPLIT(ADDR_SURF_TILE_SPLIT_64B) |
						 NUM_BANKS(ADDR_SURF_16_BANK) |
						 BANK_WIDTH(ADDR_SURF_BANK_WIDTH_1) |
						 BANK_HEIGHT(ADDR_SURF_BANK_HEIGHT_2) |
						 MACRO_TILE_ASPECT(ADDR_SURF_MACRO_ASPECT_2));
				break;
			case 5: /* Uncompressed 16bpp depth - and stencil buffer allocated with it */
				gb_tile_moden = (ARRAY_MODE(ARRAY_2D_TILED_THIN1) |
						 MICRO_TILE_MODE(ADDR_SURF_DEPTH_MICRO_TILING) |
						 PIPE_CONFIG(ADDR_SURF_P8_32x32_8x16) |
						 TILE_SPLIT(split_equal_to_row_size) |
						 NUM_BANKS(ADDR_SURF_16_BANK) |
						 BANK_WIDTH(ADDR_SURF_BANK_WIDTH_1) |
						 BANK_HEIGHT(ADDR_SURF_BANK_HEIGHT_2) |
						 MACRO_TILE_ASPECT(ADDR_SURF_MACRO_ASPECT_2));
				break;
			case 6: /* Uncompressed 32bpp depth - and stencil buffer allocated with it */
				gb_tile_moden = (ARRAY_MODE(ARRAY_2D_TILED_THIN1) |
						 MICRO_TILE_MODE(ADDR_SURF_DEPTH_MICRO_TILING) |
						 PIPE_CONFIG(ADDR_SURF_P8_32x32_8x16) |
						 TILE_SPLIT(split_equal_to_row_size) |
						 NUM_BANKS(ADDR_SURF_16_BANK) |
						 BANK_WIDTH(ADDR_SURF_BANK_WIDTH_1) |
						 BANK_HEIGHT(ADDR_SURF_BANK_HEIGHT_1) |
						 MACRO_TILE_ASPECT(ADDR_SURF_MACRO_ASPECT_1));
				break;
			case 7: /* Uncompressed 8bpp stencil without depth (drivers typically do not use) */
				gb_tile_moden = (ARRAY_MODE(ARRAY_2D_TILED_THIN1) |
						 MICRO_TILE_MODE(ADDR_SURF_DEPTH_MICRO_TILING) |
						 PIPE_CONFIG(ADDR_SURF_P8_32x32_8x16) |
						 TILE_SPLIT(split_equal_to_row_size) |
						 NUM_BANKS(ADDR_SURF_16_BANK) |
						 BANK_WIDTH(ADDR_SURF_BANK_WIDTH_1) |
						 BANK_HEIGHT(ADDR_SURF_BANK_HEIGHT_4) |
						 MACRO_TILE_ASPECT(ADDR_SURF_MACRO_ASPECT_2));
				break;
			case 8: /* 1D and 1D Array Surfaces */
				gb_tile_moden = (ARRAY_MODE(ARRAY_LINEAR_ALIGNED) |
						 MICRO_TILE_MODE(ADDR_SURF_DISPLAY_MICRO_TILING) |
						 PIPE_CONFIG(ADDR_SURF_P8_32x32_8x16) |
						 TILE_SPLIT(ADDR_SURF_TILE_SPLIT_64B) |
						 NUM_BANKS(ADDR_SURF_16_BANK) |
						 BANK_WIDTH(ADDR_SURF_BANK_WIDTH_1) |
						 BANK_HEIGHT(ADDR_SURF_BANK_HEIGHT_2) |
						 MACRO_TILE_ASPECT(ADDR_SURF_MACRO_ASPECT_2));
				break;
			case 9: /* Displayable maps. */
				gb_tile_moden = (ARRAY_MODE(ARRAY_1D_TILED_THIN1) |
						 MICRO_TILE_MODE(ADDR_SURF_DISPLAY_MICRO_TILING) |
						 PIPE_CONFIG(ADDR_SURF_P8_32x32_8x16) |
						 TILE_SPLIT(ADDR_SURF_TILE_SPLIT_64B) |
						 NUM_BANKS(ADDR_SURF_16_BANK) |
						 BANK_WIDTH(ADDR_SURF_BANK_WIDTH_1) |
						 BANK_HEIGHT(ADDR_SURF_BANK_HEIGHT_2) |
						 MACRO_TILE_ASPECT(ADDR_SURF_MACRO_ASPECT_2));
				break;
			case 10: /* Display 8bpp. */
				gb_tile_moden = (ARRAY_MODE(ARRAY_2D_TILED_THIN1) |
						 MICRO_TILE_MODE(ADDR_SURF_DISPLAY_MICRO_TILING) |
						 PIPE_CONFIG(ADDR_SURF_P8_32x32_8x16) |
						 TILE_SPLIT(ADDR_SURF_TILE_SPLIT_256B) |
						 NUM_BANKS(ADDR_SURF_16_BANK) |
						 BANK_WIDTH(ADDR_SURF_BANK_WIDTH_1) |
						 BANK_HEIGHT(ADDR_SURF_BANK_HEIGHT_4) |
						 MACRO_TILE_ASPECT(ADDR_SURF_MACRO_ASPECT_2));
				break;
			case 11: /* Display 16bpp. */
				gb_tile_moden = (ARRAY_MODE(ARRAY_2D_TILED_THIN1) |
						 MICRO_TILE_MODE(ADDR_SURF_DISPLAY_MICRO_TILING) |
						 PIPE_CONFIG(ADDR_SURF_P8_32x32_8x16) |
						 TILE_SPLIT(ADDR_SURF_TILE_SPLIT_256B) |
						 NUM_BANKS(ADDR_SURF_16_BANK) |
						 BANK_WIDTH(ADDR_SURF_BANK_WIDTH_1) |
						 BANK_HEIGHT(ADDR_SURF_BANK_HEIGHT_2) |
						 MACRO_TILE_ASPECT(ADDR_SURF_MACRO_ASPECT_2));
				break;
			case 12: /* Display 32bpp. */
				gb_tile_moden = (ARRAY_MODE(ARRAY_2D_TILED_THIN1) |
						 MICRO_TILE_MODE(ADDR_SURF_DISPLAY_MICRO_TILING) |
						 PIPE_CONFIG(ADDR_SURF_P8_32x32_8x16) |
						 TILE_SPLIT(ADDR_SURF_TILE_SPLIT_512B) |
						 NUM_BANKS(ADDR_SURF_16_BANK) |
						 BANK_WIDTH(ADDR_SURF_BANK_WIDTH_1) |
						 BANK_HEIGHT(ADDR_SURF_BANK_HEIGHT_1) |
						 MACRO_TILE_ASPECT(ADDR_SURF_MACRO_ASPECT_1));
				break;
			case 13: /* Thin. */
				gb_tile_moden = (ARRAY_MODE(ARRAY_1D_TILED_THIN1) |
						 MICRO_TILE_MODE(ADDR_SURF_THIN_MICRO_TILING) |
						 PIPE_CONFIG(ADDR_SURF_P8_32x32_8x16) |
						 TILE_SPLIT(ADDR_SURF_TILE_SPLIT_64B) |
						 NUM_BANKS(ADDR_SURF_16_BANK) |
						 BANK_WIDTH(ADDR_SURF_BANK_WIDTH_1) |
						 BANK_HEIGHT(ADDR_SURF_BANK_HEIGHT_2) |
						 MACRO_TILE_ASPECT(ADDR_SURF_MACRO_ASPECT_2));
				break;
			case 14: /* Thin 8 bpp. */
				gb_tile_moden = (ARRAY_MODE(ARRAY_2D_TILED_THIN1) |
						 MICRO_TILE_MODE(ADDR_SURF_THIN_MICRO_TILING) |
						 PIPE_CONFIG(ADDR_SURF_P8_32x32_8x16) |
						 TILE_SPLIT(ADDR_SURF_TILE_SPLIT_256B) |
						 NUM_BANKS(ADDR_SURF_16_BANK) |
						 BANK_WIDTH(ADDR_SURF_BANK_WIDTH_1) |
						 BANK_HEIGHT(ADDR_SURF_BANK_HEIGHT_4) |
						 MACRO_TILE_ASPECT(ADDR_SURF_MACRO_ASPECT_1));
				break;
			case 15: /* Thin 16 bpp. */
				gb_tile_moden = (ARRAY_MODE(ARRAY_2D_TILED_THIN1) |
						 MICRO_TILE_MODE(ADDR_SURF_THIN_MICRO_TILING) |
						 PIPE_CONFIG(ADDR_SURF_P8_32x32_8x16) |
						 TILE_SPLIT(ADDR_SURF_TILE_SPLIT_256B) |
						 NUM_BANKS(ADDR_SURF_16_BANK) |
						 BANK_WIDTH(ADDR_SURF_BANK_WIDTH_1) |
						 BANK_HEIGHT(ADDR_SURF_BANK_HEIGHT_2) |
						 MACRO_TILE_ASPECT(ADDR_SURF_MACRO_ASPECT_1));
				break;
			case 16: /* Thin 32 bpp. */
				gb_tile_moden = (ARRAY_MODE(ARRAY_2D_TILED_THIN1) |
						 MICRO_TILE_MODE(ADDR_SURF_THIN_MICRO_TILING) |
						 PIPE_CONFIG(ADDR_SURF_P8_32x32_8x16) |
						 TILE_SPLIT(ADDR_SURF_TILE_SPLIT_512B) |
						 NUM_BANKS(ADDR_SURF_16_BANK) |
						 BANK_WIDTH(ADDR_SURF_BANK_WIDTH_1) |
						 BANK_HEIGHT(ADDR_SURF_BANK_HEIGHT_1) |
						 MACRO_TILE_ASPECT(ADDR_SURF_MACRO_ASPECT_1));
				break;
			case 17: /* Thin 64 bpp. */
				gb_tile_moden = (ARRAY_MODE(ARRAY_2D_TILED_THIN1) |
						 MICRO_TILE_MODE(ADDR_SURF_THIN_MICRO_TILING) |
						 PIPE_CONFIG(ADDR_SURF_P8_32x32_8x16) |
						 TILE_SPLIT(split_equal_to_row_size) |
						 NUM_BANKS(ADDR_SURF_16_BANK) |
						 BANK_WIDTH(ADDR_SURF_BANK_WIDTH_1) |
						 BANK_HEIGHT(ADDR_SURF_BANK_HEIGHT_1) |
						 MACRO_TILE_ASPECT(ADDR_SURF_MACRO_ASPECT_1));
				break;
			case 21: /* 8 bpp PRT. */
				gb_tile_moden = (ARRAY_MODE(ARRAY_2D_TILED_THIN1) |
						 MICRO_TILE_MODE(ADDR_SURF_THIN_MICRO_TILING) |
						 PIPE_CONFIG(ADDR_SURF_P8_32x32_8x16) |
						 TILE_SPLIT(ADDR_SURF_TILE_SPLIT_256B) |
						 NUM_BANKS(ADDR_SURF_16_BANK) |
						 BANK_WIDTH(ADDR_SURF_BANK_WIDTH_2) |
						 BANK_HEIGHT(ADDR_SURF_BANK_HEIGHT_4) |
						 MACRO_TILE_ASPECT(ADDR_SURF_MACRO_ASPECT_2));
				break;
			case 22: /* 16 bpp PRT */
				gb_tile_moden = (ARRAY_MODE(ARRAY_2D_TILED_THIN1) |
						 MICRO_TILE_MODE(ADDR_SURF_THIN_MICRO_TILING) |
						 PIPE_CONFIG(ADDR_SURF_P8_32x32_8x16) |
						 TILE_SPLIT(ADDR_SURF_TILE_SPLIT_256B) |
						 NUM_BANKS(ADDR_SURF_16_BANK) |
						 BANK_WIDTH(ADDR_SURF_BANK_WIDTH_1) |
						 BANK_HEIGHT(ADDR_SURF_BANK_HEIGHT_4) |
						 MACRO_TILE_ASPECT(ADDR_SURF_MACRO_ASPECT_4));
				break;
			case 23: /* 32 bpp PRT */
				gb_tile_moden = (ARRAY_MODE(ARRAY_2D_TILED_THIN1) |
						 MICRO_TILE_MODE(ADDR_SURF_THIN_MICRO_TILING) |
						 PIPE_CONFIG(ADDR_SURF_P8_32x32_8x16) |
						 TILE_SPLIT(ADDR_SURF_TILE_SPLIT_256B) |
						 NUM_BANKS(ADDR_SURF_16_BANK) |
						 BANK_WIDTH(ADDR_SURF_BANK_WIDTH_1) |
						 BANK_HEIGHT(ADDR_SURF_BANK_HEIGHT_2) |
						 MACRO_TILE_ASPECT(ADDR_SURF_MACRO_ASPECT_2));
				break;
			case 24: /* 64 bpp PRT */
				gb_tile_moden = (ARRAY_MODE(ARRAY_2D_TILED_THIN1) |
						 MICRO_TILE_MODE(ADDR_SURF_THIN_MICRO_TILING) |
						 PIPE_CONFIG(ADDR_SURF_P8_32x32_8x16) |
						 TILE_SPLIT(ADDR_SURF_TILE_SPLIT_512B) |
						 NUM_BANKS(ADDR_SURF_16_BANK) |
						 BANK_WIDTH(ADDR_SURF_BANK_WIDTH_1) |
						 BANK_HEIGHT(ADDR_SURF_BANK_HEIGHT_1) |
						 MACRO_TILE_ASPECT(ADDR_SURF_MACRO_ASPECT_2));
				break;
			case 25: /* 128 bpp PRT */
				gb_tile_moden = (ARRAY_MODE(ARRAY_2D_TILED_THIN1) |
						 MICRO_TILE_MODE(ADDR_SURF_THIN_MICRO_TILING) |
						 PIPE_CONFIG(ADDR_SURF_P8_32x32_8x16) |
						 TILE_SPLIT(ADDR_SURF_TILE_SPLIT_1KB) |
						 NUM_BANKS(ADDR_SURF_8_BANK) |
						 BANK_WIDTH(ADDR_SURF_BANK_WIDTH_1) |
						 BANK_HEIGHT(ADDR_SURF_BANK_HEIGHT_1) |
						 MACRO_TILE_ASPECT(ADDR_SURF_MACRO_ASPECT_1));
				break;
			default:
				gb_tile_moden = 0;
				break;
			}
			rdev->config.si.tile_mode_array[reg_offset] = gb_tile_moden;
			WREG32(GB_TILE_MODE0 + (reg_offset * 4), gb_tile_moden);
		}
	/* Verde/Oland/Hainan use the P4_8x16 pipe config (PRT entries keep P8) */
	} else if ((rdev->family == CHIP_VERDE) ||
		   (rdev->family == CHIP_OLAND) ||
		   (rdev->family == CHIP_HAINAN)) {
		for (reg_offset = 0; reg_offset < num_tile_mode_states; reg_offset++) {
			switch (reg_offset) {
			case 0: /* non-AA compressed depth or any compressed stencil */
				gb_tile_moden = (ARRAY_MODE(ARRAY_2D_TILED_THIN1) |
						 MICRO_TILE_MODE(ADDR_SURF_DEPTH_MICRO_TILING) |
						 PIPE_CONFIG(ADDR_SURF_P4_8x16) |
						 TILE_SPLIT(ADDR_SURF_TILE_SPLIT_64B) |
						 NUM_BANKS(ADDR_SURF_16_BANK) |
						 BANK_WIDTH(ADDR_SURF_BANK_WIDTH_1) |
						 BANK_HEIGHT(ADDR_SURF_BANK_HEIGHT_4) |
						 MACRO_TILE_ASPECT(ADDR_SURF_MACRO_ASPECT_4));
				break;
			case 1: /* 2xAA/4xAA compressed depth only */
				gb_tile_moden = (ARRAY_MODE(ARRAY_2D_TILED_THIN1) |
						 MICRO_TILE_MODE(ADDR_SURF_DEPTH_MICRO_TILING) |
						 PIPE_CONFIG(ADDR_SURF_P4_8x16) |
						 TILE_SPLIT(ADDR_SURF_TILE_SPLIT_128B) |
						 NUM_BANKS(ADDR_SURF_16_BANK) |
						 BANK_WIDTH(ADDR_SURF_BANK_WIDTH_1) |
						 BANK_HEIGHT(ADDR_SURF_BANK_HEIGHT_4) |
						 MACRO_TILE_ASPECT(ADDR_SURF_MACRO_ASPECT_4));
				break;
			case 2: /* 8xAA compressed depth only */
				gb_tile_moden = (ARRAY_MODE(ARRAY_2D_TILED_THIN1) |
						 MICRO_TILE_MODE(ADDR_SURF_DEPTH_MICRO_TILING) |
						 PIPE_CONFIG(ADDR_SURF_P4_8x16) |
						 TILE_SPLIT(ADDR_SURF_TILE_SPLIT_256B) |
						 NUM_BANKS(ADDR_SURF_16_BANK) |
						 BANK_WIDTH(ADDR_SURF_BANK_WIDTH_1) |
						 BANK_HEIGHT(ADDR_SURF_BANK_HEIGHT_4) |
						 MACRO_TILE_ASPECT(ADDR_SURF_MACRO_ASPECT_4));
				break;
			case 3: /* 2xAA/4xAA compressed depth with stencil (for depth buffer) */
				gb_tile_moden = (ARRAY_MODE(ARRAY_2D_TILED_THIN1) |
						 MICRO_TILE_MODE(ADDR_SURF_DEPTH_MICRO_TILING) |
						 PIPE_CONFIG(ADDR_SURF_P4_8x16) |
						 TILE_SPLIT(ADDR_SURF_TILE_SPLIT_128B) |
						 NUM_BANKS(ADDR_SURF_16_BANK) |
						 BANK_WIDTH(ADDR_SURF_BANK_WIDTH_1) |
						 BANK_HEIGHT(ADDR_SURF_BANK_HEIGHT_4) |
						 MACRO_TILE_ASPECT(ADDR_SURF_MACRO_ASPECT_4));
				break;
			case 4: /* Maps w/ a dimension less than the 2D macro-tile dimensions (for mipmapped depth textures) */
				gb_tile_moden = (ARRAY_MODE(ARRAY_1D_TILED_THIN1) |
						 MICRO_TILE_MODE(ADDR_SURF_DEPTH_MICRO_TILING) |
						 PIPE_CONFIG(ADDR_SURF_P4_8x16) |
						 TILE_SPLIT(ADDR_SURF_TILE_SPLIT_64B) |
						 NUM_BANKS(ADDR_SURF_16_BANK) |
						 BANK_WIDTH(ADDR_SURF_BANK_WIDTH_1) |
						 BANK_HEIGHT(ADDR_SURF_BANK_HEIGHT_2) |
						 MACRO_TILE_ASPECT(ADDR_SURF_MACRO_ASPECT_2));
				break;
			case 5: /* Uncompressed 16bpp depth - and stencil buffer allocated with it */
				gb_tile_moden = (ARRAY_MODE(ARRAY_2D_TILED_THIN1) |
						 MICRO_TILE_MODE(ADDR_SURF_DEPTH_MICRO_TILING) |
						 PIPE_CONFIG(ADDR_SURF_P4_8x16) |
						 TILE_SPLIT(split_equal_to_row_size) |
						 NUM_BANKS(ADDR_SURF_16_BANK) |
						 BANK_WIDTH(ADDR_SURF_BANK_WIDTH_1) |
						 BANK_HEIGHT(ADDR_SURF_BANK_HEIGHT_2) |
						 MACRO_TILE_ASPECT(ADDR_SURF_MACRO_ASPECT_2));
				break;
			case 6: /* Uncompressed 32bpp depth - and stencil buffer allocated with it */
				gb_tile_moden = (ARRAY_MODE(ARRAY_2D_TILED_THIN1) |
						 MICRO_TILE_MODE(ADDR_SURF_DEPTH_MICRO_TILING) |
						 PIPE_CONFIG(ADDR_SURF_P4_8x16) |
						 TILE_SPLIT(split_equal_to_row_size) |
						 NUM_BANKS(ADDR_SURF_16_BANK) |
						 BANK_WIDTH(ADDR_SURF_BANK_WIDTH_1) |
						 BANK_HEIGHT(ADDR_SURF_BANK_HEIGHT_1) |
						 MACRO_TILE_ASPECT(ADDR_SURF_MACRO_ASPECT_2));
				break;
			case 7: /* Uncompressed 8bpp stencil without depth (drivers typically do not use) */
				gb_tile_moden = (ARRAY_MODE(ARRAY_2D_TILED_THIN1) |
						 MICRO_TILE_MODE(ADDR_SURF_DEPTH_MICRO_TILING) |
						 PIPE_CONFIG(ADDR_SURF_P4_8x16) |
						 TILE_SPLIT(split_equal_to_row_size) |
						 NUM_BANKS(ADDR_SURF_16_BANK) |
						 BANK_WIDTH(ADDR_SURF_BANK_WIDTH_1) |
						 BANK_HEIGHT(ADDR_SURF_BANK_HEIGHT_4) |
						 MACRO_TILE_ASPECT(ADDR_SURF_MACRO_ASPECT_4));
				break;
			case 8: /* 1D and 1D Array Surfaces */
				gb_tile_moden = (ARRAY_MODE(ARRAY_LINEAR_ALIGNED) |
						 MICRO_TILE_MODE(ADDR_SURF_DISPLAY_MICRO_TILING) |
						 PIPE_CONFIG(ADDR_SURF_P4_8x16) |
						 TILE_SPLIT(ADDR_SURF_TILE_SPLIT_64B) |
						 NUM_BANKS(ADDR_SURF_16_BANK) |
						 BANK_WIDTH(ADDR_SURF_BANK_WIDTH_1) |
						 BANK_HEIGHT(ADDR_SURF_BANK_HEIGHT_2) |
						 MACRO_TILE_ASPECT(ADDR_SURF_MACRO_ASPECT_2));
				break;
			case 9: /* Displayable maps. */
				gb_tile_moden = (ARRAY_MODE(ARRAY_1D_TILED_THIN1) |
						 MICRO_TILE_MODE(ADDR_SURF_DISPLAY_MICRO_TILING) |
						 PIPE_CONFIG(ADDR_SURF_P4_8x16) |
						 TILE_SPLIT(ADDR_SURF_TILE_SPLIT_64B) |
						 NUM_BANKS(ADDR_SURF_16_BANK) |
						 BANK_WIDTH(ADDR_SURF_BANK_WIDTH_1) |
						 BANK_HEIGHT(ADDR_SURF_BANK_HEIGHT_2) |
						 MACRO_TILE_ASPECT(ADDR_SURF_MACRO_ASPECT_2));
				break;
			case 10: /* Display 8bpp. */
				gb_tile_moden = (ARRAY_MODE(ARRAY_2D_TILED_THIN1) |
						 MICRO_TILE_MODE(ADDR_SURF_DISPLAY_MICRO_TILING) |
						 PIPE_CONFIG(ADDR_SURF_P4_8x16) |
						 TILE_SPLIT(ADDR_SURF_TILE_SPLIT_256B) |
						 NUM_BANKS(ADDR_SURF_16_BANK) |
						 BANK_WIDTH(ADDR_SURF_BANK_WIDTH_1) |
						 BANK_HEIGHT(ADDR_SURF_BANK_HEIGHT_4) |
						 MACRO_TILE_ASPECT(ADDR_SURF_MACRO_ASPECT_4));
				break;
			case 11: /* Display 16bpp. */
				gb_tile_moden = (ARRAY_MODE(ARRAY_2D_TILED_THIN1) |
						 MICRO_TILE_MODE(ADDR_SURF_DISPLAY_MICRO_TILING) |
						 PIPE_CONFIG(ADDR_SURF_P4_8x16) |
						 TILE_SPLIT(ADDR_SURF_TILE_SPLIT_256B) |
						 NUM_BANKS(ADDR_SURF_16_BANK) |
						 BANK_WIDTH(ADDR_SURF_BANK_WIDTH_1) |
						 BANK_HEIGHT(ADDR_SURF_BANK_HEIGHT_2) |
						 MACRO_TILE_ASPECT(ADDR_SURF_MACRO_ASPECT_2));
				break;
			case 12: /* Display 32bpp. */
				gb_tile_moden = (ARRAY_MODE(ARRAY_2D_TILED_THIN1) |
						 MICRO_TILE_MODE(ADDR_SURF_DISPLAY_MICRO_TILING) |
						 PIPE_CONFIG(ADDR_SURF_P4_8x16) |
						 TILE_SPLIT(ADDR_SURF_TILE_SPLIT_512B) |
						 NUM_BANKS(ADDR_SURF_16_BANK) |
						 BANK_WIDTH(ADDR_SURF_BANK_WIDTH_1) |
						 BANK_HEIGHT(ADDR_SURF_BANK_HEIGHT_1) |
						 MACRO_TILE_ASPECT(ADDR_SURF_MACRO_ASPECT_2));
				break;
			case 13: /* Thin. */
				gb_tile_moden = (ARRAY_MODE(ARRAY_1D_TILED_THIN1) |
						 MICRO_TILE_MODE(ADDR_SURF_THIN_MICRO_TILING) |
						 PIPE_CONFIG(ADDR_SURF_P4_8x16) |
						 TILE_SPLIT(ADDR_SURF_TILE_SPLIT_64B) |
						 NUM_BANKS(ADDR_SURF_16_BANK) |
						 BANK_WIDTH(ADDR_SURF_BANK_WIDTH_1) |
						 BANK_HEIGHT(ADDR_SURF_BANK_HEIGHT_2) |
						 MACRO_TILE_ASPECT(ADDR_SURF_MACRO_ASPECT_2));
				break;
			case 14: /* Thin 8 bpp. */
				gb_tile_moden = (ARRAY_MODE(ARRAY_2D_TILED_THIN1) |
						 MICRO_TILE_MODE(ADDR_SURF_THIN_MICRO_TILING) |
						 PIPE_CONFIG(ADDR_SURF_P4_8x16) |
						 TILE_SPLIT(ADDR_SURF_TILE_SPLIT_256B) |
						 NUM_BANKS(ADDR_SURF_16_BANK) |
						 BANK_WIDTH(ADDR_SURF_BANK_WIDTH_1) |
						 BANK_HEIGHT(ADDR_SURF_BANK_HEIGHT_4) |
						 MACRO_TILE_ASPECT(ADDR_SURF_MACRO_ASPECT_2));
				break;
			case 15: /* Thin 16 bpp. */
				gb_tile_moden = (ARRAY_MODE(ARRAY_2D_TILED_THIN1) |
						 MICRO_TILE_MODE(ADDR_SURF_THIN_MICRO_TILING) |
						 PIPE_CONFIG(ADDR_SURF_P4_8x16) |
						 TILE_SPLIT(ADDR_SURF_TILE_SPLIT_256B) |
						 NUM_BANKS(ADDR_SURF_16_BANK) |
						 BANK_WIDTH(ADDR_SURF_BANK_WIDTH_1) |
						 BANK_HEIGHT(ADDR_SURF_BANK_HEIGHT_2) |
						 MACRO_TILE_ASPECT(ADDR_SURF_MACRO_ASPECT_2));
				break;
			case 16: /* Thin 32 bpp. */
				gb_tile_moden = (ARRAY_MODE(ARRAY_2D_TILED_THIN1) |
						 MICRO_TILE_MODE(ADDR_SURF_THIN_MICRO_TILING) |
						 PIPE_CONFIG(ADDR_SURF_P4_8x16) |
						 TILE_SPLIT(ADDR_SURF_TILE_SPLIT_512B) |
						 NUM_BANKS(ADDR_SURF_16_BANK) |
						 BANK_WIDTH(ADDR_SURF_BANK_WIDTH_1) |
						 BANK_HEIGHT(ADDR_SURF_BANK_HEIGHT_1) |
						 MACRO_TILE_ASPECT(ADDR_SURF_MACRO_ASPECT_2));
				break;
			case 17: /* Thin 64 bpp. */
				gb_tile_moden = (ARRAY_MODE(ARRAY_2D_TILED_THIN1) |
						 MICRO_TILE_MODE(ADDR_SURF_THIN_MICRO_TILING) |
						 PIPE_CONFIG(ADDR_SURF_P4_8x16) |
						 TILE_SPLIT(split_equal_to_row_size) |
						 NUM_BANKS(ADDR_SURF_16_BANK) |
						 BANK_WIDTH(ADDR_SURF_BANK_WIDTH_1) |
						 BANK_HEIGHT(ADDR_SURF_BANK_HEIGHT_1) |
						 MACRO_TILE_ASPECT(ADDR_SURF_MACRO_ASPECT_2));
				break;
			case 21: /* 8 bpp PRT. */
				gb_tile_moden = (ARRAY_MODE(ARRAY_2D_TILED_THIN1) |
						 MICRO_TILE_MODE(ADDR_SURF_THIN_MICRO_TILING) |
						 PIPE_CONFIG(ADDR_SURF_P8_32x32_8x16) |
						 TILE_SPLIT(ADDR_SURF_TILE_SPLIT_256B) |
						 NUM_BANKS(ADDR_SURF_16_BANK) |
						 BANK_WIDTH(ADDR_SURF_BANK_WIDTH_2) |
						 BANK_HEIGHT(ADDR_SURF_BANK_HEIGHT_4) |
						 MACRO_TILE_ASPECT(ADDR_SURF_MACRO_ASPECT_2));
				break;
			case 22: /* 16 bpp PRT */
				gb_tile_moden = (ARRAY_MODE(ARRAY_2D_TILED_THIN1) |
						 MICRO_TILE_MODE(ADDR_SURF_THIN_MICRO_TILING) |
						 PIPE_CONFIG(ADDR_SURF_P8_32x32_8x16) |
						 TILE_SPLIT(ADDR_SURF_TILE_SPLIT_256B) |
						 NUM_BANKS(ADDR_SURF_16_BANK) |
						 BANK_WIDTH(ADDR_SURF_BANK_WIDTH_1) |
						 BANK_HEIGHT(ADDR_SURF_BANK_HEIGHT_4) |
						 MACRO_TILE_ASPECT(ADDR_SURF_MACRO_ASPECT_4));
				break;
			case 23: /* 32 bpp PRT */
				gb_tile_moden = (ARRAY_MODE(ARRAY_2D_TILED_THIN1) |
						 MICRO_TILE_MODE(ADDR_SURF_THIN_MICRO_TILING) |
						 PIPE_CONFIG(ADDR_SURF_P8_32x32_8x16) |
						 TILE_SPLIT(ADDR_SURF_TILE_SPLIT_256B) |
						 NUM_BANKS(ADDR_SURF_16_BANK) |
						 BANK_WIDTH(ADDR_SURF_BANK_WIDTH_1) |
						 BANK_HEIGHT(ADDR_SURF_BANK_HEIGHT_2) |
						 MACRO_TILE_ASPECT(ADDR_SURF_MACRO_ASPECT_2));
				break;
			case 24: /* 64 bpp PRT */
				gb_tile_moden = (ARRAY_MODE(ARRAY_2D_TILED_THIN1) |
						 MICRO_TILE_MODE(ADDR_SURF_THIN_MICRO_TILING) |
						 PIPE_CONFIG(ADDR_SURF_P8_32x32_8x16) |
						 TILE_SPLIT(ADDR_SURF_TILE_SPLIT_512B) |
						 NUM_BANKS(ADDR_SURF_16_BANK) |
						 BANK_WIDTH(ADDR_SURF_BANK_WIDTH_1) |
						 BANK_HEIGHT(ADDR_SURF_BANK_HEIGHT_1) |
						 MACRO_TILE_ASPECT(ADDR_SURF_MACRO_ASPECT_2));
				break;
			case 25: /* 128 bpp PRT */
				gb_tile_moden = (ARRAY_MODE(ARRAY_2D_TILED_THIN1) |
						 MICRO_TILE_MODE(ADDR_SURF_THIN_MICRO_TILING) |
						 PIPE_CONFIG(ADDR_SURF_P8_32x32_8x16) |
						 TILE_SPLIT(ADDR_SURF_TILE_SPLIT_1KB) |
						 NUM_BANKS(ADDR_SURF_8_BANK) |
						 BANK_WIDTH(ADDR_SURF_BANK_WIDTH_1) |
						 BANK_HEIGHT(ADDR_SURF_BANK_HEIGHT_1) |
						 MACRO_TILE_ASPECT(ADDR_SURF_MACRO_ASPECT_1));
				break;
			default:
				gb_tile_moden = 0;
				break;
			}
			rdev->config.si.tile_mode_array[reg_offset] = gb_tile_moden;
			WREG32(GB_TILE_MODE0 + (reg_offset * 4), gb_tile_moden);
		}
	} else
		DRM_ERROR("unknown asic: 0x%x\n", rdev->family);
}
2773
2774static void si_select_se_sh(struct radeon_device *rdev,
2775 u32 se_num, u32 sh_num)
2776{
2777 u32 data = INSTANCE_BROADCAST_WRITES;
2778
2779 if ((se_num == 0xffffffff) && (sh_num == 0xffffffff))
2780 data |= SH_BROADCAST_WRITES | SE_BROADCAST_WRITES;
2781 else if (se_num == 0xffffffff)
2782 data |= SE_BROADCAST_WRITES | SH_INDEX(sh_num);
2783 else if (sh_num == 0xffffffff)
2784 data |= SH_BROADCAST_WRITES | SE_INDEX(se_num);
2785 else
2786 data |= SH_INDEX(sh_num) | SE_INDEX(se_num);
2787 WREG32(GRBM_GFX_INDEX, data);
2788}
2789
2790static u32 si_create_bitmask(u32 bit_width)
2791{
2792 u32 i, mask = 0;
2793
2794 for (i = 0; i < bit_width; i++) {
2795 mask <<= 1;
2796 mask |= 1;
2797 }
2798 return mask;
2799}
2800
/* si_get_cu_enabled - bitmap of active CUs in the selected shader array
 *
 * Must be called after si_select_se_sh() has steered GRBM_GFX_INDEX to a
 * specific SE/SH.  Combines the harvested CUs (CC_GC_SHADER_ARRAY_CONFIG)
 * with the user-disabled CUs (GC_USER_SHADER_ARRAY_CONFIG), then inverts
 * the inactive bitmap and limits it to @cu_per_sh bits.
 */
static u32 si_get_cu_enabled(struct radeon_device *rdev, u32 cu_per_sh)
{
	u32 data, mask;

	data = RREG32(CC_GC_SHADER_ARRAY_CONFIG);
	/* bit 0 presumably gates whether the harvest field is valid —
	 * TODO confirm against the SI register spec */
	if (data & 1)
		data &= INACTIVE_CUS_MASK;
	else
		data = 0;
	data |= RREG32(GC_USER_SHADER_ARRAY_CONFIG);

	data >>= INACTIVE_CUS_SHIFT;

	mask = si_create_bitmask(cu_per_sh);

	/* data holds inactive CUs; invert to report the active ones */
	return ~data & mask;
}
2818
2819static void si_setup_spi(struct radeon_device *rdev,
2820 u32 se_num, u32 sh_per_se,
2821 u32 cu_per_sh)
2822{
2823 int i, j, k;
2824 u32 data, mask, active_cu;
2825
2826 for (i = 0; i < se_num; i++) {
2827 for (j = 0; j < sh_per_se; j++) {
2828 si_select_se_sh(rdev, i, j);
2829 data = RREG32(SPI_STATIC_THREAD_MGMT_3);
2830 active_cu = si_get_cu_enabled(rdev, cu_per_sh);
2831
2832 mask = 1;
2833 for (k = 0; k < 16; k++) {
2834 mask <<= k;
2835 if (active_cu & mask) {
2836 data &= ~mask;
2837 WREG32(SPI_STATIC_THREAD_MGMT_3, data);
2838 break;
2839 }
2840 }
2841 }
2842 }
2843 si_select_se_sh(rdev, 0xffffffff, 0xffffffff);
2844}
2845
/* si_get_rb_disabled - bitmap of disabled render backends for the
 * currently selected shader array
 *
 * Mirrors si_get_cu_enabled(): merge the harvested RBs
 * (CC_RB_BACKEND_DISABLE) with the user-disabled RBs
 * (GC_USER_RB_BACKEND_DISABLE) and mask to the per-SH RB count.
 * Note this returns the *disabled* bitmap (no inversion).
 */
static u32 si_get_rb_disabled(struct radeon_device *rdev,
			      u32 max_rb_num_per_se,
			      u32 sh_per_se)
{
	u32 data, mask;

	data = RREG32(CC_RB_BACKEND_DISABLE);
	/* bit 0 presumably marks the harvest field valid — TODO confirm */
	if (data & 1)
		data &= BACKEND_DISABLE_MASK;
	else
		data = 0;
	data |= RREG32(GC_USER_RB_BACKEND_DISABLE);

	data >>= BACKEND_DISABLE_SHIFT;

	mask = si_create_bitmask(max_rb_num_per_se / sh_per_se);

	return data & mask;
}
2865
/* si_setup_rb - gather render backend state and program the raster config
 *
 * Builds a chip-wide bitmap of disabled RBs (one field of
 * TAHITI_RB_BITMAP_WIDTH_PER_SH bits per shader array), derives the
 * enabled-RB mask, stores it in rdev->config.si.backend_enable_mask for
 * later use, and then programs PA_SC_RASTER_CONFIG per SE so rasterizer
 * packers map only to enabled RBs.
 */
static void si_setup_rb(struct radeon_device *rdev,
			u32 se_num, u32 sh_per_se,
			u32 max_rb_num_per_se)
{
	int i, j;
	u32 data, mask;
	u32 disabled_rbs = 0;
	u32 enabled_rbs = 0;

	/* collect the per-SH disabled bitmaps into one word */
	for (i = 0; i < se_num; i++) {
		for (j = 0; j < sh_per_se; j++) {
			si_select_se_sh(rdev, i, j);
			data = si_get_rb_disabled(rdev, max_rb_num_per_se, sh_per_se);
			disabled_rbs |= data << ((i * sh_per_se + j) * TAHITI_RB_BITMAP_WIDTH_PER_SH);
		}
	}
	si_select_se_sh(rdev, 0xffffffff, 0xffffffff);

	/* invert into an enabled-RB bitmap */
	mask = 1;
	for (i = 0; i < max_rb_num_per_se * se_num; i++) {
		if (!(disabled_rbs & mask))
			enabled_rbs |= mask;
		mask <<= 1;
	}

	rdev->config.si.backend_enable_mask = enabled_rbs;

	/* program the raster config per SE; each 2-bit group of
	 * enabled_rbs describes one shader array's pair of RBs */
	for (i = 0; i < se_num; i++) {
		si_select_se_sh(rdev, i, 0xffffffff);
		data = 0;
		for (j = 0; j < sh_per_se; j++) {
			switch (enabled_rbs & 3) {
			case 1:
				data |= (RASTER_CONFIG_RB_MAP_0 << (i * sh_per_se + j) * 2);
				break;
			case 2:
				data |= (RASTER_CONFIG_RB_MAP_3 << (i * sh_per_se + j) * 2);
				break;
			case 3:
			default:
				data |= (RASTER_CONFIG_RB_MAP_2 << (i * sh_per_se + j) * 2);
				break;
			}
			enabled_rbs >>= 2;
		}
		WREG32(PA_SC_RASTER_CONFIG, data);
	}
	si_select_se_sh(rdev, 0xffffffff, 0xffffffff);
}
2915
/* si_gpu_init - one-time graphics engine initialization
 *
 * Fills in the per-ASIC shader topology in rdev->config.si, programs the
 * tiling/address configuration registers, sets up the render backends,
 * SPI and the 3D engine HW defaults.  Order matters: the MC/addressing
 * registers are configured before the tiling table, RB and SPI setup.
 */
static void si_gpu_init(struct radeon_device *rdev)
{
	u32 gb_addr_config = 0;
	u32 mc_shared_chmap __unused, mc_arb_ramcfg;
	u32 sx_debug_1;
	u32 hdp_host_path_cntl;
	u32 tmp;
	int i, j;

	/* per-ASIC shader engine / CU / cache topology and SC FIFO sizes */
	switch (rdev->family) {
	case CHIP_TAHITI:
		rdev->config.si.max_shader_engines = 2;
		rdev->config.si.max_tile_pipes = 12;
		rdev->config.si.max_cu_per_sh = 8;
		rdev->config.si.max_sh_per_se = 2;
		rdev->config.si.max_backends_per_se = 4;
		rdev->config.si.max_texture_channel_caches = 12;
		rdev->config.si.max_gprs = 256;
		rdev->config.si.max_gs_threads = 32;
		rdev->config.si.max_hw_contexts = 8;

		rdev->config.si.sc_prim_fifo_size_frontend = 0x20;
		rdev->config.si.sc_prim_fifo_size_backend = 0x100;
		rdev->config.si.sc_hiz_tile_fifo_size = 0x30;
		rdev->config.si.sc_earlyz_tile_fifo_size = 0x130;
		gb_addr_config = TAHITI_GB_ADDR_CONFIG_GOLDEN;
		break;
	case CHIP_PITCAIRN:
		rdev->config.si.max_shader_engines = 2;
		rdev->config.si.max_tile_pipes = 8;
		rdev->config.si.max_cu_per_sh = 5;
		rdev->config.si.max_sh_per_se = 2;
		rdev->config.si.max_backends_per_se = 4;
		rdev->config.si.max_texture_channel_caches = 8;
		rdev->config.si.max_gprs = 256;
		rdev->config.si.max_gs_threads = 32;
		rdev->config.si.max_hw_contexts = 8;

		rdev->config.si.sc_prim_fifo_size_frontend = 0x20;
		rdev->config.si.sc_prim_fifo_size_backend = 0x100;
		rdev->config.si.sc_hiz_tile_fifo_size = 0x30;
		rdev->config.si.sc_earlyz_tile_fifo_size = 0x130;
		gb_addr_config = TAHITI_GB_ADDR_CONFIG_GOLDEN;
		break;
	case CHIP_VERDE:
	default:
		/* unknown families fall back to the Verde configuration */
		rdev->config.si.max_shader_engines = 1;
		rdev->config.si.max_tile_pipes = 4;
		rdev->config.si.max_cu_per_sh = 5;
		rdev->config.si.max_sh_per_se = 2;
		rdev->config.si.max_backends_per_se = 4;
		rdev->config.si.max_texture_channel_caches = 4;
		rdev->config.si.max_gprs = 256;
		rdev->config.si.max_gs_threads = 32;
		rdev->config.si.max_hw_contexts = 8;

		rdev->config.si.sc_prim_fifo_size_frontend = 0x20;
		rdev->config.si.sc_prim_fifo_size_backend = 0x40;
		rdev->config.si.sc_hiz_tile_fifo_size = 0x30;
		rdev->config.si.sc_earlyz_tile_fifo_size = 0x130;
		gb_addr_config = VERDE_GB_ADDR_CONFIG_GOLDEN;
		break;
	case CHIP_OLAND:
		rdev->config.si.max_shader_engines = 1;
		rdev->config.si.max_tile_pipes = 4;
		rdev->config.si.max_cu_per_sh = 6;
		rdev->config.si.max_sh_per_se = 1;
		rdev->config.si.max_backends_per_se = 2;
		rdev->config.si.max_texture_channel_caches = 4;
		rdev->config.si.max_gprs = 256;
		rdev->config.si.max_gs_threads = 16;
		rdev->config.si.max_hw_contexts = 8;

		rdev->config.si.sc_prim_fifo_size_frontend = 0x20;
		rdev->config.si.sc_prim_fifo_size_backend = 0x40;
		rdev->config.si.sc_hiz_tile_fifo_size = 0x30;
		rdev->config.si.sc_earlyz_tile_fifo_size = 0x130;
		gb_addr_config = VERDE_GB_ADDR_CONFIG_GOLDEN;
		break;
	case CHIP_HAINAN:
		rdev->config.si.max_shader_engines = 1;
		rdev->config.si.max_tile_pipes = 4;
		rdev->config.si.max_cu_per_sh = 5;
		rdev->config.si.max_sh_per_se = 1;
		rdev->config.si.max_backends_per_se = 1;
		rdev->config.si.max_texture_channel_caches = 2;
		rdev->config.si.max_gprs = 256;
		rdev->config.si.max_gs_threads = 16;
		rdev->config.si.max_hw_contexts = 8;

		rdev->config.si.sc_prim_fifo_size_frontend = 0x20;
		rdev->config.si.sc_prim_fifo_size_backend = 0x40;
		rdev->config.si.sc_hiz_tile_fifo_size = 0x30;
		rdev->config.si.sc_earlyz_tile_fifo_size = 0x130;
		gb_addr_config = HAINAN_GB_ADDR_CONFIG_GOLDEN;
		break;
	}

	/* Initialize HDP (zero the 32 register sets, 0x18 bytes apart) */
	for (i = 0, j = 0; i < 32; i++, j += 0x18) {
		WREG32((0x2c14 + j), 0x00000000);
		WREG32((0x2c18 + j), 0x00000000);
		WREG32((0x2c1c + j), 0x00000000);
		WREG32((0x2c20 + j), 0x00000000);
		WREG32((0x2c24 + j), 0x00000000);
	}

	WREG32(GRBM_CNTL, GRBM_READ_TIMEOUT(0xff));

	evergreen_fix_pci_max_read_req_size(rdev);

	WREG32(BIF_FB_EN, FB_READ_EN | FB_WRITE_EN);

	mc_shared_chmap = RREG32(MC_SHARED_CHMAP);
	mc_arb_ramcfg = RREG32(MC_ARB_RAMCFG);

	rdev->config.si.num_tile_pipes = rdev->config.si.max_tile_pipes;
	rdev->config.si.mem_max_burst_length_bytes = 256;
	/* derive the DRAM row size (in KB) from the column count,
	 * capped at 4KB */
	tmp = (mc_arb_ramcfg & NOOFCOLS_MASK) >> NOOFCOLS_SHIFT;
	rdev->config.si.mem_row_size_in_kb = (4 * (1 << (8 + tmp))) / 1024;
	if (rdev->config.si.mem_row_size_in_kb > 4)
		rdev->config.si.mem_row_size_in_kb = 4;
	/* XXX use MC settings? */
	rdev->config.si.shader_engine_tile_size = 32;
	rdev->config.si.num_gpus = 1;
	rdev->config.si.multi_gpu_tile_size = 64;

	/* fix up row size in the golden GB_ADDR_CONFIG value */
	gb_addr_config &= ~ROW_SIZE_MASK;
	switch (rdev->config.si.mem_row_size_in_kb) {
	case 1:
	default:
		gb_addr_config |= ROW_SIZE(0);
		break;
	case 2:
		gb_addr_config |= ROW_SIZE(1);
		break;
	case 4:
		gb_addr_config |= ROW_SIZE(2);
		break;
	}

	/* setup tiling info dword. gb_addr_config is not adequate since it does
	 * not have bank info, so create a custom tiling dword.
	 * bits 3:0   num_pipes
	 * bits 7:4   num_banks
	 * bits 11:8  group_size
	 * bits 15:12 row_size
	 */
	rdev->config.si.tile_config = 0;
	switch (rdev->config.si.num_tile_pipes) {
	case 1:
		rdev->config.si.tile_config |= (0 << 0);
		break;
	case 2:
		rdev->config.si.tile_config |= (1 << 0);
		break;
	case 4:
		rdev->config.si.tile_config |= (2 << 0);
		break;
	case 8:
	default:
		/* XXX what about 12? */
		rdev->config.si.tile_config |= (3 << 0);
		break;
	}
	switch ((mc_arb_ramcfg & NOOFBANK_MASK) >> NOOFBANK_SHIFT) {
	case 0: /* four banks */
		rdev->config.si.tile_config |= 0 << 4;
		break;
	case 1: /* eight banks */
		rdev->config.si.tile_config |= 1 << 4;
		break;
	case 2: /* sixteen banks */
	default:
		rdev->config.si.tile_config |= 2 << 4;
		break;
	}
	rdev->config.si.tile_config |=
		((gb_addr_config & PIPE_INTERLEAVE_SIZE_MASK) >> PIPE_INTERLEAVE_SIZE_SHIFT) << 8;
	rdev->config.si.tile_config |=
		((gb_addr_config & ROW_SIZE_MASK) >> ROW_SIZE_SHIFT) << 12;

	/* broadcast the address config to every block that needs it */
	WREG32(GB_ADDR_CONFIG, gb_addr_config);
	WREG32(DMIF_ADDR_CONFIG, gb_addr_config);
	WREG32(DMIF_ADDR_CALC, gb_addr_config);
	WREG32(HDP_ADDR_CONFIG, gb_addr_config);
	WREG32(DMA_TILING_CONFIG + DMA0_REGISTER_OFFSET, gb_addr_config);
	WREG32(DMA_TILING_CONFIG + DMA1_REGISTER_OFFSET, gb_addr_config);
	if (rdev->has_uvd) {
		WREG32(UVD_UDEC_ADDR_CONFIG, gb_addr_config);
		WREG32(UVD_UDEC_DB_ADDR_CONFIG, gb_addr_config);
		WREG32(UVD_UDEC_DBW_ADDR_CONFIG, gb_addr_config);
	}

	si_tiling_mode_table_init(rdev);

	si_setup_rb(rdev, rdev->config.si.max_shader_engines,
		    rdev->config.si.max_sh_per_se,
		    rdev->config.si.max_backends_per_se);

	si_setup_spi(rdev, rdev->config.si.max_shader_engines,
		     rdev->config.si.max_sh_per_se,
		     rdev->config.si.max_cu_per_sh);


	/* set HW defaults for 3D engine */
	WREG32(CP_QUEUE_THRESHOLDS, (ROQ_IB1_START(0x16) |
				     ROQ_IB2_START(0x2b)));
	WREG32(CP_MEQ_THRESHOLDS, MEQ1_START(0x30) | MEQ2_START(0x60));

	/* read-modify-write with no change: presumably just a settle/
	 * posting access — TODO confirm */
	sx_debug_1 = RREG32(SX_DEBUG_1);
	WREG32(SX_DEBUG_1, sx_debug_1);

	WREG32(SPI_CONFIG_CNTL_1, VTX_DONE_DELAY(4));

	WREG32(PA_SC_FIFO_SIZE, (SC_FRONTEND_PRIM_FIFO_SIZE(rdev->config.si.sc_prim_fifo_size_frontend) |
				 SC_BACKEND_PRIM_FIFO_SIZE(rdev->config.si.sc_prim_fifo_size_backend) |
				 SC_HIZ_TILE_FIFO_SIZE(rdev->config.si.sc_hiz_tile_fifo_size) |
				 SC_EARLYZ_TILE_FIFO_SIZE(rdev->config.si.sc_earlyz_tile_fifo_size)));

	WREG32(VGT_NUM_INSTANCES, 1);

	WREG32(CP_PERFMON_CNTL, 0);

	WREG32(SQ_CONFIG, 0);

	WREG32(PA_SC_FORCE_EOV_MAX_CNTS, (FORCE_EOV_MAX_CLK_CNT(4095) |
					  FORCE_EOV_MAX_REZ_CNT(255)));

	WREG32(VGT_CACHE_INVALIDATION, CACHE_INVALIDATION(VC_AND_TC) |
	       AUTO_INVLD_EN(ES_AND_GS_AUTO));

	WREG32(VGT_GS_VERTEX_REUSE, 16);
	WREG32(PA_SC_LINE_STIPPLE_STATE, 0);

	/* clear the CB perf counter selects */
	WREG32(CB_PERFCOUNTER0_SELECT0, 0);
	WREG32(CB_PERFCOUNTER0_SELECT1, 0);
	WREG32(CB_PERFCOUNTER1_SELECT0, 0);
	WREG32(CB_PERFCOUNTER1_SELECT1, 0);
	WREG32(CB_PERFCOUNTER2_SELECT0, 0);
	WREG32(CB_PERFCOUNTER2_SELECT1, 0);
	WREG32(CB_PERFCOUNTER3_SELECT0, 0);
	WREG32(CB_PERFCOUNTER3_SELECT1, 0);

	tmp = RREG32(HDP_MISC_CNTL);
	tmp |= HDP_FLUSH_INVALIDATE_CACHE;
	WREG32(HDP_MISC_CNTL, tmp);

	hdp_host_path_cntl = RREG32(HDP_HOST_PATH_CNTL);
	WREG32(HDP_HOST_PATH_CNTL, hdp_host_path_cntl);

	WREG32(PA_CL_ENHANCE, CLIP_VTX_REORDER_ENA | NUM_CLIP_SEQ(3));

	/* let the configuration settle */
	udelay(50);
}
3172
3173/*
3174 * GPU scratch registers helpers function.
3175 */
3176static void si_scratch_init(struct radeon_device *rdev)
3177{
3178 int i;
3179
3180 rdev->scratch.num_reg = 7;
3181 rdev->scratch.reg_base = SCRATCH_REG0;
3182 for (i = 0; i < rdev->scratch.num_reg; i++) {
3183 rdev->scratch.free[i] = true;
3184 rdev->scratch.reg[i] = rdev->scratch.reg_base + (i * 4);
3185 }
3186}
3187
/* si_fence_ring_emit - emit a fence on a CP ring
 *
 * First flushes the GPU read caches over GART (SURFACE_SYNC on TC/TCL1
 * and the SH instruction/constant caches), then emits EVENT_WRITE_EOP
 * to write fence->seq to the fence GPU address and raise an interrupt
 * once the flush-and-invalidate event completes.
 */
void si_fence_ring_emit(struct radeon_device *rdev,
			struct radeon_fence *fence)
{
	struct radeon_ring *ring = &rdev->ring[fence->ring];
	u64 addr = rdev->fence_drv[fence->ring].gpu_addr;

	/* flush read cache over gart */
	radeon_ring_write(ring, PACKET3(PACKET3_SET_CONFIG_REG, 1));
	radeon_ring_write(ring, (CP_COHER_CNTL2 - PACKET3_SET_CONFIG_REG_START) >> 2);
	radeon_ring_write(ring, 0);
	radeon_ring_write(ring, PACKET3(PACKET3_SURFACE_SYNC, 3));
	radeon_ring_write(ring, PACKET3_TCL1_ACTION_ENA |
			  PACKET3_TC_ACTION_ENA |
			  PACKET3_SH_KCACHE_ACTION_ENA |
			  PACKET3_SH_ICACHE_ACTION_ENA);
	radeon_ring_write(ring, 0xFFFFFFFF); /* CP_COHER_SIZE: full range */
	radeon_ring_write(ring, 0);          /* CP_COHER_BASE */
	radeon_ring_write(ring, 10); /* poll interval */
	/* EVENT_WRITE_EOP - flush caches, send int */
	radeon_ring_write(ring, PACKET3(PACKET3_EVENT_WRITE_EOP, 4));
	radeon_ring_write(ring, EVENT_TYPE(CACHE_FLUSH_AND_INV_TS_EVENT) | EVENT_INDEX(5));
	radeon_ring_write(ring, addr & 0xffffffff);
	/* DATA_SEL(1): write 32-bit seq; INT_SEL(2): interrupt on write */
	radeon_ring_write(ring, (upper_32_bits(addr) & 0xff) | DATA_SEL(1) | INT_SEL(2));
	radeon_ring_write(ring, fence->seq);
	radeon_ring_write(ring, 0);
}
3214
3215/*
3216 * IB stuff
3217 */
/* si_ring_ib_execute - schedule an indirect buffer on a CP ring
 *
 * Const IBs get a SWITCH_BUFFER preamble and use the CONST indirect
 * buffer packet; normal IBs first record the post-IB rptr (either to
 * the rptr_save scratch register or via WRITE_DATA to the writeback
 * slot) and, after the IB packet, flush the read caches for the IB's
 * VM id.  The literal offsets added to ring->wptr when computing
 * next_rptr must match the exact number of dwords emitted below.
 */
void si_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib)
{
	struct radeon_ring *ring = &rdev->ring[ib->ring];
	u32 header;

	if (ib->is_const_ib) {
		/* set switch buffer packet before const IB */
		radeon_ring_write(ring, PACKET3(PACKET3_SWITCH_BUFFER, 0));
		radeon_ring_write(ring, 0);

		header = PACKET3(PACKET3_INDIRECT_BUFFER_CONST, 2);
	} else {
		u32 next_rptr;
		if (ring->rptr_save_reg) {
			/* 3 dwords here + 4 for the IB packet + 8 for the flush */
			next_rptr = ring->wptr + 3 + 4 + 8;
			radeon_ring_write(ring, PACKET3(PACKET3_SET_CONFIG_REG, 1));
			radeon_ring_write(ring, ((ring->rptr_save_reg -
						  PACKET3_SET_CONFIG_REG_START) >> 2));
			radeon_ring_write(ring, next_rptr);
		} else if (rdev->wb.enabled) {
			/* 5 dwords here + 4 for the IB packet + 8 for the flush */
			next_rptr = ring->wptr + 5 + 4 + 8;
			radeon_ring_write(ring, PACKET3(PACKET3_WRITE_DATA, 3));
			radeon_ring_write(ring, (1 << 8)); /* write confirm */
			radeon_ring_write(ring, ring->next_rptr_gpu_addr & 0xfffffffc);
			radeon_ring_write(ring, upper_32_bits(ring->next_rptr_gpu_addr) & 0xffffffff);
			radeon_ring_write(ring, next_rptr);
		}

		header = PACKET3(PACKET3_INDIRECT_BUFFER, 2);
	}

	radeon_ring_write(ring, header);
	radeon_ring_write(ring,
#ifdef __BIG_ENDIAN
			  (2 << 0) |
#endif
			  (ib->gpu_addr & 0xFFFFFFFC));
	radeon_ring_write(ring, upper_32_bits(ib->gpu_addr) & 0xFFFF);
	radeon_ring_write(ring, ib->length_dw |
			  (ib->vm ? (ib->vm->id << 24) : 0));

	if (!ib->is_const_ib) {
		/* flush read cache over gart for this vmid */
		radeon_ring_write(ring, PACKET3(PACKET3_SET_CONFIG_REG, 1));
		radeon_ring_write(ring, (CP_COHER_CNTL2 - PACKET3_SET_CONFIG_REG_START) >> 2);
		radeon_ring_write(ring, ib->vm ? ib->vm->id : 0);
		radeon_ring_write(ring, PACKET3(PACKET3_SURFACE_SYNC, 3));
		radeon_ring_write(ring, PACKET3_TCL1_ACTION_ENA |
				  PACKET3_TC_ACTION_ENA |
				  PACKET3_SH_KCACHE_ACTION_ENA |
				  PACKET3_SH_ICACHE_ACTION_ENA);
		radeon_ring_write(ring, 0xFFFFFFFF);
		radeon_ring_write(ring, 0);
		radeon_ring_write(ring, 10); /* poll interval */
	}
}
3274
3275/*
3276 * CP.
3277 */
3278static void si_cp_enable(struct radeon_device *rdev, bool enable)
3279{
3280 if (enable)
3281 WREG32(CP_ME_CNTL, 0);
3282 else {
3283 if (rdev->asic->copy.copy_ring_index == RADEON_RING_TYPE_GFX_INDEX)
3284 radeon_ttm_set_active_vram_size(rdev, rdev->mc.visible_vram_size);
3285 WREG32(CP_ME_CNTL, (CP_ME_HALT | CP_PFP_HALT | CP_CE_HALT));
3286 WREG32(SCRATCH_UMSK, 0);
3287 rdev->ring[RADEON_RING_TYPE_GFX_INDEX].ready = false;
3288 rdev->ring[CAYMAN_RING_TYPE_CP1_INDEX].ready = false;
3289 rdev->ring[CAYMAN_RING_TYPE_CP2_INDEX].ready = false;
3290 }
3291 udelay(50);
3292}
3293
3294static int si_cp_load_microcode(struct radeon_device *rdev)
3295{
3296 const __be32 *fw_data;
3297 int i;
3298
3299 if (!rdev->me_fw || !rdev->pfp_fw)
3300 return -EINVAL;
3301
3302 si_cp_enable(rdev, false);
3303
3304 /* PFP */
3305 fw_data = (const __be32 *)rdev->pfp_fw->data;
3306 WREG32(CP_PFP_UCODE_ADDR, 0);
3307 for (i = 0; i < SI_PFP_UCODE_SIZE; i++)
3308 WREG32(CP_PFP_UCODE_DATA, be32_to_cpup(fw_data++));
3309 WREG32(CP_PFP_UCODE_ADDR, 0);
3310
3311 /* CE */
3312 fw_data = (const __be32 *)rdev->ce_fw->data;
3313 WREG32(CP_CE_UCODE_ADDR, 0);
3314 for (i = 0; i < SI_CE_UCODE_SIZE; i++)
3315 WREG32(CP_CE_UCODE_DATA, be32_to_cpup(fw_data++));
3316 WREG32(CP_CE_UCODE_ADDR, 0);
3317
3318 /* ME */
3319 fw_data = (const __be32 *)rdev->me_fw->data;
3320 WREG32(CP_ME_RAM_WADDR, 0);
3321 for (i = 0; i < SI_PM4_UCODE_SIZE; i++)
3322 WREG32(CP_ME_RAM_DATA, be32_to_cpup(fw_data++));
3323 WREG32(CP_ME_RAM_WADDR, 0);
3324
3325 WREG32(CP_PFP_UCODE_ADDR, 0);
3326 WREG32(CP_CE_UCODE_ADDR, 0);
3327 WREG32(CP_ME_RAM_WADDR, 0);
3328 WREG32(CP_ME_RAM_RADDR, 0);
3329 return 0;
3330}
3331
3332static int si_cp_start(struct radeon_device *rdev)
3333{
3334 struct radeon_ring *ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
3335 int r, i;
3336
3337 r = radeon_ring_lock(rdev, ring, 7 + 4);
3338 if (r) {
3339 DRM_ERROR("radeon: cp failed to lock ring (%d).\n", r);
3340 return r;
3341 }
3342 /* init the CP */
3343 radeon_ring_write(ring, PACKET3(PACKET3_ME_INITIALIZE, 5));
3344 radeon_ring_write(ring, 0x1);
3345 radeon_ring_write(ring, 0x0);
3346 radeon_ring_write(ring, rdev->config.si.max_hw_contexts - 1);
3347 radeon_ring_write(ring, PACKET3_ME_INITIALIZE_DEVICE_ID(1));
3348 radeon_ring_write(ring, 0);
3349 radeon_ring_write(ring, 0);
3350
3351 /* init the CE partitions */
3352 radeon_ring_write(ring, PACKET3(PACKET3_SET_BASE, 2));
3353 radeon_ring_write(ring, PACKET3_BASE_INDEX(CE_PARTITION_BASE));
3354 radeon_ring_write(ring, 0xc000);
3355 radeon_ring_write(ring, 0xe000);
3356 radeon_ring_unlock_commit(rdev, ring);
3357
3358 si_cp_enable(rdev, true);
3359
3360 r = radeon_ring_lock(rdev, ring, si_default_size + 10);
3361 if (r) {
3362 DRM_ERROR("radeon: cp failed to lock ring (%d).\n", r);
3363 return r;
3364 }
3365
3366 /* setup clear context state */
3367 radeon_ring_write(ring, PACKET3(PACKET3_PREAMBLE_CNTL, 0));
3368 radeon_ring_write(ring, PACKET3_PREAMBLE_BEGIN_CLEAR_STATE);
3369
3370 for (i = 0; i < si_default_size; i++)
3371 radeon_ring_write(ring, si_default_state[i]);
3372
3373 radeon_ring_write(ring, PACKET3(PACKET3_PREAMBLE_CNTL, 0));
3374 radeon_ring_write(ring, PACKET3_PREAMBLE_END_CLEAR_STATE);
3375
3376 /* set clear context state */
3377 radeon_ring_write(ring, PACKET3(PACKET3_CLEAR_STATE, 0));
3378 radeon_ring_write(ring, 0);
3379
3380 radeon_ring_write(ring, PACKET3(PACKET3_SET_CONTEXT_REG, 2));
3381 radeon_ring_write(ring, 0x00000316);
3382 radeon_ring_write(ring, 0x0000000e); /* VGT_VERTEX_REUSE_BLOCK_CNTL */
3383 radeon_ring_write(ring, 0x00000010); /* VGT_OUT_DEALLOC_CNTL */
3384
3385 radeon_ring_unlock_commit(rdev, ring);
3386
3387 for (i = RADEON_RING_TYPE_GFX_INDEX; i <= CAYMAN_RING_TYPE_CP2_INDEX; ++i) {
3388 ring = &rdev->ring[i];
3389 r = radeon_ring_lock(rdev, ring, 2);
3390
3391 /* clear the compute context state */
3392 radeon_ring_write(ring, PACKET3_COMPUTE(PACKET3_CLEAR_STATE, 0));
3393 radeon_ring_write(ring, 0);
3394
3395 radeon_ring_unlock_commit(rdev, ring);
3396 }
3397
3398 return 0;
3399}
3400
3401static void si_cp_fini(struct radeon_device *rdev)
3402{
3403 struct radeon_ring *ring;
3404 si_cp_enable(rdev, false);
3405
3406 ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
3407 radeon_ring_fini(rdev, ring);
3408 radeon_scratch_free(rdev, ring->rptr_save_reg);
3409
3410 ring = &rdev->ring[CAYMAN_RING_TYPE_CP1_INDEX];
3411 radeon_ring_fini(rdev, ring);
3412 radeon_scratch_free(rdev, ring->rptr_save_reg);
3413
3414 ring = &rdev->ring[CAYMAN_RING_TYPE_CP2_INDEX];
3415 radeon_ring_fini(rdev, ring);
3416 radeon_scratch_free(rdev, ring->rptr_save_reg);
3417}
3418
/* si_cp_resume - bring up the three CP ring buffers
 *
 * Programs the ring buffer size, pointers, writeback addresses and base
 * for the gfx ring (RB0) and both compute rings (RB1/RB2), starts the
 * CP via si_cp_start() and ring-tests each ring.  A gfx ring test
 * failure is fatal; compute ring failures only leave those rings
 * marked not ready.
 *
 * Returns 0 on success, negative error code if the gfx ring test fails.
 */
static int si_cp_resume(struct radeon_device *rdev)
{
	struct radeon_ring *ring;
	u32 tmp;
	u32 rb_bufsz;
	int r;

	si_enable_gui_idle_interrupt(rdev, false);

	WREG32(CP_SEM_WAIT_TIMER, 0x0);
	WREG32(CP_SEM_INCOMPLETE_TIMER_CNTL, 0x0);

	/* Set the write pointer delay */
	WREG32(CP_RB_WPTR_DELAY, 0);

	WREG32(CP_DEBUG, 0);
	WREG32(SCRATCH_ADDR, ((rdev->wb.gpu_addr + RADEON_WB_SCRATCH_OFFSET) >> 8) & 0xFFFFFFFF);

	/* ring 0 - compute and gfx */
	/* Set ring buffer size */
	ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
	rb_bufsz = order_base_2(ring->ring_size / 8);
	tmp = (order_base_2(RADEON_GPU_PAGE_SIZE/8) << 8) | rb_bufsz;
#ifdef __BIG_ENDIAN
	tmp |= BUF_SWAP_32BIT;
#endif
	WREG32(CP_RB0_CNTL, tmp);

	/* Initialize the ring buffer's read and write pointers */
	WREG32(CP_RB0_CNTL, tmp | RB_RPTR_WR_ENA);
	ring->wptr = 0;
	WREG32(CP_RB0_WPTR, ring->wptr);

	/* set the wb address whether it's enabled or not */
	WREG32(CP_RB0_RPTR_ADDR, (rdev->wb.gpu_addr + RADEON_WB_CP_RPTR_OFFSET) & 0xFFFFFFFC);
	WREG32(CP_RB0_RPTR_ADDR_HI, upper_32_bits(rdev->wb.gpu_addr + RADEON_WB_CP_RPTR_OFFSET) & 0xFF);

	if (rdev->wb.enabled)
		WREG32(SCRATCH_UMSK, 0xff);
	else {
		tmp |= RB_NO_UPDATE;
		WREG32(SCRATCH_UMSK, 0);
	}

	/* let RB_RPTR_WR_ENA take effect before restoring the cntl */
	mdelay(1);
	WREG32(CP_RB0_CNTL, tmp);

	WREG32(CP_RB0_BASE, ring->gpu_addr >> 8);

	/* ring1  - compute only */
	/* Set ring buffer size */
	ring = &rdev->ring[CAYMAN_RING_TYPE_CP1_INDEX];
	rb_bufsz = order_base_2(ring->ring_size / 8);
	tmp = (order_base_2(RADEON_GPU_PAGE_SIZE/8) << 8) | rb_bufsz;
#ifdef __BIG_ENDIAN
	tmp |= BUF_SWAP_32BIT;
#endif
	WREG32(CP_RB1_CNTL, tmp);

	/* Initialize the ring buffer's read and write pointers */
	WREG32(CP_RB1_CNTL, tmp | RB_RPTR_WR_ENA);
	ring->wptr = 0;
	WREG32(CP_RB1_WPTR, ring->wptr);

	/* set the wb address whether it's enabled or not */
	WREG32(CP_RB1_RPTR_ADDR, (rdev->wb.gpu_addr + RADEON_WB_CP1_RPTR_OFFSET) & 0xFFFFFFFC);
	WREG32(CP_RB1_RPTR_ADDR_HI, upper_32_bits(rdev->wb.gpu_addr + RADEON_WB_CP1_RPTR_OFFSET) & 0xFF);

	mdelay(1);
	WREG32(CP_RB1_CNTL, tmp);

	WREG32(CP_RB1_BASE, ring->gpu_addr >> 8);

	/* ring2 - compute only */
	/* Set ring buffer size */
	ring = &rdev->ring[CAYMAN_RING_TYPE_CP2_INDEX];
	rb_bufsz = order_base_2(ring->ring_size / 8);
	tmp = (order_base_2(RADEON_GPU_PAGE_SIZE/8) << 8) | rb_bufsz;
#ifdef __BIG_ENDIAN
	tmp |= BUF_SWAP_32BIT;
#endif
	WREG32(CP_RB2_CNTL, tmp);

	/* Initialize the ring buffer's read and write pointers */
	WREG32(CP_RB2_CNTL, tmp | RB_RPTR_WR_ENA);
	ring->wptr = 0;
	WREG32(CP_RB2_WPTR, ring->wptr);

	/* set the wb address whether it's enabled or not */
	WREG32(CP_RB2_RPTR_ADDR, (rdev->wb.gpu_addr + RADEON_WB_CP2_RPTR_OFFSET) & 0xFFFFFFFC);
	WREG32(CP_RB2_RPTR_ADDR_HI, upper_32_bits(rdev->wb.gpu_addr + RADEON_WB_CP2_RPTR_OFFSET) & 0xFF);

	mdelay(1);
	WREG32(CP_RB2_CNTL, tmp);

	WREG32(CP_RB2_BASE, ring->gpu_addr >> 8);

	/* start the rings */
	si_cp_start(rdev);
	rdev->ring[RADEON_RING_TYPE_GFX_INDEX].ready = true;
	rdev->ring[CAYMAN_RING_TYPE_CP1_INDEX].ready = true;
	rdev->ring[CAYMAN_RING_TYPE_CP2_INDEX].ready = true;
	r = radeon_ring_test(rdev, RADEON_RING_TYPE_GFX_INDEX, &rdev->ring[RADEON_RING_TYPE_GFX_INDEX]);
	if (r) {
		rdev->ring[RADEON_RING_TYPE_GFX_INDEX].ready = false;
		rdev->ring[CAYMAN_RING_TYPE_CP1_INDEX].ready = false;
		rdev->ring[CAYMAN_RING_TYPE_CP2_INDEX].ready = false;
		return r;
	}
	/* compute ring failures are non-fatal */
	r = radeon_ring_test(rdev, CAYMAN_RING_TYPE_CP1_INDEX, &rdev->ring[CAYMAN_RING_TYPE_CP1_INDEX]);
	if (r) {
		rdev->ring[CAYMAN_RING_TYPE_CP1_INDEX].ready = false;
	}
	r = radeon_ring_test(rdev, CAYMAN_RING_TYPE_CP2_INDEX, &rdev->ring[CAYMAN_RING_TYPE_CP2_INDEX]);
	if (r) {
		rdev->ring[CAYMAN_RING_TYPE_CP2_INDEX].ready = false;
	}

	si_enable_gui_idle_interrupt(rdev, true);

	if (rdev->asic->copy.copy_ring_index == RADEON_RING_TYPE_GFX_INDEX)
		radeon_ttm_set_active_vram_size(rdev, rdev->mc.real_vram_size);

	return 0;
}
3544
/* si_gpu_check_soft_reset - determine which GPU blocks appear hung
 *
 * Polls the GRBM, SRBM, DMA and VM L2 status registers and accumulates
 * RADEON_RESET_* flags for every block reporting busy.  An MC-busy
 * indication is cleared before returning since the MC is most likely
 * just busy, not hung.
 *
 * Returns a RADEON_RESET_* bitmask (0 means the GPU looks idle).
 */
u32 si_gpu_check_soft_reset(struct radeon_device *rdev)
{
	u32 reset_mask = 0;
	u32 tmp;

	/* GRBM_STATUS */
	tmp = RREG32(GRBM_STATUS);
	if (tmp & (PA_BUSY | SC_BUSY |
		   BCI_BUSY | SX_BUSY |
		   TA_BUSY | VGT_BUSY |
		   DB_BUSY | CB_BUSY |
		   GDS_BUSY | SPI_BUSY |
		   IA_BUSY | IA_BUSY_NO_DMA))
		reset_mask |= RADEON_RESET_GFX;

	if (tmp & (CF_RQ_PENDING | PF_RQ_PENDING |
		   CP_BUSY | CP_COHERENCY_BUSY))
		reset_mask |= RADEON_RESET_CP;

	if (tmp & GRBM_EE_BUSY)
		reset_mask |= RADEON_RESET_GRBM | RADEON_RESET_GFX | RADEON_RESET_CP;

	/* GRBM_STATUS2 */
	tmp = RREG32(GRBM_STATUS2);
	if (tmp & (RLC_RQ_PENDING | RLC_BUSY))
		reset_mask |= RADEON_RESET_RLC;

	/* DMA_STATUS_REG 0 */
	tmp = RREG32(DMA_STATUS_REG + DMA0_REGISTER_OFFSET);
	if (!(tmp & DMA_IDLE))
		reset_mask |= RADEON_RESET_DMA;

	/* DMA_STATUS_REG 1 */
	tmp = RREG32(DMA_STATUS_REG + DMA1_REGISTER_OFFSET);
	if (!(tmp & DMA_IDLE))
		reset_mask |= RADEON_RESET_DMA1;

	/* SRBM_STATUS2 */
	tmp = RREG32(SRBM_STATUS2);
	if (tmp & DMA_BUSY)
		reset_mask |= RADEON_RESET_DMA;

	if (tmp & DMA1_BUSY)
		reset_mask |= RADEON_RESET_DMA1;

	/* SRBM_STATUS */
	tmp = RREG32(SRBM_STATUS);

	if (tmp & IH_BUSY)
		reset_mask |= RADEON_RESET_IH;

	if (tmp & SEM_BUSY)
		reset_mask |= RADEON_RESET_SEM;

	if (tmp & GRBM_RQ_PENDING)
		reset_mask |= RADEON_RESET_GRBM;

	if (tmp & VMC_BUSY)
		reset_mask |= RADEON_RESET_VMC;

	if (tmp & (MCB_BUSY | MCB_NON_DISPLAY_BUSY |
		   MCC_BUSY | MCD_BUSY))
		reset_mask |= RADEON_RESET_MC;

	if (evergreen_is_display_hung(rdev))
		reset_mask |= RADEON_RESET_DISPLAY;

	/* VM_L2_STATUS */
	tmp = RREG32(VM_L2_STATUS);
	if (tmp & L2_BUSY)
		reset_mask |= RADEON_RESET_VMC;

	/* Skip MC reset as it's mostly likely not hung, just busy */
	if (reset_mask & RADEON_RESET_MC) {
		DRM_DEBUG("MC busy: 0x%08X, clearing.\n", reset_mask);
		reset_mask &= ~RADEON_RESET_MC;
	}

	return reset_mask;
}
3625
/* si_gpu_soft_reset - soft-reset the GPU blocks named in @reset_mask
 *
 * Sequence: dump state, disable PG/CG and the RLC, halt the CP and the
 * DMA engines, stop the MC, then pulse the matching GRBM/SRBM soft
 * reset bits (write-readback-delay-clear) and resume the MC.  The
 * readbacks after each soft-reset write act as posting reads.
 */
static void si_gpu_soft_reset(struct radeon_device *rdev, u32 reset_mask)
{
	struct evergreen_mc_save save;
	u32 grbm_soft_reset = 0, srbm_soft_reset = 0;
	u32 tmp;

	if (reset_mask == 0)
		return;

	dev_info(rdev->dev, "GPU softreset: 0x%08X\n", reset_mask);

	evergreen_print_gpu_status_regs(rdev);
	dev_info(rdev->dev, "  VM_CONTEXT1_PROTECTION_FAULT_ADDR   0x%08X\n",
		 RREG32(VM_CONTEXT1_PROTECTION_FAULT_ADDR));
	dev_info(rdev->dev, "  VM_CONTEXT1_PROTECTION_FAULT_STATUS 0x%08X\n",
		 RREG32(VM_CONTEXT1_PROTECTION_FAULT_STATUS));

	/* disable PG/CG */
	si_fini_pg(rdev);
	si_fini_cg(rdev);

	/* stop the rlc */
	si_rlc_stop(rdev);

	/* Disable CP parsing/prefetching */
	WREG32(CP_ME_CNTL, CP_ME_HALT | CP_PFP_HALT | CP_CE_HALT);

	if (reset_mask & RADEON_RESET_DMA) {
		/* dma0 */
		tmp = RREG32(DMA_RB_CNTL + DMA0_REGISTER_OFFSET);
		tmp &= ~DMA_RB_ENABLE;
		WREG32(DMA_RB_CNTL + DMA0_REGISTER_OFFSET, tmp);
	}
	if (reset_mask & RADEON_RESET_DMA1) {
		/* dma1 */
		tmp = RREG32(DMA_RB_CNTL + DMA1_REGISTER_OFFSET);
		tmp &= ~DMA_RB_ENABLE;
		WREG32(DMA_RB_CNTL + DMA1_REGISTER_OFFSET, tmp);
	}

	udelay(50);

	evergreen_mc_stop(rdev, &save);
	if (evergreen_mc_wait_for_idle(rdev)) {
		dev_warn(rdev->dev, "Wait for MC idle timedout !\n");
	}

	/* translate the reset mask into GRBM/SRBM soft reset bits */
	if (reset_mask & (RADEON_RESET_GFX | RADEON_RESET_COMPUTE | RADEON_RESET_CP)) {
		grbm_soft_reset = SOFT_RESET_CB |
			SOFT_RESET_DB |
			SOFT_RESET_GDS |
			SOFT_RESET_PA |
			SOFT_RESET_SC |
			SOFT_RESET_BCI |
			SOFT_RESET_SPI |
			SOFT_RESET_SX |
			SOFT_RESET_TC |
			SOFT_RESET_TA |
			SOFT_RESET_VGT |
			SOFT_RESET_IA;
	}

	if (reset_mask & RADEON_RESET_CP) {
		grbm_soft_reset |= SOFT_RESET_CP | SOFT_RESET_VGT;

		srbm_soft_reset |= SOFT_RESET_GRBM;
	}

	if (reset_mask & RADEON_RESET_DMA)
		srbm_soft_reset |= SOFT_RESET_DMA;

	if (reset_mask & RADEON_RESET_DMA1)
		srbm_soft_reset |= SOFT_RESET_DMA1;

	if (reset_mask & RADEON_RESET_DISPLAY)
		srbm_soft_reset |= SOFT_RESET_DC;

	if (reset_mask & RADEON_RESET_RLC)
		grbm_soft_reset |= SOFT_RESET_RLC;

	if (reset_mask & RADEON_RESET_SEM)
		srbm_soft_reset |= SOFT_RESET_SEM;

	if (reset_mask & RADEON_RESET_IH)
		srbm_soft_reset |= SOFT_RESET_IH;

	if (reset_mask & RADEON_RESET_GRBM)
		srbm_soft_reset |= SOFT_RESET_GRBM;

	if (reset_mask & RADEON_RESET_VMC)
		srbm_soft_reset |= SOFT_RESET_VMC;

	if (reset_mask & RADEON_RESET_MC)
		srbm_soft_reset |= SOFT_RESET_MC;

	if (grbm_soft_reset) {
		tmp = RREG32(GRBM_SOFT_RESET);
		tmp |= grbm_soft_reset;
		dev_info(rdev->dev, "GRBM_SOFT_RESET=0x%08X\n", tmp);
		WREG32(GRBM_SOFT_RESET, tmp);
		tmp = RREG32(GRBM_SOFT_RESET);

		udelay(50);

		tmp &= ~grbm_soft_reset;
		WREG32(GRBM_SOFT_RESET, tmp);
		tmp = RREG32(GRBM_SOFT_RESET);
	}

	if (srbm_soft_reset) {
		tmp = RREG32(SRBM_SOFT_RESET);
		tmp |= srbm_soft_reset;
		dev_info(rdev->dev, "SRBM_SOFT_RESET=0x%08X\n", tmp);
		WREG32(SRBM_SOFT_RESET, tmp);
		tmp = RREG32(SRBM_SOFT_RESET);

		udelay(50);

		tmp &= ~srbm_soft_reset;
		WREG32(SRBM_SOFT_RESET, tmp);
		tmp = RREG32(SRBM_SOFT_RESET);
	}

	/* Wait a little for things to settle down */
	udelay(50);

	evergreen_mc_resume(rdev, &save);
	udelay(50);

	evergreen_print_gpu_status_regs(rdev);
}
3757
/* si_set_clk_bypass_mode - switch sclk/mclk to PLL bypass
 *
 * Enables SPLL bypass, requests the control change and waits for the
 * SPLL to acknowledge it (bounded by usec_timeout), then clears the
 * request/mux-update bits and takes mclk off the MPLL.  Used before a
 * PCI config reset so the chip runs from the reference clock.
 */
static void si_set_clk_bypass_mode(struct radeon_device *rdev)
{
	u32 tmp, i;

	tmp = RREG32(CG_SPLL_FUNC_CNTL);
	tmp |= SPLL_BYPASS_EN;
	WREG32(CG_SPLL_FUNC_CNTL, tmp);

	tmp = RREG32(CG_SPLL_FUNC_CNTL_2);
	tmp |= SPLL_CTLREQ_CHG;
	WREG32(CG_SPLL_FUNC_CNTL_2, tmp);

	/* wait for the SPLL to report the change took effect */
	for (i = 0; i < rdev->usec_timeout; i++) {
		if (RREG32(SPLL_STATUS) & SPLL_CHG_STATUS)
			break;
		udelay(1);
	}

	tmp = RREG32(CG_SPLL_FUNC_CNTL_2);
	tmp &= ~(SPLL_CTLREQ_CHG | SCLK_MUX_UPDATE);
	WREG32(CG_SPLL_FUNC_CNTL_2, tmp);

	tmp = RREG32(MPLL_CNTL_MODE);
	tmp &= ~MPLL_MCLK_SEL;
	WREG32(MPLL_CNTL_MODE, tmp);
}
3784
/* si_spll_powerdown - put the SPLL into reset/sleep
 *
 * Takes software control of the SPLL, asserts reset then sleep, and
 * returns control to the hardware.  Only safe after
 * si_set_clk_bypass_mode() has moved sclk off the SPLL.
 */
static void si_spll_powerdown(struct radeon_device *rdev)
{
	u32 tmp;

	tmp = RREG32(SPLL_CNTL_MODE);
	tmp |= SPLL_SW_DIR_CONTROL;
	WREG32(SPLL_CNTL_MODE, tmp);

	tmp = RREG32(CG_SPLL_FUNC_CNTL);
	tmp |= SPLL_RESET;
	WREG32(CG_SPLL_FUNC_CNTL, tmp);

	tmp = RREG32(CG_SPLL_FUNC_CNTL);
	tmp |= SPLL_SLEEP;
	WREG32(CG_SPLL_FUNC_CNTL, tmp);

	tmp = RREG32(SPLL_CNTL_MODE);
	tmp &= ~SPLL_SW_DIR_CONTROL;
	WREG32(SPLL_CNTL_MODE, tmp);
}
3805
/**
 * si_gpu_pci_config_reset - reset the GPU through PCI config space
 * @rdev: radeon_device pointer
 *
 * Heavy fallback reset: quiesces the CP, DMA engines and RLC, stops MC
 * access, drops the clocks to bypass mode and powers down the SPLL,
 * disables bus mastering and finally triggers the reset via PCI config
 * space, polling until the ASIC responds again.
 */
static void si_gpu_pci_config_reset(struct radeon_device *rdev)
{
	struct evergreen_mc_save save;
	u32 tmp, i;

	dev_info(rdev->dev, "GPU pci config reset\n");

	/* disable dpm? */

	/* disable cg/pg */
	si_fini_pg(rdev);
	si_fini_cg(rdev);

	/* Disable CP parsing/prefetching */
	WREG32(CP_ME_CNTL, CP_ME_HALT | CP_PFP_HALT | CP_CE_HALT);
	/* dma0 */
	tmp = RREG32(DMA_RB_CNTL + DMA0_REGISTER_OFFSET);
	tmp &= ~DMA_RB_ENABLE;
	WREG32(DMA_RB_CNTL + DMA0_REGISTER_OFFSET, tmp);
	/* dma1 */
	tmp = RREG32(DMA_RB_CNTL + DMA1_REGISTER_OFFSET);
	tmp &= ~DMA_RB_ENABLE;
	WREG32(DMA_RB_CNTL + DMA1_REGISTER_OFFSET, tmp);
	/* XXX other engines? */

	/* halt the rlc, disable cp internal ints */
	si_rlc_stop(rdev);

	udelay(50);

	/* disable mem access */
	evergreen_mc_stop(rdev, &save);
	if (evergreen_mc_wait_for_idle(rdev)) {
		dev_warn(rdev->dev, "Wait for MC idle timed out !\n");
	}

	/* set mclk/sclk to bypass */
	si_set_clk_bypass_mode(rdev);
	/* powerdown spll */
	si_spll_powerdown(rdev);
	/* disable BM */
	pci_clear_master(rdev->pdev);
	/* reset */
	radeon_pci_config_reset(rdev);
	/* wait for asic to come out of reset */
	for (i = 0; i < rdev->usec_timeout; i++) {
		/* CONFIG_MEMSIZE reads back all 1s until the asic is alive */
		if (RREG32(CONFIG_MEMSIZE) != 0xffffffff)
			break;
		udelay(1);
	}
}
3857
3858int si_asic_reset(struct radeon_device *rdev)
3859{
3860 u32 reset_mask;
3861
3862 reset_mask = si_gpu_check_soft_reset(rdev);
3863
3864 if (reset_mask)
3865 r600_set_bios_scratch_engine_hung(rdev, true);
3866
3867 /* try soft reset */
3868 si_gpu_soft_reset(rdev, reset_mask);
3869
3870 reset_mask = si_gpu_check_soft_reset(rdev);
3871
3872 /* try pci config reset */
3873 if (reset_mask && radeon_hard_reset)
3874 si_gpu_pci_config_reset(rdev);
3875
3876 reset_mask = si_gpu_check_soft_reset(rdev);
3877
3878 if (!reset_mask)
3879 r600_set_bios_scratch_engine_hung(rdev, false);
3880
3881 return 0;
3882}
3883
3884/**
3885 * si_gfx_is_lockup - Check if the GFX engine is locked up
3886 *
3887 * @rdev: radeon_device pointer
3888 * @ring: radeon_ring structure holding ring information
3889 *
3890 * Check if the GFX engine is locked up.
3891 * Returns true if the engine appears to be locked up, false if not.
3892 */
3893bool si_gfx_is_lockup(struct radeon_device *rdev, struct radeon_ring *ring)
3894{
3895 u32 reset_mask = si_gpu_check_soft_reset(rdev);
3896
3897 if (!(reset_mask & (RADEON_RESET_GFX |
3898 RADEON_RESET_COMPUTE |
3899 RADEON_RESET_CP))) {
3900 radeon_ring_lockup_update(rdev, ring);
3901 return false;
3902 }
3903 return radeon_ring_test_lockup(rdev, ring);
3904}
3905
3906/* MC */
/**
 * si_mc_program - program the memory controller aperture layout
 * @rdev: radeon_device pointer
 *
 * Points the MC system aperture and FB location registers at the
 * ranges chosen in si_vram_gtt_location(), with display and memory
 * clients stopped around the update.
 */
static void si_mc_program(struct radeon_device *rdev)
{
	struct evergreen_mc_save save;
	u32 tmp;
	int i, j;

	/* Initialize HDP */
	for (i = 0, j = 0; i < 32; i++, j += 0x18) {
		WREG32((0x2c14 + j), 0x00000000);
		WREG32((0x2c18 + j), 0x00000000);
		WREG32((0x2c1c + j), 0x00000000);
		WREG32((0x2c20 + j), 0x00000000);
		WREG32((0x2c24 + j), 0x00000000);
	}
	WREG32(HDP_REG_COHERENCY_FLUSH_CNTL, 0);

	evergreen_mc_stop(rdev, &save);
	if (radeon_mc_wait_for_idle(rdev)) {
		dev_warn(rdev->dev, "Wait for MC idle timedout !\n");
	}
	if (!ASIC_IS_NODCE(rdev))
		/* Lockout access through VGA aperture*/
		WREG32(VGA_HDP_CONTROL, VGA_MEMORY_DISABLE);
	/* Update configuration */
	WREG32(MC_VM_SYSTEM_APERTURE_LOW_ADDR,
	       rdev->mc.vram_start >> 12);
	WREG32(MC_VM_SYSTEM_APERTURE_HIGH_ADDR,
	       rdev->mc.vram_end >> 12);
	WREG32(MC_VM_SYSTEM_APERTURE_DEFAULT_ADDR,
	       rdev->vram_scratch.gpu_addr >> 12);
	/* FB location: end in the top 16 bits, start in the low 16,
	 * both in 16MB units (>> 24) */
	tmp = ((rdev->mc.vram_end >> 24) & 0xFFFF) << 16;
	tmp |= ((rdev->mc.vram_start >> 24) & 0xFFFF);
	WREG32(MC_VM_FB_LOCATION, tmp);
	/* XXX double check these! */
	WREG32(HDP_NONSURFACE_BASE, (rdev->mc.vram_start >> 8));
	WREG32(HDP_NONSURFACE_INFO, (2 << 7) | (1 << 30));
	WREG32(HDP_NONSURFACE_SIZE, 0x3FFFFFFF);
	WREG32(MC_VM_AGP_BASE, 0);
	WREG32(MC_VM_AGP_TOP, 0x0FFFFFFF);
	WREG32(MC_VM_AGP_BOT, 0x0FFFFFFF);
	if (radeon_mc_wait_for_idle(rdev)) {
		dev_warn(rdev->dev, "Wait for MC idle timedout !\n");
	}
	evergreen_mc_resume(rdev, &save);
	if (!ASIC_IS_NODCE(rdev)) {
		/* we need to own VRAM, so turn off the VGA renderer here
		 * to stop it overwriting our objects */
		rv515_vga_render_disable(rdev);
	}
}
3957
/**
 * si_vram_gtt_location - place VRAM and GTT in the GPU address space
 * @rdev: radeon_device pointer
 * @mc: memory controller structure to set up (callers pass &rdev->mc)
 *
 * Caps VRAM so at least 1024M of address space remains for the GTT,
 * then lets the core helpers place VRAM and GTT.
 */
void si_vram_gtt_location(struct radeon_device *rdev,
			  struct radeon_mc *mc)
{
	if (mc->mc_vram_size > 0xFFC0000000ULL) {
		/* leave room for at least 1024M GTT */
		dev_warn(rdev->dev, "limiting VRAM\n");
		mc->real_vram_size = 0xFFC0000000ULL;
		mc->mc_vram_size = 0xFFC0000000ULL;
	}
	/* NOTE(review): mixes &rdev->mc and the mc argument below; only
	 * equivalent because the sole caller passes &rdev->mc - confirm
	 * before calling with any other struct. */
	radeon_vram_location(rdev, &rdev->mc, 0);
	rdev->mc.gtt_base_align = 0;
	radeon_gtt_location(rdev, mc);
}
3971
/**
 * si_mc_init - probe VRAM configuration
 * @rdev: radeon_device pointer
 *
 * Reads channel size/count from the MC registers to compute the memory
 * bus width, reads the VRAM size from CONFIG_MEMSIZE (in MB) and fills
 * in rdev->mc, then places VRAM/GTT.  Returns 0 (cannot fail).
 */
static int si_mc_init(struct radeon_device *rdev)
{
	u32 tmp;
	int chansize, numchan;

	/* Get VRAM informations */
	rdev->mc.vram_is_ddr = true;
	tmp = RREG32(MC_ARB_RAMCFG);
	if (tmp & CHANSIZE_OVERRIDE) {
		chansize = 16;
	} else if (tmp & CHANSIZE_MASK) {
		chansize = 64;
	} else {
		chansize = 32;
	}
	/* decode the encoded channel count (not a simple power of two) */
	tmp = RREG32(MC_SHARED_CHMAP);
	switch ((tmp & NOOFCHAN_MASK) >> NOOFCHAN_SHIFT) {
	case 0:
	default:
		numchan = 1;
		break;
	case 1:
		numchan = 2;
		break;
	case 2:
		numchan = 4;
		break;
	case 3:
		numchan = 8;
		break;
	case 4:
		numchan = 3;
		break;
	case 5:
		numchan = 6;
		break;
	case 6:
		numchan = 10;
		break;
	case 7:
		numchan = 12;
		break;
	case 8:
		numchan = 16;
		break;
	}
	rdev->mc.vram_width = numchan * chansize;
	/* Could aper size report 0 ? */
	rdev->mc.aper_base = pci_resource_start(rdev->pdev, 0);
	rdev->mc.aper_size = pci_resource_len(rdev->pdev, 0);
	/* size in MB on si */
	tmp = RREG32(CONFIG_MEMSIZE);
	/* some boards may have garbage in the upper 16 bits */
	if (tmp & 0xffff0000) {
		DRM_INFO("Probable bad vram size: 0x%08x\n", tmp);
		/* keep only the low half if it is non-zero; a zero low
		 * half presumably means the full value is the real size */
		if (tmp & 0xffff)
			tmp &= 0xffff;
	}
	rdev->mc.mc_vram_size = tmp * 1024ULL * 1024ULL;
	rdev->mc.real_vram_size = rdev->mc.mc_vram_size;
	rdev->mc.visible_vram_size = rdev->mc.aper_size;
	si_vram_gtt_location(rdev, &rdev->mc);
	radeon_update_bandwidth_info(rdev);

	return 0;
}
4038
4039/*
4040 * GART
4041 */
/**
 * si_pcie_gart_tlb_flush - flush the system GART TLB
 * @rdev: radeon_device pointer
 *
 * Flushes the HDP cache and then invalidates VM context 0 (the system
 * GART context) via VM_INVALIDATE_REQUEST.
 */
void si_pcie_gart_tlb_flush(struct radeon_device *rdev)
{
	/* flush hdp cache */
	WREG32(HDP_MEM_COHERENCY_FLUSH_CNTL, 0x1);

	/* bits 0-15 are the VM contexts0-15 */
	WREG32(VM_INVALIDATE_REQUEST, 1);
}
4050
/**
 * si_pcie_gart_enable - set up the PCIE GART
 * @rdev: radeon_device pointer
 *
 * Pins the GART page table in VRAM, programs the L1 TLB and L2 cache,
 * sets up VM context 0 as the system GART context and contexts 1-15
 * for per-process VMs, then flushes the TLB.  Returns 0 on success or
 * a negative error code.
 */
static int si_pcie_gart_enable(struct radeon_device *rdev)
{
	int r, i;

	if (rdev->gart.robj == NULL) {
		dev_err(rdev->dev, "No VRAM object for PCIE GART.\n");
		return -EINVAL;
	}
	r = radeon_gart_table_vram_pin(rdev);
	if (r)
		return r;
	radeon_gart_restore(rdev);
	/* Setup TLB control */
	WREG32(MC_VM_MX_L1_TLB_CNTL,
	       (0xA << 7) |
	       ENABLE_L1_TLB |
	       SYSTEM_ACCESS_MODE_NOT_IN_SYS |
	       ENABLE_ADVANCED_DRIVER_MODEL |
	       SYSTEM_APERTURE_UNMAPPED_ACCESS_PASS_THRU);
	/* Setup L2 cache */
	WREG32(VM_L2_CNTL, ENABLE_L2_CACHE |
	       ENABLE_L2_PTE_CACHE_LRU_UPDATE_BY_WRITE |
	       ENABLE_L2_PDE0_CACHE_LRU_UPDATE_BY_WRITE |
	       EFFECTIVE_L2_QUEUE_SIZE(7) |
	       CONTEXT1_IDENTITY_ACCESS_MODE(1));
	WREG32(VM_L2_CNTL2, INVALIDATE_ALL_L1_TLBS | INVALIDATE_L2_CACHE);
	WREG32(VM_L2_CNTL3, L2_CACHE_BIGK_ASSOCIATIVITY |
	       L2_CACHE_BIGK_FRAGMENT_SIZE(0));
	/* setup context0 */
	WREG32(VM_CONTEXT0_PAGE_TABLE_START_ADDR, rdev->mc.gtt_start >> 12);
	WREG32(VM_CONTEXT0_PAGE_TABLE_END_ADDR, rdev->mc.gtt_end >> 12);
	WREG32(VM_CONTEXT0_PAGE_TABLE_BASE_ADDR, rdev->gart.table_addr >> 12);
	/* faults in context0 land on the dummy page */
	WREG32(VM_CONTEXT0_PROTECTION_FAULT_DEFAULT_ADDR,
			(u32)(rdev->dummy_page.addr >> 12));
	WREG32(VM_CONTEXT0_CNTL2, 0);
	WREG32(VM_CONTEXT0_CNTL, (ENABLE_CONTEXT | PAGE_TABLE_DEPTH(0) |
				  RANGE_PROTECTION_FAULT_ENABLE_DEFAULT));

	WREG32(0x15D4, 0);
	WREG32(0x15D8, 0);
	WREG32(0x15DC, 0);

	/* empty context1-15 */
	/* set vm size, must be a multiple of 4 */
	WREG32(VM_CONTEXT1_PAGE_TABLE_START_ADDR, 0);
	WREG32(VM_CONTEXT1_PAGE_TABLE_END_ADDR, rdev->vm_manager.max_pfn);
	/* Assign the pt base to something valid for now; the pts used for
	 * the VMs are determined by the application and setup and assigned
	 * on the fly in the vm part of radeon_gart.c
	 */
	for (i = 1; i < 16; i++) {
		/* contexts 0-7 and 8-15 use two separate register banks */
		if (i < 8)
			WREG32(VM_CONTEXT0_PAGE_TABLE_BASE_ADDR + (i << 2),
			       rdev->gart.table_addr >> 12);
		else
			WREG32(VM_CONTEXT8_PAGE_TABLE_BASE_ADDR + ((i - 8) << 2),
			       rdev->gart.table_addr >> 12);
	}

	/* enable context1-15 */
	WREG32(VM_CONTEXT1_PROTECTION_FAULT_DEFAULT_ADDR,
	       (u32)(rdev->dummy_page.addr >> 12));
	WREG32(VM_CONTEXT1_CNTL2, 4);
	WREG32(VM_CONTEXT1_CNTL, ENABLE_CONTEXT | PAGE_TABLE_DEPTH(1) |
				RANGE_PROTECTION_FAULT_ENABLE_INTERRUPT |
				RANGE_PROTECTION_FAULT_ENABLE_DEFAULT |
				DUMMY_PAGE_PROTECTION_FAULT_ENABLE_INTERRUPT |
				DUMMY_PAGE_PROTECTION_FAULT_ENABLE_DEFAULT |
				PDE0_PROTECTION_FAULT_ENABLE_INTERRUPT |
				PDE0_PROTECTION_FAULT_ENABLE_DEFAULT |
				VALID_PROTECTION_FAULT_ENABLE_INTERRUPT |
				VALID_PROTECTION_FAULT_ENABLE_DEFAULT |
				READ_PROTECTION_FAULT_ENABLE_INTERRUPT |
				READ_PROTECTION_FAULT_ENABLE_DEFAULT |
				WRITE_PROTECTION_FAULT_ENABLE_INTERRUPT |
				WRITE_PROTECTION_FAULT_ENABLE_DEFAULT);

	si_pcie_gart_tlb_flush(rdev);
	DRM_INFO("PCIE GART of %uM enabled (table at 0x%016llX).\n",
		 (unsigned)(rdev->mc.gtt_size >> 20),
		 (unsigned long long)rdev->gart.table_addr);
	rdev->gart.ready = true;
	return 0;
}
4135
/**
 * si_pcie_gart_disable - tear down the PCIE GART
 * @rdev: radeon_device pointer
 *
 * Disables all VM contexts, turns off the L1 TLB and L2 cache, and
 * unpins the GART page table.
 */
static void si_pcie_gart_disable(struct radeon_device *rdev)
{
	/* Disable all tables */
	WREG32(VM_CONTEXT0_CNTL, 0);
	WREG32(VM_CONTEXT1_CNTL, 0);
	/* Setup TLB control */
	WREG32(MC_VM_MX_L1_TLB_CNTL, SYSTEM_ACCESS_MODE_NOT_IN_SYS |
	       SYSTEM_APERTURE_UNMAPPED_ACCESS_PASS_THRU);
	/* Setup L2 cache */
	WREG32(VM_L2_CNTL, ENABLE_L2_PTE_CACHE_LRU_UPDATE_BY_WRITE |
	       ENABLE_L2_PDE0_CACHE_LRU_UPDATE_BY_WRITE |
	       EFFECTIVE_L2_QUEUE_SIZE(7) |
	       CONTEXT1_IDENTITY_ACCESS_MODE(1));
	WREG32(VM_L2_CNTL2, 0);
	WREG32(VM_L2_CNTL3, L2_CACHE_BIGK_ASSOCIATIVITY |
	       L2_CACHE_BIGK_FRAGMENT_SIZE(0));
	radeon_gart_table_vram_unpin(rdev);
}
4154
/**
 * si_pcie_gart_fini - final GART teardown
 * @rdev: radeon_device pointer
 *
 * Disables the GART, then frees the page table and GART state.
 */
static void si_pcie_gart_fini(struct radeon_device *rdev)
{
	si_pcie_gart_disable(rdev);
	radeon_gart_table_vram_free(rdev);
	radeon_gart_fini(rdev);
}
4161
4162/* vm parser */
4163static bool si_vm_reg_valid(u32 reg)
4164{
4165 /* context regs are fine */
4166 if (reg >= 0x28000)
4167 return true;
4168
4169 /* check config regs */
4170 switch (reg) {
4171 case GRBM_GFX_INDEX:
4172 case CP_STRMOUT_CNTL:
4173 case VGT_VTX_VECT_EJECT_REG:
4174 case VGT_CACHE_INVALIDATION:
4175 case VGT_ESGS_RING_SIZE:
4176 case VGT_GSVS_RING_SIZE:
4177 case VGT_GS_VERTEX_REUSE:
4178 case VGT_PRIMITIVE_TYPE:
4179 case VGT_INDEX_TYPE:
4180 case VGT_NUM_INDICES:
4181 case VGT_NUM_INSTANCES:
4182 case VGT_TF_RING_SIZE:
4183 case VGT_HS_OFFCHIP_PARAM:
4184 case VGT_TF_MEMORY_BASE:
4185 case PA_CL_ENHANCE:
4186 case PA_SU_LINE_STIPPLE_VALUE:
4187 case PA_SC_LINE_STIPPLE_STATE:
4188 case PA_SC_ENHANCE:
4189 case SQC_CACHES:
4190 case SPI_STATIC_THREAD_MGMT_1:
4191 case SPI_STATIC_THREAD_MGMT_2:
4192 case SPI_STATIC_THREAD_MGMT_3:
4193 case SPI_PS_MAX_WAVE_ID:
4194 case SPI_CONFIG_CNTL:
4195 case SPI_CONFIG_CNTL_1:
4196 case TA_CNTL_AUX:
4197 return true;
4198 default:
4199 DRM_ERROR("Invalid register 0x%x in CS\n", reg);
4200 return false;
4201 }
4202}
4203
4204static int si_vm_packet3_ce_check(struct radeon_device *rdev,
4205 u32 *ib, struct radeon_cs_packet *pkt)
4206{
4207 switch (pkt->opcode) {
4208 case PACKET3_NOP:
4209 case PACKET3_SET_BASE:
4210 case PACKET3_SET_CE_DE_COUNTERS:
4211 case PACKET3_LOAD_CONST_RAM:
4212 case PACKET3_WRITE_CONST_RAM:
4213 case PACKET3_WRITE_CONST_RAM_OFFSET:
4214 case PACKET3_DUMP_CONST_RAM:
4215 case PACKET3_INCREMENT_CE_COUNTER:
4216 case PACKET3_WAIT_ON_DE_COUNTER:
4217 case PACKET3_CE_WRITE:
4218 break;
4219 default:
4220 DRM_ERROR("Invalid CE packet3: 0x%x\n", pkt->opcode);
4221 return -EINVAL;
4222 }
4223 return 0;
4224}
4225
4226static int si_vm_packet3_cp_dma_check(u32 *ib, u32 idx)
4227{
4228 u32 start_reg, reg, i;
4229 u32 command = ib[idx + 4];
4230 u32 info = ib[idx + 1];
4231 u32 idx_value = ib[idx];
4232 if (command & PACKET3_CP_DMA_CMD_SAS) {
4233 /* src address space is register */
4234 if (((info & 0x60000000) >> 29) == 0) {
4235 start_reg = idx_value << 2;
4236 if (command & PACKET3_CP_DMA_CMD_SAIC) {
4237 reg = start_reg;
4238 if (!si_vm_reg_valid(reg)) {
4239 DRM_ERROR("CP DMA Bad SRC register\n");
4240 return -EINVAL;
4241 }
4242 } else {
4243 for (i = 0; i < (command & 0x1fffff); i++) {
4244 reg = start_reg + (4 * i);
4245 if (!si_vm_reg_valid(reg)) {
4246 DRM_ERROR("CP DMA Bad SRC register\n");
4247 return -EINVAL;
4248 }
4249 }
4250 }
4251 }
4252 }
4253 if (command & PACKET3_CP_DMA_CMD_DAS) {
4254 /* dst address space is register */
4255 if (((info & 0x00300000) >> 20) == 0) {
4256 start_reg = ib[idx + 2];
4257 if (command & PACKET3_CP_DMA_CMD_DAIC) {
4258 reg = start_reg;
4259 if (!si_vm_reg_valid(reg)) {
4260 DRM_ERROR("CP DMA Bad DST register\n");
4261 return -EINVAL;
4262 }
4263 } else {
4264 for (i = 0; i < (command & 0x1fffff); i++) {
4265 reg = start_reg + (4 * i);
4266 if (!si_vm_reg_valid(reg)) {
4267 DRM_ERROR("CP DMA Bad DST register\n");
4268 return -EINVAL;
4269 }
4270 }
4271 }
4272 }
4273 }
4274 return 0;
4275}
4276
/**
 * si_vm_packet3_gfx_check - validate a PACKET3 on the gfx ring of a VM IB
 * @rdev: radeon_device pointer
 * @ib: the indirect buffer dwords
 * @pkt: decoded packet header
 *
 * Whitelists opcodes that are always safe, and for opcodes that can
 * write registers validates each target register via si_vm_reg_valid().
 * Returns 0 if the packet is allowed, -EINVAL otherwise.
 */
static int si_vm_packet3_gfx_check(struct radeon_device *rdev,
				   u32 *ib, struct radeon_cs_packet *pkt)
{
	int r;
	u32 idx = pkt->idx + 1;
	u32 idx_value = ib[idx];
	u32 start_reg, end_reg, reg, i;

	switch (pkt->opcode) {
	/* opcodes with no register side effects to validate */
	case PACKET3_NOP:
	case PACKET3_SET_BASE:
	case PACKET3_CLEAR_STATE:
	case PACKET3_INDEX_BUFFER_SIZE:
	case PACKET3_DISPATCH_DIRECT:
	case PACKET3_DISPATCH_INDIRECT:
	case PACKET3_ALLOC_GDS:
	case PACKET3_WRITE_GDS_RAM:
	case PACKET3_ATOMIC_GDS:
	case PACKET3_ATOMIC:
	case PACKET3_OCCLUSION_QUERY:
	case PACKET3_SET_PREDICATION:
	case PACKET3_COND_EXEC:
	case PACKET3_PRED_EXEC:
	case PACKET3_DRAW_INDIRECT:
	case PACKET3_DRAW_INDEX_INDIRECT:
	case PACKET3_INDEX_BASE:
	case PACKET3_DRAW_INDEX_2:
	case PACKET3_CONTEXT_CONTROL:
	case PACKET3_INDEX_TYPE:
	case PACKET3_DRAW_INDIRECT_MULTI:
	case PACKET3_DRAW_INDEX_AUTO:
	case PACKET3_DRAW_INDEX_IMMD:
	case PACKET3_NUM_INSTANCES:
	case PACKET3_DRAW_INDEX_MULTI_AUTO:
	case PACKET3_STRMOUT_BUFFER_UPDATE:
	case PACKET3_DRAW_INDEX_OFFSET_2:
	case PACKET3_DRAW_INDEX_MULTI_ELEMENT:
	case PACKET3_DRAW_INDEX_INDIRECT_MULTI:
	case PACKET3_MPEG_INDEX:
	case PACKET3_WAIT_REG_MEM:
	case PACKET3_MEM_WRITE:
	case PACKET3_PFP_SYNC_ME:
	case PACKET3_SURFACE_SYNC:
	case PACKET3_EVENT_WRITE:
	case PACKET3_EVENT_WRITE_EOP:
	case PACKET3_EVENT_WRITE_EOS:
	case PACKET3_SET_CONTEXT_REG:
	case PACKET3_SET_CONTEXT_REG_INDIRECT:
	case PACKET3_SET_SH_REG:
	case PACKET3_SET_SH_REG_OFFSET:
	case PACKET3_INCREMENT_DE_COUNTER:
	case PACKET3_WAIT_ON_CE_COUNTER:
	case PACKET3_WAIT_ON_AVAIL_BUFFER:
	case PACKET3_ME_WRITE:
		break;
	case PACKET3_COPY_DATA:
		/* dest-sel 0 means a register destination */
		if ((idx_value & 0xf00) == 0) {
			reg = ib[idx + 3] * 4;
			if (!si_vm_reg_valid(reg))
				return -EINVAL;
		}
		break;
	case PACKET3_WRITE_DATA:
		if ((idx_value & 0xf00) == 0) {
			start_reg = ib[idx + 1] * 4;
			/* bit 16: single register written repeatedly */
			if (idx_value & 0x10000) {
				if (!si_vm_reg_valid(start_reg))
					return -EINVAL;
			} else {
				for (i = 0; i < (pkt->count - 2); i++) {
					reg = start_reg + (4 * i);
					if (!si_vm_reg_valid(reg))
						return -EINVAL;
				}
			}
		}
		break;
	case PACKET3_COND_WRITE:
		if (idx_value & 0x100) {
			reg = ib[idx + 5] * 4;
			if (!si_vm_reg_valid(reg))
				return -EINVAL;
		}
		break;
	case PACKET3_COPY_DW:
		if (idx_value & 0x2) {
			reg = ib[idx + 3] * 4;
			if (!si_vm_reg_valid(reg))
				return -EINVAL;
		}
		break;
	case PACKET3_SET_CONFIG_REG:
		/* range-check the whole run against the config reg window */
		start_reg = (idx_value << 2) + PACKET3_SET_CONFIG_REG_START;
		end_reg = 4 * pkt->count + start_reg - 4;
		if ((start_reg < PACKET3_SET_CONFIG_REG_START) ||
		    (start_reg >= PACKET3_SET_CONFIG_REG_END) ||
		    (end_reg >= PACKET3_SET_CONFIG_REG_END)) {
			DRM_ERROR("bad PACKET3_SET_CONFIG_REG\n");
			return -EINVAL;
		}
		for (i = 0; i < pkt->count; i++) {
			reg = start_reg + (4 * i);
			if (!si_vm_reg_valid(reg))
				return -EINVAL;
		}
		break;
	case PACKET3_CP_DMA:
		r = si_vm_packet3_cp_dma_check(ib, idx);
		if (r)
			return r;
		break;
	default:
		DRM_ERROR("Invalid GFX packet3: 0x%x\n", pkt->opcode);
		return -EINVAL;
	}
	return 0;
}
4394
/**
 * si_vm_packet3_compute_check - validate a PACKET3 on a compute ring
 * @rdev: radeon_device pointer
 * @ib: the indirect buffer dwords
 * @pkt: decoded packet header
 *
 * Compute-ring counterpart of si_vm_packet3_gfx_check(): whitelists
 * safe opcodes (no draw/index packets here) and validates register
 * writes via si_vm_reg_valid().  Returns 0 or -EINVAL.
 */
static int si_vm_packet3_compute_check(struct radeon_device *rdev,
				       u32 *ib, struct radeon_cs_packet *pkt)
{
	int r;
	u32 idx = pkt->idx + 1;
	u32 idx_value = ib[idx];
	u32 start_reg, reg, i;

	switch (pkt->opcode) {
	/* opcodes with no register side effects to validate */
	case PACKET3_NOP:
	case PACKET3_SET_BASE:
	case PACKET3_CLEAR_STATE:
	case PACKET3_DISPATCH_DIRECT:
	case PACKET3_DISPATCH_INDIRECT:
	case PACKET3_ALLOC_GDS:
	case PACKET3_WRITE_GDS_RAM:
	case PACKET3_ATOMIC_GDS:
	case PACKET3_ATOMIC:
	case PACKET3_OCCLUSION_QUERY:
	case PACKET3_SET_PREDICATION:
	case PACKET3_COND_EXEC:
	case PACKET3_PRED_EXEC:
	case PACKET3_CONTEXT_CONTROL:
	case PACKET3_STRMOUT_BUFFER_UPDATE:
	case PACKET3_WAIT_REG_MEM:
	case PACKET3_MEM_WRITE:
	case PACKET3_PFP_SYNC_ME:
	case PACKET3_SURFACE_SYNC:
	case PACKET3_EVENT_WRITE:
	case PACKET3_EVENT_WRITE_EOP:
	case PACKET3_EVENT_WRITE_EOS:
	case PACKET3_SET_CONTEXT_REG:
	case PACKET3_SET_CONTEXT_REG_INDIRECT:
	case PACKET3_SET_SH_REG:
	case PACKET3_SET_SH_REG_OFFSET:
	case PACKET3_INCREMENT_DE_COUNTER:
	case PACKET3_WAIT_ON_CE_COUNTER:
	case PACKET3_WAIT_ON_AVAIL_BUFFER:
	case PACKET3_ME_WRITE:
		break;
	case PACKET3_COPY_DATA:
		/* dest-sel 0 means a register destination */
		if ((idx_value & 0xf00) == 0) {
			reg = ib[idx + 3] * 4;
			if (!si_vm_reg_valid(reg))
				return -EINVAL;
		}
		break;
	case PACKET3_WRITE_DATA:
		if ((idx_value & 0xf00) == 0) {
			start_reg = ib[idx + 1] * 4;
			/* bit 16: single register written repeatedly */
			if (idx_value & 0x10000) {
				if (!si_vm_reg_valid(start_reg))
					return -EINVAL;
			} else {
				for (i = 0; i < (pkt->count - 2); i++) {
					reg = start_reg + (4 * i);
					if (!si_vm_reg_valid(reg))
						return -EINVAL;
				}
			}
		}
		break;
	case PACKET3_COND_WRITE:
		if (idx_value & 0x100) {
			reg = ib[idx + 5] * 4;
			if (!si_vm_reg_valid(reg))
				return -EINVAL;
		}
		break;
	case PACKET3_COPY_DW:
		if (idx_value & 0x2) {
			reg = ib[idx + 3] * 4;
			if (!si_vm_reg_valid(reg))
				return -EINVAL;
		}
		break;
	case PACKET3_CP_DMA:
		r = si_vm_packet3_cp_dma_check(ib, idx);
		if (r)
			return r;
		break;
	default:
		DRM_ERROR("Invalid Compute packet3: 0x%x\n", pkt->opcode);
		return -EINVAL;
	}
	return 0;
}
4482
/**
 * si_ib_parse - validate an indirect buffer from a VM client
 * @rdev: radeon_device pointer
 * @ib: the indirect buffer to parse
 *
 * Walks the packet stream, dispatching each PACKET3 to the checker for
 * the ring it targets (CE, gfx, or compute).  Type-0 packets and
 * unknown packet types are rejected.  Returns 0 if the whole IB is
 * valid, a negative error code on the first bad packet.
 */
int si_ib_parse(struct radeon_device *rdev, struct radeon_ib *ib)
{
	int ret = 0;
	u32 idx = 0;
	struct radeon_cs_packet pkt;

	do {
		pkt.idx = idx;
		pkt.type = RADEON_CP_PACKET_GET_TYPE(ib->ptr[idx]);
		pkt.count = RADEON_CP_PACKET_GET_COUNT(ib->ptr[idx]);
		pkt.one_reg_wr = 0;
		switch (pkt.type) {
		case RADEON_PACKET_TYPE0:
			/* raw register writes are never allowed from a VM IB */
			dev_err(rdev->dev, "Packet0 not allowed!\n");
			ret = -EINVAL;
			break;
		case RADEON_PACKET_TYPE2:
			/* type 2 is a 1-dword filler packet */
			idx += 1;
			break;
		case RADEON_PACKET_TYPE3:
			pkt.opcode = RADEON_CP_PACKET3_GET_OPCODE(ib->ptr[idx]);
			if (ib->is_const_ib)
				ret = si_vm_packet3_ce_check(rdev, ib->ptr, &pkt);
			else {
				switch (ib->ring) {
				case RADEON_RING_TYPE_GFX_INDEX:
					ret = si_vm_packet3_gfx_check(rdev, ib->ptr, &pkt);
					break;
				case CAYMAN_RING_TYPE_CP1_INDEX:
				case CAYMAN_RING_TYPE_CP2_INDEX:
					ret = si_vm_packet3_compute_check(rdev, ib->ptr, &pkt);
					break;
				default:
					dev_err(rdev->dev, "Non-PM4 ring %d !\n", ib->ring);
					ret = -EINVAL;
					break;
				}
			}
			/* header + count field + payload */
			idx += pkt.count + 2;
			break;
		default:
			dev_err(rdev->dev, "Unknown packet type %d !\n", pkt.type);
			ret = -EINVAL;
			break;
		}
		if (ret)
			break;
	} while (idx < ib->length_dw);

	return ret;
}
4534
4535/*
4536 * vm
4537 */
/**
 * si_vm_init - initialize the VM manager parameters for SI
 * @rdev: radeon_device pointer
 *
 * SI supports 16 hardware VM contexts and needs no VRAM page offset.
 * Returns 0 (cannot fail).
 */
int si_vm_init(struct radeon_device *rdev)
{
	/* number of VMs */
	rdev->vm_manager.nvm = 16;
	/* base offset of vram pages */
	rdev->vm_manager.vram_base_offset = 0;

	return 0;
}
4547
/**
 * si_vm_fini - tear down the VM manager
 * @rdev: radeon_device pointer
 *
 * Nothing to do on SI; provided to satisfy the asic interface.
 */
void si_vm_fini(struct radeon_device *rdev)
{
}
4551
4552/**
4553 * si_vm_decode_fault - print human readable fault info
4554 *
4555 * @rdev: radeon_device pointer
4556 * @status: VM_CONTEXT1_PROTECTION_FAULT_STATUS register value
4557 * @addr: VM_CONTEXT1_PROTECTION_FAULT_ADDR register value
4558 *
4559 * Print human readable fault information (SI).
4560 */
4561static void si_vm_decode_fault(struct radeon_device *rdev,
4562 u32 status, u32 addr)
4563{
4564 u32 mc_id = (status & MEMORY_CLIENT_ID_MASK) >> MEMORY_CLIENT_ID_SHIFT;
4565 u32 vmid = (status & FAULT_VMID_MASK) >> FAULT_VMID_SHIFT;
4566 u32 protections = (status & PROTECTIONS_MASK) >> PROTECTIONS_SHIFT;
4567 const char *block;
4568
4569 if (rdev->family == CHIP_TAHITI) {
4570 switch (mc_id) {
4571 case 160:
4572 case 144:
4573 case 96:
4574 case 80:
4575 case 224:
4576 case 208:
4577 case 32:
4578 case 16:
4579 block = "CB";
4580 break;
4581 case 161:
4582 case 145:
4583 case 97:
4584 case 81:
4585 case 225:
4586 case 209:
4587 case 33:
4588 case 17:
4589 block = "CB_FMASK";
4590 break;
4591 case 162:
4592 case 146:
4593 case 98:
4594 case 82:
4595 case 226:
4596 case 210:
4597 case 34:
4598 case 18:
4599 block = "CB_CMASK";
4600 break;
4601 case 163:
4602 case 147:
4603 case 99:
4604 case 83:
4605 case 227:
4606 case 211:
4607 case 35:
4608 case 19:
4609 block = "CB_IMMED";
4610 break;
4611 case 164:
4612 case 148:
4613 case 100:
4614 case 84:
4615 case 228:
4616 case 212:
4617 case 36:
4618 case 20:
4619 block = "DB";
4620 break;
4621 case 165:
4622 case 149:
4623 case 101:
4624 case 85:
4625 case 229:
4626 case 213:
4627 case 37:
4628 case 21:
4629 block = "DB_HTILE";
4630 break;
4631 case 167:
4632 case 151:
4633 case 103:
4634 case 87:
4635 case 231:
4636 case 215:
4637 case 39:
4638 case 23:
4639 block = "DB_STEN";
4640 break;
4641 case 72:
4642 case 68:
4643 case 64:
4644 case 8:
4645 case 4:
4646 case 0:
4647 case 136:
4648 case 132:
4649 case 128:
4650 case 200:
4651 case 196:
4652 case 192:
4653 block = "TC";
4654 break;
4655 case 112:
4656 case 48:
4657 block = "CP";
4658 break;
4659 case 49:
4660 case 177:
4661 case 50:
4662 case 178:
4663 block = "SH";
4664 break;
4665 case 53:
4666 case 190:
4667 block = "VGT";
4668 break;
4669 case 117:
4670 block = "IH";
4671 break;
4672 case 51:
4673 case 115:
4674 block = "RLC";
4675 break;
4676 case 119:
4677 case 183:
4678 block = "DMA0";
4679 break;
4680 case 61:
4681 block = "DMA1";
4682 break;
4683 case 248:
4684 case 120:
4685 block = "HDP";
4686 break;
4687 default:
4688 block = "unknown";
4689 break;
4690 }
4691 } else {
4692 switch (mc_id) {
4693 case 32:
4694 case 16:
4695 case 96:
4696 case 80:
4697 case 160:
4698 case 144:
4699 case 224:
4700 case 208:
4701 block = "CB";
4702 break;
4703 case 33:
4704 case 17:
4705 case 97:
4706 case 81:
4707 case 161:
4708 case 145:
4709 case 225:
4710 case 209:
4711 block = "CB_FMASK";
4712 break;
4713 case 34:
4714 case 18:
4715 case 98:
4716 case 82:
4717 case 162:
4718 case 146:
4719 case 226:
4720 case 210:
4721 block = "CB_CMASK";
4722 break;
4723 case 35:
4724 case 19:
4725 case 99:
4726 case 83:
4727 case 163:
4728 case 147:
4729 case 227:
4730 case 211:
4731 block = "CB_IMMED";
4732 break;
4733 case 36:
4734 case 20:
4735 case 100:
4736 case 84:
4737 case 164:
4738 case 148:
4739 case 228:
4740 case 212:
4741 block = "DB";
4742 break;
4743 case 37:
4744 case 21:
4745 case 101:
4746 case 85:
4747 case 165:
4748 case 149:
4749 case 229:
4750 case 213:
4751 block = "DB_HTILE";
4752 break;
4753 case 39:
4754 case 23:
4755 case 103:
4756 case 87:
4757 case 167:
4758 case 151:
4759 case 231:
4760 case 215:
4761 block = "DB_STEN";
4762 break;
4763 case 72:
4764 case 68:
4765 case 8:
4766 case 4:
4767 case 136:
4768 case 132:
4769 case 200:
4770 case 196:
4771 block = "TC";
4772 break;
4773 case 112:
4774 case 48:
4775 block = "CP";
4776 break;
4777 case 49:
4778 case 177:
4779 case 50:
4780 case 178:
4781 block = "SH";
4782 break;
4783 case 53:
4784 block = "VGT";
4785 break;
4786 case 117:
4787 block = "IH";
4788 break;
4789 case 51:
4790 case 115:
4791 block = "RLC";
4792 break;
4793 case 119:
4794 case 183:
4795 block = "DMA0";
4796 break;
4797 case 61:
4798 block = "DMA1";
4799 break;
4800 case 248:
4801 case 120:
4802 block = "HDP";
4803 break;
4804 default:
4805 block = "unknown";
4806 break;
4807 }
4808 }
4809
4810 printk("VM fault (0x%02x, vmid %d) at page %u, %s from %s (%d)\n",
4811 protections, vmid, addr,
4812 (status & MEMORY_CLIENT_RW_MASK) ? "write" : "read",
4813 block, mc_id);
4814}
4815
/**
 * si_vm_flush - emit a VM flush on a CP ring
 * @rdev: radeon_device pointer
 * @ridx: ring index to emit on
 * @vm: vm to flush (no-op when NULL)
 *
 * Writes the VM's new page directory base, flushes the HDP cache and
 * invalidates the VM's TLB entry, all via WRITE_DATA packets, then
 * syncs PFP to ME.
 */
void si_vm_flush(struct radeon_device *rdev, int ridx, struct radeon_vm *vm)
{
	struct radeon_ring *ring = &rdev->ring[ridx];

	if (vm == NULL)
		return;

	/* write new base address */
	radeon_ring_write(ring, PACKET3(PACKET3_WRITE_DATA, 3));
	radeon_ring_write(ring, (WRITE_DATA_ENGINE_SEL(0) |
				 WRITE_DATA_DST_SEL(0)));

	/* contexts 0-7 and 8-15 live in two separate register banks */
	if (vm->id < 8) {
		radeon_ring_write(ring,
				  (VM_CONTEXT0_PAGE_TABLE_BASE_ADDR + (vm->id << 2)) >> 2);
	} else {
		radeon_ring_write(ring,
				  (VM_CONTEXT8_PAGE_TABLE_BASE_ADDR + ((vm->id - 8) << 2)) >> 2);
	}
	radeon_ring_write(ring, 0);
	radeon_ring_write(ring, vm->pd_gpu_addr >> 12);

	/* flush hdp cache */
	radeon_ring_write(ring, PACKET3(PACKET3_WRITE_DATA, 3));
	radeon_ring_write(ring, (WRITE_DATA_ENGINE_SEL(0) |
				 WRITE_DATA_DST_SEL(0)));
	radeon_ring_write(ring, HDP_MEM_COHERENCY_FLUSH_CNTL >> 2);
	radeon_ring_write(ring, 0);
	radeon_ring_write(ring, 0x1);

	/* bits 0-15 are the VM contexts0-15 */
	radeon_ring_write(ring, PACKET3(PACKET3_WRITE_DATA, 3));
	radeon_ring_write(ring, (WRITE_DATA_ENGINE_SEL(0) |
				 WRITE_DATA_DST_SEL(0)));
	radeon_ring_write(ring, VM_INVALIDATE_REQUEST >> 2);
	radeon_ring_write(ring, 0);
	radeon_ring_write(ring, 1 << vm->id);

	/* sync PFP to ME, otherwise we might get invalid PFP reads */
	radeon_ring_write(ring, PACKET3(PACKET3_PFP_SYNC_ME, 0));
	radeon_ring_write(ring, 0x0);
}
4858
4859/*
4860 * Power and clock gating
4861 */
4862static void si_wait_for_rlc_serdes(struct radeon_device *rdev)
4863{
4864 int i;
4865
4866 for (i = 0; i < rdev->usec_timeout; i++) {
4867 if (RREG32(RLC_SERDES_MASTER_BUSY_0) == 0)
4868 break;
4869 udelay(1);
4870 }
4871
4872 for (i = 0; i < rdev->usec_timeout; i++) {
4873 if (RREG32(RLC_SERDES_MASTER_BUSY_1) == 0)
4874 break;
4875 udelay(1);
4876 }
4877}
4878
/**
 * si_enable_gui_idle_interrupt - toggle the gfx context busy/empty interrupts
 * @rdev: radeon_device pointer
 * @enable: enable or disable the interrupts
 *
 * When disabling, also waits (bounded) for the RLC to report the gfx
 * block clocked but powered (GFX_CLOCK_STATUS | GFX_POWER_STATUS).
 */
static void si_enable_gui_idle_interrupt(struct radeon_device *rdev,
					 bool enable)
{
	u32 tmp = RREG32(CP_INT_CNTL_RING0);
	u32 mask;
	int i;

	if (enable)
		tmp |= (CNTX_BUSY_INT_ENABLE | CNTX_EMPTY_INT_ENABLE);
	else
		tmp &= ~(CNTX_BUSY_INT_ENABLE | CNTX_EMPTY_INT_ENABLE);
	WREG32(CP_INT_CNTL_RING0, tmp);

	if (!enable) {
		/* read a gfx register */
		tmp = RREG32(DB_DEPTH_INFO);

		mask = RLC_BUSY_STATUS | GFX_POWER_STATUS | GFX_CLOCK_STATUS | GFX_LS_STATUS;
		for (i = 0; i < rdev->usec_timeout; i++) {
			if ((RREG32(RLC_STAT) & mask) == (GFX_CLOCK_STATUS | GFX_POWER_STATUS))
				break;
			udelay(1);
		}
	}
}
4904
/**
 * si_set_uvd_dcm - configure UVD dynamic clock mode
 * @rdev: radeon_device pointer
 * @sw_mode: true for software-controlled gating, false for hardware
 *
 * Programs UVD_CGC_CTRL (and the indirect UVD_CGC_CTRL2) for dynamic
 * clock gating with either software or hardware control of the upper
 * gating bits.
 */
static void si_set_uvd_dcm(struct radeon_device *rdev,
			   bool sw_mode)
{
	u32 tmp, tmp2;

	tmp = RREG32(UVD_CGC_CTRL);
	tmp &= ~(CLK_OD_MASK | CG_DT_MASK);
	tmp |= DCM | CG_DT(1) | CLK_OD(4);

	if (sw_mode) {
		/* clear the hw-gating bits, enable sw dynamic gating */
		tmp &= ~0x7ffff800;
		tmp2 = DYN_OR_EN | DYN_RR_EN | G_DIV_ID(7);
	} else {
		/* hand the upper gating bits to the hardware */
		tmp |= 0x7ffff800;
		tmp2 = 0;
	}

	WREG32(UVD_CGC_CTRL, tmp);
	WREG32_UVD_CTX(UVD_CGC_CTRL2, tmp2);
}
4925
/**
 * si_init_uvd_internal_cg - set up UVD internal clock gating
 * @rdev: radeon_device pointer
 *
 * hw_mode is hard-wired true, so this always selects hardware dynamic
 * clock mode; the else branch (disabling DCM) is intentionally kept as
 * dead code for easy experimentation.
 */
void si_init_uvd_internal_cg(struct radeon_device *rdev)
{
	bool hw_mode = true;

	if (hw_mode) {
		si_set_uvd_dcm(rdev, false);
	} else {
		u32 tmp = RREG32(UVD_CGC_CTRL);
		tmp &= ~DCM;
		WREG32(UVD_CGC_CTRL, tmp);
	}
}
4938
4939static u32 si_halt_rlc(struct radeon_device *rdev)
4940{
4941 u32 data, orig;
4942
4943 orig = data = RREG32(RLC_CNTL);
4944
4945 if (data & RLC_ENABLE) {
4946 data &= ~RLC_ENABLE;
4947 WREG32(RLC_CNTL, data);
4948
4949 si_wait_for_rlc_serdes(rdev);
4950 }
4951
4952 return orig;
4953}
4954
4955static void si_update_rlc(struct radeon_device *rdev, u32 rlc)
4956{
4957 u32 tmp;
4958
4959 tmp = RREG32(RLC_CNTL);
4960 if (tmp != rlc)
4961 WREG32(RLC_CNTL, rlc);
4962}
4963
4964static void si_enable_dma_pg(struct radeon_device *rdev, bool enable)
4965{
4966 u32 data, orig;
4967
4968 orig = data = RREG32(DMA_PG);
4969 if (enable && (rdev->pg_flags & RADEON_PG_SUPPORT_SDMA))
4970 data |= PG_CNTL_ENABLE;
4971 else
4972 data &= ~PG_CNTL_ENABLE;
4973 if (orig != data)
4974 WREG32(DMA_PG, data);
4975}
4976
4977static void si_init_dma_pg(struct radeon_device *rdev)
4978{
4979 u32 tmp;
4980
4981 WREG32(DMA_PGFSM_WRITE, 0x00002000);
4982 WREG32(DMA_PGFSM_CONFIG, 0x100010ff);
4983
4984 for (tmp = 0; tmp < 5; tmp++)
4985 WREG32(DMA_PGFSM_WRITE, 0);
4986}
4987
/**
 * si_enable_gfx_cgpg - toggle gfx clock/power gating
 * @rdev: radeon_device pointer
 * @enable: requested state (honored only when GFX PG is supported)
 *
 * Enables or disables RLC-driven automatic gfx power gating.
 */
static void si_enable_gfx_cgpg(struct radeon_device *rdev,
			       bool enable)
{
	u32 tmp;

	if (enable && (rdev->pg_flags & RADEON_PG_SUPPORT_GFX_PG)) {
		/* power up/down delay thresholds */
		tmp = RLC_PUD(0x10) | RLC_PDD(0x10) | RLC_TTPD(0x10) | RLC_MSD(0x10);
		WREG32(RLC_TTOP_D, tmp);

		tmp = RREG32(RLC_PG_CNTL);
		tmp |= GFX_PG_ENABLE;
		WREG32(RLC_PG_CNTL, tmp);

		tmp = RREG32(RLC_AUTO_PG_CTRL);
		tmp |= AUTO_PG_EN;
		WREG32(RLC_AUTO_PG_CTRL, tmp);
	} else {
		tmp = RREG32(RLC_AUTO_PG_CTRL);
		tmp &= ~AUTO_PG_EN;
		WREG32(RLC_AUTO_PG_CTRL, tmp);

		/* NOTE(review): result unused - presumably a dummy read to
		 * wake the gfx block; confirm before removing. */
		tmp = RREG32(DB_RENDER_CONTROL);
	}
}
5012
/**
 * si_init_gfx_cgpg - set up gfx power gating state
 * @rdev: radeon_device pointer
 *
 * Points the RLC at the save/restore and clear-state buffers and
 * programs the auto power gating idle threshold.
 */
static void si_init_gfx_cgpg(struct radeon_device *rdev)
{
	u32 tmp;

	WREG32(RLC_SAVE_AND_RESTORE_BASE, rdev->rlc.save_restore_gpu_addr >> 8);

	tmp = RREG32(RLC_PG_CNTL);
	tmp |= GFX_PG_SRC;
	WREG32(RLC_PG_CNTL, tmp);

	WREG32(RLC_CLEAR_STATE_RESTORE_BASE, rdev->rlc.clear_state_gpu_addr >> 8);

	tmp = RREG32(RLC_AUTO_PG_CTRL);

	/* idle threshold before the RLC gates the gfx block */
	tmp &= ~GRBM_REG_SGIT_MASK;
	tmp |= GRBM_REG_SGIT(0x700);
	tmp &= ~PG_AFTER_GRBM_REG_ST_MASK;
	WREG32(RLC_AUTO_PG_CTRL, tmp);
}
5032
/**
 * si_get_cu_active_bitmap - read the active CU mask of one shader array
 * @rdev: radeon_device pointer
 * @se: shader engine index
 * @sh: shader array index within the engine
 *
 * Selects the SE/SH, combines the fuse and user CU-disable registers
 * (disabled CUs live in the upper 16 bits), and returns a bitmap with
 * bit k set for each active CU, limited to max_cu_per_sh bits.
 */
static u32 si_get_cu_active_bitmap(struct radeon_device *rdev, u32 se, u32 sh)
{
	u32 mask = 0, tmp, tmp1;
	int i;

	si_select_se_sh(rdev, se, sh);
	tmp = RREG32(CC_GC_SHADER_ARRAY_CONFIG);
	tmp1 = RREG32(GC_USER_SHADER_ARRAY_CONFIG);
	si_select_se_sh(rdev, 0xffffffff, 0xffffffff);

	tmp &= 0xffff0000;

	/* merge the two disable masks and shift them down to bit 0 */
	tmp |= tmp1;
	tmp >>= 16;

	/* build a mask of max_cu_per_sh low bits */
	for (i = 0; i < rdev->config.si.max_cu_per_sh; i ++) {
		mask <<= 1;
		mask |= 1;
	}

	/* invert: disabled bits -> active bits */
	return (~tmp) & mask;
}
5055
5056static void si_init_ao_cu_mask(struct radeon_device *rdev)
5057{
5058 u32 i, j, k, active_cu_number = 0;
5059 u32 mask, counter, cu_bitmap;
5060 u32 tmp = 0;
5061
5062 for (i = 0; i < rdev->config.si.max_shader_engines; i++) {
5063 for (j = 0; j < rdev->config.si.max_sh_per_se; j++) {
5064 mask = 1;
5065 cu_bitmap = 0;
5066 counter = 0;
5067 for (k = 0; k < rdev->config.si.max_cu_per_sh; k++) {
5068 if (si_get_cu_active_bitmap(rdev, i, j) & mask) {
5069 if (counter < 2)
5070 cu_bitmap |= mask;
5071 counter++;
5072 }
5073 mask <<= 1;
5074 }
5075
5076 active_cu_number += counter;
5077 tmp |= (cu_bitmap << (i * 16 + j * 8));
5078 }
5079 }
5080
5081 WREG32(RLC_PG_AO_CU_MASK, tmp);
5082
5083 tmp = RREG32(RLC_MAX_PG_CU);
5084 tmp &= ~MAX_PU_CU_MASK;
5085 tmp |= MAX_PU_CU(active_cu_number);
5086 WREG32(RLC_MAX_PG_CU, tmp);
5087}
5088
/*
 * si_enable_cgcg - enable/disable coarse-grain clock gating for gfx
 * @rdev: radeon device
 * @enable: enable or disable clock gating
 *
 * On enable (when supported in cg_flags): halts the RLC, broadcasts a
 * serdes write to all SE/SH instances, waits for the serdes to go idle,
 * restarts the RLC, and sets CGCG_EN | CGLS_EN.  On disable: masks the
 * GUI-idle interrupt and clears both enable bits.  The final write only
 * happens if the control value actually changed.
 */
static void si_enable_cgcg(struct radeon_device *rdev,
			   bool enable)
{
	u32 data, orig, tmp;

	orig = data = RREG32(RLC_CGCG_CGLS_CTRL);

	if (enable && (rdev->cg_flags & RADEON_CG_SUPPORT_GFX_CGCG)) {
		si_enable_gui_idle_interrupt(rdev, true);

		WREG32(RLC_GCPM_GENERAL_3, 0x00000080);

		/* tmp holds the saved RLC state to restore below */
		tmp = si_halt_rlc(rdev);

		/* broadcast the serdes command to all SE/SH */
		WREG32(RLC_SERDES_WR_MASTER_MASK_0, 0xffffffff);
		WREG32(RLC_SERDES_WR_MASTER_MASK_1, 0xffffffff);
		WREG32(RLC_SERDES_WR_CTRL, 0x00b000ff);

		si_wait_for_rlc_serdes(rdev);

		si_update_rlc(rdev, tmp);

		WREG32(RLC_SERDES_WR_CTRL, 0x007000ff);

		data |= CGCG_EN | CGLS_EN;
	} else {
		si_enable_gui_idle_interrupt(rdev, false);

		/* repeated reads — NOTE(review): likely a flush/settle
		 * sequence before disabling; confirm */
		RREG32(CB_CGTT_SCLK_CTRL);
		RREG32(CB_CGTT_SCLK_CTRL);
		RREG32(CB_CGTT_SCLK_CTRL);
		RREG32(CB_CGTT_SCLK_CTRL);

		data &= ~(CGCG_EN | CGLS_EN);
	}

	if (orig != data)
		WREG32(RLC_CGCG_CGLS_CTRL, data);
}
5128
/*
 * si_enable_mgcg - enable/disable medium-grain clock gating for gfx
 * @rdev: radeon device
 * @enable: enable or disable clock gating
 *
 * On enable: programs CGTS_SM_CTRL_REG with the gating configuration,
 * optionally enables CP memory light sleep, clears the MGCG override
 * bits, then halts the RLC and issues a broadcast serdes write before
 * restarting it.  On disable: sets the override bits, forces CP memory
 * light sleep off, sets LS_OVERRIDE | OVERRIDE, and issues the
 * corresponding serdes disable command.  Registers are only written
 * when the value actually changes.
 */
static void si_enable_mgcg(struct radeon_device *rdev,
			   bool enable)
{
	u32 data, orig, tmp = 0;

	if (enable && (rdev->cg_flags & RADEON_CG_SUPPORT_GFX_MGCG)) {
		orig = data = RREG32(CGTS_SM_CTRL_REG);
		/* magic gating configuration — NOTE(review): field
		 * breakdown unconfirmed, value taken as-is */
		data = 0x96940200;
		if (orig != data)
			WREG32(CGTS_SM_CTRL_REG, data);

		if (rdev->cg_flags & RADEON_CG_SUPPORT_GFX_CP_LS) {
			orig = data = RREG32(CP_MEM_SLP_CNTL);
			data |= CP_MEM_LS_EN;
			if (orig != data)
				WREG32(CP_MEM_SLP_CNTL, data);
		}

		/* clear the low 6 override bits to allow gating */
		orig = data = RREG32(RLC_CGTT_MGCG_OVERRIDE);
		data &= 0xffffffc0;
		if (orig != data)
			WREG32(RLC_CGTT_MGCG_OVERRIDE, data);

		tmp = si_halt_rlc(rdev);

		WREG32(RLC_SERDES_WR_MASTER_MASK_0, 0xffffffff);
		WREG32(RLC_SERDES_WR_MASTER_MASK_1, 0xffffffff);
		WREG32(RLC_SERDES_WR_CTRL, 0x00d000ff);

		si_update_rlc(rdev, tmp);
	} else {
		/* set override bits to block gating */
		orig = data = RREG32(RLC_CGTT_MGCG_OVERRIDE);
		data |= 0x00000003;
		if (orig != data)
			WREG32(RLC_CGTT_MGCG_OVERRIDE, data);

		data = RREG32(CP_MEM_SLP_CNTL);
		if (data & CP_MEM_LS_EN) {
			data &= ~CP_MEM_LS_EN;
			WREG32(CP_MEM_SLP_CNTL, data);
		}
		orig = data = RREG32(CGTS_SM_CTRL_REG);
		data |= LS_OVERRIDE | OVERRIDE;
		if (orig != data)
			WREG32(CGTS_SM_CTRL_REG, data);

		tmp = si_halt_rlc(rdev);

		WREG32(RLC_SERDES_WR_MASTER_MASK_0, 0xffffffff);
		WREG32(RLC_SERDES_WR_MASTER_MASK_1, 0xffffffff);
		WREG32(RLC_SERDES_WR_CTRL, 0x00e000ff);

		si_update_rlc(rdev, tmp);
	}
}
5184
/*
 * si_enable_uvd_mgcg - enable/disable medium-grain clock gating for UVD
 * @rdev: radeon device
 * @enable: enable or disable clock gating
 *
 * Toggles the low 14 UVD CGC memory-gating bits, the dynamic clock mode
 * (DCM) bit, and the SMC-indirect CGTT local gating masks.  The enable
 * and disable paths are exact mirrors of each other.
 */
static void si_enable_uvd_mgcg(struct radeon_device *rdev,
			       bool enable)
{
	u32 orig, data, tmp;

	if (enable && (rdev->cg_flags & RADEON_CG_SUPPORT_UVD_MGCG)) {
		tmp = RREG32_UVD_CTX(UVD_CGC_MEM_CTRL);
		tmp |= 0x3fff;
		WREG32_UVD_CTX(UVD_CGC_MEM_CTRL, tmp);

		orig = data = RREG32(UVD_CGC_CTRL);
		data |= DCM;
		if (orig != data)
			WREG32(UVD_CGC_CTRL, data);

		/* 0 = no override, allow gating on all local blocks */
		WREG32_SMC(SMC_CG_IND_START + CG_CGTT_LOCAL_0, 0);
		WREG32_SMC(SMC_CG_IND_START + CG_CGTT_LOCAL_1, 0);
	} else {
		tmp = RREG32_UVD_CTX(UVD_CGC_MEM_CTRL);
		tmp &= ~0x3fff;
		WREG32_UVD_CTX(UVD_CGC_MEM_CTRL, tmp);

		orig = data = RREG32(UVD_CGC_CTRL);
		data &= ~DCM;
		if (orig != data)
			WREG32(UVD_CGC_CTRL, data);

		/* all-ones = override everything, gating disabled */
		WREG32_SMC(SMC_CG_IND_START + CG_CGTT_LOCAL_0, 0xffffffff);
		WREG32_SMC(SMC_CG_IND_START + CG_CGTT_LOCAL_1, 0xffffffff);
	}
}
5216
/* Memory-controller/VM clock-gating control registers.  Each register
 * in this table carries the MC_LS_ENABLE and MC_CG_ENABLE bits that
 * si_enable_mc_ls()/si_enable_mc_mgcg() toggle as a group.
 */
static const u32 mc_cg_registers[] =
{
	MC_HUB_MISC_HUB_CG,
	MC_HUB_MISC_SIP_CG,
	MC_HUB_MISC_VM_CG,
	MC_XPB_CLK_GAT,
	ATC_MISC_CG,
	MC_CITF_MISC_WR_CG,
	MC_CITF_MISC_RD_CG,
	MC_CITF_MISC_VM_CG,
	VM_L2_CG,
};
5229
5230static void si_enable_mc_ls(struct radeon_device *rdev,
5231 bool enable)
5232{
5233 int i;
5234 u32 orig, data;
5235
5236 for (i = 0; i < ARRAY_SIZE(mc_cg_registers); i++) {
5237 orig = data = RREG32(mc_cg_registers[i]);
5238 if (enable && (rdev->cg_flags & RADEON_CG_SUPPORT_MC_LS))
5239 data |= MC_LS_ENABLE;
5240 else
5241 data &= ~MC_LS_ENABLE;
5242 if (data != orig)
5243 WREG32(mc_cg_registers[i], data);
5244 }
5245}
5246
5247static void si_enable_mc_mgcg(struct radeon_device *rdev,
5248 bool enable)
5249{
5250 int i;
5251 u32 orig, data;
5252
5253 for (i = 0; i < ARRAY_SIZE(mc_cg_registers); i++) {
5254 orig = data = RREG32(mc_cg_registers[i]);
5255 if (enable && (rdev->cg_flags & RADEON_CG_SUPPORT_MC_MGCG))
5256 data |= MC_CG_ENABLE;
5257 else
5258 data &= ~MC_CG_ENABLE;
5259 if (data != orig)
5260 WREG32(mc_cg_registers[i], data);
5261 }
5262}
5263
5264static void si_enable_dma_mgcg(struct radeon_device *rdev,
5265 bool enable)
5266{
5267 u32 orig, data, offset;
5268 int i;
5269
5270 if (enable && (rdev->cg_flags & RADEON_CG_SUPPORT_SDMA_MGCG)) {
5271 for (i = 0; i < 2; i++) {
5272 if (i == 0)
5273 offset = DMA0_REGISTER_OFFSET;
5274 else
5275 offset = DMA1_REGISTER_OFFSET;
5276 orig = data = RREG32(DMA_POWER_CNTL + offset);
5277 data &= ~MEM_POWER_OVERRIDE;
5278 if (data != orig)
5279 WREG32(DMA_POWER_CNTL + offset, data);
5280 WREG32(DMA_CLK_CTRL + offset, 0x00000100);
5281 }
5282 } else {
5283 for (i = 0; i < 2; i++) {
5284 if (i == 0)
5285 offset = DMA0_REGISTER_OFFSET;
5286 else
5287 offset = DMA1_REGISTER_OFFSET;
5288 orig = data = RREG32(DMA_POWER_CNTL + offset);
5289 data |= MEM_POWER_OVERRIDE;
5290 if (data != orig)
5291 WREG32(DMA_POWER_CNTL + offset, data);
5292
5293 orig = data = RREG32(DMA_CLK_CTRL + offset);
5294 data = 0xff000000;
5295 if (data != orig)
5296 WREG32(DMA_CLK_CTRL + offset, data);
5297 }
5298 }
5299}
5300
5301static void si_enable_bif_mgls(struct radeon_device *rdev,
5302 bool enable)
5303{
5304 u32 orig, data;
5305
5306 orig = data = RREG32_PCIE(PCIE_CNTL2);
5307
5308 if (enable && (rdev->cg_flags & RADEON_CG_SUPPORT_BIF_LS))
5309 data |= SLV_MEM_LS_EN | MST_MEM_LS_EN |
5310 REPLAY_MEM_LS_EN | SLV_MEM_AGGRESSIVE_LS_EN;
5311 else
5312 data &= ~(SLV_MEM_LS_EN | MST_MEM_LS_EN |
5313 REPLAY_MEM_LS_EN | SLV_MEM_AGGRESSIVE_LS_EN);
5314
5315 if (orig != data)
5316 WREG32_PCIE(PCIE_CNTL2, data);
5317}
5318
5319static void si_enable_hdp_mgcg(struct radeon_device *rdev,
5320 bool enable)
5321{
5322 u32 orig, data;
5323
5324 orig = data = RREG32(HDP_HOST_PATH_CNTL);
5325
5326 if (enable && (rdev->cg_flags & RADEON_CG_SUPPORT_HDP_MGCG))
5327 data &= ~CLOCK_GATING_DIS;
5328 else
5329 data |= CLOCK_GATING_DIS;
5330
5331 if (orig != data)
5332 WREG32(HDP_HOST_PATH_CNTL, data);
5333}
5334
5335static void si_enable_hdp_ls(struct radeon_device *rdev,
5336 bool enable)
5337{
5338 u32 orig, data;
5339
5340 orig = data = RREG32(HDP_MEM_POWER_LS);
5341
5342 if (enable && (rdev->cg_flags & RADEON_CG_SUPPORT_HDP_LS))
5343 data |= HDP_LS_ENABLE;
5344 else
5345 data &= ~HDP_LS_ENABLE;
5346
5347 if (orig != data)
5348 WREG32(HDP_MEM_POWER_LS, data);
5349}
5350
/*
 * si_update_cg - enable/disable clock gating for the selected blocks
 * @rdev: radeon device
 * @block: bitmask of RADEON_CG_BLOCK_* blocks to update
 * @enable: enable or disable gating
 *
 * For gfx, MGCG must be enabled before CGCG and disabled after it, and
 * the GUI-idle interrupt is masked around the transition.  UVD is only
 * touched when the ASIC actually has a UVD block.
 */
static void si_update_cg(struct radeon_device *rdev,
			 u32 block, bool enable)
{
	if (block & RADEON_CG_BLOCK_GFX) {
		si_enable_gui_idle_interrupt(rdev, false);
		/* order matters! */
		if (enable) {
			si_enable_mgcg(rdev, true);
			si_enable_cgcg(rdev, true);
		} else {
			si_enable_cgcg(rdev, false);
			si_enable_mgcg(rdev, false);
		}
		si_enable_gui_idle_interrupt(rdev, true);
	}

	if (block & RADEON_CG_BLOCK_MC) {
		si_enable_mc_mgcg(rdev, enable);
		si_enable_mc_ls(rdev, enable);
	}

	if (block & RADEON_CG_BLOCK_SDMA) {
		si_enable_dma_mgcg(rdev, enable);
	}

	if (block & RADEON_CG_BLOCK_BIF) {
		si_enable_bif_mgls(rdev, enable);
	}

	if (block & RADEON_CG_BLOCK_UVD) {
		if (rdev->has_uvd) {
			si_enable_uvd_mgcg(rdev, enable);
		}
	}

	if (block & RADEON_CG_BLOCK_HDP) {
		si_enable_hdp_mgcg(rdev, enable);
		si_enable_hdp_ls(rdev, enable);
	}
}
5391
/*
 * si_init_cg - enable clock gating on all supported blocks
 * @rdev: radeon device
 *
 * Turns on gating for gfx/MC/SDMA/BIF/HDP, and for UVD (plus its
 * internal gating setup) when the ASIC has a UVD block.
 */
static void si_init_cg(struct radeon_device *rdev)
{
	si_update_cg(rdev, (RADEON_CG_BLOCK_GFX |
			    RADEON_CG_BLOCK_MC |
			    RADEON_CG_BLOCK_SDMA |
			    RADEON_CG_BLOCK_BIF |
			    RADEON_CG_BLOCK_HDP), true);
	if (rdev->has_uvd) {
		si_update_cg(rdev, RADEON_CG_BLOCK_UVD, true);
		si_init_uvd_internal_cg(rdev);
	}
}
5404
/*
 * si_fini_cg - disable clock gating on all blocks
 * @rdev: radeon device
 *
 * Reverse order of si_init_cg(): UVD first, then the remaining blocks.
 */
static void si_fini_cg(struct radeon_device *rdev)
{
	if (rdev->has_uvd) {
		si_update_cg(rdev, RADEON_CG_BLOCK_UVD, false);
	}
	si_update_cg(rdev, (RADEON_CG_BLOCK_GFX |
			    RADEON_CG_BLOCK_MC |
			    RADEON_CG_BLOCK_SDMA |
			    RADEON_CG_BLOCK_BIF |
			    RADEON_CG_BLOCK_HDP), false);
}
5416
5417u32 si_get_csb_size(struct radeon_device *rdev)
5418{
5419 u32 count = 0;
5420 const struct cs_section_def *sect = NULL;
5421 const struct cs_extent_def *ext = NULL;
5422
5423 if (rdev->rlc.cs_data == NULL)
5424 return 0;
5425
5426 /* begin clear state */
5427 count += 2;
5428 /* context control state */
5429 count += 3;
5430
5431 for (sect = rdev->rlc.cs_data; sect->section != NULL; ++sect) {
5432 for (ext = sect->section; ext->extent != NULL; ++ext) {
5433 if (sect->id == SECT_CONTEXT)
5434 count += 2 + ext->reg_count;
5435 else
5436 return 0;
5437 }
5438 }
5439 /* pa_sc_raster_config */
5440 count += 3;
5441 /* end clear state */
5442 count += 2;
5443 /* clear state */
5444 count += 2;
5445
5446 return count;
5447}
5448
/*
 * si_get_csb_buffer - fill the clear-state indirect buffer
 * @rdev: radeon device
 * @buffer: destination buffer (little-endian dwords)
 *
 * Emits the packet stream counted by si_get_csb_size(): a preamble
 * begin, a context-control packet, every SECT_CONTEXT extent from
 * rlc.cs_data as SET_CONTEXT_REG packets, a per-family
 * PA_SC_RASTER_CONFIG value, a preamble end, and a CLEAR_STATE packet.
 * Silently returns on NULL cs_data/buffer or on an unexpected section.
 */
void si_get_csb_buffer(struct radeon_device *rdev, volatile u32 *buffer)
{
	u32 count = 0, i;
	const struct cs_section_def *sect = NULL;
	const struct cs_extent_def *ext = NULL;

	if (rdev->rlc.cs_data == NULL)
		return;
	if (buffer == NULL)
		return;

	buffer[count++] = cpu_to_le32(PACKET3(PACKET3_PREAMBLE_CNTL, 0));
	buffer[count++] = cpu_to_le32(PACKET3_PREAMBLE_BEGIN_CLEAR_STATE);

	buffer[count++] = cpu_to_le32(PACKET3(PACKET3_CONTEXT_CONTROL, 1));
	buffer[count++] = cpu_to_le32(0x80000000);
	buffer[count++] = cpu_to_le32(0x80000000);

	for (sect = rdev->rlc.cs_data; sect->section != NULL; ++sect) {
		for (ext = sect->section; ext->extent != NULL; ++ext) {
			if (sect->id == SECT_CONTEXT) {
				buffer[count++] =
					cpu_to_le32(PACKET3(PACKET3_SET_CONTEXT_REG, ext->reg_count));
				/* reg_index is absolute; packets take an
				 * offset from the context-reg base 0xa000 */
				buffer[count++] = cpu_to_le32(ext->reg_index - 0xa000);
				for (i = 0; i < ext->reg_count; i++)
					buffer[count++] = cpu_to_le32(ext->extent[i]);
			} else {
				return;
			}
		}
	}

	/* per-family raster configuration */
	buffer[count++] = cpu_to_le32(PACKET3(PACKET3_SET_CONTEXT_REG, 1));
	buffer[count++] = cpu_to_le32(PA_SC_RASTER_CONFIG - PACKET3_SET_CONTEXT_REG_START);
	switch (rdev->family) {
	case CHIP_TAHITI:
	case CHIP_PITCAIRN:
		buffer[count++] = cpu_to_le32(0x2a00126a);
		break;
	case CHIP_VERDE:
		buffer[count++] = cpu_to_le32(0x0000124a);
		break;
	case CHIP_OLAND:
		buffer[count++] = cpu_to_le32(0x00000082);
		break;
	case CHIP_HAINAN:
		buffer[count++] = cpu_to_le32(0x00000000);
		break;
	default:
		buffer[count++] = cpu_to_le32(0x00000000);
		break;
	}

	buffer[count++] = cpu_to_le32(PACKET3(PACKET3_PREAMBLE_CNTL, 0));
	buffer[count++] = cpu_to_le32(PACKET3_PREAMBLE_END_CLEAR_STATE);

	buffer[count++] = cpu_to_le32(PACKET3(PACKET3_CLEAR_STATE, 0));
	buffer[count++] = cpu_to_le32(0);
}
5508
/*
 * si_init_pg - initialize power gating
 * @rdev: radeon device
 *
 * When any PG feature is supported, initializes the DMA PGFSM and the
 * always-on CU mask, then either runs the full gfx CGPG init or just
 * programs the RLC save/restore and clear-state buffer addresses, and
 * finally enables DMA and gfx power gating.  Without PG support, only
 * the two RLC base addresses are programmed (256-byte aligned, >> 8).
 */
static void si_init_pg(struct radeon_device *rdev)
{
	if (rdev->pg_flags) {
		if (rdev->pg_flags & RADEON_PG_SUPPORT_SDMA) {
			si_init_dma_pg(rdev);
		}
		si_init_ao_cu_mask(rdev);
		if (rdev->pg_flags & RADEON_PG_SUPPORT_GFX_PG) {
			si_init_gfx_cgpg(rdev);
		} else {
			WREG32(RLC_SAVE_AND_RESTORE_BASE, rdev->rlc.save_restore_gpu_addr >> 8);
			WREG32(RLC_CLEAR_STATE_RESTORE_BASE, rdev->rlc.clear_state_gpu_addr >> 8);
		}
		si_enable_dma_pg(rdev, true);
		si_enable_gfx_cgpg(rdev, true);
	} else {
		WREG32(RLC_SAVE_AND_RESTORE_BASE, rdev->rlc.save_restore_gpu_addr >> 8);
		WREG32(RLC_CLEAR_STATE_RESTORE_BASE, rdev->rlc.clear_state_gpu_addr >> 8);
	}
}
5529
/*
 * si_fini_pg - tear down power gating
 * @rdev: radeon device
 *
 * Disables DMA and gfx power gating if any PG feature was enabled.
 */
static void si_fini_pg(struct radeon_device *rdev)
{
	if (rdev->pg_flags) {
		si_enable_dma_pg(rdev, false);
		si_enable_gfx_cgpg(rdev, false);
	}
}
5537
5538/*
5539 * RLC
5540 */
/*
 * si_rlc_reset - soft reset the RLC
 * @rdev: radeon device
 *
 * Pulses SOFT_RESET_RLC in GRBM_SOFT_RESET with 50us settle delays
 * around assertion and deassertion.
 */
void si_rlc_reset(struct radeon_device *rdev)
{
	u32 tmp = RREG32(GRBM_SOFT_RESET);

	tmp |= SOFT_RESET_RLC;
	WREG32(GRBM_SOFT_RESET, tmp);
	udelay(50);
	tmp &= ~SOFT_RESET_RLC;
	WREG32(GRBM_SOFT_RESET, tmp);
	udelay(50);
}
5552
/*
 * si_rlc_stop - halt the RLC
 * @rdev: radeon device
 *
 * Disables the RLC, masks the GUI-idle interrupt, and waits for the
 * serdes to go idle before returning.
 */
static void si_rlc_stop(struct radeon_device *rdev)
{
	WREG32(RLC_CNTL, 0);

	si_enable_gui_idle_interrupt(rdev, false);

	si_wait_for_rlc_serdes(rdev);
}
5561
/*
 * si_rlc_start - start the RLC
 * @rdev: radeon device
 *
 * Enables the RLC and the GUI-idle interrupt, then waits 50us for the
 * RLC to come up.
 */
static void si_rlc_start(struct radeon_device *rdev)
{
	WREG32(RLC_CNTL, RLC_ENABLE);

	si_enable_gui_idle_interrupt(rdev, true);

	udelay(50);
}
5570
5571static bool si_lbpw_supported(struct radeon_device *rdev)
5572{
5573 u32 tmp;
5574
5575 /* Enable LBPW only for DDR3 */
5576 tmp = RREG32(MC_SEQ_MISC0);
5577 if ((tmp & 0xF0000000) == 0xB0000000)
5578 return true;
5579 return false;
5580}
5581
5582static void si_enable_lbpw(struct radeon_device *rdev, bool enable)
5583{
5584 u32 tmp;
5585
5586 tmp = RREG32(RLC_LB_CNTL);
5587 if (enable)
5588 tmp |= LOAD_BALANCE_ENABLE;
5589 else
5590 tmp &= ~LOAD_BALANCE_ENABLE;
5591 WREG32(RLC_LB_CNTL, tmp);
5592
5593 if (!enable) {
5594 si_select_se_sh(rdev, 0xffffffff, 0xffffffff);
5595 WREG32(SPI_LB_CU_MASK, 0x00ff);
5596 }
5597}
5598
/*
 * si_rlc_resume - bring up the RLC and load its microcode
 * @rdev: radeon device
 *
 * Stops and resets the RLC, runs PG and CG init, clears the RLC
 * run-list/load-balance state, uploads the big-endian RLC firmware one
 * dword at a time, enables LBPW when supported, and restarts the RLC.
 *
 * Returns 0 on success, -EINVAL when no RLC firmware is loaded.
 */
static int si_rlc_resume(struct radeon_device *rdev)
{
	u32 i;
	const __be32 *fw_data;

	if (!rdev->rlc_fw)
		return -EINVAL;

	si_rlc_stop(rdev);

	si_rlc_reset(rdev);

	si_init_pg(rdev);

	si_init_cg(rdev);

	WREG32(RLC_RL_BASE, 0);
	WREG32(RLC_RL_SIZE, 0);
	WREG32(RLC_LB_CNTL, 0);
	WREG32(RLC_LB_CNTR_MAX, 0xffffffff);
	WREG32(RLC_LB_CNTR_INIT, 0);
	WREG32(RLC_LB_INIT_CU_MASK, 0xffffffff);

	WREG32(RLC_MC_CNTL, 0);
	WREG32(RLC_UCODE_CNTL, 0);

	/* firmware image is big-endian; convert per dword on upload */
	fw_data = (const __be32 *)rdev->rlc_fw->data;
	for (i = 0; i < SI_RLC_UCODE_SIZE; i++) {
		WREG32(RLC_UCODE_ADDR, i);
		WREG32(RLC_UCODE_DATA, be32_to_cpup(fw_data++));
	}
	WREG32(RLC_UCODE_ADDR, 0);

	si_enable_lbpw(rdev, si_lbpw_supported(rdev));

	si_rlc_start(rdev);

	return 0;
}
5638
5639static void si_enable_interrupts(struct radeon_device *rdev)
5640{
5641 u32 ih_cntl = RREG32(IH_CNTL);
5642 u32 ih_rb_cntl = RREG32(IH_RB_CNTL);
5643
5644 ih_cntl |= ENABLE_INTR;
5645 ih_rb_cntl |= IH_RB_ENABLE;
5646 WREG32(IH_CNTL, ih_cntl);
5647 WREG32(IH_RB_CNTL, ih_rb_cntl);
5648 rdev->ih.enabled = true;
5649}
5650
5651static void si_disable_interrupts(struct radeon_device *rdev)
5652{
5653 u32 ih_rb_cntl = RREG32(IH_RB_CNTL);
5654 u32 ih_cntl = RREG32(IH_CNTL);
5655
5656 ih_rb_cntl &= ~IH_RB_ENABLE;
5657 ih_cntl &= ~ENABLE_INTR;
5658 WREG32(IH_RB_CNTL, ih_rb_cntl);
5659 WREG32(IH_CNTL, ih_cntl);
5660 /* set rptr, wptr to 0 */
5661 WREG32(IH_RB_RPTR, 0);
5662 WREG32(IH_RB_WPTR, 0);
5663 rdev->ih.enabled = false;
5664 rdev->ih.rptr = 0;
5665}
5666
/*
 * si_disable_interrupt_state - mask every interrupt source
 * @rdev: radeon device
 *
 * Clears the CP ring, DMA trap, GRBM, per-CRTC vblank/pageflip, and
 * (on ASICs with a display engine) HPD interrupt enables.  The HPD
 * writes preserve only the polarity bit.
 */
static void si_disable_interrupt_state(struct radeon_device *rdev)
{
	u32 tmp;

	/* keep only the context busy/empty enables on ring 0 */
	tmp = RREG32(CP_INT_CNTL_RING0) &
		(CNTX_BUSY_INT_ENABLE | CNTX_EMPTY_INT_ENABLE);
	WREG32(CP_INT_CNTL_RING0, tmp);
	WREG32(CP_INT_CNTL_RING1, 0);
	WREG32(CP_INT_CNTL_RING2, 0);
	tmp = RREG32(DMA_CNTL + DMA0_REGISTER_OFFSET) & ~TRAP_ENABLE;
	WREG32(DMA_CNTL + DMA0_REGISTER_OFFSET, tmp);
	tmp = RREG32(DMA_CNTL + DMA1_REGISTER_OFFSET) & ~TRAP_ENABLE;
	WREG32(DMA_CNTL + DMA1_REGISTER_OFFSET, tmp);
	WREG32(GRBM_INT_CNTL, 0);
	if (rdev->num_crtc >= 2) {
		WREG32(INT_MASK + EVERGREEN_CRTC0_REGISTER_OFFSET, 0);
		WREG32(INT_MASK + EVERGREEN_CRTC1_REGISTER_OFFSET, 0);
	}
	if (rdev->num_crtc >= 4) {
		WREG32(INT_MASK + EVERGREEN_CRTC2_REGISTER_OFFSET, 0);
		WREG32(INT_MASK + EVERGREEN_CRTC3_REGISTER_OFFSET, 0);
	}
	if (rdev->num_crtc >= 6) {
		WREG32(INT_MASK + EVERGREEN_CRTC4_REGISTER_OFFSET, 0);
		WREG32(INT_MASK + EVERGREEN_CRTC5_REGISTER_OFFSET, 0);
	}

	if (rdev->num_crtc >= 2) {
		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET, 0);
		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET, 0);
	}
	if (rdev->num_crtc >= 4) {
		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET, 0);
		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET, 0);
	}
	if (rdev->num_crtc >= 6) {
		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET, 0);
		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET, 0);
	}

	if (!ASIC_IS_NODCE(rdev)) {
		WREG32(DAC_AUTODETECT_INT_CONTROL, 0);

		tmp = RREG32(DC_HPD1_INT_CONTROL) & DC_HPDx_INT_POLARITY;
		WREG32(DC_HPD1_INT_CONTROL, tmp);
		tmp = RREG32(DC_HPD2_INT_CONTROL) & DC_HPDx_INT_POLARITY;
		WREG32(DC_HPD2_INT_CONTROL, tmp);
		tmp = RREG32(DC_HPD3_INT_CONTROL) & DC_HPDx_INT_POLARITY;
		WREG32(DC_HPD3_INT_CONTROL, tmp);
		tmp = RREG32(DC_HPD4_INT_CONTROL) & DC_HPDx_INT_POLARITY;
		WREG32(DC_HPD4_INT_CONTROL, tmp);
		tmp = RREG32(DC_HPD5_INT_CONTROL) & DC_HPDx_INT_POLARITY;
		WREG32(DC_HPD5_INT_CONTROL, tmp);
		tmp = RREG32(DC_HPD6_INT_CONTROL) & DC_HPDx_INT_POLARITY;
		WREG32(DC_HPD6_INT_CONTROL, tmp);
	}
}
5724
/*
 * si_irq_init - initialize interrupt support
 * @rdev: radeon device
 *
 * Allocates the IH ring, brings up the RLC, programs the interrupt
 * controller and ring buffer (including the optional writeback
 * location), forces all sources disabled, enables bus mastering, and
 * finally enables the IH.
 *
 * Returns 0 on success, negative error code on ring allocation or RLC
 * bring-up failure (the ring is freed on the latter).
 */
static int si_irq_init(struct radeon_device *rdev)
{
	int ret = 0;
	int rb_bufsz;
	u32 interrupt_cntl, ih_cntl, ih_rb_cntl;

	/* allocate ring */
	ret = r600_ih_ring_alloc(rdev);
	if (ret)
		return ret;

	/* disable irqs */
	si_disable_interrupts(rdev);

	/* init rlc */
	ret = si_rlc_resume(rdev);
	if (ret) {
		r600_ih_ring_fini(rdev);
		return ret;
	}

	/* setup interrupt control */
	/* set dummy read address to ring address */
	WREG32(INTERRUPT_CNTL2, rdev->ih.gpu_addr >> 8);
	interrupt_cntl = RREG32(INTERRUPT_CNTL);
	/* IH_DUMMY_RD_OVERRIDE=0 - dummy read disabled with msi, enabled without msi
	 * IH_DUMMY_RD_OVERRIDE=1 - dummy read controlled by IH_DUMMY_RD_EN
	 */
	interrupt_cntl &= ~IH_DUMMY_RD_OVERRIDE;
	/* IH_REQ_NONSNOOP_EN=1 if ring is in non-cacheable memory, e.g., vram */
	interrupt_cntl &= ~IH_REQ_NONSNOOP_EN;
	WREG32(INTERRUPT_CNTL, interrupt_cntl);

	WREG32(IH_RB_BASE, rdev->ih.gpu_addr >> 8);
	rb_bufsz = order_base_2(rdev->ih.ring_size / 4);

	ih_rb_cntl = (IH_WPTR_OVERFLOW_ENABLE |
		      IH_WPTR_OVERFLOW_CLEAR |
		      (rb_bufsz << 1));

	if (rdev->wb.enabled)
		ih_rb_cntl |= IH_WPTR_WRITEBACK_ENABLE;

	/* set the writeback address whether it's enabled or not */
	WREG32(IH_RB_WPTR_ADDR_LO, (rdev->wb.gpu_addr + R600_WB_IH_WPTR_OFFSET) & 0xFFFFFFFC);
	WREG32(IH_RB_WPTR_ADDR_HI, upper_32_bits(rdev->wb.gpu_addr + R600_WB_IH_WPTR_OFFSET) & 0xFF);

	WREG32(IH_RB_CNTL, ih_rb_cntl);

	/* set rptr, wptr to 0 */
	WREG32(IH_RB_RPTR, 0);
	WREG32(IH_RB_WPTR, 0);

	/* Default settings for IH_CNTL (disabled at first) */
	ih_cntl = MC_WRREQ_CREDIT(0x10) | MC_WR_CLEAN_CNT(0x10) | MC_VMID(0);
	/* RPTR_REARM only works if msi's are enabled */
	if (rdev->msi_enabled)
		ih_cntl |= RPTR_REARM;
	WREG32(IH_CNTL, ih_cntl);

	/* force the active interrupt state to all disabled */
	si_disable_interrupt_state(rdev);

	pci_set_master(rdev->pdev);

	/* enable irqs */
	si_enable_interrupts(rdev);

	return ret;
}
5795
/*
 * si_irq_set - program the interrupt sources requested by the driver
 * @rdev: radeon device
 *
 * Builds the enable masks for the CP rings, DMA engines, per-CRTC
 * vblank/pageflip, HPD, and thermal interrupts from rdev->irq state,
 * then writes them all out in one pass.  If the IH itself is disabled,
 * everything is masked off instead.
 *
 * Returns 0 on success, -EINVAL when no IRQ handler is installed.
 */
int si_irq_set(struct radeon_device *rdev)
{
	u32 cp_int_cntl;
	u32 cp_int_cntl1 = 0, cp_int_cntl2 = 0;
	u32 crtc1 = 0, crtc2 = 0, crtc3 = 0, crtc4 = 0, crtc5 = 0, crtc6 = 0;
	u32 hpd1 = 0, hpd2 = 0, hpd3 = 0, hpd4 = 0, hpd5 = 0, hpd6 = 0;
	u32 grbm_int_cntl = 0;
	u32 dma_cntl, dma_cntl1;
	u32 thermal_int = 0;

	if (!rdev->irq.installed) {
		WARN(1, "Can't enable IRQ/MSI because no handler is installed\n");
		return -EINVAL;
	}
	/* don't enable anything if the ih is disabled */
	if (!rdev->ih.enabled) {
		si_disable_interrupts(rdev);
		/* force the active interrupt state to all disabled */
		si_disable_interrupt_state(rdev);
		return 0;
	}

	/* start from the current values with the enable bits cleared */
	cp_int_cntl = RREG32(CP_INT_CNTL_RING0) &
		(CNTX_BUSY_INT_ENABLE | CNTX_EMPTY_INT_ENABLE);

	if (!ASIC_IS_NODCE(rdev)) {
		hpd1 = RREG32(DC_HPD1_INT_CONTROL) & ~DC_HPDx_INT_EN;
		hpd2 = RREG32(DC_HPD2_INT_CONTROL) & ~DC_HPDx_INT_EN;
		hpd3 = RREG32(DC_HPD3_INT_CONTROL) & ~DC_HPDx_INT_EN;
		hpd4 = RREG32(DC_HPD4_INT_CONTROL) & ~DC_HPDx_INT_EN;
		hpd5 = RREG32(DC_HPD5_INT_CONTROL) & ~DC_HPDx_INT_EN;
		hpd6 = RREG32(DC_HPD6_INT_CONTROL) & ~DC_HPDx_INT_EN;
	}

	dma_cntl = RREG32(DMA_CNTL + DMA0_REGISTER_OFFSET) & ~TRAP_ENABLE;
	dma_cntl1 = RREG32(DMA_CNTL + DMA1_REGISTER_OFFSET) & ~TRAP_ENABLE;

	thermal_int = RREG32(CG_THERMAL_INT) &
		~(THERM_INT_MASK_HIGH | THERM_INT_MASK_LOW);

	/* enable CP interrupts on all rings */
	if (atomic_read(&rdev->irq.ring_int[RADEON_RING_TYPE_GFX_INDEX])) {
		DRM_DEBUG("si_irq_set: sw int gfx\n");
		cp_int_cntl |= TIME_STAMP_INT_ENABLE;
	}
	if (atomic_read(&rdev->irq.ring_int[CAYMAN_RING_TYPE_CP1_INDEX])) {
		DRM_DEBUG("si_irq_set: sw int cp1\n");
		cp_int_cntl1 |= TIME_STAMP_INT_ENABLE;
	}
	if (atomic_read(&rdev->irq.ring_int[CAYMAN_RING_TYPE_CP2_INDEX])) {
		DRM_DEBUG("si_irq_set: sw int cp2\n");
		cp_int_cntl2 |= TIME_STAMP_INT_ENABLE;
	}
	if (atomic_read(&rdev->irq.ring_int[R600_RING_TYPE_DMA_INDEX])) {
		DRM_DEBUG("si_irq_set: sw int dma\n");
		dma_cntl |= TRAP_ENABLE;
	}

	if (atomic_read(&rdev->irq.ring_int[CAYMAN_RING_TYPE_DMA1_INDEX])) {
		DRM_DEBUG("si_irq_set: sw int dma1\n");
		dma_cntl1 |= TRAP_ENABLE;
	}
	if (rdev->irq.crtc_vblank_int[0] ||
	    atomic_read(&rdev->irq.pflip[0])) {
		DRM_DEBUG("si_irq_set: vblank 0\n");
		crtc1 |= VBLANK_INT_MASK;
	}
	if (rdev->irq.crtc_vblank_int[1] ||
	    atomic_read(&rdev->irq.pflip[1])) {
		DRM_DEBUG("si_irq_set: vblank 1\n");
		crtc2 |= VBLANK_INT_MASK;
	}
	if (rdev->irq.crtc_vblank_int[2] ||
	    atomic_read(&rdev->irq.pflip[2])) {
		DRM_DEBUG("si_irq_set: vblank 2\n");
		crtc3 |= VBLANK_INT_MASK;
	}
	if (rdev->irq.crtc_vblank_int[3] ||
	    atomic_read(&rdev->irq.pflip[3])) {
		DRM_DEBUG("si_irq_set: vblank 3\n");
		crtc4 |= VBLANK_INT_MASK;
	}
	if (rdev->irq.crtc_vblank_int[4] ||
	    atomic_read(&rdev->irq.pflip[4])) {
		DRM_DEBUG("si_irq_set: vblank 4\n");
		crtc5 |= VBLANK_INT_MASK;
	}
	if (rdev->irq.crtc_vblank_int[5] ||
	    atomic_read(&rdev->irq.pflip[5])) {
		DRM_DEBUG("si_irq_set: vblank 5\n");
		crtc6 |= VBLANK_INT_MASK;
	}
	if (rdev->irq.hpd[0]) {
		DRM_DEBUG("si_irq_set: hpd 1\n");
		hpd1 |= DC_HPDx_INT_EN;
	}
	if (rdev->irq.hpd[1]) {
		DRM_DEBUG("si_irq_set: hpd 2\n");
		hpd2 |= DC_HPDx_INT_EN;
	}
	if (rdev->irq.hpd[2]) {
		DRM_DEBUG("si_irq_set: hpd 3\n");
		hpd3 |= DC_HPDx_INT_EN;
	}
	if (rdev->irq.hpd[3]) {
		DRM_DEBUG("si_irq_set: hpd 4\n");
		hpd4 |= DC_HPDx_INT_EN;
	}
	if (rdev->irq.hpd[4]) {
		DRM_DEBUG("si_irq_set: hpd 5\n");
		hpd5 |= DC_HPDx_INT_EN;
	}
	if (rdev->irq.hpd[5]) {
		DRM_DEBUG("si_irq_set: hpd 6\n");
		hpd6 |= DC_HPDx_INT_EN;
	}

	/* write out the assembled state */
	WREG32(CP_INT_CNTL_RING0, cp_int_cntl);
	WREG32(CP_INT_CNTL_RING1, cp_int_cntl1);
	WREG32(CP_INT_CNTL_RING2, cp_int_cntl2);

	WREG32(DMA_CNTL + DMA0_REGISTER_OFFSET, dma_cntl);
	WREG32(DMA_CNTL + DMA1_REGISTER_OFFSET, dma_cntl1);

	WREG32(GRBM_INT_CNTL, grbm_int_cntl);

	if (rdev->irq.dpm_thermal) {
		DRM_DEBUG("dpm thermal\n");
		thermal_int |= THERM_INT_MASK_HIGH | THERM_INT_MASK_LOW;
	}

	if (rdev->num_crtc >= 2) {
		WREG32(INT_MASK + EVERGREEN_CRTC0_REGISTER_OFFSET, crtc1);
		WREG32(INT_MASK + EVERGREEN_CRTC1_REGISTER_OFFSET, crtc2);
	}
	if (rdev->num_crtc >= 4) {
		WREG32(INT_MASK + EVERGREEN_CRTC2_REGISTER_OFFSET, crtc3);
		WREG32(INT_MASK + EVERGREEN_CRTC3_REGISTER_OFFSET, crtc4);
	}
	if (rdev->num_crtc >= 6) {
		WREG32(INT_MASK + EVERGREEN_CRTC4_REGISTER_OFFSET, crtc5);
		WREG32(INT_MASK + EVERGREEN_CRTC5_REGISTER_OFFSET, crtc6);
	}

	/* pageflip interrupts are always unmasked */
	if (rdev->num_crtc >= 2) {
		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET,
		       GRPH_PFLIP_INT_MASK);
		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET,
		       GRPH_PFLIP_INT_MASK);
	}
	if (rdev->num_crtc >= 4) {
		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET,
		       GRPH_PFLIP_INT_MASK);
		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET,
		       GRPH_PFLIP_INT_MASK);
	}
	if (rdev->num_crtc >= 6) {
		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET,
		       GRPH_PFLIP_INT_MASK);
		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET,
		       GRPH_PFLIP_INT_MASK);
	}

	if (!ASIC_IS_NODCE(rdev)) {
		WREG32(DC_HPD1_INT_CONTROL, hpd1);
		WREG32(DC_HPD2_INT_CONTROL, hpd2);
		WREG32(DC_HPD3_INT_CONTROL, hpd3);
		WREG32(DC_HPD4_INT_CONTROL, hpd4);
		WREG32(DC_HPD5_INT_CONTROL, hpd5);
		WREG32(DC_HPD6_INT_CONTROL, hpd6);
	}

	WREG32(CG_THERMAL_INT, thermal_int);

	return 0;
}
5972
5973static inline void si_irq_ack(struct radeon_device *rdev)
5974{
5975 u32 tmp;
5976
5977 if (ASIC_IS_NODCE(rdev))
5978 return;
5979
5980 rdev->irq.stat_regs.evergreen.disp_int = RREG32(DISP_INTERRUPT_STATUS);
5981 rdev->irq.stat_regs.evergreen.disp_int_cont = RREG32(DISP_INTERRUPT_STATUS_CONTINUE);
5982 rdev->irq.stat_regs.evergreen.disp_int_cont2 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE2);
5983 rdev->irq.stat_regs.evergreen.disp_int_cont3 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE3);
5984 rdev->irq.stat_regs.evergreen.disp_int_cont4 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE4);
5985 rdev->irq.stat_regs.evergreen.disp_int_cont5 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE5);
5986 rdev->irq.stat_regs.evergreen.d1grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET);
5987 rdev->irq.stat_regs.evergreen.d2grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET);
5988 if (rdev->num_crtc >= 4) {
5989 rdev->irq.stat_regs.evergreen.d3grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET);
5990 rdev->irq.stat_regs.evergreen.d4grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET);
5991 }
5992 if (rdev->num_crtc >= 6) {
5993 rdev->irq.stat_regs.evergreen.d5grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET);
5994 rdev->irq.stat_regs.evergreen.d6grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET);
5995 }
5996
5997 if (rdev->irq.stat_regs.evergreen.d1grph_int & GRPH_PFLIP_INT_OCCURRED)
5998 WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
5999 if (rdev->irq.stat_regs.evergreen.d2grph_int & GRPH_PFLIP_INT_OCCURRED)
6000 WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
6001 if (rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VBLANK_INTERRUPT)
6002 WREG32(VBLANK_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET, VBLANK_ACK);
6003 if (rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VLINE_INTERRUPT)
6004 WREG32(VLINE_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET, VLINE_ACK);
6005 if (rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VBLANK_INTERRUPT)
6006 WREG32(VBLANK_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET, VBLANK_ACK);
6007 if (rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VLINE_INTERRUPT)
6008 WREG32(VLINE_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET, VLINE_ACK);
6009
6010 if (rdev->num_crtc >= 4) {
6011 if (rdev->irq.stat_regs.evergreen.d3grph_int & GRPH_PFLIP_INT_OCCURRED)
6012 WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
6013 if (rdev->irq.stat_regs.evergreen.d4grph_int & GRPH_PFLIP_INT_OCCURRED)
6014 WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
6015 if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VBLANK_INTERRUPT)
6016 WREG32(VBLANK_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET, VBLANK_ACK);
6017 if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VLINE_INTERRUPT)
6018 WREG32(VLINE_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET, VLINE_ACK);
6019 if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VBLANK_INTERRUPT)
6020 WREG32(VBLANK_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET, VBLANK_ACK);
6021 if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VLINE_INTERRUPT)
6022 WREG32(VLINE_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET, VLINE_ACK);
6023 }
6024
6025 if (rdev->num_crtc >= 6) {
6026 if (rdev->irq.stat_regs.evergreen.d5grph_int & GRPH_PFLIP_INT_OCCURRED)
6027 WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
6028 if (rdev->irq.stat_regs.evergreen.d6grph_int & GRPH_PFLIP_INT_OCCURRED)
6029 WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
6030 if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VBLANK_INTERRUPT)
6031 WREG32(VBLANK_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET, VBLANK_ACK);
6032 if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VLINE_INTERRUPT)
6033 WREG32(VLINE_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET, VLINE_ACK);
6034 if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VBLANK_INTERRUPT)
6035 WREG32(VBLANK_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET, VBLANK_ACK);
6036 if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VLINE_INTERRUPT)
6037 WREG32(VLINE_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET, VLINE_ACK);
6038 }
6039
6040 if (rdev->irq.stat_regs.evergreen.disp_int & DC_HPD1_INTERRUPT) {
6041 tmp = RREG32(DC_HPD1_INT_CONTROL);
6042 tmp |= DC_HPDx_INT_ACK;
6043 WREG32(DC_HPD1_INT_CONTROL, tmp);
6044 }
6045 if (rdev->irq.stat_regs.evergreen.disp_int_cont & DC_HPD2_INTERRUPT) {
6046 tmp = RREG32(DC_HPD2_INT_CONTROL);
6047 tmp |= DC_HPDx_INT_ACK;
6048 WREG32(DC_HPD2_INT_CONTROL, tmp);
6049 }
6050 if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & DC_HPD3_INTERRUPT) {
6051 tmp = RREG32(DC_HPD3_INT_CONTROL);
6052 tmp |= DC_HPDx_INT_ACK;
6053 WREG32(DC_HPD3_INT_CONTROL, tmp);
6054 }
6055 if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & DC_HPD4_INTERRUPT) {
6056 tmp = RREG32(DC_HPD4_INT_CONTROL);
6057 tmp |= DC_HPDx_INT_ACK;
6058 WREG32(DC_HPD4_INT_CONTROL, tmp);
6059 }
6060 if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & DC_HPD5_INTERRUPT) {
6061 tmp = RREG32(DC_HPD5_INT_CONTROL);
6062 tmp |= DC_HPDx_INT_ACK;
6063 WREG32(DC_HPD5_INT_CONTROL, tmp);
6064 }
6065 if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & DC_HPD6_INTERRUPT) {
6066 tmp = RREG32(DC_HPD5_INT_CONTROL);
6067 tmp |= DC_HPDx_INT_ACK;
6068 WREG32(DC_HPD6_INT_CONTROL, tmp);
6069 }
6070}
6071
/**
 * si_irq_disable - disable interrupt generation and clear pending state
 *
 * @rdev: radeon_device pointer
 *
 * Masks all interrupt sources, waits briefly so in-flight interrupts can
 * land, acknowledges anything still pending, and finally programs the
 * per-source interrupt enable registers back to their disabled defaults.
 */
static void si_irq_disable(struct radeon_device *rdev)
{
	si_disable_interrupts(rdev);
	/* Wait and acknowledge irq */
	mdelay(1);
	si_irq_ack(rdev);
	si_disable_interrupt_state(rdev);
}
6080
/**
 * si_irq_suspend - quiesce interrupt handling for suspend
 *
 * @rdev: radeon_device pointer
 *
 * Disables all interrupts and stops the RLC so no new interrupt
 * traffic is generated while the device is suspended.
 */
static void si_irq_suspend(struct radeon_device *rdev)
{
	si_irq_disable(rdev);
	si_rlc_stop(rdev);
}
6086
/**
 * si_irq_fini - tear down interrupt handling
 *
 * @rdev: radeon_device pointer
 *
 * Suspends interrupt processing and then releases the IH
 * (interrupt handler) ring buffer.
 */
static void si_irq_fini(struct radeon_device *rdev)
{
	si_irq_suspend(rdev);
	r600_ih_ring_fini(rdev);
}
6092
6093static inline u32 si_get_ih_wptr(struct radeon_device *rdev)
6094{
6095 u32 wptr, tmp;
6096
6097 if (rdev->wb.enabled)
6098 wptr = le32_to_cpu(rdev->wb.wb[R600_WB_IH_WPTR_OFFSET/4]);
6099 else
6100 wptr = RREG32(IH_RB_WPTR);
6101
6102 if (wptr & RB_OVERFLOW) {
6103 /* When a ring buffer overflow happen start parsing interrupt
6104 * from the last not overwritten vector (wptr + 16). Hopefully
6105 * this should allow us to catchup.
6106 */
6107 dev_warn(rdev->dev, "IH ring buffer overflow (0x%08X, %d, %d)\n",
6108 wptr, rdev->ih.rptr, (wptr + 16) + rdev->ih.ptr_mask);
6109 rdev->ih.rptr = (wptr + 16) & rdev->ih.ptr_mask;
6110 tmp = RREG32(IH_RB_CNTL);
6111 tmp |= IH_WPTR_OVERFLOW_CLEAR;
6112 WREG32(IH_RB_CNTL, tmp);
6113 }
6114 return (wptr & rdev->ih.ptr_mask);
6115}
6116
/* SI IV Ring
 * Each IV ring entry is 128 bits:
 * [7:0] - interrupt source id
 * [31:8] - reserved
 * [59:32] - interrupt source data
 * [63:60] - reserved
 * [71:64] - RINGID
 * [79:72] - VMID
 * [127:80] - reserved
 */
/**
 * si_irq_process - drain and dispatch pending IH ring vectors
 *
 * @rdev: radeon_device pointer
 *
 * Walks the IH ring from the saved read pointer to the current write
 * pointer, decoding each 128-bit vector (layout above) and dispatching
 * on the source id: display vblank/vline, page flips, HPD hotplug, UVD,
 * VM protection faults, CP/DMA fence completions and thermal events.
 * Hotplug and thermal handling are deferred to work queues.  A single
 * processor is enforced via the ih.lock atomic; if the write pointer
 * moved while processing, the loop restarts.
 *
 * Returns IRQ_HANDLED when processing was performed, IRQ_NONE when
 * interrupts are disabled or another caller holds the lock.
 */
int si_irq_process(struct radeon_device *rdev)
{
	u32 wptr;
	u32 rptr;
	u32 src_id, src_data, ring_id;
	u32 ring_index;
	bool queue_hotplug = false;
	bool queue_thermal = false;
	u32 status, addr;

	if (!rdev->ih.enabled || rdev->shutdown)
		return IRQ_NONE;

	wptr = si_get_ih_wptr(rdev);

restart_ih:
	/* is somebody else already processing irqs? */
	if (atomic_xchg(&rdev->ih.lock, 1))
		return IRQ_NONE;

	rptr = rdev->ih.rptr;
	DRM_DEBUG("si_irq_process start: rptr %d, wptr %d\n", rptr, wptr);

	/* Order reading of wptr vs. reading of IH ring data */
	rmb();

	/* display interrupts */
	si_irq_ack(rdev);

	while (rptr != wptr) {
		/* wptr/rptr are in bytes! */
		ring_index = rptr / 4;
		src_id = le32_to_cpu(rdev->ih.ring[ring_index]) & 0xff;
		src_data = le32_to_cpu(rdev->ih.ring[ring_index + 1]) & 0xfffffff;
		ring_id = le32_to_cpu(rdev->ih.ring[ring_index + 2]) & 0xff;

		switch (src_id) {
		case 1: /* D1 vblank/vline */
			switch (src_data) {
			case 0: /* D1 vblank */
				if (rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VBLANK_INTERRUPT) {
					if (rdev->irq.crtc_vblank_int[0]) {
						drm_handle_vblank(rdev->ddev, 0);
#ifdef __NetBSD__
						spin_lock(&rdev->irq.vblank_lock);
						rdev->pm.vblank_sync = true;
						DRM_SPIN_WAKEUP_ONE(&rdev->irq.vblank_queue, &rdev->irq.vblank_lock);
						spin_unlock(&rdev->irq.vblank_lock);
#else
						rdev->pm.vblank_sync = true;
						wake_up(&rdev->irq.vblank_queue);
#endif
					}
					if (atomic_read(&rdev->irq.pflip[0]))
						radeon_crtc_handle_flip(rdev, 0);
					rdev->irq.stat_regs.evergreen.disp_int &= ~LB_D1_VBLANK_INTERRUPT;
					DRM_DEBUG("IH: D1 vblank\n");
				}
				break;
			case 1: /* D1 vline */
				if (rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VLINE_INTERRUPT) {
					rdev->irq.stat_regs.evergreen.disp_int &= ~LB_D1_VLINE_INTERRUPT;
					DRM_DEBUG("IH: D1 vline\n");
				}
				break;
			default:
				DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
				break;
			}
			break;
		case 2: /* D2 vblank/vline */
			switch (src_data) {
			case 0: /* D2 vblank */
				if (rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VBLANK_INTERRUPT) {
					if (rdev->irq.crtc_vblank_int[1]) {
						drm_handle_vblank(rdev->ddev, 1);
#ifdef __NetBSD__
						spin_lock(&rdev->irq.vblank_lock);
						rdev->pm.vblank_sync = true;
						DRM_SPIN_WAKEUP_ONE(&rdev->irq.vblank_queue, &rdev->irq.vblank_lock);
						spin_unlock(&rdev->irq.vblank_lock);
#else
						rdev->pm.vblank_sync = true;
						wake_up(&rdev->irq.vblank_queue);
#endif
					}
					if (atomic_read(&rdev->irq.pflip[1]))
						radeon_crtc_handle_flip(rdev, 1);
					rdev->irq.stat_regs.evergreen.disp_int_cont &= ~LB_D2_VBLANK_INTERRUPT;
					DRM_DEBUG("IH: D2 vblank\n");
				}
				break;
			case 1: /* D2 vline */
				if (rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VLINE_INTERRUPT) {
					rdev->irq.stat_regs.evergreen.disp_int_cont &= ~LB_D2_VLINE_INTERRUPT;
					DRM_DEBUG("IH: D2 vline\n");
				}
				break;
			default:
				DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
				break;
			}
			break;
		case 3: /* D3 vblank/vline */
			switch (src_data) {
			case 0: /* D3 vblank */
				if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VBLANK_INTERRUPT) {
					if (rdev->irq.crtc_vblank_int[2]) {
						drm_handle_vblank(rdev->ddev, 2);
#ifdef __NetBSD__
						spin_lock(&rdev->irq.vblank_lock);
						rdev->pm.vblank_sync = true;
						DRM_SPIN_WAKEUP_ONE(&rdev->irq.vblank_queue, &rdev->irq.vblank_lock);
						spin_unlock(&rdev->irq.vblank_lock);
#else
						rdev->pm.vblank_sync = true;
						wake_up(&rdev->irq.vblank_queue);
#endif
					}
					if (atomic_read(&rdev->irq.pflip[2]))
						radeon_crtc_handle_flip(rdev, 2);
					rdev->irq.stat_regs.evergreen.disp_int_cont2 &= ~LB_D3_VBLANK_INTERRUPT;
					DRM_DEBUG("IH: D3 vblank\n");
				}
				break;
			case 1: /* D3 vline */
				if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VLINE_INTERRUPT) {
					rdev->irq.stat_regs.evergreen.disp_int_cont2 &= ~LB_D3_VLINE_INTERRUPT;
					DRM_DEBUG("IH: D3 vline\n");
				}
				break;
			default:
				DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
				break;
			}
			break;
		case 4: /* D4 vblank/vline */
			switch (src_data) {
			case 0: /* D4 vblank */
				if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VBLANK_INTERRUPT) {
					if (rdev->irq.crtc_vblank_int[3]) {
						drm_handle_vblank(rdev->ddev, 3);
#ifdef __NetBSD__
						spin_lock(&rdev->irq.vblank_lock);
						rdev->pm.vblank_sync = true;
						DRM_SPIN_WAKEUP_ONE(&rdev->irq.vblank_queue, &rdev->irq.vblank_lock);
						spin_unlock(&rdev->irq.vblank_lock);
#else
						rdev->pm.vblank_sync = true;
						wake_up(&rdev->irq.vblank_queue);
#endif
					}
					if (atomic_read(&rdev->irq.pflip[3]))
						radeon_crtc_handle_flip(rdev, 3);
					rdev->irq.stat_regs.evergreen.disp_int_cont3 &= ~LB_D4_VBLANK_INTERRUPT;
					DRM_DEBUG("IH: D4 vblank\n");
				}
				break;
			case 1: /* D4 vline */
				if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VLINE_INTERRUPT) {
					rdev->irq.stat_regs.evergreen.disp_int_cont3 &= ~LB_D4_VLINE_INTERRUPT;
					DRM_DEBUG("IH: D4 vline\n");
				}
				break;
			default:
				DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
				break;
			}
			break;
		case 5: /* D5 vblank/vline */
			switch (src_data) {
			case 0: /* D5 vblank */
				if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VBLANK_INTERRUPT) {
					if (rdev->irq.crtc_vblank_int[4]) {
						drm_handle_vblank(rdev->ddev, 4);
#ifdef __NetBSD__
						spin_lock(&rdev->irq.vblank_lock);
						rdev->pm.vblank_sync = true;
						DRM_SPIN_WAKEUP_ONE(&rdev->irq.vblank_queue, &rdev->irq.vblank_lock);
						spin_unlock(&rdev->irq.vblank_lock);
#else
						rdev->pm.vblank_sync = true;
						wake_up(&rdev->irq.vblank_queue);
#endif
					}
					if (atomic_read(&rdev->irq.pflip[4]))
						radeon_crtc_handle_flip(rdev, 4);
					rdev->irq.stat_regs.evergreen.disp_int_cont4 &= ~LB_D5_VBLANK_INTERRUPT;
					DRM_DEBUG("IH: D5 vblank\n");
				}
				break;
			case 1: /* D5 vline */
				if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VLINE_INTERRUPT) {
					rdev->irq.stat_regs.evergreen.disp_int_cont4 &= ~LB_D5_VLINE_INTERRUPT;
					DRM_DEBUG("IH: D5 vline\n");
				}
				break;
			default:
				DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
				break;
			}
			break;
		case 6: /* D6 vblank/vline */
			switch (src_data) {
			case 0: /* D6 vblank */
				if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VBLANK_INTERRUPT) {
					if (rdev->irq.crtc_vblank_int[5]) {
						drm_handle_vblank(rdev->ddev, 5);
#ifdef __NetBSD__
						spin_lock(&rdev->irq.vblank_lock);
						rdev->pm.vblank_sync = true;
						DRM_SPIN_WAKEUP_ONE(&rdev->irq.vblank_queue, &rdev->irq.vblank_lock);
						spin_unlock(&rdev->irq.vblank_lock);
#else
						rdev->pm.vblank_sync = true;
						wake_up(&rdev->irq.vblank_queue);
#endif
					}
					if (atomic_read(&rdev->irq.pflip[5]))
						radeon_crtc_handle_flip(rdev, 5);
					rdev->irq.stat_regs.evergreen.disp_int_cont5 &= ~LB_D6_VBLANK_INTERRUPT;
					DRM_DEBUG("IH: D6 vblank\n");
				}
				break;
			case 1: /* D6 vline */
				if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VLINE_INTERRUPT) {
					rdev->irq.stat_regs.evergreen.disp_int_cont5 &= ~LB_D6_VLINE_INTERRUPT;
					DRM_DEBUG("IH: D6 vline\n");
				}
				break;
			default:
				DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
				break;
			}
			break;
		/* pageflip source ids are 8,10,12,14,16,18 for D1..D6,
		 * hence the (src_id - 8) >> 1 crtc index below */
		case 8: /* D1 page flip */
		case 10: /* D2 page flip */
		case 12: /* D3 page flip */
		case 14: /* D4 page flip */
		case 16: /* D5 page flip */
		case 18: /* D6 page flip */
			DRM_DEBUG("IH: D%d flip\n", ((src_id - 8) >> 1) + 1);
			radeon_crtc_handle_flip(rdev, (src_id - 8) >> 1);
			break;
		case 42: /* HPD hotplug */
			switch (src_data) {
			case 0:
				if (rdev->irq.stat_regs.evergreen.disp_int & DC_HPD1_INTERRUPT) {
					rdev->irq.stat_regs.evergreen.disp_int &= ~DC_HPD1_INTERRUPT;
					queue_hotplug = true;
					DRM_DEBUG("IH: HPD1\n");
				}
				break;
			case 1:
				if (rdev->irq.stat_regs.evergreen.disp_int_cont & DC_HPD2_INTERRUPT) {
					rdev->irq.stat_regs.evergreen.disp_int_cont &= ~DC_HPD2_INTERRUPT;
					queue_hotplug = true;
					DRM_DEBUG("IH: HPD2\n");
				}
				break;
			case 2:
				if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & DC_HPD3_INTERRUPT) {
					rdev->irq.stat_regs.evergreen.disp_int_cont2 &= ~DC_HPD3_INTERRUPT;
					queue_hotplug = true;
					DRM_DEBUG("IH: HPD3\n");
				}
				break;
			case 3:
				if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & DC_HPD4_INTERRUPT) {
					rdev->irq.stat_regs.evergreen.disp_int_cont3 &= ~DC_HPD4_INTERRUPT;
					queue_hotplug = true;
					DRM_DEBUG("IH: HPD4\n");
				}
				break;
			case 4:
				if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & DC_HPD5_INTERRUPT) {
					rdev->irq.stat_regs.evergreen.disp_int_cont4 &= ~DC_HPD5_INTERRUPT;
					queue_hotplug = true;
					DRM_DEBUG("IH: HPD5\n");
				}
				break;
			case 5:
				if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & DC_HPD6_INTERRUPT) {
					rdev->irq.stat_regs.evergreen.disp_int_cont5 &= ~DC_HPD6_INTERRUPT;
					queue_hotplug = true;
					DRM_DEBUG("IH: HPD6\n");
				}
				break;
			default:
				DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
				break;
			}
			break;
		case 124: /* UVD */
			DRM_DEBUG("IH: UVD int: 0x%08x\n", src_data);
			radeon_fence_process(rdev, R600_RING_TYPE_UVD_INDEX);
			break;
		case 146: /* VM protection fault */
		case 147:
			addr = RREG32(VM_CONTEXT1_PROTECTION_FAULT_ADDR);
			status = RREG32(VM_CONTEXT1_PROTECTION_FAULT_STATUS);
			dev_err(rdev->dev, "GPU fault detected: %d 0x%08x\n", src_id, src_data);
			dev_err(rdev->dev, "  VM_CONTEXT1_PROTECTION_FAULT_ADDR   0x%08X\n",
				addr);
			dev_err(rdev->dev, "  VM_CONTEXT1_PROTECTION_FAULT_STATUS 0x%08X\n",
				status);
			si_vm_decode_fault(rdev, status, addr);
			/* reset addr and status */
			WREG32_P(VM_CONTEXT1_CNTL2, 1, ~1);
			break;
		case 176: /* RINGID0 CP_INT */
			radeon_fence_process(rdev, RADEON_RING_TYPE_GFX_INDEX);
			break;
		case 177: /* RINGID1 CP_INT */
			radeon_fence_process(rdev, CAYMAN_RING_TYPE_CP1_INDEX);
			break;
		case 178: /* RINGID2 CP_INT */
			radeon_fence_process(rdev, CAYMAN_RING_TYPE_CP2_INDEX);
			break;
		case 181: /* CP EOP event */
			DRM_DEBUG("IH: CP EOP\n");
			switch (ring_id) {
			case 0:
				radeon_fence_process(rdev, RADEON_RING_TYPE_GFX_INDEX);
				break;
			case 1:
				radeon_fence_process(rdev, CAYMAN_RING_TYPE_CP1_INDEX);
				break;
			case 2:
				radeon_fence_process(rdev, CAYMAN_RING_TYPE_CP2_INDEX);
				break;
			}
			break;
		case 224: /* DMA trap event */
			DRM_DEBUG("IH: DMA trap\n");
			radeon_fence_process(rdev, R600_RING_TYPE_DMA_INDEX);
			break;
		case 230: /* thermal low to high */
			DRM_DEBUG("IH: thermal low to high\n");
			rdev->pm.dpm.thermal.high_to_low = false;
			queue_thermal = true;
			break;
		case 231: /* thermal high to low */
			DRM_DEBUG("IH: thermal high to low\n");
			rdev->pm.dpm.thermal.high_to_low = true;
			queue_thermal = true;
			break;
		case 233: /* GUI IDLE */
			DRM_DEBUG("IH: GUI idle\n");
			break;
		case 244: /* DMA trap event */
			DRM_DEBUG("IH: DMA1 trap\n");
			radeon_fence_process(rdev, CAYMAN_RING_TYPE_DMA1_INDEX);
			break;
		default:
			DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
			break;
		}

		/* wptr/rptr are in bytes! */
		rptr += 16;
		rptr &= rdev->ih.ptr_mask;
	}
	/* defer slow work (hotplug probing, thermal policy) to process context */
	if (queue_hotplug)
		schedule_work(&rdev->hotplug_work);
	if (queue_thermal && rdev->pm.dpm_enabled)
		schedule_work(&rdev->pm.dpm.thermal.work);
	rdev->ih.rptr = rptr;
	WREG32(IH_RB_RPTR, rdev->ih.rptr);
	atomic_set(&rdev->ih.lock, 0);

	/* make sure wptr hasn't changed while processing */
	wptr = si_get_ih_wptr(rdev);
	if (wptr != rptr)
		goto restart_ih;

	return IRQ_HANDLED;
}
6505
6506/*
6507 * startup/shutdown callbacks
6508 */
/**
 * si_startup - bring the GPU up to an operational state
 *
 * @rdev: radeon_device pointer
 *
 * Performs the ordered hardware bring-up: PCIe link/ASPM setup, VRAM
 * scratch, MC programming and microcode, GART enable, RLC and writeback
 * buffers, fence driver start for all five rings (plus UVD when
 * present), IRQ installation, ring initialization, CP/DMA resume, and
 * finally the IB pool, VM manager and audio.  Called from si_init()
 * and si_resume(); the sequence order is dictated by hardware
 * dependencies and must not be rearranged.
 *
 * Returns 0 on success, negative error code on failure.
 */
static int si_startup(struct radeon_device *rdev)
{
	struct radeon_ring *ring;
	int r;

	/* enable pcie gen2/3 link */
	si_pcie_gen3_enable(rdev);
	/* enable aspm */
	si_program_aspm(rdev);

	/* scratch needs to be initialized before MC */
	r = r600_vram_scratch_init(rdev);
	if (r)
		return r;

	si_mc_program(rdev);

	/* when dpm is enabled, dpm has already loaded the MC firmware */
	if (!rdev->pm.dpm_enabled) {
		r = si_mc_load_microcode(rdev);
		if (r) {
			DRM_ERROR("Failed to load MC firmware!\n");
			return r;
		}
	}

	r = si_pcie_gart_enable(rdev);
	if (r)
		return r;
	si_gpu_init(rdev);

	/* allocate rlc buffers */
	if (rdev->family == CHIP_VERDE) {
		rdev->rlc.reg_list = verde_rlc_save_restore_register_list;
		rdev->rlc.reg_list_size =
			(u32)ARRAY_SIZE(verde_rlc_save_restore_register_list);
	}
	rdev->rlc.cs_data = si_cs_data;
	r = sumo_rlc_init(rdev);
	if (r) {
		DRM_ERROR("Failed to init rlc BOs!\n");
		return r;
	}

	/* allocate wb buffer */
	r = radeon_wb_init(rdev);
	if (r)
		return r;

	/* start fence processing for each ring before the ring is enabled */
	r = radeon_fence_driver_start_ring(rdev, RADEON_RING_TYPE_GFX_INDEX);
	if (r) {
		dev_err(rdev->dev, "failed initializing CP fences (%d).\n", r);
		return r;
	}

	r = radeon_fence_driver_start_ring(rdev, CAYMAN_RING_TYPE_CP1_INDEX);
	if (r) {
		dev_err(rdev->dev, "failed initializing CP fences (%d).\n", r);
		return r;
	}

	r = radeon_fence_driver_start_ring(rdev, CAYMAN_RING_TYPE_CP2_INDEX);
	if (r) {
		dev_err(rdev->dev, "failed initializing CP fences (%d).\n", r);
		return r;
	}

	r = radeon_fence_driver_start_ring(rdev, R600_RING_TYPE_DMA_INDEX);
	if (r) {
		dev_err(rdev->dev, "failed initializing DMA fences (%d).\n", r);
		return r;
	}

	r = radeon_fence_driver_start_ring(rdev, CAYMAN_RING_TYPE_DMA1_INDEX);
	if (r) {
		dev_err(rdev->dev, "failed initializing DMA fences (%d).\n", r);
		return r;
	}

	/* UVD is optional; on failure just disable its ring, don't abort */
	if (rdev->has_uvd) {
		r = uvd_v2_2_resume(rdev);
		if (!r) {
			r = radeon_fence_driver_start_ring(rdev,
							   R600_RING_TYPE_UVD_INDEX);
			if (r)
				dev_err(rdev->dev, "UVD fences init error (%d).\n", r);
		}
		if (r)
			rdev->ring[R600_RING_TYPE_UVD_INDEX].ring_size = 0;
	}

	/* Enable IRQ */
	if (!rdev->irq.installed) {
		r = radeon_irq_kms_init(rdev);
		if (r)
			return r;
	}

	r = si_irq_init(rdev);
	if (r) {
		DRM_ERROR("radeon: IH init failed (%d).\n", r);
		radeon_irq_kms_fini(rdev);
		return r;
	}
	si_irq_set(rdev);

	ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
	r = radeon_ring_init(rdev, ring, ring->ring_size, RADEON_WB_CP_RPTR_OFFSET,
			     RADEON_CP_PACKET2);
	if (r)
		return r;

	ring = &rdev->ring[CAYMAN_RING_TYPE_CP1_INDEX];
	r = radeon_ring_init(rdev, ring, ring->ring_size, RADEON_WB_CP1_RPTR_OFFSET,
			     RADEON_CP_PACKET2);
	if (r)
		return r;

	ring = &rdev->ring[CAYMAN_RING_TYPE_CP2_INDEX];
	r = radeon_ring_init(rdev, ring, ring->ring_size, RADEON_WB_CP2_RPTR_OFFSET,
			     RADEON_CP_PACKET2);
	if (r)
		return r;

	ring = &rdev->ring[R600_RING_TYPE_DMA_INDEX];
	r = radeon_ring_init(rdev, ring, ring->ring_size, R600_WB_DMA_RPTR_OFFSET,
			     DMA_PACKET(DMA_PACKET_NOP, 0, 0, 0, 0));
	if (r)
		return r;

	ring = &rdev->ring[CAYMAN_RING_TYPE_DMA1_INDEX];
	r = radeon_ring_init(rdev, ring, ring->ring_size, CAYMAN_WB_DMA1_RPTR_OFFSET,
			     DMA_PACKET(DMA_PACKET_NOP, 0, 0, 0, 0));
	if (r)
		return r;

	r = si_cp_load_microcode(rdev);
	if (r)
		return r;
	r = si_cp_resume(rdev);
	if (r)
		return r;

	r = cayman_dma_resume(rdev);
	if (r)
		return r;

	if (rdev->has_uvd) {
		ring = &rdev->ring[R600_RING_TYPE_UVD_INDEX];
		if (ring->ring_size) {
			r = radeon_ring_init(rdev, ring, ring->ring_size, 0,
					     RADEON_CP_PACKET2);
			if (!r)
				r = uvd_v1_0_init(rdev);
			if (r)
				DRM_ERROR("radeon: failed initializing UVD (%d).\n", r);
		}
	}

	r = radeon_ib_pool_init(rdev);
	if (r) {
		dev_err(rdev->dev, "IB initialization failed (%d).\n", r);
		return r;
	}

	r = radeon_vm_manager_init(rdev);
	if (r) {
		dev_err(rdev->dev, "vm manager initialization failed (%d).\n", r);
		return r;
	}

	r = dce6_audio_init(rdev);
	if (r)
		return r;

	return 0;
}
6685
/**
 * si_resume - resume the GPU after suspend
 *
 * @rdev: radeon_device pointer
 *
 * Re-posts the card via the ATOM BIOS, restores the golden register
 * set, resumes power management if DPM is in use, and re-runs the full
 * startup sequence.  accel_working is cleared again if startup fails.
 *
 * Returns 0 on success, negative error code on failure.
 */
int si_resume(struct radeon_device *rdev)
{
	int r;

	/* Do not reset GPU before posting, on rv770 hw unlike on r500 hw,
	 * posting will perform necessary task to bring back GPU into good
	 * shape.
	 */
	/* post card */
	atom_asic_init(rdev->mode_info.atom_context);

	/* init golden registers */
	si_init_golden_registers(rdev);

	if (rdev->pm.pm_method == PM_METHOD_DPM)
		radeon_pm_resume(rdev);

	rdev->accel_working = true;
	r = si_startup(rdev);
	if (r) {
		DRM_ERROR("si startup failed on resume\n");
		rdev->accel_working = false;
		return r;
	}

	return r;

}
6714
/**
 * si_suspend - quiesce the GPU for suspend
 *
 * @rdev: radeon_device pointer
 *
 * Stops power management, audio, the VM manager, the CP and DMA
 * engines, and UVD (when present), then tears down powergating,
 * clockgating, interrupts, writeback and the GART, in that order.
 *
 * Always returns 0.
 */
int si_suspend(struct radeon_device *rdev)
{
	radeon_pm_suspend(rdev);
	dce6_audio_fini(rdev);
	radeon_vm_manager_fini(rdev);
	si_cp_enable(rdev, false);
	cayman_dma_stop(rdev);
	if (rdev->has_uvd) {
		uvd_v1_0_fini(rdev);
		radeon_uvd_suspend(rdev);
	}
	si_fini_pg(rdev);
	si_fini_cg(rdev);
	si_irq_suspend(rdev);
	radeon_wb_disable(rdev);
	si_pcie_gart_disable(rdev);
	return 0;
}
6733
/* Plan is to move initialization in that function and use
 * helper function so that radeon_device_init pretty much
 * do nothing more than calling asic specific function. This
 * should also allow to remove a bunch of callback function
 * like vram_info.
 */
/**
 * si_init - one-time driver initialization for SI parts
 *
 * @rdev: radeon_device pointer
 *
 * Fetches and validates the (ATOM) BIOS, posts the card if needed,
 * sets up golden/scratch/surface registers and clocks, initializes the
 * fence driver, memory controller and buffer manager, loads microcode,
 * sizes all rings and the IH ring, initializes the GART, and finally
 * runs si_startup().  On startup failure acceleration is disabled but
 * init still succeeds (modesetting may continue to work).
 *
 * Returns 0 on success, negative error code on fatal failure.
 */
int si_init(struct radeon_device *rdev)
{
	struct radeon_ring *ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
	int r;

	/* Read BIOS */
	if (!radeon_get_bios(rdev)) {
		if (ASIC_IS_AVIVO(rdev))
			return -EINVAL;
	}
	/* Must be an ATOMBIOS */
	if (!rdev->is_atom_bios) {
		dev_err(rdev->dev, "Expecting atombios for cayman GPU\n");
		return -EINVAL;
	}
	r = radeon_atombios_init(rdev);
	if (r)
		return r;

	/* Post card if necessary */
	if (!radeon_card_posted(rdev)) {
		if (!rdev->bios) {
			dev_err(rdev->dev, "Card not posted and no BIOS - ignoring\n");
			return -EINVAL;
		}
		DRM_INFO("GPU not posted. posting now...\n");
		atom_asic_init(rdev->mode_info.atom_context);
	}
	/* init golden registers */
	si_init_golden_registers(rdev);
	/* Initialize scratch registers */
	si_scratch_init(rdev);
	/* Initialize surface registers */
	radeon_surface_init(rdev);
	/* Initialize clocks */
	radeon_get_clock_info(rdev->ddev);

	/* Fence driver */
	r = radeon_fence_driver_init(rdev);
	if (r)
		return r;

	/* initialize memory controller */
	r = si_mc_init(rdev);
	if (r)
		return r;
	/* Memory manager */
	r = radeon_bo_init(rdev);
	if (r)
		return r;

	/* load all required microcode images if not already present */
	if (!rdev->me_fw || !rdev->pfp_fw || !rdev->ce_fw ||
	    !rdev->rlc_fw || !rdev->mc_fw) {
		r = si_init_microcode(rdev);
		if (r) {
			DRM_ERROR("Failed to load firmware!\n");
			return r;
		}
	}

	/* Initialize power management */
	radeon_pm_init(rdev);

	/* size the five compute/gfx/dma rings; buffers allocated later */
	ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
	ring->ring_obj = NULL;
	r600_ring_init(rdev, ring, 1024 * 1024);

	ring = &rdev->ring[CAYMAN_RING_TYPE_CP1_INDEX];
	ring->ring_obj = NULL;
	r600_ring_init(rdev, ring, 1024 * 1024);

	ring = &rdev->ring[CAYMAN_RING_TYPE_CP2_INDEX];
	ring->ring_obj = NULL;
	r600_ring_init(rdev, ring, 1024 * 1024);

	ring = &rdev->ring[R600_RING_TYPE_DMA_INDEX];
	ring->ring_obj = NULL;
	r600_ring_init(rdev, ring, 64 * 1024);

	ring = &rdev->ring[CAYMAN_RING_TYPE_DMA1_INDEX];
	ring->ring_obj = NULL;
	r600_ring_init(rdev, ring, 64 * 1024);

	if (rdev->has_uvd) {
		r = radeon_uvd_init(rdev);
		if (!r) {
			ring = &rdev->ring[R600_RING_TYPE_UVD_INDEX];
			ring->ring_obj = NULL;
			r600_ring_init(rdev, ring, 4096);
		}
	}

	rdev->ih.ring_obj = NULL;
	r600_ih_ring_init(rdev, 64 * 1024);

	r = r600_pcie_gart_init(rdev);
	if (r)
		return r;

	rdev->accel_working = true;
	r = si_startup(rdev);
	if (r) {
		/* startup failure is non-fatal: fall back to no acceleration */
		dev_err(rdev->dev, "disabling GPU acceleration\n");
		si_cp_fini(rdev);
		cayman_dma_fini(rdev);
		si_irq_fini(rdev);
		sumo_rlc_fini(rdev);
		radeon_wb_fini(rdev);
		radeon_ib_pool_fini(rdev);
		radeon_vm_manager_fini(rdev);
		radeon_irq_kms_fini(rdev);
		si_pcie_gart_fini(rdev);
		rdev->accel_working = false;
	}

	/* Don't start up if the MC ucode is missing.
	 * The default clocks and voltages before the MC ucode
	 * is loaded are not suffient for advanced operations.
	 */
	if (!rdev->mc_fw) {
		DRM_ERROR("radeon: MC ucode required for NI+.\n");
		return -EINVAL;
	}

	return 0;
}
6866
/**
 * si_fini - final driver teardown for SI parts
 *
 * @rdev: radeon_device pointer
 *
 * Releases every resource acquired by si_init()/si_startup(), roughly
 * in reverse order of initialization, and frees the cached BIOS image.
 */
void si_fini(struct radeon_device *rdev)
{
	radeon_pm_fini(rdev);
	si_cp_fini(rdev);
	cayman_dma_fini(rdev);
	si_fini_pg(rdev);
	si_fini_cg(rdev);
	si_irq_fini(rdev);
	sumo_rlc_fini(rdev);
	radeon_wb_fini(rdev);
	radeon_vm_manager_fini(rdev);
	radeon_ib_pool_fini(rdev);
	radeon_irq_kms_fini(rdev);
	if (rdev->has_uvd) {
		uvd_v1_0_fini(rdev);
		radeon_uvd_fini(rdev);
	}
	si_pcie_gart_fini(rdev);
	r600_vram_scratch_fini(rdev);
	radeon_gem_fini(rdev);
	radeon_fence_driver_fini(rdev);
	radeon_bo_fini(rdev);
	radeon_atombios_fini(rdev);
	kfree(rdev->bios);
	rdev->bios = NULL;
}
6893
6894/**
6895 * si_get_gpu_clock_counter - return GPU clock counter snapshot
6896 *
6897 * @rdev: radeon_device pointer
6898 *
6899 * Fetches a GPU clock counter snapshot (SI).
6900 * Returns the 64 bit clock counter snapshot.
6901 */
6902uint64_t si_get_gpu_clock_counter(struct radeon_device *rdev)
6903{
6904 uint64_t clock;
6905
6906 mutex_lock(&rdev->gpu_clock_mutex);
6907 WREG32(RLC_CAPTURE_GPU_CLOCK_COUNT, 1);
6908 clock = (uint64_t)RREG32(RLC_GPU_CLOCK_COUNT_LSB) |
6909 ((uint64_t)RREG32(RLC_GPU_CLOCK_COUNT_MSB) << 32ULL);
6910 mutex_unlock(&rdev->gpu_clock_mutex);
6911 return clock;
6912}
6913
/**
 * si_set_uvd_clocks - program the UVD PLL for the requested clocks
 *
 * @rdev: radeon_device pointer
 * @vclk: requested UVD video clock (kHz); 0 together with @dclk == 0
 *        puts the PLL to sleep in bypass mode
 * @dclk: requested UVD decode clock (kHz)
 *
 * Switches VCLK/DCLK to the bypass source, computes and programs the
 * UPLL dividers, then walks the PLL through its reset/settle sequence
 * before switching the clock selection back to the PLL output.  The
 * ordering of the register writes and delays follows the hardware
 * programming sequence and must be preserved.
 *
 * Returns 0 on success, negative error code on failure.
 */
int si_set_uvd_clocks(struct radeon_device *rdev, u32 vclk, u32 dclk)
{
	unsigned fb_div = 0, vclk_div = 0, dclk_div = 0;
	int r;

	/* bypass vclk and dclk with bclk */
	WREG32_P(CG_UPLL_FUNC_CNTL_2,
		 VCLK_SRC_SEL(1) | DCLK_SRC_SEL(1),
		 ~(VCLK_SRC_SEL_MASK | DCLK_SRC_SEL_MASK));

	/* put PLL in bypass mode */
	WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_BYPASS_EN_MASK, ~UPLL_BYPASS_EN_MASK);

	if (!vclk || !dclk) {
		/* keep the Bypass mode, put PLL to sleep */
		WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_SLEEP_MASK, ~UPLL_SLEEP_MASK);
		return 0;
	}

	r = radeon_uvd_calc_upll_dividers(rdev, vclk, dclk, 125000, 250000,
					  16384, 0x03FFFFFF, 0, 128, 5,
					  &fb_div, &vclk_div, &dclk_div);
	if (r)
		return r;

	/* set RESET_ANTI_MUX to 0 */
	WREG32_P(CG_UPLL_FUNC_CNTL_5, 0, ~RESET_ANTI_MUX_MASK);

	/* set VCO_MODE to 1 */
	WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_VCO_MODE_MASK, ~UPLL_VCO_MODE_MASK);

	/* toggle UPLL_SLEEP to 1 then back to 0 */
	WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_SLEEP_MASK, ~UPLL_SLEEP_MASK);
	WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_SLEEP_MASK);

	/* deassert UPLL_RESET */
	WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_RESET_MASK);

	mdelay(1);

	r = radeon_uvd_send_upll_ctlreq(rdev, CG_UPLL_FUNC_CNTL);
	if (r)
		return r;

	/* assert UPLL_RESET again */
	WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_RESET_MASK, ~UPLL_RESET_MASK);

	/* disable spread spectrum. */
	WREG32_P(CG_UPLL_SPREAD_SPECTRUM, 0, ~SSEN_MASK);

	/* set feedback divider */
	WREG32_P(CG_UPLL_FUNC_CNTL_3, UPLL_FB_DIV(fb_div), ~UPLL_FB_DIV_MASK);

	/* set ref divider to 0 */
	WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_REF_DIV_MASK);

	/* spare bit selects VCO range based on feedback divider */
	if (fb_div < 307200)
		WREG32_P(CG_UPLL_FUNC_CNTL_4, 0, ~UPLL_SPARE_ISPARE9);
	else
		WREG32_P(CG_UPLL_FUNC_CNTL_4, UPLL_SPARE_ISPARE9, ~UPLL_SPARE_ISPARE9);

	/* set PDIV_A and PDIV_B */
	WREG32_P(CG_UPLL_FUNC_CNTL_2,
		 UPLL_PDIV_A(vclk_div) | UPLL_PDIV_B(dclk_div),
		 ~(UPLL_PDIV_A_MASK | UPLL_PDIV_B_MASK));

	/* give the PLL some time to settle */
	mdelay(15);

	/* deassert PLL_RESET */
	WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_RESET_MASK);

	mdelay(15);

	/* switch from bypass mode to normal mode */
	WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_BYPASS_EN_MASK);

	r = radeon_uvd_send_upll_ctlreq(rdev, CG_UPLL_FUNC_CNTL);
	if (r)
		return r;

	/* switch VCLK and DCLK selection */
	WREG32_P(CG_UPLL_FUNC_CNTL_2,
		 VCLK_SRC_SEL(2) | DCLK_SRC_SEL(2),
		 ~(VCLK_SRC_SEL_MASK | DCLK_SRC_SEL_MASK));

	mdelay(100);

	return 0;
}
7004
/**
 * si_pcie_gen3_enable - attempt to train the PCIe link to gen2/gen3 speed
 *
 * @rdev: radeon_device pointer
 *
 * When the platform supports 5.0 or 8.0 GT/s, forces a software speed
 * change on the PCIe link.  For gen3 this includes the equalization
 * retry dance, which toggles hardware-autonomous width disable on both
 * the upstream bridge and the GPU and re-runs equalization up to 10
 * times while pending transactions drain.  Finally programs the target
 * link speed in the GPU's Link Control 2 register and initiates the
 * speed change, polling until the hardware acknowledges it.
 *
 * The whole routine is compiled out on NetBSD (no PCIe helper API yet)
 * and skipped for IGPs, non-PCIe parts or when radeon.pcie_gen2=0.
 */
static void si_pcie_gen3_enable(struct radeon_device *rdev)
{
#ifndef __NetBSD__ /* XXX radeon pcie */
	struct pci_dev *root = rdev->pdev->bus->self;
	int bridge_pos, gpu_pos;
	u32 speed_cntl, mask, current_data_rate;
	int ret, i;
	u16 tmp16;

	if (radeon_pcie_gen2 == 0)
		return;

	if (rdev->flags & RADEON_IS_IGP)
		return;

	if (!(rdev->flags & RADEON_IS_PCIE))
		return;

	ret = drm_pcie_get_speed_cap_mask(rdev->ddev, &mask);
	if (ret != 0)
		return;

	if (!(mask & (DRM_PCIE_SPEED_50 | DRM_PCIE_SPEED_80)))
		return;

	speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
	/* current_data_rate: 0 = gen1, 1 = gen2, 2 = gen3 */
	current_data_rate = (speed_cntl & LC_CURRENT_DATA_RATE_MASK) >>
		LC_CURRENT_DATA_RATE_SHIFT;
	if (mask & DRM_PCIE_SPEED_80) {
		if (current_data_rate == 2) {
			DRM_INFO("PCIE gen 3 link speeds already enabled\n");
			return;
		}
		DRM_INFO("enabling PCIE gen 3 link speeds, disable with radeon.pcie_gen2=0\n");
	} else if (mask & DRM_PCIE_SPEED_50) {
		if (current_data_rate == 1) {
			DRM_INFO("PCIE gen 2 link speeds already enabled\n");
			return;
		}
		DRM_INFO("enabling PCIE gen 2 link speeds, disable with radeon.pcie_gen2=0\n");
	}

	bridge_pos = pci_pcie_cap(root);
	if (!bridge_pos)
		return;

	gpu_pos = pci_pcie_cap(rdev->pdev);
	if (!gpu_pos)
		return;

	if (mask & DRM_PCIE_SPEED_80) {
		/* re-try equalization if gen3 is not already enabled */
		if (current_data_rate != 2) {
			u16 bridge_cfg, gpu_cfg;
			u16 bridge_cfg2, gpu_cfg2;
			u32 max_lw, current_lw, tmp;

			/* save HAWD state, then set it on bridge and GPU */
			pci_read_config_word(root, bridge_pos + PCI_EXP_LNKCTL, &bridge_cfg);
			pci_read_config_word(rdev->pdev, gpu_pos + PCI_EXP_LNKCTL, &gpu_cfg);

			tmp16 = bridge_cfg | PCI_EXP_LNKCTL_HAWD;
			pci_write_config_word(root, bridge_pos + PCI_EXP_LNKCTL, tmp16);

			tmp16 = gpu_cfg | PCI_EXP_LNKCTL_HAWD;
			pci_write_config_word(rdev->pdev, gpu_pos + PCI_EXP_LNKCTL, tmp16);

			tmp = RREG32_PCIE(PCIE_LC_STATUS1);
			max_lw = (tmp & LC_DETECTED_LINK_WIDTH_MASK) >> LC_DETECTED_LINK_WIDTH_SHIFT;
			current_lw = (tmp & LC_OPERATING_LINK_WIDTH_MASK) >> LC_OPERATING_LINK_WIDTH_SHIFT;

			/* renegotiate back to the full detected width if we lost lanes */
			if (current_lw < max_lw) {
				tmp = RREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL);
				if (tmp & LC_RENEGOTIATION_SUPPORT) {
					tmp &= ~(LC_LINK_WIDTH_MASK | LC_UPCONFIGURE_DIS);
					tmp |= (max_lw << LC_LINK_WIDTH_SHIFT);
					tmp |= LC_UPCONFIGURE_SUPPORT | LC_RENEGOTIATE_EN | LC_RECONFIG_NOW;
					WREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL, tmp);
				}
			}

			/* retry equalization until no transactions are pending */
			for (i = 0; i < 10; i++) {
				/* check status */
				pci_read_config_word(rdev->pdev, gpu_pos + PCI_EXP_DEVSTA, &tmp16);
				if (tmp16 & PCI_EXP_DEVSTA_TRPND)
					break;

				pci_read_config_word(root, bridge_pos + PCI_EXP_LNKCTL, &bridge_cfg);
				pci_read_config_word(rdev->pdev, gpu_pos + PCI_EXP_LNKCTL, &gpu_cfg);

				pci_read_config_word(root, bridge_pos + PCI_EXP_LNKCTL2, &bridge_cfg2);
				pci_read_config_word(rdev->pdev, gpu_pos + PCI_EXP_LNKCTL2, &gpu_cfg2);

				tmp = RREG32_PCIE_PORT(PCIE_LC_CNTL4);
				tmp |= LC_SET_QUIESCE;
				WREG32_PCIE_PORT(PCIE_LC_CNTL4, tmp);

				tmp = RREG32_PCIE_PORT(PCIE_LC_CNTL4);
				tmp |= LC_REDO_EQ;
				WREG32_PCIE_PORT(PCIE_LC_CNTL4, tmp);

				mdelay(100);

				/* linkctl */
				pci_read_config_word(root, bridge_pos + PCI_EXP_LNKCTL, &tmp16);
				tmp16 &= ~PCI_EXP_LNKCTL_HAWD;
				tmp16 |= (bridge_cfg & PCI_EXP_LNKCTL_HAWD);
				pci_write_config_word(root, bridge_pos + PCI_EXP_LNKCTL, tmp16);

				pci_read_config_word(rdev->pdev, gpu_pos + PCI_EXP_LNKCTL, &tmp16);
				tmp16 &= ~PCI_EXP_LNKCTL_HAWD;
				tmp16 |= (gpu_cfg & PCI_EXP_LNKCTL_HAWD);
				pci_write_config_word(rdev->pdev, gpu_pos + PCI_EXP_LNKCTL, tmp16);

				/* linkctl2 */
				pci_read_config_word(root, bridge_pos + PCI_EXP_LNKCTL2, &tmp16);
				tmp16 &= ~((1 << 4) | (7 << 9));
				tmp16 |= (bridge_cfg2 & ((1 << 4) | (7 << 9)));
				pci_write_config_word(root, bridge_pos + PCI_EXP_LNKCTL2, tmp16);

				pci_read_config_word(rdev->pdev, gpu_pos + PCI_EXP_LNKCTL2, &tmp16);
				tmp16 &= ~((1 << 4) | (7 << 9));
				tmp16 |= (gpu_cfg2 & ((1 << 4) | (7 << 9)));
				pci_write_config_word(rdev->pdev, gpu_pos + PCI_EXP_LNKCTL2, tmp16);

				tmp = RREG32_PCIE_PORT(PCIE_LC_CNTL4);
				tmp &= ~LC_SET_QUIESCE;
				WREG32_PCIE_PORT(PCIE_LC_CNTL4, tmp);
			}
		}
	}

	/* set the link speed */
	speed_cntl |= LC_FORCE_EN_SW_SPEED_CHANGE | LC_FORCE_DIS_HW_SPEED_CHANGE;
	speed_cntl &= ~LC_FORCE_DIS_SW_SPEED_CHANGE;
	WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, speed_cntl);

	/* program the target link speed field of Link Control 2 */
	pci_read_config_word(rdev->pdev, gpu_pos + PCI_EXP_LNKCTL2, &tmp16);
	tmp16 &= ~0xf;
	if (mask & DRM_PCIE_SPEED_80)
		tmp16 |= 3; /* gen3 */
	else if (mask & DRM_PCIE_SPEED_50)
		tmp16 |= 2; /* gen2 */
	else
		tmp16 |= 1; /* gen1 */
	pci_write_config_word(rdev->pdev, gpu_pos + PCI_EXP_LNKCTL2, tmp16);

	speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
	speed_cntl |= LC_INITIATE_LINK_SPEED_CHANGE;
	WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, speed_cntl);

	/* hardware clears the initiate bit when the speed change completes */
	for (i = 0; i < rdev->usec_timeout; i++) {
		speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
		if ((speed_cntl & LC_INITIATE_LINK_SPEED_CHANGE) == 0)
			break;
		udelay(1);
	}
#endif
}
7163
/*
 * si_program_aspm - program PCIe ASPM (Active State Power Management)
 * related registers for SI-family GPUs.
 *
 * @rdev: radeon_device pointer
 *
 * Configures the PCIe link controller, PIF PHY power-down and reference
 * clock registers that govern the L0s/L1 link power states.  Returns
 * early if ASPM was disabled via the radeon_aspm module parameter or if
 * the device is not PCIe.  All register updates are read-modify-write
 * and are only written back when the value actually changed, to avoid
 * redundant MMIO traffic.
 */
static void si_program_aspm(struct radeon_device *rdev)
{
	u32 data, orig;
	/*
	 * Local policy knobs: everything is enabled by default here
	 * (L0s, L1, PLL power-off in L1, and CLKREQ usage).
	 */
	bool disable_l0s = false, disable_l1 = false, disable_plloff_in_l1 = false;
	bool disable_clkreq = false;

	/* radeon_aspm == 0 means the user disabled ASPM on the kernel cmdline. */
	if (radeon_aspm == 0)
		return;

	if (!(rdev->flags & RADEON_IS_PCIE))
		return;

	/* Override the transmitted N_FTS (fast training sequence) count to 0x24. */
	orig = data = RREG32_PCIE_PORT(PCIE_LC_N_FTS_CNTL);
	data &= ~LC_XMIT_N_FTS_MASK;
	data |= LC_XMIT_N_FTS(0x24) | LC_XMIT_N_FTS_OVERRIDE_EN;
	if (orig != data)
		WREG32_PCIE_PORT(PCIE_LC_N_FTS_CNTL, data);

	orig = data = RREG32_PCIE_PORT(PCIE_LC_CNTL3);
	data |= LC_GO_TO_RECOVERY;
	if (orig != data)
		WREG32_PCIE_PORT(PCIE_LC_CNTL3, data);

	/* Ignore EDB (end bad) errors on the PCIe port. */
	orig = data = RREG32_PCIE(PCIE_P_CNTL);
	data |= P_IGNORE_EDB_ERR;
	if (orig != data)
		WREG32_PCIE(PCIE_P_CNTL, data);

	/*
	 * Program the L0s/L1 inactivity timers.  PMI-to-L1 is disabled by
	 * default and re-enabled below only when L1 is allowed.
	 */
	orig = data = RREG32_PCIE_PORT(PCIE_LC_CNTL);
	data &= ~(LC_L0S_INACTIVITY_MASK | LC_L1_INACTIVITY_MASK);
	data |= LC_PMI_TO_L1_DIS;
	if (!disable_l0s)
		data |= LC_L0S_INACTIVITY(7);

	if (!disable_l1) {
		data |= LC_L1_INACTIVITY(7);
		data &= ~LC_PMI_TO_L1_DIS;
		if (orig != data)
			WREG32_PCIE_PORT(PCIE_LC_CNTL, data);

		if (!disable_plloff_in_l1) {
			bool clk_req_support;

			/*
			 * Allow the PHY PLLs to power down while the link is
			 * in L1/TXS2.  Both PHYs (PB0/PB1), both lane groups.
			 */
			orig = data = RREG32_PIF_PHY0(PB0_PIF_PWRDOWN_0);
			data &= ~(PLL_POWER_STATE_IN_OFF_0_MASK | PLL_POWER_STATE_IN_TXS2_0_MASK);
			data |= PLL_POWER_STATE_IN_OFF_0(7) | PLL_POWER_STATE_IN_TXS2_0(7);
			if (orig != data)
				WREG32_PIF_PHY0(PB0_PIF_PWRDOWN_0, data);

			orig = data = RREG32_PIF_PHY0(PB0_PIF_PWRDOWN_1);
			data &= ~(PLL_POWER_STATE_IN_OFF_1_MASK | PLL_POWER_STATE_IN_TXS2_1_MASK);
			data |= PLL_POWER_STATE_IN_OFF_1(7) | PLL_POWER_STATE_IN_TXS2_1(7);
			if (orig != data)
				WREG32_PIF_PHY0(PB0_PIF_PWRDOWN_1, data);

			orig = data = RREG32_PIF_PHY1(PB1_PIF_PWRDOWN_0);
			data &= ~(PLL_POWER_STATE_IN_OFF_0_MASK | PLL_POWER_STATE_IN_TXS2_0_MASK);
			data |= PLL_POWER_STATE_IN_OFF_0(7) | PLL_POWER_STATE_IN_TXS2_0(7);
			if (orig != data)
				WREG32_PIF_PHY1(PB1_PIF_PWRDOWN_0, data);

			orig = data = RREG32_PIF_PHY1(PB1_PIF_PWRDOWN_1);
			data &= ~(PLL_POWER_STATE_IN_OFF_1_MASK | PLL_POWER_STATE_IN_TXS2_1_MASK);
			data |= PLL_POWER_STATE_IN_OFF_1(7) | PLL_POWER_STATE_IN_TXS2_1(7);
			if (orig != data)
				WREG32_PIF_PHY1(PB1_PIF_PWRDOWN_1, data);

			/*
			 * On everything except Oland/Hainan, clear the PLL
			 * ramp-up time fields (i.e. force them to 0) for all
			 * four lane groups on both PHYs.
			 */
			if ((rdev->family != CHIP_OLAND) && (rdev->family != CHIP_HAINAN)) {
				orig = data = RREG32_PIF_PHY0(PB0_PIF_PWRDOWN_0);
				data &= ~PLL_RAMP_UP_TIME_0_MASK;
				if (orig != data)
					WREG32_PIF_PHY0(PB0_PIF_PWRDOWN_0, data);

				orig = data = RREG32_PIF_PHY0(PB0_PIF_PWRDOWN_1);
				data &= ~PLL_RAMP_UP_TIME_1_MASK;
				if (orig != data)
					WREG32_PIF_PHY0(PB0_PIF_PWRDOWN_1, data);

				orig = data = RREG32_PIF_PHY0(PB0_PIF_PWRDOWN_2);
				data &= ~PLL_RAMP_UP_TIME_2_MASK;
				if (orig != data)
					WREG32_PIF_PHY0(PB0_PIF_PWRDOWN_2, data);

				orig = data = RREG32_PIF_PHY0(PB0_PIF_PWRDOWN_3);
				data &= ~PLL_RAMP_UP_TIME_3_MASK;
				if (orig != data)
					WREG32_PIF_PHY0(PB0_PIF_PWRDOWN_3, data);

				orig = data = RREG32_PIF_PHY1(PB1_PIF_PWRDOWN_0);
				data &= ~PLL_RAMP_UP_TIME_0_MASK;
				if (orig != data)
					WREG32_PIF_PHY1(PB1_PIF_PWRDOWN_0, data);

				orig = data = RREG32_PIF_PHY1(PB1_PIF_PWRDOWN_1);
				data &= ~PLL_RAMP_UP_TIME_1_MASK;
				if (orig != data)
					WREG32_PIF_PHY1(PB1_PIF_PWRDOWN_1, data);

				orig = data = RREG32_PIF_PHY1(PB1_PIF_PWRDOWN_2);
				data &= ~PLL_RAMP_UP_TIME_2_MASK;
				if (orig != data)
					WREG32_PIF_PHY1(PB1_PIF_PWRDOWN_2, data);

				orig = data = RREG32_PIF_PHY1(PB1_PIF_PWRDOWN_3);
				data &= ~PLL_RAMP_UP_TIME_3_MASK;
				if (orig != data)
					WREG32_PIF_PHY1(PB1_PIF_PWRDOWN_3, data);
			}
			/* Dynamic lane power state 3. */
			orig = data = RREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL);
			data &= ~LC_DYN_LANES_PWR_STATE_MASK;
			data |= LC_DYN_LANES_PWR_STATE(3);
			if (orig != data)
				WREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL, data);

			/*
			 * LS2 exit time: Oland/Hainan get 5; everyone else
			 * keeps the field cleared (0).
			 */
			orig = data = RREG32_PIF_PHY0(PB0_PIF_CNTL);
			data &= ~LS2_EXIT_TIME_MASK;
			if ((rdev->family == CHIP_OLAND) || (rdev->family == CHIP_HAINAN))
				data |= LS2_EXIT_TIME(5);
			if (orig != data)
				WREG32_PIF_PHY0(PB0_PIF_CNTL, data);

			orig = data = RREG32_PIF_PHY1(PB1_PIF_CNTL);
			data &= ~LS2_EXIT_TIME_MASK;
			if ((rdev->family == CHIP_OLAND) || (rdev->family == CHIP_HAINAN))
				data |= LS2_EXIT_TIME(5);
			if (orig != data)
				WREG32_PIF_PHY1(PB1_PIF_CNTL, data);

			if (!disable_clkreq) {
#ifndef __NetBSD__		/* XXX radeon pcie */
				struct pci_dev *root = rdev->pdev->bus->self;
				u32 lnkcap;
#endif

				/*
				 * CLKREQ is only usable if the upstream
				 * bridge advertises clock power management.
				 * On NetBSD the PCIe capability helpers are
				 * not wired up, so assume unsupported.
				 */
				clk_req_support = false;
#ifndef __NetBSD__		/* XXX radeon pcie */
				pcie_capability_read_dword(root, PCI_EXP_LNKCAP, &lnkcap);
				if (lnkcap & PCI_EXP_LNKCAP_CLKPM)
					clk_req_support = true;
#endif
			} else {
				clk_req_support = false;
			}

			if (clk_req_support) {
				/* Allow power-down in L1/L2,3 via CLKREQ. */
				orig = data = RREG32_PCIE_PORT(PCIE_LC_CNTL2);
				data |= LC_ALLOW_PDWN_IN_L1 | LC_ALLOW_PDWN_IN_L23;
				if (orig != data)
					WREG32_PCIE_PORT(PCIE_LC_CNTL2, data);

				/*
				 * Switch the thermal monitor, deep sleep,
				 * Z and BIF clocks off the PCIe refclk so
				 * it can be gated; see selects below.
				 */
				orig = data = RREG32(THM_CLK_CNTL);
				data &= ~(CMON_CLK_SEL_MASK | TMON_CLK_SEL_MASK);
				data |= CMON_CLK_SEL(1) | TMON_CLK_SEL(1);
				if (orig != data)
					WREG32(THM_CLK_CNTL, data);

				orig = data = RREG32(MISC_CLK_CNTL);
				data &= ~(DEEP_SLEEP_CLK_SEL_MASK | ZCLK_SEL_MASK);
				data |= DEEP_SLEEP_CLK_SEL(1) | ZCLK_SEL(1);
				if (orig != data)
					WREG32(MISC_CLK_CNTL, data);

				orig = data = RREG32(CG_CLKPIN_CNTL);
				data &= ~BCLK_AS_XCLK;
				if (orig != data)
					WREG32(CG_CLKPIN_CNTL, data);

				orig = data = RREG32(CG_CLKPIN_CNTL_2);
				data &= ~FORCE_BIF_REFCLK_EN;
				if (orig != data)
					WREG32(CG_CLKPIN_CNTL_2, data);

				orig = data = RREG32(MPLL_BYPASSCLK_SEL);
				data &= ~MPLL_CLKOUT_SEL_MASK;
				data |= MPLL_CLKOUT_SEL(4);
				if (orig != data)
					WREG32(MPLL_BYPASSCLK_SEL, data);

				orig = data = RREG32(SPLL_CNTL_MODE);
				data &= ~SPLL_REFCLK_SEL_MASK;
				if (orig != data)
					WREG32(SPLL_CNTL_MODE, data);
			}
		}
	} else {
		/* L1 disabled: still flush the LC_CNTL update from above. */
		if (orig != data)
			WREG32_PCIE_PORT(PCIE_LC_CNTL, data);
	}

	/* Enable memory light sleep for the PCIe slave/master/replay blocks. */
	orig = data = RREG32_PCIE(PCIE_CNTL2);
	data |= SLV_MEM_LS_EN | MST_MEM_LS_EN | REPLAY_MEM_LS_EN;
	if (orig != data)
		WREG32_PCIE(PCIE_CNTL2, data);

	if (!disable_l0s) {
		/*
		 * If the received N_FTS field reads back all-ones
		 * (presumably meaning it was never trained/valid) and the
		 * link is reversed in both directions, turn the L0s
		 * inactivity timer back off.  NOTE(review): exact hardware
		 * rationale not visible here — inherited from the
		 * reference driver.
		 */
		data = RREG32_PCIE_PORT(PCIE_LC_N_FTS_CNTL);
		if((data & LC_N_FTS_MASK) == LC_N_FTS_MASK) {
			data = RREG32_PCIE(PCIE_LC_STATUS1);
			if ((data & LC_REVERSE_XMIT) && (data & LC_REVERSE_RCVR)) {
				orig = data = RREG32_PCIE_PORT(PCIE_LC_CNTL);
				data &= ~LC_L0S_INACTIVITY_MASK;
				if (orig != data)
					WREG32_PCIE_PORT(PCIE_LC_CNTL, data);
			}
		}
	}
}
7371