xref: /OK3568_Linux_fs/kernel/drivers/gpu/drm/radeon/radeon_display.c (revision 4882a59341e53eb6f0b4789bf948001014eff981)
1*4882a593Smuzhiyun /*
2*4882a593Smuzhiyun  * Copyright 2007-8 Advanced Micro Devices, Inc.
3*4882a593Smuzhiyun  * Copyright 2008 Red Hat Inc.
4*4882a593Smuzhiyun  *
5*4882a593Smuzhiyun  * Permission is hereby granted, free of charge, to any person obtaining a
6*4882a593Smuzhiyun  * copy of this software and associated documentation files (the "Software"),
7*4882a593Smuzhiyun  * to deal in the Software without restriction, including without limitation
8*4882a593Smuzhiyun  * the rights to use, copy, modify, merge, publish, distribute, sublicense,
9*4882a593Smuzhiyun  * and/or sell copies of the Software, and to permit persons to whom the
10*4882a593Smuzhiyun  * Software is furnished to do so, subject to the following conditions:
11*4882a593Smuzhiyun  *
12*4882a593Smuzhiyun  * The above copyright notice and this permission notice shall be included in
13*4882a593Smuzhiyun  * all copies or substantial portions of the Software.
14*4882a593Smuzhiyun  *
15*4882a593Smuzhiyun  * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16*4882a593Smuzhiyun  * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17*4882a593Smuzhiyun  * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
18*4882a593Smuzhiyun  * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
19*4882a593Smuzhiyun  * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
20*4882a593Smuzhiyun  * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
21*4882a593Smuzhiyun  * OTHER DEALINGS IN THE SOFTWARE.
22*4882a593Smuzhiyun  *
23*4882a593Smuzhiyun  * Authors: Dave Airlie
24*4882a593Smuzhiyun  *          Alex Deucher
25*4882a593Smuzhiyun  */
26*4882a593Smuzhiyun 
27*4882a593Smuzhiyun #include <linux/pci.h>
28*4882a593Smuzhiyun #include <linux/pm_runtime.h>
29*4882a593Smuzhiyun #include <linux/gcd.h>
30*4882a593Smuzhiyun 
31*4882a593Smuzhiyun #include <asm/div64.h>
32*4882a593Smuzhiyun 
33*4882a593Smuzhiyun #include <drm/drm_crtc_helper.h>
34*4882a593Smuzhiyun #include <drm/drm_device.h>
35*4882a593Smuzhiyun #include <drm/drm_drv.h>
36*4882a593Smuzhiyun #include <drm/drm_edid.h>
37*4882a593Smuzhiyun #include <drm/drm_fb_helper.h>
38*4882a593Smuzhiyun #include <drm/drm_fourcc.h>
39*4882a593Smuzhiyun #include <drm/drm_gem_framebuffer_helper.h>
40*4882a593Smuzhiyun #include <drm/drm_plane_helper.h>
41*4882a593Smuzhiyun #include <drm/drm_probe_helper.h>
42*4882a593Smuzhiyun #include <drm/drm_vblank.h>
43*4882a593Smuzhiyun #include <drm/radeon_drm.h>
44*4882a593Smuzhiyun 
45*4882a593Smuzhiyun #include "atom.h"
46*4882a593Smuzhiyun #include "radeon.h"
47*4882a593Smuzhiyun 
48*4882a593Smuzhiyun u32 radeon_get_vblank_counter_kms(struct drm_crtc *crtc);
49*4882a593Smuzhiyun int radeon_enable_vblank_kms(struct drm_crtc *crtc);
50*4882a593Smuzhiyun void radeon_disable_vblank_kms(struct drm_crtc *crtc);
51*4882a593Smuzhiyun 
52*4882a593Smuzhiyun static void avivo_crtc_load_lut(struct drm_crtc *crtc)
53*4882a593Smuzhiyun {
54*4882a593Smuzhiyun 	struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
55*4882a593Smuzhiyun 	struct drm_device *dev = crtc->dev;
56*4882a593Smuzhiyun 	struct radeon_device *rdev = dev->dev_private;
57*4882a593Smuzhiyun 	u16 *r, *g, *b;
58*4882a593Smuzhiyun 	int i;
59*4882a593Smuzhiyun 
60*4882a593Smuzhiyun 	DRM_DEBUG_KMS("%d\n", radeon_crtc->crtc_id);
61*4882a593Smuzhiyun 	WREG32(AVIVO_DC_LUTA_CONTROL + radeon_crtc->crtc_offset, 0);
62*4882a593Smuzhiyun 
63*4882a593Smuzhiyun 	WREG32(AVIVO_DC_LUTA_BLACK_OFFSET_BLUE + radeon_crtc->crtc_offset, 0);
64*4882a593Smuzhiyun 	WREG32(AVIVO_DC_LUTA_BLACK_OFFSET_GREEN + radeon_crtc->crtc_offset, 0);
65*4882a593Smuzhiyun 	WREG32(AVIVO_DC_LUTA_BLACK_OFFSET_RED + radeon_crtc->crtc_offset, 0);
66*4882a593Smuzhiyun 
67*4882a593Smuzhiyun 	WREG32(AVIVO_DC_LUTA_WHITE_OFFSET_BLUE + radeon_crtc->crtc_offset, 0xffff);
68*4882a593Smuzhiyun 	WREG32(AVIVO_DC_LUTA_WHITE_OFFSET_GREEN + radeon_crtc->crtc_offset, 0xffff);
69*4882a593Smuzhiyun 	WREG32(AVIVO_DC_LUTA_WHITE_OFFSET_RED + radeon_crtc->crtc_offset, 0xffff);
70*4882a593Smuzhiyun 
71*4882a593Smuzhiyun 	WREG32(AVIVO_DC_LUT_RW_SELECT, radeon_crtc->crtc_id);
72*4882a593Smuzhiyun 	WREG32(AVIVO_DC_LUT_RW_MODE, 0);
73*4882a593Smuzhiyun 	WREG32(AVIVO_DC_LUT_WRITE_EN_MASK, 0x0000003f);
74*4882a593Smuzhiyun 
75*4882a593Smuzhiyun 	WREG8(AVIVO_DC_LUT_RW_INDEX, 0);
76*4882a593Smuzhiyun 	r = crtc->gamma_store;
77*4882a593Smuzhiyun 	g = r + crtc->gamma_size;
78*4882a593Smuzhiyun 	b = g + crtc->gamma_size;
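	/* Gamma entries are 16 bits per channel; keep the top 10 bits of each
	 * and pack them into a single 30-bit word: red in bits 29:20, green
	 * in bits 19:10, blue in bits 9:0.
	 */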
79*4882a593Smuzhiyun 	for (i = 0; i < 256; i++) {
80*4882a593Smuzhiyun 		WREG32(AVIVO_DC_LUT_30_COLOR,
81*4882a593Smuzhiyun 		       ((*r++ & 0xffc0) << 14) |
82*4882a593Smuzhiyun 		       ((*g++ & 0xffc0) << 4) |
83*4882a593Smuzhiyun 		       (*b++ >> 6));
84*4882a593Smuzhiyun 	}
85*4882a593Smuzhiyun 
86*4882a593Smuzhiyun 	/* Only change bit 0 of LUT_SEL, other bits are set elsewhere */
87*4882a593Smuzhiyun 	WREG32_P(AVIVO_D1GRPH_LUT_SEL + radeon_crtc->crtc_offset, radeon_crtc->crtc_id, ~1);
88*4882a593Smuzhiyun }
89*4882a593Smuzhiyun 
90*4882a593Smuzhiyun static void dce4_crtc_load_lut(struct drm_crtc *crtc)
91*4882a593Smuzhiyun {
92*4882a593Smuzhiyun 	struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
93*4882a593Smuzhiyun 	struct drm_device *dev = crtc->dev;
94*4882a593Smuzhiyun 	struct radeon_device *rdev = dev->dev_private;
95*4882a593Smuzhiyun 	u16 *r, *g, *b;
96*4882a593Smuzhiyun 	int i;
97*4882a593Smuzhiyun 
98*4882a593Smuzhiyun 	DRM_DEBUG_KMS("%d\n", radeon_crtc->crtc_id);
99*4882a593Smuzhiyun 	WREG32(EVERGREEN_DC_LUT_CONTROL + radeon_crtc->crtc_offset, 0);
100*4882a593Smuzhiyun 
101*4882a593Smuzhiyun 	WREG32(EVERGREEN_DC_LUT_BLACK_OFFSET_BLUE + radeon_crtc->crtc_offset, 0);
102*4882a593Smuzhiyun 	WREG32(EVERGREEN_DC_LUT_BLACK_OFFSET_GREEN + radeon_crtc->crtc_offset, 0);
103*4882a593Smuzhiyun 	WREG32(EVERGREEN_DC_LUT_BLACK_OFFSET_RED + radeon_crtc->crtc_offset, 0);
104*4882a593Smuzhiyun 
105*4882a593Smuzhiyun 	WREG32(EVERGREEN_DC_LUT_WHITE_OFFSET_BLUE + radeon_crtc->crtc_offset, 0xffff);
106*4882a593Smuzhiyun 	WREG32(EVERGREEN_DC_LUT_WHITE_OFFSET_GREEN + radeon_crtc->crtc_offset, 0xffff);
107*4882a593Smuzhiyun 	WREG32(EVERGREEN_DC_LUT_WHITE_OFFSET_RED + radeon_crtc->crtc_offset, 0xffff);
108*4882a593Smuzhiyun 
109*4882a593Smuzhiyun 	WREG32(EVERGREEN_DC_LUT_RW_MODE + radeon_crtc->crtc_offset, 0);
110*4882a593Smuzhiyun 	WREG32(EVERGREEN_DC_LUT_WRITE_EN_MASK + radeon_crtc->crtc_offset, 0x00000007);
111*4882a593Smuzhiyun 
112*4882a593Smuzhiyun 	WREG32(EVERGREEN_DC_LUT_RW_INDEX + radeon_crtc->crtc_offset, 0);
113*4882a593Smuzhiyun 	r = crtc->gamma_store;
114*4882a593Smuzhiyun 	g = r + crtc->gamma_size;
115*4882a593Smuzhiyun 	b = g + crtc->gamma_size;
116*4882a593Smuzhiyun 	for (i = 0; i < 256; i++) {
117*4882a593Smuzhiyun 		WREG32(EVERGREEN_DC_LUT_30_COLOR + radeon_crtc->crtc_offset,
118*4882a593Smuzhiyun 		       ((*r++ & 0xffc0) << 14) |
119*4882a593Smuzhiyun 		       ((*g++ & 0xffc0) << 4) |
120*4882a593Smuzhiyun 		       (*b++ >> 6));
121*4882a593Smuzhiyun 	}
122*4882a593Smuzhiyun }
123*4882a593Smuzhiyun 
124*4882a593Smuzhiyun static void dce5_crtc_load_lut(struct drm_crtc *crtc)
125*4882a593Smuzhiyun {
126*4882a593Smuzhiyun 	struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
127*4882a593Smuzhiyun 	struct drm_device *dev = crtc->dev;
128*4882a593Smuzhiyun 	struct radeon_device *rdev = dev->dev_private;
129*4882a593Smuzhiyun 	u16 *r, *g, *b;
130*4882a593Smuzhiyun 	int i;
131*4882a593Smuzhiyun 
132*4882a593Smuzhiyun 	DRM_DEBUG_KMS("%d\n", radeon_crtc->crtc_id);
133*4882a593Smuzhiyun 
134*4882a593Smuzhiyun 	msleep(10);
135*4882a593Smuzhiyun 
136*4882a593Smuzhiyun 	WREG32(NI_INPUT_CSC_CONTROL + radeon_crtc->crtc_offset,
137*4882a593Smuzhiyun 	       (NI_INPUT_CSC_GRPH_MODE(NI_INPUT_CSC_BYPASS) |
138*4882a593Smuzhiyun 		NI_INPUT_CSC_OVL_MODE(NI_INPUT_CSC_BYPASS)));
139*4882a593Smuzhiyun 	WREG32(NI_PRESCALE_GRPH_CONTROL + radeon_crtc->crtc_offset,
140*4882a593Smuzhiyun 	       NI_GRPH_PRESCALE_BYPASS);
141*4882a593Smuzhiyun 	WREG32(NI_PRESCALE_OVL_CONTROL + radeon_crtc->crtc_offset,
142*4882a593Smuzhiyun 	       NI_OVL_PRESCALE_BYPASS);
143*4882a593Smuzhiyun 	WREG32(NI_INPUT_GAMMA_CONTROL + radeon_crtc->crtc_offset,
144*4882a593Smuzhiyun 	       (NI_GRPH_INPUT_GAMMA_MODE(NI_INPUT_GAMMA_USE_LUT) |
145*4882a593Smuzhiyun 		NI_OVL_INPUT_GAMMA_MODE(NI_INPUT_GAMMA_USE_LUT)));
146*4882a593Smuzhiyun 
147*4882a593Smuzhiyun 	WREG32(EVERGREEN_DC_LUT_CONTROL + radeon_crtc->crtc_offset, 0);
148*4882a593Smuzhiyun 
149*4882a593Smuzhiyun 	WREG32(EVERGREEN_DC_LUT_BLACK_OFFSET_BLUE + radeon_crtc->crtc_offset, 0);
150*4882a593Smuzhiyun 	WREG32(EVERGREEN_DC_LUT_BLACK_OFFSET_GREEN + radeon_crtc->crtc_offset, 0);
151*4882a593Smuzhiyun 	WREG32(EVERGREEN_DC_LUT_BLACK_OFFSET_RED + radeon_crtc->crtc_offset, 0);
152*4882a593Smuzhiyun 
153*4882a593Smuzhiyun 	WREG32(EVERGREEN_DC_LUT_WHITE_OFFSET_BLUE + radeon_crtc->crtc_offset, 0xffff);
154*4882a593Smuzhiyun 	WREG32(EVERGREEN_DC_LUT_WHITE_OFFSET_GREEN + radeon_crtc->crtc_offset, 0xffff);
155*4882a593Smuzhiyun 	WREG32(EVERGREEN_DC_LUT_WHITE_OFFSET_RED + radeon_crtc->crtc_offset, 0xffff);
156*4882a593Smuzhiyun 
157*4882a593Smuzhiyun 	WREG32(EVERGREEN_DC_LUT_RW_MODE + radeon_crtc->crtc_offset, 0);
158*4882a593Smuzhiyun 	WREG32(EVERGREEN_DC_LUT_WRITE_EN_MASK + radeon_crtc->crtc_offset, 0x00000007);
159*4882a593Smuzhiyun 
160*4882a593Smuzhiyun 	WREG32(EVERGREEN_DC_LUT_RW_INDEX + radeon_crtc->crtc_offset, 0);
161*4882a593Smuzhiyun 	r = crtc->gamma_store;
162*4882a593Smuzhiyun 	g = r + crtc->gamma_size;
163*4882a593Smuzhiyun 	b = g + crtc->gamma_size;
164*4882a593Smuzhiyun 	for (i = 0; i < 256; i++) {
165*4882a593Smuzhiyun 		WREG32(EVERGREEN_DC_LUT_30_COLOR + radeon_crtc->crtc_offset,
166*4882a593Smuzhiyun 		       ((*r++ & 0xffc0) << 14) |
167*4882a593Smuzhiyun 		       ((*g++ & 0xffc0) << 4) |
168*4882a593Smuzhiyun 		       (*b++ >> 6));
169*4882a593Smuzhiyun 	}
170*4882a593Smuzhiyun 
171*4882a593Smuzhiyun 	WREG32(NI_DEGAMMA_CONTROL + radeon_crtc->crtc_offset,
172*4882a593Smuzhiyun 	       (NI_GRPH_DEGAMMA_MODE(NI_DEGAMMA_BYPASS) |
173*4882a593Smuzhiyun 		NI_OVL_DEGAMMA_MODE(NI_DEGAMMA_BYPASS) |
174*4882a593Smuzhiyun 		NI_ICON_DEGAMMA_MODE(NI_DEGAMMA_BYPASS) |
175*4882a593Smuzhiyun 		NI_CURSOR_DEGAMMA_MODE(NI_DEGAMMA_BYPASS)));
176*4882a593Smuzhiyun 	WREG32(NI_GAMUT_REMAP_CONTROL + radeon_crtc->crtc_offset,
177*4882a593Smuzhiyun 	       (NI_GRPH_GAMUT_REMAP_MODE(NI_GAMUT_REMAP_BYPASS) |
178*4882a593Smuzhiyun 		NI_OVL_GAMUT_REMAP_MODE(NI_GAMUT_REMAP_BYPASS)));
179*4882a593Smuzhiyun 	WREG32(NI_REGAMMA_CONTROL + radeon_crtc->crtc_offset,
180*4882a593Smuzhiyun 	       (NI_GRPH_REGAMMA_MODE(NI_REGAMMA_BYPASS) |
181*4882a593Smuzhiyun 		NI_OVL_REGAMMA_MODE(NI_REGAMMA_BYPASS)));
182*4882a593Smuzhiyun 	WREG32(NI_OUTPUT_CSC_CONTROL + radeon_crtc->crtc_offset,
183*4882a593Smuzhiyun 	       (NI_OUTPUT_CSC_GRPH_MODE(radeon_crtc->output_csc) |
184*4882a593Smuzhiyun 		NI_OUTPUT_CSC_OVL_MODE(NI_OUTPUT_CSC_BYPASS)));
185*4882a593Smuzhiyun 	/* XXX match this to the depth of the crtc fmt block, move to modeset? */
186*4882a593Smuzhiyun 	WREG32(0x6940 + radeon_crtc->crtc_offset, 0);
187*4882a593Smuzhiyun 	if (ASIC_IS_DCE8(rdev)) {
188*4882a593Smuzhiyun 		/* XXX this only needs to be programmed once per crtc at startup,
189*4882a593Smuzhiyun 		 * not sure where the best place for it is
190*4882a593Smuzhiyun 		 */
191*4882a593Smuzhiyun 		WREG32(CIK_ALPHA_CONTROL + radeon_crtc->crtc_offset,
192*4882a593Smuzhiyun 		       CIK_CURSOR_ALPHA_BLND_ENA);
193*4882a593Smuzhiyun 	}
194*4882a593Smuzhiyun }
195*4882a593Smuzhiyun 
196*4882a593Smuzhiyun static void legacy_crtc_load_lut(struct drm_crtc *crtc)
197*4882a593Smuzhiyun {
198*4882a593Smuzhiyun 	struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
199*4882a593Smuzhiyun 	struct drm_device *dev = crtc->dev;
200*4882a593Smuzhiyun 	struct radeon_device *rdev = dev->dev_private;
201*4882a593Smuzhiyun 	u16 *r, *g, *b;
202*4882a593Smuzhiyun 	int i;
203*4882a593Smuzhiyun 	uint32_t dac2_cntl;
204*4882a593Smuzhiyun 
205*4882a593Smuzhiyun 	dac2_cntl = RREG32(RADEON_DAC_CNTL2);
206*4882a593Smuzhiyun 	if (radeon_crtc->crtc_id == 0)
207*4882a593Smuzhiyun 		dac2_cntl &= (uint32_t)~RADEON_DAC2_PALETTE_ACC_CTL;
208*4882a593Smuzhiyun 	else
209*4882a593Smuzhiyun 		dac2_cntl |= RADEON_DAC2_PALETTE_ACC_CTL;
210*4882a593Smuzhiyun 	WREG32(RADEON_DAC_CNTL2, dac2_cntl);
211*4882a593Smuzhiyun 
212*4882a593Smuzhiyun 	WREG8(RADEON_PALETTE_INDEX, 0);
213*4882a593Smuzhiyun 	r = crtc->gamma_store;
214*4882a593Smuzhiyun 	g = r + crtc->gamma_size;
215*4882a593Smuzhiyun 	b = g + crtc->gamma_size;
216*4882a593Smuzhiyun 	for (i = 0; i < 256; i++) {
217*4882a593Smuzhiyun 		WREG32(RADEON_PALETTE_30_DATA,
218*4882a593Smuzhiyun 		       ((*r++ & 0xffc0) << 14) |
219*4882a593Smuzhiyun 		       ((*g++ & 0xffc0) << 4) |
220*4882a593Smuzhiyun 		       (*b++ >> 6));
221*4882a593Smuzhiyun 	}
222*4882a593Smuzhiyun }
223*4882a593Smuzhiyun 
224*4882a593Smuzhiyun void radeon_crtc_load_lut(struct drm_crtc *crtc)
225*4882a593Smuzhiyun {
226*4882a593Smuzhiyun 	struct drm_device *dev = crtc->dev;
227*4882a593Smuzhiyun 	struct radeon_device *rdev = dev->dev_private;
228*4882a593Smuzhiyun 
229*4882a593Smuzhiyun 	if (!crtc->enabled)
230*4882a593Smuzhiyun 		return;
231*4882a593Smuzhiyun 
232*4882a593Smuzhiyun 	if (ASIC_IS_DCE5(rdev))
233*4882a593Smuzhiyun 		dce5_crtc_load_lut(crtc);
234*4882a593Smuzhiyun 	else if (ASIC_IS_DCE4(rdev))
235*4882a593Smuzhiyun 		dce4_crtc_load_lut(crtc);
236*4882a593Smuzhiyun 	else if (ASIC_IS_AVIVO(rdev))
237*4882a593Smuzhiyun 		avivo_crtc_load_lut(crtc);
238*4882a593Smuzhiyun 	else
239*4882a593Smuzhiyun 		legacy_crtc_load_lut(crtc);
240*4882a593Smuzhiyun }
241*4882a593Smuzhiyun 
242*4882a593Smuzhiyun static int radeon_crtc_gamma_set(struct drm_crtc *crtc, u16 *red, u16 *green,
243*4882a593Smuzhiyun 				 u16 *blue, uint32_t size,
244*4882a593Smuzhiyun 				 struct drm_modeset_acquire_ctx *ctx)
245*4882a593Smuzhiyun {
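	/* The DRM core has already copied the new table into crtc->gamma_store,
	 * so reloading the hardware LUT from it is all that is needed here.
	 */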
246*4882a593Smuzhiyun 	radeon_crtc_load_lut(crtc);
247*4882a593Smuzhiyun 
248*4882a593Smuzhiyun 	return 0;
249*4882a593Smuzhiyun }
250*4882a593Smuzhiyun 
251*4882a593Smuzhiyun static void radeon_crtc_destroy(struct drm_crtc *crtc)
252*4882a593Smuzhiyun {
253*4882a593Smuzhiyun 	struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
254*4882a593Smuzhiyun 
255*4882a593Smuzhiyun 	drm_crtc_cleanup(crtc);
256*4882a593Smuzhiyun 	destroy_workqueue(radeon_crtc->flip_queue);
257*4882a593Smuzhiyun 	kfree(radeon_crtc);
258*4882a593Smuzhiyun }
259*4882a593Smuzhiyun 
260*4882a593Smuzhiyun /**
261*4882a593Smuzhiyun  * radeon_unpin_work_func - unpin old buffer object
262*4882a593Smuzhiyun  *
263*4882a593Smuzhiyun  * @__work: kernel work item
264*4882a593Smuzhiyun  *
265*4882a593Smuzhiyun  * Unpin the old frame buffer object outside of the interrupt handler
266*4882a593Smuzhiyun  */
267*4882a593Smuzhiyun static void radeon_unpin_work_func(struct work_struct *__work)
268*4882a593Smuzhiyun {
269*4882a593Smuzhiyun 	struct radeon_flip_work *work =
270*4882a593Smuzhiyun 		container_of(__work, struct radeon_flip_work, unpin_work);
271*4882a593Smuzhiyun 	int r;
272*4882a593Smuzhiyun 
273*4882a593Smuzhiyun 	/* unpin of the old buffer */
274*4882a593Smuzhiyun 	r = radeon_bo_reserve(work->old_rbo, false);
275*4882a593Smuzhiyun 	if (likely(r == 0)) {
276*4882a593Smuzhiyun 		r = radeon_bo_unpin(work->old_rbo);
277*4882a593Smuzhiyun 		if (unlikely(r != 0)) {
278*4882a593Smuzhiyun 			DRM_ERROR("failed to unpin buffer after flip\n");
279*4882a593Smuzhiyun 		}
280*4882a593Smuzhiyun 		radeon_bo_unreserve(work->old_rbo);
281*4882a593Smuzhiyun 	} else
282*4882a593Smuzhiyun 		DRM_ERROR("failed to reserve buffer after flip\n");
283*4882a593Smuzhiyun 
284*4882a593Smuzhiyun 	drm_gem_object_put(&work->old_rbo->tbo.base);
285*4882a593Smuzhiyun 	kfree(work);
286*4882a593Smuzhiyun }
287*4882a593Smuzhiyun 
288*4882a593Smuzhiyun void radeon_crtc_handle_vblank(struct radeon_device *rdev, int crtc_id)
289*4882a593Smuzhiyun {
290*4882a593Smuzhiyun 	struct radeon_crtc *radeon_crtc = rdev->mode_info.crtcs[crtc_id];
291*4882a593Smuzhiyun 	unsigned long flags;
292*4882a593Smuzhiyun 	u32 update_pending;
293*4882a593Smuzhiyun 	int vpos, hpos;
294*4882a593Smuzhiyun 
295*4882a593Smuzhiyun 	/* can happen during initialization */
296*4882a593Smuzhiyun 	if (radeon_crtc == NULL)
297*4882a593Smuzhiyun 		return;
298*4882a593Smuzhiyun 
299*4882a593Smuzhiyun 	/* Skip the pageflip completion check below (based on polling) on
300*4882a593Smuzhiyun 	 * asics which reliably support hw pageflip completion irqs. pflip
301*4882a593Smuzhiyun 	 * irqs are a reliable and race-free method of handling pageflip
302*4882a593Smuzhiyun 	 * completion detection. A use_pflipirq module parameter < 2 allows
303*4882a593Smuzhiyun 	 * overriding this in case of asics with faulty pflip irqs.
304*4882a593Smuzhiyun 	 * A module parameter of 0 would only use this polling based path,
305*4882a593Smuzhiyun 	 * a parameter of 1 would use pflip irq only as a backup to this
306*4882a593Smuzhiyun 	 * path, as in Linux 3.16.
307*4882a593Smuzhiyun 	 */
308*4882a593Smuzhiyun 	if ((radeon_use_pflipirq == 2) && ASIC_IS_DCE4(rdev))
309*4882a593Smuzhiyun 		return;
310*4882a593Smuzhiyun 
311*4882a593Smuzhiyun 	spin_lock_irqsave(&rdev->ddev->event_lock, flags);
312*4882a593Smuzhiyun 	if (radeon_crtc->flip_status != RADEON_FLIP_SUBMITTED) {
313*4882a593Smuzhiyun 		DRM_DEBUG_DRIVER("radeon_crtc->flip_status = %d != "
314*4882a593Smuzhiyun 				 "RADEON_FLIP_SUBMITTED(%d)\n",
315*4882a593Smuzhiyun 				 radeon_crtc->flip_status,
316*4882a593Smuzhiyun 				 RADEON_FLIP_SUBMITTED);
317*4882a593Smuzhiyun 		spin_unlock_irqrestore(&rdev->ddev->event_lock, flags);
318*4882a593Smuzhiyun 		return;
319*4882a593Smuzhiyun 	}
320*4882a593Smuzhiyun 
321*4882a593Smuzhiyun 	update_pending = radeon_page_flip_pending(rdev, crtc_id);
322*4882a593Smuzhiyun 
323*4882a593Smuzhiyun 	/* Has the pageflip already completed in crtc, or is it certain
324*4882a593Smuzhiyun 	 * to complete in this vblank? GET_DISTANCE_TO_VBLANKSTART provides
325*4882a593Smuzhiyun 	 * distance to start of "fudged earlier" vblank in vpos, distance to
326*4882a593Smuzhiyun 	 * start of real vblank in hpos. vpos >= 0 && hpos < 0 means we are in
327*4882a593Smuzhiyun 	 * the last few scanlines before start of real vblank, where the vblank
328*4882a593Smuzhiyun 	 * irq can fire, so we have sampled update_pending a bit too early and
329*4882a593Smuzhiyun 	 * know the flip will complete at leading edge of the upcoming real
330*4882a593Smuzhiyun 	 * vblank. On pre-AVIVO hardware, flips also complete inside the real
331*4882a593Smuzhiyun 	 * vblank, not only at leading edge, so if update_pending for hpos >= 0
332*4882a593Smuzhiyun 	 *  == inside real vblank, the flip will complete almost immediately.
333*4882a593Smuzhiyun 	 * Note that this method of completion handling is still not 100% race
334*4882a593Smuzhiyun 	 * free, as we could execute before the radeon_flip_work_func managed
335*4882a593Smuzhiyun 	 * to run and set the RADEON_FLIP_SUBMITTED status, thereby we no-op,
336*4882a593Smuzhiyun 	 * but the flip still gets programmed into hw and completed during
337*4882a593Smuzhiyun 	 * vblank, leading to a delayed emission of the flip completion event.
338*4882a593Smuzhiyun 	 * This applies at least to pre-AVIVO hardware, where flips are always
339*4882a593Smuzhiyun 	 * completing inside vblank, not only at leading edge of vblank.
340*4882a593Smuzhiyun 	 */
341*4882a593Smuzhiyun 	if (update_pending &&
342*4882a593Smuzhiyun 	    (DRM_SCANOUTPOS_VALID &
343*4882a593Smuzhiyun 	     radeon_get_crtc_scanoutpos(rdev->ddev, crtc_id,
344*4882a593Smuzhiyun 					GET_DISTANCE_TO_VBLANKSTART,
345*4882a593Smuzhiyun 					&vpos, &hpos, NULL, NULL,
346*4882a593Smuzhiyun 					&rdev->mode_info.crtcs[crtc_id]->base.hwmode)) &&
347*4882a593Smuzhiyun 	    ((vpos >= 0 && hpos < 0) || (hpos >= 0 && !ASIC_IS_AVIVO(rdev)))) {
348*4882a593Smuzhiyun 		/* crtc didn't flip in this target vblank interval,
349*4882a593Smuzhiyun 		 * but flip is pending in crtc. Based on the current
350*4882a593Smuzhiyun 		 * scanout position we know that the current frame is
351*4882a593Smuzhiyun 		 * (nearly) complete and the flip will (likely)
352*4882a593Smuzhiyun 		 * complete before the start of the next frame.
353*4882a593Smuzhiyun 		 */
354*4882a593Smuzhiyun 		update_pending = 0;
355*4882a593Smuzhiyun 	}
356*4882a593Smuzhiyun 	spin_unlock_irqrestore(&rdev->ddev->event_lock, flags);
357*4882a593Smuzhiyun 	if (!update_pending)
358*4882a593Smuzhiyun 		radeon_crtc_handle_flip(rdev, crtc_id);
359*4882a593Smuzhiyun }
360*4882a593Smuzhiyun 
361*4882a593Smuzhiyun /**
362*4882a593Smuzhiyun  * radeon_crtc_handle_flip - page flip completed
363*4882a593Smuzhiyun  *
364*4882a593Smuzhiyun  * @rdev: radeon device pointer
365*4882a593Smuzhiyun  * @crtc_id: crtc number this event is for
366*4882a593Smuzhiyun  *
367*4882a593Smuzhiyun  * Called when we are sure that a page flip for this crtc is completed.
368*4882a593Smuzhiyun  */
369*4882a593Smuzhiyun void radeon_crtc_handle_flip(struct radeon_device *rdev, int crtc_id)
370*4882a593Smuzhiyun {
371*4882a593Smuzhiyun 	struct radeon_crtc *radeon_crtc = rdev->mode_info.crtcs[crtc_id];
372*4882a593Smuzhiyun 	struct radeon_flip_work *work;
373*4882a593Smuzhiyun 	unsigned long flags;
374*4882a593Smuzhiyun 
375*4882a593Smuzhiyun 	/* this can happen at init */
376*4882a593Smuzhiyun 	if (radeon_crtc == NULL)
377*4882a593Smuzhiyun 		return;
378*4882a593Smuzhiyun 
379*4882a593Smuzhiyun 	spin_lock_irqsave(&rdev->ddev->event_lock, flags);
380*4882a593Smuzhiyun 	work = radeon_crtc->flip_work;
381*4882a593Smuzhiyun 	if (radeon_crtc->flip_status != RADEON_FLIP_SUBMITTED) {
382*4882a593Smuzhiyun 		DRM_DEBUG_DRIVER("radeon_crtc->flip_status = %d != "
383*4882a593Smuzhiyun 				 "RADEON_FLIP_SUBMITTED(%d)\n",
384*4882a593Smuzhiyun 				 radeon_crtc->flip_status,
385*4882a593Smuzhiyun 				 RADEON_FLIP_SUBMITTED);
386*4882a593Smuzhiyun 		spin_unlock_irqrestore(&rdev->ddev->event_lock, flags);
387*4882a593Smuzhiyun 		return;
388*4882a593Smuzhiyun 	}
389*4882a593Smuzhiyun 
390*4882a593Smuzhiyun 	/* Pageflip completed. Clean up. */
391*4882a593Smuzhiyun 	radeon_crtc->flip_status = RADEON_FLIP_NONE;
392*4882a593Smuzhiyun 	radeon_crtc->flip_work = NULL;
393*4882a593Smuzhiyun 
394*4882a593Smuzhiyun 	/* wakeup userspace */
395*4882a593Smuzhiyun 	if (work->event)
396*4882a593Smuzhiyun 		drm_crtc_send_vblank_event(&radeon_crtc->base, work->event);
397*4882a593Smuzhiyun 
398*4882a593Smuzhiyun 	spin_unlock_irqrestore(&rdev->ddev->event_lock, flags);
399*4882a593Smuzhiyun 
400*4882a593Smuzhiyun 	drm_crtc_vblank_put(&radeon_crtc->base);
401*4882a593Smuzhiyun 	radeon_irq_kms_pflip_irq_put(rdev, work->crtc_id);
402*4882a593Smuzhiyun 	queue_work(radeon_crtc->flip_queue, &work->unpin_work);
403*4882a593Smuzhiyun }
404*4882a593Smuzhiyun 
405*4882a593Smuzhiyun /**
406*4882a593Smuzhiyun  * radeon_flip_work_func - page flip framebuffer
407*4882a593Smuzhiyun  *
408*4882a593Smuzhiyun  * @__work: kernel work item
409*4882a593Smuzhiyun  *
410*4882a593Smuzhiyun  * Wait for the buffer object to become idle and do the actual page flip
411*4882a593Smuzhiyun  */
412*4882a593Smuzhiyun static void radeon_flip_work_func(struct work_struct *__work)
413*4882a593Smuzhiyun {
414*4882a593Smuzhiyun 	struct radeon_flip_work *work =
415*4882a593Smuzhiyun 		container_of(__work, struct radeon_flip_work, flip_work);
416*4882a593Smuzhiyun 	struct radeon_device *rdev = work->rdev;
417*4882a593Smuzhiyun 	struct drm_device *dev = rdev->ddev;
418*4882a593Smuzhiyun 	struct radeon_crtc *radeon_crtc = rdev->mode_info.crtcs[work->crtc_id];
419*4882a593Smuzhiyun 
420*4882a593Smuzhiyun 	struct drm_crtc *crtc = &radeon_crtc->base;
421*4882a593Smuzhiyun 	unsigned long flags;
422*4882a593Smuzhiyun 	int r;
423*4882a593Smuzhiyun 	int vpos, hpos;
424*4882a593Smuzhiyun 
425*4882a593Smuzhiyun 	down_read(&rdev->exclusive_lock);
426*4882a593Smuzhiyun 	if (work->fence) {
427*4882a593Smuzhiyun 		struct radeon_fence *fence;
428*4882a593Smuzhiyun 
429*4882a593Smuzhiyun 		fence = to_radeon_fence(work->fence);
430*4882a593Smuzhiyun 		if (fence && fence->rdev == rdev) {
431*4882a593Smuzhiyun 			r = radeon_fence_wait(fence, false);
432*4882a593Smuzhiyun 			if (r == -EDEADLK) {
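				/* -EDEADLK means a GPU lockup was detected
				 * while waiting; drop the exclusive lock
				 * (radeon_gpu_reset() takes it for writing)
				 * and reset the GPU before continuing.
				 */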
433*4882a593Smuzhiyun 				up_read(&rdev->exclusive_lock);
434*4882a593Smuzhiyun 				do {
435*4882a593Smuzhiyun 					r = radeon_gpu_reset(rdev);
436*4882a593Smuzhiyun 				} while (r == -EAGAIN);
437*4882a593Smuzhiyun 				down_read(&rdev->exclusive_lock);
438*4882a593Smuzhiyun 			}
439*4882a593Smuzhiyun 		} else
440*4882a593Smuzhiyun 			r = dma_fence_wait(work->fence, false);
441*4882a593Smuzhiyun 
442*4882a593Smuzhiyun 		if (r)
443*4882a593Smuzhiyun 			DRM_ERROR("failed to wait on page flip fence (%d)!\n", r);
444*4882a593Smuzhiyun 
445*4882a593Smuzhiyun 		/* We continue with the page flip even if we failed to wait on
446*4882a593Smuzhiyun 		 * the fence, otherwise the DRM core and userspace will be
447*4882a593Smuzhiyun 		 * confused about which BO the CRTC is scanning out
448*4882a593Smuzhiyun 		 */
449*4882a593Smuzhiyun 
450*4882a593Smuzhiyun 		dma_fence_put(work->fence);
451*4882a593Smuzhiyun 		work->fence = NULL;
452*4882a593Smuzhiyun 	}
453*4882a593Smuzhiyun 
454*4882a593Smuzhiyun 	/* Wait until we're out of the vertical blank period before the one
455*4882a593Smuzhiyun 	 * targeted by the flip. Always wait on pre DCE4 to avoid races with
456*4882a593Smuzhiyun 	 * flip completion handling from vblank irq, as these old asics don't
457*4882a593Smuzhiyun 	 * have reliable pageflip completion interrupts.
458*4882a593Smuzhiyun 	 */
459*4882a593Smuzhiyun 	while (radeon_crtc->enabled &&
460*4882a593Smuzhiyun 		(radeon_get_crtc_scanoutpos(dev, work->crtc_id, 0,
461*4882a593Smuzhiyun 					    &vpos, &hpos, NULL, NULL,
462*4882a593Smuzhiyun 					    &crtc->hwmode)
463*4882a593Smuzhiyun 		& (DRM_SCANOUTPOS_VALID | DRM_SCANOUTPOS_IN_VBLANK)) ==
464*4882a593Smuzhiyun 		(DRM_SCANOUTPOS_VALID | DRM_SCANOUTPOS_IN_VBLANK) &&
465*4882a593Smuzhiyun 		(!ASIC_IS_AVIVO(rdev) ||
466*4882a593Smuzhiyun 		((int) (work->target_vblank -
467*4882a593Smuzhiyun 		crtc->funcs->get_vblank_counter(crtc)) > 0)))
468*4882a593Smuzhiyun 		usleep_range(1000, 2000);
469*4882a593Smuzhiyun 
470*4882a593Smuzhiyun 	/* We borrow the event spin lock for protecting flip_status */
471*4882a593Smuzhiyun 	spin_lock_irqsave(&crtc->dev->event_lock, flags);
472*4882a593Smuzhiyun 
473*4882a593Smuzhiyun 	/* set the proper interrupt */
474*4882a593Smuzhiyun 	radeon_irq_kms_pflip_irq_get(rdev, radeon_crtc->crtc_id);
475*4882a593Smuzhiyun 
476*4882a593Smuzhiyun 	/* do the flip (mmio) */
477*4882a593Smuzhiyun 	radeon_page_flip(rdev, radeon_crtc->crtc_id, work->base, work->async);
478*4882a593Smuzhiyun 
479*4882a593Smuzhiyun 	radeon_crtc->flip_status = RADEON_FLIP_SUBMITTED;
480*4882a593Smuzhiyun 	spin_unlock_irqrestore(&crtc->dev->event_lock, flags);
481*4882a593Smuzhiyun 	up_read(&rdev->exclusive_lock);
482*4882a593Smuzhiyun }
483*4882a593Smuzhiyun 
484*4882a593Smuzhiyun static int radeon_crtc_page_flip_target(struct drm_crtc *crtc,
485*4882a593Smuzhiyun 					struct drm_framebuffer *fb,
486*4882a593Smuzhiyun 					struct drm_pending_vblank_event *event,
487*4882a593Smuzhiyun 					uint32_t page_flip_flags,
488*4882a593Smuzhiyun 					uint32_t target,
489*4882a593Smuzhiyun 					struct drm_modeset_acquire_ctx *ctx)
490*4882a593Smuzhiyun {
491*4882a593Smuzhiyun 	struct drm_device *dev = crtc->dev;
492*4882a593Smuzhiyun 	struct radeon_device *rdev = dev->dev_private;
493*4882a593Smuzhiyun 	struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
494*4882a593Smuzhiyun 	struct drm_gem_object *obj;
495*4882a593Smuzhiyun 	struct radeon_flip_work *work;
496*4882a593Smuzhiyun 	struct radeon_bo *new_rbo;
497*4882a593Smuzhiyun 	uint32_t tiling_flags, pitch_pixels;
498*4882a593Smuzhiyun 	uint64_t base;
499*4882a593Smuzhiyun 	unsigned long flags;
500*4882a593Smuzhiyun 	int r;
501*4882a593Smuzhiyun 
502*4882a593Smuzhiyun 	work = kzalloc(sizeof *work, GFP_KERNEL);
503*4882a593Smuzhiyun 	if (work == NULL)
504*4882a593Smuzhiyun 		return -ENOMEM;
505*4882a593Smuzhiyun 
506*4882a593Smuzhiyun 	INIT_WORK(&work->flip_work, radeon_flip_work_func);
507*4882a593Smuzhiyun 	INIT_WORK(&work->unpin_work, radeon_unpin_work_func);
508*4882a593Smuzhiyun 
509*4882a593Smuzhiyun 	work->rdev = rdev;
510*4882a593Smuzhiyun 	work->crtc_id = radeon_crtc->crtc_id;
511*4882a593Smuzhiyun 	work->event = event;
512*4882a593Smuzhiyun 	work->async = (page_flip_flags & DRM_MODE_PAGE_FLIP_ASYNC) != 0;
513*4882a593Smuzhiyun 
514*4882a593Smuzhiyun 	/* schedule unpin of the old buffer */
515*4882a593Smuzhiyun 	obj = crtc->primary->fb->obj[0];
516*4882a593Smuzhiyun 
517*4882a593Smuzhiyun 	/* take a reference to the old object */
518*4882a593Smuzhiyun 	drm_gem_object_get(obj);
519*4882a593Smuzhiyun 	work->old_rbo = gem_to_radeon_bo(obj);
520*4882a593Smuzhiyun 
521*4882a593Smuzhiyun 	obj = fb->obj[0];
522*4882a593Smuzhiyun 	new_rbo = gem_to_radeon_bo(obj);
523*4882a593Smuzhiyun 
524*4882a593Smuzhiyun 	/* pin the new buffer */
525*4882a593Smuzhiyun 	DRM_DEBUG_DRIVER("flip-ioctl() cur_rbo = %p, new_rbo = %p\n",
526*4882a593Smuzhiyun 			 work->old_rbo, new_rbo);
527*4882a593Smuzhiyun 
528*4882a593Smuzhiyun 	r = radeon_bo_reserve(new_rbo, false);
529*4882a593Smuzhiyun 	if (unlikely(r != 0)) {
530*4882a593Smuzhiyun 		DRM_ERROR("failed to reserve new rbo buffer before flip\n");
531*4882a593Smuzhiyun 		goto cleanup;
532*4882a593Smuzhiyun 	}
533*4882a593Smuzhiyun 	/* Only 27 bit offset for legacy CRTC */
534*4882a593Smuzhiyun 	r = radeon_bo_pin_restricted(new_rbo, RADEON_GEM_DOMAIN_VRAM,
535*4882a593Smuzhiyun 				     ASIC_IS_AVIVO(rdev) ? 0 : 1 << 27, &base);
536*4882a593Smuzhiyun 	if (unlikely(r != 0)) {
537*4882a593Smuzhiyun 		radeon_bo_unreserve(new_rbo);
538*4882a593Smuzhiyun 		r = -EINVAL;
539*4882a593Smuzhiyun 		DRM_ERROR("failed to pin new rbo buffer before flip\n");
540*4882a593Smuzhiyun 		goto cleanup;
541*4882a593Smuzhiyun 	}
542*4882a593Smuzhiyun 	work->fence = dma_fence_get(dma_resv_get_excl(new_rbo->tbo.base.resv));
543*4882a593Smuzhiyun 	radeon_bo_get_tiling_flags(new_rbo, &tiling_flags, NULL);
544*4882a593Smuzhiyun 	radeon_bo_unreserve(new_rbo);
545*4882a593Smuzhiyun 
546*4882a593Smuzhiyun 	if (!ASIC_IS_AVIVO(rdev)) {
547*4882a593Smuzhiyun 		/* crtc offset is from display base addr not FB location */
548*4882a593Smuzhiyun 		base -= radeon_crtc->legacy_display_base_addr;
549*4882a593Smuzhiyun 		pitch_pixels = fb->pitches[0] / fb->format->cpp[0];
550*4882a593Smuzhiyun 
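		/* Compute the byte offset of the (crtc->x, crtc->y) scanout
		 * origin within the surface, handled separately for
		 * macro-tiled and linear layouts.
		 */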
551*4882a593Smuzhiyun 		if (tiling_flags & RADEON_TILING_MACRO) {
552*4882a593Smuzhiyun 			if (ASIC_IS_R300(rdev)) {
553*4882a593Smuzhiyun 				base &= ~0x7ff;
554*4882a593Smuzhiyun 			} else {
555*4882a593Smuzhiyun 				int byteshift = fb->format->cpp[0] * 8 >> 4;
556*4882a593Smuzhiyun 				int tile_addr = (((crtc->y >> 3) * pitch_pixels +  crtc->x) >> (8 - byteshift)) << 11;
557*4882a593Smuzhiyun 				base += tile_addr + ((crtc->x << byteshift) % 256) + ((crtc->y % 8) << 8);
558*4882a593Smuzhiyun 			}
559*4882a593Smuzhiyun 		} else {
560*4882a593Smuzhiyun 			int offset = crtc->y * pitch_pixels + crtc->x;
561*4882a593Smuzhiyun 			switch (fb->format->cpp[0] * 8) {
562*4882a593Smuzhiyun 			case 8:
563*4882a593Smuzhiyun 			default:
564*4882a593Smuzhiyun 				offset *= 1;
565*4882a593Smuzhiyun 				break;
566*4882a593Smuzhiyun 			case 15:
567*4882a593Smuzhiyun 			case 16:
568*4882a593Smuzhiyun 				offset *= 2;
569*4882a593Smuzhiyun 				break;
570*4882a593Smuzhiyun 			case 24:
571*4882a593Smuzhiyun 				offset *= 3;
572*4882a593Smuzhiyun 				break;
573*4882a593Smuzhiyun 			case 32:
574*4882a593Smuzhiyun 				offset *= 4;
575*4882a593Smuzhiyun 				break;
576*4882a593Smuzhiyun 			}
577*4882a593Smuzhiyun 			base += offset;
578*4882a593Smuzhiyun 		}
579*4882a593Smuzhiyun 		base &= ~7;
580*4882a593Smuzhiyun 	}
581*4882a593Smuzhiyun 	work->base = base;
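	/* The requested target is relative to the DRM vblank count; translate
	 * it into the hardware counter domain that the flip worker polls via
	 * crtc->funcs->get_vblank_counter().
	 */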
582*4882a593Smuzhiyun 	work->target_vblank = target - (uint32_t)drm_crtc_vblank_count(crtc) +
583*4882a593Smuzhiyun 		crtc->funcs->get_vblank_counter(crtc);
584*4882a593Smuzhiyun 
585*4882a593Smuzhiyun 	/* We borrow the event spin lock for protecting flip_work */
586*4882a593Smuzhiyun 	spin_lock_irqsave(&crtc->dev->event_lock, flags);
587*4882a593Smuzhiyun 
588*4882a593Smuzhiyun 	if (radeon_crtc->flip_status != RADEON_FLIP_NONE) {
589*4882a593Smuzhiyun 		DRM_DEBUG_DRIVER("flip queue: crtc already busy\n");
590*4882a593Smuzhiyun 		spin_unlock_irqrestore(&crtc->dev->event_lock, flags);
591*4882a593Smuzhiyun 		r = -EBUSY;
592*4882a593Smuzhiyun 		goto pflip_cleanup;
593*4882a593Smuzhiyun 	}
594*4882a593Smuzhiyun 	radeon_crtc->flip_status = RADEON_FLIP_PENDING;
595*4882a593Smuzhiyun 	radeon_crtc->flip_work = work;
596*4882a593Smuzhiyun 
597*4882a593Smuzhiyun 	/* update crtc fb */
598*4882a593Smuzhiyun 	crtc->primary->fb = fb;
599*4882a593Smuzhiyun 
600*4882a593Smuzhiyun 	spin_unlock_irqrestore(&crtc->dev->event_lock, flags);
601*4882a593Smuzhiyun 
602*4882a593Smuzhiyun 	queue_work(radeon_crtc->flip_queue, &work->flip_work);
603*4882a593Smuzhiyun 	return 0;
604*4882a593Smuzhiyun 
605*4882a593Smuzhiyun pflip_cleanup:
606*4882a593Smuzhiyun 	if (unlikely(radeon_bo_reserve(new_rbo, false) != 0)) {
607*4882a593Smuzhiyun 		DRM_ERROR("failed to reserve new rbo in error path\n");
608*4882a593Smuzhiyun 		goto cleanup;
609*4882a593Smuzhiyun 	}
610*4882a593Smuzhiyun 	if (unlikely(radeon_bo_unpin(new_rbo) != 0)) {
611*4882a593Smuzhiyun 		DRM_ERROR("failed to unpin new rbo in error path\n");
612*4882a593Smuzhiyun 	}
613*4882a593Smuzhiyun 	radeon_bo_unreserve(new_rbo);
614*4882a593Smuzhiyun 
615*4882a593Smuzhiyun cleanup:
616*4882a593Smuzhiyun 	drm_gem_object_put(&work->old_rbo->tbo.base);
617*4882a593Smuzhiyun 	dma_fence_put(work->fence);
618*4882a593Smuzhiyun 	kfree(work);
619*4882a593Smuzhiyun 	return r;
620*4882a593Smuzhiyun }
621*4882a593Smuzhiyun 
622*4882a593Smuzhiyun static int
623*4882a593Smuzhiyun radeon_crtc_set_config(struct drm_mode_set *set,
624*4882a593Smuzhiyun 		       struct drm_modeset_acquire_ctx *ctx)
625*4882a593Smuzhiyun {
626*4882a593Smuzhiyun 	struct drm_device *dev;
627*4882a593Smuzhiyun 	struct radeon_device *rdev;
628*4882a593Smuzhiyun 	struct drm_crtc *crtc;
629*4882a593Smuzhiyun 	bool active = false;
630*4882a593Smuzhiyun 	int ret;
631*4882a593Smuzhiyun 
632*4882a593Smuzhiyun 	if (!set || !set->crtc)
633*4882a593Smuzhiyun 		return -EINVAL;
634*4882a593Smuzhiyun 
635*4882a593Smuzhiyun 	dev = set->crtc->dev;
636*4882a593Smuzhiyun 
637*4882a593Smuzhiyun 	ret = pm_runtime_get_sync(dev->dev);
638*4882a593Smuzhiyun 	if (ret < 0) {
639*4882a593Smuzhiyun 		pm_runtime_put_autosuspend(dev->dev);
640*4882a593Smuzhiyun 		return ret;
641*4882a593Smuzhiyun 	}
642*4882a593Smuzhiyun 
643*4882a593Smuzhiyun 	ret = drm_crtc_helper_set_config(set, ctx);
644*4882a593Smuzhiyun 
645*4882a593Smuzhiyun 	list_for_each_entry(crtc, &dev->mode_config.crtc_list, head)
646*4882a593Smuzhiyun 		if (crtc->enabled)
647*4882a593Smuzhiyun 			active = true;
648*4882a593Smuzhiyun 
649*4882a593Smuzhiyun 	pm_runtime_mark_last_busy(dev->dev);
650*4882a593Smuzhiyun 
651*4882a593Smuzhiyun 	rdev = dev->dev_private;
652*4882a593Smuzhiyun 	/* if we have active crtcs and we don't have a power ref,
653*4882a593Smuzhiyun 	   take the current one */
654*4882a593Smuzhiyun 	if (active && !rdev->have_disp_power_ref) {
655*4882a593Smuzhiyun 		rdev->have_disp_power_ref = true;
656*4882a593Smuzhiyun 		return ret;
657*4882a593Smuzhiyun 	}
658*4882a593Smuzhiyun 	/* if we have no active crtcs, then drop the power ref
659*4882a593Smuzhiyun 	   we got before */
660*4882a593Smuzhiyun 	if (!active && rdev->have_disp_power_ref) {
661*4882a593Smuzhiyun 		pm_runtime_put_autosuspend(dev->dev);
662*4882a593Smuzhiyun 		rdev->have_disp_power_ref = false;
663*4882a593Smuzhiyun 	}
664*4882a593Smuzhiyun 
665*4882a593Smuzhiyun 	/* drop the power reference we got coming in here */
666*4882a593Smuzhiyun 	pm_runtime_put_autosuspend(dev->dev);
667*4882a593Smuzhiyun 	return ret;
668*4882a593Smuzhiyun }
669*4882a593Smuzhiyun 
670*4882a593Smuzhiyun static const struct drm_crtc_funcs radeon_crtc_funcs = {
671*4882a593Smuzhiyun 	.cursor_set2 = radeon_crtc_cursor_set2,
672*4882a593Smuzhiyun 	.cursor_move = radeon_crtc_cursor_move,
673*4882a593Smuzhiyun 	.gamma_set = radeon_crtc_gamma_set,
674*4882a593Smuzhiyun 	.set_config = radeon_crtc_set_config,
675*4882a593Smuzhiyun 	.destroy = radeon_crtc_destroy,
676*4882a593Smuzhiyun 	.page_flip_target = radeon_crtc_page_flip_target,
677*4882a593Smuzhiyun 	.get_vblank_counter = radeon_get_vblank_counter_kms,
678*4882a593Smuzhiyun 	.enable_vblank = radeon_enable_vblank_kms,
679*4882a593Smuzhiyun 	.disable_vblank = radeon_disable_vblank_kms,
680*4882a593Smuzhiyun 	.get_vblank_timestamp = drm_crtc_vblank_helper_get_vblank_timestamp,
681*4882a593Smuzhiyun };
682*4882a593Smuzhiyun 
683*4882a593Smuzhiyun static void radeon_crtc_init(struct drm_device *dev, int index)
684*4882a593Smuzhiyun {
685*4882a593Smuzhiyun 	struct radeon_device *rdev = dev->dev_private;
686*4882a593Smuzhiyun 	struct radeon_crtc *radeon_crtc;
687*4882a593Smuzhiyun 
688*4882a593Smuzhiyun 	radeon_crtc = kzalloc(sizeof(struct radeon_crtc) + (RADEONFB_CONN_LIMIT * sizeof(struct drm_connector *)), GFP_KERNEL);
689*4882a593Smuzhiyun 	if (radeon_crtc == NULL)
690*4882a593Smuzhiyun 		return;
691*4882a593Smuzhiyun 
692*4882a593Smuzhiyun 	drm_crtc_init(dev, &radeon_crtc->base, &radeon_crtc_funcs);
693*4882a593Smuzhiyun 
694*4882a593Smuzhiyun 	drm_mode_crtc_set_gamma_size(&radeon_crtc->base, 256);
695*4882a593Smuzhiyun 	radeon_crtc->crtc_id = index;
696*4882a593Smuzhiyun 	radeon_crtc->flip_queue = alloc_workqueue("radeon-crtc", WQ_HIGHPRI, 0);
697*4882a593Smuzhiyun 	rdev->mode_info.crtcs[index] = radeon_crtc;
698*4882a593Smuzhiyun 
699*4882a593Smuzhiyun 	if (rdev->family >= CHIP_BONAIRE) {
700*4882a593Smuzhiyun 		radeon_crtc->max_cursor_width = CIK_CURSOR_WIDTH;
701*4882a593Smuzhiyun 		radeon_crtc->max_cursor_height = CIK_CURSOR_HEIGHT;
702*4882a593Smuzhiyun 	} else {
703*4882a593Smuzhiyun 		radeon_crtc->max_cursor_width = CURSOR_WIDTH;
704*4882a593Smuzhiyun 		radeon_crtc->max_cursor_height = CURSOR_HEIGHT;
705*4882a593Smuzhiyun 	}
706*4882a593Smuzhiyun 	dev->mode_config.cursor_width = radeon_crtc->max_cursor_width;
707*4882a593Smuzhiyun 	dev->mode_config.cursor_height = radeon_crtc->max_cursor_height;
708*4882a593Smuzhiyun 
709*4882a593Smuzhiyun #if 0
710*4882a593Smuzhiyun 	radeon_crtc->mode_set.crtc = &radeon_crtc->base;
711*4882a593Smuzhiyun 	radeon_crtc->mode_set.connectors = (struct drm_connector **)(radeon_crtc + 1);
712*4882a593Smuzhiyun 	radeon_crtc->mode_set.num_connectors = 0;
713*4882a593Smuzhiyun #endif
714*4882a593Smuzhiyun 
715*4882a593Smuzhiyun 	if (rdev->is_atom_bios && (ASIC_IS_AVIVO(rdev) || radeon_r4xx_atom))
716*4882a593Smuzhiyun 		radeon_atombios_init_crtc(dev, radeon_crtc);
717*4882a593Smuzhiyun 	else
718*4882a593Smuzhiyun 		radeon_legacy_init_crtc(dev, radeon_crtc);
719*4882a593Smuzhiyun }
720*4882a593Smuzhiyun 
721*4882a593Smuzhiyun static const char *encoder_names[38] = {
722*4882a593Smuzhiyun 	"NONE",
723*4882a593Smuzhiyun 	"INTERNAL_LVDS",
724*4882a593Smuzhiyun 	"INTERNAL_TMDS1",
725*4882a593Smuzhiyun 	"INTERNAL_TMDS2",
726*4882a593Smuzhiyun 	"INTERNAL_DAC1",
727*4882a593Smuzhiyun 	"INTERNAL_DAC2",
728*4882a593Smuzhiyun 	"INTERNAL_SDVOA",
729*4882a593Smuzhiyun 	"INTERNAL_SDVOB",
730*4882a593Smuzhiyun 	"SI170B",
731*4882a593Smuzhiyun 	"CH7303",
732*4882a593Smuzhiyun 	"CH7301",
733*4882a593Smuzhiyun 	"INTERNAL_DVO1",
734*4882a593Smuzhiyun 	"EXTERNAL_SDVOA",
735*4882a593Smuzhiyun 	"EXTERNAL_SDVOB",
736*4882a593Smuzhiyun 	"TITFP513",
737*4882a593Smuzhiyun 	"INTERNAL_LVTM1",
738*4882a593Smuzhiyun 	"VT1623",
739*4882a593Smuzhiyun 	"HDMI_SI1930",
740*4882a593Smuzhiyun 	"HDMI_INTERNAL",
741*4882a593Smuzhiyun 	"INTERNAL_KLDSCP_TMDS1",
742*4882a593Smuzhiyun 	"INTERNAL_KLDSCP_DVO1",
743*4882a593Smuzhiyun 	"INTERNAL_KLDSCP_DAC1",
744*4882a593Smuzhiyun 	"INTERNAL_KLDSCP_DAC2",
745*4882a593Smuzhiyun 	"SI178",
746*4882a593Smuzhiyun 	"MVPU_FPGA",
747*4882a593Smuzhiyun 	"INTERNAL_DDI",
748*4882a593Smuzhiyun 	"VT1625",
749*4882a593Smuzhiyun 	"HDMI_SI1932",
750*4882a593Smuzhiyun 	"DP_AN9801",
751*4882a593Smuzhiyun 	"DP_DP501",
752*4882a593Smuzhiyun 	"INTERNAL_UNIPHY",
753*4882a593Smuzhiyun 	"INTERNAL_KLDSCP_LVTMA",
754*4882a593Smuzhiyun 	"INTERNAL_UNIPHY1",
755*4882a593Smuzhiyun 	"INTERNAL_UNIPHY2",
756*4882a593Smuzhiyun 	"NUTMEG",
757*4882a593Smuzhiyun 	"TRAVIS",
758*4882a593Smuzhiyun 	"INTERNAL_VCE",
759*4882a593Smuzhiyun 	"INTERNAL_UNIPHY3",
760*4882a593Smuzhiyun };
761*4882a593Smuzhiyun 
762*4882a593Smuzhiyun static const char *hpd_names[6] = {
763*4882a593Smuzhiyun 	"HPD1",
764*4882a593Smuzhiyun 	"HPD2",
765*4882a593Smuzhiyun 	"HPD3",
766*4882a593Smuzhiyun 	"HPD4",
767*4882a593Smuzhiyun 	"HPD5",
768*4882a593Smuzhiyun 	"HPD6",
769*4882a593Smuzhiyun };
770*4882a593Smuzhiyun 
771*4882a593Smuzhiyun static void radeon_print_display_setup(struct drm_device *dev)
772*4882a593Smuzhiyun {
773*4882a593Smuzhiyun 	struct drm_connector *connector;
774*4882a593Smuzhiyun 	struct radeon_connector *radeon_connector;
775*4882a593Smuzhiyun 	struct drm_encoder *encoder;
776*4882a593Smuzhiyun 	struct radeon_encoder *radeon_encoder;
777*4882a593Smuzhiyun 	uint32_t devices;
778*4882a593Smuzhiyun 	int i = 0;
779*4882a593Smuzhiyun 
780*4882a593Smuzhiyun 	DRM_INFO("Radeon Display Connectors\n");
781*4882a593Smuzhiyun 	list_for_each_entry(connector, &dev->mode_config.connector_list, head) {
782*4882a593Smuzhiyun 		radeon_connector = to_radeon_connector(connector);
783*4882a593Smuzhiyun 		DRM_INFO("Connector %d:\n", i);
784*4882a593Smuzhiyun 		DRM_INFO("  %s\n", connector->name);
785*4882a593Smuzhiyun 		if (radeon_connector->hpd.hpd != RADEON_HPD_NONE)
786*4882a593Smuzhiyun 			DRM_INFO("  %s\n", hpd_names[radeon_connector->hpd.hpd]);
787*4882a593Smuzhiyun 		if (radeon_connector->ddc_bus) {
788*4882a593Smuzhiyun 			DRM_INFO("  DDC: 0x%x 0x%x 0x%x 0x%x 0x%x 0x%x 0x%x 0x%x\n",
789*4882a593Smuzhiyun 				 radeon_connector->ddc_bus->rec.mask_clk_reg,
790*4882a593Smuzhiyun 				 radeon_connector->ddc_bus->rec.mask_data_reg,
791*4882a593Smuzhiyun 				 radeon_connector->ddc_bus->rec.a_clk_reg,
792*4882a593Smuzhiyun 				 radeon_connector->ddc_bus->rec.a_data_reg,
793*4882a593Smuzhiyun 				 radeon_connector->ddc_bus->rec.en_clk_reg,
794*4882a593Smuzhiyun 				 radeon_connector->ddc_bus->rec.en_data_reg,
795*4882a593Smuzhiyun 				 radeon_connector->ddc_bus->rec.y_clk_reg,
796*4882a593Smuzhiyun 				 radeon_connector->ddc_bus->rec.y_data_reg);
797*4882a593Smuzhiyun 			if (radeon_connector->router.ddc_valid)
798*4882a593Smuzhiyun 				DRM_INFO("  DDC Router 0x%x/0x%x\n",
799*4882a593Smuzhiyun 					 radeon_connector->router.ddc_mux_control_pin,
800*4882a593Smuzhiyun 					 radeon_connector->router.ddc_mux_state);
801*4882a593Smuzhiyun 			if (radeon_connector->router.cd_valid)
802*4882a593Smuzhiyun 				DRM_INFO("  Clock/Data Router 0x%x/0x%x\n",
803*4882a593Smuzhiyun 					 radeon_connector->router.cd_mux_control_pin,
804*4882a593Smuzhiyun 					 radeon_connector->router.cd_mux_state);
805*4882a593Smuzhiyun 		} else {
806*4882a593Smuzhiyun 			if (connector->connector_type == DRM_MODE_CONNECTOR_VGA ||
807*4882a593Smuzhiyun 			    connector->connector_type == DRM_MODE_CONNECTOR_DVII ||
808*4882a593Smuzhiyun 			    connector->connector_type == DRM_MODE_CONNECTOR_DVID ||
809*4882a593Smuzhiyun 			    connector->connector_type == DRM_MODE_CONNECTOR_DVIA ||
810*4882a593Smuzhiyun 			    connector->connector_type == DRM_MODE_CONNECTOR_HDMIA ||
811*4882a593Smuzhiyun 			    connector->connector_type == DRM_MODE_CONNECTOR_HDMIB)
812*4882a593Smuzhiyun 				DRM_INFO("  DDC: no ddc bus - possible BIOS bug - please report to xorg-driver-ati@lists.x.org\n");
813*4882a593Smuzhiyun 		}
814*4882a593Smuzhiyun 		DRM_INFO("  Encoders:\n");
815*4882a593Smuzhiyun 		list_for_each_entry(encoder, &dev->mode_config.encoder_list, head) {
816*4882a593Smuzhiyun 			radeon_encoder = to_radeon_encoder(encoder);
817*4882a593Smuzhiyun 			devices = radeon_encoder->devices & radeon_connector->devices;
818*4882a593Smuzhiyun 			if (devices) {
819*4882a593Smuzhiyun 				if (devices & ATOM_DEVICE_CRT1_SUPPORT)
820*4882a593Smuzhiyun 					DRM_INFO("    CRT1: %s\n", encoder_names[radeon_encoder->encoder_id]);
821*4882a593Smuzhiyun 				if (devices & ATOM_DEVICE_CRT2_SUPPORT)
822*4882a593Smuzhiyun 					DRM_INFO("    CRT2: %s\n", encoder_names[radeon_encoder->encoder_id]);
823*4882a593Smuzhiyun 				if (devices & ATOM_DEVICE_LCD1_SUPPORT)
824*4882a593Smuzhiyun 					DRM_INFO("    LCD1: %s\n", encoder_names[radeon_encoder->encoder_id]);
825*4882a593Smuzhiyun 				if (devices & ATOM_DEVICE_DFP1_SUPPORT)
826*4882a593Smuzhiyun 					DRM_INFO("    DFP1: %s\n", encoder_names[radeon_encoder->encoder_id]);
827*4882a593Smuzhiyun 				if (devices & ATOM_DEVICE_DFP2_SUPPORT)
828*4882a593Smuzhiyun 					DRM_INFO("    DFP2: %s\n", encoder_names[radeon_encoder->encoder_id]);
829*4882a593Smuzhiyun 				if (devices & ATOM_DEVICE_DFP3_SUPPORT)
830*4882a593Smuzhiyun 					DRM_INFO("    DFP3: %s\n", encoder_names[radeon_encoder->encoder_id]);
831*4882a593Smuzhiyun 				if (devices & ATOM_DEVICE_DFP4_SUPPORT)
832*4882a593Smuzhiyun 					DRM_INFO("    DFP4: %s\n", encoder_names[radeon_encoder->encoder_id]);
833*4882a593Smuzhiyun 				if (devices & ATOM_DEVICE_DFP5_SUPPORT)
834*4882a593Smuzhiyun 					DRM_INFO("    DFP5: %s\n", encoder_names[radeon_encoder->encoder_id]);
835*4882a593Smuzhiyun 				if (devices & ATOM_DEVICE_DFP6_SUPPORT)
836*4882a593Smuzhiyun 					DRM_INFO("    DFP6: %s\n", encoder_names[radeon_encoder->encoder_id]);
837*4882a593Smuzhiyun 				if (devices & ATOM_DEVICE_TV1_SUPPORT)
838*4882a593Smuzhiyun 					DRM_INFO("    TV1: %s\n", encoder_names[radeon_encoder->encoder_id]);
839*4882a593Smuzhiyun 				if (devices & ATOM_DEVICE_CV_SUPPORT)
840*4882a593Smuzhiyun 					DRM_INFO("    CV: %s\n", encoder_names[radeon_encoder->encoder_id]);
841*4882a593Smuzhiyun 			}
842*4882a593Smuzhiyun 		}
843*4882a593Smuzhiyun 		i++;
844*4882a593Smuzhiyun 	}
845*4882a593Smuzhiyun }
846*4882a593Smuzhiyun 
847*4882a593Smuzhiyun static bool radeon_setup_enc_conn(struct drm_device *dev)
848*4882a593Smuzhiyun {
849*4882a593Smuzhiyun 	struct radeon_device *rdev = dev->dev_private;
850*4882a593Smuzhiyun 	bool ret = false;
851*4882a593Smuzhiyun 
852*4882a593Smuzhiyun 	if (rdev->bios) {
853*4882a593Smuzhiyun 		if (rdev->is_atom_bios) {
854*4882a593Smuzhiyun 			ret = radeon_get_atom_connector_info_from_supported_devices_table(dev);
855*4882a593Smuzhiyun 			if (!ret)
856*4882a593Smuzhiyun 				ret = radeon_get_atom_connector_info_from_object_table(dev);
857*4882a593Smuzhiyun 		} else {
858*4882a593Smuzhiyun 			ret = radeon_get_legacy_connector_info_from_bios(dev);
859*4882a593Smuzhiyun 			if (!ret)
860*4882a593Smuzhiyun 				ret = radeon_get_legacy_connector_info_from_table(dev);
861*4882a593Smuzhiyun 		}
862*4882a593Smuzhiyun 	} else {
863*4882a593Smuzhiyun 		if (!ASIC_IS_AVIVO(rdev))
864*4882a593Smuzhiyun 			ret = radeon_get_legacy_connector_info_from_table(dev);
865*4882a593Smuzhiyun 	}
866*4882a593Smuzhiyun 	if (ret) {
867*4882a593Smuzhiyun 		radeon_setup_encoder_clones(dev);
868*4882a593Smuzhiyun 		radeon_print_display_setup(dev);
869*4882a593Smuzhiyun 	}
870*4882a593Smuzhiyun 
871*4882a593Smuzhiyun 	return ret;
872*4882a593Smuzhiyun }
873*4882a593Smuzhiyun 
874*4882a593Smuzhiyun /* avivo */
875*4882a593Smuzhiyun 
876*4882a593Smuzhiyun /**
877*4882a593Smuzhiyun  * avivo_reduce_ratio - fractional number reduction
878*4882a593Smuzhiyun  *
879*4882a593Smuzhiyun  * @nom: nominator
880*4882a593Smuzhiyun  * @nom: numerator
881*4882a593Smuzhiyun  * @den: denominator
882*4882a593Smuzhiyun  * @nom_min: minimum value for numerator
883*4882a593Smuzhiyun  *
884*4882a593Smuzhiyun  * Find the greatest common divisor and apply it on both nominator and
885*4882a593Smuzhiyun  * Find the greatest common divisor and apply it to both the numerator and
886*4882a593Smuzhiyun  * the denominator, while making sure both stay at least as large as their
887*4882a593Smuzhiyun  * minimum values.
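 * e.g. 270/27 with nom_min = 40 and den_min = 4 first reduces to 10/1 and is
 * then scaled up to 40/4.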
 */
888*4882a593Smuzhiyun static void avivo_reduce_ratio(unsigned *nom, unsigned *den,
889*4882a593Smuzhiyun 			       unsigned nom_min, unsigned den_min)
890*4882a593Smuzhiyun {
891*4882a593Smuzhiyun 	unsigned tmp;
892*4882a593Smuzhiyun 
893*4882a593Smuzhiyun 	/* reduce the numbers to a simpler ratio */
894*4882a593Smuzhiyun 	tmp = gcd(*nom, *den);
895*4882a593Smuzhiyun 	*nom /= tmp;
896*4882a593Smuzhiyun 	*den /= tmp;
897*4882a593Smuzhiyun 
898*4882a593Smuzhiyun 	/* make sure nominator is large enough */
899*4882a593Smuzhiyun 	if (*nom < nom_min) {
900*4882a593Smuzhiyun 		tmp = DIV_ROUND_UP(nom_min, *nom);
901*4882a593Smuzhiyun 		*nom *= tmp;
902*4882a593Smuzhiyun 		*den *= tmp;
903*4882a593Smuzhiyun 	}
904*4882a593Smuzhiyun 
905*4882a593Smuzhiyun 	/* make sure the denominator is large enough */
906*4882a593Smuzhiyun 	if (*den < den_min) {
907*4882a593Smuzhiyun 		tmp = DIV_ROUND_UP(den_min, *den);
908*4882a593Smuzhiyun 		*nom *= tmp;
909*4882a593Smuzhiyun 		*den *= tmp;
910*4882a593Smuzhiyun 	}
911*4882a593Smuzhiyun }
912*4882a593Smuzhiyun 
913*4882a593Smuzhiyun /**
914*4882a593Smuzhiyun  * avivo_get_fb_ref_div - feedback and ref divider calculation
915*4882a593Smuzhiyun  *
916*4882a593Smuzhiyun  * @nom: numerator
917*4882a593Smuzhiyun  * @den: denominator
918*4882a593Smuzhiyun  * @post_div: post divider
919*4882a593Smuzhiyun  * @fb_div_max: feedback divider maximum
920*4882a593Smuzhiyun  * @ref_div_max: reference divider maximum
921*4882a593Smuzhiyun  * @fb_div: resulting feedback divider
922*4882a593Smuzhiyun  * @ref_div: resulting reference divider
923*4882a593Smuzhiyun  *
924*4882a593Smuzhiyun  * Calculate feedback and reference divider for a given post divider. Makes
925*4882a593Smuzhiyun  * sure we stay within the limits.
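 * e.g. for nom/den = 40/4 and post_div = 9 this picks ref_div = 1 and
 * fb_div = DIV_ROUND_CLOSEST(40 * 1 * 9, 4) = 90, provided neither limit
 * is exceeded.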
926*4882a593Smuzhiyun  */
927*4882a593Smuzhiyun static void avivo_get_fb_ref_div(unsigned nom, unsigned den, unsigned post_div,
928*4882a593Smuzhiyun 				 unsigned fb_div_max, unsigned ref_div_max,
929*4882a593Smuzhiyun 				 unsigned *fb_div, unsigned *ref_div)
930*4882a593Smuzhiyun {
931*4882a593Smuzhiyun 	/* limit reference * post divider to a maximum */
932*4882a593Smuzhiyun 	ref_div_max = max(min(100 / post_div, ref_div_max), 1u);
933*4882a593Smuzhiyun 
934*4882a593Smuzhiyun 	/* get matching reference and feedback divider */
935*4882a593Smuzhiyun 	*ref_div = min(max(den/post_div, 1u), ref_div_max);
936*4882a593Smuzhiyun 	*fb_div = DIV_ROUND_CLOSEST(nom * *ref_div * post_div, den);
937*4882a593Smuzhiyun 
938*4882a593Smuzhiyun 	/* limit fb divider to its maximum */
939*4882a593Smuzhiyun 	if (*fb_div > fb_div_max) {
940*4882a593Smuzhiyun 		*ref_div = (*ref_div * fb_div_max)/(*fb_div);
941*4882a593Smuzhiyun 		*fb_div = fb_div_max;
942*4882a593Smuzhiyun 	}
943*4882a593Smuzhiyun }
944*4882a593Smuzhiyun 
945*4882a593Smuzhiyun /**
946*4882a593Smuzhiyun  * radeon_compute_pll_avivo - compute PLL parameters
947*4882a593Smuzhiyun  *
948*4882a593Smuzhiyun  * @pll: information about the PLL
949*4882a593Smuzhiyun  * @dot_clock_p: resulting pixel clock
950*4882a593Smuzhiyun  * @fb_div_p: resulting feedback divider
951*4882a593Smuzhiyun  * @frac_fb_div_p: fractional part of the feedback divider
952*4882a593Smuzhiyun  * @ref_div_p: resulting reference divider
953*4882a593Smuzhiyun  * @post_div_p: resulting post divider
954*4882a593Smuzhiyun  *
955*4882a593Smuzhiyun  * Try to calculate the PLL parameters to generate the given frequency:
956*4882a593Smuzhiyun  * dot_clock = (ref_freq * feedback_div) / (ref_div * post_div)
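 * e.g. ref_freq = 2700, feedback_div = 100, ref_div = 3 and post_div = 9
 * give dot_clock = (2700 * 100) / (3 * 9) = 10000, in the same units as
 * ref_freq.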
957*4882a593Smuzhiyun  */
958*4882a593Smuzhiyun void radeon_compute_pll_avivo(struct radeon_pll *pll,
959*4882a593Smuzhiyun 			      u32 freq,
960*4882a593Smuzhiyun 			      u32 *dot_clock_p,
961*4882a593Smuzhiyun 			      u32 *fb_div_p,
962*4882a593Smuzhiyun 			      u32 *frac_fb_div_p,
963*4882a593Smuzhiyun 			      u32 *ref_div_p,
964*4882a593Smuzhiyun 			      u32 *post_div_p)
965*4882a593Smuzhiyun {
966*4882a593Smuzhiyun 	unsigned target_clock = pll->flags & RADEON_PLL_USE_FRAC_FB_DIV ?
967*4882a593Smuzhiyun 		freq : freq / 10;
968*4882a593Smuzhiyun 
969*4882a593Smuzhiyun 	unsigned fb_div_min, fb_div_max, fb_div;
970*4882a593Smuzhiyun 	unsigned post_div_min, post_div_max, post_div;
971*4882a593Smuzhiyun 	unsigned ref_div_min, ref_div_max, ref_div;
972*4882a593Smuzhiyun 	unsigned post_div_best, diff_best;
973*4882a593Smuzhiyun 	unsigned nom, den;
974*4882a593Smuzhiyun 
975*4882a593Smuzhiyun 	/* determine allowed feedback divider range */
976*4882a593Smuzhiyun 	fb_div_min = pll->min_feedback_div;
977*4882a593Smuzhiyun 	fb_div_max = pll->max_feedback_div;
978*4882a593Smuzhiyun 
979*4882a593Smuzhiyun 	if (pll->flags & RADEON_PLL_USE_FRAC_FB_DIV) {
980*4882a593Smuzhiyun 		fb_div_min *= 10;
981*4882a593Smuzhiyun 		fb_div_max *= 10;
982*4882a593Smuzhiyun 	}
983*4882a593Smuzhiyun 
984*4882a593Smuzhiyun 	/* determine allowed ref divider range */
985*4882a593Smuzhiyun 	if (pll->flags & RADEON_PLL_USE_REF_DIV)
986*4882a593Smuzhiyun 		ref_div_min = pll->reference_div;
987*4882a593Smuzhiyun 	else
988*4882a593Smuzhiyun 		ref_div_min = pll->min_ref_div;
989*4882a593Smuzhiyun 
990*4882a593Smuzhiyun 	if (pll->flags & RADEON_PLL_USE_FRAC_FB_DIV &&
991*4882a593Smuzhiyun 	    pll->flags & RADEON_PLL_USE_REF_DIV)
992*4882a593Smuzhiyun 		ref_div_max = pll->reference_div;
993*4882a593Smuzhiyun 	else if (pll->flags & RADEON_PLL_PREFER_MINM_OVER_MAXP)
994*4882a593Smuzhiyun 		/* fix for problems on RS880 */
995*4882a593Smuzhiyun 		ref_div_max = min(pll->max_ref_div, 7u);
996*4882a593Smuzhiyun 	else
997*4882a593Smuzhiyun 		ref_div_max = pll->max_ref_div;
998*4882a593Smuzhiyun 
999*4882a593Smuzhiyun 	/* determine allowed post divider range */
1000*4882a593Smuzhiyun 	if (pll->flags & RADEON_PLL_USE_POST_DIV) {
1001*4882a593Smuzhiyun 		post_div_min = pll->post_div;
1002*4882a593Smuzhiyun 		post_div_max = pll->post_div;
1003*4882a593Smuzhiyun 	} else {
1004*4882a593Smuzhiyun 		unsigned vco_min, vco_max;
1005*4882a593Smuzhiyun 
1006*4882a593Smuzhiyun 		if (pll->flags & RADEON_PLL_IS_LCD) {
1007*4882a593Smuzhiyun 			vco_min = pll->lcd_pll_out_min;
1008*4882a593Smuzhiyun 			vco_max = pll->lcd_pll_out_max;
1009*4882a593Smuzhiyun 		} else {
1010*4882a593Smuzhiyun 			vco_min = pll->pll_out_min;
1011*4882a593Smuzhiyun 			vco_max = pll->pll_out_max;
1012*4882a593Smuzhiyun 		}
1013*4882a593Smuzhiyun 
1014*4882a593Smuzhiyun 		if (pll->flags & RADEON_PLL_USE_FRAC_FB_DIV) {
1015*4882a593Smuzhiyun 			vco_min *= 10;
1016*4882a593Smuzhiyun 			vco_max *= 10;
1017*4882a593Smuzhiyun 		}
1018*4882a593Smuzhiyun 
1019*4882a593Smuzhiyun 		post_div_min = vco_min / target_clock;
1020*4882a593Smuzhiyun 		if ((target_clock * post_div_min) < vco_min)
1021*4882a593Smuzhiyun 			++post_div_min;
1022*4882a593Smuzhiyun 		if (post_div_min < pll->min_post_div)
1023*4882a593Smuzhiyun 			post_div_min = pll->min_post_div;
1024*4882a593Smuzhiyun 
1025*4882a593Smuzhiyun 		post_div_max = vco_max / target_clock;
1026*4882a593Smuzhiyun 		if ((target_clock * post_div_max) > vco_max)
1027*4882a593Smuzhiyun 			--post_div_max;
1028*4882a593Smuzhiyun 		if (post_div_max > pll->max_post_div)
1029*4882a593Smuzhiyun 			post_div_max = pll->max_post_div;
1030*4882a593Smuzhiyun 	}
1031*4882a593Smuzhiyun 
1032*4882a593Smuzhiyun 	/* represent the searched ratio as fractional number */
1033*4882a593Smuzhiyun 	nom = target_clock;
1034*4882a593Smuzhiyun 	den = pll->reference_freq;
1035*4882a593Smuzhiyun 
1036*4882a593Smuzhiyun 	/* reduce the numbers to a simpler ratio */
1037*4882a593Smuzhiyun 	avivo_reduce_ratio(&nom, &den, fb_div_min, post_div_min);
1038*4882a593Smuzhiyun 
1039*4882a593Smuzhiyun 	/* now search for a post divider */
1040*4882a593Smuzhiyun 	if (pll->flags & RADEON_PLL_PREFER_MINM_OVER_MAXP)
1041*4882a593Smuzhiyun 		post_div_best = post_div_min;
1042*4882a593Smuzhiyun 	else
1043*4882a593Smuzhiyun 		post_div_best = post_div_max;
1044*4882a593Smuzhiyun 	diff_best = ~0;
1045*4882a593Smuzhiyun 
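	/*
	 * Try every post divider in the allowed range and keep the one whose
	 * resulting clock is closest to the target; on a tie the larger post
	 * divider wins unless RADEON_PLL_PREFER_MINM_OVER_MAXP is set, in
	 * which case the smaller one is kept.
	 */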
1046*4882a593Smuzhiyun 	for (post_div = post_div_min; post_div <= post_div_max; ++post_div) {
1047*4882a593Smuzhiyun 		unsigned diff;
1048*4882a593Smuzhiyun 		avivo_get_fb_ref_div(nom, den, post_div, fb_div_max,
1049*4882a593Smuzhiyun 				     ref_div_max, &fb_div, &ref_div);
1050*4882a593Smuzhiyun 		diff = abs(target_clock - (pll->reference_freq * fb_div) /
1051*4882a593Smuzhiyun 			(ref_div * post_div));
1052*4882a593Smuzhiyun 
1053*4882a593Smuzhiyun 		if (diff < diff_best || (diff == diff_best &&
1054*4882a593Smuzhiyun 		    !(pll->flags & RADEON_PLL_PREFER_MINM_OVER_MAXP))) {
1055*4882a593Smuzhiyun 
1056*4882a593Smuzhiyun 			post_div_best = post_div;
1057*4882a593Smuzhiyun 			diff_best = diff;
1058*4882a593Smuzhiyun 		}
1059*4882a593Smuzhiyun 	}
1060*4882a593Smuzhiyun 	post_div = post_div_best;
1061*4882a593Smuzhiyun 
1062*4882a593Smuzhiyun 	/* get the feedback and reference divider for the optimal value */
1063*4882a593Smuzhiyun 	avivo_get_fb_ref_div(nom, den, post_div, fb_div_max, ref_div_max,
1064*4882a593Smuzhiyun 			     &fb_div, &ref_div);
1065*4882a593Smuzhiyun 
1066*4882a593Smuzhiyun 	/* reduce the numbers to a simpler ratio once more */
1067*4882a593Smuzhiyun 	/* this also makes sure that the reference divider is large enough */
1068*4882a593Smuzhiyun 	avivo_reduce_ratio(&fb_div, &ref_div, fb_div_min, ref_div_min);
1069*4882a593Smuzhiyun 
1070*4882a593Smuzhiyun 	/* avoid high jitter with small fractional dividers */
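	/* e.g. a remainder of 1 (a .1 fractional part) raises the minimum
	 * feedback divider to at least 21.0, while a remainder of 9 only
	 * requires 5.0; fb_div and ref_div are then scaled up together so
	 * the ratio itself is unchanged.
	 */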
1071*4882a593Smuzhiyun 	if (pll->flags & RADEON_PLL_USE_FRAC_FB_DIV && (fb_div % 10)) {
1072*4882a593Smuzhiyun 		fb_div_min = max(fb_div_min, (9 - (fb_div % 10)) * 20 + 50);
1073*4882a593Smuzhiyun 		if (fb_div < fb_div_min) {
1074*4882a593Smuzhiyun 			unsigned tmp = DIV_ROUND_UP(fb_div_min, fb_div);
1075*4882a593Smuzhiyun 			fb_div *= tmp;
1076*4882a593Smuzhiyun 			ref_div *= tmp;
1077*4882a593Smuzhiyun 		}
1078*4882a593Smuzhiyun 	}
1079*4882a593Smuzhiyun 
1080*4882a593Smuzhiyun 	/* and finally save the result */
1081*4882a593Smuzhiyun 	if (pll->flags & RADEON_PLL_USE_FRAC_FB_DIV) {
1082*4882a593Smuzhiyun 		*fb_div_p = fb_div / 10;
1083*4882a593Smuzhiyun 		*frac_fb_div_p = fb_div % 10;
1084*4882a593Smuzhiyun 	} else {
1085*4882a593Smuzhiyun 		*fb_div_p = fb_div;
1086*4882a593Smuzhiyun 		*frac_fb_div_p = 0;
1087*4882a593Smuzhiyun 	}
1088*4882a593Smuzhiyun 
1089*4882a593Smuzhiyun 	*dot_clock_p = ((pll->reference_freq * *fb_div_p * 10) +
1090*4882a593Smuzhiyun 			(pll->reference_freq * *frac_fb_div_p)) /
1091*4882a593Smuzhiyun 		       (ref_div * post_div * 10);
1092*4882a593Smuzhiyun 	*ref_div_p = ref_div;
1093*4882a593Smuzhiyun 	*post_div_p = post_div;
1094*4882a593Smuzhiyun 
1095*4882a593Smuzhiyun 	DRM_DEBUG_KMS("%d - %d, pll dividers - fb: %d.%d ref: %d, post %d\n",
1096*4882a593Smuzhiyun 		      freq, *dot_clock_p * 10, *fb_div_p, *frac_fb_div_p,
1097*4882a593Smuzhiyun 		      ref_div, post_div);
1098*4882a593Smuzhiyun }
1099*4882a593Smuzhiyun 
1100*4882a593Smuzhiyun /* pre-avivo */
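/*
 * Round-to-nearest division of a 64-bit numerator by a 32-bit divisor:
 * biasing n by d/2 before the truncating do_div() (which divides n in
 * place) rounds the quotient to the nearest integer, e.g.
 * radeon_div(7, 2) == 4 rather than 3.
 */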
radeon_div(uint64_t n,uint32_t d)1101*4882a593Smuzhiyun static inline uint32_t radeon_div(uint64_t n, uint32_t d)
1102*4882a593Smuzhiyun {
1105*4882a593Smuzhiyun 	n += d / 2;
1106*4882a593Smuzhiyun 
1107*4882a593Smuzhiyun 	do_div(n, d);
1108*4882a593Smuzhiyun 	return n;
1109*4882a593Smuzhiyun }
1110*4882a593Smuzhiyun 
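/*
 * radeon_compute_pll_legacy - compute PLL dividers for pre-AVIVO chips.
 *
 * Walks the allowed post and reference dividers and bisects the (optionally
 * fractional) feedback divider for each combination, keeping whichever set
 * of dividers produces the frequency closest to @freq while the VCO stays
 * inside the pll_out_min..pll_out_max window.  The chosen dividers and the
 * actually achieved dot clock are returned through the *_p out parameters;
 * the RADEON_PLL_PREFER_* flags bias the choice when several combinations
 * hit the target exactly.
 */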
radeon_compute_pll_legacy(struct radeon_pll * pll,uint64_t freq,uint32_t * dot_clock_p,uint32_t * fb_div_p,uint32_t * frac_fb_div_p,uint32_t * ref_div_p,uint32_t * post_div_p)1111*4882a593Smuzhiyun void radeon_compute_pll_legacy(struct radeon_pll *pll,
1112*4882a593Smuzhiyun 			       uint64_t freq,
1113*4882a593Smuzhiyun 			       uint32_t *dot_clock_p,
1114*4882a593Smuzhiyun 			       uint32_t *fb_div_p,
1115*4882a593Smuzhiyun 			       uint32_t *frac_fb_div_p,
1116*4882a593Smuzhiyun 			       uint32_t *ref_div_p,
1117*4882a593Smuzhiyun 			       uint32_t *post_div_p)
1118*4882a593Smuzhiyun {
1119*4882a593Smuzhiyun 	uint32_t min_ref_div = pll->min_ref_div;
1120*4882a593Smuzhiyun 	uint32_t max_ref_div = pll->max_ref_div;
1121*4882a593Smuzhiyun 	uint32_t min_post_div = pll->min_post_div;
1122*4882a593Smuzhiyun 	uint32_t max_post_div = pll->max_post_div;
1123*4882a593Smuzhiyun 	uint32_t min_fractional_feed_div = 0;
1124*4882a593Smuzhiyun 	uint32_t max_fractional_feed_div = 0;
1125*4882a593Smuzhiyun 	uint32_t best_vco = pll->best_vco;
1126*4882a593Smuzhiyun 	uint32_t best_post_div = 1;
1127*4882a593Smuzhiyun 	uint32_t best_ref_div = 1;
1128*4882a593Smuzhiyun 	uint32_t best_feedback_div = 1;
1129*4882a593Smuzhiyun 	uint32_t best_frac_feedback_div = 0;
1130*4882a593Smuzhiyun 	uint32_t best_freq = -1;
1131*4882a593Smuzhiyun 	uint32_t best_error = 0xffffffff;
1132*4882a593Smuzhiyun 	uint32_t best_vco_diff = 1;
1133*4882a593Smuzhiyun 	uint32_t post_div;
1134*4882a593Smuzhiyun 	u32 pll_out_min, pll_out_max;
1135*4882a593Smuzhiyun 
1136*4882a593Smuzhiyun 	DRM_DEBUG_KMS("PLL freq %llu %u %u\n", freq, pll->min_ref_div, pll->max_ref_div);
1137*4882a593Smuzhiyun 	freq = freq * 1000;
1138*4882a593Smuzhiyun 
1139*4882a593Smuzhiyun 	if (pll->flags & RADEON_PLL_IS_LCD) {
1140*4882a593Smuzhiyun 		pll_out_min = pll->lcd_pll_out_min;
1141*4882a593Smuzhiyun 		pll_out_max = pll->lcd_pll_out_max;
1142*4882a593Smuzhiyun 	} else {
1143*4882a593Smuzhiyun 		pll_out_min = pll->pll_out_min;
1144*4882a593Smuzhiyun 		pll_out_max = pll->pll_out_max;
1145*4882a593Smuzhiyun 	}
1146*4882a593Smuzhiyun 
1147*4882a593Smuzhiyun 	if (pll_out_min > 64800)
1148*4882a593Smuzhiyun 		pll_out_min = 64800;
1149*4882a593Smuzhiyun 
1150*4882a593Smuzhiyun 	if (pll->flags & RADEON_PLL_USE_REF_DIV)
1151*4882a593Smuzhiyun 		min_ref_div = max_ref_div = pll->reference_div;
1152*4882a593Smuzhiyun 	else {
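		/*
		 * Bisect the reference divider range until the midpoint gives
		 * a PLL input clock (reference_freq / ref_div) inside the
		 * legal pll_in_min..pll_in_max window; the search loop below
		 * then scans the narrowed range.
		 */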
1153*4882a593Smuzhiyun 		while (min_ref_div < max_ref_div-1) {
1154*4882a593Smuzhiyun 			uint32_t mid = (min_ref_div + max_ref_div) / 2;
1155*4882a593Smuzhiyun 			uint32_t pll_in = pll->reference_freq / mid;
1156*4882a593Smuzhiyun 			if (pll_in < pll->pll_in_min)
1157*4882a593Smuzhiyun 				max_ref_div = mid;
1158*4882a593Smuzhiyun 			else if (pll_in > pll->pll_in_max)
1159*4882a593Smuzhiyun 				min_ref_div = mid;
1160*4882a593Smuzhiyun 			else
1161*4882a593Smuzhiyun 				break;
1162*4882a593Smuzhiyun 		}
1163*4882a593Smuzhiyun 	}
1164*4882a593Smuzhiyun 
1165*4882a593Smuzhiyun 	if (pll->flags & RADEON_PLL_USE_POST_DIV)
1166*4882a593Smuzhiyun 		min_post_div = max_post_div = pll->post_div;
1167*4882a593Smuzhiyun 
1168*4882a593Smuzhiyun 	if (pll->flags & RADEON_PLL_USE_FRAC_FB_DIV) {
1169*4882a593Smuzhiyun 		min_fractional_feed_div = pll->min_frac_feedback_div;
1170*4882a593Smuzhiyun 		max_fractional_feed_div = pll->max_frac_feedback_div;
1171*4882a593Smuzhiyun 	}
1172*4882a593Smuzhiyun 
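	/*
	 * For every legal post/reference divider pair, bisect the feedback
	 * divider (and, if enabled, its fractional part) towards the target
	 * frequency, discarding candidates whose VCO falls outside
	 * pll_out_min..pll_out_max.
	 */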
1173*4882a593Smuzhiyun 	for (post_div = max_post_div; post_div >= min_post_div; --post_div) {
1174*4882a593Smuzhiyun 		uint32_t ref_div;
1175*4882a593Smuzhiyun 
1176*4882a593Smuzhiyun 		if ((pll->flags & RADEON_PLL_NO_ODD_POST_DIV) && (post_div & 1))
1177*4882a593Smuzhiyun 			continue;
1178*4882a593Smuzhiyun 
1179*4882a593Smuzhiyun 		/* legacy radeons only have a few post_divs */
1180*4882a593Smuzhiyun 		if (pll->flags & RADEON_PLL_LEGACY) {
1181*4882a593Smuzhiyun 			if ((post_div == 5) ||
1182*4882a593Smuzhiyun 			    (post_div == 7) ||
1183*4882a593Smuzhiyun 			    (post_div == 9) ||
1184*4882a593Smuzhiyun 			    (post_div == 10) ||
1185*4882a593Smuzhiyun 			    (post_div == 11) ||
1186*4882a593Smuzhiyun 			    (post_div == 13) ||
1187*4882a593Smuzhiyun 			    (post_div == 14) ||
1188*4882a593Smuzhiyun 			    (post_div == 15))
1189*4882a593Smuzhiyun 				continue;
1190*4882a593Smuzhiyun 		}
1191*4882a593Smuzhiyun 
1192*4882a593Smuzhiyun 		for (ref_div = min_ref_div; ref_div <= max_ref_div; ++ref_div) {
1193*4882a593Smuzhiyun 			uint32_t feedback_div, current_freq = 0, error, vco_diff;
1194*4882a593Smuzhiyun 			uint32_t pll_in = pll->reference_freq / ref_div;
1195*4882a593Smuzhiyun 			uint32_t min_feed_div = pll->min_feedback_div;
1196*4882a593Smuzhiyun 			uint32_t max_feed_div = pll->max_feedback_div + 1;
1197*4882a593Smuzhiyun 
1198*4882a593Smuzhiyun 			if (pll_in < pll->pll_in_min || pll_in > pll->pll_in_max)
1199*4882a593Smuzhiyun 				continue;
1200*4882a593Smuzhiyun 
1201*4882a593Smuzhiyun 			while (min_feed_div < max_feed_div) {
1202*4882a593Smuzhiyun 				uint32_t vco;
1203*4882a593Smuzhiyun 				uint32_t min_frac_feed_div = min_fractional_feed_div;
1204*4882a593Smuzhiyun 				uint32_t max_frac_feed_div = max_fractional_feed_div + 1;
1205*4882a593Smuzhiyun 				uint32_t frac_feedback_div;
1206*4882a593Smuzhiyun 				uint64_t tmp;
1207*4882a593Smuzhiyun 
1208*4882a593Smuzhiyun 				feedback_div = (min_feed_div + max_feed_div) / 2;
1209*4882a593Smuzhiyun 
1210*4882a593Smuzhiyun 				tmp = (uint64_t)pll->reference_freq * feedback_div;
1211*4882a593Smuzhiyun 				vco = radeon_div(tmp, ref_div);
1212*4882a593Smuzhiyun 
1213*4882a593Smuzhiyun 				if (vco < pll_out_min) {
1214*4882a593Smuzhiyun 					min_feed_div = feedback_div + 1;
1215*4882a593Smuzhiyun 					continue;
1216*4882a593Smuzhiyun 				} else if (vco > pll_out_max) {
1217*4882a593Smuzhiyun 					max_feed_div = feedback_div;
1218*4882a593Smuzhiyun 					continue;
1219*4882a593Smuzhiyun 				}
1220*4882a593Smuzhiyun 
1221*4882a593Smuzhiyun 				while (min_frac_feed_div < max_frac_feed_div) {
1222*4882a593Smuzhiyun 					frac_feedback_div = (min_frac_feed_div + max_frac_feed_div) / 2;
1223*4882a593Smuzhiyun 					tmp = (uint64_t)pll->reference_freq * 10000 * feedback_div;
1224*4882a593Smuzhiyun 					tmp += (uint64_t)pll->reference_freq * 1000 * frac_feedback_div;
1225*4882a593Smuzhiyun 					current_freq = radeon_div(tmp, ref_div * post_div);
1226*4882a593Smuzhiyun 
1227*4882a593Smuzhiyun 					if (pll->flags & RADEON_PLL_PREFER_CLOSEST_LOWER) {
1228*4882a593Smuzhiyun 						if (freq < current_freq)
1229*4882a593Smuzhiyun 							error = 0xffffffff;
1230*4882a593Smuzhiyun 						else
1231*4882a593Smuzhiyun 							error = freq - current_freq;
1232*4882a593Smuzhiyun 					} else
1233*4882a593Smuzhiyun 						error = abs(current_freq - freq);
1234*4882a593Smuzhiyun 					vco_diff = abs(vco - best_vco);
1235*4882a593Smuzhiyun 
1236*4882a593Smuzhiyun 					if ((best_vco == 0 && error < best_error) ||
1237*4882a593Smuzhiyun 					    (best_vco != 0 &&
1238*4882a593Smuzhiyun 					     ((best_error > 100 && error < best_error - 100) ||
1239*4882a593Smuzhiyun 					      (abs(error - best_error) < 100 && vco_diff < best_vco_diff)))) {
1240*4882a593Smuzhiyun 						best_post_div = post_div;
1241*4882a593Smuzhiyun 						best_ref_div = ref_div;
1242*4882a593Smuzhiyun 						best_feedback_div = feedback_div;
1243*4882a593Smuzhiyun 						best_frac_feedback_div = frac_feedback_div;
1244*4882a593Smuzhiyun 						best_freq = current_freq;
1245*4882a593Smuzhiyun 						best_error = error;
1246*4882a593Smuzhiyun 						best_vco_diff = vco_diff;
1247*4882a593Smuzhiyun 					} else if (current_freq == freq) {
1248*4882a593Smuzhiyun 						if (best_freq == -1) {
1249*4882a593Smuzhiyun 							best_post_div = post_div;
1250*4882a593Smuzhiyun 							best_ref_div = ref_div;
1251*4882a593Smuzhiyun 							best_feedback_div = feedback_div;
1252*4882a593Smuzhiyun 							best_frac_feedback_div = frac_feedback_div;
1253*4882a593Smuzhiyun 							best_freq = current_freq;
1254*4882a593Smuzhiyun 							best_error = error;
1255*4882a593Smuzhiyun 							best_vco_diff = vco_diff;
1256*4882a593Smuzhiyun 						} else if (((pll->flags & RADEON_PLL_PREFER_LOW_REF_DIV) && (ref_div < best_ref_div)) ||
1257*4882a593Smuzhiyun 							   ((pll->flags & RADEON_PLL_PREFER_HIGH_REF_DIV) && (ref_div > best_ref_div)) ||
1258*4882a593Smuzhiyun 							   ((pll->flags & RADEON_PLL_PREFER_LOW_FB_DIV) && (feedback_div < best_feedback_div)) ||
1259*4882a593Smuzhiyun 							   ((pll->flags & RADEON_PLL_PREFER_HIGH_FB_DIV) && (feedback_div > best_feedback_div)) ||
1260*4882a593Smuzhiyun 							   ((pll->flags & RADEON_PLL_PREFER_LOW_POST_DIV) && (post_div < best_post_div)) ||
1261*4882a593Smuzhiyun 							   ((pll->flags & RADEON_PLL_PREFER_HIGH_POST_DIV) && (post_div > best_post_div))) {
1262*4882a593Smuzhiyun 							best_post_div = post_div;
1263*4882a593Smuzhiyun 							best_ref_div = ref_div;
1264*4882a593Smuzhiyun 							best_feedback_div = feedback_div;
1265*4882a593Smuzhiyun 							best_frac_feedback_div = frac_feedback_div;
1266*4882a593Smuzhiyun 							best_freq = current_freq;
1267*4882a593Smuzhiyun 							best_error = error;
1268*4882a593Smuzhiyun 							best_vco_diff = vco_diff;
1269*4882a593Smuzhiyun 						}
1270*4882a593Smuzhiyun 					}
1271*4882a593Smuzhiyun 					if (current_freq < freq)
1272*4882a593Smuzhiyun 						min_frac_feed_div = frac_feedback_div + 1;
1273*4882a593Smuzhiyun 					else
1274*4882a593Smuzhiyun 						max_frac_feed_div = frac_feedback_div;
1275*4882a593Smuzhiyun 				}
1276*4882a593Smuzhiyun 				if (current_freq < freq)
1277*4882a593Smuzhiyun 					min_feed_div = feedback_div + 1;
1278*4882a593Smuzhiyun 				else
1279*4882a593Smuzhiyun 					max_feed_div = feedback_div;
1280*4882a593Smuzhiyun 			}
1281*4882a593Smuzhiyun 		}
1282*4882a593Smuzhiyun 	}
1283*4882a593Smuzhiyun 
1284*4882a593Smuzhiyun 	*dot_clock_p = best_freq / 10000;
1285*4882a593Smuzhiyun 	*fb_div_p = best_feedback_div;
1286*4882a593Smuzhiyun 	*frac_fb_div_p = best_frac_feedback_div;
1287*4882a593Smuzhiyun 	*ref_div_p = best_ref_div;
1288*4882a593Smuzhiyun 	*post_div_p = best_post_div;
1289*4882a593Smuzhiyun 	DRM_DEBUG_KMS("%lld %d, pll dividers - fb: %d.%d ref: %d, post %d\n",
1290*4882a593Smuzhiyun 		      (long long)freq,
1291*4882a593Smuzhiyun 		      best_freq / 1000, best_feedback_div, best_frac_feedback_div,
1292*4882a593Smuzhiyun 		      best_ref_div, best_post_div);
1293*4882a593Smuzhiyun 
1294*4882a593Smuzhiyun }
1295*4882a593Smuzhiyun 
1296*4882a593Smuzhiyun static const struct drm_framebuffer_funcs radeon_fb_funcs = {
1297*4882a593Smuzhiyun 	.destroy = drm_gem_fb_destroy,
1298*4882a593Smuzhiyun 	.create_handle = drm_gem_fb_create_handle,
1299*4882a593Smuzhiyun };
1300*4882a593Smuzhiyun 
1301*4882a593Smuzhiyun int
radeon_framebuffer_init(struct drm_device * dev,struct drm_framebuffer * fb,const struct drm_mode_fb_cmd2 * mode_cmd,struct drm_gem_object * obj)1302*4882a593Smuzhiyun radeon_framebuffer_init(struct drm_device *dev,
1303*4882a593Smuzhiyun 			struct drm_framebuffer *fb,
1304*4882a593Smuzhiyun 			const struct drm_mode_fb_cmd2 *mode_cmd,
1305*4882a593Smuzhiyun 			struct drm_gem_object *obj)
1306*4882a593Smuzhiyun {
1307*4882a593Smuzhiyun 	int ret;
1308*4882a593Smuzhiyun 	fb->obj[0] = obj;
1309*4882a593Smuzhiyun 	drm_helper_mode_fill_fb_struct(dev, fb, mode_cmd);
1310*4882a593Smuzhiyun 	ret = drm_framebuffer_init(dev, fb, &radeon_fb_funcs);
1311*4882a593Smuzhiyun 	if (ret) {
1312*4882a593Smuzhiyun 		fb->obj[0] = NULL;
1313*4882a593Smuzhiyun 		return ret;
1314*4882a593Smuzhiyun 	}
1315*4882a593Smuzhiyun 	return 0;
1316*4882a593Smuzhiyun }
1317*4882a593Smuzhiyun 
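/*
 * .fb_create hook for radeon_mode_funcs: look up the GEM object backing the
 * framebuffer, refuse imported dma-bufs (they cannot be migrated to VRAM for
 * scanout), and wrap the object in a freshly allocated drm_framebuffer.
 */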
1318*4882a593Smuzhiyun static struct drm_framebuffer *
radeon_user_framebuffer_create(struct drm_device * dev,struct drm_file * file_priv,const struct drm_mode_fb_cmd2 * mode_cmd)1319*4882a593Smuzhiyun radeon_user_framebuffer_create(struct drm_device *dev,
1320*4882a593Smuzhiyun 			       struct drm_file *file_priv,
1321*4882a593Smuzhiyun 			       const struct drm_mode_fb_cmd2 *mode_cmd)
1322*4882a593Smuzhiyun {
1323*4882a593Smuzhiyun 	struct drm_gem_object *obj;
1324*4882a593Smuzhiyun 	struct drm_framebuffer *fb;
1325*4882a593Smuzhiyun 	int ret;
1326*4882a593Smuzhiyun 
1327*4882a593Smuzhiyun 	obj = drm_gem_object_lookup(file_priv, mode_cmd->handles[0]);
1328*4882a593Smuzhiyun 	if (obj == NULL) {
1329*4882a593Smuzhiyun 		dev_err(&dev->pdev->dev, "No GEM object associated with handle 0x%08X, "
1330*4882a593Smuzhiyun 			"can't create framebuffer\n", mode_cmd->handles[0]);
1331*4882a593Smuzhiyun 		return ERR_PTR(-ENOENT);
1332*4882a593Smuzhiyun 	}
1333*4882a593Smuzhiyun 
1334*4882a593Smuzhiyun 	/* Handle is imported dma-buf, so cannot be migrated to VRAM for scanout */
1335*4882a593Smuzhiyun 	if (obj->import_attach) {
1336*4882a593Smuzhiyun 		DRM_DEBUG_KMS("Cannot create framebuffer from imported dma_buf\n");
1337*4882a593Smuzhiyun 		drm_gem_object_put(obj);
1338*4882a593Smuzhiyun 		return ERR_PTR(-EINVAL);
1339*4882a593Smuzhiyun 	}
1340*4882a593Smuzhiyun 
1341*4882a593Smuzhiyun 	fb = kzalloc(sizeof(*fb), GFP_KERNEL);
1342*4882a593Smuzhiyun 	if (fb == NULL) {
1343*4882a593Smuzhiyun 		drm_gem_object_put(obj);
1344*4882a593Smuzhiyun 		return ERR_PTR(-ENOMEM);
1345*4882a593Smuzhiyun 	}
1346*4882a593Smuzhiyun 
1347*4882a593Smuzhiyun 	ret = radeon_framebuffer_init(dev, fb, mode_cmd, obj);
1348*4882a593Smuzhiyun 	if (ret) {
1349*4882a593Smuzhiyun 		kfree(fb);
1350*4882a593Smuzhiyun 		drm_gem_object_put(obj);
1351*4882a593Smuzhiyun 		return ERR_PTR(ret);
1352*4882a593Smuzhiyun 	}
1353*4882a593Smuzhiyun 
1354*4882a593Smuzhiyun 	return fb;
1355*4882a593Smuzhiyun }
1356*4882a593Smuzhiyun 
1357*4882a593Smuzhiyun static const struct drm_mode_config_funcs radeon_mode_funcs = {
1358*4882a593Smuzhiyun 	.fb_create = radeon_user_framebuffer_create,
1359*4882a593Smuzhiyun 	.output_poll_changed = drm_fb_helper_output_poll_changed,
1360*4882a593Smuzhiyun };
1361*4882a593Smuzhiyun 
1362*4882a593Smuzhiyun static const struct drm_prop_enum_list radeon_tmds_pll_enum_list[] =
1363*4882a593Smuzhiyun {	{ 0, "driver" },
1364*4882a593Smuzhiyun 	{ 1, "bios" },
1365*4882a593Smuzhiyun };
1366*4882a593Smuzhiyun 
1367*4882a593Smuzhiyun static const struct drm_prop_enum_list radeon_tv_std_enum_list[] =
1368*4882a593Smuzhiyun {	{ TV_STD_NTSC, "ntsc" },
1369*4882a593Smuzhiyun 	{ TV_STD_PAL, "pal" },
1370*4882a593Smuzhiyun 	{ TV_STD_PAL_M, "pal-m" },
1371*4882a593Smuzhiyun 	{ TV_STD_PAL_60, "pal-60" },
1372*4882a593Smuzhiyun 	{ TV_STD_NTSC_J, "ntsc-j" },
1373*4882a593Smuzhiyun 	{ TV_STD_SCART_PAL, "scart-pal" },
1374*4882a593Smuzhiyun 	{ TV_STD_PAL_CN, "pal-cn" },
1375*4882a593Smuzhiyun 	{ TV_STD_SECAM, "secam" },
1376*4882a593Smuzhiyun };
1377*4882a593Smuzhiyun 
1378*4882a593Smuzhiyun static const struct drm_prop_enum_list radeon_underscan_enum_list[] =
1379*4882a593Smuzhiyun {	{ UNDERSCAN_OFF, "off" },
1380*4882a593Smuzhiyun 	{ UNDERSCAN_ON, "on" },
1381*4882a593Smuzhiyun 	{ UNDERSCAN_AUTO, "auto" },
1382*4882a593Smuzhiyun };
1383*4882a593Smuzhiyun 
1384*4882a593Smuzhiyun static const struct drm_prop_enum_list radeon_audio_enum_list[] =
1385*4882a593Smuzhiyun {	{ RADEON_AUDIO_DISABLE, "off" },
1386*4882a593Smuzhiyun 	{ RADEON_AUDIO_ENABLE, "on" },
1387*4882a593Smuzhiyun 	{ RADEON_AUDIO_AUTO, "auto" },
1388*4882a593Smuzhiyun };
1389*4882a593Smuzhiyun 
1390*4882a593Smuzhiyun /* XXX support different dither options? spatial, temporal, both, etc. */
1391*4882a593Smuzhiyun static const struct drm_prop_enum_list radeon_dither_enum_list[] =
1392*4882a593Smuzhiyun {	{ RADEON_FMT_DITHER_DISABLE, "off" },
1393*4882a593Smuzhiyun 	{ RADEON_FMT_DITHER_ENABLE, "on" },
1394*4882a593Smuzhiyun };
1395*4882a593Smuzhiyun 
1396*4882a593Smuzhiyun static const struct drm_prop_enum_list radeon_output_csc_enum_list[] =
1397*4882a593Smuzhiyun {	{ RADEON_OUTPUT_CSC_BYPASS, "bypass" },
1398*4882a593Smuzhiyun 	{ RADEON_OUTPUT_CSC_TVRGB, "tvrgb" },
1399*4882a593Smuzhiyun 	{ RADEON_OUTPUT_CSC_YCBCR601, "ycbcr601" },
1400*4882a593Smuzhiyun 	{ RADEON_OUTPUT_CSC_YCBCR709, "ycbcr709" },
1401*4882a593Smuzhiyun };
1402*4882a593Smuzhiyun 
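/*
 * Create the driver specific KMS properties (coherent mode, tmds_pll, load
 * detection, tv standard, underscan and its borders, audio, dither and
 * output_csc) that the connector code attaches later.  Only the range
 * properties are checked for allocation failure here.
 */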
radeon_modeset_create_props(struct radeon_device * rdev)1403*4882a593Smuzhiyun static int radeon_modeset_create_props(struct radeon_device *rdev)
1404*4882a593Smuzhiyun {
1405*4882a593Smuzhiyun 	int sz;
1406*4882a593Smuzhiyun 
1407*4882a593Smuzhiyun 	if (rdev->is_atom_bios) {
1408*4882a593Smuzhiyun 		rdev->mode_info.coherent_mode_property =
1409*4882a593Smuzhiyun 			drm_property_create_range(rdev->ddev, 0, "coherent", 0, 1);
1410*4882a593Smuzhiyun 		if (!rdev->mode_info.coherent_mode_property)
1411*4882a593Smuzhiyun 			return -ENOMEM;
1412*4882a593Smuzhiyun 	}
1413*4882a593Smuzhiyun 
1414*4882a593Smuzhiyun 	if (!ASIC_IS_AVIVO(rdev)) {
1415*4882a593Smuzhiyun 		sz = ARRAY_SIZE(radeon_tmds_pll_enum_list);
1416*4882a593Smuzhiyun 		rdev->mode_info.tmds_pll_property =
1417*4882a593Smuzhiyun 			drm_property_create_enum(rdev->ddev, 0,
1418*4882a593Smuzhiyun 					    "tmds_pll",
1419*4882a593Smuzhiyun 					    radeon_tmds_pll_enum_list, sz);
1420*4882a593Smuzhiyun 	}
1421*4882a593Smuzhiyun 
1422*4882a593Smuzhiyun 	rdev->mode_info.load_detect_property =
1423*4882a593Smuzhiyun 		drm_property_create_range(rdev->ddev, 0, "load detection", 0, 1);
1424*4882a593Smuzhiyun 	if (!rdev->mode_info.load_detect_property)
1425*4882a593Smuzhiyun 		return -ENOMEM;
1426*4882a593Smuzhiyun 
1427*4882a593Smuzhiyun 	drm_mode_create_scaling_mode_property(rdev->ddev);
1428*4882a593Smuzhiyun 
1429*4882a593Smuzhiyun 	sz = ARRAY_SIZE(radeon_tv_std_enum_list);
1430*4882a593Smuzhiyun 	rdev->mode_info.tv_std_property =
1431*4882a593Smuzhiyun 		drm_property_create_enum(rdev->ddev, 0,
1432*4882a593Smuzhiyun 				    "tv standard",
1433*4882a593Smuzhiyun 				    radeon_tv_std_enum_list, sz);
1434*4882a593Smuzhiyun 
1435*4882a593Smuzhiyun 	sz = ARRAY_SIZE(radeon_underscan_enum_list);
1436*4882a593Smuzhiyun 	rdev->mode_info.underscan_property =
1437*4882a593Smuzhiyun 		drm_property_create_enum(rdev->ddev, 0,
1438*4882a593Smuzhiyun 				    "underscan",
1439*4882a593Smuzhiyun 				    radeon_underscan_enum_list, sz);
1440*4882a593Smuzhiyun 
1441*4882a593Smuzhiyun 	rdev->mode_info.underscan_hborder_property =
1442*4882a593Smuzhiyun 		drm_property_create_range(rdev->ddev, 0,
1443*4882a593Smuzhiyun 					"underscan hborder", 0, 128);
1444*4882a593Smuzhiyun 	if (!rdev->mode_info.underscan_hborder_property)
1445*4882a593Smuzhiyun 		return -ENOMEM;
1446*4882a593Smuzhiyun 
1447*4882a593Smuzhiyun 	rdev->mode_info.underscan_vborder_property =
1448*4882a593Smuzhiyun 		drm_property_create_range(rdev->ddev, 0,
1449*4882a593Smuzhiyun 					"underscan vborder", 0, 128);
1450*4882a593Smuzhiyun 	if (!rdev->mode_info.underscan_vborder_property)
1451*4882a593Smuzhiyun 		return -ENOMEM;
1452*4882a593Smuzhiyun 
1453*4882a593Smuzhiyun 	sz = ARRAY_SIZE(radeon_audio_enum_list);
1454*4882a593Smuzhiyun 	rdev->mode_info.audio_property =
1455*4882a593Smuzhiyun 		drm_property_create_enum(rdev->ddev, 0,
1456*4882a593Smuzhiyun 					 "audio",
1457*4882a593Smuzhiyun 					 radeon_audio_enum_list, sz);
1458*4882a593Smuzhiyun 
1459*4882a593Smuzhiyun 	sz = ARRAY_SIZE(radeon_dither_enum_list);
1460*4882a593Smuzhiyun 	rdev->mode_info.dither_property =
1461*4882a593Smuzhiyun 		drm_property_create_enum(rdev->ddev, 0,
1462*4882a593Smuzhiyun 					 "dither",
1463*4882a593Smuzhiyun 					 radeon_dither_enum_list, sz);
1464*4882a593Smuzhiyun 
1465*4882a593Smuzhiyun 	sz = ARRAY_SIZE(radeon_output_csc_enum_list);
1466*4882a593Smuzhiyun 	rdev->mode_info.output_csc_property =
1467*4882a593Smuzhiyun 		drm_property_create_enum(rdev->ddev, 0,
1468*4882a593Smuzhiyun 					 "output_csc",
1469*4882a593Smuzhiyun 					 radeon_output_csc_enum_list, sz);
1470*4882a593Smuzhiyun 
1471*4882a593Smuzhiyun 	return 0;
1472*4882a593Smuzhiyun }
1473*4882a593Smuzhiyun 
radeon_update_display_priority(struct radeon_device * rdev)1474*4882a593Smuzhiyun void radeon_update_display_priority(struct radeon_device *rdev)
1475*4882a593Smuzhiyun {
1476*4882a593Smuzhiyun 	/* adjustment options for the display watermarks */
1477*4882a593Smuzhiyun 	if ((radeon_disp_priority == 0) || (radeon_disp_priority > 2)) {
1478*4882a593Smuzhiyun 		/* Set display priority to high for r3xx and rv515 chips;
1479*4882a593Smuzhiyun 		 * this avoids flickering due to underflow to the
1480*4882a593Smuzhiyun 		 * display controllers during heavy acceleration.
1481*4882a593Smuzhiyun 		 * Don't force high on rs4xx IGP chips as it seems to
1482*4882a593Smuzhiyun 		 * affect the sound card.  See kernel bug 15982.
1483*4882a593Smuzhiyun 		 */
1484*4882a593Smuzhiyun 		if ((ASIC_IS_R300(rdev) || (rdev->family == CHIP_RV515)) &&
1485*4882a593Smuzhiyun 		    !(rdev->flags & RADEON_IS_IGP))
1486*4882a593Smuzhiyun 			rdev->disp_priority = 2;
1487*4882a593Smuzhiyun 		else
1488*4882a593Smuzhiyun 			rdev->disp_priority = 0;
1489*4882a593Smuzhiyun 	} else
1490*4882a593Smuzhiyun 		rdev->disp_priority = radeon_disp_priority;
1491*4882a593Smuzhiyun 
1492*4882a593Smuzhiyun }
1493*4882a593Smuzhiyun 
1494*4882a593Smuzhiyun /*
1495*4882a593Smuzhiyun  * Allocate hdmi structs and determine register offsets
1496*4882a593Smuzhiyun  */
radeon_afmt_init(struct radeon_device * rdev)1497*4882a593Smuzhiyun static void radeon_afmt_init(struct radeon_device *rdev)
1498*4882a593Smuzhiyun {
1499*4882a593Smuzhiyun 	int i;
1500*4882a593Smuzhiyun 
1501*4882a593Smuzhiyun 	for (i = 0; i < RADEON_MAX_AFMT_BLOCKS; i++)
1502*4882a593Smuzhiyun 		rdev->mode_info.afmt[i] = NULL;
1503*4882a593Smuzhiyun 
1504*4882a593Smuzhiyun 	if (ASIC_IS_NODCE(rdev)) {
1505*4882a593Smuzhiyun 		/* nothing to do */
1506*4882a593Smuzhiyun 	} else if (ASIC_IS_DCE4(rdev)) {
1507*4882a593Smuzhiyun 		static uint32_t eg_offsets[] = {
1508*4882a593Smuzhiyun 			EVERGREEN_CRTC0_REGISTER_OFFSET,
1509*4882a593Smuzhiyun 			EVERGREEN_CRTC1_REGISTER_OFFSET,
1510*4882a593Smuzhiyun 			EVERGREEN_CRTC2_REGISTER_OFFSET,
1511*4882a593Smuzhiyun 			EVERGREEN_CRTC3_REGISTER_OFFSET,
1512*4882a593Smuzhiyun 			EVERGREEN_CRTC4_REGISTER_OFFSET,
1513*4882a593Smuzhiyun 			EVERGREEN_CRTC5_REGISTER_OFFSET,
1514*4882a593Smuzhiyun 			0x13830 - 0x7030,
1515*4882a593Smuzhiyun 		};
1516*4882a593Smuzhiyun 		int num_afmt;
1517*4882a593Smuzhiyun 
1518*4882a593Smuzhiyun 		/* DCE8 has 7 audio blocks tied to DIG encoders */
1519*4882a593Smuzhiyun 		/* DCE6 has 6 audio blocks tied to DIG encoders */
1520*4882a593Smuzhiyun 		/* DCE4/5 has 6 audio blocks tied to DIG encoders */
1521*4882a593Smuzhiyun 		/* DCE4.1 has 2 audio blocks tied to DIG encoders */
1522*4882a593Smuzhiyun 		if (ASIC_IS_DCE8(rdev))
1523*4882a593Smuzhiyun 			num_afmt = 7;
1524*4882a593Smuzhiyun 		else if (ASIC_IS_DCE6(rdev))
1525*4882a593Smuzhiyun 			num_afmt = 6;
1526*4882a593Smuzhiyun 		else if (ASIC_IS_DCE5(rdev))
1527*4882a593Smuzhiyun 			num_afmt = 6;
1528*4882a593Smuzhiyun 		else if (ASIC_IS_DCE41(rdev))
1529*4882a593Smuzhiyun 			num_afmt = 2;
1530*4882a593Smuzhiyun 		else /* DCE4 */
1531*4882a593Smuzhiyun 			num_afmt = 6;
1532*4882a593Smuzhiyun 
1533*4882a593Smuzhiyun 		BUG_ON(num_afmt > ARRAY_SIZE(eg_offsets));
1534*4882a593Smuzhiyun 		for (i = 0; i < num_afmt; i++) {
1535*4882a593Smuzhiyun 			rdev->mode_info.afmt[i] = kzalloc(sizeof(struct radeon_afmt), GFP_KERNEL);
1536*4882a593Smuzhiyun 			if (rdev->mode_info.afmt[i]) {
1537*4882a593Smuzhiyun 				rdev->mode_info.afmt[i]->offset = eg_offsets[i];
1538*4882a593Smuzhiyun 				rdev->mode_info.afmt[i]->id = i;
1539*4882a593Smuzhiyun 			}
1540*4882a593Smuzhiyun 		}
1541*4882a593Smuzhiyun 	} else if (ASIC_IS_DCE3(rdev)) {
1542*4882a593Smuzhiyun 		/* DCE3.x has 2 audio blocks tied to DIG encoders */
1543*4882a593Smuzhiyun 		rdev->mode_info.afmt[0] = kzalloc(sizeof(struct radeon_afmt), GFP_KERNEL);
1544*4882a593Smuzhiyun 		if (rdev->mode_info.afmt[0]) {
1545*4882a593Smuzhiyun 			rdev->mode_info.afmt[0]->offset = DCE3_HDMI_OFFSET0;
1546*4882a593Smuzhiyun 			rdev->mode_info.afmt[0]->id = 0;
1547*4882a593Smuzhiyun 		}
1548*4882a593Smuzhiyun 		rdev->mode_info.afmt[1] = kzalloc(sizeof(struct radeon_afmt), GFP_KERNEL);
1549*4882a593Smuzhiyun 		if (rdev->mode_info.afmt[1]) {
1550*4882a593Smuzhiyun 			rdev->mode_info.afmt[1]->offset = DCE3_HDMI_OFFSET1;
1551*4882a593Smuzhiyun 			rdev->mode_info.afmt[1]->id = 1;
1552*4882a593Smuzhiyun 		}
1553*4882a593Smuzhiyun 	} else if (ASIC_IS_DCE2(rdev)) {
1554*4882a593Smuzhiyun 		/* DCE2 has at least 1 routable audio block */
1555*4882a593Smuzhiyun 		rdev->mode_info.afmt[0] = kzalloc(sizeof(struct radeon_afmt), GFP_KERNEL);
1556*4882a593Smuzhiyun 		if (rdev->mode_info.afmt[0]) {
1557*4882a593Smuzhiyun 			rdev->mode_info.afmt[0]->offset = DCE2_HDMI_OFFSET0;
1558*4882a593Smuzhiyun 			rdev->mode_info.afmt[0]->id = 0;
1559*4882a593Smuzhiyun 		}
1560*4882a593Smuzhiyun 		/* r6xx has 2 routable audio blocks */
1561*4882a593Smuzhiyun 		if (rdev->family >= CHIP_R600) {
1562*4882a593Smuzhiyun 			rdev->mode_info.afmt[1] = kzalloc(sizeof(struct radeon_afmt), GFP_KERNEL);
1563*4882a593Smuzhiyun 			if (rdev->mode_info.afmt[1]) {
1564*4882a593Smuzhiyun 				rdev->mode_info.afmt[1]->offset = DCE2_HDMI_OFFSET1;
1565*4882a593Smuzhiyun 				rdev->mode_info.afmt[1]->id = 1;
1566*4882a593Smuzhiyun 			}
1567*4882a593Smuzhiyun 		}
1568*4882a593Smuzhiyun 	}
1569*4882a593Smuzhiyun }
1570*4882a593Smuzhiyun 
radeon_afmt_fini(struct radeon_device * rdev)1571*4882a593Smuzhiyun static void radeon_afmt_fini(struct radeon_device *rdev)
1572*4882a593Smuzhiyun {
1573*4882a593Smuzhiyun 	int i;
1574*4882a593Smuzhiyun 
1575*4882a593Smuzhiyun 	for (i = 0; i < RADEON_MAX_AFMT_BLOCKS; i++) {
1576*4882a593Smuzhiyun 		kfree(rdev->mode_info.afmt[i]);
1577*4882a593Smuzhiyun 		rdev->mode_info.afmt[i] = NULL;
1578*4882a593Smuzhiyun 	}
1579*4882a593Smuzhiyun }
1580*4882a593Smuzhiyun 
radeon_modeset_init(struct radeon_device * rdev)1581*4882a593Smuzhiyun int radeon_modeset_init(struct radeon_device *rdev)
1582*4882a593Smuzhiyun {
1583*4882a593Smuzhiyun 	int i;
1584*4882a593Smuzhiyun 	int ret;
1585*4882a593Smuzhiyun 
1586*4882a593Smuzhiyun 	drm_mode_config_init(rdev->ddev);
1587*4882a593Smuzhiyun 	rdev->mode_info.mode_config_initialized = true;
1588*4882a593Smuzhiyun 
1589*4882a593Smuzhiyun 	rdev->ddev->mode_config.funcs = &radeon_mode_funcs;
1590*4882a593Smuzhiyun 
1591*4882a593Smuzhiyun 	if (radeon_use_pflipirq == 2 && rdev->family >= CHIP_R600)
1592*4882a593Smuzhiyun 		rdev->ddev->mode_config.async_page_flip = true;
1593*4882a593Smuzhiyun 
1594*4882a593Smuzhiyun 	if (ASIC_IS_DCE5(rdev)) {
1595*4882a593Smuzhiyun 		rdev->ddev->mode_config.max_width = 16384;
1596*4882a593Smuzhiyun 		rdev->ddev->mode_config.max_height = 16384;
1597*4882a593Smuzhiyun 	} else if (ASIC_IS_AVIVO(rdev)) {
1598*4882a593Smuzhiyun 		rdev->ddev->mode_config.max_width = 8192;
1599*4882a593Smuzhiyun 		rdev->ddev->mode_config.max_height = 8192;
1600*4882a593Smuzhiyun 	} else {
1601*4882a593Smuzhiyun 		rdev->ddev->mode_config.max_width = 4096;
1602*4882a593Smuzhiyun 		rdev->ddev->mode_config.max_height = 4096;
1603*4882a593Smuzhiyun 	}
1604*4882a593Smuzhiyun 
1605*4882a593Smuzhiyun 	rdev->ddev->mode_config.preferred_depth = 24;
1606*4882a593Smuzhiyun 	rdev->ddev->mode_config.prefer_shadow = 1;
1607*4882a593Smuzhiyun 
1608*4882a593Smuzhiyun 	rdev->ddev->mode_config.fb_base = rdev->mc.aper_base;
1609*4882a593Smuzhiyun 
1610*4882a593Smuzhiyun 	ret = radeon_modeset_create_props(rdev);
1611*4882a593Smuzhiyun 	if (ret) {
1612*4882a593Smuzhiyun 		return ret;
1613*4882a593Smuzhiyun 	}
1614*4882a593Smuzhiyun 
1615*4882a593Smuzhiyun 	/* init i2c buses */
1616*4882a593Smuzhiyun 	radeon_i2c_init(rdev);
1617*4882a593Smuzhiyun 
1618*4882a593Smuzhiyun 	/* check combios for a valid hardcoded EDID - Sun servers */
1619*4882a593Smuzhiyun 	if (!rdev->is_atom_bios) {
1620*4882a593Smuzhiyun 		/* check for hardcoded EDID in BIOS */
1621*4882a593Smuzhiyun 		radeon_combios_check_hardcoded_edid(rdev);
1622*4882a593Smuzhiyun 	}
1623*4882a593Smuzhiyun 
1624*4882a593Smuzhiyun 	/* allocate crtcs */
1625*4882a593Smuzhiyun 	for (i = 0; i < rdev->num_crtc; i++) {
1626*4882a593Smuzhiyun 		radeon_crtc_init(rdev->ddev, i);
1627*4882a593Smuzhiyun 	}
1628*4882a593Smuzhiyun 
1629*4882a593Smuzhiyun 	/* okay we should have all the bios connectors */
1630*4882a593Smuzhiyun 	ret = radeon_setup_enc_conn(rdev->ddev);
1631*4882a593Smuzhiyun 	if (!ret) {
1632*4882a593Smuzhiyun 		return ret;
1633*4882a593Smuzhiyun 	}
1634*4882a593Smuzhiyun 
1635*4882a593Smuzhiyun 	/* init dig PHYs, disp eng pll */
1636*4882a593Smuzhiyun 	if (rdev->is_atom_bios) {
1637*4882a593Smuzhiyun 		radeon_atom_encoder_init(rdev);
1638*4882a593Smuzhiyun 		radeon_atom_disp_eng_pll_init(rdev);
1639*4882a593Smuzhiyun 	}
1640*4882a593Smuzhiyun 
1641*4882a593Smuzhiyun 	/* initialize hpd */
1642*4882a593Smuzhiyun 	radeon_hpd_init(rdev);
1643*4882a593Smuzhiyun 
1644*4882a593Smuzhiyun 	/* setup afmt */
1645*4882a593Smuzhiyun 	radeon_afmt_init(rdev);
1646*4882a593Smuzhiyun 
1647*4882a593Smuzhiyun 	radeon_fbdev_init(rdev);
1648*4882a593Smuzhiyun 	drm_kms_helper_poll_init(rdev->ddev);
1649*4882a593Smuzhiyun 
1650*4882a593Smuzhiyun 	/* do pm late init */
1651*4882a593Smuzhiyun 	ret = radeon_pm_late_init(rdev);
1652*4882a593Smuzhiyun 
1653*4882a593Smuzhiyun 	return 0;
1654*4882a593Smuzhiyun }
1655*4882a593Smuzhiyun 
radeon_modeset_fini(struct radeon_device * rdev)1656*4882a593Smuzhiyun void radeon_modeset_fini(struct radeon_device *rdev)
1657*4882a593Smuzhiyun {
1658*4882a593Smuzhiyun 	if (rdev->mode_info.mode_config_initialized) {
1659*4882a593Smuzhiyun 		drm_kms_helper_poll_fini(rdev->ddev);
1660*4882a593Smuzhiyun 		radeon_hpd_fini(rdev);
1661*4882a593Smuzhiyun 		drm_helper_force_disable_all(rdev->ddev);
1662*4882a593Smuzhiyun 		radeon_fbdev_fini(rdev);
1663*4882a593Smuzhiyun 		radeon_afmt_fini(rdev);
1664*4882a593Smuzhiyun 		drm_mode_config_cleanup(rdev->ddev);
1665*4882a593Smuzhiyun 		rdev->mode_info.mode_config_initialized = false;
1666*4882a593Smuzhiyun 	}
1667*4882a593Smuzhiyun 
1668*4882a593Smuzhiyun 	kfree(rdev->mode_info.bios_hardcoded_edid);
1669*4882a593Smuzhiyun 
1670*4882a593Smuzhiyun 	/* free i2c buses */
1671*4882a593Smuzhiyun 	radeon_i2c_fini(rdev);
1672*4882a593Smuzhiyun }
1673*4882a593Smuzhiyun 
is_hdtv_mode(const struct drm_display_mode * mode)1674*4882a593Smuzhiyun static bool is_hdtv_mode(const struct drm_display_mode *mode)
1675*4882a593Smuzhiyun {
1676*4882a593Smuzhiyun 	/* try and guess if this is a tv or a monitor */
1677*4882a593Smuzhiyun 	if ((mode->vdisplay == 480 && mode->hdisplay == 720) || /* 480p */
1678*4882a593Smuzhiyun 	    (mode->vdisplay == 576) || /* 576p */
1679*4882a593Smuzhiyun 	    (mode->vdisplay == 720) || /* 720p */
1680*4882a593Smuzhiyun 	    (mode->vdisplay == 1080)) /* 1080p */
1681*4882a593Smuzhiyun 		return true;
1682*4882a593Smuzhiyun 	else
1683*4882a593Smuzhiyun 		return false;
1684*4882a593Smuzhiyun }
1685*4882a593Smuzhiyun 
radeon_crtc_scaling_mode_fixup(struct drm_crtc * crtc,const struct drm_display_mode * mode,struct drm_display_mode * adjusted_mode)1686*4882a593Smuzhiyun bool radeon_crtc_scaling_mode_fixup(struct drm_crtc *crtc,
1687*4882a593Smuzhiyun 				const struct drm_display_mode *mode,
1688*4882a593Smuzhiyun 				struct drm_display_mode *adjusted_mode)
1689*4882a593Smuzhiyun {
1690*4882a593Smuzhiyun 	struct drm_device *dev = crtc->dev;
1691*4882a593Smuzhiyun 	struct radeon_device *rdev = dev->dev_private;
1692*4882a593Smuzhiyun 	struct drm_encoder *encoder;
1693*4882a593Smuzhiyun 	struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
1694*4882a593Smuzhiyun 	struct radeon_encoder *radeon_encoder;
1695*4882a593Smuzhiyun 	struct drm_connector *connector;
1696*4882a593Smuzhiyun 	bool first = true;
1697*4882a593Smuzhiyun 	u32 src_v = 1, dst_v = 1;
1698*4882a593Smuzhiyun 	u32 src_h = 1, dst_h = 1;
1699*4882a593Smuzhiyun 
1700*4882a593Smuzhiyun 	radeon_crtc->h_border = 0;
1701*4882a593Smuzhiyun 	radeon_crtc->v_border = 0;
1702*4882a593Smuzhiyun 
1703*4882a593Smuzhiyun 	list_for_each_entry(encoder, &dev->mode_config.encoder_list, head) {
1704*4882a593Smuzhiyun 		if (encoder->crtc != crtc)
1705*4882a593Smuzhiyun 			continue;
1706*4882a593Smuzhiyun 		radeon_encoder = to_radeon_encoder(encoder);
1707*4882a593Smuzhiyun 		connector = radeon_get_connector_for_encoder(encoder);
1708*4882a593Smuzhiyun 
1709*4882a593Smuzhiyun 		if (first) {
1710*4882a593Smuzhiyun 			/* set scaling */
1711*4882a593Smuzhiyun 			if (radeon_encoder->rmx_type == RMX_OFF)
1712*4882a593Smuzhiyun 				radeon_crtc->rmx_type = RMX_OFF;
1713*4882a593Smuzhiyun 			else if (mode->hdisplay < radeon_encoder->native_mode.hdisplay ||
1714*4882a593Smuzhiyun 				 mode->vdisplay < radeon_encoder->native_mode.vdisplay)
1715*4882a593Smuzhiyun 				radeon_crtc->rmx_type = radeon_encoder->rmx_type;
1716*4882a593Smuzhiyun 			else
1717*4882a593Smuzhiyun 				radeon_crtc->rmx_type = RMX_OFF;
1718*4882a593Smuzhiyun 			/* copy native mode */
1719*4882a593Smuzhiyun 			memcpy(&radeon_crtc->native_mode,
1720*4882a593Smuzhiyun 			       &radeon_encoder->native_mode,
1721*4882a593Smuzhiyun 				sizeof(struct drm_display_mode));
1722*4882a593Smuzhiyun 			src_v = crtc->mode.vdisplay;
1723*4882a593Smuzhiyun 			dst_v = radeon_crtc->native_mode.vdisplay;
1724*4882a593Smuzhiyun 			src_h = crtc->mode.hdisplay;
1725*4882a593Smuzhiyun 			dst_h = radeon_crtc->native_mode.hdisplay;
1726*4882a593Smuzhiyun 
1727*4882a593Smuzhiyun 			/* fix up for overscan on hdmi */
1728*4882a593Smuzhiyun 			if (ASIC_IS_AVIVO(rdev) &&
1729*4882a593Smuzhiyun 			    (!(mode->flags & DRM_MODE_FLAG_INTERLACE)) &&
1730*4882a593Smuzhiyun 			    ((radeon_encoder->underscan_type == UNDERSCAN_ON) ||
1731*4882a593Smuzhiyun 			     ((radeon_encoder->underscan_type == UNDERSCAN_AUTO) &&
1732*4882a593Smuzhiyun 			      drm_detect_hdmi_monitor(radeon_connector_edid(connector)) &&
1733*4882a593Smuzhiyun 			      is_hdtv_mode(mode)))) {
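				/*
				 * Default underscan border when no explicit
				 * border is set: hdisplay/32 + 16 pixels per
				 * side (76 for a 1920 wide mode) and
				 * vdisplay/32 + 16 lines per side (49 for 1080).
				 */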
1734*4882a593Smuzhiyun 				if (radeon_encoder->underscan_hborder != 0)
1735*4882a593Smuzhiyun 					radeon_crtc->h_border = radeon_encoder->underscan_hborder;
1736*4882a593Smuzhiyun 				else
1737*4882a593Smuzhiyun 					radeon_crtc->h_border = (mode->hdisplay >> 5) + 16;
1738*4882a593Smuzhiyun 				if (radeon_encoder->underscan_vborder != 0)
1739*4882a593Smuzhiyun 					radeon_crtc->v_border = radeon_encoder->underscan_vborder;
1740*4882a593Smuzhiyun 				else
1741*4882a593Smuzhiyun 					radeon_crtc->v_border = (mode->vdisplay >> 5) + 16;
1742*4882a593Smuzhiyun 				radeon_crtc->rmx_type = RMX_FULL;
1743*4882a593Smuzhiyun 				src_v = crtc->mode.vdisplay;
1744*4882a593Smuzhiyun 				dst_v = crtc->mode.vdisplay - (radeon_crtc->v_border * 2);
1745*4882a593Smuzhiyun 				src_h = crtc->mode.hdisplay;
1746*4882a593Smuzhiyun 				dst_h = crtc->mode.hdisplay - (radeon_crtc->h_border * 2);
1747*4882a593Smuzhiyun 			}
1748*4882a593Smuzhiyun 			first = false;
1749*4882a593Smuzhiyun 		} else {
1750*4882a593Smuzhiyun 			if (radeon_crtc->rmx_type != radeon_encoder->rmx_type) {
1751*4882a593Smuzhiyun 				/* WARNING: Right now this can't happen, but
1752*4882a593Smuzhiyun 				 * in the future we need to check that the
1753*4882a593Smuzhiyun 				 * scaling is consistent across the different
1754*4882a593Smuzhiyun 				 * encoders (i.e. that all encoders can work
1755*4882a593Smuzhiyun 				 * with the same scaling).
1756*4882a593Smuzhiyun 				 */
1757*4882a593Smuzhiyun 				DRM_ERROR("Scaling not consistent across encoders.\n");
1758*4882a593Smuzhiyun 				return false;
1759*4882a593Smuzhiyun 			}
1760*4882a593Smuzhiyun 		}
1761*4882a593Smuzhiyun 	}
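	/*
	 * Store the resulting horizontal/vertical scaling ratios (source size
	 * over destination size) as 20.12 fixed point; 1.0 when no scaling is
	 * applied.
	 */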
1762*4882a593Smuzhiyun 	if (radeon_crtc->rmx_type != RMX_OFF) {
1763*4882a593Smuzhiyun 		fixed20_12 a, b;
1764*4882a593Smuzhiyun 		a.full = dfixed_const(src_v);
1765*4882a593Smuzhiyun 		b.full = dfixed_const(dst_v);
1766*4882a593Smuzhiyun 		radeon_crtc->vsc.full = dfixed_div(a, b);
1767*4882a593Smuzhiyun 		a.full = dfixed_const(src_h);
1768*4882a593Smuzhiyun 		b.full = dfixed_const(dst_h);
1769*4882a593Smuzhiyun 		radeon_crtc->hsc.full = dfixed_div(a, b);
1770*4882a593Smuzhiyun 	} else {
1771*4882a593Smuzhiyun 		radeon_crtc->vsc.full = dfixed_const(1);
1772*4882a593Smuzhiyun 		radeon_crtc->hsc.full = dfixed_const(1);
1773*4882a593Smuzhiyun 	}
1774*4882a593Smuzhiyun 	return true;
1775*4882a593Smuzhiyun }
1776*4882a593Smuzhiyun 
1777*4882a593Smuzhiyun /*
1778*4882a593Smuzhiyun  * Retrieve current video scanout position of crtc on a given gpu, and
1779*4882a593Smuzhiyun  * an optional accurate timestamp of when query happened.
1780*4882a593Smuzhiyun  *
1781*4882a593Smuzhiyun  * \param dev Device to query.
1782*4882a593Smuzhiyun  * \param crtc Crtc to query.
1783*4882a593Smuzhiyun  * \param flags Flags from caller (DRM_CALLED_FROM_VBLIRQ or 0).
1784*4882a593Smuzhiyun  *              For driver internal use only also supports these flags:
1785*4882a593Smuzhiyun  *
1786*4882a593Smuzhiyun  *              USE_REAL_VBLANKSTART to use the real start of vblank instead
1787*4882a593Smuzhiyun  *              of a fudged earlier start of vblank.
1788*4882a593Smuzhiyun  *
1789*4882a593Smuzhiyun  *              GET_DISTANCE_TO_VBLANKSTART to return distance to the
1790*4882a593Smuzhiyun  *              fudged earlier start of vblank in *vpos and the distance
1791*4882a593Smuzhiyun  *              to true start of vblank in *hpos.
1792*4882a593Smuzhiyun  *
1793*4882a593Smuzhiyun  * \param *vpos Location where vertical scanout position should be stored.
1794*4882a593Smuzhiyun  * \param *hpos Location where horizontal scanout position should go.
1795*4882a593Smuzhiyun  * \param *stime Target location for timestamp taken immediately before
1796*4882a593Smuzhiyun  *               scanout position query. Can be NULL to skip timestamp.
1797*4882a593Smuzhiyun  * \param *etime Target location for timestamp taken immediately after
1798*4882a593Smuzhiyun  *               scanout position query. Can be NULL to skip timestamp.
1799*4882a593Smuzhiyun  *
1800*4882a593Smuzhiyun  * Returns vpos as a positive number while in active scanout area.
1801*4882a593Smuzhiyun  * Returns vpos as a negative number inside vblank, counting the number
1802*4882a593Smuzhiyun  * of scanlines to go until end of vblank, e.g., -1 means "one scanline
1803*4882a593Smuzhiyun  * until start of active scanout / end of vblank."
1804*4882a593Smuzhiyun  *
1805*4882a593Smuzhiyun  * \return Flags, or'ed together as follows:
1806*4882a593Smuzhiyun  *
1807*4882a593Smuzhiyun  * DRM_SCANOUTPOS_VALID = Query successful.
1808*4882a593Smuzhiyun  * DRM_SCANOUTPOS_INVBL = Inside vblank.
1809*4882a593Smuzhiyun  * DRM_SCANOUTPOS_ACCURATE = Returned position is accurate. A lack of
1810*4882a593Smuzhiyun  * this flag means that returned position may be offset by a constant but
1811*4882a593Smuzhiyun  * unknown small number of scanlines wrt. real scanout position.
1812*4882a593Smuzhiyun  *
1813*4882a593Smuzhiyun  */
radeon_get_crtc_scanoutpos(struct drm_device * dev,unsigned int pipe,unsigned int flags,int * vpos,int * hpos,ktime_t * stime,ktime_t * etime,const struct drm_display_mode * mode)1814*4882a593Smuzhiyun int radeon_get_crtc_scanoutpos(struct drm_device *dev, unsigned int pipe,
1815*4882a593Smuzhiyun 			       unsigned int flags, int *vpos, int *hpos,
1816*4882a593Smuzhiyun 			       ktime_t *stime, ktime_t *etime,
1817*4882a593Smuzhiyun 			       const struct drm_display_mode *mode)
1818*4882a593Smuzhiyun {
1819*4882a593Smuzhiyun 	u32 stat_crtc = 0, vbl = 0, position = 0;
1820*4882a593Smuzhiyun 	int vbl_start, vbl_end, vtotal, ret = 0;
1821*4882a593Smuzhiyun 	bool in_vbl = true;
1822*4882a593Smuzhiyun 
1823*4882a593Smuzhiyun 	struct radeon_device *rdev = dev->dev_private;
1824*4882a593Smuzhiyun 
1825*4882a593Smuzhiyun 	/* preempt_disable_rt() should go right here in PREEMPT_RT patchset. */
1826*4882a593Smuzhiyun 
1827*4882a593Smuzhiyun 	/* Get optional system timestamp before query. */
1828*4882a593Smuzhiyun 	if (stime)
1829*4882a593Smuzhiyun 		*stime = ktime_get();
1830*4882a593Smuzhiyun 
1831*4882a593Smuzhiyun 	if (ASIC_IS_DCE4(rdev)) {
1832*4882a593Smuzhiyun 		if (pipe == 0) {
1833*4882a593Smuzhiyun 			vbl = RREG32(EVERGREEN_CRTC_V_BLANK_START_END +
1834*4882a593Smuzhiyun 				     EVERGREEN_CRTC0_REGISTER_OFFSET);
1835*4882a593Smuzhiyun 			position = RREG32(EVERGREEN_CRTC_STATUS_POSITION +
1836*4882a593Smuzhiyun 					  EVERGREEN_CRTC0_REGISTER_OFFSET);
1837*4882a593Smuzhiyun 			ret |= DRM_SCANOUTPOS_VALID;
1838*4882a593Smuzhiyun 		}
1839*4882a593Smuzhiyun 		if (pipe == 1) {
1840*4882a593Smuzhiyun 			vbl = RREG32(EVERGREEN_CRTC_V_BLANK_START_END +
1841*4882a593Smuzhiyun 				     EVERGREEN_CRTC1_REGISTER_OFFSET);
1842*4882a593Smuzhiyun 			position = RREG32(EVERGREEN_CRTC_STATUS_POSITION +
1843*4882a593Smuzhiyun 					  EVERGREEN_CRTC1_REGISTER_OFFSET);
1844*4882a593Smuzhiyun 			ret |= DRM_SCANOUTPOS_VALID;
1845*4882a593Smuzhiyun 		}
1846*4882a593Smuzhiyun 		if (pipe == 2) {
1847*4882a593Smuzhiyun 			vbl = RREG32(EVERGREEN_CRTC_V_BLANK_START_END +
1848*4882a593Smuzhiyun 				     EVERGREEN_CRTC2_REGISTER_OFFSET);
1849*4882a593Smuzhiyun 			position = RREG32(EVERGREEN_CRTC_STATUS_POSITION +
1850*4882a593Smuzhiyun 					  EVERGREEN_CRTC2_REGISTER_OFFSET);
1851*4882a593Smuzhiyun 			ret |= DRM_SCANOUTPOS_VALID;
1852*4882a593Smuzhiyun 		}
1853*4882a593Smuzhiyun 		if (pipe == 3) {
1854*4882a593Smuzhiyun 			vbl = RREG32(EVERGREEN_CRTC_V_BLANK_START_END +
1855*4882a593Smuzhiyun 				     EVERGREEN_CRTC3_REGISTER_OFFSET);
1856*4882a593Smuzhiyun 			position = RREG32(EVERGREEN_CRTC_STATUS_POSITION +
1857*4882a593Smuzhiyun 					  EVERGREEN_CRTC3_REGISTER_OFFSET);
1858*4882a593Smuzhiyun 			ret |= DRM_SCANOUTPOS_VALID;
1859*4882a593Smuzhiyun 		}
1860*4882a593Smuzhiyun 		if (pipe == 4) {
1861*4882a593Smuzhiyun 			vbl = RREG32(EVERGREEN_CRTC_V_BLANK_START_END +
1862*4882a593Smuzhiyun 				     EVERGREEN_CRTC4_REGISTER_OFFSET);
1863*4882a593Smuzhiyun 			position = RREG32(EVERGREEN_CRTC_STATUS_POSITION +
1864*4882a593Smuzhiyun 					  EVERGREEN_CRTC4_REGISTER_OFFSET);
1865*4882a593Smuzhiyun 			ret |= DRM_SCANOUTPOS_VALID;
1866*4882a593Smuzhiyun 		}
1867*4882a593Smuzhiyun 		if (pipe == 5) {
1868*4882a593Smuzhiyun 			vbl = RREG32(EVERGREEN_CRTC_V_BLANK_START_END +
1869*4882a593Smuzhiyun 				     EVERGREEN_CRTC5_REGISTER_OFFSET);
1870*4882a593Smuzhiyun 			position = RREG32(EVERGREEN_CRTC_STATUS_POSITION +
1871*4882a593Smuzhiyun 					  EVERGREEN_CRTC5_REGISTER_OFFSET);
1872*4882a593Smuzhiyun 			ret |= DRM_SCANOUTPOS_VALID;
1873*4882a593Smuzhiyun 		}
1874*4882a593Smuzhiyun 	} else if (ASIC_IS_AVIVO(rdev)) {
1875*4882a593Smuzhiyun 		if (pipe == 0) {
1876*4882a593Smuzhiyun 			vbl = RREG32(AVIVO_D1CRTC_V_BLANK_START_END);
1877*4882a593Smuzhiyun 			position = RREG32(AVIVO_D1CRTC_STATUS_POSITION);
1878*4882a593Smuzhiyun 			ret |= DRM_SCANOUTPOS_VALID;
1879*4882a593Smuzhiyun 		}
1880*4882a593Smuzhiyun 		if (pipe == 1) {
1881*4882a593Smuzhiyun 			vbl = RREG32(AVIVO_D2CRTC_V_BLANK_START_END);
1882*4882a593Smuzhiyun 			position = RREG32(AVIVO_D2CRTC_STATUS_POSITION);
1883*4882a593Smuzhiyun 			ret |= DRM_SCANOUTPOS_VALID;
1884*4882a593Smuzhiyun 		}
1885*4882a593Smuzhiyun 	} else {
1886*4882a593Smuzhiyun 		/* Pre-AVIVO: Different encoding of scanout pos and vblank interval. */
1887*4882a593Smuzhiyun 		if (pipe == 0) {
1888*4882a593Smuzhiyun 			/* Assume vbl_end == 0, get vbl_start from
1889*4882a593Smuzhiyun 			 * upper 16 bits.
1890*4882a593Smuzhiyun 			 */
1891*4882a593Smuzhiyun 			vbl = (RREG32(RADEON_CRTC_V_TOTAL_DISP) &
1892*4882a593Smuzhiyun 				RADEON_CRTC_V_DISP) >> RADEON_CRTC_V_DISP_SHIFT;
1893*4882a593Smuzhiyun 			/* Only retrieve vpos from upper 16 bits, set hpos == 0. */
1894*4882a593Smuzhiyun 			position = (RREG32(RADEON_CRTC_VLINE_CRNT_VLINE) >> 16) & RADEON_CRTC_V_TOTAL;
1895*4882a593Smuzhiyun 			stat_crtc = RREG32(RADEON_CRTC_STATUS);
1896*4882a593Smuzhiyun 			if (!(stat_crtc & 1))
1897*4882a593Smuzhiyun 				in_vbl = false;
1898*4882a593Smuzhiyun 
1899*4882a593Smuzhiyun 			ret |= DRM_SCANOUTPOS_VALID;
1900*4882a593Smuzhiyun 		}
1901*4882a593Smuzhiyun 		if (pipe == 1) {
1902*4882a593Smuzhiyun 			vbl = (RREG32(RADEON_CRTC2_V_TOTAL_DISP) &
1903*4882a593Smuzhiyun 				RADEON_CRTC_V_DISP) >> RADEON_CRTC_V_DISP_SHIFT;
1904*4882a593Smuzhiyun 			position = (RREG32(RADEON_CRTC2_VLINE_CRNT_VLINE) >> 16) & RADEON_CRTC_V_TOTAL;
1905*4882a593Smuzhiyun 			stat_crtc = RREG32(RADEON_CRTC2_STATUS);
1906*4882a593Smuzhiyun 			if (!(stat_crtc & 1))
1907*4882a593Smuzhiyun 				in_vbl = false;
1908*4882a593Smuzhiyun 
1909*4882a593Smuzhiyun 			ret |= DRM_SCANOUTPOS_VALID;
1910*4882a593Smuzhiyun 		}
1911*4882a593Smuzhiyun 	}
1912*4882a593Smuzhiyun 
1913*4882a593Smuzhiyun 	/* Get optional system timestamp after query. */
1914*4882a593Smuzhiyun 	if (etime)
1915*4882a593Smuzhiyun 		*etime = ktime_get();
1916*4882a593Smuzhiyun 
1917*4882a593Smuzhiyun 	/* preempt_enable_rt() should go right here in PREEMPT_RT patchset. */
1918*4882a593Smuzhiyun 
1919*4882a593Smuzhiyun 	/* Decode into vertical and horizontal scanout position. */
1920*4882a593Smuzhiyun 	*vpos = position & 0x1fff;
1921*4882a593Smuzhiyun 	*hpos = (position >> 16) & 0x1fff;
1922*4882a593Smuzhiyun 
1923*4882a593Smuzhiyun 	/* Valid vblank area boundaries from gpu retrieved? */
1924*4882a593Smuzhiyun 	if (vbl > 0) {
1925*4882a593Smuzhiyun 		/* Yes: Decode. */
1926*4882a593Smuzhiyun 		ret |= DRM_SCANOUTPOS_ACCURATE;
1927*4882a593Smuzhiyun 		vbl_start = vbl & 0x1fff;
1928*4882a593Smuzhiyun 		vbl_end = (vbl >> 16) & 0x1fff;
1929*4882a593Smuzhiyun 	} else {
1931*4882a593Smuzhiyun 		/* No: Fake something reasonable which gives at least ok results. */
1932*4882a593Smuzhiyun 		vbl_start = mode->crtc_vdisplay;
1933*4882a593Smuzhiyun 		vbl_end = 0;
1934*4882a593Smuzhiyun 	}
1935*4882a593Smuzhiyun 
1936*4882a593Smuzhiyun 	/* Called from driver internal vblank counter query code? */
1937*4882a593Smuzhiyun 	if (flags & GET_DISTANCE_TO_VBLANKSTART) {
1938*4882a593Smuzhiyun 		/* Caller wants distance from real vbl_start in *hpos */
1939*4882a593Smuzhiyun 		*hpos = *vpos - vbl_start;
1940*4882a593Smuzhiyun 	}
1941*4882a593Smuzhiyun 
1942*4882a593Smuzhiyun 	/* Fudge vblank to start a few scanlines earlier to handle the
1943*4882a593Smuzhiyun 	 * problem that vblank irqs fire a few scanlines before start
1944*4882a593Smuzhiyun 	 * of vblank. Some driver internal callers need the true vblank
1945*4882a593Smuzhiyun 	 * start to be used and signal this via the USE_REAL_VBLANKSTART flag.
1946*4882a593Smuzhiyun 	 *
1947*4882a593Smuzhiyun 	 * The cause of the "early" vblank irq is that the irq is triggered
1948*4882a593Smuzhiyun 	 * by the line buffer logic when the line buffer read position enters
1949*4882a593Smuzhiyun 	 * the vblank, whereas our crtc scanout position naturally lags the
1950*4882a593Smuzhiyun 	 * line buffer read position.
1951*4882a593Smuzhiyun 	 */
1952*4882a593Smuzhiyun 	if (!(flags & USE_REAL_VBLANKSTART))
1953*4882a593Smuzhiyun 		vbl_start -= rdev->mode_info.crtcs[pipe]->lb_vblank_lead_lines;
1954*4882a593Smuzhiyun 
1955*4882a593Smuzhiyun 	/* Test scanout position against vblank region. */
1956*4882a593Smuzhiyun 	if ((*vpos < vbl_start) && (*vpos >= vbl_end))
1957*4882a593Smuzhiyun 		in_vbl = false;
1958*4882a593Smuzhiyun 
1959*4882a593Smuzhiyun 	/* In vblank? */
1960*4882a593Smuzhiyun 	if (in_vbl)
1961*4882a593Smuzhiyun 		ret |= DRM_SCANOUTPOS_IN_VBLANK;
1962*4882a593Smuzhiyun 
1963*4882a593Smuzhiyun 	/* Called from driver internal vblank counter query code? */
1964*4882a593Smuzhiyun 	if (flags & GET_DISTANCE_TO_VBLANKSTART) {
1965*4882a593Smuzhiyun 		/* Caller wants distance from fudged earlier vbl_start */
1966*4882a593Smuzhiyun 		*vpos -= vbl_start;
1967*4882a593Smuzhiyun 		return ret;
1968*4882a593Smuzhiyun 	}
1969*4882a593Smuzhiyun 
1970*4882a593Smuzhiyun 	/* Check if inside vblank area and apply corrective offsets:
1971*4882a593Smuzhiyun 	 * vpos will then be >=0 in video scanout area, but negative
1972*4882a593Smuzhiyun 	 * within vblank area, counting down the number of lines until
1973*4882a593Smuzhiyun 	 * start of scanout.
1974*4882a593Smuzhiyun 	 */
1975*4882a593Smuzhiyun 
1976*4882a593Smuzhiyun 	/* Inside "upper part" of vblank area? Apply corrective offset if so: */
1977*4882a593Smuzhiyun 	if (in_vbl && (*vpos >= vbl_start)) {
1978*4882a593Smuzhiyun 		vtotal = mode->crtc_vtotal;
1979*4882a593Smuzhiyun 		*vpos = *vpos - vtotal;
1980*4882a593Smuzhiyun 	}
1981*4882a593Smuzhiyun 
1982*4882a593Smuzhiyun 	/* Correct for shifted end of vbl at vbl_end. */
1983*4882a593Smuzhiyun 	*vpos = *vpos - vbl_end;
1984*4882a593Smuzhiyun 
1985*4882a593Smuzhiyun 	return ret;
1986*4882a593Smuzhiyun }
1987*4882a593Smuzhiyun 
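/*
 * drm_crtc based wrapper around radeon_get_crtc_scanoutpos(), matching the
 * prototype the DRM vblank/timestamping helpers expect; it forwards with no
 * extra flags, i.e. the fudged earlier vblank start is used.
 */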
1988*4882a593Smuzhiyun bool
radeon_get_crtc_scanout_position(struct drm_crtc * crtc,bool in_vblank_irq,int * vpos,int * hpos,ktime_t * stime,ktime_t * etime,const struct drm_display_mode * mode)1989*4882a593Smuzhiyun radeon_get_crtc_scanout_position(struct drm_crtc *crtc,
1990*4882a593Smuzhiyun 				 bool in_vblank_irq, int *vpos, int *hpos,
1991*4882a593Smuzhiyun 				 ktime_t *stime, ktime_t *etime,
1992*4882a593Smuzhiyun 				 const struct drm_display_mode *mode)
1993*4882a593Smuzhiyun {
1994*4882a593Smuzhiyun 	struct drm_device *dev = crtc->dev;
1995*4882a593Smuzhiyun 	unsigned int pipe = crtc->index;
1996*4882a593Smuzhiyun 
1997*4882a593Smuzhiyun 	return radeon_get_crtc_scanoutpos(dev, pipe, 0, vpos, hpos,
1998*4882a593Smuzhiyun 					  stime, etime, mode);
1999*4882a593Smuzhiyun }
2000