1*4882a593Smuzhiyun /*
2*4882a593Smuzhiyun * Copyright 2007-11 Advanced Micro Devices, Inc.
3*4882a593Smuzhiyun * Copyright 2008 Red Hat Inc.
4*4882a593Smuzhiyun *
5*4882a593Smuzhiyun * Permission is hereby granted, free of charge, to any person obtaining a
6*4882a593Smuzhiyun * copy of this software and associated documentation files (the "Software"),
7*4882a593Smuzhiyun * to deal in the Software without restriction, including without limitation
8*4882a593Smuzhiyun * the rights to use, copy, modify, merge, publish, distribute, sublicense,
9*4882a593Smuzhiyun * and/or sell copies of the Software, and to permit persons to whom the
10*4882a593Smuzhiyun * Software is furnished to do so, subject to the following conditions:
11*4882a593Smuzhiyun *
12*4882a593Smuzhiyun * The above copyright notice and this permission notice shall be included in
13*4882a593Smuzhiyun * all copies or substantial portions of the Software.
14*4882a593Smuzhiyun *
15*4882a593Smuzhiyun * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16*4882a593Smuzhiyun * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17*4882a593Smuzhiyun * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
18*4882a593Smuzhiyun * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
19*4882a593Smuzhiyun * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
20*4882a593Smuzhiyun * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
21*4882a593Smuzhiyun * OTHER DEALINGS IN THE SOFTWARE.
22*4882a593Smuzhiyun *
23*4882a593Smuzhiyun * Authors: Dave Airlie
24*4882a593Smuzhiyun * Alex Deucher
25*4882a593Smuzhiyun */
26*4882a593Smuzhiyun
27*4882a593Smuzhiyun #include <linux/pci.h>
28*4882a593Smuzhiyun
29*4882a593Smuzhiyun #include <drm/drm_crtc_helper.h>
30*4882a593Smuzhiyun #include <drm/amdgpu_drm.h>
31*4882a593Smuzhiyun #include "amdgpu.h"
32*4882a593Smuzhiyun #include "amdgpu_connectors.h"
33*4882a593Smuzhiyun #include "amdgpu_display.h"
34*4882a593Smuzhiyun #include "atom.h"
35*4882a593Smuzhiyun #include "atombios_encoders.h"
36*4882a593Smuzhiyun #include "atombios_dp.h"
37*4882a593Smuzhiyun #include <linux/backlight.h>
38*4882a593Smuzhiyun #include "bif/bif_4_1_d.h"
39*4882a593Smuzhiyun
40*4882a593Smuzhiyun u8
amdgpu_atombios_encoder_get_backlight_level_from_reg(struct amdgpu_device * adev)41*4882a593Smuzhiyun amdgpu_atombios_encoder_get_backlight_level_from_reg(struct amdgpu_device *adev)
42*4882a593Smuzhiyun {
43*4882a593Smuzhiyun u8 backlight_level;
44*4882a593Smuzhiyun u32 bios_2_scratch;
45*4882a593Smuzhiyun
46*4882a593Smuzhiyun bios_2_scratch = RREG32(mmBIOS_SCRATCH_2);
47*4882a593Smuzhiyun
48*4882a593Smuzhiyun backlight_level = ((bios_2_scratch & ATOM_S2_CURRENT_BL_LEVEL_MASK) >>
49*4882a593Smuzhiyun ATOM_S2_CURRENT_BL_LEVEL_SHIFT);
50*4882a593Smuzhiyun
51*4882a593Smuzhiyun return backlight_level;
52*4882a593Smuzhiyun }
53*4882a593Smuzhiyun
54*4882a593Smuzhiyun void
amdgpu_atombios_encoder_set_backlight_level_to_reg(struct amdgpu_device * adev,u8 backlight_level)55*4882a593Smuzhiyun amdgpu_atombios_encoder_set_backlight_level_to_reg(struct amdgpu_device *adev,
56*4882a593Smuzhiyun u8 backlight_level)
57*4882a593Smuzhiyun {
58*4882a593Smuzhiyun u32 bios_2_scratch;
59*4882a593Smuzhiyun
60*4882a593Smuzhiyun bios_2_scratch = RREG32(mmBIOS_SCRATCH_2);
61*4882a593Smuzhiyun
62*4882a593Smuzhiyun bios_2_scratch &= ~ATOM_S2_CURRENT_BL_LEVEL_MASK;
63*4882a593Smuzhiyun bios_2_scratch |= ((backlight_level << ATOM_S2_CURRENT_BL_LEVEL_SHIFT) &
64*4882a593Smuzhiyun ATOM_S2_CURRENT_BL_LEVEL_MASK);
65*4882a593Smuzhiyun
66*4882a593Smuzhiyun WREG32(mmBIOS_SCRATCH_2, bios_2_scratch);
67*4882a593Smuzhiyun }
68*4882a593Smuzhiyun
69*4882a593Smuzhiyun u8
amdgpu_atombios_encoder_get_backlight_level(struct amdgpu_encoder * amdgpu_encoder)70*4882a593Smuzhiyun amdgpu_atombios_encoder_get_backlight_level(struct amdgpu_encoder *amdgpu_encoder)
71*4882a593Smuzhiyun {
72*4882a593Smuzhiyun struct drm_device *dev = amdgpu_encoder->base.dev;
73*4882a593Smuzhiyun struct amdgpu_device *adev = drm_to_adev(dev);
74*4882a593Smuzhiyun
75*4882a593Smuzhiyun if (!(adev->mode_info.firmware_flags & ATOM_BIOS_INFO_BL_CONTROLLED_BY_GPU))
76*4882a593Smuzhiyun return 0;
77*4882a593Smuzhiyun
78*4882a593Smuzhiyun return amdgpu_atombios_encoder_get_backlight_level_from_reg(adev);
79*4882a593Smuzhiyun }
80*4882a593Smuzhiyun
81*4882a593Smuzhiyun void
amdgpu_atombios_encoder_set_backlight_level(struct amdgpu_encoder * amdgpu_encoder,u8 level)82*4882a593Smuzhiyun amdgpu_atombios_encoder_set_backlight_level(struct amdgpu_encoder *amdgpu_encoder,
83*4882a593Smuzhiyun u8 level)
84*4882a593Smuzhiyun {
85*4882a593Smuzhiyun struct drm_encoder *encoder = &amdgpu_encoder->base;
86*4882a593Smuzhiyun struct drm_device *dev = amdgpu_encoder->base.dev;
87*4882a593Smuzhiyun struct amdgpu_device *adev = drm_to_adev(dev);
88*4882a593Smuzhiyun struct amdgpu_encoder_atom_dig *dig;
89*4882a593Smuzhiyun
90*4882a593Smuzhiyun if (!(adev->mode_info.firmware_flags & ATOM_BIOS_INFO_BL_CONTROLLED_BY_GPU))
91*4882a593Smuzhiyun return;
92*4882a593Smuzhiyun
93*4882a593Smuzhiyun if ((amdgpu_encoder->devices & (ATOM_DEVICE_LCD_SUPPORT)) &&
94*4882a593Smuzhiyun amdgpu_encoder->enc_priv) {
95*4882a593Smuzhiyun dig = amdgpu_encoder->enc_priv;
96*4882a593Smuzhiyun dig->backlight_level = level;
97*4882a593Smuzhiyun amdgpu_atombios_encoder_set_backlight_level_to_reg(adev, dig->backlight_level);
98*4882a593Smuzhiyun
99*4882a593Smuzhiyun switch (amdgpu_encoder->encoder_id) {
100*4882a593Smuzhiyun case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
101*4882a593Smuzhiyun case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_LVTMA:
102*4882a593Smuzhiyun case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
103*4882a593Smuzhiyun case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
104*4882a593Smuzhiyun case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3:
105*4882a593Smuzhiyun if (dig->backlight_level == 0)
106*4882a593Smuzhiyun amdgpu_atombios_encoder_setup_dig_transmitter(encoder,
107*4882a593Smuzhiyun ATOM_TRANSMITTER_ACTION_LCD_BLOFF, 0, 0);
108*4882a593Smuzhiyun else {
109*4882a593Smuzhiyun amdgpu_atombios_encoder_setup_dig_transmitter(encoder,
110*4882a593Smuzhiyun ATOM_TRANSMITTER_ACTION_BL_BRIGHTNESS_CONTROL, 0, 0);
111*4882a593Smuzhiyun amdgpu_atombios_encoder_setup_dig_transmitter(encoder,
112*4882a593Smuzhiyun ATOM_TRANSMITTER_ACTION_LCD_BLON, 0, 0);
113*4882a593Smuzhiyun }
114*4882a593Smuzhiyun break;
115*4882a593Smuzhiyun default:
116*4882a593Smuzhiyun break;
117*4882a593Smuzhiyun }
118*4882a593Smuzhiyun }
119*4882a593Smuzhiyun }
120*4882a593Smuzhiyun
121*4882a593Smuzhiyun #if defined(CONFIG_BACKLIGHT_CLASS_DEVICE) || defined(CONFIG_BACKLIGHT_CLASS_DEVICE_MODULE)
122*4882a593Smuzhiyun
amdgpu_atombios_encoder_backlight_level(struct backlight_device * bd)123*4882a593Smuzhiyun static u8 amdgpu_atombios_encoder_backlight_level(struct backlight_device *bd)
124*4882a593Smuzhiyun {
125*4882a593Smuzhiyun u8 level;
126*4882a593Smuzhiyun
127*4882a593Smuzhiyun /* Convert brightness to hardware level */
128*4882a593Smuzhiyun if (bd->props.brightness < 0)
129*4882a593Smuzhiyun level = 0;
130*4882a593Smuzhiyun else if (bd->props.brightness > AMDGPU_MAX_BL_LEVEL)
131*4882a593Smuzhiyun level = AMDGPU_MAX_BL_LEVEL;
132*4882a593Smuzhiyun else
133*4882a593Smuzhiyun level = bd->props.brightness;
134*4882a593Smuzhiyun
135*4882a593Smuzhiyun return level;
136*4882a593Smuzhiyun }
137*4882a593Smuzhiyun
amdgpu_atombios_encoder_update_backlight_status(struct backlight_device * bd)138*4882a593Smuzhiyun static int amdgpu_atombios_encoder_update_backlight_status(struct backlight_device *bd)
139*4882a593Smuzhiyun {
140*4882a593Smuzhiyun struct amdgpu_backlight_privdata *pdata = bl_get_data(bd);
141*4882a593Smuzhiyun struct amdgpu_encoder *amdgpu_encoder = pdata->encoder;
142*4882a593Smuzhiyun
143*4882a593Smuzhiyun amdgpu_atombios_encoder_set_backlight_level(amdgpu_encoder,
144*4882a593Smuzhiyun amdgpu_atombios_encoder_backlight_level(bd));
145*4882a593Smuzhiyun
146*4882a593Smuzhiyun return 0;
147*4882a593Smuzhiyun }
148*4882a593Smuzhiyun
149*4882a593Smuzhiyun static int
amdgpu_atombios_encoder_get_backlight_brightness(struct backlight_device * bd)150*4882a593Smuzhiyun amdgpu_atombios_encoder_get_backlight_brightness(struct backlight_device *bd)
151*4882a593Smuzhiyun {
152*4882a593Smuzhiyun struct amdgpu_backlight_privdata *pdata = bl_get_data(bd);
153*4882a593Smuzhiyun struct amdgpu_encoder *amdgpu_encoder = pdata->encoder;
154*4882a593Smuzhiyun struct drm_device *dev = amdgpu_encoder->base.dev;
155*4882a593Smuzhiyun struct amdgpu_device *adev = drm_to_adev(dev);
156*4882a593Smuzhiyun
157*4882a593Smuzhiyun return amdgpu_atombios_encoder_get_backlight_level_from_reg(adev);
158*4882a593Smuzhiyun }
159*4882a593Smuzhiyun
160*4882a593Smuzhiyun static const struct backlight_ops amdgpu_atombios_encoder_backlight_ops = {
161*4882a593Smuzhiyun .get_brightness = amdgpu_atombios_encoder_get_backlight_brightness,
162*4882a593Smuzhiyun .update_status = amdgpu_atombios_encoder_update_backlight_status,
163*4882a593Smuzhiyun };
164*4882a593Smuzhiyun
amdgpu_atombios_encoder_init_backlight(struct amdgpu_encoder * amdgpu_encoder,struct drm_connector * drm_connector)165*4882a593Smuzhiyun void amdgpu_atombios_encoder_init_backlight(struct amdgpu_encoder *amdgpu_encoder,
166*4882a593Smuzhiyun struct drm_connector *drm_connector)
167*4882a593Smuzhiyun {
168*4882a593Smuzhiyun struct drm_device *dev = amdgpu_encoder->base.dev;
169*4882a593Smuzhiyun struct amdgpu_device *adev = drm_to_adev(dev);
170*4882a593Smuzhiyun struct backlight_device *bd;
171*4882a593Smuzhiyun struct backlight_properties props;
172*4882a593Smuzhiyun struct amdgpu_backlight_privdata *pdata;
173*4882a593Smuzhiyun struct amdgpu_encoder_atom_dig *dig;
174*4882a593Smuzhiyun u8 backlight_level;
175*4882a593Smuzhiyun char bl_name[16];
176*4882a593Smuzhiyun
177*4882a593Smuzhiyun /* Mac laptops with multiple GPUs use the gmux driver for backlight
178*4882a593Smuzhiyun * so don't register a backlight device
179*4882a593Smuzhiyun */
180*4882a593Smuzhiyun if ((adev->pdev->subsystem_vendor == PCI_VENDOR_ID_APPLE) &&
181*4882a593Smuzhiyun (adev->pdev->device == 0x6741))
182*4882a593Smuzhiyun return;
183*4882a593Smuzhiyun
184*4882a593Smuzhiyun if (!amdgpu_encoder->enc_priv)
185*4882a593Smuzhiyun return;
186*4882a593Smuzhiyun
187*4882a593Smuzhiyun if (!(adev->mode_info.firmware_flags & ATOM_BIOS_INFO_BL_CONTROLLED_BY_GPU))
188*4882a593Smuzhiyun return;
189*4882a593Smuzhiyun
190*4882a593Smuzhiyun pdata = kmalloc(sizeof(struct amdgpu_backlight_privdata), GFP_KERNEL);
191*4882a593Smuzhiyun if (!pdata) {
192*4882a593Smuzhiyun DRM_ERROR("Memory allocation failed\n");
193*4882a593Smuzhiyun goto error;
194*4882a593Smuzhiyun }
195*4882a593Smuzhiyun
196*4882a593Smuzhiyun memset(&props, 0, sizeof(props));
197*4882a593Smuzhiyun props.max_brightness = AMDGPU_MAX_BL_LEVEL;
198*4882a593Smuzhiyun props.type = BACKLIGHT_RAW;
199*4882a593Smuzhiyun snprintf(bl_name, sizeof(bl_name),
200*4882a593Smuzhiyun "amdgpu_bl%d", dev->primary->index);
201*4882a593Smuzhiyun bd = backlight_device_register(bl_name, drm_connector->kdev,
202*4882a593Smuzhiyun pdata, &amdgpu_atombios_encoder_backlight_ops, &props);
203*4882a593Smuzhiyun if (IS_ERR(bd)) {
204*4882a593Smuzhiyun DRM_ERROR("Backlight registration failed\n");
205*4882a593Smuzhiyun goto error;
206*4882a593Smuzhiyun }
207*4882a593Smuzhiyun
208*4882a593Smuzhiyun pdata->encoder = amdgpu_encoder;
209*4882a593Smuzhiyun
210*4882a593Smuzhiyun backlight_level = amdgpu_atombios_encoder_get_backlight_level_from_reg(adev);
211*4882a593Smuzhiyun
212*4882a593Smuzhiyun dig = amdgpu_encoder->enc_priv;
213*4882a593Smuzhiyun dig->bl_dev = bd;
214*4882a593Smuzhiyun
215*4882a593Smuzhiyun bd->props.brightness = amdgpu_atombios_encoder_get_backlight_brightness(bd);
216*4882a593Smuzhiyun bd->props.power = FB_BLANK_UNBLANK;
217*4882a593Smuzhiyun backlight_update_status(bd);
218*4882a593Smuzhiyun
219*4882a593Smuzhiyun DRM_INFO("amdgpu atom DIG backlight initialized\n");
220*4882a593Smuzhiyun
221*4882a593Smuzhiyun return;
222*4882a593Smuzhiyun
223*4882a593Smuzhiyun error:
224*4882a593Smuzhiyun kfree(pdata);
225*4882a593Smuzhiyun return;
226*4882a593Smuzhiyun }
227*4882a593Smuzhiyun
228*4882a593Smuzhiyun void
amdgpu_atombios_encoder_fini_backlight(struct amdgpu_encoder * amdgpu_encoder)229*4882a593Smuzhiyun amdgpu_atombios_encoder_fini_backlight(struct amdgpu_encoder *amdgpu_encoder)
230*4882a593Smuzhiyun {
231*4882a593Smuzhiyun struct drm_device *dev = amdgpu_encoder->base.dev;
232*4882a593Smuzhiyun struct amdgpu_device *adev = drm_to_adev(dev);
233*4882a593Smuzhiyun struct backlight_device *bd = NULL;
234*4882a593Smuzhiyun struct amdgpu_encoder_atom_dig *dig;
235*4882a593Smuzhiyun
236*4882a593Smuzhiyun if (!amdgpu_encoder->enc_priv)
237*4882a593Smuzhiyun return;
238*4882a593Smuzhiyun
239*4882a593Smuzhiyun if (!(adev->mode_info.firmware_flags & ATOM_BIOS_INFO_BL_CONTROLLED_BY_GPU))
240*4882a593Smuzhiyun return;
241*4882a593Smuzhiyun
242*4882a593Smuzhiyun dig = amdgpu_encoder->enc_priv;
243*4882a593Smuzhiyun bd = dig->bl_dev;
244*4882a593Smuzhiyun dig->bl_dev = NULL;
245*4882a593Smuzhiyun
246*4882a593Smuzhiyun if (bd) {
247*4882a593Smuzhiyun struct amdgpu_legacy_backlight_privdata *pdata;
248*4882a593Smuzhiyun
249*4882a593Smuzhiyun pdata = bl_get_data(bd);
250*4882a593Smuzhiyun backlight_device_unregister(bd);
251*4882a593Smuzhiyun kfree(pdata);
252*4882a593Smuzhiyun
253*4882a593Smuzhiyun DRM_INFO("amdgpu atom LVDS backlight unloaded\n");
254*4882a593Smuzhiyun }
255*4882a593Smuzhiyun }
256*4882a593Smuzhiyun
257*4882a593Smuzhiyun #else /* !CONFIG_BACKLIGHT_CLASS_DEVICE */
258*4882a593Smuzhiyun
/* Stub used when the backlight class is not built in; the signature must
 * match the CONFIG_BACKLIGHT_CLASS_DEVICE version above so callers and the
 * header prototype compile in both configurations.
 */
void amdgpu_atombios_encoder_init_backlight(struct amdgpu_encoder *encoder,
					    struct drm_connector *drm_connector)
{
}
262*4882a593Smuzhiyun
/* Stub used when the backlight class is not built in; nothing to tear down. */
void amdgpu_atombios_encoder_fini_backlight(struct amdgpu_encoder *encoder)
{
}
266*4882a593Smuzhiyun
267*4882a593Smuzhiyun #endif
268*4882a593Smuzhiyun
amdgpu_atombios_encoder_is_digital(struct drm_encoder * encoder)269*4882a593Smuzhiyun bool amdgpu_atombios_encoder_is_digital(struct drm_encoder *encoder)
270*4882a593Smuzhiyun {
271*4882a593Smuzhiyun struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
272*4882a593Smuzhiyun switch (amdgpu_encoder->encoder_id) {
273*4882a593Smuzhiyun case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1:
274*4882a593Smuzhiyun case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
275*4882a593Smuzhiyun case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
276*4882a593Smuzhiyun case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
277*4882a593Smuzhiyun case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3:
278*4882a593Smuzhiyun return true;
279*4882a593Smuzhiyun default:
280*4882a593Smuzhiyun return false;
281*4882a593Smuzhiyun }
282*4882a593Smuzhiyun }
283*4882a593Smuzhiyun
amdgpu_atombios_encoder_mode_fixup(struct drm_encoder * encoder,const struct drm_display_mode * mode,struct drm_display_mode * adjusted_mode)284*4882a593Smuzhiyun bool amdgpu_atombios_encoder_mode_fixup(struct drm_encoder *encoder,
285*4882a593Smuzhiyun const struct drm_display_mode *mode,
286*4882a593Smuzhiyun struct drm_display_mode *adjusted_mode)
287*4882a593Smuzhiyun {
288*4882a593Smuzhiyun struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
289*4882a593Smuzhiyun
290*4882a593Smuzhiyun /* set the active encoder to connector routing */
291*4882a593Smuzhiyun amdgpu_encoder_set_active_device(encoder);
292*4882a593Smuzhiyun drm_mode_set_crtcinfo(adjusted_mode, 0);
293*4882a593Smuzhiyun
294*4882a593Smuzhiyun /* hw bug */
295*4882a593Smuzhiyun if ((mode->flags & DRM_MODE_FLAG_INTERLACE)
296*4882a593Smuzhiyun && (mode->crtc_vsync_start < (mode->crtc_vdisplay + 2)))
297*4882a593Smuzhiyun adjusted_mode->crtc_vsync_start = adjusted_mode->crtc_vdisplay + 2;
298*4882a593Smuzhiyun
299*4882a593Smuzhiyun /* vertical FP must be at least 1 */
300*4882a593Smuzhiyun if (mode->crtc_vsync_start == mode->crtc_vdisplay)
301*4882a593Smuzhiyun adjusted_mode->crtc_vsync_start++;
302*4882a593Smuzhiyun
303*4882a593Smuzhiyun /* get the native mode for scaling */
304*4882a593Smuzhiyun if (amdgpu_encoder->active_device & (ATOM_DEVICE_LCD_SUPPORT))
305*4882a593Smuzhiyun amdgpu_panel_mode_fixup(encoder, adjusted_mode);
306*4882a593Smuzhiyun else if (amdgpu_encoder->rmx_type != RMX_OFF)
307*4882a593Smuzhiyun amdgpu_panel_mode_fixup(encoder, adjusted_mode);
308*4882a593Smuzhiyun
309*4882a593Smuzhiyun if ((amdgpu_encoder->active_device & (ATOM_DEVICE_DFP_SUPPORT | ATOM_DEVICE_LCD_SUPPORT)) ||
310*4882a593Smuzhiyun (amdgpu_encoder_get_dp_bridge_encoder_id(encoder) != ENCODER_OBJECT_ID_NONE)) {
311*4882a593Smuzhiyun struct drm_connector *connector = amdgpu_get_connector_for_encoder(encoder);
312*4882a593Smuzhiyun amdgpu_atombios_dp_set_link_config(connector, adjusted_mode);
313*4882a593Smuzhiyun }
314*4882a593Smuzhiyun
315*4882a593Smuzhiyun return true;
316*4882a593Smuzhiyun }
317*4882a593Smuzhiyun
318*4882a593Smuzhiyun static void
amdgpu_atombios_encoder_setup_dac(struct drm_encoder * encoder,int action)319*4882a593Smuzhiyun amdgpu_atombios_encoder_setup_dac(struct drm_encoder *encoder, int action)
320*4882a593Smuzhiyun {
321*4882a593Smuzhiyun struct drm_device *dev = encoder->dev;
322*4882a593Smuzhiyun struct amdgpu_device *adev = drm_to_adev(dev);
323*4882a593Smuzhiyun struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
324*4882a593Smuzhiyun DAC_ENCODER_CONTROL_PS_ALLOCATION args;
325*4882a593Smuzhiyun int index = 0;
326*4882a593Smuzhiyun
327*4882a593Smuzhiyun memset(&args, 0, sizeof(args));
328*4882a593Smuzhiyun
329*4882a593Smuzhiyun switch (amdgpu_encoder->encoder_id) {
330*4882a593Smuzhiyun case ENCODER_OBJECT_ID_INTERNAL_DAC1:
331*4882a593Smuzhiyun case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC1:
332*4882a593Smuzhiyun index = GetIndexIntoMasterTable(COMMAND, DAC1EncoderControl);
333*4882a593Smuzhiyun break;
334*4882a593Smuzhiyun case ENCODER_OBJECT_ID_INTERNAL_DAC2:
335*4882a593Smuzhiyun case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC2:
336*4882a593Smuzhiyun index = GetIndexIntoMasterTable(COMMAND, DAC2EncoderControl);
337*4882a593Smuzhiyun break;
338*4882a593Smuzhiyun }
339*4882a593Smuzhiyun
340*4882a593Smuzhiyun args.ucAction = action;
341*4882a593Smuzhiyun args.ucDacStandard = ATOM_DAC1_PS2;
342*4882a593Smuzhiyun args.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
343*4882a593Smuzhiyun
344*4882a593Smuzhiyun amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args);
345*4882a593Smuzhiyun
346*4882a593Smuzhiyun }
347*4882a593Smuzhiyun
amdgpu_atombios_encoder_get_bpc(struct drm_encoder * encoder)348*4882a593Smuzhiyun static u8 amdgpu_atombios_encoder_get_bpc(struct drm_encoder *encoder)
349*4882a593Smuzhiyun {
350*4882a593Smuzhiyun int bpc = 8;
351*4882a593Smuzhiyun
352*4882a593Smuzhiyun if (encoder->crtc) {
353*4882a593Smuzhiyun struct amdgpu_crtc *amdgpu_crtc = to_amdgpu_crtc(encoder->crtc);
354*4882a593Smuzhiyun bpc = amdgpu_crtc->bpc;
355*4882a593Smuzhiyun }
356*4882a593Smuzhiyun
357*4882a593Smuzhiyun switch (bpc) {
358*4882a593Smuzhiyun case 0:
359*4882a593Smuzhiyun return PANEL_BPC_UNDEFINE;
360*4882a593Smuzhiyun case 6:
361*4882a593Smuzhiyun return PANEL_6BIT_PER_COLOR;
362*4882a593Smuzhiyun case 8:
363*4882a593Smuzhiyun default:
364*4882a593Smuzhiyun return PANEL_8BIT_PER_COLOR;
365*4882a593Smuzhiyun case 10:
366*4882a593Smuzhiyun return PANEL_10BIT_PER_COLOR;
367*4882a593Smuzhiyun case 12:
368*4882a593Smuzhiyun return PANEL_12BIT_PER_COLOR;
369*4882a593Smuzhiyun case 16:
370*4882a593Smuzhiyun return PANEL_16BIT_PER_COLOR;
371*4882a593Smuzhiyun }
372*4882a593Smuzhiyun }
373*4882a593Smuzhiyun
374*4882a593Smuzhiyun union dvo_encoder_control {
375*4882a593Smuzhiyun ENABLE_EXTERNAL_TMDS_ENCODER_PS_ALLOCATION ext_tmds;
376*4882a593Smuzhiyun DVO_ENCODER_CONTROL_PS_ALLOCATION dvo;
377*4882a593Smuzhiyun DVO_ENCODER_CONTROL_PS_ALLOCATION_V3 dvo_v3;
378*4882a593Smuzhiyun DVO_ENCODER_CONTROL_PS_ALLOCATION_V1_4 dvo_v4;
379*4882a593Smuzhiyun };
380*4882a593Smuzhiyun
381*4882a593Smuzhiyun static void
amdgpu_atombios_encoder_setup_dvo(struct drm_encoder * encoder,int action)382*4882a593Smuzhiyun amdgpu_atombios_encoder_setup_dvo(struct drm_encoder *encoder, int action)
383*4882a593Smuzhiyun {
384*4882a593Smuzhiyun struct drm_device *dev = encoder->dev;
385*4882a593Smuzhiyun struct amdgpu_device *adev = drm_to_adev(dev);
386*4882a593Smuzhiyun struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
387*4882a593Smuzhiyun union dvo_encoder_control args;
388*4882a593Smuzhiyun int index = GetIndexIntoMasterTable(COMMAND, DVOEncoderControl);
389*4882a593Smuzhiyun uint8_t frev, crev;
390*4882a593Smuzhiyun
391*4882a593Smuzhiyun memset(&args, 0, sizeof(args));
392*4882a593Smuzhiyun
393*4882a593Smuzhiyun if (!amdgpu_atom_parse_cmd_header(adev->mode_info.atom_context, index, &frev, &crev))
394*4882a593Smuzhiyun return;
395*4882a593Smuzhiyun
396*4882a593Smuzhiyun switch (frev) {
397*4882a593Smuzhiyun case 1:
398*4882a593Smuzhiyun switch (crev) {
399*4882a593Smuzhiyun case 1:
400*4882a593Smuzhiyun /* R4xx, R5xx */
401*4882a593Smuzhiyun args.ext_tmds.sXTmdsEncoder.ucEnable = action;
402*4882a593Smuzhiyun
403*4882a593Smuzhiyun if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
404*4882a593Smuzhiyun args.ext_tmds.sXTmdsEncoder.ucMisc |= PANEL_ENCODER_MISC_DUAL;
405*4882a593Smuzhiyun
406*4882a593Smuzhiyun args.ext_tmds.sXTmdsEncoder.ucMisc |= ATOM_PANEL_MISC_888RGB;
407*4882a593Smuzhiyun break;
408*4882a593Smuzhiyun case 2:
409*4882a593Smuzhiyun /* RS600/690/740 */
410*4882a593Smuzhiyun args.dvo.sDVOEncoder.ucAction = action;
411*4882a593Smuzhiyun args.dvo.sDVOEncoder.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
412*4882a593Smuzhiyun /* DFP1, CRT1, TV1 depending on the type of port */
413*4882a593Smuzhiyun args.dvo.sDVOEncoder.ucDeviceType = ATOM_DEVICE_DFP1_INDEX;
414*4882a593Smuzhiyun
415*4882a593Smuzhiyun if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
416*4882a593Smuzhiyun args.dvo.sDVOEncoder.usDevAttr.sDigAttrib.ucAttribute |= PANEL_ENCODER_MISC_DUAL;
417*4882a593Smuzhiyun break;
418*4882a593Smuzhiyun case 3:
419*4882a593Smuzhiyun /* R6xx */
420*4882a593Smuzhiyun args.dvo_v3.ucAction = action;
421*4882a593Smuzhiyun args.dvo_v3.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
422*4882a593Smuzhiyun args.dvo_v3.ucDVOConfig = 0; /* XXX */
423*4882a593Smuzhiyun break;
424*4882a593Smuzhiyun case 4:
425*4882a593Smuzhiyun /* DCE8 */
426*4882a593Smuzhiyun args.dvo_v4.ucAction = action;
427*4882a593Smuzhiyun args.dvo_v4.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
428*4882a593Smuzhiyun args.dvo_v4.ucDVOConfig = 0; /* XXX */
429*4882a593Smuzhiyun args.dvo_v4.ucBitPerColor = amdgpu_atombios_encoder_get_bpc(encoder);
430*4882a593Smuzhiyun break;
431*4882a593Smuzhiyun default:
432*4882a593Smuzhiyun DRM_ERROR("Unknown table version %d, %d\n", frev, crev);
433*4882a593Smuzhiyun break;
434*4882a593Smuzhiyun }
435*4882a593Smuzhiyun break;
436*4882a593Smuzhiyun default:
437*4882a593Smuzhiyun DRM_ERROR("Unknown table version %d, %d\n", frev, crev);
438*4882a593Smuzhiyun break;
439*4882a593Smuzhiyun }
440*4882a593Smuzhiyun
441*4882a593Smuzhiyun amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args);
442*4882a593Smuzhiyun }
443*4882a593Smuzhiyun
amdgpu_atombios_encoder_get_encoder_mode(struct drm_encoder * encoder)444*4882a593Smuzhiyun int amdgpu_atombios_encoder_get_encoder_mode(struct drm_encoder *encoder)
445*4882a593Smuzhiyun {
446*4882a593Smuzhiyun struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
447*4882a593Smuzhiyun struct drm_connector *connector;
448*4882a593Smuzhiyun struct amdgpu_connector *amdgpu_connector;
449*4882a593Smuzhiyun struct amdgpu_connector_atom_dig *dig_connector;
450*4882a593Smuzhiyun
451*4882a593Smuzhiyun /* dp bridges are always DP */
452*4882a593Smuzhiyun if (amdgpu_encoder_get_dp_bridge_encoder_id(encoder) != ENCODER_OBJECT_ID_NONE)
453*4882a593Smuzhiyun return ATOM_ENCODER_MODE_DP;
454*4882a593Smuzhiyun
455*4882a593Smuzhiyun /* DVO is always DVO */
456*4882a593Smuzhiyun if ((amdgpu_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_DVO1) ||
457*4882a593Smuzhiyun (amdgpu_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1))
458*4882a593Smuzhiyun return ATOM_ENCODER_MODE_DVO;
459*4882a593Smuzhiyun
460*4882a593Smuzhiyun connector = amdgpu_get_connector_for_encoder(encoder);
461*4882a593Smuzhiyun /* if we don't have an active device yet, just use one of
462*4882a593Smuzhiyun * the connectors tied to the encoder.
463*4882a593Smuzhiyun */
464*4882a593Smuzhiyun if (!connector)
465*4882a593Smuzhiyun connector = amdgpu_get_connector_for_encoder_init(encoder);
466*4882a593Smuzhiyun amdgpu_connector = to_amdgpu_connector(connector);
467*4882a593Smuzhiyun
468*4882a593Smuzhiyun switch (connector->connector_type) {
469*4882a593Smuzhiyun case DRM_MODE_CONNECTOR_DVII:
470*4882a593Smuzhiyun case DRM_MODE_CONNECTOR_HDMIB: /* HDMI-B is basically DL-DVI; analog works fine */
471*4882a593Smuzhiyun if (amdgpu_audio != 0) {
472*4882a593Smuzhiyun if (amdgpu_connector->use_digital &&
473*4882a593Smuzhiyun (amdgpu_connector->audio == AMDGPU_AUDIO_ENABLE))
474*4882a593Smuzhiyun return ATOM_ENCODER_MODE_HDMI;
475*4882a593Smuzhiyun else if (drm_detect_hdmi_monitor(amdgpu_connector_edid(connector)) &&
476*4882a593Smuzhiyun (amdgpu_connector->audio == AMDGPU_AUDIO_AUTO))
477*4882a593Smuzhiyun return ATOM_ENCODER_MODE_HDMI;
478*4882a593Smuzhiyun else if (amdgpu_connector->use_digital)
479*4882a593Smuzhiyun return ATOM_ENCODER_MODE_DVI;
480*4882a593Smuzhiyun else
481*4882a593Smuzhiyun return ATOM_ENCODER_MODE_CRT;
482*4882a593Smuzhiyun } else if (amdgpu_connector->use_digital) {
483*4882a593Smuzhiyun return ATOM_ENCODER_MODE_DVI;
484*4882a593Smuzhiyun } else {
485*4882a593Smuzhiyun return ATOM_ENCODER_MODE_CRT;
486*4882a593Smuzhiyun }
487*4882a593Smuzhiyun break;
488*4882a593Smuzhiyun case DRM_MODE_CONNECTOR_DVID:
489*4882a593Smuzhiyun case DRM_MODE_CONNECTOR_HDMIA:
490*4882a593Smuzhiyun default:
491*4882a593Smuzhiyun if (amdgpu_audio != 0) {
492*4882a593Smuzhiyun if (amdgpu_connector->audio == AMDGPU_AUDIO_ENABLE)
493*4882a593Smuzhiyun return ATOM_ENCODER_MODE_HDMI;
494*4882a593Smuzhiyun else if (drm_detect_hdmi_monitor(amdgpu_connector_edid(connector)) &&
495*4882a593Smuzhiyun (amdgpu_connector->audio == AMDGPU_AUDIO_AUTO))
496*4882a593Smuzhiyun return ATOM_ENCODER_MODE_HDMI;
497*4882a593Smuzhiyun else
498*4882a593Smuzhiyun return ATOM_ENCODER_MODE_DVI;
499*4882a593Smuzhiyun } else {
500*4882a593Smuzhiyun return ATOM_ENCODER_MODE_DVI;
501*4882a593Smuzhiyun }
502*4882a593Smuzhiyun break;
503*4882a593Smuzhiyun case DRM_MODE_CONNECTOR_LVDS:
504*4882a593Smuzhiyun return ATOM_ENCODER_MODE_LVDS;
505*4882a593Smuzhiyun break;
506*4882a593Smuzhiyun case DRM_MODE_CONNECTOR_DisplayPort:
507*4882a593Smuzhiyun dig_connector = amdgpu_connector->con_priv;
508*4882a593Smuzhiyun if ((dig_connector->dp_sink_type == CONNECTOR_OBJECT_ID_DISPLAYPORT) ||
509*4882a593Smuzhiyun (dig_connector->dp_sink_type == CONNECTOR_OBJECT_ID_eDP)) {
510*4882a593Smuzhiyun return ATOM_ENCODER_MODE_DP;
511*4882a593Smuzhiyun } else if (amdgpu_audio != 0) {
512*4882a593Smuzhiyun if (amdgpu_connector->audio == AMDGPU_AUDIO_ENABLE)
513*4882a593Smuzhiyun return ATOM_ENCODER_MODE_HDMI;
514*4882a593Smuzhiyun else if (drm_detect_hdmi_monitor(amdgpu_connector_edid(connector)) &&
515*4882a593Smuzhiyun (amdgpu_connector->audio == AMDGPU_AUDIO_AUTO))
516*4882a593Smuzhiyun return ATOM_ENCODER_MODE_HDMI;
517*4882a593Smuzhiyun else
518*4882a593Smuzhiyun return ATOM_ENCODER_MODE_DVI;
519*4882a593Smuzhiyun } else {
520*4882a593Smuzhiyun return ATOM_ENCODER_MODE_DVI;
521*4882a593Smuzhiyun }
522*4882a593Smuzhiyun break;
523*4882a593Smuzhiyun case DRM_MODE_CONNECTOR_eDP:
524*4882a593Smuzhiyun return ATOM_ENCODER_MODE_DP;
525*4882a593Smuzhiyun case DRM_MODE_CONNECTOR_DVIA:
526*4882a593Smuzhiyun case DRM_MODE_CONNECTOR_VGA:
527*4882a593Smuzhiyun return ATOM_ENCODER_MODE_CRT;
528*4882a593Smuzhiyun break;
529*4882a593Smuzhiyun case DRM_MODE_CONNECTOR_Composite:
530*4882a593Smuzhiyun case DRM_MODE_CONNECTOR_SVIDEO:
531*4882a593Smuzhiyun case DRM_MODE_CONNECTOR_9PinDIN:
532*4882a593Smuzhiyun /* fix me */
533*4882a593Smuzhiyun return ATOM_ENCODER_MODE_TV;
534*4882a593Smuzhiyun /*return ATOM_ENCODER_MODE_CV;*/
535*4882a593Smuzhiyun break;
536*4882a593Smuzhiyun }
537*4882a593Smuzhiyun }
538*4882a593Smuzhiyun
539*4882a593Smuzhiyun /*
540*4882a593Smuzhiyun * DIG Encoder/Transmitter Setup
541*4882a593Smuzhiyun *
542*4882a593Smuzhiyun * DCE 6.0
543*4882a593Smuzhiyun * - 3 DIG transmitter blocks UNIPHY0/1/2 (links A and B).
544*4882a593Smuzhiyun * Supports up to 6 digital outputs
545*4882a593Smuzhiyun * - 6 DIG encoder blocks.
546*4882a593Smuzhiyun * - DIG to PHY mapping is hardcoded
547*4882a593Smuzhiyun * DIG1 drives UNIPHY0 link A, A+B
548*4882a593Smuzhiyun * DIG2 drives UNIPHY0 link B
549*4882a593Smuzhiyun * DIG3 drives UNIPHY1 link A, A+B
550*4882a593Smuzhiyun * DIG4 drives UNIPHY1 link B
551*4882a593Smuzhiyun * DIG5 drives UNIPHY2 link A, A+B
552*4882a593Smuzhiyun * DIG6 drives UNIPHY2 link B
553*4882a593Smuzhiyun *
554*4882a593Smuzhiyun * Routing
555*4882a593Smuzhiyun * crtc -> dig encoder -> UNIPHY/LVTMA (1 or 2 links)
556*4882a593Smuzhiyun * Examples:
557*4882a593Smuzhiyun * crtc0 -> dig2 -> LVTMA links A+B -> TMDS/HDMI
558*4882a593Smuzhiyun * crtc1 -> dig1 -> UNIPHY0 link B -> DP
559*4882a593Smuzhiyun * crtc0 -> dig1 -> UNIPHY2 link A -> LVDS
560*4882a593Smuzhiyun * crtc1 -> dig2 -> UNIPHY1 link B+A -> TMDS/HDMI
561*4882a593Smuzhiyun */
562*4882a593Smuzhiyun
563*4882a593Smuzhiyun union dig_encoder_control {
564*4882a593Smuzhiyun DIG_ENCODER_CONTROL_PS_ALLOCATION v1;
565*4882a593Smuzhiyun DIG_ENCODER_CONTROL_PARAMETERS_V2 v2;
566*4882a593Smuzhiyun DIG_ENCODER_CONTROL_PARAMETERS_V3 v3;
567*4882a593Smuzhiyun DIG_ENCODER_CONTROL_PARAMETERS_V4 v4;
568*4882a593Smuzhiyun DIG_ENCODER_CONTROL_PARAMETERS_V5 v5;
569*4882a593Smuzhiyun };
570*4882a593Smuzhiyun
571*4882a593Smuzhiyun void
amdgpu_atombios_encoder_setup_dig_encoder(struct drm_encoder * encoder,int action,int panel_mode)572*4882a593Smuzhiyun amdgpu_atombios_encoder_setup_dig_encoder(struct drm_encoder *encoder,
573*4882a593Smuzhiyun int action, int panel_mode)
574*4882a593Smuzhiyun {
575*4882a593Smuzhiyun struct drm_device *dev = encoder->dev;
576*4882a593Smuzhiyun struct amdgpu_device *adev = drm_to_adev(dev);
577*4882a593Smuzhiyun struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
578*4882a593Smuzhiyun struct amdgpu_encoder_atom_dig *dig = amdgpu_encoder->enc_priv;
579*4882a593Smuzhiyun struct drm_connector *connector = amdgpu_get_connector_for_encoder(encoder);
580*4882a593Smuzhiyun union dig_encoder_control args;
581*4882a593Smuzhiyun int index = GetIndexIntoMasterTable(COMMAND, DIGxEncoderControl);
582*4882a593Smuzhiyun uint8_t frev, crev;
583*4882a593Smuzhiyun int dp_clock = 0;
584*4882a593Smuzhiyun int dp_lane_count = 0;
585*4882a593Smuzhiyun int hpd_id = AMDGPU_HPD_NONE;
586*4882a593Smuzhiyun
587*4882a593Smuzhiyun if (connector) {
588*4882a593Smuzhiyun struct amdgpu_connector *amdgpu_connector = to_amdgpu_connector(connector);
589*4882a593Smuzhiyun struct amdgpu_connector_atom_dig *dig_connector =
590*4882a593Smuzhiyun amdgpu_connector->con_priv;
591*4882a593Smuzhiyun
592*4882a593Smuzhiyun dp_clock = dig_connector->dp_clock;
593*4882a593Smuzhiyun dp_lane_count = dig_connector->dp_lane_count;
594*4882a593Smuzhiyun hpd_id = amdgpu_connector->hpd.hpd;
595*4882a593Smuzhiyun }
596*4882a593Smuzhiyun
597*4882a593Smuzhiyun /* no dig encoder assigned */
598*4882a593Smuzhiyun if (dig->dig_encoder == -1)
599*4882a593Smuzhiyun return;
600*4882a593Smuzhiyun
601*4882a593Smuzhiyun memset(&args, 0, sizeof(args));
602*4882a593Smuzhiyun
603*4882a593Smuzhiyun if (!amdgpu_atom_parse_cmd_header(adev->mode_info.atom_context, index, &frev, &crev))
604*4882a593Smuzhiyun return;
605*4882a593Smuzhiyun
606*4882a593Smuzhiyun switch (frev) {
607*4882a593Smuzhiyun case 1:
608*4882a593Smuzhiyun switch (crev) {
609*4882a593Smuzhiyun case 1:
610*4882a593Smuzhiyun args.v1.ucAction = action;
611*4882a593Smuzhiyun args.v1.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
612*4882a593Smuzhiyun if (action == ATOM_ENCODER_CMD_SETUP_PANEL_MODE)
613*4882a593Smuzhiyun args.v3.ucPanelMode = panel_mode;
614*4882a593Smuzhiyun else
615*4882a593Smuzhiyun args.v1.ucEncoderMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);
616*4882a593Smuzhiyun
617*4882a593Smuzhiyun if (ENCODER_MODE_IS_DP(args.v1.ucEncoderMode))
618*4882a593Smuzhiyun args.v1.ucLaneNum = dp_lane_count;
619*4882a593Smuzhiyun else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
620*4882a593Smuzhiyun args.v1.ucLaneNum = 8;
621*4882a593Smuzhiyun else
622*4882a593Smuzhiyun args.v1.ucLaneNum = 4;
623*4882a593Smuzhiyun
624*4882a593Smuzhiyun if (ENCODER_MODE_IS_DP(args.v1.ucEncoderMode) && (dp_clock == 270000))
625*4882a593Smuzhiyun args.v1.ucConfig |= ATOM_ENCODER_CONFIG_DPLINKRATE_2_70GHZ;
626*4882a593Smuzhiyun switch (amdgpu_encoder->encoder_id) {
627*4882a593Smuzhiyun case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
628*4882a593Smuzhiyun args.v1.ucConfig = ATOM_ENCODER_CONFIG_V2_TRANSMITTER1;
629*4882a593Smuzhiyun break;
630*4882a593Smuzhiyun case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
631*4882a593Smuzhiyun case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_LVTMA:
632*4882a593Smuzhiyun args.v1.ucConfig = ATOM_ENCODER_CONFIG_V2_TRANSMITTER2;
633*4882a593Smuzhiyun break;
634*4882a593Smuzhiyun case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
635*4882a593Smuzhiyun args.v1.ucConfig = ATOM_ENCODER_CONFIG_V2_TRANSMITTER3;
636*4882a593Smuzhiyun break;
637*4882a593Smuzhiyun }
638*4882a593Smuzhiyun if (dig->linkb)
639*4882a593Smuzhiyun args.v1.ucConfig |= ATOM_ENCODER_CONFIG_LINKB;
640*4882a593Smuzhiyun else
641*4882a593Smuzhiyun args.v1.ucConfig |= ATOM_ENCODER_CONFIG_LINKA;
642*4882a593Smuzhiyun break;
643*4882a593Smuzhiyun case 2:
644*4882a593Smuzhiyun case 3:
645*4882a593Smuzhiyun args.v3.ucAction = action;
646*4882a593Smuzhiyun args.v3.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
647*4882a593Smuzhiyun if (action == ATOM_ENCODER_CMD_SETUP_PANEL_MODE)
648*4882a593Smuzhiyun args.v3.ucPanelMode = panel_mode;
649*4882a593Smuzhiyun else
650*4882a593Smuzhiyun args.v3.ucEncoderMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);
651*4882a593Smuzhiyun
652*4882a593Smuzhiyun if (ENCODER_MODE_IS_DP(args.v3.ucEncoderMode))
653*4882a593Smuzhiyun args.v3.ucLaneNum = dp_lane_count;
654*4882a593Smuzhiyun else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
655*4882a593Smuzhiyun args.v3.ucLaneNum = 8;
656*4882a593Smuzhiyun else
657*4882a593Smuzhiyun args.v3.ucLaneNum = 4;
658*4882a593Smuzhiyun
659*4882a593Smuzhiyun if (ENCODER_MODE_IS_DP(args.v3.ucEncoderMode) && (dp_clock == 270000))
660*4882a593Smuzhiyun args.v1.ucConfig |= ATOM_ENCODER_CONFIG_V3_DPLINKRATE_2_70GHZ;
661*4882a593Smuzhiyun args.v3.acConfig.ucDigSel = dig->dig_encoder;
662*4882a593Smuzhiyun args.v3.ucBitPerColor = amdgpu_atombios_encoder_get_bpc(encoder);
663*4882a593Smuzhiyun break;
664*4882a593Smuzhiyun case 4:
665*4882a593Smuzhiyun args.v4.ucAction = action;
666*4882a593Smuzhiyun args.v4.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
667*4882a593Smuzhiyun if (action == ATOM_ENCODER_CMD_SETUP_PANEL_MODE)
668*4882a593Smuzhiyun args.v4.ucPanelMode = panel_mode;
669*4882a593Smuzhiyun else
670*4882a593Smuzhiyun args.v4.ucEncoderMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);
671*4882a593Smuzhiyun
672*4882a593Smuzhiyun if (ENCODER_MODE_IS_DP(args.v4.ucEncoderMode))
673*4882a593Smuzhiyun args.v4.ucLaneNum = dp_lane_count;
674*4882a593Smuzhiyun else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
675*4882a593Smuzhiyun args.v4.ucLaneNum = 8;
676*4882a593Smuzhiyun else
677*4882a593Smuzhiyun args.v4.ucLaneNum = 4;
678*4882a593Smuzhiyun
679*4882a593Smuzhiyun if (ENCODER_MODE_IS_DP(args.v4.ucEncoderMode)) {
680*4882a593Smuzhiyun if (dp_clock == 540000)
681*4882a593Smuzhiyun args.v1.ucConfig |= ATOM_ENCODER_CONFIG_V4_DPLINKRATE_5_40GHZ;
682*4882a593Smuzhiyun else if (dp_clock == 324000)
683*4882a593Smuzhiyun args.v1.ucConfig |= ATOM_ENCODER_CONFIG_V4_DPLINKRATE_3_24GHZ;
684*4882a593Smuzhiyun else if (dp_clock == 270000)
685*4882a593Smuzhiyun args.v1.ucConfig |= ATOM_ENCODER_CONFIG_V4_DPLINKRATE_2_70GHZ;
686*4882a593Smuzhiyun else
687*4882a593Smuzhiyun args.v1.ucConfig |= ATOM_ENCODER_CONFIG_V4_DPLINKRATE_1_62GHZ;
688*4882a593Smuzhiyun }
689*4882a593Smuzhiyun args.v4.acConfig.ucDigSel = dig->dig_encoder;
690*4882a593Smuzhiyun args.v4.ucBitPerColor = amdgpu_atombios_encoder_get_bpc(encoder);
691*4882a593Smuzhiyun if (hpd_id == AMDGPU_HPD_NONE)
692*4882a593Smuzhiyun args.v4.ucHPD_ID = 0;
693*4882a593Smuzhiyun else
694*4882a593Smuzhiyun args.v4.ucHPD_ID = hpd_id + 1;
695*4882a593Smuzhiyun break;
696*4882a593Smuzhiyun case 5:
697*4882a593Smuzhiyun switch (action) {
698*4882a593Smuzhiyun case ATOM_ENCODER_CMD_SETUP_PANEL_MODE:
699*4882a593Smuzhiyun args.v5.asDPPanelModeParam.ucAction = action;
700*4882a593Smuzhiyun args.v5.asDPPanelModeParam.ucPanelMode = panel_mode;
701*4882a593Smuzhiyun args.v5.asDPPanelModeParam.ucDigId = dig->dig_encoder;
702*4882a593Smuzhiyun break;
703*4882a593Smuzhiyun case ATOM_ENCODER_CMD_STREAM_SETUP:
704*4882a593Smuzhiyun args.v5.asStreamParam.ucAction = action;
705*4882a593Smuzhiyun args.v5.asStreamParam.ucDigId = dig->dig_encoder;
706*4882a593Smuzhiyun args.v5.asStreamParam.ucDigMode =
707*4882a593Smuzhiyun amdgpu_atombios_encoder_get_encoder_mode(encoder);
708*4882a593Smuzhiyun if (ENCODER_MODE_IS_DP(args.v5.asStreamParam.ucDigMode))
709*4882a593Smuzhiyun args.v5.asStreamParam.ucLaneNum = dp_lane_count;
710*4882a593Smuzhiyun else if (amdgpu_dig_monitor_is_duallink(encoder,
711*4882a593Smuzhiyun amdgpu_encoder->pixel_clock))
712*4882a593Smuzhiyun args.v5.asStreamParam.ucLaneNum = 8;
713*4882a593Smuzhiyun else
714*4882a593Smuzhiyun args.v5.asStreamParam.ucLaneNum = 4;
715*4882a593Smuzhiyun args.v5.asStreamParam.ulPixelClock =
716*4882a593Smuzhiyun cpu_to_le32(amdgpu_encoder->pixel_clock / 10);
717*4882a593Smuzhiyun args.v5.asStreamParam.ucBitPerColor =
718*4882a593Smuzhiyun amdgpu_atombios_encoder_get_bpc(encoder);
719*4882a593Smuzhiyun args.v5.asStreamParam.ucLinkRateIn270Mhz = dp_clock / 27000;
720*4882a593Smuzhiyun break;
721*4882a593Smuzhiyun case ATOM_ENCODER_CMD_DP_LINK_TRAINING_START:
722*4882a593Smuzhiyun case ATOM_ENCODER_CMD_DP_LINK_TRAINING_PATTERN1:
723*4882a593Smuzhiyun case ATOM_ENCODER_CMD_DP_LINK_TRAINING_PATTERN2:
724*4882a593Smuzhiyun case ATOM_ENCODER_CMD_DP_LINK_TRAINING_PATTERN3:
725*4882a593Smuzhiyun case ATOM_ENCODER_CMD_DP_LINK_TRAINING_PATTERN4:
726*4882a593Smuzhiyun case ATOM_ENCODER_CMD_DP_LINK_TRAINING_COMPLETE:
727*4882a593Smuzhiyun case ATOM_ENCODER_CMD_DP_VIDEO_OFF:
728*4882a593Smuzhiyun case ATOM_ENCODER_CMD_DP_VIDEO_ON:
729*4882a593Smuzhiyun args.v5.asCmdParam.ucAction = action;
730*4882a593Smuzhiyun args.v5.asCmdParam.ucDigId = dig->dig_encoder;
731*4882a593Smuzhiyun break;
732*4882a593Smuzhiyun default:
733*4882a593Smuzhiyun DRM_ERROR("Unsupported action 0x%x\n", action);
734*4882a593Smuzhiyun break;
735*4882a593Smuzhiyun }
736*4882a593Smuzhiyun break;
737*4882a593Smuzhiyun default:
738*4882a593Smuzhiyun DRM_ERROR("Unknown table version %d, %d\n", frev, crev);
739*4882a593Smuzhiyun break;
740*4882a593Smuzhiyun }
741*4882a593Smuzhiyun break;
742*4882a593Smuzhiyun default:
743*4882a593Smuzhiyun DRM_ERROR("Unknown table version %d, %d\n", frev, crev);
744*4882a593Smuzhiyun break;
745*4882a593Smuzhiyun }
746*4882a593Smuzhiyun
747*4882a593Smuzhiyun amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args);
748*4882a593Smuzhiyun
749*4882a593Smuzhiyun }
750*4882a593Smuzhiyun
751*4882a593Smuzhiyun union dig_transmitter_control {
752*4882a593Smuzhiyun DIG_TRANSMITTER_CONTROL_PS_ALLOCATION v1;
753*4882a593Smuzhiyun DIG_TRANSMITTER_CONTROL_PARAMETERS_V2 v2;
754*4882a593Smuzhiyun DIG_TRANSMITTER_CONTROL_PARAMETERS_V3 v3;
755*4882a593Smuzhiyun DIG_TRANSMITTER_CONTROL_PARAMETERS_V4 v4;
756*4882a593Smuzhiyun DIG_TRANSMITTER_CONTROL_PARAMETERS_V1_5 v5;
757*4882a593Smuzhiyun DIG_TRANSMITTER_CONTROL_PARAMETERS_V1_6 v6;
758*4882a593Smuzhiyun };
759*4882a593Smuzhiyun
760*4882a593Smuzhiyun void
amdgpu_atombios_encoder_setup_dig_transmitter(struct drm_encoder * encoder,int action,uint8_t lane_num,uint8_t lane_set)761*4882a593Smuzhiyun amdgpu_atombios_encoder_setup_dig_transmitter(struct drm_encoder *encoder, int action,
762*4882a593Smuzhiyun uint8_t lane_num, uint8_t lane_set)
763*4882a593Smuzhiyun {
764*4882a593Smuzhiyun struct drm_device *dev = encoder->dev;
765*4882a593Smuzhiyun struct amdgpu_device *adev = drm_to_adev(dev);
766*4882a593Smuzhiyun struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
767*4882a593Smuzhiyun struct amdgpu_encoder_atom_dig *dig = amdgpu_encoder->enc_priv;
768*4882a593Smuzhiyun struct drm_connector *connector;
769*4882a593Smuzhiyun union dig_transmitter_control args;
770*4882a593Smuzhiyun int index = 0;
771*4882a593Smuzhiyun uint8_t frev, crev;
772*4882a593Smuzhiyun bool is_dp = false;
773*4882a593Smuzhiyun int pll_id = 0;
774*4882a593Smuzhiyun int dp_clock = 0;
775*4882a593Smuzhiyun int dp_lane_count = 0;
776*4882a593Smuzhiyun int connector_object_id = 0;
777*4882a593Smuzhiyun int igp_lane_info = 0;
778*4882a593Smuzhiyun int dig_encoder = dig->dig_encoder;
779*4882a593Smuzhiyun int hpd_id = AMDGPU_HPD_NONE;
780*4882a593Smuzhiyun
781*4882a593Smuzhiyun if (action == ATOM_TRANSMITTER_ACTION_INIT) {
782*4882a593Smuzhiyun connector = amdgpu_get_connector_for_encoder_init(encoder);
783*4882a593Smuzhiyun /* just needed to avoid bailing in the encoder check. the encoder
784*4882a593Smuzhiyun * isn't used for init
785*4882a593Smuzhiyun */
786*4882a593Smuzhiyun dig_encoder = 0;
787*4882a593Smuzhiyun } else
788*4882a593Smuzhiyun connector = amdgpu_get_connector_for_encoder(encoder);
789*4882a593Smuzhiyun
790*4882a593Smuzhiyun if (connector) {
791*4882a593Smuzhiyun struct amdgpu_connector *amdgpu_connector = to_amdgpu_connector(connector);
792*4882a593Smuzhiyun struct amdgpu_connector_atom_dig *dig_connector =
793*4882a593Smuzhiyun amdgpu_connector->con_priv;
794*4882a593Smuzhiyun
795*4882a593Smuzhiyun hpd_id = amdgpu_connector->hpd.hpd;
796*4882a593Smuzhiyun dp_clock = dig_connector->dp_clock;
797*4882a593Smuzhiyun dp_lane_count = dig_connector->dp_lane_count;
798*4882a593Smuzhiyun connector_object_id =
799*4882a593Smuzhiyun (amdgpu_connector->connector_object_id & OBJECT_ID_MASK) >> OBJECT_ID_SHIFT;
800*4882a593Smuzhiyun }
801*4882a593Smuzhiyun
802*4882a593Smuzhiyun if (encoder->crtc) {
803*4882a593Smuzhiyun struct amdgpu_crtc *amdgpu_crtc = to_amdgpu_crtc(encoder->crtc);
804*4882a593Smuzhiyun pll_id = amdgpu_crtc->pll_id;
805*4882a593Smuzhiyun }
806*4882a593Smuzhiyun
807*4882a593Smuzhiyun /* no dig encoder assigned */
808*4882a593Smuzhiyun if (dig_encoder == -1)
809*4882a593Smuzhiyun return;
810*4882a593Smuzhiyun
811*4882a593Smuzhiyun if (ENCODER_MODE_IS_DP(amdgpu_atombios_encoder_get_encoder_mode(encoder)))
812*4882a593Smuzhiyun is_dp = true;
813*4882a593Smuzhiyun
814*4882a593Smuzhiyun memset(&args, 0, sizeof(args));
815*4882a593Smuzhiyun
816*4882a593Smuzhiyun switch (amdgpu_encoder->encoder_id) {
817*4882a593Smuzhiyun case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1:
818*4882a593Smuzhiyun index = GetIndexIntoMasterTable(COMMAND, DVOOutputControl);
819*4882a593Smuzhiyun break;
820*4882a593Smuzhiyun case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
821*4882a593Smuzhiyun case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
822*4882a593Smuzhiyun case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
823*4882a593Smuzhiyun case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3:
824*4882a593Smuzhiyun index = GetIndexIntoMasterTable(COMMAND, UNIPHYTransmitterControl);
825*4882a593Smuzhiyun break;
826*4882a593Smuzhiyun case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_LVTMA:
827*4882a593Smuzhiyun index = GetIndexIntoMasterTable(COMMAND, LVTMATransmitterControl);
828*4882a593Smuzhiyun break;
829*4882a593Smuzhiyun }
830*4882a593Smuzhiyun
831*4882a593Smuzhiyun if (!amdgpu_atom_parse_cmd_header(adev->mode_info.atom_context, index, &frev, &crev))
832*4882a593Smuzhiyun return;
833*4882a593Smuzhiyun
834*4882a593Smuzhiyun switch (frev) {
835*4882a593Smuzhiyun case 1:
836*4882a593Smuzhiyun switch (crev) {
837*4882a593Smuzhiyun case 1:
838*4882a593Smuzhiyun args.v1.ucAction = action;
839*4882a593Smuzhiyun if (action == ATOM_TRANSMITTER_ACTION_INIT) {
840*4882a593Smuzhiyun args.v1.usInitInfo = cpu_to_le16(connector_object_id);
841*4882a593Smuzhiyun } else if (action == ATOM_TRANSMITTER_ACTION_SETUP_VSEMPH) {
842*4882a593Smuzhiyun args.v1.asMode.ucLaneSel = lane_num;
843*4882a593Smuzhiyun args.v1.asMode.ucLaneSet = lane_set;
844*4882a593Smuzhiyun } else {
845*4882a593Smuzhiyun if (is_dp)
846*4882a593Smuzhiyun args.v1.usPixelClock = cpu_to_le16(dp_clock / 10);
847*4882a593Smuzhiyun else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
848*4882a593Smuzhiyun args.v1.usPixelClock = cpu_to_le16((amdgpu_encoder->pixel_clock / 2) / 10);
849*4882a593Smuzhiyun else
850*4882a593Smuzhiyun args.v1.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
851*4882a593Smuzhiyun }
852*4882a593Smuzhiyun
853*4882a593Smuzhiyun args.v1.ucConfig = ATOM_TRANSMITTER_CONFIG_CLKSRC_PPLL;
854*4882a593Smuzhiyun
855*4882a593Smuzhiyun if (dig_encoder)
856*4882a593Smuzhiyun args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_DIG2_ENCODER;
857*4882a593Smuzhiyun else
858*4882a593Smuzhiyun args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_DIG1_ENCODER;
859*4882a593Smuzhiyun
860*4882a593Smuzhiyun if ((adev->flags & AMD_IS_APU) &&
861*4882a593Smuzhiyun (amdgpu_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_UNIPHY)) {
862*4882a593Smuzhiyun if (is_dp ||
863*4882a593Smuzhiyun !amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock)) {
864*4882a593Smuzhiyun if (igp_lane_info & 0x1)
865*4882a593Smuzhiyun args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LANE_0_3;
866*4882a593Smuzhiyun else if (igp_lane_info & 0x2)
867*4882a593Smuzhiyun args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LANE_4_7;
868*4882a593Smuzhiyun else if (igp_lane_info & 0x4)
869*4882a593Smuzhiyun args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LANE_8_11;
870*4882a593Smuzhiyun else if (igp_lane_info & 0x8)
871*4882a593Smuzhiyun args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LANE_12_15;
872*4882a593Smuzhiyun } else {
873*4882a593Smuzhiyun if (igp_lane_info & 0x3)
874*4882a593Smuzhiyun args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LANE_0_7;
875*4882a593Smuzhiyun else if (igp_lane_info & 0xc)
876*4882a593Smuzhiyun args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LANE_8_15;
877*4882a593Smuzhiyun }
878*4882a593Smuzhiyun }
879*4882a593Smuzhiyun
880*4882a593Smuzhiyun if (dig->linkb)
881*4882a593Smuzhiyun args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LINKB;
882*4882a593Smuzhiyun else
883*4882a593Smuzhiyun args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_LINKA;
884*4882a593Smuzhiyun
885*4882a593Smuzhiyun if (is_dp)
886*4882a593Smuzhiyun args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_COHERENT;
887*4882a593Smuzhiyun else if (amdgpu_encoder->devices & (ATOM_DEVICE_DFP_SUPPORT)) {
888*4882a593Smuzhiyun if (dig->coherent_mode)
889*4882a593Smuzhiyun args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_COHERENT;
890*4882a593Smuzhiyun if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
891*4882a593Smuzhiyun args.v1.ucConfig |= ATOM_TRANSMITTER_CONFIG_8LANE_LINK;
892*4882a593Smuzhiyun }
893*4882a593Smuzhiyun break;
894*4882a593Smuzhiyun case 2:
895*4882a593Smuzhiyun args.v2.ucAction = action;
896*4882a593Smuzhiyun if (action == ATOM_TRANSMITTER_ACTION_INIT) {
897*4882a593Smuzhiyun args.v2.usInitInfo = cpu_to_le16(connector_object_id);
898*4882a593Smuzhiyun } else if (action == ATOM_TRANSMITTER_ACTION_SETUP_VSEMPH) {
899*4882a593Smuzhiyun args.v2.asMode.ucLaneSel = lane_num;
900*4882a593Smuzhiyun args.v2.asMode.ucLaneSet = lane_set;
901*4882a593Smuzhiyun } else {
902*4882a593Smuzhiyun if (is_dp)
903*4882a593Smuzhiyun args.v2.usPixelClock = cpu_to_le16(dp_clock / 10);
904*4882a593Smuzhiyun else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
905*4882a593Smuzhiyun args.v2.usPixelClock = cpu_to_le16((amdgpu_encoder->pixel_clock / 2) / 10);
906*4882a593Smuzhiyun else
907*4882a593Smuzhiyun args.v2.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
908*4882a593Smuzhiyun }
909*4882a593Smuzhiyun
910*4882a593Smuzhiyun args.v2.acConfig.ucEncoderSel = dig_encoder;
911*4882a593Smuzhiyun if (dig->linkb)
912*4882a593Smuzhiyun args.v2.acConfig.ucLinkSel = 1;
913*4882a593Smuzhiyun
914*4882a593Smuzhiyun switch (amdgpu_encoder->encoder_id) {
915*4882a593Smuzhiyun case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
916*4882a593Smuzhiyun args.v2.acConfig.ucTransmitterSel = 0;
917*4882a593Smuzhiyun break;
918*4882a593Smuzhiyun case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
919*4882a593Smuzhiyun args.v2.acConfig.ucTransmitterSel = 1;
920*4882a593Smuzhiyun break;
921*4882a593Smuzhiyun case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
922*4882a593Smuzhiyun args.v2.acConfig.ucTransmitterSel = 2;
923*4882a593Smuzhiyun break;
924*4882a593Smuzhiyun }
925*4882a593Smuzhiyun
926*4882a593Smuzhiyun if (is_dp) {
927*4882a593Smuzhiyun args.v2.acConfig.fCoherentMode = 1;
928*4882a593Smuzhiyun args.v2.acConfig.fDPConnector = 1;
929*4882a593Smuzhiyun } else if (amdgpu_encoder->devices & (ATOM_DEVICE_DFP_SUPPORT)) {
930*4882a593Smuzhiyun if (dig->coherent_mode)
931*4882a593Smuzhiyun args.v2.acConfig.fCoherentMode = 1;
932*4882a593Smuzhiyun if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
933*4882a593Smuzhiyun args.v2.acConfig.fDualLinkConnector = 1;
934*4882a593Smuzhiyun }
935*4882a593Smuzhiyun break;
936*4882a593Smuzhiyun case 3:
937*4882a593Smuzhiyun args.v3.ucAction = action;
938*4882a593Smuzhiyun if (action == ATOM_TRANSMITTER_ACTION_INIT) {
939*4882a593Smuzhiyun args.v3.usInitInfo = cpu_to_le16(connector_object_id);
940*4882a593Smuzhiyun } else if (action == ATOM_TRANSMITTER_ACTION_SETUP_VSEMPH) {
941*4882a593Smuzhiyun args.v3.asMode.ucLaneSel = lane_num;
942*4882a593Smuzhiyun args.v3.asMode.ucLaneSet = lane_set;
943*4882a593Smuzhiyun } else {
944*4882a593Smuzhiyun if (is_dp)
945*4882a593Smuzhiyun args.v3.usPixelClock = cpu_to_le16(dp_clock / 10);
946*4882a593Smuzhiyun else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
947*4882a593Smuzhiyun args.v3.usPixelClock = cpu_to_le16((amdgpu_encoder->pixel_clock / 2) / 10);
948*4882a593Smuzhiyun else
949*4882a593Smuzhiyun args.v3.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
950*4882a593Smuzhiyun }
951*4882a593Smuzhiyun
952*4882a593Smuzhiyun if (is_dp)
953*4882a593Smuzhiyun args.v3.ucLaneNum = dp_lane_count;
954*4882a593Smuzhiyun else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
955*4882a593Smuzhiyun args.v3.ucLaneNum = 8;
956*4882a593Smuzhiyun else
957*4882a593Smuzhiyun args.v3.ucLaneNum = 4;
958*4882a593Smuzhiyun
959*4882a593Smuzhiyun if (dig->linkb)
960*4882a593Smuzhiyun args.v3.acConfig.ucLinkSel = 1;
961*4882a593Smuzhiyun if (dig_encoder & 1)
962*4882a593Smuzhiyun args.v3.acConfig.ucEncoderSel = 1;
963*4882a593Smuzhiyun
964*4882a593Smuzhiyun /* Select the PLL for the PHY
965*4882a593Smuzhiyun * DP PHY should be clocked from external src if there is
966*4882a593Smuzhiyun * one.
967*4882a593Smuzhiyun */
968*4882a593Smuzhiyun /* On DCE4, if there is an external clock, it generates the DP ref clock */
969*4882a593Smuzhiyun if (is_dp && adev->clock.dp_extclk)
970*4882a593Smuzhiyun args.v3.acConfig.ucRefClkSource = 2; /* external src */
971*4882a593Smuzhiyun else
972*4882a593Smuzhiyun args.v3.acConfig.ucRefClkSource = pll_id;
973*4882a593Smuzhiyun
974*4882a593Smuzhiyun switch (amdgpu_encoder->encoder_id) {
975*4882a593Smuzhiyun case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
976*4882a593Smuzhiyun args.v3.acConfig.ucTransmitterSel = 0;
977*4882a593Smuzhiyun break;
978*4882a593Smuzhiyun case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
979*4882a593Smuzhiyun args.v3.acConfig.ucTransmitterSel = 1;
980*4882a593Smuzhiyun break;
981*4882a593Smuzhiyun case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
982*4882a593Smuzhiyun args.v3.acConfig.ucTransmitterSel = 2;
983*4882a593Smuzhiyun break;
984*4882a593Smuzhiyun }
985*4882a593Smuzhiyun
986*4882a593Smuzhiyun if (is_dp)
987*4882a593Smuzhiyun args.v3.acConfig.fCoherentMode = 1; /* DP requires coherent */
988*4882a593Smuzhiyun else if (amdgpu_encoder->devices & (ATOM_DEVICE_DFP_SUPPORT)) {
989*4882a593Smuzhiyun if (dig->coherent_mode)
990*4882a593Smuzhiyun args.v3.acConfig.fCoherentMode = 1;
991*4882a593Smuzhiyun if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
992*4882a593Smuzhiyun args.v3.acConfig.fDualLinkConnector = 1;
993*4882a593Smuzhiyun }
994*4882a593Smuzhiyun break;
995*4882a593Smuzhiyun case 4:
996*4882a593Smuzhiyun args.v4.ucAction = action;
997*4882a593Smuzhiyun if (action == ATOM_TRANSMITTER_ACTION_INIT) {
998*4882a593Smuzhiyun args.v4.usInitInfo = cpu_to_le16(connector_object_id);
999*4882a593Smuzhiyun } else if (action == ATOM_TRANSMITTER_ACTION_SETUP_VSEMPH) {
1000*4882a593Smuzhiyun args.v4.asMode.ucLaneSel = lane_num;
1001*4882a593Smuzhiyun args.v4.asMode.ucLaneSet = lane_set;
1002*4882a593Smuzhiyun } else {
1003*4882a593Smuzhiyun if (is_dp)
1004*4882a593Smuzhiyun args.v4.usPixelClock = cpu_to_le16(dp_clock / 10);
1005*4882a593Smuzhiyun else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
1006*4882a593Smuzhiyun args.v4.usPixelClock = cpu_to_le16((amdgpu_encoder->pixel_clock / 2) / 10);
1007*4882a593Smuzhiyun else
1008*4882a593Smuzhiyun args.v4.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
1009*4882a593Smuzhiyun }
1010*4882a593Smuzhiyun
1011*4882a593Smuzhiyun if (is_dp)
1012*4882a593Smuzhiyun args.v4.ucLaneNum = dp_lane_count;
1013*4882a593Smuzhiyun else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
1014*4882a593Smuzhiyun args.v4.ucLaneNum = 8;
1015*4882a593Smuzhiyun else
1016*4882a593Smuzhiyun args.v4.ucLaneNum = 4;
1017*4882a593Smuzhiyun
1018*4882a593Smuzhiyun if (dig->linkb)
1019*4882a593Smuzhiyun args.v4.acConfig.ucLinkSel = 1;
1020*4882a593Smuzhiyun if (dig_encoder & 1)
1021*4882a593Smuzhiyun args.v4.acConfig.ucEncoderSel = 1;
1022*4882a593Smuzhiyun
1023*4882a593Smuzhiyun /* Select the PLL for the PHY
1024*4882a593Smuzhiyun * DP PHY should be clocked from external src if there is
1025*4882a593Smuzhiyun * one.
1026*4882a593Smuzhiyun */
1027*4882a593Smuzhiyun /* On DCE5 DCPLL usually generates the DP ref clock */
1028*4882a593Smuzhiyun if (is_dp) {
1029*4882a593Smuzhiyun if (adev->clock.dp_extclk)
1030*4882a593Smuzhiyun args.v4.acConfig.ucRefClkSource = ENCODER_REFCLK_SRC_EXTCLK;
1031*4882a593Smuzhiyun else
1032*4882a593Smuzhiyun args.v4.acConfig.ucRefClkSource = ENCODER_REFCLK_SRC_DCPLL;
1033*4882a593Smuzhiyun } else
1034*4882a593Smuzhiyun args.v4.acConfig.ucRefClkSource = pll_id;
1035*4882a593Smuzhiyun
1036*4882a593Smuzhiyun switch (amdgpu_encoder->encoder_id) {
1037*4882a593Smuzhiyun case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
1038*4882a593Smuzhiyun args.v4.acConfig.ucTransmitterSel = 0;
1039*4882a593Smuzhiyun break;
1040*4882a593Smuzhiyun case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
1041*4882a593Smuzhiyun args.v4.acConfig.ucTransmitterSel = 1;
1042*4882a593Smuzhiyun break;
1043*4882a593Smuzhiyun case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
1044*4882a593Smuzhiyun args.v4.acConfig.ucTransmitterSel = 2;
1045*4882a593Smuzhiyun break;
1046*4882a593Smuzhiyun }
1047*4882a593Smuzhiyun
1048*4882a593Smuzhiyun if (is_dp)
1049*4882a593Smuzhiyun args.v4.acConfig.fCoherentMode = 1; /* DP requires coherent */
1050*4882a593Smuzhiyun else if (amdgpu_encoder->devices & (ATOM_DEVICE_DFP_SUPPORT)) {
1051*4882a593Smuzhiyun if (dig->coherent_mode)
1052*4882a593Smuzhiyun args.v4.acConfig.fCoherentMode = 1;
1053*4882a593Smuzhiyun if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
1054*4882a593Smuzhiyun args.v4.acConfig.fDualLinkConnector = 1;
1055*4882a593Smuzhiyun }
1056*4882a593Smuzhiyun break;
1057*4882a593Smuzhiyun case 5:
1058*4882a593Smuzhiyun args.v5.ucAction = action;
1059*4882a593Smuzhiyun if (is_dp)
1060*4882a593Smuzhiyun args.v5.usSymClock = cpu_to_le16(dp_clock / 10);
1061*4882a593Smuzhiyun else
1062*4882a593Smuzhiyun args.v5.usSymClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
1063*4882a593Smuzhiyun
1064*4882a593Smuzhiyun switch (amdgpu_encoder->encoder_id) {
1065*4882a593Smuzhiyun case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
1066*4882a593Smuzhiyun if (dig->linkb)
1067*4882a593Smuzhiyun args.v5.ucPhyId = ATOM_PHY_ID_UNIPHYB;
1068*4882a593Smuzhiyun else
1069*4882a593Smuzhiyun args.v5.ucPhyId = ATOM_PHY_ID_UNIPHYA;
1070*4882a593Smuzhiyun break;
1071*4882a593Smuzhiyun case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
1072*4882a593Smuzhiyun if (dig->linkb)
1073*4882a593Smuzhiyun args.v5.ucPhyId = ATOM_PHY_ID_UNIPHYD;
1074*4882a593Smuzhiyun else
1075*4882a593Smuzhiyun args.v5.ucPhyId = ATOM_PHY_ID_UNIPHYC;
1076*4882a593Smuzhiyun break;
1077*4882a593Smuzhiyun case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
1078*4882a593Smuzhiyun if (dig->linkb)
1079*4882a593Smuzhiyun args.v5.ucPhyId = ATOM_PHY_ID_UNIPHYF;
1080*4882a593Smuzhiyun else
1081*4882a593Smuzhiyun args.v5.ucPhyId = ATOM_PHY_ID_UNIPHYE;
1082*4882a593Smuzhiyun break;
1083*4882a593Smuzhiyun case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3:
1084*4882a593Smuzhiyun args.v5.ucPhyId = ATOM_PHY_ID_UNIPHYG;
1085*4882a593Smuzhiyun break;
1086*4882a593Smuzhiyun }
1087*4882a593Smuzhiyun if (is_dp)
1088*4882a593Smuzhiyun args.v5.ucLaneNum = dp_lane_count;
1089*4882a593Smuzhiyun else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
1090*4882a593Smuzhiyun args.v5.ucLaneNum = 8;
1091*4882a593Smuzhiyun else
1092*4882a593Smuzhiyun args.v5.ucLaneNum = 4;
1093*4882a593Smuzhiyun args.v5.ucConnObjId = connector_object_id;
1094*4882a593Smuzhiyun args.v5.ucDigMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);
1095*4882a593Smuzhiyun
1096*4882a593Smuzhiyun if (is_dp && adev->clock.dp_extclk)
1097*4882a593Smuzhiyun args.v5.asConfig.ucPhyClkSrcId = ENCODER_REFCLK_SRC_EXTCLK;
1098*4882a593Smuzhiyun else
1099*4882a593Smuzhiyun args.v5.asConfig.ucPhyClkSrcId = pll_id;
1100*4882a593Smuzhiyun
1101*4882a593Smuzhiyun if (is_dp)
1102*4882a593Smuzhiyun args.v5.asConfig.ucCoherentMode = 1; /* DP requires coherent */
1103*4882a593Smuzhiyun else if (amdgpu_encoder->devices & (ATOM_DEVICE_DFP_SUPPORT)) {
1104*4882a593Smuzhiyun if (dig->coherent_mode)
1105*4882a593Smuzhiyun args.v5.asConfig.ucCoherentMode = 1;
1106*4882a593Smuzhiyun }
1107*4882a593Smuzhiyun if (hpd_id == AMDGPU_HPD_NONE)
1108*4882a593Smuzhiyun args.v5.asConfig.ucHPDSel = 0;
1109*4882a593Smuzhiyun else
1110*4882a593Smuzhiyun args.v5.asConfig.ucHPDSel = hpd_id + 1;
1111*4882a593Smuzhiyun args.v5.ucDigEncoderSel = 1 << dig_encoder;
1112*4882a593Smuzhiyun args.v5.ucDPLaneSet = lane_set;
1113*4882a593Smuzhiyun break;
1114*4882a593Smuzhiyun case 6:
1115*4882a593Smuzhiyun args.v6.ucAction = action;
1116*4882a593Smuzhiyun if (is_dp)
1117*4882a593Smuzhiyun args.v6.ulSymClock = cpu_to_le32(dp_clock / 10);
1118*4882a593Smuzhiyun else
1119*4882a593Smuzhiyun args.v6.ulSymClock = cpu_to_le32(amdgpu_encoder->pixel_clock / 10);
1120*4882a593Smuzhiyun
1121*4882a593Smuzhiyun switch (amdgpu_encoder->encoder_id) {
1122*4882a593Smuzhiyun case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
1123*4882a593Smuzhiyun if (dig->linkb)
1124*4882a593Smuzhiyun args.v6.ucPhyId = ATOM_PHY_ID_UNIPHYB;
1125*4882a593Smuzhiyun else
1126*4882a593Smuzhiyun args.v6.ucPhyId = ATOM_PHY_ID_UNIPHYA;
1127*4882a593Smuzhiyun break;
1128*4882a593Smuzhiyun case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
1129*4882a593Smuzhiyun if (dig->linkb)
1130*4882a593Smuzhiyun args.v6.ucPhyId = ATOM_PHY_ID_UNIPHYD;
1131*4882a593Smuzhiyun else
1132*4882a593Smuzhiyun args.v6.ucPhyId = ATOM_PHY_ID_UNIPHYC;
1133*4882a593Smuzhiyun break;
1134*4882a593Smuzhiyun case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
1135*4882a593Smuzhiyun if (dig->linkb)
1136*4882a593Smuzhiyun args.v6.ucPhyId = ATOM_PHY_ID_UNIPHYF;
1137*4882a593Smuzhiyun else
1138*4882a593Smuzhiyun args.v6.ucPhyId = ATOM_PHY_ID_UNIPHYE;
1139*4882a593Smuzhiyun break;
1140*4882a593Smuzhiyun case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3:
1141*4882a593Smuzhiyun args.v6.ucPhyId = ATOM_PHY_ID_UNIPHYG;
1142*4882a593Smuzhiyun break;
1143*4882a593Smuzhiyun }
1144*4882a593Smuzhiyun if (is_dp)
1145*4882a593Smuzhiyun args.v6.ucLaneNum = dp_lane_count;
1146*4882a593Smuzhiyun else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
1147*4882a593Smuzhiyun args.v6.ucLaneNum = 8;
1148*4882a593Smuzhiyun else
1149*4882a593Smuzhiyun args.v6.ucLaneNum = 4;
1150*4882a593Smuzhiyun args.v6.ucConnObjId = connector_object_id;
1151*4882a593Smuzhiyun if (action == ATOM_TRANSMITTER_ACTION_SETUP_VSEMPH)
1152*4882a593Smuzhiyun args.v6.ucDPLaneSet = lane_set;
1153*4882a593Smuzhiyun else
1154*4882a593Smuzhiyun args.v6.ucDigMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);
1155*4882a593Smuzhiyun
1156*4882a593Smuzhiyun if (hpd_id == AMDGPU_HPD_NONE)
1157*4882a593Smuzhiyun args.v6.ucHPDSel = 0;
1158*4882a593Smuzhiyun else
1159*4882a593Smuzhiyun args.v6.ucHPDSel = hpd_id + 1;
1160*4882a593Smuzhiyun args.v6.ucDigEncoderSel = 1 << dig_encoder;
1161*4882a593Smuzhiyun break;
1162*4882a593Smuzhiyun default:
1163*4882a593Smuzhiyun DRM_ERROR("Unknown table version %d, %d\n", frev, crev);
1164*4882a593Smuzhiyun break;
1165*4882a593Smuzhiyun }
1166*4882a593Smuzhiyun break;
1167*4882a593Smuzhiyun default:
1168*4882a593Smuzhiyun DRM_ERROR("Unknown table version %d, %d\n", frev, crev);
1169*4882a593Smuzhiyun break;
1170*4882a593Smuzhiyun }
1171*4882a593Smuzhiyun
1172*4882a593Smuzhiyun amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args);
1173*4882a593Smuzhiyun }
1174*4882a593Smuzhiyun
1175*4882a593Smuzhiyun bool
amdgpu_atombios_encoder_set_edp_panel_power(struct drm_connector * connector,int action)1176*4882a593Smuzhiyun amdgpu_atombios_encoder_set_edp_panel_power(struct drm_connector *connector,
1177*4882a593Smuzhiyun int action)
1178*4882a593Smuzhiyun {
1179*4882a593Smuzhiyun struct amdgpu_connector *amdgpu_connector = to_amdgpu_connector(connector);
1180*4882a593Smuzhiyun struct drm_device *dev = amdgpu_connector->base.dev;
1181*4882a593Smuzhiyun struct amdgpu_device *adev = drm_to_adev(dev);
1182*4882a593Smuzhiyun union dig_transmitter_control args;
1183*4882a593Smuzhiyun int index = GetIndexIntoMasterTable(COMMAND, UNIPHYTransmitterControl);
1184*4882a593Smuzhiyun uint8_t frev, crev;
1185*4882a593Smuzhiyun
1186*4882a593Smuzhiyun if (connector->connector_type != DRM_MODE_CONNECTOR_eDP)
1187*4882a593Smuzhiyun goto done;
1188*4882a593Smuzhiyun
1189*4882a593Smuzhiyun if ((action != ATOM_TRANSMITTER_ACTION_POWER_ON) &&
1190*4882a593Smuzhiyun (action != ATOM_TRANSMITTER_ACTION_POWER_OFF))
1191*4882a593Smuzhiyun goto done;
1192*4882a593Smuzhiyun
1193*4882a593Smuzhiyun if (!amdgpu_atom_parse_cmd_header(adev->mode_info.atom_context, index, &frev, &crev))
1194*4882a593Smuzhiyun goto done;
1195*4882a593Smuzhiyun
1196*4882a593Smuzhiyun memset(&args, 0, sizeof(args));
1197*4882a593Smuzhiyun
1198*4882a593Smuzhiyun args.v1.ucAction = action;
1199*4882a593Smuzhiyun
1200*4882a593Smuzhiyun amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args);
1201*4882a593Smuzhiyun
1202*4882a593Smuzhiyun /* wait for the panel to power up */
1203*4882a593Smuzhiyun if (action == ATOM_TRANSMITTER_ACTION_POWER_ON) {
1204*4882a593Smuzhiyun int i;
1205*4882a593Smuzhiyun
1206*4882a593Smuzhiyun for (i = 0; i < 300; i++) {
1207*4882a593Smuzhiyun if (amdgpu_display_hpd_sense(adev, amdgpu_connector->hpd.hpd))
1208*4882a593Smuzhiyun return true;
1209*4882a593Smuzhiyun mdelay(1);
1210*4882a593Smuzhiyun }
1211*4882a593Smuzhiyun return false;
1212*4882a593Smuzhiyun }
1213*4882a593Smuzhiyun done:
1214*4882a593Smuzhiyun return true;
1215*4882a593Smuzhiyun }
1216*4882a593Smuzhiyun
/* Parameter-space layouts for the ExternalEncoderControl atom command table
 * (member selected by the frev/crev reported by the VBIOS).
 */
union external_encoder_control {
	EXTERNAL_ENCODER_CONTROL_PS_ALLOCATION v1;
	EXTERNAL_ENCODER_CONTROL_PS_ALLOCATION_V3 v3;
};
1221*4882a593Smuzhiyun
1222*4882a593Smuzhiyun static void
amdgpu_atombios_encoder_setup_external_encoder(struct drm_encoder * encoder,struct drm_encoder * ext_encoder,int action)1223*4882a593Smuzhiyun amdgpu_atombios_encoder_setup_external_encoder(struct drm_encoder *encoder,
1224*4882a593Smuzhiyun struct drm_encoder *ext_encoder,
1225*4882a593Smuzhiyun int action)
1226*4882a593Smuzhiyun {
1227*4882a593Smuzhiyun struct drm_device *dev = encoder->dev;
1228*4882a593Smuzhiyun struct amdgpu_device *adev = drm_to_adev(dev);
1229*4882a593Smuzhiyun struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
1230*4882a593Smuzhiyun struct amdgpu_encoder *ext_amdgpu_encoder = to_amdgpu_encoder(ext_encoder);
1231*4882a593Smuzhiyun union external_encoder_control args;
1232*4882a593Smuzhiyun struct drm_connector *connector;
1233*4882a593Smuzhiyun int index = GetIndexIntoMasterTable(COMMAND, ExternalEncoderControl);
1234*4882a593Smuzhiyun u8 frev, crev;
1235*4882a593Smuzhiyun int dp_clock = 0;
1236*4882a593Smuzhiyun int dp_lane_count = 0;
1237*4882a593Smuzhiyun int connector_object_id = 0;
1238*4882a593Smuzhiyun u32 ext_enum = (ext_amdgpu_encoder->encoder_enum & ENUM_ID_MASK) >> ENUM_ID_SHIFT;
1239*4882a593Smuzhiyun
1240*4882a593Smuzhiyun if (action == EXTERNAL_ENCODER_ACTION_V3_ENCODER_INIT)
1241*4882a593Smuzhiyun connector = amdgpu_get_connector_for_encoder_init(encoder);
1242*4882a593Smuzhiyun else
1243*4882a593Smuzhiyun connector = amdgpu_get_connector_for_encoder(encoder);
1244*4882a593Smuzhiyun
1245*4882a593Smuzhiyun if (connector) {
1246*4882a593Smuzhiyun struct amdgpu_connector *amdgpu_connector = to_amdgpu_connector(connector);
1247*4882a593Smuzhiyun struct amdgpu_connector_atom_dig *dig_connector =
1248*4882a593Smuzhiyun amdgpu_connector->con_priv;
1249*4882a593Smuzhiyun
1250*4882a593Smuzhiyun dp_clock = dig_connector->dp_clock;
1251*4882a593Smuzhiyun dp_lane_count = dig_connector->dp_lane_count;
1252*4882a593Smuzhiyun connector_object_id =
1253*4882a593Smuzhiyun (amdgpu_connector->connector_object_id & OBJECT_ID_MASK) >> OBJECT_ID_SHIFT;
1254*4882a593Smuzhiyun }
1255*4882a593Smuzhiyun
1256*4882a593Smuzhiyun memset(&args, 0, sizeof(args));
1257*4882a593Smuzhiyun
1258*4882a593Smuzhiyun if (!amdgpu_atom_parse_cmd_header(adev->mode_info.atom_context, index, &frev, &crev))
1259*4882a593Smuzhiyun return;
1260*4882a593Smuzhiyun
1261*4882a593Smuzhiyun switch (frev) {
1262*4882a593Smuzhiyun case 1:
1263*4882a593Smuzhiyun /* no params on frev 1 */
1264*4882a593Smuzhiyun break;
1265*4882a593Smuzhiyun case 2:
1266*4882a593Smuzhiyun switch (crev) {
1267*4882a593Smuzhiyun case 1:
1268*4882a593Smuzhiyun case 2:
1269*4882a593Smuzhiyun args.v1.sDigEncoder.ucAction = action;
1270*4882a593Smuzhiyun args.v1.sDigEncoder.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
1271*4882a593Smuzhiyun args.v1.sDigEncoder.ucEncoderMode =
1272*4882a593Smuzhiyun amdgpu_atombios_encoder_get_encoder_mode(encoder);
1273*4882a593Smuzhiyun
1274*4882a593Smuzhiyun if (ENCODER_MODE_IS_DP(args.v1.sDigEncoder.ucEncoderMode)) {
1275*4882a593Smuzhiyun if (dp_clock == 270000)
1276*4882a593Smuzhiyun args.v1.sDigEncoder.ucConfig |= ATOM_ENCODER_CONFIG_DPLINKRATE_2_70GHZ;
1277*4882a593Smuzhiyun args.v1.sDigEncoder.ucLaneNum = dp_lane_count;
1278*4882a593Smuzhiyun } else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
1279*4882a593Smuzhiyun args.v1.sDigEncoder.ucLaneNum = 8;
1280*4882a593Smuzhiyun else
1281*4882a593Smuzhiyun args.v1.sDigEncoder.ucLaneNum = 4;
1282*4882a593Smuzhiyun break;
1283*4882a593Smuzhiyun case 3:
1284*4882a593Smuzhiyun args.v3.sExtEncoder.ucAction = action;
1285*4882a593Smuzhiyun if (action == EXTERNAL_ENCODER_ACTION_V3_ENCODER_INIT)
1286*4882a593Smuzhiyun args.v3.sExtEncoder.usConnectorId = cpu_to_le16(connector_object_id);
1287*4882a593Smuzhiyun else
1288*4882a593Smuzhiyun args.v3.sExtEncoder.usPixelClock = cpu_to_le16(amdgpu_encoder->pixel_clock / 10);
1289*4882a593Smuzhiyun args.v3.sExtEncoder.ucEncoderMode =
1290*4882a593Smuzhiyun amdgpu_atombios_encoder_get_encoder_mode(encoder);
1291*4882a593Smuzhiyun
1292*4882a593Smuzhiyun if (ENCODER_MODE_IS_DP(args.v3.sExtEncoder.ucEncoderMode)) {
1293*4882a593Smuzhiyun if (dp_clock == 270000)
1294*4882a593Smuzhiyun args.v3.sExtEncoder.ucConfig |= EXTERNAL_ENCODER_CONFIG_V3_DPLINKRATE_2_70GHZ;
1295*4882a593Smuzhiyun else if (dp_clock == 540000)
1296*4882a593Smuzhiyun args.v3.sExtEncoder.ucConfig |= EXTERNAL_ENCODER_CONFIG_V3_DPLINKRATE_5_40GHZ;
1297*4882a593Smuzhiyun args.v3.sExtEncoder.ucLaneNum = dp_lane_count;
1298*4882a593Smuzhiyun } else if (amdgpu_dig_monitor_is_duallink(encoder, amdgpu_encoder->pixel_clock))
1299*4882a593Smuzhiyun args.v3.sExtEncoder.ucLaneNum = 8;
1300*4882a593Smuzhiyun else
1301*4882a593Smuzhiyun args.v3.sExtEncoder.ucLaneNum = 4;
1302*4882a593Smuzhiyun switch (ext_enum) {
1303*4882a593Smuzhiyun case GRAPH_OBJECT_ENUM_ID1:
1304*4882a593Smuzhiyun args.v3.sExtEncoder.ucConfig |= EXTERNAL_ENCODER_CONFIG_V3_ENCODER1;
1305*4882a593Smuzhiyun break;
1306*4882a593Smuzhiyun case GRAPH_OBJECT_ENUM_ID2:
1307*4882a593Smuzhiyun args.v3.sExtEncoder.ucConfig |= EXTERNAL_ENCODER_CONFIG_V3_ENCODER2;
1308*4882a593Smuzhiyun break;
1309*4882a593Smuzhiyun case GRAPH_OBJECT_ENUM_ID3:
1310*4882a593Smuzhiyun args.v3.sExtEncoder.ucConfig |= EXTERNAL_ENCODER_CONFIG_V3_ENCODER3;
1311*4882a593Smuzhiyun break;
1312*4882a593Smuzhiyun }
1313*4882a593Smuzhiyun args.v3.sExtEncoder.ucBitPerColor = amdgpu_atombios_encoder_get_bpc(encoder);
1314*4882a593Smuzhiyun break;
1315*4882a593Smuzhiyun default:
1316*4882a593Smuzhiyun DRM_ERROR("Unknown table version: %d, %d\n", frev, crev);
1317*4882a593Smuzhiyun return;
1318*4882a593Smuzhiyun }
1319*4882a593Smuzhiyun break;
1320*4882a593Smuzhiyun default:
1321*4882a593Smuzhiyun DRM_ERROR("Unknown table version: %d, %d\n", frev, crev);
1322*4882a593Smuzhiyun return;
1323*4882a593Smuzhiyun }
1324*4882a593Smuzhiyun amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args);
1325*4882a593Smuzhiyun }
1326*4882a593Smuzhiyun
/*
 * Full enable/disable sequence for a DIG encoder (and any attached external
 * encoder).  The statement order below is the hardware power-sequencing
 * contract: encoder setup, eDP panel power, transmitter, DP link training /
 * video stream, backlight — and the reverse on disable.
 */
static void
amdgpu_atombios_encoder_setup_dig(struct drm_encoder *encoder, int action)
{
	struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
	struct drm_encoder *ext_encoder = amdgpu_get_external_encoder(encoder);
	struct amdgpu_encoder_atom_dig *dig = amdgpu_encoder->enc_priv;
	struct drm_connector *connector = amdgpu_get_connector_for_encoder(encoder);
	struct amdgpu_connector *amdgpu_connector = NULL;
	struct amdgpu_connector_atom_dig *amdgpu_dig_connector = NULL;

	if (connector) {
		amdgpu_connector = to_amdgpu_connector(connector);
		amdgpu_dig_connector = amdgpu_connector->con_priv;
	}

	if (action == ATOM_ENABLE) {
		/* No connector means a DP bridge; otherwise query the sink. */
		if (!connector)
			dig->panel_mode = DP_PANEL_MODE_EXTERNAL_DP_MODE;
		else
			dig->panel_mode = amdgpu_atombios_dp_get_panel_mode(encoder, connector);

		/* setup and enable the encoder */
		amdgpu_atombios_encoder_setup_dig_encoder(encoder, ATOM_ENCODER_CMD_SETUP, 0);
		amdgpu_atombios_encoder_setup_dig_encoder(encoder,
							  ATOM_ENCODER_CMD_SETUP_PANEL_MODE,
							  dig->panel_mode);
		if (ext_encoder)
			amdgpu_atombios_encoder_setup_external_encoder(encoder, ext_encoder,
								       EXTERNAL_ENCODER_ACTION_V3_ENCODER_SETUP);
		/* eDP panels must be powered before the transmitter is enabled. */
		if (ENCODER_MODE_IS_DP(amdgpu_atombios_encoder_get_encoder_mode(encoder)) &&
		    connector) {
			if (connector->connector_type == DRM_MODE_CONNECTOR_eDP) {
				amdgpu_atombios_encoder_set_edp_panel_power(connector,
									    ATOM_TRANSMITTER_ACTION_POWER_ON);
				amdgpu_dig_connector->edp_on = true;
			}
		}
		/* enable the transmitter */
		amdgpu_atombios_encoder_setup_dig_transmitter(encoder,
							      ATOM_TRANSMITTER_ACTION_ENABLE,
							      0, 0);
		if (ENCODER_MODE_IS_DP(amdgpu_atombios_encoder_get_encoder_mode(encoder)) &&
		    connector) {
			/* DP_SET_POWER_D0 is set in amdgpu_atombios_dp_link_train */
			amdgpu_atombios_dp_link_train(encoder, connector);
			amdgpu_atombios_encoder_setup_dig_encoder(encoder, ATOM_ENCODER_CMD_DP_VIDEO_ON, 0);
		}
		/* Restore the cached backlight level for panels. */
		if (amdgpu_encoder->devices & (ATOM_DEVICE_LCD_SUPPORT))
			amdgpu_atombios_encoder_set_backlight_level(amdgpu_encoder, dig->backlight_level);
		if (ext_encoder)
			amdgpu_atombios_encoder_setup_external_encoder(encoder, ext_encoder, ATOM_ENABLE);
	} else {
		/* Stop the DP video stream before tearing anything down. */
		if (ENCODER_MODE_IS_DP(amdgpu_atombios_encoder_get_encoder_mode(encoder)) &&
		    connector)
			amdgpu_atombios_encoder_setup_dig_encoder(encoder,
								  ATOM_ENCODER_CMD_DP_VIDEO_OFF, 0);
		if (ext_encoder)
			amdgpu_atombios_encoder_setup_external_encoder(encoder, ext_encoder, ATOM_DISABLE);
		/* Kill the panel backlight before cutting the transmitter. */
		if (amdgpu_encoder->devices & (ATOM_DEVICE_LCD_SUPPORT))
			amdgpu_atombios_encoder_setup_dig_transmitter(encoder,
								      ATOM_TRANSMITTER_ACTION_LCD_BLOFF, 0, 0);

		/* Put the DP receiver into its low-power state. */
		if (ENCODER_MODE_IS_DP(amdgpu_atombios_encoder_get_encoder_mode(encoder)) &&
		    connector)
			amdgpu_atombios_dp_set_rx_power_state(connector, DP_SET_POWER_D3);
		/* disable the transmitter */
		amdgpu_atombios_encoder_setup_dig_transmitter(encoder,
							      ATOM_TRANSMITTER_ACTION_DISABLE, 0, 0);
		if (ENCODER_MODE_IS_DP(amdgpu_atombios_encoder_get_encoder_mode(encoder)) &&
		    connector) {
			if (connector->connector_type == DRM_MODE_CONNECTOR_eDP) {
				amdgpu_atombios_encoder_set_edp_panel_power(connector,
									    ATOM_TRANSMITTER_ACTION_POWER_OFF);
				amdgpu_dig_connector->edp_on = false;
			}
		}
	}
}
1405*4882a593Smuzhiyun
1406*4882a593Smuzhiyun void
amdgpu_atombios_encoder_dpms(struct drm_encoder * encoder,int mode)1407*4882a593Smuzhiyun amdgpu_atombios_encoder_dpms(struct drm_encoder *encoder, int mode)
1408*4882a593Smuzhiyun {
1409*4882a593Smuzhiyun struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
1410*4882a593Smuzhiyun
1411*4882a593Smuzhiyun DRM_DEBUG_KMS("encoder dpms %d to mode %d, devices %08x, active_devices %08x\n",
1412*4882a593Smuzhiyun amdgpu_encoder->encoder_id, mode, amdgpu_encoder->devices,
1413*4882a593Smuzhiyun amdgpu_encoder->active_device);
1414*4882a593Smuzhiyun switch (amdgpu_encoder->encoder_id) {
1415*4882a593Smuzhiyun case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
1416*4882a593Smuzhiyun case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
1417*4882a593Smuzhiyun case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
1418*4882a593Smuzhiyun case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3:
1419*4882a593Smuzhiyun switch (mode) {
1420*4882a593Smuzhiyun case DRM_MODE_DPMS_ON:
1421*4882a593Smuzhiyun amdgpu_atombios_encoder_setup_dig(encoder, ATOM_ENABLE);
1422*4882a593Smuzhiyun break;
1423*4882a593Smuzhiyun case DRM_MODE_DPMS_STANDBY:
1424*4882a593Smuzhiyun case DRM_MODE_DPMS_SUSPEND:
1425*4882a593Smuzhiyun case DRM_MODE_DPMS_OFF:
1426*4882a593Smuzhiyun amdgpu_atombios_encoder_setup_dig(encoder, ATOM_DISABLE);
1427*4882a593Smuzhiyun break;
1428*4882a593Smuzhiyun }
1429*4882a593Smuzhiyun break;
1430*4882a593Smuzhiyun case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1:
1431*4882a593Smuzhiyun switch (mode) {
1432*4882a593Smuzhiyun case DRM_MODE_DPMS_ON:
1433*4882a593Smuzhiyun amdgpu_atombios_encoder_setup_dvo(encoder, ATOM_ENABLE);
1434*4882a593Smuzhiyun break;
1435*4882a593Smuzhiyun case DRM_MODE_DPMS_STANDBY:
1436*4882a593Smuzhiyun case DRM_MODE_DPMS_SUSPEND:
1437*4882a593Smuzhiyun case DRM_MODE_DPMS_OFF:
1438*4882a593Smuzhiyun amdgpu_atombios_encoder_setup_dvo(encoder, ATOM_DISABLE);
1439*4882a593Smuzhiyun break;
1440*4882a593Smuzhiyun }
1441*4882a593Smuzhiyun break;
1442*4882a593Smuzhiyun case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC1:
1443*4882a593Smuzhiyun switch (mode) {
1444*4882a593Smuzhiyun case DRM_MODE_DPMS_ON:
1445*4882a593Smuzhiyun amdgpu_atombios_encoder_setup_dac(encoder, ATOM_ENABLE);
1446*4882a593Smuzhiyun break;
1447*4882a593Smuzhiyun case DRM_MODE_DPMS_STANDBY:
1448*4882a593Smuzhiyun case DRM_MODE_DPMS_SUSPEND:
1449*4882a593Smuzhiyun case DRM_MODE_DPMS_OFF:
1450*4882a593Smuzhiyun amdgpu_atombios_encoder_setup_dac(encoder, ATOM_DISABLE);
1451*4882a593Smuzhiyun break;
1452*4882a593Smuzhiyun }
1453*4882a593Smuzhiyun break;
1454*4882a593Smuzhiyun default:
1455*4882a593Smuzhiyun return;
1456*4882a593Smuzhiyun }
1457*4882a593Smuzhiyun }
1458*4882a593Smuzhiyun
/* Parameter-space layouts for the SelectCRTC_Source atom command table
 * (member selected by the frev/crev reported by the VBIOS).
 */
union crtc_source_param {
	SELECT_CRTC_SOURCE_PS_ALLOCATION v1;
	SELECT_CRTC_SOURCE_PARAMETERS_V2 v2;
	SELECT_CRTC_SOURCE_PARAMETERS_V3 v3;
};
1464*4882a593Smuzhiyun
1465*4882a593Smuzhiyun void
amdgpu_atombios_encoder_set_crtc_source(struct drm_encoder * encoder)1466*4882a593Smuzhiyun amdgpu_atombios_encoder_set_crtc_source(struct drm_encoder *encoder)
1467*4882a593Smuzhiyun {
1468*4882a593Smuzhiyun struct drm_device *dev = encoder->dev;
1469*4882a593Smuzhiyun struct amdgpu_device *adev = drm_to_adev(dev);
1470*4882a593Smuzhiyun struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
1471*4882a593Smuzhiyun struct amdgpu_crtc *amdgpu_crtc = to_amdgpu_crtc(encoder->crtc);
1472*4882a593Smuzhiyun union crtc_source_param args;
1473*4882a593Smuzhiyun int index = GetIndexIntoMasterTable(COMMAND, SelectCRTC_Source);
1474*4882a593Smuzhiyun uint8_t frev, crev;
1475*4882a593Smuzhiyun struct amdgpu_encoder_atom_dig *dig;
1476*4882a593Smuzhiyun
1477*4882a593Smuzhiyun memset(&args, 0, sizeof(args));
1478*4882a593Smuzhiyun
1479*4882a593Smuzhiyun if (!amdgpu_atom_parse_cmd_header(adev->mode_info.atom_context, index, &frev, &crev))
1480*4882a593Smuzhiyun return;
1481*4882a593Smuzhiyun
1482*4882a593Smuzhiyun switch (frev) {
1483*4882a593Smuzhiyun case 1:
1484*4882a593Smuzhiyun switch (crev) {
1485*4882a593Smuzhiyun case 1:
1486*4882a593Smuzhiyun default:
1487*4882a593Smuzhiyun args.v1.ucCRTC = amdgpu_crtc->crtc_id;
1488*4882a593Smuzhiyun switch (amdgpu_encoder->encoder_id) {
1489*4882a593Smuzhiyun case ENCODER_OBJECT_ID_INTERNAL_TMDS1:
1490*4882a593Smuzhiyun case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_TMDS1:
1491*4882a593Smuzhiyun args.v1.ucDevice = ATOM_DEVICE_DFP1_INDEX;
1492*4882a593Smuzhiyun break;
1493*4882a593Smuzhiyun case ENCODER_OBJECT_ID_INTERNAL_LVDS:
1494*4882a593Smuzhiyun case ENCODER_OBJECT_ID_INTERNAL_LVTM1:
1495*4882a593Smuzhiyun if (amdgpu_encoder->devices & ATOM_DEVICE_LCD1_SUPPORT)
1496*4882a593Smuzhiyun args.v1.ucDevice = ATOM_DEVICE_LCD1_INDEX;
1497*4882a593Smuzhiyun else
1498*4882a593Smuzhiyun args.v1.ucDevice = ATOM_DEVICE_DFP3_INDEX;
1499*4882a593Smuzhiyun break;
1500*4882a593Smuzhiyun case ENCODER_OBJECT_ID_INTERNAL_DVO1:
1501*4882a593Smuzhiyun case ENCODER_OBJECT_ID_INTERNAL_DDI:
1502*4882a593Smuzhiyun case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1:
1503*4882a593Smuzhiyun args.v1.ucDevice = ATOM_DEVICE_DFP2_INDEX;
1504*4882a593Smuzhiyun break;
1505*4882a593Smuzhiyun case ENCODER_OBJECT_ID_INTERNAL_DAC1:
1506*4882a593Smuzhiyun case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC1:
1507*4882a593Smuzhiyun if (amdgpu_encoder->active_device & (ATOM_DEVICE_TV_SUPPORT))
1508*4882a593Smuzhiyun args.v1.ucDevice = ATOM_DEVICE_TV1_INDEX;
1509*4882a593Smuzhiyun else if (amdgpu_encoder->active_device & (ATOM_DEVICE_CV_SUPPORT))
1510*4882a593Smuzhiyun args.v1.ucDevice = ATOM_DEVICE_CV_INDEX;
1511*4882a593Smuzhiyun else
1512*4882a593Smuzhiyun args.v1.ucDevice = ATOM_DEVICE_CRT1_INDEX;
1513*4882a593Smuzhiyun break;
1514*4882a593Smuzhiyun case ENCODER_OBJECT_ID_INTERNAL_DAC2:
1515*4882a593Smuzhiyun case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC2:
1516*4882a593Smuzhiyun if (amdgpu_encoder->active_device & (ATOM_DEVICE_TV_SUPPORT))
1517*4882a593Smuzhiyun args.v1.ucDevice = ATOM_DEVICE_TV1_INDEX;
1518*4882a593Smuzhiyun else if (amdgpu_encoder->active_device & (ATOM_DEVICE_CV_SUPPORT))
1519*4882a593Smuzhiyun args.v1.ucDevice = ATOM_DEVICE_CV_INDEX;
1520*4882a593Smuzhiyun else
1521*4882a593Smuzhiyun args.v1.ucDevice = ATOM_DEVICE_CRT2_INDEX;
1522*4882a593Smuzhiyun break;
1523*4882a593Smuzhiyun }
1524*4882a593Smuzhiyun break;
1525*4882a593Smuzhiyun case 2:
1526*4882a593Smuzhiyun args.v2.ucCRTC = amdgpu_crtc->crtc_id;
1527*4882a593Smuzhiyun if (amdgpu_encoder_get_dp_bridge_encoder_id(encoder) != ENCODER_OBJECT_ID_NONE) {
1528*4882a593Smuzhiyun struct drm_connector *connector = amdgpu_get_connector_for_encoder(encoder);
1529*4882a593Smuzhiyun
1530*4882a593Smuzhiyun if (connector->connector_type == DRM_MODE_CONNECTOR_LVDS)
1531*4882a593Smuzhiyun args.v2.ucEncodeMode = ATOM_ENCODER_MODE_LVDS;
1532*4882a593Smuzhiyun else if (connector->connector_type == DRM_MODE_CONNECTOR_VGA)
1533*4882a593Smuzhiyun args.v2.ucEncodeMode = ATOM_ENCODER_MODE_CRT;
1534*4882a593Smuzhiyun else
1535*4882a593Smuzhiyun args.v2.ucEncodeMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);
1536*4882a593Smuzhiyun } else if (amdgpu_encoder->devices & (ATOM_DEVICE_LCD_SUPPORT)) {
1537*4882a593Smuzhiyun args.v2.ucEncodeMode = ATOM_ENCODER_MODE_LVDS;
1538*4882a593Smuzhiyun } else {
1539*4882a593Smuzhiyun args.v2.ucEncodeMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);
1540*4882a593Smuzhiyun }
1541*4882a593Smuzhiyun switch (amdgpu_encoder->encoder_id) {
1542*4882a593Smuzhiyun case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
1543*4882a593Smuzhiyun case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
1544*4882a593Smuzhiyun case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
1545*4882a593Smuzhiyun case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3:
1546*4882a593Smuzhiyun case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_LVTMA:
1547*4882a593Smuzhiyun dig = amdgpu_encoder->enc_priv;
1548*4882a593Smuzhiyun switch (dig->dig_encoder) {
1549*4882a593Smuzhiyun case 0:
1550*4882a593Smuzhiyun args.v2.ucEncoderID = ASIC_INT_DIG1_ENCODER_ID;
1551*4882a593Smuzhiyun break;
1552*4882a593Smuzhiyun case 1:
1553*4882a593Smuzhiyun args.v2.ucEncoderID = ASIC_INT_DIG2_ENCODER_ID;
1554*4882a593Smuzhiyun break;
1555*4882a593Smuzhiyun case 2:
1556*4882a593Smuzhiyun args.v2.ucEncoderID = ASIC_INT_DIG3_ENCODER_ID;
1557*4882a593Smuzhiyun break;
1558*4882a593Smuzhiyun case 3:
1559*4882a593Smuzhiyun args.v2.ucEncoderID = ASIC_INT_DIG4_ENCODER_ID;
1560*4882a593Smuzhiyun break;
1561*4882a593Smuzhiyun case 4:
1562*4882a593Smuzhiyun args.v2.ucEncoderID = ASIC_INT_DIG5_ENCODER_ID;
1563*4882a593Smuzhiyun break;
1564*4882a593Smuzhiyun case 5:
1565*4882a593Smuzhiyun args.v2.ucEncoderID = ASIC_INT_DIG6_ENCODER_ID;
1566*4882a593Smuzhiyun break;
1567*4882a593Smuzhiyun case 6:
1568*4882a593Smuzhiyun args.v2.ucEncoderID = ASIC_INT_DIG7_ENCODER_ID;
1569*4882a593Smuzhiyun break;
1570*4882a593Smuzhiyun }
1571*4882a593Smuzhiyun break;
1572*4882a593Smuzhiyun case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1:
1573*4882a593Smuzhiyun args.v2.ucEncoderID = ASIC_INT_DVO_ENCODER_ID;
1574*4882a593Smuzhiyun break;
1575*4882a593Smuzhiyun case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC1:
1576*4882a593Smuzhiyun if (amdgpu_encoder->active_device & (ATOM_DEVICE_TV_SUPPORT))
1577*4882a593Smuzhiyun args.v2.ucEncoderID = ASIC_INT_TV_ENCODER_ID;
1578*4882a593Smuzhiyun else if (amdgpu_encoder->active_device & (ATOM_DEVICE_CV_SUPPORT))
1579*4882a593Smuzhiyun args.v2.ucEncoderID = ASIC_INT_TV_ENCODER_ID;
1580*4882a593Smuzhiyun else
1581*4882a593Smuzhiyun args.v2.ucEncoderID = ASIC_INT_DAC1_ENCODER_ID;
1582*4882a593Smuzhiyun break;
1583*4882a593Smuzhiyun case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC2:
1584*4882a593Smuzhiyun if (amdgpu_encoder->active_device & (ATOM_DEVICE_TV_SUPPORT))
1585*4882a593Smuzhiyun args.v2.ucEncoderID = ASIC_INT_TV_ENCODER_ID;
1586*4882a593Smuzhiyun else if (amdgpu_encoder->active_device & (ATOM_DEVICE_CV_SUPPORT))
1587*4882a593Smuzhiyun args.v2.ucEncoderID = ASIC_INT_TV_ENCODER_ID;
1588*4882a593Smuzhiyun else
1589*4882a593Smuzhiyun args.v2.ucEncoderID = ASIC_INT_DAC2_ENCODER_ID;
1590*4882a593Smuzhiyun break;
1591*4882a593Smuzhiyun }
1592*4882a593Smuzhiyun break;
1593*4882a593Smuzhiyun case 3:
1594*4882a593Smuzhiyun args.v3.ucCRTC = amdgpu_crtc->crtc_id;
1595*4882a593Smuzhiyun if (amdgpu_encoder_get_dp_bridge_encoder_id(encoder) != ENCODER_OBJECT_ID_NONE) {
1596*4882a593Smuzhiyun struct drm_connector *connector = amdgpu_get_connector_for_encoder(encoder);
1597*4882a593Smuzhiyun
1598*4882a593Smuzhiyun if (connector->connector_type == DRM_MODE_CONNECTOR_LVDS)
1599*4882a593Smuzhiyun args.v2.ucEncodeMode = ATOM_ENCODER_MODE_LVDS;
1600*4882a593Smuzhiyun else if (connector->connector_type == DRM_MODE_CONNECTOR_VGA)
1601*4882a593Smuzhiyun args.v2.ucEncodeMode = ATOM_ENCODER_MODE_CRT;
1602*4882a593Smuzhiyun else
1603*4882a593Smuzhiyun args.v2.ucEncodeMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);
1604*4882a593Smuzhiyun } else if (amdgpu_encoder->devices & (ATOM_DEVICE_LCD_SUPPORT)) {
1605*4882a593Smuzhiyun args.v2.ucEncodeMode = ATOM_ENCODER_MODE_LVDS;
1606*4882a593Smuzhiyun } else {
1607*4882a593Smuzhiyun args.v2.ucEncodeMode = amdgpu_atombios_encoder_get_encoder_mode(encoder);
1608*4882a593Smuzhiyun }
1609*4882a593Smuzhiyun args.v3.ucDstBpc = amdgpu_atombios_encoder_get_bpc(encoder);
1610*4882a593Smuzhiyun switch (amdgpu_encoder->encoder_id) {
1611*4882a593Smuzhiyun case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
1612*4882a593Smuzhiyun case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
1613*4882a593Smuzhiyun case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
1614*4882a593Smuzhiyun case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3:
1615*4882a593Smuzhiyun case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_LVTMA:
1616*4882a593Smuzhiyun dig = amdgpu_encoder->enc_priv;
1617*4882a593Smuzhiyun switch (dig->dig_encoder) {
1618*4882a593Smuzhiyun case 0:
1619*4882a593Smuzhiyun args.v3.ucEncoderID = ASIC_INT_DIG1_ENCODER_ID;
1620*4882a593Smuzhiyun break;
1621*4882a593Smuzhiyun case 1:
1622*4882a593Smuzhiyun args.v3.ucEncoderID = ASIC_INT_DIG2_ENCODER_ID;
1623*4882a593Smuzhiyun break;
1624*4882a593Smuzhiyun case 2:
1625*4882a593Smuzhiyun args.v3.ucEncoderID = ASIC_INT_DIG3_ENCODER_ID;
1626*4882a593Smuzhiyun break;
1627*4882a593Smuzhiyun case 3:
1628*4882a593Smuzhiyun args.v3.ucEncoderID = ASIC_INT_DIG4_ENCODER_ID;
1629*4882a593Smuzhiyun break;
1630*4882a593Smuzhiyun case 4:
1631*4882a593Smuzhiyun args.v3.ucEncoderID = ASIC_INT_DIG5_ENCODER_ID;
1632*4882a593Smuzhiyun break;
1633*4882a593Smuzhiyun case 5:
1634*4882a593Smuzhiyun args.v3.ucEncoderID = ASIC_INT_DIG6_ENCODER_ID;
1635*4882a593Smuzhiyun break;
1636*4882a593Smuzhiyun case 6:
1637*4882a593Smuzhiyun args.v3.ucEncoderID = ASIC_INT_DIG7_ENCODER_ID;
1638*4882a593Smuzhiyun break;
1639*4882a593Smuzhiyun }
1640*4882a593Smuzhiyun break;
1641*4882a593Smuzhiyun case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1:
1642*4882a593Smuzhiyun args.v3.ucEncoderID = ASIC_INT_DVO_ENCODER_ID;
1643*4882a593Smuzhiyun break;
1644*4882a593Smuzhiyun case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC1:
1645*4882a593Smuzhiyun if (amdgpu_encoder->active_device & (ATOM_DEVICE_TV_SUPPORT))
1646*4882a593Smuzhiyun args.v3.ucEncoderID = ASIC_INT_TV_ENCODER_ID;
1647*4882a593Smuzhiyun else if (amdgpu_encoder->active_device & (ATOM_DEVICE_CV_SUPPORT))
1648*4882a593Smuzhiyun args.v3.ucEncoderID = ASIC_INT_TV_ENCODER_ID;
1649*4882a593Smuzhiyun else
1650*4882a593Smuzhiyun args.v3.ucEncoderID = ASIC_INT_DAC1_ENCODER_ID;
1651*4882a593Smuzhiyun break;
1652*4882a593Smuzhiyun case ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC2:
1653*4882a593Smuzhiyun if (amdgpu_encoder->active_device & (ATOM_DEVICE_TV_SUPPORT))
1654*4882a593Smuzhiyun args.v3.ucEncoderID = ASIC_INT_TV_ENCODER_ID;
1655*4882a593Smuzhiyun else if (amdgpu_encoder->active_device & (ATOM_DEVICE_CV_SUPPORT))
1656*4882a593Smuzhiyun args.v3.ucEncoderID = ASIC_INT_TV_ENCODER_ID;
1657*4882a593Smuzhiyun else
1658*4882a593Smuzhiyun args.v3.ucEncoderID = ASIC_INT_DAC2_ENCODER_ID;
1659*4882a593Smuzhiyun break;
1660*4882a593Smuzhiyun }
1661*4882a593Smuzhiyun break;
1662*4882a593Smuzhiyun }
1663*4882a593Smuzhiyun break;
1664*4882a593Smuzhiyun default:
1665*4882a593Smuzhiyun DRM_ERROR("Unknown table version: %d, %d\n", frev, crev);
1666*4882a593Smuzhiyun return;
1667*4882a593Smuzhiyun }
1668*4882a593Smuzhiyun
1669*4882a593Smuzhiyun amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args);
1670*4882a593Smuzhiyun }
1671*4882a593Smuzhiyun
1672*4882a593Smuzhiyun /* This only needs to be called once at startup */
1673*4882a593Smuzhiyun void
amdgpu_atombios_encoder_init_dig(struct amdgpu_device * adev)1674*4882a593Smuzhiyun amdgpu_atombios_encoder_init_dig(struct amdgpu_device *adev)
1675*4882a593Smuzhiyun {
1676*4882a593Smuzhiyun struct drm_device *dev = adev_to_drm(adev);
1677*4882a593Smuzhiyun struct drm_encoder *encoder;
1678*4882a593Smuzhiyun
1679*4882a593Smuzhiyun list_for_each_entry(encoder, &dev->mode_config.encoder_list, head) {
1680*4882a593Smuzhiyun struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
1681*4882a593Smuzhiyun struct drm_encoder *ext_encoder = amdgpu_get_external_encoder(encoder);
1682*4882a593Smuzhiyun
1683*4882a593Smuzhiyun switch (amdgpu_encoder->encoder_id) {
1684*4882a593Smuzhiyun case ENCODER_OBJECT_ID_INTERNAL_UNIPHY:
1685*4882a593Smuzhiyun case ENCODER_OBJECT_ID_INTERNAL_UNIPHY1:
1686*4882a593Smuzhiyun case ENCODER_OBJECT_ID_INTERNAL_UNIPHY2:
1687*4882a593Smuzhiyun case ENCODER_OBJECT_ID_INTERNAL_UNIPHY3:
1688*4882a593Smuzhiyun amdgpu_atombios_encoder_setup_dig_transmitter(encoder, ATOM_TRANSMITTER_ACTION_INIT,
1689*4882a593Smuzhiyun 0, 0);
1690*4882a593Smuzhiyun break;
1691*4882a593Smuzhiyun }
1692*4882a593Smuzhiyun
1693*4882a593Smuzhiyun if (ext_encoder)
1694*4882a593Smuzhiyun amdgpu_atombios_encoder_setup_external_encoder(encoder, ext_encoder,
1695*4882a593Smuzhiyun EXTERNAL_ENCODER_ACTION_V3_ENCODER_INIT);
1696*4882a593Smuzhiyun }
1697*4882a593Smuzhiyun }
1698*4882a593Smuzhiyun
1699*4882a593Smuzhiyun static bool
amdgpu_atombios_encoder_dac_load_detect(struct drm_encoder * encoder,struct drm_connector * connector)1700*4882a593Smuzhiyun amdgpu_atombios_encoder_dac_load_detect(struct drm_encoder *encoder,
1701*4882a593Smuzhiyun struct drm_connector *connector)
1702*4882a593Smuzhiyun {
1703*4882a593Smuzhiyun struct drm_device *dev = encoder->dev;
1704*4882a593Smuzhiyun struct amdgpu_device *adev = drm_to_adev(dev);
1705*4882a593Smuzhiyun struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
1706*4882a593Smuzhiyun struct amdgpu_connector *amdgpu_connector = to_amdgpu_connector(connector);
1707*4882a593Smuzhiyun
1708*4882a593Smuzhiyun if (amdgpu_encoder->devices & (ATOM_DEVICE_TV_SUPPORT |
1709*4882a593Smuzhiyun ATOM_DEVICE_CV_SUPPORT |
1710*4882a593Smuzhiyun ATOM_DEVICE_CRT_SUPPORT)) {
1711*4882a593Smuzhiyun DAC_LOAD_DETECTION_PS_ALLOCATION args;
1712*4882a593Smuzhiyun int index = GetIndexIntoMasterTable(COMMAND, DAC_LoadDetection);
1713*4882a593Smuzhiyun uint8_t frev, crev;
1714*4882a593Smuzhiyun
1715*4882a593Smuzhiyun memset(&args, 0, sizeof(args));
1716*4882a593Smuzhiyun
1717*4882a593Smuzhiyun if (!amdgpu_atom_parse_cmd_header(adev->mode_info.atom_context, index, &frev, &crev))
1718*4882a593Smuzhiyun return false;
1719*4882a593Smuzhiyun
1720*4882a593Smuzhiyun args.sDacload.ucMisc = 0;
1721*4882a593Smuzhiyun
1722*4882a593Smuzhiyun if ((amdgpu_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_DAC1) ||
1723*4882a593Smuzhiyun (amdgpu_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC1))
1724*4882a593Smuzhiyun args.sDacload.ucDacType = ATOM_DAC_A;
1725*4882a593Smuzhiyun else
1726*4882a593Smuzhiyun args.sDacload.ucDacType = ATOM_DAC_B;
1727*4882a593Smuzhiyun
1728*4882a593Smuzhiyun if (amdgpu_connector->devices & ATOM_DEVICE_CRT1_SUPPORT)
1729*4882a593Smuzhiyun args.sDacload.usDeviceID = cpu_to_le16(ATOM_DEVICE_CRT1_SUPPORT);
1730*4882a593Smuzhiyun else if (amdgpu_connector->devices & ATOM_DEVICE_CRT2_SUPPORT)
1731*4882a593Smuzhiyun args.sDacload.usDeviceID = cpu_to_le16(ATOM_DEVICE_CRT2_SUPPORT);
1732*4882a593Smuzhiyun else if (amdgpu_connector->devices & ATOM_DEVICE_CV_SUPPORT) {
1733*4882a593Smuzhiyun args.sDacload.usDeviceID = cpu_to_le16(ATOM_DEVICE_CV_SUPPORT);
1734*4882a593Smuzhiyun if (crev >= 3)
1735*4882a593Smuzhiyun args.sDacload.ucMisc = DAC_LOAD_MISC_YPrPb;
1736*4882a593Smuzhiyun } else if (amdgpu_connector->devices & ATOM_DEVICE_TV1_SUPPORT) {
1737*4882a593Smuzhiyun args.sDacload.usDeviceID = cpu_to_le16(ATOM_DEVICE_TV1_SUPPORT);
1738*4882a593Smuzhiyun if (crev >= 3)
1739*4882a593Smuzhiyun args.sDacload.ucMisc = DAC_LOAD_MISC_YPrPb;
1740*4882a593Smuzhiyun }
1741*4882a593Smuzhiyun
1742*4882a593Smuzhiyun amdgpu_atom_execute_table(adev->mode_info.atom_context, index, (uint32_t *)&args);
1743*4882a593Smuzhiyun
1744*4882a593Smuzhiyun return true;
1745*4882a593Smuzhiyun } else
1746*4882a593Smuzhiyun return false;
1747*4882a593Smuzhiyun }
1748*4882a593Smuzhiyun
1749*4882a593Smuzhiyun enum drm_connector_status
amdgpu_atombios_encoder_dac_detect(struct drm_encoder * encoder,struct drm_connector * connector)1750*4882a593Smuzhiyun amdgpu_atombios_encoder_dac_detect(struct drm_encoder *encoder,
1751*4882a593Smuzhiyun struct drm_connector *connector)
1752*4882a593Smuzhiyun {
1753*4882a593Smuzhiyun struct drm_device *dev = encoder->dev;
1754*4882a593Smuzhiyun struct amdgpu_device *adev = drm_to_adev(dev);
1755*4882a593Smuzhiyun struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
1756*4882a593Smuzhiyun struct amdgpu_connector *amdgpu_connector = to_amdgpu_connector(connector);
1757*4882a593Smuzhiyun uint32_t bios_0_scratch;
1758*4882a593Smuzhiyun
1759*4882a593Smuzhiyun if (!amdgpu_atombios_encoder_dac_load_detect(encoder, connector)) {
1760*4882a593Smuzhiyun DRM_DEBUG_KMS("detect returned false \n");
1761*4882a593Smuzhiyun return connector_status_unknown;
1762*4882a593Smuzhiyun }
1763*4882a593Smuzhiyun
1764*4882a593Smuzhiyun bios_0_scratch = RREG32(mmBIOS_SCRATCH_0);
1765*4882a593Smuzhiyun
1766*4882a593Smuzhiyun DRM_DEBUG_KMS("Bios 0 scratch %x %08x\n", bios_0_scratch, amdgpu_encoder->devices);
1767*4882a593Smuzhiyun if (amdgpu_connector->devices & ATOM_DEVICE_CRT1_SUPPORT) {
1768*4882a593Smuzhiyun if (bios_0_scratch & ATOM_S0_CRT1_MASK)
1769*4882a593Smuzhiyun return connector_status_connected;
1770*4882a593Smuzhiyun }
1771*4882a593Smuzhiyun if (amdgpu_connector->devices & ATOM_DEVICE_CRT2_SUPPORT) {
1772*4882a593Smuzhiyun if (bios_0_scratch & ATOM_S0_CRT2_MASK)
1773*4882a593Smuzhiyun return connector_status_connected;
1774*4882a593Smuzhiyun }
1775*4882a593Smuzhiyun if (amdgpu_connector->devices & ATOM_DEVICE_CV_SUPPORT) {
1776*4882a593Smuzhiyun if (bios_0_scratch & (ATOM_S0_CV_MASK|ATOM_S0_CV_MASK_A))
1777*4882a593Smuzhiyun return connector_status_connected;
1778*4882a593Smuzhiyun }
1779*4882a593Smuzhiyun if (amdgpu_connector->devices & ATOM_DEVICE_TV1_SUPPORT) {
1780*4882a593Smuzhiyun if (bios_0_scratch & (ATOM_S0_TV1_COMPOSITE | ATOM_S0_TV1_COMPOSITE_A))
1781*4882a593Smuzhiyun return connector_status_connected; /* CTV */
1782*4882a593Smuzhiyun else if (bios_0_scratch & (ATOM_S0_TV1_SVIDEO | ATOM_S0_TV1_SVIDEO_A))
1783*4882a593Smuzhiyun return connector_status_connected; /* STV */
1784*4882a593Smuzhiyun }
1785*4882a593Smuzhiyun return connector_status_disconnected;
1786*4882a593Smuzhiyun }
1787*4882a593Smuzhiyun
1788*4882a593Smuzhiyun enum drm_connector_status
amdgpu_atombios_encoder_dig_detect(struct drm_encoder * encoder,struct drm_connector * connector)1789*4882a593Smuzhiyun amdgpu_atombios_encoder_dig_detect(struct drm_encoder *encoder,
1790*4882a593Smuzhiyun struct drm_connector *connector)
1791*4882a593Smuzhiyun {
1792*4882a593Smuzhiyun struct drm_device *dev = encoder->dev;
1793*4882a593Smuzhiyun struct amdgpu_device *adev = drm_to_adev(dev);
1794*4882a593Smuzhiyun struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
1795*4882a593Smuzhiyun struct amdgpu_connector *amdgpu_connector = to_amdgpu_connector(connector);
1796*4882a593Smuzhiyun struct drm_encoder *ext_encoder = amdgpu_get_external_encoder(encoder);
1797*4882a593Smuzhiyun u32 bios_0_scratch;
1798*4882a593Smuzhiyun
1799*4882a593Smuzhiyun if (!ext_encoder)
1800*4882a593Smuzhiyun return connector_status_unknown;
1801*4882a593Smuzhiyun
1802*4882a593Smuzhiyun if ((amdgpu_connector->devices & ATOM_DEVICE_CRT_SUPPORT) == 0)
1803*4882a593Smuzhiyun return connector_status_unknown;
1804*4882a593Smuzhiyun
1805*4882a593Smuzhiyun /* load detect on the dp bridge */
1806*4882a593Smuzhiyun amdgpu_atombios_encoder_setup_external_encoder(encoder, ext_encoder,
1807*4882a593Smuzhiyun EXTERNAL_ENCODER_ACTION_V3_DACLOAD_DETECTION);
1808*4882a593Smuzhiyun
1809*4882a593Smuzhiyun bios_0_scratch = RREG32(mmBIOS_SCRATCH_0);
1810*4882a593Smuzhiyun
1811*4882a593Smuzhiyun DRM_DEBUG_KMS("Bios 0 scratch %x %08x\n", bios_0_scratch, amdgpu_encoder->devices);
1812*4882a593Smuzhiyun if (amdgpu_connector->devices & ATOM_DEVICE_CRT1_SUPPORT) {
1813*4882a593Smuzhiyun if (bios_0_scratch & ATOM_S0_CRT1_MASK)
1814*4882a593Smuzhiyun return connector_status_connected;
1815*4882a593Smuzhiyun }
1816*4882a593Smuzhiyun if (amdgpu_connector->devices & ATOM_DEVICE_CRT2_SUPPORT) {
1817*4882a593Smuzhiyun if (bios_0_scratch & ATOM_S0_CRT2_MASK)
1818*4882a593Smuzhiyun return connector_status_connected;
1819*4882a593Smuzhiyun }
1820*4882a593Smuzhiyun if (amdgpu_connector->devices & ATOM_DEVICE_CV_SUPPORT) {
1821*4882a593Smuzhiyun if (bios_0_scratch & (ATOM_S0_CV_MASK|ATOM_S0_CV_MASK_A))
1822*4882a593Smuzhiyun return connector_status_connected;
1823*4882a593Smuzhiyun }
1824*4882a593Smuzhiyun if (amdgpu_connector->devices & ATOM_DEVICE_TV1_SUPPORT) {
1825*4882a593Smuzhiyun if (bios_0_scratch & (ATOM_S0_TV1_COMPOSITE | ATOM_S0_TV1_COMPOSITE_A))
1826*4882a593Smuzhiyun return connector_status_connected; /* CTV */
1827*4882a593Smuzhiyun else if (bios_0_scratch & (ATOM_S0_TV1_SVIDEO | ATOM_S0_TV1_SVIDEO_A))
1828*4882a593Smuzhiyun return connector_status_connected; /* STV */
1829*4882a593Smuzhiyun }
1830*4882a593Smuzhiyun return connector_status_disconnected;
1831*4882a593Smuzhiyun }
1832*4882a593Smuzhiyun
1833*4882a593Smuzhiyun void
amdgpu_atombios_encoder_setup_ext_encoder_ddc(struct drm_encoder * encoder)1834*4882a593Smuzhiyun amdgpu_atombios_encoder_setup_ext_encoder_ddc(struct drm_encoder *encoder)
1835*4882a593Smuzhiyun {
1836*4882a593Smuzhiyun struct drm_encoder *ext_encoder = amdgpu_get_external_encoder(encoder);
1837*4882a593Smuzhiyun
1838*4882a593Smuzhiyun if (ext_encoder)
1839*4882a593Smuzhiyun /* ddc_setup on the dp bridge */
1840*4882a593Smuzhiyun amdgpu_atombios_encoder_setup_external_encoder(encoder, ext_encoder,
1841*4882a593Smuzhiyun EXTERNAL_ENCODER_ACTION_V3_DDC_SETUP);
1842*4882a593Smuzhiyun
1843*4882a593Smuzhiyun }
1844*4882a593Smuzhiyun
1845*4882a593Smuzhiyun void
amdgpu_atombios_encoder_set_bios_scratch_regs(struct drm_connector * connector,struct drm_encoder * encoder,bool connected)1846*4882a593Smuzhiyun amdgpu_atombios_encoder_set_bios_scratch_regs(struct drm_connector *connector,
1847*4882a593Smuzhiyun struct drm_encoder *encoder,
1848*4882a593Smuzhiyun bool connected)
1849*4882a593Smuzhiyun {
1850*4882a593Smuzhiyun struct drm_device *dev = connector->dev;
1851*4882a593Smuzhiyun struct amdgpu_device *adev = drm_to_adev(dev);
1852*4882a593Smuzhiyun struct amdgpu_connector *amdgpu_connector =
1853*4882a593Smuzhiyun to_amdgpu_connector(connector);
1854*4882a593Smuzhiyun struct amdgpu_encoder *amdgpu_encoder = to_amdgpu_encoder(encoder);
1855*4882a593Smuzhiyun uint32_t bios_0_scratch, bios_3_scratch, bios_6_scratch;
1856*4882a593Smuzhiyun
1857*4882a593Smuzhiyun bios_0_scratch = RREG32(mmBIOS_SCRATCH_0);
1858*4882a593Smuzhiyun bios_3_scratch = RREG32(mmBIOS_SCRATCH_3);
1859*4882a593Smuzhiyun bios_6_scratch = RREG32(mmBIOS_SCRATCH_6);
1860*4882a593Smuzhiyun
1861*4882a593Smuzhiyun if ((amdgpu_encoder->devices & ATOM_DEVICE_LCD1_SUPPORT) &&
1862*4882a593Smuzhiyun (amdgpu_connector->devices & ATOM_DEVICE_LCD1_SUPPORT)) {
1863*4882a593Smuzhiyun if (connected) {
1864*4882a593Smuzhiyun DRM_DEBUG_KMS("LCD1 connected\n");
1865*4882a593Smuzhiyun bios_0_scratch |= ATOM_S0_LCD1;
1866*4882a593Smuzhiyun bios_3_scratch |= ATOM_S3_LCD1_ACTIVE;
1867*4882a593Smuzhiyun bios_6_scratch |= ATOM_S6_ACC_REQ_LCD1;
1868*4882a593Smuzhiyun } else {
1869*4882a593Smuzhiyun DRM_DEBUG_KMS("LCD1 disconnected\n");
1870*4882a593Smuzhiyun bios_0_scratch &= ~ATOM_S0_LCD1;
1871*4882a593Smuzhiyun bios_3_scratch &= ~ATOM_S3_LCD1_ACTIVE;
1872*4882a593Smuzhiyun bios_6_scratch &= ~ATOM_S6_ACC_REQ_LCD1;
1873*4882a593Smuzhiyun }
1874*4882a593Smuzhiyun }
1875*4882a593Smuzhiyun if ((amdgpu_encoder->devices & ATOM_DEVICE_CRT1_SUPPORT) &&
1876*4882a593Smuzhiyun (amdgpu_connector->devices & ATOM_DEVICE_CRT1_SUPPORT)) {
1877*4882a593Smuzhiyun if (connected) {
1878*4882a593Smuzhiyun DRM_DEBUG_KMS("CRT1 connected\n");
1879*4882a593Smuzhiyun bios_0_scratch |= ATOM_S0_CRT1_COLOR;
1880*4882a593Smuzhiyun bios_3_scratch |= ATOM_S3_CRT1_ACTIVE;
1881*4882a593Smuzhiyun bios_6_scratch |= ATOM_S6_ACC_REQ_CRT1;
1882*4882a593Smuzhiyun } else {
1883*4882a593Smuzhiyun DRM_DEBUG_KMS("CRT1 disconnected\n");
1884*4882a593Smuzhiyun bios_0_scratch &= ~ATOM_S0_CRT1_MASK;
1885*4882a593Smuzhiyun bios_3_scratch &= ~ATOM_S3_CRT1_ACTIVE;
1886*4882a593Smuzhiyun bios_6_scratch &= ~ATOM_S6_ACC_REQ_CRT1;
1887*4882a593Smuzhiyun }
1888*4882a593Smuzhiyun }
1889*4882a593Smuzhiyun if ((amdgpu_encoder->devices & ATOM_DEVICE_CRT2_SUPPORT) &&
1890*4882a593Smuzhiyun (amdgpu_connector->devices & ATOM_DEVICE_CRT2_SUPPORT)) {
1891*4882a593Smuzhiyun if (connected) {
1892*4882a593Smuzhiyun DRM_DEBUG_KMS("CRT2 connected\n");
1893*4882a593Smuzhiyun bios_0_scratch |= ATOM_S0_CRT2_COLOR;
1894*4882a593Smuzhiyun bios_3_scratch |= ATOM_S3_CRT2_ACTIVE;
1895*4882a593Smuzhiyun bios_6_scratch |= ATOM_S6_ACC_REQ_CRT2;
1896*4882a593Smuzhiyun } else {
1897*4882a593Smuzhiyun DRM_DEBUG_KMS("CRT2 disconnected\n");
1898*4882a593Smuzhiyun bios_0_scratch &= ~ATOM_S0_CRT2_MASK;
1899*4882a593Smuzhiyun bios_3_scratch &= ~ATOM_S3_CRT2_ACTIVE;
1900*4882a593Smuzhiyun bios_6_scratch &= ~ATOM_S6_ACC_REQ_CRT2;
1901*4882a593Smuzhiyun }
1902*4882a593Smuzhiyun }
1903*4882a593Smuzhiyun if ((amdgpu_encoder->devices & ATOM_DEVICE_DFP1_SUPPORT) &&
1904*4882a593Smuzhiyun (amdgpu_connector->devices & ATOM_DEVICE_DFP1_SUPPORT)) {
1905*4882a593Smuzhiyun if (connected) {
1906*4882a593Smuzhiyun DRM_DEBUG_KMS("DFP1 connected\n");
1907*4882a593Smuzhiyun bios_0_scratch |= ATOM_S0_DFP1;
1908*4882a593Smuzhiyun bios_3_scratch |= ATOM_S3_DFP1_ACTIVE;
1909*4882a593Smuzhiyun bios_6_scratch |= ATOM_S6_ACC_REQ_DFP1;
1910*4882a593Smuzhiyun } else {
1911*4882a593Smuzhiyun DRM_DEBUG_KMS("DFP1 disconnected\n");
1912*4882a593Smuzhiyun bios_0_scratch &= ~ATOM_S0_DFP1;
1913*4882a593Smuzhiyun bios_3_scratch &= ~ATOM_S3_DFP1_ACTIVE;
1914*4882a593Smuzhiyun bios_6_scratch &= ~ATOM_S6_ACC_REQ_DFP1;
1915*4882a593Smuzhiyun }
1916*4882a593Smuzhiyun }
1917*4882a593Smuzhiyun if ((amdgpu_encoder->devices & ATOM_DEVICE_DFP2_SUPPORT) &&
1918*4882a593Smuzhiyun (amdgpu_connector->devices & ATOM_DEVICE_DFP2_SUPPORT)) {
1919*4882a593Smuzhiyun if (connected) {
1920*4882a593Smuzhiyun DRM_DEBUG_KMS("DFP2 connected\n");
1921*4882a593Smuzhiyun bios_0_scratch |= ATOM_S0_DFP2;
1922*4882a593Smuzhiyun bios_3_scratch |= ATOM_S3_DFP2_ACTIVE;
1923*4882a593Smuzhiyun bios_6_scratch |= ATOM_S6_ACC_REQ_DFP2;
1924*4882a593Smuzhiyun } else {
1925*4882a593Smuzhiyun DRM_DEBUG_KMS("DFP2 disconnected\n");
1926*4882a593Smuzhiyun bios_0_scratch &= ~ATOM_S0_DFP2;
1927*4882a593Smuzhiyun bios_3_scratch &= ~ATOM_S3_DFP2_ACTIVE;
1928*4882a593Smuzhiyun bios_6_scratch &= ~ATOM_S6_ACC_REQ_DFP2;
1929*4882a593Smuzhiyun }
1930*4882a593Smuzhiyun }
1931*4882a593Smuzhiyun if ((amdgpu_encoder->devices & ATOM_DEVICE_DFP3_SUPPORT) &&
1932*4882a593Smuzhiyun (amdgpu_connector->devices & ATOM_DEVICE_DFP3_SUPPORT)) {
1933*4882a593Smuzhiyun if (connected) {
1934*4882a593Smuzhiyun DRM_DEBUG_KMS("DFP3 connected\n");
1935*4882a593Smuzhiyun bios_0_scratch |= ATOM_S0_DFP3;
1936*4882a593Smuzhiyun bios_3_scratch |= ATOM_S3_DFP3_ACTIVE;
1937*4882a593Smuzhiyun bios_6_scratch |= ATOM_S6_ACC_REQ_DFP3;
1938*4882a593Smuzhiyun } else {
1939*4882a593Smuzhiyun DRM_DEBUG_KMS("DFP3 disconnected\n");
1940*4882a593Smuzhiyun bios_0_scratch &= ~ATOM_S0_DFP3;
1941*4882a593Smuzhiyun bios_3_scratch &= ~ATOM_S3_DFP3_ACTIVE;
1942*4882a593Smuzhiyun bios_6_scratch &= ~ATOM_S6_ACC_REQ_DFP3;
1943*4882a593Smuzhiyun }
1944*4882a593Smuzhiyun }
1945*4882a593Smuzhiyun if ((amdgpu_encoder->devices & ATOM_DEVICE_DFP4_SUPPORT) &&
1946*4882a593Smuzhiyun (amdgpu_connector->devices & ATOM_DEVICE_DFP4_SUPPORT)) {
1947*4882a593Smuzhiyun if (connected) {
1948*4882a593Smuzhiyun DRM_DEBUG_KMS("DFP4 connected\n");
1949*4882a593Smuzhiyun bios_0_scratch |= ATOM_S0_DFP4;
1950*4882a593Smuzhiyun bios_3_scratch |= ATOM_S3_DFP4_ACTIVE;
1951*4882a593Smuzhiyun bios_6_scratch |= ATOM_S6_ACC_REQ_DFP4;
1952*4882a593Smuzhiyun } else {
1953*4882a593Smuzhiyun DRM_DEBUG_KMS("DFP4 disconnected\n");
1954*4882a593Smuzhiyun bios_0_scratch &= ~ATOM_S0_DFP4;
1955*4882a593Smuzhiyun bios_3_scratch &= ~ATOM_S3_DFP4_ACTIVE;
1956*4882a593Smuzhiyun bios_6_scratch &= ~ATOM_S6_ACC_REQ_DFP4;
1957*4882a593Smuzhiyun }
1958*4882a593Smuzhiyun }
1959*4882a593Smuzhiyun if ((amdgpu_encoder->devices & ATOM_DEVICE_DFP5_SUPPORT) &&
1960*4882a593Smuzhiyun (amdgpu_connector->devices & ATOM_DEVICE_DFP5_SUPPORT)) {
1961*4882a593Smuzhiyun if (connected) {
1962*4882a593Smuzhiyun DRM_DEBUG_KMS("DFP5 connected\n");
1963*4882a593Smuzhiyun bios_0_scratch |= ATOM_S0_DFP5;
1964*4882a593Smuzhiyun bios_3_scratch |= ATOM_S3_DFP5_ACTIVE;
1965*4882a593Smuzhiyun bios_6_scratch |= ATOM_S6_ACC_REQ_DFP5;
1966*4882a593Smuzhiyun } else {
1967*4882a593Smuzhiyun DRM_DEBUG_KMS("DFP5 disconnected\n");
1968*4882a593Smuzhiyun bios_0_scratch &= ~ATOM_S0_DFP5;
1969*4882a593Smuzhiyun bios_3_scratch &= ~ATOM_S3_DFP5_ACTIVE;
1970*4882a593Smuzhiyun bios_6_scratch &= ~ATOM_S6_ACC_REQ_DFP5;
1971*4882a593Smuzhiyun }
1972*4882a593Smuzhiyun }
1973*4882a593Smuzhiyun if ((amdgpu_encoder->devices & ATOM_DEVICE_DFP6_SUPPORT) &&
1974*4882a593Smuzhiyun (amdgpu_connector->devices & ATOM_DEVICE_DFP6_SUPPORT)) {
1975*4882a593Smuzhiyun if (connected) {
1976*4882a593Smuzhiyun DRM_DEBUG_KMS("DFP6 connected\n");
1977*4882a593Smuzhiyun bios_0_scratch |= ATOM_S0_DFP6;
1978*4882a593Smuzhiyun bios_3_scratch |= ATOM_S3_DFP6_ACTIVE;
1979*4882a593Smuzhiyun bios_6_scratch |= ATOM_S6_ACC_REQ_DFP6;
1980*4882a593Smuzhiyun } else {
1981*4882a593Smuzhiyun DRM_DEBUG_KMS("DFP6 disconnected\n");
1982*4882a593Smuzhiyun bios_0_scratch &= ~ATOM_S0_DFP6;
1983*4882a593Smuzhiyun bios_3_scratch &= ~ATOM_S3_DFP6_ACTIVE;
1984*4882a593Smuzhiyun bios_6_scratch &= ~ATOM_S6_ACC_REQ_DFP6;
1985*4882a593Smuzhiyun }
1986*4882a593Smuzhiyun }
1987*4882a593Smuzhiyun
1988*4882a593Smuzhiyun WREG32(mmBIOS_SCRATCH_0, bios_0_scratch);
1989*4882a593Smuzhiyun WREG32(mmBIOS_SCRATCH_3, bios_3_scratch);
1990*4882a593Smuzhiyun WREG32(mmBIOS_SCRATCH_6, bios_6_scratch);
1991*4882a593Smuzhiyun }
1992*4882a593Smuzhiyun
/*
 * Overlay of the AtomBIOS LVDS_Info data table revisions so the parser
 * can address the same BIOS bytes through whichever layout matches the
 * table revision reported by amdgpu_atom_parse_data_header().
 */
union lvds_info {
	struct _ATOM_LVDS_INFO info;
	struct _ATOM_LVDS_INFO_V12 info_12;
};
1997*4882a593Smuzhiyun
1998*4882a593Smuzhiyun struct amdgpu_encoder_atom_dig *
amdgpu_atombios_encoder_get_lcd_info(struct amdgpu_encoder * encoder)1999*4882a593Smuzhiyun amdgpu_atombios_encoder_get_lcd_info(struct amdgpu_encoder *encoder)
2000*4882a593Smuzhiyun {
2001*4882a593Smuzhiyun struct drm_device *dev = encoder->base.dev;
2002*4882a593Smuzhiyun struct amdgpu_device *adev = drm_to_adev(dev);
2003*4882a593Smuzhiyun struct amdgpu_mode_info *mode_info = &adev->mode_info;
2004*4882a593Smuzhiyun int index = GetIndexIntoMasterTable(DATA, LVDS_Info);
2005*4882a593Smuzhiyun uint16_t data_offset, misc;
2006*4882a593Smuzhiyun union lvds_info *lvds_info;
2007*4882a593Smuzhiyun uint8_t frev, crev;
2008*4882a593Smuzhiyun struct amdgpu_encoder_atom_dig *lvds = NULL;
2009*4882a593Smuzhiyun int encoder_enum = (encoder->encoder_enum & ENUM_ID_MASK) >> ENUM_ID_SHIFT;
2010*4882a593Smuzhiyun
2011*4882a593Smuzhiyun if (amdgpu_atom_parse_data_header(mode_info->atom_context, index, NULL,
2012*4882a593Smuzhiyun &frev, &crev, &data_offset)) {
2013*4882a593Smuzhiyun lvds_info =
2014*4882a593Smuzhiyun (union lvds_info *)(mode_info->atom_context->bios + data_offset);
2015*4882a593Smuzhiyun lvds =
2016*4882a593Smuzhiyun kzalloc(sizeof(struct amdgpu_encoder_atom_dig), GFP_KERNEL);
2017*4882a593Smuzhiyun
2018*4882a593Smuzhiyun if (!lvds)
2019*4882a593Smuzhiyun return NULL;
2020*4882a593Smuzhiyun
2021*4882a593Smuzhiyun lvds->native_mode.clock =
2022*4882a593Smuzhiyun le16_to_cpu(lvds_info->info.sLCDTiming.usPixClk) * 10;
2023*4882a593Smuzhiyun lvds->native_mode.hdisplay =
2024*4882a593Smuzhiyun le16_to_cpu(lvds_info->info.sLCDTiming.usHActive);
2025*4882a593Smuzhiyun lvds->native_mode.vdisplay =
2026*4882a593Smuzhiyun le16_to_cpu(lvds_info->info.sLCDTiming.usVActive);
2027*4882a593Smuzhiyun lvds->native_mode.htotal = lvds->native_mode.hdisplay +
2028*4882a593Smuzhiyun le16_to_cpu(lvds_info->info.sLCDTiming.usHBlanking_Time);
2029*4882a593Smuzhiyun lvds->native_mode.hsync_start = lvds->native_mode.hdisplay +
2030*4882a593Smuzhiyun le16_to_cpu(lvds_info->info.sLCDTiming.usHSyncOffset);
2031*4882a593Smuzhiyun lvds->native_mode.hsync_end = lvds->native_mode.hsync_start +
2032*4882a593Smuzhiyun le16_to_cpu(lvds_info->info.sLCDTiming.usHSyncWidth);
2033*4882a593Smuzhiyun lvds->native_mode.vtotal = lvds->native_mode.vdisplay +
2034*4882a593Smuzhiyun le16_to_cpu(lvds_info->info.sLCDTiming.usVBlanking_Time);
2035*4882a593Smuzhiyun lvds->native_mode.vsync_start = lvds->native_mode.vdisplay +
2036*4882a593Smuzhiyun le16_to_cpu(lvds_info->info.sLCDTiming.usVSyncOffset);
2037*4882a593Smuzhiyun lvds->native_mode.vsync_end = lvds->native_mode.vsync_start +
2038*4882a593Smuzhiyun le16_to_cpu(lvds_info->info.sLCDTiming.usVSyncWidth);
2039*4882a593Smuzhiyun lvds->panel_pwr_delay =
2040*4882a593Smuzhiyun le16_to_cpu(lvds_info->info.usOffDelayInMs);
2041*4882a593Smuzhiyun lvds->lcd_misc = lvds_info->info.ucLVDS_Misc;
2042*4882a593Smuzhiyun
2043*4882a593Smuzhiyun misc = le16_to_cpu(lvds_info->info.sLCDTiming.susModeMiscInfo.usAccess);
2044*4882a593Smuzhiyun if (misc & ATOM_VSYNC_POLARITY)
2045*4882a593Smuzhiyun lvds->native_mode.flags |= DRM_MODE_FLAG_NVSYNC;
2046*4882a593Smuzhiyun if (misc & ATOM_HSYNC_POLARITY)
2047*4882a593Smuzhiyun lvds->native_mode.flags |= DRM_MODE_FLAG_NHSYNC;
2048*4882a593Smuzhiyun if (misc & ATOM_COMPOSITESYNC)
2049*4882a593Smuzhiyun lvds->native_mode.flags |= DRM_MODE_FLAG_CSYNC;
2050*4882a593Smuzhiyun if (misc & ATOM_INTERLACE)
2051*4882a593Smuzhiyun lvds->native_mode.flags |= DRM_MODE_FLAG_INTERLACE;
2052*4882a593Smuzhiyun if (misc & ATOM_DOUBLE_CLOCK_MODE)
2053*4882a593Smuzhiyun lvds->native_mode.flags |= DRM_MODE_FLAG_DBLSCAN;
2054*4882a593Smuzhiyun
2055*4882a593Smuzhiyun lvds->native_mode.width_mm = le16_to_cpu(lvds_info->info.sLCDTiming.usImageHSize);
2056*4882a593Smuzhiyun lvds->native_mode.height_mm = le16_to_cpu(lvds_info->info.sLCDTiming.usImageVSize);
2057*4882a593Smuzhiyun
2058*4882a593Smuzhiyun /* set crtc values */
2059*4882a593Smuzhiyun drm_mode_set_crtcinfo(&lvds->native_mode, CRTC_INTERLACE_HALVE_V);
2060*4882a593Smuzhiyun
2061*4882a593Smuzhiyun lvds->lcd_ss_id = lvds_info->info.ucSS_Id;
2062*4882a593Smuzhiyun
2063*4882a593Smuzhiyun encoder->native_mode = lvds->native_mode;
2064*4882a593Smuzhiyun
2065*4882a593Smuzhiyun if (encoder_enum == 2)
2066*4882a593Smuzhiyun lvds->linkb = true;
2067*4882a593Smuzhiyun else
2068*4882a593Smuzhiyun lvds->linkb = false;
2069*4882a593Smuzhiyun
2070*4882a593Smuzhiyun /* parse the lcd record table */
2071*4882a593Smuzhiyun if (le16_to_cpu(lvds_info->info.usModePatchTableOffset)) {
2072*4882a593Smuzhiyun ATOM_FAKE_EDID_PATCH_RECORD *fake_edid_record;
2073*4882a593Smuzhiyun ATOM_PANEL_RESOLUTION_PATCH_RECORD *panel_res_record;
2074*4882a593Smuzhiyun bool bad_record = false;
2075*4882a593Smuzhiyun u8 *record;
2076*4882a593Smuzhiyun
2077*4882a593Smuzhiyun if ((frev == 1) && (crev < 2))
2078*4882a593Smuzhiyun /* absolute */
2079*4882a593Smuzhiyun record = (u8 *)(mode_info->atom_context->bios +
2080*4882a593Smuzhiyun le16_to_cpu(lvds_info->info.usModePatchTableOffset));
2081*4882a593Smuzhiyun else
2082*4882a593Smuzhiyun /* relative */
2083*4882a593Smuzhiyun record = (u8 *)(mode_info->atom_context->bios +
2084*4882a593Smuzhiyun data_offset +
2085*4882a593Smuzhiyun le16_to_cpu(lvds_info->info.usModePatchTableOffset));
2086*4882a593Smuzhiyun while (*record != ATOM_RECORD_END_TYPE) {
2087*4882a593Smuzhiyun switch (*record) {
2088*4882a593Smuzhiyun case LCD_MODE_PATCH_RECORD_MODE_TYPE:
2089*4882a593Smuzhiyun record += sizeof(ATOM_PATCH_RECORD_MODE);
2090*4882a593Smuzhiyun break;
2091*4882a593Smuzhiyun case LCD_RTS_RECORD_TYPE:
2092*4882a593Smuzhiyun record += sizeof(ATOM_LCD_RTS_RECORD);
2093*4882a593Smuzhiyun break;
2094*4882a593Smuzhiyun case LCD_CAP_RECORD_TYPE:
2095*4882a593Smuzhiyun record += sizeof(ATOM_LCD_MODE_CONTROL_CAP);
2096*4882a593Smuzhiyun break;
2097*4882a593Smuzhiyun case LCD_FAKE_EDID_PATCH_RECORD_TYPE:
2098*4882a593Smuzhiyun fake_edid_record = (ATOM_FAKE_EDID_PATCH_RECORD *)record;
2099*4882a593Smuzhiyun if (fake_edid_record->ucFakeEDIDLength) {
2100*4882a593Smuzhiyun struct edid *edid;
2101*4882a593Smuzhiyun int edid_size =
2102*4882a593Smuzhiyun max((int)EDID_LENGTH, (int)fake_edid_record->ucFakeEDIDLength);
2103*4882a593Smuzhiyun edid = kmalloc(edid_size, GFP_KERNEL);
2104*4882a593Smuzhiyun if (edid) {
2105*4882a593Smuzhiyun memcpy((u8 *)edid, (u8 *)&fake_edid_record->ucFakeEDIDString[0],
2106*4882a593Smuzhiyun fake_edid_record->ucFakeEDIDLength);
2107*4882a593Smuzhiyun
2108*4882a593Smuzhiyun if (drm_edid_is_valid(edid)) {
2109*4882a593Smuzhiyun adev->mode_info.bios_hardcoded_edid = edid;
2110*4882a593Smuzhiyun adev->mode_info.bios_hardcoded_edid_size = edid_size;
2111*4882a593Smuzhiyun } else
2112*4882a593Smuzhiyun kfree(edid);
2113*4882a593Smuzhiyun }
2114*4882a593Smuzhiyun }
2115*4882a593Smuzhiyun record += fake_edid_record->ucFakeEDIDLength ?
2116*4882a593Smuzhiyun fake_edid_record->ucFakeEDIDLength + 2 :
2117*4882a593Smuzhiyun sizeof(ATOM_FAKE_EDID_PATCH_RECORD);
2118*4882a593Smuzhiyun break;
2119*4882a593Smuzhiyun case LCD_PANEL_RESOLUTION_RECORD_TYPE:
2120*4882a593Smuzhiyun panel_res_record = (ATOM_PANEL_RESOLUTION_PATCH_RECORD *)record;
2121*4882a593Smuzhiyun lvds->native_mode.width_mm = panel_res_record->usHSize;
2122*4882a593Smuzhiyun lvds->native_mode.height_mm = panel_res_record->usVSize;
2123*4882a593Smuzhiyun record += sizeof(ATOM_PANEL_RESOLUTION_PATCH_RECORD);
2124*4882a593Smuzhiyun break;
2125*4882a593Smuzhiyun default:
2126*4882a593Smuzhiyun DRM_ERROR("Bad LCD record %d\n", *record);
2127*4882a593Smuzhiyun bad_record = true;
2128*4882a593Smuzhiyun break;
2129*4882a593Smuzhiyun }
2130*4882a593Smuzhiyun if (bad_record)
2131*4882a593Smuzhiyun break;
2132*4882a593Smuzhiyun }
2133*4882a593Smuzhiyun }
2134*4882a593Smuzhiyun }
2135*4882a593Smuzhiyun return lvds;
2136*4882a593Smuzhiyun }
2137*4882a593Smuzhiyun
2138*4882a593Smuzhiyun struct amdgpu_encoder_atom_dig *
amdgpu_atombios_encoder_get_dig_info(struct amdgpu_encoder * amdgpu_encoder)2139*4882a593Smuzhiyun amdgpu_atombios_encoder_get_dig_info(struct amdgpu_encoder *amdgpu_encoder)
2140*4882a593Smuzhiyun {
2141*4882a593Smuzhiyun int encoder_enum = (amdgpu_encoder->encoder_enum & ENUM_ID_MASK) >> ENUM_ID_SHIFT;
2142*4882a593Smuzhiyun struct amdgpu_encoder_atom_dig *dig = kzalloc(sizeof(struct amdgpu_encoder_atom_dig), GFP_KERNEL);
2143*4882a593Smuzhiyun
2144*4882a593Smuzhiyun if (!dig)
2145*4882a593Smuzhiyun return NULL;
2146*4882a593Smuzhiyun
2147*4882a593Smuzhiyun /* coherent mode by default */
2148*4882a593Smuzhiyun dig->coherent_mode = true;
2149*4882a593Smuzhiyun dig->dig_encoder = -1;
2150*4882a593Smuzhiyun
2151*4882a593Smuzhiyun if (encoder_enum == 2)
2152*4882a593Smuzhiyun dig->linkb = true;
2153*4882a593Smuzhiyun else
2154*4882a593Smuzhiyun dig->linkb = false;
2155*4882a593Smuzhiyun
2156*4882a593Smuzhiyun return dig;
2157*4882a593Smuzhiyun }
2158*4882a593Smuzhiyun
2159