
/drivers/gpu/drm/radeon/atombios_crtc.c

https://bitbucket.org/ndreys/linux-sunxi
C | 1636 lines (1399 code, 152 blank, 85 comment)

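The code below is the radeon KMS driver's AtomBIOS CRTC implementation: mode setting is performed by filling parameter structures and executing AtomBIOS command tables (SetCRTC_Timing, SetPixelClock, EnableSpreadSpectrumOnPPLL, and so on), while the scanout surface itself is programmed through direct register writes on AVIVO and DCE4+ hardware.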

   1/*
   2 * Copyright 2007-8 Advanced Micro Devices, Inc.
   3 * Copyright 2008 Red Hat Inc.
   4 *
   5 * Permission is hereby granted, free of charge, to any person obtaining a
   6 * copy of this software and associated documentation files (the "Software"),
   7 * to deal in the Software without restriction, including without limitation
   8 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
   9 * and/or sell copies of the Software, and to permit persons to whom the
  10 * Software is furnished to do so, subject to the following conditions:
  11 *
  12 * The above copyright notice and this permission notice shall be included in
  13 * all copies or substantial portions of the Software.
  14 *
  15 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
  16 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
  17 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
  18 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
  19 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
  20 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
  21 * OTHER DEALINGS IN THE SOFTWARE.
  22 *
  23 * Authors: Dave Airlie
  24 *          Alex Deucher
  25 */
  26#include <drm/drmP.h>
  27#include <drm/drm_crtc_helper.h>
  28#include <drm/radeon_drm.h>
  29#include <drm/drm_fixed.h>
  30#include "radeon.h"
  31#include "atom.h"
  32#include "atom-bits.h"
  33
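/*
 * Program the SetCRTC_OverScan table: the overscan borders are derived
 * from the difference between the requested and adjusted modes for
 * centered/aspect-preserving scaling, or taken from the CRTC border
 * values for full (RMX_FULL) scaling.
 */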
  34static void atombios_overscan_setup(struct drm_crtc *crtc,
  35				    struct drm_display_mode *mode,
  36				    struct drm_display_mode *adjusted_mode)
  37{
  38	struct drm_device *dev = crtc->dev;
  39	struct radeon_device *rdev = dev->dev_private;
  40	struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
  41	SET_CRTC_OVERSCAN_PS_ALLOCATION args;
  42	int index = GetIndexIntoMasterTable(COMMAND, SetCRTC_OverScan);
  43	int a1, a2;
  44
  45	memset(&args, 0, sizeof(args));
  46
  47	args.ucCRTC = radeon_crtc->crtc_id;
  48
  49	switch (radeon_crtc->rmx_type) {
  50	case RMX_CENTER:
  51		args.usOverscanTop = cpu_to_le16((adjusted_mode->crtc_vdisplay - mode->crtc_vdisplay) / 2);
  52		args.usOverscanBottom = cpu_to_le16((adjusted_mode->crtc_vdisplay - mode->crtc_vdisplay) / 2);
  53		args.usOverscanLeft = cpu_to_le16((adjusted_mode->crtc_hdisplay - mode->crtc_hdisplay) / 2);
  54		args.usOverscanRight = cpu_to_le16((adjusted_mode->crtc_hdisplay - mode->crtc_hdisplay) / 2);
  55		break;
  56	case RMX_ASPECT:
  57		a1 = mode->crtc_vdisplay * adjusted_mode->crtc_hdisplay;
  58		a2 = adjusted_mode->crtc_vdisplay * mode->crtc_hdisplay;
  59
  60		if (a1 > a2) {
  61			args.usOverscanLeft = cpu_to_le16((adjusted_mode->crtc_hdisplay - (a2 / mode->crtc_vdisplay)) / 2);
  62			args.usOverscanRight = cpu_to_le16((adjusted_mode->crtc_hdisplay - (a2 / mode->crtc_vdisplay)) / 2);
  63		} else if (a2 > a1) {
  64			args.usOverscanTop = cpu_to_le16((adjusted_mode->crtc_vdisplay - (a1 / mode->crtc_hdisplay)) / 2);
  65			args.usOverscanBottom = cpu_to_le16((adjusted_mode->crtc_vdisplay - (a1 / mode->crtc_hdisplay)) / 2);
  66		}
  67		break;
  68	case RMX_FULL:
  69	default:
  70		args.usOverscanRight = cpu_to_le16(radeon_crtc->h_border);
  71		args.usOverscanLeft = cpu_to_le16(radeon_crtc->h_border);
  72		args.usOverscanBottom = cpu_to_le16(radeon_crtc->v_border);
  73		args.usOverscanTop = cpu_to_le16(radeon_crtc->v_border);
  74		break;
  75	}
  76	atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args);
  77}
  78
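/*
 * Program the EnableScaler table.  TV/CV outputs use the multi-tap scaler
 * with the matching TV standard; otherwise the scaler mode follows the
 * CRTC's RMX type.  Pre-AVIVO parts only have a scaler on CRTC 0, and
 * RV515..R580 additionally need the forced TV scaler setup below.
 */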
  79static void atombios_scaler_setup(struct drm_crtc *crtc)
  80{
  81	struct drm_device *dev = crtc->dev;
  82	struct radeon_device *rdev = dev->dev_private;
  83	struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
  84	ENABLE_SCALER_PS_ALLOCATION args;
  85	int index = GetIndexIntoMasterTable(COMMAND, EnableScaler);
  86
  87	/* fixme - fill in enc_priv for atom dac */
  88	enum radeon_tv_std tv_std = TV_STD_NTSC;
  89	bool is_tv = false, is_cv = false;
  90	struct drm_encoder *encoder;
  91
  92	if (!ASIC_IS_AVIVO(rdev) && radeon_crtc->crtc_id)
  93		return;
  94
  95	list_for_each_entry(encoder, &dev->mode_config.encoder_list, head) {
  96		/* find tv std */
  97		if (encoder->crtc == crtc) {
  98			struct radeon_encoder *radeon_encoder = to_radeon_encoder(encoder);
  99			if (radeon_encoder->active_device & ATOM_DEVICE_TV_SUPPORT) {
 100				struct radeon_encoder_atom_dac *tv_dac = radeon_encoder->enc_priv;
 101				tv_std = tv_dac->tv_std;
 102				is_tv = true;
 103			}
 104		}
 105	}
 106
 107	memset(&args, 0, sizeof(args));
 108
 109	args.ucScaler = radeon_crtc->crtc_id;
 110
 111	if (is_tv) {
 112		switch (tv_std) {
 113		case TV_STD_NTSC:
 114		default:
 115			args.ucTVStandard = ATOM_TV_NTSC;
 116			break;
 117		case TV_STD_PAL:
 118			args.ucTVStandard = ATOM_TV_PAL;
 119			break;
 120		case TV_STD_PAL_M:
 121			args.ucTVStandard = ATOM_TV_PALM;
 122			break;
 123		case TV_STD_PAL_60:
 124			args.ucTVStandard = ATOM_TV_PAL60;
 125			break;
 126		case TV_STD_NTSC_J:
 127			args.ucTVStandard = ATOM_TV_NTSCJ;
 128			break;
 129		case TV_STD_SCART_PAL:
 130			args.ucTVStandard = ATOM_TV_PAL; /* ??? */
 131			break;
 132		case TV_STD_SECAM:
 133			args.ucTVStandard = ATOM_TV_SECAM;
 134			break;
 135		case TV_STD_PAL_CN:
 136			args.ucTVStandard = ATOM_TV_PALCN;
 137			break;
 138		}
 139		args.ucEnable = SCALER_ENABLE_MULTITAP_MODE;
 140	} else if (is_cv) {
 141		args.ucTVStandard = ATOM_TV_CV;
 142		args.ucEnable = SCALER_ENABLE_MULTITAP_MODE;
 143	} else {
 144		switch (radeon_crtc->rmx_type) {
 145		case RMX_FULL:
 146			args.ucEnable = ATOM_SCALER_EXPANSION;
 147			break;
 148		case RMX_CENTER:
 149			args.ucEnable = ATOM_SCALER_CENTER;
 150			break;
 151		case RMX_ASPECT:
 152			args.ucEnable = ATOM_SCALER_EXPANSION;
 153			break;
 154		default:
 155			if (ASIC_IS_AVIVO(rdev))
 156				args.ucEnable = ATOM_SCALER_DISABLE;
 157			else
 158				args.ucEnable = ATOM_SCALER_CENTER;
 159			break;
 160		}
 161	}
 162	atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args);
 163	if ((is_tv || is_cv)
 164	    && rdev->family >= CHIP_RV515 && rdev->family <= CHIP_R580) {
 165		atom_rv515_force_tv_scaler(rdev, radeon_crtc);
 166	}
 167}
 168
 169static void atombios_lock_crtc(struct drm_crtc *crtc, int lock)
 170{
 171	struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
 172	struct drm_device *dev = crtc->dev;
 173	struct radeon_device *rdev = dev->dev_private;
 174	int index =
 175	    GetIndexIntoMasterTable(COMMAND, UpdateCRTC_DoubleBufferRegisters);
 176	ENABLE_CRTC_PS_ALLOCATION args;
 177
 178	memset(&args, 0, sizeof(args));
 179
 180	args.ucCRTC = radeon_crtc->crtc_id;
 181	args.ucEnable = lock;
 182
 183	atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args);
 184}
 185
 186static void atombios_enable_crtc(struct drm_crtc *crtc, int state)
 187{
 188	struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
 189	struct drm_device *dev = crtc->dev;
 190	struct radeon_device *rdev = dev->dev_private;
 191	int index = GetIndexIntoMasterTable(COMMAND, EnableCRTC);
 192	ENABLE_CRTC_PS_ALLOCATION args;
 193
 194	memset(&args, 0, sizeof(args));
 195
 196	args.ucCRTC = radeon_crtc->crtc_id;
 197	args.ucEnable = state;
 198
 199	atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args);
 200}
 201
 202static void atombios_enable_crtc_memreq(struct drm_crtc *crtc, int state)
 203{
 204	struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
 205	struct drm_device *dev = crtc->dev;
 206	struct radeon_device *rdev = dev->dev_private;
 207	int index = GetIndexIntoMasterTable(COMMAND, EnableCRTCMemReq);
 208	ENABLE_CRTC_PS_ALLOCATION args;
 209
 210	memset(&args, 0, sizeof(args));
 211
 212	args.ucCRTC = radeon_crtc->crtc_id;
 213	args.ucEnable = state;
 214
 215	atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args);
 216}
 217
 218static void atombios_blank_crtc(struct drm_crtc *crtc, int state)
 219{
 220	struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
 221	struct drm_device *dev = crtc->dev;
 222	struct radeon_device *rdev = dev->dev_private;
 223	int index = GetIndexIntoMasterTable(COMMAND, BlankCRTC);
 224	BLANK_CRTC_PS_ALLOCATION args;
 225
 226	memset(&args, 0, sizeof(args));
 227
 228	args.ucCRTC = radeon_crtc->crtc_id;
 229	args.ucBlanking = state;
 230
 231	atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args);
 232}
 233
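/*
 * DPMS: power-management clocks are recomputed before a CRTC is enabled
 * and after it is disabled, the CRTC is (un)blanked around the change,
 * and on DCE3+ memory requests are gated separately via EnableCRTCMemReq.
 */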
 234void atombios_crtc_dpms(struct drm_crtc *crtc, int mode)
 235{
 236	struct drm_device *dev = crtc->dev;
 237	struct radeon_device *rdev = dev->dev_private;
 238	struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
 239
 240	switch (mode) {
 241	case DRM_MODE_DPMS_ON:
 242		radeon_crtc->enabled = true;
 243		/* adjust pm to dpms changes BEFORE enabling crtcs */
 244		radeon_pm_compute_clocks(rdev);
 245		atombios_enable_crtc(crtc, ATOM_ENABLE);
 246		if (ASIC_IS_DCE3(rdev))
 247			atombios_enable_crtc_memreq(crtc, ATOM_ENABLE);
 248		atombios_blank_crtc(crtc, ATOM_DISABLE);
 249		drm_vblank_post_modeset(dev, radeon_crtc->crtc_id);
 250		radeon_crtc_load_lut(crtc);
 251		break;
 252	case DRM_MODE_DPMS_STANDBY:
 253	case DRM_MODE_DPMS_SUSPEND:
 254	case DRM_MODE_DPMS_OFF:
 255		drm_vblank_pre_modeset(dev, radeon_crtc->crtc_id);
 256		if (radeon_crtc->enabled)
 257			atombios_blank_crtc(crtc, ATOM_ENABLE);
 258		if (ASIC_IS_DCE3(rdev))
 259			atombios_enable_crtc_memreq(crtc, ATOM_DISABLE);
 260		atombios_enable_crtc(crtc, ATOM_DISABLE);
 261		radeon_crtc->enabled = false;
 262		/* adjust pm to dpms changes AFTER disabling crtcs */
 263		radeon_pm_compute_clocks(rdev);
 264		break;
 265	}
 266}
 267
 268static void
 269atombios_set_crtc_dtd_timing(struct drm_crtc *crtc,
 270			     struct drm_display_mode *mode)
 271{
 272	struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
 273	struct drm_device *dev = crtc->dev;
 274	struct radeon_device *rdev = dev->dev_private;
 275	SET_CRTC_USING_DTD_TIMING_PARAMETERS args;
 276	int index = GetIndexIntoMasterTable(COMMAND, SetCRTC_UsingDTDTiming);
 277	u16 misc = 0;
 278
 279	memset(&args, 0, sizeof(args));
 280	args.usH_Size = cpu_to_le16(mode->crtc_hdisplay - (radeon_crtc->h_border * 2));
 281	args.usH_Blanking_Time =
 282		cpu_to_le16(mode->crtc_hblank_end - mode->crtc_hdisplay + (radeon_crtc->h_border * 2));
 283	args.usV_Size = cpu_to_le16(mode->crtc_vdisplay - (radeon_crtc->v_border * 2));
 284	args.usV_Blanking_Time =
 285		cpu_to_le16(mode->crtc_vblank_end - mode->crtc_vdisplay + (radeon_crtc->v_border * 2));
 286	args.usH_SyncOffset =
 287		cpu_to_le16(mode->crtc_hsync_start - mode->crtc_hdisplay + radeon_crtc->h_border);
 288	args.usH_SyncWidth =
 289		cpu_to_le16(mode->crtc_hsync_end - mode->crtc_hsync_start);
 290	args.usV_SyncOffset =
 291		cpu_to_le16(mode->crtc_vsync_start - mode->crtc_vdisplay + radeon_crtc->v_border);
 292	args.usV_SyncWidth =
 293		cpu_to_le16(mode->crtc_vsync_end - mode->crtc_vsync_start);
 294	args.ucH_Border = radeon_crtc->h_border;
 295	args.ucV_Border = radeon_crtc->v_border;
 296
 297	if (mode->flags & DRM_MODE_FLAG_NVSYNC)
 298		misc |= ATOM_VSYNC_POLARITY;
 299	if (mode->flags & DRM_MODE_FLAG_NHSYNC)
 300		misc |= ATOM_HSYNC_POLARITY;
 301	if (mode->flags & DRM_MODE_FLAG_CSYNC)
 302		misc |= ATOM_COMPOSITESYNC;
 303	if (mode->flags & DRM_MODE_FLAG_INTERLACE)
 304		misc |= ATOM_INTERLACE;
 305	if (mode->flags & DRM_MODE_FLAG_DBLSCAN)
 306		misc |= ATOM_DOUBLE_CLOCK_MODE;
 307
 308	args.susModeMiscInfo.usAccess = cpu_to_le16(misc);
 309	args.ucCRTC = radeon_crtc->crtc_id;
 310
 311	atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args);
 312}
 313
 314static void atombios_crtc_set_timing(struct drm_crtc *crtc,
 315				     struct drm_display_mode *mode)
 316{
 317	struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
 318	struct drm_device *dev = crtc->dev;
 319	struct radeon_device *rdev = dev->dev_private;
 320	SET_CRTC_TIMING_PARAMETERS_PS_ALLOCATION args;
 321	int index = GetIndexIntoMasterTable(COMMAND, SetCRTC_Timing);
 322	u16 misc = 0;
 323
 324	memset(&args, 0, sizeof(args));
 325	args.usH_Total = cpu_to_le16(mode->crtc_htotal);
 326	args.usH_Disp = cpu_to_le16(mode->crtc_hdisplay);
 327	args.usH_SyncStart = cpu_to_le16(mode->crtc_hsync_start);
 328	args.usH_SyncWidth =
 329		cpu_to_le16(mode->crtc_hsync_end - mode->crtc_hsync_start);
 330	args.usV_Total = cpu_to_le16(mode->crtc_vtotal);
 331	args.usV_Disp = cpu_to_le16(mode->crtc_vdisplay);
 332	args.usV_SyncStart = cpu_to_le16(mode->crtc_vsync_start);
 333	args.usV_SyncWidth =
 334		cpu_to_le16(mode->crtc_vsync_end - mode->crtc_vsync_start);
 335
 336	args.ucOverscanRight = radeon_crtc->h_border;
 337	args.ucOverscanLeft = radeon_crtc->h_border;
 338	args.ucOverscanBottom = radeon_crtc->v_border;
 339	args.ucOverscanTop = radeon_crtc->v_border;
 340
 341	if (mode->flags & DRM_MODE_FLAG_NVSYNC)
 342		misc |= ATOM_VSYNC_POLARITY;
 343	if (mode->flags & DRM_MODE_FLAG_NHSYNC)
 344		misc |= ATOM_HSYNC_POLARITY;
 345	if (mode->flags & DRM_MODE_FLAG_CSYNC)
 346		misc |= ATOM_COMPOSITESYNC;
 347	if (mode->flags & DRM_MODE_FLAG_INTERLACE)
 348		misc |= ATOM_INTERLACE;
 349	if (mode->flags & DRM_MODE_FLAG_DBLSCAN)
 350		misc |= ATOM_DOUBLE_CLOCK_MODE;
 351
 352	args.susModeMiscInfo.usAccess = cpu_to_le16(misc);
 353	args.ucCRTC = radeon_crtc->crtc_id;
 354
 355	atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args);
 356}
 357
 358static void atombios_disable_ss(struct drm_crtc *crtc)
 359{
 360	struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
 361	struct drm_device *dev = crtc->dev;
 362	struct radeon_device *rdev = dev->dev_private;
 363	u32 ss_cntl;
 364
 365	if (ASIC_IS_DCE4(rdev)) {
 366		switch (radeon_crtc->pll_id) {
 367		case ATOM_PPLL1:
 368			ss_cntl = RREG32(EVERGREEN_P1PLL_SS_CNTL);
 369			ss_cntl &= ~EVERGREEN_PxPLL_SS_EN;
 370			WREG32(EVERGREEN_P1PLL_SS_CNTL, ss_cntl);
 371			break;
 372		case ATOM_PPLL2:
 373			ss_cntl = RREG32(EVERGREEN_P2PLL_SS_CNTL);
 374			ss_cntl &= ~EVERGREEN_PxPLL_SS_EN;
 375			WREG32(EVERGREEN_P2PLL_SS_CNTL, ss_cntl);
 376			break;
 377		case ATOM_DCPLL:
 378		case ATOM_PPLL_INVALID:
 379			return;
 380		}
 381	} else if (ASIC_IS_AVIVO(rdev)) {
 382		switch (radeon_crtc->pll_id) {
 383		case ATOM_PPLL1:
 384			ss_cntl = RREG32(AVIVO_P1PLL_INT_SS_CNTL);
 385			ss_cntl &= ~1;
 386			WREG32(AVIVO_P1PLL_INT_SS_CNTL, ss_cntl);
 387			break;
 388		case ATOM_PPLL2:
 389			ss_cntl = RREG32(AVIVO_P2PLL_INT_SS_CNTL);
 390			ss_cntl &= ~1;
 391			WREG32(AVIVO_P2PLL_INT_SS_CNTL, ss_cntl);
 392			break;
 393		case ATOM_DCPLL:
 394		case ATOM_PPLL_INVALID:
 395			return;
 396		}
 397	}
 398}
 399
 400
 401union atom_enable_ss {
 402	ENABLE_LVDS_SS_PARAMETERS lvds_ss;
 403	ENABLE_LVDS_SS_PARAMETERS_V2 lvds_ss_2;
 404	ENABLE_SPREAD_SPECTRUM_ON_PPLL_PS_ALLOCATION v1;
 405	ENABLE_SPREAD_SPECTRUM_ON_PPLL_V2 v2;
 406	ENABLE_SPREAD_SPECTRUM_ON_PPLL_V3 v3;
 407};
 408
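/*
 * Enable or disable spread spectrum on a PLL.  The parameter layout
 * depends on the display engine generation (v3 on DCE5, v2 on DCE4,
 * v1 on DCE3, LVDS-style tables on older AVIVO/legacy parts), hence
 * the union of table versions above.
 */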
 409static void atombios_crtc_program_ss(struct drm_crtc *crtc,
 410				     int enable,
 411				     int pll_id,
 412				     struct radeon_atom_ss *ss)
 413{
 414	struct drm_device *dev = crtc->dev;
 415	struct radeon_device *rdev = dev->dev_private;
 416	int index = GetIndexIntoMasterTable(COMMAND, EnableSpreadSpectrumOnPPLL);
 417	union atom_enable_ss args;
 418
 419	memset(&args, 0, sizeof(args));
 420
 421	if (ASIC_IS_DCE5(rdev)) {
 422		args.v3.usSpreadSpectrumAmountFrac = cpu_to_le16(0);
 423		args.v3.ucSpreadSpectrumType = ss->type & ATOM_SS_CENTRE_SPREAD_MODE_MASK;
 424		switch (pll_id) {
 425		case ATOM_PPLL1:
 426			args.v3.ucSpreadSpectrumType |= ATOM_PPLL_SS_TYPE_V3_P1PLL;
 427			args.v3.usSpreadSpectrumAmount = cpu_to_le16(ss->amount);
 428			args.v3.usSpreadSpectrumStep = cpu_to_le16(ss->step);
 429			break;
 430		case ATOM_PPLL2:
 431			args.v3.ucSpreadSpectrumType |= ATOM_PPLL_SS_TYPE_V3_P2PLL;
 432			args.v3.usSpreadSpectrumAmount = cpu_to_le16(ss->amount);
 433			args.v3.usSpreadSpectrumStep = cpu_to_le16(ss->step);
 434			break;
 435		case ATOM_DCPLL:
 436			args.v3.ucSpreadSpectrumType |= ATOM_PPLL_SS_TYPE_V3_DCPLL;
 437			args.v3.usSpreadSpectrumAmount = cpu_to_le16(0);
 438			args.v3.usSpreadSpectrumStep = cpu_to_le16(0);
 439			break;
 440		case ATOM_PPLL_INVALID:
 441			return;
 442		}
 443		args.v3.ucEnable = enable;
 444		if ((ss->percentage == 0) || (ss->type & ATOM_EXTERNAL_SS_MASK))
 445			args.v3.ucEnable = ATOM_DISABLE;
 446	} else if (ASIC_IS_DCE4(rdev)) {
 447		args.v2.usSpreadSpectrumPercentage = cpu_to_le16(ss->percentage);
 448		args.v2.ucSpreadSpectrumType = ss->type & ATOM_SS_CENTRE_SPREAD_MODE_MASK;
 449		switch (pll_id) {
 450		case ATOM_PPLL1:
 451			args.v2.ucSpreadSpectrumType |= ATOM_PPLL_SS_TYPE_V2_P1PLL;
 452			args.v2.usSpreadSpectrumAmount = cpu_to_le16(ss->amount);
 453			args.v2.usSpreadSpectrumStep = cpu_to_le16(ss->step);
 454			break;
 455		case ATOM_PPLL2:
 456			args.v2.ucSpreadSpectrumType |= ATOM_PPLL_SS_TYPE_V2_P2PLL;
 457			args.v2.usSpreadSpectrumAmount = cpu_to_le16(ss->amount);
 458			args.v2.usSpreadSpectrumStep = cpu_to_le16(ss->step);
 459			break;
 460		case ATOM_DCPLL:
 461			args.v2.ucSpreadSpectrumType |= ATOM_PPLL_SS_TYPE_V2_DCPLL;
 462			args.v2.usSpreadSpectrumAmount = cpu_to_le16(0);
 463			args.v2.usSpreadSpectrumStep = cpu_to_le16(0);
 464			break;
 465		case ATOM_PPLL_INVALID:
 466			return;
 467		}
 468		args.v2.ucEnable = enable;
 469		if ((ss->percentage == 0) || (ss->type & ATOM_EXTERNAL_SS_MASK))
 470			args.v2.ucEnable = ATOM_DISABLE;
 471	} else if (ASIC_IS_DCE3(rdev)) {
 472		args.v1.usSpreadSpectrumPercentage = cpu_to_le16(ss->percentage);
 473		args.v1.ucSpreadSpectrumType = ss->type & ATOM_SS_CENTRE_SPREAD_MODE_MASK;
 474		args.v1.ucSpreadSpectrumStep = ss->step;
 475		args.v1.ucSpreadSpectrumDelay = ss->delay;
 476		args.v1.ucSpreadSpectrumRange = ss->range;
 477		args.v1.ucPpll = pll_id;
 478		args.v1.ucEnable = enable;
 479	} else if (ASIC_IS_AVIVO(rdev)) {
 480		if ((enable == ATOM_DISABLE) || (ss->percentage == 0) ||
 481		    (ss->type & ATOM_EXTERNAL_SS_MASK)) {
 482			atombios_disable_ss(crtc);
 483			return;
 484		}
 485		args.lvds_ss_2.usSpreadSpectrumPercentage = cpu_to_le16(ss->percentage);
 486		args.lvds_ss_2.ucSpreadSpectrumType = ss->type & ATOM_SS_CENTRE_SPREAD_MODE_MASK;
 487		args.lvds_ss_2.ucSpreadSpectrumStep = ss->step;
 488		args.lvds_ss_2.ucSpreadSpectrumDelay = ss->delay;
 489		args.lvds_ss_2.ucSpreadSpectrumRange = ss->range;
 490		args.lvds_ss_2.ucEnable = enable;
 491	} else {
 492		if ((enable == ATOM_DISABLE) || (ss->percentage == 0) ||
 493		    (ss->type & ATOM_EXTERNAL_SS_MASK)) {
 494			atombios_disable_ss(crtc);
 495			return;
 496		}
 497		args.lvds_ss.usSpreadSpectrumPercentage = cpu_to_le16(ss->percentage);
 498		args.lvds_ss.ucSpreadSpectrumType = ss->type & ATOM_SS_CENTRE_SPREAD_MODE_MASK;
 499		args.lvds_ss.ucSpreadSpectrumStepSize_Delay = (ss->step & 3) << 2;
 500		args.lvds_ss.ucSpreadSpectrumStepSize_Delay |= (ss->delay & 7) << 4;
 501		args.lvds_ss.ucEnable = enable;
 502	}
 503	atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args);
 504}
 505
 506union adjust_pixel_clock {
 507	ADJUST_DISPLAY_PLL_PS_ALLOCATION v1;
 508	ADJUST_DISPLAY_PLL_PS_ALLOCATION_V3 v3;
 509};
 510
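/*
 * Set up the radeon_pll flags for the encoder driving this CRTC and, on
 * DCE3+, run the AdjustDisplayPll table so the BIOS can tweak the pixel
 * clock (fixed DP link rates, dual link above 165 MHz, HDMI deep colour)
 * and optionally dictate the reference and post dividers.
 */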
 511static u32 atombios_adjust_pll(struct drm_crtc *crtc,
 512			       struct drm_display_mode *mode,
 513			       struct radeon_pll *pll,
 514			       bool ss_enabled,
 515			       struct radeon_atom_ss *ss)
 516{
 517	struct drm_device *dev = crtc->dev;
 518	struct radeon_device *rdev = dev->dev_private;
 519	struct drm_encoder *encoder = NULL;
 520	struct radeon_encoder *radeon_encoder = NULL;
 521	struct drm_connector *connector = NULL;
 522	u32 adjusted_clock = mode->clock;
 523	int encoder_mode = 0;
 524	u32 dp_clock = mode->clock;
 525	int bpc = 8;
 526
 527	/* reset the pll flags */
 528	pll->flags = 0;
 529
 530	if (ASIC_IS_AVIVO(rdev)) {
 531		if ((rdev->family == CHIP_RS600) ||
 532		    (rdev->family == CHIP_RS690) ||
 533		    (rdev->family == CHIP_RS740))
 534			pll->flags |= (/*RADEON_PLL_USE_FRAC_FB_DIV |*/
 535				       RADEON_PLL_PREFER_CLOSEST_LOWER);
 536
 537		if (ASIC_IS_DCE32(rdev) && mode->clock > 200000)	/* range limits??? */
 538			pll->flags |= RADEON_PLL_PREFER_HIGH_FB_DIV;
 539		else
 540			pll->flags |= RADEON_PLL_PREFER_LOW_REF_DIV;
 541
 542		if (rdev->family < CHIP_RV770)
 543			pll->flags |= RADEON_PLL_PREFER_MINM_OVER_MAXP;
 544	} else {
 545		pll->flags |= RADEON_PLL_LEGACY;
 546
 547		if (mode->clock > 200000)	/* range limits??? */
 548			pll->flags |= RADEON_PLL_PREFER_HIGH_FB_DIV;
 549		else
 550			pll->flags |= RADEON_PLL_PREFER_LOW_REF_DIV;
 551	}
 552
 553	list_for_each_entry(encoder, &dev->mode_config.encoder_list, head) {
 554		if (encoder->crtc == crtc) {
 555			radeon_encoder = to_radeon_encoder(encoder);
 556			connector = radeon_get_connector_for_encoder(encoder);
 557			if (connector)
 558				bpc = connector->display_info.bpc;
 559			encoder_mode = atombios_get_encoder_mode(encoder);
 560			if ((radeon_encoder->devices & (ATOM_DEVICE_LCD_SUPPORT | ATOM_DEVICE_DFP_SUPPORT)) ||
 561			    radeon_encoder_is_dp_bridge(encoder)) {
 562				if (connector) {
 563					struct radeon_connector *radeon_connector = to_radeon_connector(connector);
 564					struct radeon_connector_atom_dig *dig_connector =
 565						radeon_connector->con_priv;
 566
 567					dp_clock = dig_connector->dp_clock;
 568				}
 569			}
 570
 571			/* use recommended ref_div for ss */
 572			if (radeon_encoder->devices & (ATOM_DEVICE_LCD_SUPPORT)) {
 573				if (ss_enabled) {
 574					if (ss->refdiv) {
 575						pll->flags |= RADEON_PLL_USE_REF_DIV;
 576						pll->reference_div = ss->refdiv;
 577						if (ASIC_IS_AVIVO(rdev))
 578							pll->flags |= RADEON_PLL_USE_FRAC_FB_DIV;
 579					}
 580				}
 581			}
 582
 583			if (ASIC_IS_AVIVO(rdev)) {
 584				/* DVO wants 2x pixel clock if the DVO chip is in 12 bit mode */
 585				if (radeon_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DVO1)
 586					adjusted_clock = mode->clock * 2;
 587				if (radeon_encoder->active_device & (ATOM_DEVICE_TV_SUPPORT))
 588					pll->flags |= RADEON_PLL_PREFER_CLOSEST_LOWER;
 589				if (radeon_encoder->devices & (ATOM_DEVICE_LCD_SUPPORT))
 590					pll->flags |= RADEON_PLL_IS_LCD;
 591			} else {
 592				if (encoder->encoder_type != DRM_MODE_ENCODER_DAC)
 593					pll->flags |= RADEON_PLL_NO_ODD_POST_DIV;
 594				if (encoder->encoder_type == DRM_MODE_ENCODER_LVDS)
 595					pll->flags |= RADEON_PLL_USE_REF_DIV;
 596			}
 597			break;
 598		}
 599	}
 600
 601	/* DCE3+ has an AdjustDisplayPll that will adjust the pixel clock
 602	 * accordingly based on the encoder/transmitter to work around
 603	 * special hw requirements.
 604	 */
 605	if (ASIC_IS_DCE3(rdev)) {
 606		union adjust_pixel_clock args;
 607		u8 frev, crev;
 608		int index;
 609
 610		index = GetIndexIntoMasterTable(COMMAND, AdjustDisplayPll);
 611		if (!atom_parse_cmd_header(rdev->mode_info.atom_context, index, &frev,
 612					   &crev))
 613			return adjusted_clock;
 614
 615		memset(&args, 0, sizeof(args));
 616
 617		switch (frev) {
 618		case 1:
 619			switch (crev) {
 620			case 1:
 621			case 2:
 622				args.v1.usPixelClock = cpu_to_le16(mode->clock / 10);
 623				args.v1.ucTransmitterID = radeon_encoder->encoder_id;
 624				args.v1.ucEncodeMode = encoder_mode;
 625				if (ss_enabled && ss->percentage)
 626					args.v1.ucConfig |=
 627						ADJUST_DISPLAY_CONFIG_SS_ENABLE;
 628
 629				atom_execute_table(rdev->mode_info.atom_context,
 630						   index, (uint32_t *)&args);
 631				adjusted_clock = le16_to_cpu(args.v1.usPixelClock) * 10;
 632				break;
 633			case 3:
 634				args.v3.sInput.usPixelClock = cpu_to_le16(mode->clock / 10);
 635				args.v3.sInput.ucTransmitterID = radeon_encoder->encoder_id;
 636				args.v3.sInput.ucEncodeMode = encoder_mode;
 637				args.v3.sInput.ucDispPllConfig = 0;
 638				if (ss_enabled && ss->percentage)
 639					args.v3.sInput.ucDispPllConfig |=
 640						DISPPLL_CONFIG_SS_ENABLE;
 641				if (radeon_encoder->devices & (ATOM_DEVICE_DFP_SUPPORT) ||
 642				    radeon_encoder_is_dp_bridge(encoder)) {
 643					struct radeon_encoder_atom_dig *dig = radeon_encoder->enc_priv;
 644					if (encoder_mode == ATOM_ENCODER_MODE_DP) {
 645						args.v3.sInput.ucDispPllConfig |=
 646							DISPPLL_CONFIG_COHERENT_MODE;
 647						/* 16200 or 27000 */
 648						args.v3.sInput.usPixelClock = cpu_to_le16(dp_clock / 10);
 649					} else {
 650						if (encoder_mode == ATOM_ENCODER_MODE_HDMI) {
 651							/* deep color support */
 652							args.v3.sInput.usPixelClock =
 653								cpu_to_le16((mode->clock * bpc / 8) / 10);
 654						}
 655						if (dig->coherent_mode)
 656							args.v3.sInput.ucDispPllConfig |=
 657								DISPPLL_CONFIG_COHERENT_MODE;
 658						if (mode->clock > 165000)
 659							args.v3.sInput.ucDispPllConfig |=
 660								DISPPLL_CONFIG_DUAL_LINK;
 661					}
 662				} else if (radeon_encoder->devices & (ATOM_DEVICE_LCD_SUPPORT)) {
 663					if (encoder_mode == ATOM_ENCODER_MODE_DP) {
 664						args.v3.sInput.ucDispPllConfig |=
 665							DISPPLL_CONFIG_COHERENT_MODE;
 666						/* 16200 or 27000 */
 667						args.v3.sInput.usPixelClock = cpu_to_le16(dp_clock / 10);
 668					} else if (encoder_mode != ATOM_ENCODER_MODE_LVDS) {
 669						if (mode->clock > 165000)
 670							args.v3.sInput.ucDispPllConfig |=
 671								DISPPLL_CONFIG_DUAL_LINK;
 672					}
 673				}
 674				if (radeon_encoder_is_dp_bridge(encoder)) {
 675					struct drm_encoder *ext_encoder = radeon_atom_get_external_encoder(encoder);
 676					struct radeon_encoder *ext_radeon_encoder = to_radeon_encoder(ext_encoder);
 677					args.v3.sInput.ucExtTransmitterID = ext_radeon_encoder->encoder_id;
 678				} else
 679					args.v3.sInput.ucExtTransmitterID = 0;
 680
 681				atom_execute_table(rdev->mode_info.atom_context,
 682						   index, (uint32_t *)&args);
 683				adjusted_clock = le32_to_cpu(args.v3.sOutput.ulDispPllFreq) * 10;
 684				if (args.v3.sOutput.ucRefDiv) {
 685					pll->flags |= RADEON_PLL_USE_FRAC_FB_DIV;
 686					pll->flags |= RADEON_PLL_USE_REF_DIV;
 687					pll->reference_div = args.v3.sOutput.ucRefDiv;
 688				}
 689				if (args.v3.sOutput.ucPostDiv) {
 690					pll->flags |= RADEON_PLL_USE_FRAC_FB_DIV;
 691					pll->flags |= RADEON_PLL_USE_POST_DIV;
 692					pll->post_div = args.v3.sOutput.ucPostDiv;
 693				}
 694				break;
 695			default:
 696				DRM_ERROR("Unknown table version %d %d\n", frev, crev);
 697				return adjusted_clock;
 698			}
 699			break;
 700		default:
 701			DRM_ERROR("Unknown table version %d %d\n", frev, crev);
 702			return adjusted_clock;
 703		}
 704	}
 705	return adjusted_clock;
 706}
 707
 708union set_pixel_clock {
 709	SET_PIXEL_CLOCK_PS_ALLOCATION base;
 710	PIXEL_CLOCK_PARAMETERS v1;
 711	PIXEL_CLOCK_PARAMETERS_V2 v2;
 712	PIXEL_CLOCK_PARAMETERS_V3 v3;
 713	PIXEL_CLOCK_PARAMETERS_V5 v5;
 714	PIXEL_CLOCK_PARAMETERS_V6 v6;
 715};
 716
 717/* on DCE5, make sure the voltage is high enough to support the
 718 * required disp clk.
 719 */
 720static void atombios_crtc_set_dcpll(struct drm_crtc *crtc,
 721				    u32 dispclk)
 722{
 723	struct drm_device *dev = crtc->dev;
 724	struct radeon_device *rdev = dev->dev_private;
 725	u8 frev, crev;
 726	int index;
 727	union set_pixel_clock args;
 728
 729	memset(&args, 0, sizeof(args));
 730
 731	index = GetIndexIntoMasterTable(COMMAND, SetPixelClock);
 732	if (!atom_parse_cmd_header(rdev->mode_info.atom_context, index, &frev,
 733				   &crev))
 734		return;
 735
 736	switch (frev) {
 737	case 1:
 738		switch (crev) {
 739		case 5:
 740			/* if the default dcpll clock is specified,
 741			 * SetPixelClock provides the dividers
 742			 */
 743			args.v5.ucCRTC = ATOM_CRTC_INVALID;
 744			args.v5.usPixelClock = cpu_to_le16(dispclk);
 745			args.v5.ucPpll = ATOM_DCPLL;
 746			break;
 747		case 6:
 748			/* if the default dcpll clock is specified,
 749			 * SetPixelClock provides the dividers
 750			 */
 751			args.v6.ulDispEngClkFreq = cpu_to_le32(dispclk);
 752			args.v6.ucPpll = ATOM_DCPLL;
 753			break;
 754		default:
 755			DRM_ERROR("Unknown table version %d %d\n", frev, crev);
 756			return;
 757		}
 758		break;
 759	default:
 760		DRM_ERROR("Unknown table version %d %d\n", frev, crev);
 761		return;
 762	}
 763	atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args);
 764}
 765
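/*
 * Program a PPLL/DCPLL with precomputed divider values through the
 * SetPixelClock table.  Newer parameter revisions also carry the
 * transmitter/encoder mode and the HDMI colour depth.
 */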
 766static void atombios_crtc_program_pll(struct drm_crtc *crtc,
 767				      int crtc_id,
 768				      int pll_id,
 769				      u32 encoder_mode,
 770				      u32 encoder_id,
 771				      u32 clock,
 772				      u32 ref_div,
 773				      u32 fb_div,
 774				      u32 frac_fb_div,
 775				      u32 post_div,
 776				      int bpc,
 777				      bool ss_enabled,
 778				      struct radeon_atom_ss *ss)
 779{
 780	struct drm_device *dev = crtc->dev;
 781	struct radeon_device *rdev = dev->dev_private;
 782	u8 frev, crev;
 783	int index = GetIndexIntoMasterTable(COMMAND, SetPixelClock);
 784	union set_pixel_clock args;
 785
 786	memset(&args, 0, sizeof(args));
 787
 788	if (!atom_parse_cmd_header(rdev->mode_info.atom_context, index, &frev,
 789				   &crev))
 790		return;
 791
 792	switch (frev) {
 793	case 1:
 794		switch (crev) {
 795		case 1:
 796			if (clock == ATOM_DISABLE)
 797				return;
 798			args.v1.usPixelClock = cpu_to_le16(clock / 10);
 799			args.v1.usRefDiv = cpu_to_le16(ref_div);
 800			args.v1.usFbDiv = cpu_to_le16(fb_div);
 801			args.v1.ucFracFbDiv = frac_fb_div;
 802			args.v1.ucPostDiv = post_div;
 803			args.v1.ucPpll = pll_id;
 804			args.v1.ucCRTC = crtc_id;
 805			args.v1.ucRefDivSrc = 1;
 806			break;
 807		case 2:
 808			args.v2.usPixelClock = cpu_to_le16(clock / 10);
 809			args.v2.usRefDiv = cpu_to_le16(ref_div);
 810			args.v2.usFbDiv = cpu_to_le16(fb_div);
 811			args.v2.ucFracFbDiv = frac_fb_div;
 812			args.v2.ucPostDiv = post_div;
 813			args.v2.ucPpll = pll_id;
 814			args.v2.ucCRTC = crtc_id;
 815			args.v2.ucRefDivSrc = 1;
 816			break;
 817		case 3:
 818			args.v3.usPixelClock = cpu_to_le16(clock / 10);
 819			args.v3.usRefDiv = cpu_to_le16(ref_div);
 820			args.v3.usFbDiv = cpu_to_le16(fb_div);
 821			args.v3.ucFracFbDiv = frac_fb_div;
 822			args.v3.ucPostDiv = post_div;
 823			args.v3.ucPpll = pll_id;
 824			args.v3.ucMiscInfo = (pll_id << 2);
 825			if (ss_enabled && (ss->type & ATOM_EXTERNAL_SS_MASK))
 826				args.v3.ucMiscInfo |= PIXEL_CLOCK_MISC_REF_DIV_SRC;
 827			args.v3.ucTransmitterId = encoder_id;
 828			args.v3.ucEncoderMode = encoder_mode;
 829			break;
 830		case 5:
 831			args.v5.ucCRTC = crtc_id;
 832			args.v5.usPixelClock = cpu_to_le16(clock / 10);
 833			args.v5.ucRefDiv = ref_div;
 834			args.v5.usFbDiv = cpu_to_le16(fb_div);
 835			args.v5.ulFbDivDecFrac = cpu_to_le32(frac_fb_div * 100000);
 836			args.v5.ucPostDiv = post_div;
 837			args.v5.ucMiscInfo = 0; /* HDMI depth, etc. */
 838			if (ss_enabled && (ss->type & ATOM_EXTERNAL_SS_MASK))
 839				args.v5.ucMiscInfo |= PIXEL_CLOCK_V5_MISC_REF_DIV_SRC;
 840			switch (bpc) {
 841			case 8:
 842			default:
 843				args.v5.ucMiscInfo |= PIXEL_CLOCK_V5_MISC_HDMI_24BPP;
 844				break;
 845			case 10:
 846				args.v5.ucMiscInfo |= PIXEL_CLOCK_V5_MISC_HDMI_30BPP;
 847				break;
 848			}
 849			args.v5.ucTransmitterID = encoder_id;
 850			args.v5.ucEncoderMode = encoder_mode;
 851			args.v5.ucPpll = pll_id;
 852			break;
 853		case 6:
 854			args.v6.ulCrtcPclkFreq.ucCRTC = crtc_id;
 855			args.v6.ulCrtcPclkFreq.ulPixelClock = cpu_to_le32(clock / 10);
 856			args.v6.ucRefDiv = ref_div;
 857			args.v6.usFbDiv = cpu_to_le16(fb_div);
 858			args.v6.ulFbDivDecFrac = cpu_to_le32(frac_fb_div * 100000);
 859			args.v6.ucPostDiv = post_div;
 860			args.v6.ucMiscInfo = 0; /* HDMI depth, etc. */
 861			if (ss_enabled && (ss->type & ATOM_EXTERNAL_SS_MASK))
 862				args.v6.ucMiscInfo |= PIXEL_CLOCK_V6_MISC_REF_DIV_SRC;
 863			switch (bpc) {
 864			case 8:
 865			default:
 866				args.v6.ucMiscInfo |= PIXEL_CLOCK_V6_MISC_HDMI_24BPP;
 867				break;
 868			case 10:
 869				args.v6.ucMiscInfo |= PIXEL_CLOCK_V6_MISC_HDMI_30BPP;
 870				break;
 871			case 12:
 872				args.v6.ucMiscInfo |= PIXEL_CLOCK_V6_MISC_HDMI_36BPP;
 873				break;
 874			case 16:
 875				args.v6.ucMiscInfo |= PIXEL_CLOCK_V6_MISC_HDMI_48BPP;
 876				break;
 877			}
 878			args.v6.ucTransmitterID = encoder_id;
 879			args.v6.ucEncoderMode = encoder_mode;
 880			args.v6.ucPpll = pll_id;
 881			break;
 882		default:
 883			DRM_ERROR("Unknown table version %d %d\n", frev, crev);
 884			return;
 885		}
 886		break;
 887	default:
 888		DRM_ERROR("Unknown table version %d %d\n", frev, crev);
 889		return;
 890	}
 891
 892	atom_execute_table(rdev->mode_info.atom_context, index, (uint32_t *)&args);
 893}
 894
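/*
 * Full pixel clock setup for a mode set: look up spread spectrum data for
 * the display type, let atombios_adjust_pll() massage the clock, compute
 * the dividers (AVIVO or legacy algorithm), and program the PLL with SS
 * disabled during the update and re-enabled afterwards if requested.
 */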
 895static void atombios_crtc_set_pll(struct drm_crtc *crtc, struct drm_display_mode *mode)
 896{
 897	struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
 898	struct drm_device *dev = crtc->dev;
 899	struct radeon_device *rdev = dev->dev_private;
 900	struct drm_encoder *encoder = NULL;
 901	struct radeon_encoder *radeon_encoder = NULL;
 902	u32 pll_clock = mode->clock;
 903	u32 ref_div = 0, fb_div = 0, frac_fb_div = 0, post_div = 0;
 904	struct radeon_pll *pll;
 905	u32 adjusted_clock;
 906	int encoder_mode = 0;
 907	struct radeon_atom_ss ss;
 908	bool ss_enabled = false;
 909	int bpc = 8;
 910
 911	list_for_each_entry(encoder, &dev->mode_config.encoder_list, head) {
 912		if (encoder->crtc == crtc) {
 913			radeon_encoder = to_radeon_encoder(encoder);
 914			encoder_mode = atombios_get_encoder_mode(encoder);
 915			break;
 916		}
 917	}
 918
 919	if (!radeon_encoder)
 920		return;
 921
 922	switch (radeon_crtc->pll_id) {
 923	case ATOM_PPLL1:
 924		pll = &rdev->clock.p1pll;
 925		break;
 926	case ATOM_PPLL2:
 927		pll = &rdev->clock.p2pll;
 928		break;
 929	case ATOM_DCPLL:
 930	case ATOM_PPLL_INVALID:
 931	default:
 932		pll = &rdev->clock.dcpll;
 933		break;
 934	}
 935
 936	if (radeon_encoder->active_device &
 937	    (ATOM_DEVICE_LCD_SUPPORT | ATOM_DEVICE_DFP_SUPPORT)) {
 938		struct radeon_encoder_atom_dig *dig = radeon_encoder->enc_priv;
 939		struct drm_connector *connector =
 940			radeon_get_connector_for_encoder(encoder);
 941		struct radeon_connector *radeon_connector =
 942			to_radeon_connector(connector);
 943		struct radeon_connector_atom_dig *dig_connector =
 944			radeon_connector->con_priv;
 945		int dp_clock;
 946		bpc = connector->display_info.bpc;
 947
 948		switch (encoder_mode) {
 949		case ATOM_ENCODER_MODE_DP:
 950			/* DP/eDP */
 951			dp_clock = dig_connector->dp_clock / 10;
 952			if (ASIC_IS_DCE4(rdev))
 953				ss_enabled =
 954					radeon_atombios_get_asic_ss_info(rdev, &ss,
 955									 ASIC_INTERNAL_SS_ON_DP,
 956									 dp_clock);
 957			else {
 958				if (dp_clock == 16200) {
 959					ss_enabled =
 960						radeon_atombios_get_ppll_ss_info(rdev, &ss,
 961										 ATOM_DP_SS_ID2);
 962					if (!ss_enabled)
 963						ss_enabled =
 964							radeon_atombios_get_ppll_ss_info(rdev, &ss,
 965											 ATOM_DP_SS_ID1);
 966				} else
 967					ss_enabled =
 968						radeon_atombios_get_ppll_ss_info(rdev, &ss,
 969										 ATOM_DP_SS_ID1);
 970			}
 971			break;
 972		case ATOM_ENCODER_MODE_LVDS:
 973			if (ASIC_IS_DCE4(rdev))
 974				ss_enabled = radeon_atombios_get_asic_ss_info(rdev, &ss,
 975									      dig->lcd_ss_id,
 976									      mode->clock / 10);
 977			else
 978				ss_enabled = radeon_atombios_get_ppll_ss_info(rdev, &ss,
 979									      dig->lcd_ss_id);
 980			break;
 981		case ATOM_ENCODER_MODE_DVI:
 982			if (ASIC_IS_DCE4(rdev))
 983				ss_enabled =
 984					radeon_atombios_get_asic_ss_info(rdev, &ss,
 985									 ASIC_INTERNAL_SS_ON_TMDS,
 986									 mode->clock / 10);
 987			break;
 988		case ATOM_ENCODER_MODE_HDMI:
 989			if (ASIC_IS_DCE4(rdev))
 990				ss_enabled =
 991					radeon_atombios_get_asic_ss_info(rdev, &ss,
 992									 ASIC_INTERNAL_SS_ON_HDMI,
 993									 mode->clock / 10);
 994			break;
 995		default:
 996			break;
 997		}
 998	}
 999
1000	/* adjust pixel clock as needed */
1001	adjusted_clock = atombios_adjust_pll(crtc, mode, pll, ss_enabled, &ss);
1002
1003	if (radeon_encoder->active_device & (ATOM_DEVICE_TV_SUPPORT))
1004		/* TV seems to prefer the legacy algo on some boards */
1005		radeon_compute_pll_legacy(pll, adjusted_clock, &pll_clock, &fb_div, &frac_fb_div,
1006					  &ref_div, &post_div);
1007	else if (ASIC_IS_AVIVO(rdev))
1008		radeon_compute_pll_avivo(pll, adjusted_clock, &pll_clock, &fb_div, &frac_fb_div,
1009					 &ref_div, &post_div);
1010	else
1011		radeon_compute_pll_legacy(pll, adjusted_clock, &pll_clock, &fb_div, &frac_fb_div,
1012					  &ref_div, &post_div);
1013
1014	atombios_crtc_program_ss(crtc, ATOM_DISABLE, radeon_crtc->pll_id, &ss);
1015
1016	atombios_crtc_program_pll(crtc, radeon_crtc->crtc_id, radeon_crtc->pll_id,
1017				  encoder_mode, radeon_encoder->encoder_id, mode->clock,
1018				  ref_div, fb_div, frac_fb_div, post_div, bpc, ss_enabled, &ss);
1019
1020	if (ss_enabled) {
1021		/* calculate ss amount and step size */
1022		if (ASIC_IS_DCE4(rdev)) {
1023			u32 step_size;
1024			u32 amount = (((fb_div * 10) + frac_fb_div) * ss.percentage) / 10000;
1025			ss.amount = (amount / 10) & ATOM_PPLL_SS_AMOUNT_V2_FBDIV_MASK;
1026			ss.amount |= ((amount - (amount / 10)) << ATOM_PPLL_SS_AMOUNT_V2_NFRAC_SHIFT) &
1027				ATOM_PPLL_SS_AMOUNT_V2_NFRAC_MASK;
1028			if (ss.type & ATOM_PPLL_SS_TYPE_V2_CENTRE_SPREAD)
1029				step_size = (4 * amount * ref_div * (ss.rate * 2048)) /
1030					(125 * 25 * pll->reference_freq / 100);
1031			else
1032				step_size = (2 * amount * ref_div * (ss.rate * 2048)) /
1033					(125 * 25 * pll->reference_freq / 100);
1034			ss.step = step_size;
1035		}
1036
1037		atombios_crtc_program_ss(crtc, ATOM_ENABLE, radeon_crtc->pll_id, &ss);
1038	}
1039}
1040
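/*
 * DCE4+ scanout setup: pin the framebuffer BO in VRAM (for atomic/pageflip
 * updates it is assumed to be pinned already), program the GRPH surface
 * address, format, tiling and pitch registers, and set up the viewport and
 * flip-at-vblank behaviour.  avivo_crtc_do_set_base() below is the
 * equivalent path for AVIVO/R600-class parts.
 */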
1041static int dce4_crtc_do_set_base(struct drm_crtc *crtc,
1042				 struct drm_framebuffer *fb,
1043				 int x, int y, int atomic)
1044{
1045	struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
1046	struct drm_device *dev = crtc->dev;
1047	struct radeon_device *rdev = dev->dev_private;
1048	struct radeon_framebuffer *radeon_fb;
1049	struct drm_framebuffer *target_fb;
1050	struct drm_gem_object *obj;
1051	struct radeon_bo *rbo;
1052	uint64_t fb_location;
1053	uint32_t fb_format, fb_pitch_pixels, tiling_flags;
1054	u32 fb_swap = EVERGREEN_GRPH_ENDIAN_SWAP(EVERGREEN_GRPH_ENDIAN_NONE);
1055	u32 tmp, viewport_w, viewport_h;
1056	int r;
1057
1058	/* no fb bound */
1059	if (!atomic && !crtc->fb) {
1060		DRM_DEBUG_KMS("No FB bound\n");
1061		return 0;
1062	}
1063
1064	if (atomic) {
1065		radeon_fb = to_radeon_framebuffer(fb);
1066		target_fb = fb;
1067	}
1068	else {
1069		radeon_fb = to_radeon_framebuffer(crtc->fb);
1070		target_fb = crtc->fb;
1071	}
1072
1073	/* If atomic, assume fb object is pinned & idle & fenced and
1074	 * just update base pointers
1075	 */
1076	obj = radeon_fb->obj;
1077	rbo = gem_to_radeon_bo(obj);
1078	r = radeon_bo_reserve(rbo, false);
1079	if (unlikely(r != 0))
1080		return r;
1081
1082	if (atomic)
1083		fb_location = radeon_bo_gpu_offset(rbo);
1084	else {
1085		r = radeon_bo_pin(rbo, RADEON_GEM_DOMAIN_VRAM, &fb_location);
1086		if (unlikely(r != 0)) {
1087			radeon_bo_unreserve(rbo);
1088			return -EINVAL;
1089		}
1090	}
1091
1092	radeon_bo_get_tiling_flags(rbo, &tiling_flags, NULL);
1093	radeon_bo_unreserve(rbo);
1094
1095	switch (target_fb->bits_per_pixel) {
1096	case 8:
1097		fb_format = (EVERGREEN_GRPH_DEPTH(EVERGREEN_GRPH_DEPTH_8BPP) |
1098			     EVERGREEN_GRPH_FORMAT(EVERGREEN_GRPH_FORMAT_INDEXED));
1099		break;
1100	case 15:
1101		fb_format = (EVERGREEN_GRPH_DEPTH(EVERGREEN_GRPH_DEPTH_16BPP) |
1102			     EVERGREEN_GRPH_FORMAT(EVERGREEN_GRPH_FORMAT_ARGB1555));
1103		break;
1104	case 16:
1105		fb_format = (EVERGREEN_GRPH_DEPTH(EVERGREEN_GRPH_DEPTH_16BPP) |
1106			     EVERGREEN_GRPH_FORMAT(EVERGREEN_GRPH_FORMAT_ARGB565));
1107#ifdef __BIG_ENDIAN
1108		fb_swap = EVERGREEN_GRPH_ENDIAN_SWAP(EVERGREEN_GRPH_ENDIAN_8IN16);
1109#endif
1110		break;
1111	case 24:
1112	case 32:
1113		fb_format = (EVERGREEN_GRPH_DEPTH(EVERGREEN_GRPH_DEPTH_32BPP) |
1114			     EVERGREEN_GRPH_FORMAT(EVERGREEN_GRPH_FORMAT_ARGB8888));
1115#ifdef __BIG_ENDIAN
1116		fb_swap = EVERGREEN_GRPH_ENDIAN_SWAP(EVERGREEN_GRPH_ENDIAN_8IN32);
1117#endif
1118		break;
1119	default:
1120		DRM_ERROR("Unsupported screen depth %d\n",
1121			  target_fb->bits_per_pixel);
1122		return -EINVAL;
1123	}
1124
1125	if (tiling_flags & RADEON_TILING_MACRO)
1126		fb_format |= EVERGREEN_GRPH_ARRAY_MODE(EVERGREEN_GRPH_ARRAY_2D_TILED_THIN1);
1127	else if (tiling_flags & RADEON_TILING_MICRO)
1128		fb_format |= EVERGREEN_GRPH_ARRAY_MODE(EVERGREEN_GRPH_ARRAY_1D_TILED_THIN1);
1129
1130	switch (radeon_crtc->crtc_id) {
1131	case 0:
1132		WREG32(AVIVO_D1VGA_CONTROL, 0);
1133		break;
1134	case 1:
1135		WREG32(AVIVO_D2VGA_CONTROL, 0);
1136		break;
1137	case 2:
1138		WREG32(EVERGREEN_D3VGA_CONTROL, 0);
1139		break;
1140	case 3:
1141		WREG32(EVERGREEN_D4VGA_CONTROL, 0);
1142		break;
1143	case 4:
1144		WREG32(EVERGREEN_D5VGA_CONTROL, 0);
1145		break;
1146	case 5:
1147		WREG32(EVERGREEN_D6VGA_CONTROL, 0);
1148		break;
1149	default:
1150		break;
1151	}
1152
1153	WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + radeon_crtc->crtc_offset,
1154	       upper_32_bits(fb_location));
1155	WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS_HIGH + radeon_crtc->crtc_offset,
1156	       upper_32_bits(fb_location));
1157	WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + radeon_crtc->crtc_offset,
1158	       (u32)fb_location & EVERGREEN_GRPH_SURFACE_ADDRESS_MASK);
1159	WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS + radeon_crtc->crtc_offset,
1160	       (u32) fb_location & EVERGREEN_GRPH_SURFACE_ADDRESS_MASK);
1161	WREG32(EVERGREEN_GRPH_CONTROL + radeon_crtc->crtc_offset, fb_format);
1162	WREG32(EVERGREEN_GRPH_SWAP_CONTROL + radeon_crtc->crtc_offset, fb_swap);
1163
1164	WREG32(EVERGREEN_GRPH_SURFACE_OFFSET_X + radeon_crtc->crtc_offset, 0);
1165	WREG32(EVERGREEN_GRPH_SURFACE_OFFSET_Y + radeon_crtc->crtc_offset, 0);
1166	WREG32(EVERGREEN_GRPH_X_START + radeon_crtc->crtc_offset, 0);
1167	WREG32(EVERGREEN_GRPH_Y_START + radeon_crtc->crtc_offset, 0);
1168	WREG32(EVERGREEN_GRPH_X_END + radeon_crtc->crtc_offset, target_fb->width);
1169	WREG32(EVERGREEN_GRPH_Y_END + radeon_crtc->crtc_offset, target_fb->height);
1170
1171	fb_pitch_pixels = target_fb->pitch / (target_fb->bits_per_pixel / 8);
1172	WREG32(EVERGREEN_GRPH_PITCH + radeon_crtc->crtc_offset, fb_pitch_pixels);
1173	WREG32(EVERGREEN_GRPH_ENABLE + radeon_crtc->crtc_offset, 1);
1174
1175	WREG32(EVERGREEN_DESKTOP_HEIGHT + radeon_crtc->crtc_offset,
1176	       target_fb->height);
1177	x &= ~3;
1178	y &= ~1;
1179	WREG32(EVERGREEN_VIEWPORT_START + radeon_crtc->crtc_offset,
1180	       (x << 16) | y);
1181	viewport_w = crtc->mode.hdisplay;
1182	viewport_h = (crtc->mode.vdisplay + 1) & ~1;
1183	WREG32(EVERGREEN_VIEWPORT_SIZE + radeon_crtc->crtc_offset,
1184	       (viewport_w << 16) | viewport_h);
1185
1186	/* pageflip setup */
1187	/* make sure flip is at vb rather than hb */
1188	tmp = RREG32(EVERGREEN_GRPH_FLIP_CONTROL + radeon_crtc->crtc_offset);
1189	tmp &= ~EVERGREEN_GRPH_SURFACE_UPDATE_H_RETRACE_EN;
1190	WREG32(EVERGREEN_GRPH_FLIP_CONTROL + radeon_crtc->crtc_offset, tmp);
1191
1192	/* set pageflip to happen anywhere in vblank interval */
1193	WREG32(EVERGREEN_MASTER_UPDATE_MODE + radeon_crtc->crtc_offset, 0);
1194
1195	if (!atomic && fb && fb != crtc->fb) {
1196		radeon_fb = to_radeon_framebuffer(fb);
1197		rbo = gem_to_radeon_bo(radeon_fb->obj);
1198		r = radeon_bo_reserve(rbo, false);
1199		if (unlikely(r != 0))
1200			return r;
1201		radeon_bo_unpin(rbo);
1202		radeon_bo_unreserve(rbo);
1203	}
1204
1205	/* Bytes per pixel may have changed */
1206	radeon_bandwidth_update(rdev);
1207
1208	return 0;
1209}
1210
1211static int avivo_crtc_do_set_base(struct drm_crtc *crtc,
1212				  struct drm_framebuffer *fb,
1213				  int x, int y, int atomic)
1214{
1215	struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
1216	struct drm_device *dev = crtc->dev;
1217	struct radeon_device *rdev = dev->dev_private;
1218	struct radeon_framebuffer *radeon_fb;
1219	struct drm_gem_object *obj;
1220	struct radeon_bo *rbo;
1221	struct drm_framebuffer *target_fb;
1222	uint64_t fb_location;
1223	uint32_t fb_format, fb_pitch_pixels, tiling_flags;
1224	u32 fb_swap = R600_D1GRPH_SWAP_ENDIAN_NONE;
1225	u32 tmp, viewport_w, viewport_h;
1226	int r;
1227
1228	/* no fb bound */
1229	if (!atomic && !crtc->fb) {
1230		DRM_DEBUG_KMS("No FB bound\n");
1231		return 0;
1232	}
1233
1234	if (atomic) {
1235		radeon_fb = to_radeon_framebuffer(fb);
1236		target_fb = fb;
1237	}
1238	else {
1239		radeon_fb = to_radeon_framebuffer(crtc->fb);
1240		target_fb = crtc->fb;
1241	}
1242
1243	obj = radeon_fb->obj;
1244	rbo = gem_to_radeon_bo(obj);
1245	r = radeon_bo_reserve(rbo, false);
1246	if (unlikely(r != 0))
1247		return r;
1248
1249	/* If atomic, assume fb object is pinned & idle & fenced and
1250	 * just update base pointers
1251	 */
1252	if (atomic)
1253		fb_location = radeon_bo_gpu_offset(rbo);
1254	else {
1255		r = radeon_bo_pin(rbo, RADEON_GEM_DOMAIN_VRAM, &fb_location);
1256		if (unlikely(r != 0)) {
1257			radeon_bo_unreserve(rbo);
1258			return -EINVAL;
1259		}
1260	}
1261	radeon_bo_get_tiling_flags(rbo, &tiling_flags, NULL);
1262	radeon_bo_unreserve(rbo);
1263
1264	switch (target_fb->bits_per_pixel) {
1265	case 8:
1266		fb_format =
1267		    AVIVO_D1GRPH_CONTROL_DEPTH_8BPP |
1268		    AVIVO_D1GRPH_CONTROL_8BPP_INDEXED;
1269		break;
1270	case 15:
1271		fb_format =
1272		    AVIVO_D1GRPH_CONTROL_DEPTH_16BPP |
1273		    AVIVO_D1GRPH_CONTROL_16BPP_ARGB1555;
1274		break;
1275	case 16:
1276		fb_format =
1277		    AVIVO_D1GRPH_CONTROL_DEPTH_16BPP |
1278		    AVIVO_D1GRPH_CONTROL_16BPP_RGB565;
1279#ifdef __BIG_ENDIAN
1280		fb_swap = R600_D1GRPH_SWAP_ENDIAN_16BIT;
1281#endif
1282		break;
1283	case 24:
1284	case 32:
1285		fb_format =
1286		    AVIVO_D1GRPH_CONTROL_DEPTH_32BPP |
1287		    AVIVO_D1GRPH_CONTROL_32BPP_ARGB8888;
1288#ifdef __BIG_ENDIAN
1289		fb_swap = R600_D1GRPH_SWAP_ENDIAN_32BIT;
1290#endif
1291		break;
1292	default:
1293		DRM_ERROR("Unsupported screen depth %d\n",
1294			  target_fb->bits_per_pixel);
1295		return -EINVAL;
1296	}
1297
1298	if (rdev->family >= CHIP_R600) {
1299		if (tiling_flags & RADEON_TILING_MACRO)
1300			fb_format |= R600_D1GRPH_ARRAY_MODE_2D_TILED_THIN1;
1301		else if (tiling_flags & RADEON_TILING_MICRO)
1302			fb_format |= R600_D1GRPH_ARRAY_MODE_1D_TILED_THIN1;
1303	} else {
1304		if (tiling_flags & RADEON_TILING_MACRO)
1305			fb_format |= AVIVO_D1GRPH_MACRO_ADDRESS_MODE;
1306
1307		if (tiling_flags & RADEON_TILING_MICRO)
1308			fb_format |= AVIVO_D1GRPH_TILED;
1309	}
1310
1311	if (radeon_crtc->crtc_id == 0)
1312		WREG32(AVIVO_D1VGA_CONTROL, 0);
1313	else
1314		WREG32(AVIVO_D2VGA_CONTROL, 0);
1315
1316	if (rdev->family >= CHIP_RV770) {
1317		if (radeon_crtc->crtc_id) {
1318			WREG32(R700_D2GRPH_PRIMARY_SURFACE_ADDRESS_HIGH, upper_32_bits(fb_location));
1319			WREG32(R700_D2GRPH_SECONDARY_SURFACE_ADDRESS_HIGH, upper_32_bits(fb_location));
1320		} else {
1321			WREG32(R700_D1GRPH_PRIMARY_SURFACE_ADDRESS_HIGH, upper_32_bits(fb_location));
1322			WREG32(R700_D1GRPH_SECONDARY_SURFACE_ADDRESS_HIGH, upper_32_bits(fb_location));
1323		}
1324	}
1325	WREG32(AVIVO_D1GRPH_PRIMARY_SURFACE_ADDRESS + radeon_crtc->crtc_offset,
1326	       (u32) fb_location);
1327	WREG32(AVIVO_D1GRPH_SECONDARY_SURFACE_ADDRESS +
1328	       radeon_crtc->crtc_offset, (u32) fb_location);
1329	WREG32(AVIVO_D1GRPH_CONTROL + radeon_crtc->crtc_offset, fb_format);
1330	if (rdev->family >= CHIP_R600)
1331		WREG32(R600_D1GRPH_SWAP_CONTROL + radeon_crtc->crtc_offset, fb_swap);
1332
1333	WREG32(AVIVO_D1GRPH_SURFACE_OFFSET_X + radeon_crtc->crtc_offset, 0);
1334	WREG32(AVIVO_D1GRPH_SURFACE_OFFSET_Y + radeon_crtc->crtc_offset, 0);
1335	WREG32(AVIVO_D1GRPH_X_START + radeon_crtc->crtc_offset, 0);
1336	WREG32(AVIVO_D1GRPH_Y_START + radeon_crtc->crtc_offset, 0);
1337	WREG32(AVIVO_D1GRPH_X_END + radeon_crtc->crtc_offset, target_fb->width);
1338	WREG32(AVIVO_D1GRPH_Y_END + radeon_crtc->crtc_offset, target_fb->height);
1339
1340	fb_pitch_pixels = target_fb->pitch / (target_fb->bits_per_pixel / 8);
1341	WREG32(AVIVO_D1GRPH_PITCH + radeon_crtc->crtc_offset, fb_pitch_pixels);
1342	WREG32(AVIVO_D1GRPH_ENABLE + radeon_crtc->crtc_offset, 1);
1343
1344	WREG32(AVIVO_D1MODE_DESKTOP_HEIGHT + radeon_crtc->crtc_offset,
1345	       target_fb->height);
1346	x &= ~3;
1347	y &= ~1;
1348	WREG32(AVIVO_D1MODE_VIEWPORT_START + radeon_crtc->crtc_offset,
1349	       (x << 16) | y);
1350	viewport_w = crtc->mode.hdisplay;
1351	viewport_h = (crtc->mode.vdisplay + 1) & ~1;
1352	WREG32(AVIVO_D1MODE_VIEWPORT_SIZE + radeon_crtc->crtc_offset,
1353	       (viewport_w << 16) | viewport_h);
1354
1355	/* pageflip setup */
1356	/* make sure flip is at vb rather than hb */
1357	tmp = RREG32(AVIVO_D1GRPH_FLIP_CONTROL + radeon_crtc->crtc_offset);
1358	tmp &= ~AVIVO_D1GRPH_SURFACE_UPDATE_H_RETRACE_EN;
1359	WREG32(AVIVO_D1GRPH_FLIP_CONTROL + radeon_crtc->crtc_offset, tmp);
1360
1361	/* set pageflip to happen anywhere in vblank interval */
1362	WREG32(AVIVO_D1MODE_MASTER_UPDATE_MODE + radeon_crtc->crtc_offset, 0);
1363
1364	if (!atomic && fb && fb != crtc->fb) {
1365		radeon_fb = to_radeon_framebuffer(fb);
1366		rbo = gem_to_radeon_bo(radeon_fb->obj);
1367		r = radeon_bo_reserve(rbo, false);
1368		if (unlikely(r != 0))
1369			return r;
1370		radeon_bo_unpin(rbo);
1371		radeon_bo_unreserve(rbo);
1372	}
1373
1374	/* Bytes per pixel may have changed */
1375	radeon_bandwidth_update(rdev);
1376
1377	return 0;
1378}
1379
1380int atombios_crtc_set_base(struct drm_crtc *crtc, int x, int y,
1381			   struct drm_framebuffer *old_fb)
1382{
1383	struct drm_device *dev = crtc->dev;
1384	struct radeon_device *rdev = dev->dev_private;
1385
1386	if (ASIC_IS_DCE4(rdev))
1387		return dce4_crtc_do_set_base(crtc, old_fb, x, y, 0);
1388	else if (ASIC_IS_AVIVO(rdev))
1389		return avivo_crtc_do_set_base(crtc, old_fb, x, y, 0);
1390	else
1391		return radeon_crtc_do_set_base(crtc, old_fb, x, y, 0);
1392}
1393
1394int atombios_crtc_set_base_atomic(struct drm_crtc *crtc,
1395                                  struct drm_framebuffer *fb,
1396				  int x, int y, enum mode_set_atomic state)
1397{
 1398	struct drm_device *dev = crtc->dev;
 1399	struct radeon_device *rdev = dev->dev_private;
1400
1401	if (ASIC_IS_DCE4(rdev))
1402		return dce4_crtc_do_set_base(crtc, fb, x, y, 1);
1403	else if (ASIC_IS_AVIVO(rdev))
1404		return avivo_crtc_do_set_base(crtc, fb, x, y, 1);
1405	else
1406		return radeon_crtc_do_set_base(crtc, fb, x, y, 1);
1407}
1408
1409/* properly set additional regs when using atombios */
1410static void radeon_legacy_atom_fixup(struct drm_crtc *crtc)
1411{
1412	struct drm_device *dev = crtc->dev;
1413	struct radeon_device *rdev = dev->dev_private;
1414	struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
1415	u32 disp_merge_cntl;
1416
1417	switch (radeon_crtc->crtc_id) {
1418	case 0:
1419		disp_merge_cntl = RREG32(RADEON_DISP_MERGE_CNTL);
1420		disp_merge_cntl &= ~RADEON_DISP_RGB_OFFSET_EN;
1421		WREG32(RADEON_DISP_MERGE_CNTL, disp_merge_cntl);
1422		break;
1423	case 1:
1424		disp_merge_cntl = RREG32(RADEON_DISP2_MERGE_CNTL);
1425		disp_merge_cntl &= ~RADEON_DISP2_RGB_OFFSET_EN;
1426		WREG32(RADEON_DISP2_MERGE_CNTL, disp_merge_cntl);
1427		WREG32(RADEON_FP_H2_SYNC_STRT_WID,   RREG32(RADEON_CRTC2_H_SYNC_STRT_WID));
1428		WREG32(RADEON_FP_V2_SYNC_STRT_WID,   RREG32(RADEON_CRTC2_V_SYNC_STRT_WID));
1429		break;
1430	}
1431}
1432
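/*
 * Pick a PLL for this CRTC.  On DCE4+ the PPLLs are a shared resource, so
 * return one that no other CRTC is using, or ATOM_PPLL_INVALID for DP
 * outputs whose reference clock comes from the DCPLL or an external
 * source; older ASICs have a fixed CRTC-to-PLL mapping.
 */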
1433static int radeon_atom_pick_pll(struct drm_crtc *crtc)
1434{
1435	struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
1436	struct drm_device *dev = crtc->dev;
1437	struct radeon_device *rdev = dev->dev_private;
1438	struct drm_encoder *test_encoder;
1439	struct drm_crtc *test_crtc;
1440	uint32_t pll_in_use = 0;
1441
1442	if (ASIC_IS_DCE4(rdev)) {
1443		list_for_each_entry(test_encoder, &dev->mode_config.encoder_list, head) {
1444			if (test_encoder->crtc && (test_encoder->crtc == crtc)) {
1445				/* in DP mode, the DP ref clock can come from PPLL, DCPLL, or ext clock,
1446				 * depending on the asic:
1447				 * DCE4: PPLL or ext clock
1448				 * DCE5: DCPLL or ext clock
1449				 *
1450				 * Setting ATOM_PPLL_INVALID will cause SetPixelClock to skip
1451				 * PPLL/DCPLL programming and only program the DP DTO for the
1452				 * crtc virtual pixel clock.
1453				 */
1454				if (atombios_get_encoder_mode(test_encoder) == ATOM_ENCODER_MODE_DP) {
1455					if (ASIC_IS_DCE5(rdev) || rdev->clock.dp_extclk)
1456						return ATOM_PPLL_INVALID;
1457				}
1458			}
1459		}
1460
1461		/* otherwise, pick one of the plls */
1462		list_for_each_entry(test_crtc, &dev->mode_config.crtc_list, head) {
1463			struct radeon_crtc *radeon_test_crtc;
1464
1465			if (crtc == test_crtc)
1466				continue;
1467
1468			radeon_test_crtc = to_radeon_crtc(test_crtc);
1469			if ((radeon_test_crtc->pll_id >= ATOM_PPLL1) &&
1470			    (radeon_test_crtc->pll_id <= ATOM_PPLL2))
1471				pll_in_use |= (1 << radeon_test_crtc->pll_id);
1472		}
1473		if (!(pll_in_use & 1))
1474			return ATOM_PPLL1;
1475		return ATOM_PPLL2;
1476	} else
1477		return radeon_crtc->crtc_id;
1478
1479}
1480
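/*
 * Top-level mode set: on DCE4+ the display clock (DCPLL) is programmed
 * first, then the pixel PLL, the CRTC timing (DTD or legacy timing table
 * depending on the ASIC and on TV/CV use), the scanout base, overscan and
 * the scaler.
 */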
1481int atombios_crtc_mode_set(struct drm_crtc *crtc,
1482			   struct drm_display_mode *mode,
1483			   struct drm_display_mode *adjusted_mode,
1484			   int x, int y, struct drm_framebuffer *old_fb)
1485{
1486	struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
1487	struct drm_device *dev = crtc->dev;
1488	struct radeon_device *rdev = dev->dev_private;
1489	struct drm_encoder *encoder;
1490	bool is_tvcv = false;
1491
1492	list_for_each_entry(encoder, &dev->mode_config.encoder_list, head) {
1493		/* find tv std */
1494		if (encoder->crtc == crtc) {
1495			struct radeon_encoder *radeon_encoder = to_radeon_encoder(encoder);
1496			if (radeon_encoder->active_device &
1497			    (ATOM_DEVICE_TV_SUPPORT | ATOM_DEVICE_CV_SUPPORT))
1498				is_tvcv = true;
1499		}
1500	}
1501
1502	/* always set DCPLL */
1503	if (ASIC_IS_DCE4(rdev)) {
1504		struct radeon_atom_ss ss;
1505		bool ss_enabled = radeon_atombios_get_asic_ss_info(rdev, &ss,
1506								   ASIC_INTERNAL_SS_ON_DCPLL,
1507								   rdev->clock.default_dispclk);
1508		if (ss_enabled)
1509			atombios_crtc_program_ss(crtc, ATOM_DISABLE, ATOM_DCPLL, &ss);
1510		/* XXX: DCE5, make sure voltage, dispclk is high enough */
1511		atombios_crtc_set_dcpll(crtc, rdev->clock.default_dispclk);
1512		if (ss_enabled)
1513			atombios_crtc_program_ss(crtc, ATOM_ENABLE, ATOM_DCPLL, &ss);
1514	}
1515	atombios_crtc_set_pll(crtc, adjusted_mode);
1516
1517	if (ASIC_IS_DCE4(rdev))
1518		atombios_set_crtc_dtd_timing(crtc, adjusted_mode);
1519	else if (ASIC_IS_AVIVO(rdev)) {
1520		if (is_tvcv)
1521			atombios_crtc_set_timing(crtc, adjusted_mode);
1522		else
1523			atombios_set_crtc_dtd_timing(crtc, adjusted_mode);
1524	} else {
1525		atombios_crtc_set_timing(crtc, adjusted_mode);
1526		if (radeon_crtc->crtc_id == 0)
1527			atombios_set_crtc_dtd_timing(crtc, adjusted_mode);
1528		radeon_legacy_atom_fixup(crtc);
1529	}
1530	atombios_crtc_set_base(crtc, x, y, old_fb);
1531	atombios_overscan_setup(crtc, mode, adjusted_mode);
1532	atombios_scaler_setup(crtc);
1533	return 0;
1534}
1535
1536static bool atombios_crtc_mode_fixup(struct drm_crtc *crtc,
1537				     struct drm_display_mode *mode,
1538				     struct drm_display_mode *adjusted_mode)
1539{
1540	struct drm_device *dev = crtc->dev;
1541	struct radeon_device *rdev = dev->dev_private;
1542
1543	/* adjust pm to upcoming mode change */
1544	radeon_pm_compute_clocks(rdev);
1545
1546	if (!radeon_crtc_scaling_mode_fixup(crtc, mode, adjusted_mode))
1547		return false;
1548	return true;
1549}
1550
1551static void atombios_crtc_prepare(struct drm_crtc *crtc)
1552{
1553	struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
1554
1555	/* pick pll */
1556	radeon_crtc->pll_id = radeon_atom_pick_pll(crtc);
1557
1558	atombios_lock_crtc(crtc, ATOM_ENABLE);
1559	atombios_crtc_dpms(crtc, DRM_MODE_DPMS_OFF);
1560}
1561
1562static void atombios_crtc_commit(struct drm_crtc *crtc)
1563{
1564	atombios_crtc_dpms(crtc, DRM_MODE_DPMS_ON);
 1565	atombios_lock_crtc(crtc, ATOM_DISABLE);
 1566}

Large files are truncated in this view; the remainder of atombios_crtc.c is available in the repository.