/*
 * Copyright 2007-8 Advanced Micro Devices, Inc.
 * Copyright 2008 Red Hat Inc.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
 *
 * Authors: Dave Airlie
 */
#include "drmP.h"
#include "radeon_drm.h"
#include "radeon.h"

#include "atom.h"
#include <asm/div64.h>

#include "drm_crtc_helper.h"
#include "drm_edid.h"
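/*
 * Gamma LUT programming.
 *
 * All of the *_crtc_load_lut() variants below write the same packed
 * 30-bit color word into the hardware LUT: bits 29:20 red, 19:10 green,
 * 9:0 blue, taken from the 10-bit radeon_crtc->lut_r/g/b tables.  Only
 * the register interface differs between the AVIVO, DCE4, DCE5 and
 * pre-AVIVO (legacy) display blocks.
 */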
static void avivo_crtc_load_lut(struct drm_crtc *crtc)
{
	struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
	struct drm_device *dev = crtc->dev;
	struct radeon_device *rdev = dev->dev_private;
	int i;

	DRM_DEBUG_KMS("%d\n", radeon_crtc->crtc_id);
	WREG32(AVIVO_DC_LUTA_CONTROL + radeon_crtc->crtc_offset, 0);

	WREG32(AVIVO_DC_LUTA_BLACK_OFFSET_BLUE + radeon_crtc->crtc_offset, 0);
	WREG32(AVIVO_DC_LUTA_BLACK_OFFSET_GREEN + radeon_crtc->crtc_offset, 0);
	WREG32(AVIVO_DC_LUTA_BLACK_OFFSET_RED + radeon_crtc->crtc_offset, 0);

	WREG32(AVIVO_DC_LUTA_WHITE_OFFSET_BLUE + radeon_crtc->crtc_offset, 0xffff);
	WREG32(AVIVO_DC_LUTA_WHITE_OFFSET_GREEN + radeon_crtc->crtc_offset, 0xffff);
	WREG32(AVIVO_DC_LUTA_WHITE_OFFSET_RED + radeon_crtc->crtc_offset, 0xffff);

	WREG32(AVIVO_DC_LUT_RW_SELECT, radeon_crtc->crtc_id);
	WREG32(AVIVO_DC_LUT_RW_MODE, 0);
	WREG32(AVIVO_DC_LUT_WRITE_EN_MASK, 0x0000003f);

	WREG8(AVIVO_DC_LUT_RW_INDEX, 0);
	for (i = 0; i < 256; i++) {
		WREG32(AVIVO_DC_LUT_30_COLOR,
		       (radeon_crtc->lut_r[i] << 20) |
		       (radeon_crtc->lut_g[i] << 10) |
		       (radeon_crtc->lut_b[i] << 0));
	}

	WREG32(AVIVO_D1GRPH_LUT_SEL + radeon_crtc->crtc_offset, radeon_crtc->crtc_id);
}
static void dce4_crtc_load_lut(struct drm_crtc *crtc)
{
	struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
	struct drm_device *dev = crtc->dev;
	struct radeon_device *rdev = dev->dev_private;
	int i;

	DRM_DEBUG_KMS("%d\n", radeon_crtc->crtc_id);
	WREG32(EVERGREEN_DC_LUT_CONTROL + radeon_crtc->crtc_offset, 0);

	WREG32(EVERGREEN_DC_LUT_BLACK_OFFSET_BLUE + radeon_crtc->crtc_offset, 0);
	WREG32(EVERGREEN_DC_LUT_BLACK_OFFSET_GREEN + radeon_crtc->crtc_offset, 0);
	WREG32(EVERGREEN_DC_LUT_BLACK_OFFSET_RED + radeon_crtc->crtc_offset, 0);

	WREG32(EVERGREEN_DC_LUT_WHITE_OFFSET_BLUE + radeon_crtc->crtc_offset, 0xffff);
	WREG32(EVERGREEN_DC_LUT_WHITE_OFFSET_GREEN + radeon_crtc->crtc_offset, 0xffff);
	WREG32(EVERGREEN_DC_LUT_WHITE_OFFSET_RED + radeon_crtc->crtc_offset, 0xffff);

	WREG32(EVERGREEN_DC_LUT_RW_MODE + radeon_crtc->crtc_offset, 0);
	WREG32(EVERGREEN_DC_LUT_WRITE_EN_MASK + radeon_crtc->crtc_offset, 0x00000007);

	WREG32(EVERGREEN_DC_LUT_RW_INDEX + radeon_crtc->crtc_offset, 0);
	for (i = 0; i < 256; i++) {
		WREG32(EVERGREEN_DC_LUT_30_COLOR + radeon_crtc->crtc_offset,
		       (radeon_crtc->lut_r[i] << 20) |
		       (radeon_crtc->lut_g[i] << 10) |
		       (radeon_crtc->lut_b[i] << 0));
	}
}
static void dce5_crtc_load_lut(struct drm_crtc *crtc)
{
	struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
	struct drm_device *dev = crtc->dev;
	struct radeon_device *rdev = dev->dev_private;
	int i;

	DRM_DEBUG_KMS("%d\n", radeon_crtc->crtc_id);

	WREG32(NI_INPUT_CSC_CONTROL + radeon_crtc->crtc_offset,
	       (NI_INPUT_CSC_GRPH_MODE(NI_INPUT_CSC_BYPASS) |
		NI_INPUT_CSC_OVL_MODE(NI_INPUT_CSC_BYPASS)));
	WREG32(NI_PRESCALE_GRPH_CONTROL + radeon_crtc->crtc_offset,
	       NI_GRPH_PRESCALE_BYPASS);
	WREG32(NI_PRESCALE_OVL_CONTROL + radeon_crtc->crtc_offset,
	       NI_OVL_PRESCALE_BYPASS);
	WREG32(NI_INPUT_GAMMA_CONTROL + radeon_crtc->crtc_offset,
	       (NI_GRPH_INPUT_GAMMA_MODE(NI_INPUT_GAMMA_USE_LUT) |
		NI_OVL_INPUT_GAMMA_MODE(NI_INPUT_GAMMA_USE_LUT)));

	WREG32(EVERGREEN_DC_LUT_CONTROL + radeon_crtc->crtc_offset, 0);

	WREG32(EVERGREEN_DC_LUT_BLACK_OFFSET_BLUE + radeon_crtc->crtc_offset, 0);
	WREG32(EVERGREEN_DC_LUT_BLACK_OFFSET_GREEN + radeon_crtc->crtc_offset, 0);
	WREG32(EVERGREEN_DC_LUT_BLACK_OFFSET_RED + radeon_crtc->crtc_offset, 0);

	WREG32(EVERGREEN_DC_LUT_WHITE_OFFSET_BLUE + radeon_crtc->crtc_offset, 0xffff);
	WREG32(EVERGREEN_DC_LUT_WHITE_OFFSET_GREEN + radeon_crtc->crtc_offset, 0xffff);
	WREG32(EVERGREEN_DC_LUT_WHITE_OFFSET_RED + radeon_crtc->crtc_offset, 0xffff);

	WREG32(EVERGREEN_DC_LUT_RW_MODE + radeon_crtc->crtc_offset, 0);
	WREG32(EVERGREEN_DC_LUT_WRITE_EN_MASK + radeon_crtc->crtc_offset, 0x00000007);

	WREG32(EVERGREEN_DC_LUT_RW_INDEX + radeon_crtc->crtc_offset, 0);
	for (i = 0; i < 256; i++) {
		WREG32(EVERGREEN_DC_LUT_30_COLOR + radeon_crtc->crtc_offset,
		       (radeon_crtc->lut_r[i] << 20) |
		       (radeon_crtc->lut_g[i] << 10) |
		       (radeon_crtc->lut_b[i] << 0));
	}

	WREG32(NI_DEGAMMA_CONTROL + radeon_crtc->crtc_offset,
	       (NI_GRPH_DEGAMMA_MODE(NI_DEGAMMA_BYPASS) |
		NI_OVL_DEGAMMA_MODE(NI_DEGAMMA_BYPASS) |
		NI_ICON_DEGAMMA_MODE(NI_DEGAMMA_BYPASS) |
		NI_CURSOR_DEGAMMA_MODE(NI_DEGAMMA_BYPASS)));
	WREG32(NI_GAMUT_REMAP_CONTROL + radeon_crtc->crtc_offset,
	       (NI_GRPH_GAMUT_REMAP_MODE(NI_GAMUT_REMAP_BYPASS) |
		NI_OVL_GAMUT_REMAP_MODE(NI_GAMUT_REMAP_BYPASS)));
	WREG32(NI_REGAMMA_CONTROL + radeon_crtc->crtc_offset,
	       (NI_GRPH_REGAMMA_MODE(NI_REGAMMA_BYPASS) |
		NI_OVL_REGAMMA_MODE(NI_REGAMMA_BYPASS)));
	WREG32(NI_OUTPUT_CSC_CONTROL + radeon_crtc->crtc_offset,
	       (NI_OUTPUT_CSC_GRPH_MODE(NI_OUTPUT_CSC_BYPASS) |
		NI_OUTPUT_CSC_OVL_MODE(NI_OUTPUT_CSC_BYPASS)));
	/* XXX match this to the depth of the crtc fmt block, move to modeset? */
	WREG32(0x6940 + radeon_crtc->crtc_offset, 0);
}
static void legacy_crtc_load_lut(struct drm_crtc *crtc)
{
	struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
	struct drm_device *dev = crtc->dev;
	struct radeon_device *rdev = dev->dev_private;
	int i;
	uint32_t dac2_cntl;

	dac2_cntl = RREG32(RADEON_DAC_CNTL2);
	if (radeon_crtc->crtc_id == 0)
		dac2_cntl &= (uint32_t)~RADEON_DAC2_PALETTE_ACC_CTL;
	else
		dac2_cntl |= RADEON_DAC2_PALETTE_ACC_CTL;
	WREG32(RADEON_DAC_CNTL2, dac2_cntl);

	WREG8(RADEON_PALETTE_INDEX, 0);
	for (i = 0; i < 256; i++) {
		WREG32(RADEON_PALETTE_30_DATA,
		       (radeon_crtc->lut_r[i] << 20) |
		       (radeon_crtc->lut_g[i] << 10) |
		       (radeon_crtc->lut_b[i] << 0));
	}
}
void radeon_crtc_load_lut(struct drm_crtc *crtc)
{
	struct drm_device *dev = crtc->dev;
	struct radeon_device *rdev = dev->dev_private;

	if (ASIC_IS_DCE5(rdev))
		dce5_crtc_load_lut(crtc);
	else if (ASIC_IS_DCE4(rdev))
		dce4_crtc_load_lut(crtc);
	else if (ASIC_IS_AVIVO(rdev))
		avivo_crtc_load_lut(crtc);
	else
		legacy_crtc_load_lut(crtc);
}
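/*
 * The fbcon gamma helpers below exchange 16-bit color ramp entries with
 * the DRM core; the driver stores only 10 bits per channel, so values are
 * truncated with ">> 6" on set and expanded with "<< 6" on get.
 */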
/** Sets the color ramps on behalf of fbcon */
void radeon_crtc_fb_gamma_set(struct drm_crtc *crtc, u16 red, u16 green,
			      u16 blue, int regno)
{
	struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);

	radeon_crtc->lut_r[regno] = red >> 6;
	radeon_crtc->lut_g[regno] = green >> 6;
	radeon_crtc->lut_b[regno] = blue >> 6;
}
/** Gets the color ramps on behalf of fbcon */
void radeon_crtc_fb_gamma_get(struct drm_crtc *crtc, u16 *red, u16 *green,
			      u16 *blue, int regno)
{
	struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);

	*red = radeon_crtc->lut_r[regno] << 6;
	*green = radeon_crtc->lut_g[regno] << 6;
	*blue = radeon_crtc->lut_b[regno] << 6;
}
static void radeon_crtc_gamma_set(struct drm_crtc *crtc, u16 *red, u16 *green,
				  u16 *blue, uint32_t start, uint32_t size)
{
	struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
	int end = (start + size > 256) ? 256 : start + size, i;

	/* userspace palettes are always correct as is */
	for (i = start; i < end; i++) {
		radeon_crtc->lut_r[i] = red[i] >> 6;
		radeon_crtc->lut_g[i] = green[i] >> 6;
		radeon_crtc->lut_b[i] = blue[i] >> 6;
	}
	radeon_crtc_load_lut(crtc);
}
static void radeon_crtc_destroy(struct drm_crtc *crtc)
{
	struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);

	drm_crtc_cleanup(crtc);
	kfree(radeon_crtc);
}
/*
 * Handle unpin events outside the interrupt handler proper.
 */
static void radeon_unpin_work_func(struct work_struct *__work)
{
	struct radeon_unpin_work *work =
		container_of(__work, struct radeon_unpin_work, work);
	int r;

	/* unpin of the old buffer */
	r = radeon_bo_reserve(work->old_rbo, false);
	if (likely(r == 0)) {
		r = radeon_bo_unpin(work->old_rbo);
		if (unlikely(r != 0)) {
			DRM_ERROR("failed to unpin buffer after flip\n");
		}
		radeon_bo_unreserve(work->old_rbo);
	} else
		DRM_ERROR("failed to reserve buffer after flip\n");

	drm_gem_object_unreference_unlocked(&work->old_rbo->gem_base);
	kfree(work);
}
void radeon_crtc_handle_flip(struct radeon_device *rdev, int crtc_id)
{
	struct radeon_crtc *radeon_crtc = rdev->mode_info.crtcs[crtc_id];
	struct radeon_unpin_work *work;
	struct drm_pending_vblank_event *e;
	struct timeval now;
	unsigned long flags;
	u32 update_pending;
	int vpos, hpos;

	spin_lock_irqsave(&rdev->ddev->event_lock, flags);
	work = radeon_crtc->unpin_work;
	if (work == NULL ||
	    (work->fence && !radeon_fence_signaled(work->fence))) {
		spin_unlock_irqrestore(&rdev->ddev->event_lock, flags);
		return;
	}
	/* New pageflip, or just completion of a previous one? */
	if (!radeon_crtc->deferred_flip_completion) {
		/* do the flip (mmio) */
		update_pending = radeon_page_flip(rdev, crtc_id, work->new_crtc_base);
	} else {
		/* This is just a completion of a flip queued in crtc
		 * at last invocation. Make sure we go directly to
		 * completion routine.
		 */
		update_pending = 0;
		radeon_crtc->deferred_flip_completion = 0;
	}

	/* Has the pageflip already completed in crtc, or is it certain
	 * to complete in this vblank?
	 */
	if (update_pending &&
	    (DRM_SCANOUTPOS_VALID & radeon_get_crtc_scanoutpos(rdev->ddev, crtc_id,
							       &vpos, &hpos)) &&
	    ((vpos >= (99 * rdev->mode_info.crtcs[crtc_id]->base.hwmode.crtc_vdisplay)/100) ||
	     (vpos < 0 && !ASIC_IS_AVIVO(rdev)))) {
		/* crtc didn't flip in this target vblank interval,
		 * but flip is pending in crtc. Based on the current
		 * scanout position we know that the current frame is
		 * (nearly) complete and the flip will (likely)
		 * complete before the start of the next frame.
		 */
		update_pending = 0;
	}
	if (update_pending) {
		/* crtc didn't flip in this target vblank interval,
		 * but flip is pending in crtc. It will complete it
		 * in next vblank interval, so complete the flip at
		 * next vblank irq.
		 */
		radeon_crtc->deferred_flip_completion = 1;
		spin_unlock_irqrestore(&rdev->ddev->event_lock, flags);
		return;
	}

	/* Pageflip (will be) certainly completed in this vblank. Clean up. */
	radeon_crtc->unpin_work = NULL;

	/* wakeup userspace */
	if (work->event) {
		e = work->event;
		e->event.sequence = drm_vblank_count_and_time(rdev->ddev, crtc_id, &now);
		e->event.tv_sec = now.tv_sec;
		e->event.tv_usec = now.tv_usec;
		list_add_tail(&e->base.link, &e->base.file_priv->event_list);
		wake_up_interruptible(&e->base.file_priv->event_wait);
	}
	spin_unlock_irqrestore(&rdev->ddev->event_lock, flags);

	drm_vblank_put(rdev->ddev, radeon_crtc->crtc_id);
	radeon_fence_unref(&work->fence);
	radeon_post_page_flip(work->rdev, work->crtc_id);
	schedule_work(&work->work);
}
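/*
 * Queue a page flip: take a reference on the old framebuffer's GEM object
 * (it is unpinned later from radeon_unpin_work_func()), pin the new buffer
 * object to obtain its GPU base address, remember that address in
 * unpin_work, and arm the vblank interrupt via radeon_pre_page_flip().
 * The flip itself is programmed from radeon_crtc_handle_flip() above at
 * vblank time.
 */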
static int radeon_crtc_page_flip(struct drm_crtc *crtc,
				 struct drm_framebuffer *fb,
				 struct drm_pending_vblank_event *event)
{
	struct drm_device *dev = crtc->dev;
	struct radeon_device *rdev = dev->dev_private;
	struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
	struct radeon_framebuffer *old_radeon_fb;
	struct radeon_framebuffer *new_radeon_fb;
	struct drm_gem_object *obj;
	struct radeon_bo *rbo;
	struct radeon_unpin_work *work;
	unsigned long flags;
	u32 tiling_flags, pitch_pixels;
	u64 base;
	int r;

	work = kzalloc(sizeof *work, GFP_KERNEL);
	if (work == NULL)
		return -ENOMEM;

	work->event = event;
	work->rdev = rdev;
	work->crtc_id = radeon_crtc->crtc_id;
	old_radeon_fb = to_radeon_framebuffer(crtc->fb);
	new_radeon_fb = to_radeon_framebuffer(fb);
	/* schedule unpin of the old buffer */
	obj = old_radeon_fb->obj;
	/* take a reference to the old object */
	drm_gem_object_reference(obj);
	rbo = gem_to_radeon_bo(obj);
	work->old_rbo = rbo;
	obj = new_radeon_fb->obj;
	rbo = gem_to_radeon_bo(obj);
	if (rbo->tbo.sync_obj)
		work->fence = radeon_fence_ref(rbo->tbo.sync_obj);
	INIT_WORK(&work->work, radeon_unpin_work_func);

	/* We borrow the event spin lock for protecting unpin_work */
	spin_lock_irqsave(&dev->event_lock, flags);
	if (radeon_crtc->unpin_work) {
		DRM_DEBUG_DRIVER("flip queue: crtc already busy\n");
		r = -EBUSY;
		goto unlock_free;
	}
	radeon_crtc->unpin_work = work;
	radeon_crtc->deferred_flip_completion = 0;
	spin_unlock_irqrestore(&dev->event_lock, flags);

	/* pin the new buffer */
	DRM_DEBUG_DRIVER("flip-ioctl() cur_fbo = %p, cur_bbo = %p\n",
			 work->old_rbo, rbo);

	r = radeon_bo_reserve(rbo, false);
	if (unlikely(r != 0)) {
		DRM_ERROR("failed to reserve new rbo buffer before flip\n");
		goto pflip_cleanup;
	}
	/* Only 27 bit offset for legacy CRTC */
	r = radeon_bo_pin_restricted(rbo, RADEON_GEM_DOMAIN_VRAM,
				     ASIC_IS_AVIVO(rdev) ? 0 : 1 << 27, &base);
	if (unlikely(r != 0)) {
		radeon_bo_unreserve(rbo);
		r = -EINVAL;
		DRM_ERROR("failed to pin new rbo buffer before flip\n");
		goto pflip_cleanup;
	}
	radeon_bo_get_tiling_flags(rbo, &tiling_flags, NULL);
	radeon_bo_unreserve(rbo);

	if (!ASIC_IS_AVIVO(rdev)) {
		/* crtc offset is from display base addr not FB location */
		base -= radeon_crtc->legacy_display_base_addr;
		pitch_pixels = fb->pitches[0] / (fb->bits_per_pixel / 8);

		if (tiling_flags & RADEON_TILING_MACRO) {
			if (ASIC_IS_R300(rdev)) {
				base &= ~0x7ff;
			} else {
				int byteshift = fb->bits_per_pixel >> 4;
				int tile_addr = (((crtc->y >> 3) * pitch_pixels + crtc->x) >> (8 - byteshift)) << 11;
				base += tile_addr + ((crtc->x << byteshift) % 256) + ((crtc->y % 8) << 8);
			}
		} else {
			int offset = crtc->y * pitch_pixels + crtc->x;
			switch (fb->bits_per_pixel) {
			case 8:
			default:
				offset *= 1;
				break;
			case 15:
			case 16:
				offset *= 2;
				break;
			case 24:
				offset *= 3;
				break;
			case 32:
				offset *= 4;
				break;
			}
			base += offset;
		}
		base &= ~7;
	}

	spin_lock_irqsave(&dev->event_lock, flags);
	work->new_crtc_base = base;
	spin_unlock_irqrestore(&dev->event_lock, flags);

	/* update crtc fb */
	crtc->fb = fb;

	r = drm_vblank_get(dev, radeon_crtc->crtc_id);
	if (r) {
		DRM_ERROR("failed to get vblank before flip\n");
		goto pflip_cleanup1;
	}

	/* set the proper interrupt */
	radeon_pre_page_flip(rdev, radeon_crtc->crtc_id);

	return 0;

pflip_cleanup1:
	if (unlikely(radeon_bo_reserve(rbo, false) != 0)) {
		DRM_ERROR("failed to reserve new rbo in error path\n");
		goto pflip_cleanup;
	}
	if (unlikely(radeon_bo_unpin(rbo) != 0)) {
		DRM_ERROR("failed to unpin new rbo in error path\n");
	}
	radeon_bo_unreserve(rbo);

pflip_cleanup:
	spin_lock_irqsave(&dev->event_lock, flags);
	radeon_crtc->unpin_work = NULL;
unlock_free:
	spin_unlock_irqrestore(&dev->event_lock, flags);
	drm_gem_object_unreference_unlocked(old_radeon_fb->obj);
	radeon_fence_unref(&work->fence);
	kfree(work);

	return r;
}
static const struct drm_crtc_funcs radeon_crtc_funcs = {
	.cursor_set = radeon_crtc_cursor_set,
	.cursor_move = radeon_crtc_cursor_move,
	.gamma_set = radeon_crtc_gamma_set,
	.set_config = drm_crtc_helper_set_config,
	.destroy = radeon_crtc_destroy,
	.page_flip = radeon_crtc_page_flip,
};
static void radeon_crtc_init(struct drm_device *dev, int index)
{
	struct radeon_device *rdev = dev->dev_private;
	struct radeon_crtc *radeon_crtc;
	int i;

	radeon_crtc = kzalloc(sizeof(struct radeon_crtc) + (RADEONFB_CONN_LIMIT * sizeof(struct drm_connector *)), GFP_KERNEL);
	if (radeon_crtc == NULL)
		return;

	drm_crtc_init(dev, &radeon_crtc->base, &radeon_crtc_funcs);

	drm_mode_crtc_set_gamma_size(&radeon_crtc->base, 256);
	radeon_crtc->crtc_id = index;
	rdev->mode_info.crtcs[index] = radeon_crtc;

	radeon_crtc->mode_set.crtc = &radeon_crtc->base;
	radeon_crtc->mode_set.connectors = (struct drm_connector **)(radeon_crtc + 1);
	radeon_crtc->mode_set.num_connectors = 0;

	for (i = 0; i < 256; i++) {
		radeon_crtc->lut_r[i] = i << 2;
		radeon_crtc->lut_g[i] = i << 2;
		radeon_crtc->lut_b[i] = i << 2;
	}

	if (rdev->is_atom_bios && (ASIC_IS_AVIVO(rdev) || radeon_r4xx_atom))
		radeon_atombios_init_crtc(dev, radeon_crtc);
	else
		radeon_legacy_init_crtc(dev, radeon_crtc);
}
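/*
 * Human readable names used by radeon_print_display_setup().  The
 * encoder_names table is indexed by radeon_encoder->encoder_id (the ATOM
 * ENCODER_OBJECT_ID_* values), hpd_names by the RADEON_HPD_* pin number.
 */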
static const char *encoder_names[37] = {
	"NONE",
	"INTERNAL_LVDS",
	"INTERNAL_TMDS1",
	"INTERNAL_TMDS2",
	"INTERNAL_DAC1",
	"INTERNAL_DAC2",
	"INTERNAL_SDVOA",
	"INTERNAL_SDVOB",
	"SI170B",
	"CH7303",
	"CH7301",
	"INTERNAL_DVO1",
	"EXTERNAL_SDVOA",
	"EXTERNAL_SDVOB",
	"TITFP513",
	"INTERNAL_LVTM1",
	"VT1623",
	"HDMI_SI1930",
	"HDMI_INTERNAL",
	"INTERNAL_KLDSCP_TMDS1",
	"INTERNAL_KLDSCP_DVO1",
	"INTERNAL_KLDSCP_DAC1",
	"INTERNAL_KLDSCP_DAC2",
	"SI178",
	"MVPU_FPGA",
	"INTERNAL_DDI",
	"VT1625",
	"HDMI_SI1932",
	"DP_AN9801",
	"DP_DP501",
	"INTERNAL_UNIPHY",
	"INTERNAL_KLDSCP_LVTMA",
	"INTERNAL_UNIPHY1",
	"INTERNAL_UNIPHY2",
	"NUTMEG",
	"TRAVIS",
	"INTERNAL_VCE"
};
static const char *hpd_names[6] = {
	"HPD1",
	"HPD2",
	"HPD3",
	"HPD4",
	"HPD5",
	"HPD6",
};
static void radeon_print_display_setup(struct drm_device *dev)
{
	struct drm_connector *connector;
	struct radeon_connector *radeon_connector;
	struct drm_encoder *encoder;
	struct radeon_encoder *radeon_encoder;
	uint32_t devices;
	int i = 0;

	DRM_INFO("Radeon Display Connectors\n");
	list_for_each_entry(connector, &dev->mode_config.connector_list, head) {
		radeon_connector = to_radeon_connector(connector);
		DRM_INFO("Connector %d:\n", i);
		DRM_INFO("  %s\n", drm_get_connector_name(connector));
		if (radeon_connector->hpd.hpd != RADEON_HPD_NONE)
			DRM_INFO("  %s\n", hpd_names[radeon_connector->hpd.hpd]);
		if (radeon_connector->ddc_bus) {
			DRM_INFO("  DDC: 0x%x 0x%x 0x%x 0x%x 0x%x 0x%x 0x%x 0x%x\n",
				 radeon_connector->ddc_bus->rec.mask_clk_reg,
				 radeon_connector->ddc_bus->rec.mask_data_reg,
				 radeon_connector->ddc_bus->rec.a_clk_reg,
				 radeon_connector->ddc_bus->rec.a_data_reg,
				 radeon_connector->ddc_bus->rec.en_clk_reg,
				 radeon_connector->ddc_bus->rec.en_data_reg,
				 radeon_connector->ddc_bus->rec.y_clk_reg,
				 radeon_connector->ddc_bus->rec.y_data_reg);
			if (radeon_connector->router.ddc_valid)
				DRM_INFO("  DDC Router 0x%x/0x%x\n",
					 radeon_connector->router.ddc_mux_control_pin,
					 radeon_connector->router.ddc_mux_state);
			if (radeon_connector->router.cd_valid)
				DRM_INFO("  Clock/Data Router 0x%x/0x%x\n",
					 radeon_connector->router.cd_mux_control_pin,
					 radeon_connector->router.cd_mux_state);
		} else {
			if (connector->connector_type == DRM_MODE_CONNECTOR_VGA ||
			    connector->connector_type == DRM_MODE_CONNECTOR_DVII ||
			    connector->connector_type == DRM_MODE_CONNECTOR_DVID ||
			    connector->connector_type == DRM_MODE_CONNECTOR_DVIA ||
			    connector->connector_type == DRM_MODE_CONNECTOR_HDMIA ||
			    connector->connector_type == DRM_MODE_CONNECTOR_HDMIB)
				DRM_INFO("  DDC: no ddc bus - possible BIOS bug - please report to xorg-driver-ati@lists.x.org\n");
		}
		DRM_INFO("  Encoders:\n");
		list_for_each_entry(encoder, &dev->mode_config.encoder_list, head) {
			radeon_encoder = to_radeon_encoder(encoder);
			devices = radeon_encoder->devices & radeon_connector->devices;

			if (devices & ATOM_DEVICE_CRT1_SUPPORT)
				DRM_INFO("    CRT1: %s\n", encoder_names[radeon_encoder->encoder_id]);
			if (devices & ATOM_DEVICE_CRT2_SUPPORT)
				DRM_INFO("    CRT2: %s\n", encoder_names[radeon_encoder->encoder_id]);
			if (devices & ATOM_DEVICE_LCD1_SUPPORT)
				DRM_INFO("    LCD1: %s\n", encoder_names[radeon_encoder->encoder_id]);
			if (devices & ATOM_DEVICE_DFP1_SUPPORT)
				DRM_INFO("    DFP1: %s\n", encoder_names[radeon_encoder->encoder_id]);
			if (devices & ATOM_DEVICE_DFP2_SUPPORT)
				DRM_INFO("    DFP2: %s\n", encoder_names[radeon_encoder->encoder_id]);
			if (devices & ATOM_DEVICE_DFP3_SUPPORT)
				DRM_INFO("    DFP3: %s\n", encoder_names[radeon_encoder->encoder_id]);
			if (devices & ATOM_DEVICE_DFP4_SUPPORT)
				DRM_INFO("    DFP4: %s\n", encoder_names[radeon_encoder->encoder_id]);
			if (devices & ATOM_DEVICE_DFP5_SUPPORT)
				DRM_INFO("    DFP5: %s\n", encoder_names[radeon_encoder->encoder_id]);
			if (devices & ATOM_DEVICE_DFP6_SUPPORT)
				DRM_INFO("    DFP6: %s\n", encoder_names[radeon_encoder->encoder_id]);
			if (devices & ATOM_DEVICE_TV1_SUPPORT)
				DRM_INFO("    TV1: %s\n", encoder_names[radeon_encoder->encoder_id]);
			if (devices & ATOM_DEVICE_CV_SUPPORT)
				DRM_INFO("    CV: %s\n", encoder_names[radeon_encoder->encoder_id]);
		}
		i++;
	}
}
static bool radeon_setup_enc_conn(struct drm_device *dev)
{
	struct radeon_device *rdev = dev->dev_private;
	bool ret = false;

	if (rdev->bios) {
		if (rdev->is_atom_bios) {
			ret = radeon_get_atom_connector_info_from_supported_devices_table(dev);
			if (ret == false)
				ret = radeon_get_atom_connector_info_from_object_table(dev);
		} else {
			ret = radeon_get_legacy_connector_info_from_bios(dev);
			if (ret == false)
				ret = radeon_get_legacy_connector_info_from_table(dev);
		}
	} else {
		if (!ASIC_IS_AVIVO(rdev))
			ret = radeon_get_legacy_connector_info_from_table(dev);
	}
	if (ret) {
		radeon_setup_encoder_clones(dev);
		radeon_print_display_setup(dev);
	}

	return ret;
}
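/*
 * EDID retrieval: DP and eDP connectors (and DP bridges) are probed over
 * the DP aux/i2c bus first, everything else over the regular DDC bus; if
 * no EDID is found, fall back to an EDID hardcoded in the video BIOS
 * (laptop LCD/eDP panels on ATOM boards, KVMs on some legacy servers).
 */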
int radeon_ddc_get_modes(struct radeon_connector *radeon_connector)
{
	struct drm_device *dev = radeon_connector->base.dev;
	struct radeon_device *rdev = dev->dev_private;
	int ret = 0;

	/* on hw with routers, select right port */
	if (radeon_connector->router.ddc_valid)
		radeon_router_select_ddc_port(radeon_connector);

	if ((radeon_connector->base.connector_type == DRM_MODE_CONNECTOR_DisplayPort) ||
	    (radeon_connector->base.connector_type == DRM_MODE_CONNECTOR_eDP) ||
	    (radeon_connector_encoder_get_dp_bridge_encoder_id(&radeon_connector->base) !=
	     ENCODER_OBJECT_ID_NONE)) {
		struct radeon_connector_atom_dig *dig = radeon_connector->con_priv;

		if ((dig->dp_sink_type == CONNECTOR_OBJECT_ID_DISPLAYPORT ||
		     dig->dp_sink_type == CONNECTOR_OBJECT_ID_eDP) && dig->dp_i2c_bus)
			radeon_connector->edid = drm_get_edid(&radeon_connector->base,
							      &dig->dp_i2c_bus->adapter);
		else if (radeon_connector->ddc_bus && !radeon_connector->edid)
			radeon_connector->edid = drm_get_edid(&radeon_connector->base,
							      &radeon_connector->ddc_bus->adapter);
	} else {
		if (radeon_connector->ddc_bus && !radeon_connector->edid)
			radeon_connector->edid = drm_get_edid(&radeon_connector->base,
							      &radeon_connector->ddc_bus->adapter);
	}

	if (!radeon_connector->edid) {
		if (rdev->is_atom_bios) {
			/* some laptops provide a hardcoded edid in rom for LCDs */
			if (((radeon_connector->base.connector_type == DRM_MODE_CONNECTOR_LVDS) ||
			     (radeon_connector->base.connector_type == DRM_MODE_CONNECTOR_eDP)))
				radeon_connector->edid = radeon_bios_get_hardcoded_edid(rdev);
		} else
			/* some servers provide a hardcoded edid in rom for KVMs */
			radeon_connector->edid = radeon_bios_get_hardcoded_edid(rdev);
	}
	if (radeon_connector->edid) {
		drm_mode_connector_update_edid_property(&radeon_connector->base, radeon_connector->edid);
		ret = drm_add_edid_modes(&radeon_connector->base, radeon_connector->edid);
		return ret;
	}
	drm_mode_connector_update_edid_property(&radeon_connector->base, NULL);
	return 0;
}
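/*
 * AVIVO PLL setup.  The output clock is
 *
 *	out = reference_freq * (fb_div + frac_fb_div/10) / (ref_div * post_div)
 *
 * so avivo_get_fb_div() solves for the feedback divider:
 * fb_div = target_clock * post_div * ref_div / reference_freq, with the
 * remainder returned in *frac_fb_div.  Purely illustrative numbers: with
 * reference_freq = 2700, post_div = 4, ref_div = 1 and target_clock = 16200,
 * fb_div works out to 24 with no fractional part.
 */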
static void avivo_get_fb_div(struct radeon_pll *pll,
			     u32 target_clock,
			     u32 post_div,
			     u32 ref_div,
			     u32 *fb_div,
			     u32 *frac_fb_div)
{
	u32 tmp = post_div * ref_div;

	tmp *= target_clock;
	*fb_div = tmp / pll->reference_freq;
	*frac_fb_div = tmp % pll->reference_freq;

	if (*fb_div > pll->max_feedback_div)
		*fb_div = pll->max_feedback_div;
	else if (*fb_div < pll->min_feedback_div)
		*fb_div = pll->min_feedback_div;
}
static u32 avivo_get_post_div(struct radeon_pll *pll,
			      u32 target_clock)
{
	u32 vco, post_div, tmp;

	if (pll->flags & RADEON_PLL_USE_POST_DIV)
		return pll->post_div;

	if (pll->flags & RADEON_PLL_PREFER_MINM_OVER_MAXP) {
		if (pll->flags & RADEON_PLL_IS_LCD)
			vco = pll->lcd_pll_out_min;
		else
			vco = pll->pll_out_min;
	} else {
		if (pll->flags & RADEON_PLL_IS_LCD)
			vco = pll->lcd_pll_out_max;
		else
			vco = pll->pll_out_max;
	}

	post_div = vco / target_clock;
	tmp = vco % target_clock;

	if (pll->flags & RADEON_PLL_PREFER_MINM_OVER_MAXP) {
		if (tmp)
			post_div--;
	} else {
		if (tmp)
			post_div++;
	}

	if (post_div > pll->max_post_div)
		post_div = pll->max_post_div;
	else if (post_div < pll->min_post_div)
		post_div = pll->min_post_div;

	return post_div;
}
#define MAX_TOLERANCE 10

void radeon_compute_pll_avivo(struct radeon_pll *pll,
			      u32 freq,
			      u32 *dot_clock_p,
			      u32 *fb_div_p,
			      u32 *frac_fb_div_p,
			      u32 *ref_div_p,
			      u32 *post_div_p)
{
	u32 target_clock = freq / 10;
	u32 post_div = avivo_get_post_div(pll, target_clock);
	u32 ref_div = pll->min_ref_div;
	u32 fb_div = 0, frac_fb_div = 0, tmp;

	if (pll->flags & RADEON_PLL_USE_REF_DIV)
		ref_div = pll->reference_div;

	if (pll->flags & RADEON_PLL_USE_FRAC_FB_DIV) {
		avivo_get_fb_div(pll, target_clock, post_div, ref_div, &fb_div, &frac_fb_div);
		frac_fb_div = (100 * frac_fb_div) / pll->reference_freq;
		if (frac_fb_div >= 5) {
			frac_fb_div -= 5;
			frac_fb_div = frac_fb_div / 10;
			frac_fb_div++;
		}
		if (frac_fb_div >= 10) {
			fb_div++;
			frac_fb_div = 0;
		}
	} else {
		while (ref_div <= pll->max_ref_div) {
			avivo_get_fb_div(pll, target_clock, post_div, ref_div,
					 &fb_div, &frac_fb_div);
			if (frac_fb_div >= (pll->reference_freq / 2))
				fb_div++;
			frac_fb_div = 0;
			tmp = (pll->reference_freq * fb_div) / (post_div * ref_div);
			tmp = (tmp * 10000) / target_clock;

			if (tmp > (10000 + MAX_TOLERANCE))
				ref_div++;
			else if (tmp >= (10000 - MAX_TOLERANCE))
				break;
			else
				ref_div++;
		}
	}

	*dot_clock_p = ((pll->reference_freq * fb_div * 10) + (pll->reference_freq * frac_fb_div)) /
		(ref_div * post_div * 10);
	*fb_div_p = fb_div;
	*frac_fb_div_p = frac_fb_div;
	*ref_div_p = ref_div;
	*post_div_p = post_div;
	DRM_DEBUG_KMS("%d, pll dividers - fb: %d.%d ref: %d, post %d\n",
		      *dot_clock_p, fb_div, frac_fb_div, ref_div, post_div);
}
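/*
 * radeon_div() below is a small helper for the legacy PLL search: it
 * divides a 64-bit numerator by a 32-bit divisor with do_div(), adding
 * d/2 first so the result is rounded to nearest rather than truncated.
 */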
static inline uint32_t radeon_div(uint64_t n, uint32_t d)
{
	uint64_t mod;

	n += d / 2;

	mod = do_div(n, d);
	return n;
}
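/*
 * Legacy PLL computation: iterate over all allowed post and reference
 * dividers and binary-search the (fractional) feedback divider for each
 * pair, keeping the combination with the smallest frequency error (and,
 * as a tie breaker, the smallest VCO distance from pll->best_vco),
 * while honoring the various RADEON_PLL_PREFER_* / RADEON_PLL_USE_* flags.
 */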
void radeon_compute_pll_legacy(struct radeon_pll *pll,
			       u64 freq,
			       uint32_t *dot_clock_p,
			       uint32_t *fb_div_p,
			       uint32_t *frac_fb_div_p,
			       uint32_t *ref_div_p,
			       uint32_t *post_div_p)
{
	uint32_t min_ref_div = pll->min_ref_div;
	uint32_t max_ref_div = pll->max_ref_div;
	uint32_t min_post_div = pll->min_post_div;
	uint32_t max_post_div = pll->max_post_div;
	uint32_t min_fractional_feed_div = 0;
	uint32_t max_fractional_feed_div = 0;
	uint32_t best_vco = pll->best_vco;
	uint32_t best_post_div = 1;
	uint32_t best_ref_div = 1;
	uint32_t best_feedback_div = 1;
	uint32_t best_frac_feedback_div = 0;
	uint32_t best_freq = -1;
	uint32_t best_error = 0xffffffff;
	uint32_t best_vco_diff = 1;
	uint32_t post_div;
	u32 pll_out_min, pll_out_max;

	DRM_DEBUG_KMS("PLL freq %llu %u %u\n", freq, pll->min_ref_div, pll->max_ref_div);
	freq = freq * 1000;

	if (pll->flags & RADEON_PLL_IS_LCD) {
		pll_out_min = pll->lcd_pll_out_min;
		pll_out_max = pll->lcd_pll_out_max;
	} else {
		pll_out_min = pll->pll_out_min;
		pll_out_max = pll->pll_out_max;
	}

	if (pll_out_min > 64800)
		pll_out_min = 64800;

	if (pll->flags & RADEON_PLL_USE_REF_DIV)
		min_ref_div = max_ref_div = pll->reference_div;
	else {
		while (min_ref_div < max_ref_div-1) {
			uint32_t mid = (min_ref_div + max_ref_div) / 2;
			uint32_t pll_in = pll->reference_freq / mid;
			if (pll_in < pll->pll_in_min)
				max_ref_div = mid;
			else if (pll_in > pll->pll_in_max)
				min_ref_div = mid;
			else
				break;
		}
	}

	if (pll->flags & RADEON_PLL_USE_POST_DIV)
		min_post_div = max_post_div = pll->post_div;

	if (pll->flags & RADEON_PLL_USE_FRAC_FB_DIV) {
		min_fractional_feed_div = pll->min_frac_feedback_div;
		max_fractional_feed_div = pll->max_frac_feedback_div;
	}

	for (post_div = max_post_div; post_div >= min_post_div; --post_div) {
		uint32_t ref_div;

		if ((pll->flags & RADEON_PLL_NO_ODD_POST_DIV) && (post_div & 1))
			continue;

		/* legacy radeons only have a few post_divs */
		if (pll->flags & RADEON_PLL_LEGACY) {
			if ((post_div == 5) ||
			    (post_div == 7) ||
			    (post_div == 9) ||
			    (post_div == 10) ||
			    (post_div == 11) ||
			    (post_div == 13) ||
			    (post_div == 14) ||
			    (post_div == 15))
				continue;
		}

		for (ref_div = min_ref_div; ref_div <= max_ref_div; ++ref_div) {
			uint32_t feedback_div, current_freq = 0, error, vco_diff;
			uint32_t pll_in = pll->reference_freq / ref_div;
			uint32_t min_feed_div = pll->min_feedback_div;
			uint32_t max_feed_div = pll->max_feedback_div + 1;

			if (pll_in < pll->pll_in_min || pll_in > pll->pll_in_max)
				continue;

			while (min_feed_div < max_feed_div) {
				uint32_t vco;
				uint32_t min_frac_feed_div = min_fractional_feed_div;
				uint32_t max_frac_feed_div = max_fractional_feed_div + 1;
				uint32_t frac_feedback_div;
				uint64_t tmp;

				feedback_div = (min_feed_div + max_feed_div) / 2;

				tmp = (uint64_t)pll->reference_freq * feedback_div;
				vco = radeon_div(tmp, ref_div);

				if (vco < pll_out_min) {
					min_feed_div = feedback_div + 1;
					continue;
				} else if (vco > pll_out_max) {
					max_feed_div = feedback_div;
					continue;
				}

				while (min_frac_feed_div < max_frac_feed_div) {
					frac_feedback_div = (min_frac_feed_div + max_frac_feed_div) / 2;
					tmp = (uint64_t)pll->reference_freq * 10000 * feedback_div;
					tmp += (uint64_t)pll->reference_freq * 1000 * frac_feedback_div;
					current_freq = radeon_div(tmp, ref_div * post_div);

					if (pll->flags & RADEON_PLL_PREFER_CLOSEST_LOWER) {
						if (freq < current_freq)
							error = 0xffffffff;
						else
							error = freq - current_freq;
					} else
						error = abs(current_freq - freq);
					vco_diff = abs(vco - best_vco);

					if ((best_vco == 0 && error < best_error) ||
					    (best_vco != 0 &&
					     ((best_error > 100 && error < best_error - 100) ||
					      (abs(error - best_error) < 100 && vco_diff < best_vco_diff)))) {
						best_post_div = post_div;
						best_ref_div = ref_div;
						best_feedback_div = feedback_div;
						best_frac_feedback_div = frac_feedback_div;
						best_freq = current_freq;
						best_error = error;
						best_vco_diff = vco_diff;
					} else if (current_freq == freq) {
						if (best_freq == -1) {
							best_post_div = post_div;
							best_ref_div = ref_div;
							best_feedback_div = feedback_div;
							best_frac_feedback_div = frac_feedback_div;
							best_freq = current_freq;
							best_error = error;
							best_vco_diff = vco_diff;
						} else if (((pll->flags & RADEON_PLL_PREFER_LOW_REF_DIV) && (ref_div < best_ref_div)) ||
							   ((pll->flags & RADEON_PLL_PREFER_HIGH_REF_DIV) && (ref_div > best_ref_div)) ||
							   ((pll->flags & RADEON_PLL_PREFER_LOW_FB_DIV) && (feedback_div < best_feedback_div)) ||
							   ((pll->flags & RADEON_PLL_PREFER_HIGH_FB_DIV) && (feedback_div > best_feedback_div)) ||
							   ((pll->flags & RADEON_PLL_PREFER_LOW_POST_DIV) && (post_div < best_post_div)) ||
							   ((pll->flags & RADEON_PLL_PREFER_HIGH_POST_DIV) && (post_div > best_post_div))) {
							best_post_div = post_div;
							best_ref_div = ref_div;
							best_feedback_div = feedback_div;
							best_frac_feedback_div = frac_feedback_div;
							best_freq = current_freq;
							best_error = error;
							best_vco_diff = vco_diff;
						}
					}
					if (current_freq < freq)
						min_frac_feed_div = frac_feedback_div + 1;
					else
						max_frac_feed_div = frac_feedback_div;
				}
				if (current_freq < freq)
					min_feed_div = feedback_div + 1;
				else
					max_feed_div = feedback_div;
			}
		}
	}

	*dot_clock_p = best_freq / 10000;
	*fb_div_p = best_feedback_div;
	*frac_fb_div_p = best_frac_feedback_div;
	*ref_div_p = best_ref_div;
	*post_div_p = best_post_div;
	DRM_DEBUG_KMS("%lld %d, pll dividers - fb: %d.%d ref: %d, post %d\n",
		      freq,
		      best_freq / 1000, best_feedback_div, best_frac_feedback_div,
		      best_ref_div, best_post_div);
}
static void radeon_user_framebuffer_destroy(struct drm_framebuffer *fb)
{
	struct radeon_framebuffer *radeon_fb = to_radeon_framebuffer(fb);

	if (radeon_fb->obj) {
		drm_gem_object_unreference_unlocked(radeon_fb->obj);
	}
	drm_framebuffer_cleanup(fb);
	kfree(radeon_fb);
}
static int radeon_user_framebuffer_create_handle(struct drm_framebuffer *fb,
						 struct drm_file *file_priv,
						 unsigned int *handle)
{
	struct radeon_framebuffer *radeon_fb = to_radeon_framebuffer(fb);

	return drm_gem_handle_create(file_priv, radeon_fb->obj, handle);
}
static const struct drm_framebuffer_funcs radeon_fb_funcs = {
	.destroy = radeon_user_framebuffer_destroy,
	.create_handle = radeon_user_framebuffer_create_handle,
};
int
radeon_framebuffer_init(struct drm_device *dev,
			struct radeon_framebuffer *rfb,
			struct drm_mode_fb_cmd2 *mode_cmd,
			struct drm_gem_object *obj)
{
	int ret;
	rfb->obj = obj;
	ret = drm_framebuffer_init(dev, &rfb->base, &radeon_fb_funcs);
	if (ret) {
		rfb->obj = NULL;
		return ret;
	}
	drm_helper_mode_fill_fb_struct(&rfb->base, mode_cmd);
	return 0;
}
static struct drm_framebuffer *
radeon_user_framebuffer_create(struct drm_device *dev,
			       struct drm_file *file_priv,
			       struct drm_mode_fb_cmd2 *mode_cmd)
{
	struct drm_gem_object *obj;
	struct radeon_framebuffer *radeon_fb;
	int ret;

	obj = drm_gem_object_lookup(dev, file_priv, mode_cmd->handles[0]);
	if (obj == NULL) {
		dev_err(&dev->pdev->dev, "No GEM object associated to handle 0x%08X, "
			"can't create framebuffer\n", mode_cmd->handles[0]);
		return ERR_PTR(-ENOENT);
	}

	radeon_fb = kzalloc(sizeof(*radeon_fb), GFP_KERNEL);
	if (radeon_fb == NULL)
		return ERR_PTR(-ENOMEM);

	ret = radeon_framebuffer_init(dev, radeon_fb, mode_cmd, obj);
	if (ret) {
		kfree(radeon_fb);
		drm_gem_object_unreference_unlocked(obj);
		return ERR_PTR(ret);
	}

	return &radeon_fb->base;
}
static void radeon_output_poll_changed(struct drm_device *dev)
{
	struct radeon_device *rdev = dev->dev_private;
	radeon_fb_output_poll_changed(rdev);
}
static const struct drm_mode_config_funcs radeon_mode_funcs = {
	.fb_create = radeon_user_framebuffer_create,
	.output_poll_changed = radeon_output_poll_changed
};
static struct drm_prop_enum_list radeon_tmds_pll_enum_list[] =
{	{ 0, "driver" },
	{ 1, "bios" },
};
static struct drm_prop_enum_list radeon_tv_std_enum_list[] =
{	{ TV_STD_NTSC, "ntsc" },
	{ TV_STD_PAL, "pal" },
	{ TV_STD_PAL_M, "pal-m" },
	{ TV_STD_PAL_60, "pal-60" },
	{ TV_STD_NTSC_J, "ntsc-j" },
	{ TV_STD_SCART_PAL, "scart-pal" },
	{ TV_STD_PAL_CN, "pal-cn" },
	{ TV_STD_SECAM, "secam" },
};
static struct drm_prop_enum_list radeon_underscan_enum_list[] =
{	{ UNDERSCAN_OFF, "off" },
	{ UNDERSCAN_ON, "on" },
	{ UNDERSCAN_AUTO, "auto" },
};
static int radeon_modeset_create_props(struct radeon_device *rdev)
{
	int sz;

	if (rdev->is_atom_bios) {
		rdev->mode_info.coherent_mode_property =
			drm_property_create_range(rdev->ddev, 0, "coherent", 0, 1);
		if (!rdev->mode_info.coherent_mode_property)
			return -ENOMEM;
	}

	if (!ASIC_IS_AVIVO(rdev)) {
		sz = ARRAY_SIZE(radeon_tmds_pll_enum_list);
		rdev->mode_info.tmds_pll_property =
			drm_property_create_enum(rdev->ddev, 0,
						 "tmds_pll",
						 radeon_tmds_pll_enum_list, sz);
	}

	rdev->mode_info.load_detect_property =
		drm_property_create_range(rdev->ddev, 0, "load detection", 0, 1);
	if (!rdev->mode_info.load_detect_property)
		return -ENOMEM;

	drm_mode_create_scaling_mode_property(rdev->ddev);

	sz = ARRAY_SIZE(radeon_tv_std_enum_list);
	rdev->mode_info.tv_std_property =
		drm_property_create_enum(rdev->ddev, 0,
					 "tv standard",
					 radeon_tv_std_enum_list, sz);

	sz = ARRAY_SIZE(radeon_underscan_enum_list);
	rdev->mode_info.underscan_property =
		drm_property_create_enum(rdev->ddev, 0,
					 "underscan",
					 radeon_underscan_enum_list, sz);

	rdev->mode_info.underscan_hborder_property =
		drm_property_create_range(rdev->ddev, 0,
					  "underscan hborder", 0, 128);
	if (!rdev->mode_info.underscan_hborder_property)
		return -ENOMEM;

	rdev->mode_info.underscan_vborder_property =
		drm_property_create_range(rdev->ddev, 0,
					  "underscan vborder", 0, 128);
	if (!rdev->mode_info.underscan_vborder_property)
		return -ENOMEM;

	return 0;
}
void radeon_update_display_priority(struct radeon_device *rdev)
{
	/* adjustment options for the display watermarks */
	if ((radeon_disp_priority == 0) || (radeon_disp_priority > 2)) {
		/* set display priority to high for r3xx, rv515 chips
		 * this avoids flickering due to underflow to the
		 * display controllers during heavy acceleration.
		 * Don't force high on rs4xx igp chips as it seems to
		 * affect the sound card.  See kernel bug 15982.
		 */
		if ((ASIC_IS_R300(rdev) || (rdev->family == CHIP_RV515)) &&
		    !(rdev->flags & RADEON_IS_IGP))
			rdev->disp_priority = 2;
		else
			rdev->disp_priority = 0;
	} else
		rdev->disp_priority = radeon_disp_priority;
}
int radeon_modeset_init(struct radeon_device *rdev)
{
	int i;
	int ret;

	drm_mode_config_init(rdev->ddev);
	rdev->mode_info.mode_config_initialized = true;

	rdev->ddev->mode_config.funcs = (void *)&radeon_mode_funcs;

	if (ASIC_IS_DCE5(rdev)) {
		rdev->ddev->mode_config.max_width = 16384;
		rdev->ddev->mode_config.max_height = 16384;
	} else if (ASIC_IS_AVIVO(rdev)) {
		rdev->ddev->mode_config.max_width = 8192;
		rdev->ddev->mode_config.max_height = 8192;
	} else {
		rdev->ddev->mode_config.max_width = 4096;
		rdev->ddev->mode_config.max_height = 4096;
	}

	rdev->ddev->mode_config.preferred_depth = 24;
	rdev->ddev->mode_config.prefer_shadow = 1;

	rdev->ddev->mode_config.fb_base = rdev->mc.aper_base;

	ret = radeon_modeset_create_props(rdev);
	if (ret) {
		return ret;
	}

	/* init i2c buses */
	radeon_i2c_init(rdev);

	/* check combios for a valid hardcoded EDID - Sun servers */
	if (!rdev->is_atom_bios) {
		/* check for hardcoded EDID in BIOS */
		radeon_combios_check_hardcoded_edid(rdev);
	}

	/* allocate crtcs */
	for (i = 0; i < rdev->num_crtc; i++) {
		radeon_crtc_init(rdev->ddev, i);
	}

	/* okay we should have all the bios connectors */
	ret = radeon_setup_enc_conn(rdev->ddev);
	if (!ret) {
		return ret;
	}

	/* init dig PHYs, disp eng pll */
	if (rdev->is_atom_bios) {
		radeon_atom_encoder_init(rdev);
		radeon_atom_disp_eng_pll_init(rdev);
	}

	/* initialize hpd */
	radeon_hpd_init(rdev);

	/* Initialize power management */
	radeon_pm_init(rdev);

	radeon_fbdev_init(rdev);
	drm_kms_helper_poll_init(rdev->ddev);

	return 0;
}
void radeon_modeset_fini(struct radeon_device *rdev)
{
	radeon_fbdev_fini(rdev);
	kfree(rdev->mode_info.bios_hardcoded_edid);
	radeon_pm_fini(rdev);

	if (rdev->mode_info.mode_config_initialized) {
		drm_kms_helper_poll_fini(rdev->ddev);
		radeon_hpd_fini(rdev);
		drm_mode_config_cleanup(rdev->ddev);
		rdev->mode_info.mode_config_initialized = false;
	}
	/* free i2c buses */
	radeon_i2c_fini(rdev);
}
static bool is_hdtv_mode(struct drm_display_mode *mode)
{
	/* try and guess if this is a tv or a monitor */
	if ((mode->vdisplay == 480 && mode->hdisplay == 720) || /* 480p */
	    (mode->vdisplay == 576) || /* 576p */
	    (mode->vdisplay == 720) || /* 720p */
	    (mode->vdisplay == 1080)) /* 1080p */
		return true;
	else
		return false;
}
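/*
 * radeon_crtc_scaling_mode_fixup() below picks the RMX scaler setup for
 * the CRTC.  When underscan is (auto-)enabled on an AVIVO part driving an
 * HDMI TV mode, a border of hdisplay/32 + 16 (resp. vdisplay/32 + 16)
 * pixels per side is added unless the connector specifies its own border,
 * and the horizontal/vertical scale factors are stored as 20.12 fixed
 * point src/dst ratios in radeon_crtc->hsc/vsc.
 */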
bool radeon_crtc_scaling_mode_fixup(struct drm_crtc *crtc,
				    struct drm_display_mode *mode,
				    struct drm_display_mode *adjusted_mode)
{
	struct drm_device *dev = crtc->dev;
	struct radeon_device *rdev = dev->dev_private;
	struct drm_encoder *encoder;
	struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
	struct radeon_encoder *radeon_encoder;
	struct drm_connector *connector;
	struct radeon_connector *radeon_connector;
	bool first = true;
	u32 src_v = 1, dst_v = 1;
	u32 src_h = 1, dst_h = 1;

	radeon_crtc->h_border = 0;
	radeon_crtc->v_border = 0;

	list_for_each_entry(encoder, &dev->mode_config.encoder_list, head) {
		if (encoder->crtc != crtc)
			continue;
		radeon_encoder = to_radeon_encoder(encoder);
		connector = radeon_get_connector_for_encoder(encoder);
		radeon_connector = to_radeon_connector(connector);

		if (first) {
			/* set scaling */
			if (radeon_encoder->rmx_type == RMX_OFF)
				radeon_crtc->rmx_type = RMX_OFF;
			else if (mode->hdisplay < radeon_encoder->native_mode.hdisplay ||
				 mode->vdisplay < radeon_encoder->native_mode.vdisplay)
				radeon_crtc->rmx_type = radeon_encoder->rmx_type;
			else
				radeon_crtc->rmx_type = RMX_OFF;
			/* copy native mode */
			memcpy(&radeon_crtc->native_mode,
			       &radeon_encoder->native_mode,
			       sizeof(struct drm_display_mode));
			src_v = crtc->mode.vdisplay;
			dst_v = radeon_crtc->native_mode.vdisplay;
			src_h = crtc->mode.hdisplay;
			dst_h = radeon_crtc->native_mode.hdisplay;

			/* fix up for overscan on hdmi */
			if (ASIC_IS_AVIVO(rdev) &&
			    (!(mode->flags & DRM_MODE_FLAG_INTERLACE)) &&
			    ((radeon_encoder->underscan_type == UNDERSCAN_ON) ||
			     ((radeon_encoder->underscan_type == UNDERSCAN_AUTO) &&
			      drm_detect_hdmi_monitor(radeon_connector->edid) &&
			      is_hdtv_mode(mode)))) {
				if (radeon_encoder->underscan_hborder != 0)
					radeon_crtc->h_border = radeon_encoder->underscan_hborder;
				else
					radeon_crtc->h_border = (mode->hdisplay >> 5) + 16;
				if (radeon_encoder->underscan_vborder != 0)
					radeon_crtc->v_border = radeon_encoder->underscan_vborder;
				else
					radeon_crtc->v_border = (mode->vdisplay >> 5) + 16;
				radeon_crtc->rmx_type = RMX_FULL;
				src_v = crtc->mode.vdisplay;
				dst_v = crtc->mode.vdisplay - (radeon_crtc->v_border * 2);
				src_h = crtc->mode.hdisplay;
				dst_h = crtc->mode.hdisplay - (radeon_crtc->h_border * 2);
			}
			first = false;
		} else {
			if (radeon_crtc->rmx_type != radeon_encoder->rmx_type) {
				/* WARNING: Right now this can't happen but
				 * in the future we need to check that scaling
				 * are consistent across different encoder
				 * (ie all encoder can work with the same
				 *  scaling).
				 */
				DRM_ERROR("Scaling not consistent across encoder.\n");
				return false;
			}
		}
	}
	if (radeon_crtc->rmx_type != RMX_OFF) {
		fixed20_12 a, b;
		a.full = dfixed_const(src_v);
		b.full = dfixed_const(dst_v);
		radeon_crtc->vsc.full = dfixed_div(a, b);
		a.full = dfixed_const(src_h);
		b.full = dfixed_const(dst_h);
		radeon_crtc->hsc.full = dfixed_div(a, b);
	} else {
		radeon_crtc->vsc.full = dfixed_const(1);
		radeon_crtc->hsc.full = dfixed_const(1);
	}
	return true;
}
/*
 * Retrieve current video scanout position of crtc on a given gpu.
 *
 * \param dev Device to query.
 * \param crtc Crtc to query.
 * \param *vpos Location where vertical scanout position should be stored.
 * \param *hpos Location where horizontal scanout position should go.
 *
 * Returns vpos as a positive number while in active scanout area.
 * Returns vpos as a negative number inside vblank, counting the number
 * of scanlines to go until end of vblank, e.g., -1 means "one scanline
 * until start of active scanout / end of vblank."
 *
 * \return Flags, or'ed together as follows:
 *
 * DRM_SCANOUTPOS_VALID = Query successful.
 * DRM_SCANOUTPOS_INVBL = Inside vblank.
 * DRM_SCANOUTPOS_ACCURATE = Returned position is accurate. A lack of
 * this flag means that returned position may be offset by a constant but
 * unknown small number of scanlines wrt. real scanout position.
 *
 */
int radeon_get_crtc_scanoutpos(struct drm_device *dev, int crtc, int *vpos, int *hpos)
{
	u32 stat_crtc = 0, vbl = 0, position = 0;
	int vbl_start, vbl_end, vtotal, ret = 0;
	bool in_vbl = true;

	struct radeon_device *rdev = dev->dev_private;

	if (ASIC_IS_DCE4(rdev)) {
		if (crtc == 0) {
			vbl = RREG32(EVERGREEN_CRTC_V_BLANK_START_END +
				     EVERGREEN_CRTC0_REGISTER_OFFSET);
			position = RREG32(EVERGREEN_CRTC_STATUS_POSITION +
					  EVERGREEN_CRTC0_REGISTER_OFFSET);
			ret |= DRM_SCANOUTPOS_VALID;
		}
		if (crtc == 1) {
			vbl = RREG32(EVERGREEN_CRTC_V_BLANK_START_END +
				     EVERGREEN_CRTC1_REGISTER_OFFSET);
			position = RREG32(EVERGREEN_CRTC_STATUS_POSITION +
					  EVERGREEN_CRTC1_REGISTER_OFFSET);
			ret |= DRM_SCANOUTPOS_VALID;
		}
		if (crtc == 2) {
			vbl = RREG32(EVERGREEN_CRTC_V_BLANK_START_END +
				     EVERGREEN_CRTC2_REGISTER_OFFSET);
			position = RREG32(EVERGREEN_CRTC_STATUS_POSITION +
					  EVERGREEN_CRTC2_REGISTER_OFFSET);
			ret |= DRM_SCANOUTPOS_VALID;
		}
		if (crtc == 3) {
			vbl = RREG32(EVERGREEN_CRTC_V_BLANK_START_END +
				     EVERGREEN_CRTC3_REGISTER_OFFSET);
			position = RREG32(EVERGREEN_CRTC_STATUS_POSITION +
					  EVERGREEN_CRTC3_REGISTER_OFFSET);
			ret |= DRM_SCANOUTPOS_VALID;
		}
		if (crtc == 4) {
			vbl = RREG32(EVERGREEN_CRTC_V_BLANK_START_END +
				     EVERGREEN_CRTC4_REGISTER_OFFSET);
			position = RREG32(EVERGREEN_CRTC_STATUS_POSITION +
					  EVERGREEN_CRTC4_REGISTER_OFFSET);
			ret |= DRM_SCANOUTPOS_VALID;
		}
		if (crtc == 5) {
			vbl = RREG32(EVERGREEN_CRTC_V_BLANK_START_END +
				     EVERGREEN_CRTC5_REGISTER_OFFSET);
			position = RREG32(EVERGREEN_CRTC_STATUS_POSITION +
					  EVERGREEN_CRTC5_REGISTER_OFFSET);
			ret |= DRM_SCANOUTPOS_VALID;
		}
	} else if (ASIC_IS_AVIVO(rdev)) {
		if (crtc == 0) {
			vbl = RREG32(AVIVO_D1CRTC_V_BLANK_START_END);
			position = RREG32(AVIVO_D1CRTC_STATUS_POSITION);
			ret |= DRM_SCANOUTPOS_VALID;
		}
		if (crtc == 1) {
			vbl = RREG32(AVIVO_D2CRTC_V_BLANK_START_END);
			position = RREG32(AVIVO_D2CRTC_STATUS_POSITION);
			ret |= DRM_SCANOUTPOS_VALID;
		}
	} else {
		/* Pre-AVIVO: Different encoding of scanout pos and vblank interval. */
		if (crtc == 0) {
			/* Assume vbl_end == 0, get vbl_start from
			 * upper 16 bits.
			 */
			vbl = (RREG32(RADEON_CRTC_V_TOTAL_DISP) &
				RADEON_CRTC_V_DISP) >> RADEON_CRTC_V_DISP_SHIFT;
			/* Only retrieve vpos from upper 16 bits, set hpos == 0. */
			position = (RREG32(RADEON_CRTC_VLINE_CRNT_VLINE) >> 16) & RADEON_CRTC_V_TOTAL;
			stat_crtc = RREG32(RADEON_CRTC_STATUS);
			if (!(stat_crtc & 1))
				in_vbl = false;

			ret |= DRM_SCANOUTPOS_VALID;
		}
		if (crtc == 1) {
			vbl = (RREG32(RADEON_CRTC2_V_TOTAL_DISP) &
				RADEON_CRTC_V_DISP) >> RADEON_CRTC_V_DISP_SHIFT;
			position = (RREG32(RADEON_CRTC2_VLINE_CRNT_VLINE) >> 16) & RADEON_CRTC_V_TOTAL;
			stat_crtc = RREG32(RADEON_CRTC2_STATUS);
			if (!(stat_crtc & 1))
				in_vbl = false;

			ret |= DRM_SCANOUTPOS_VALID;
		}
	}

	/* Decode into vertical and horizontal scanout position. */
	*vpos = position & 0x1fff;
	*hpos = (position >> 16) & 0x1fff;

	/* Valid vblank area boundaries from gpu retrieved? */
	if (vbl > 0) {
		/* Yes: Decode. */
		ret |= DRM_SCANOUTPOS_ACCURATE;
		vbl_start = vbl & 0x1fff;
		vbl_end = (vbl >> 16) & 0x1fff;
	} else {
		/* No: Fake something reasonable which gives at least ok results. */
		vbl_start = rdev->mode_info.crtcs[crtc]->base.hwmode.crtc_vdisplay;
		vbl_end = 0;
	}

	/* Test scanout position against vblank region. */
	if ((*vpos < vbl_start) && (*vpos >= vbl_end))
		in_vbl = false;

	/* Check if inside vblank area and apply corrective offsets:
	 * vpos will then be >=0 in video scanout area, but negative
	 * within vblank area, counting down the number of lines until
	 * start of scanout.
	 */

	/* Inside "upper part" of vblank area? Apply corrective offset if so: */
	if (in_vbl && (*vpos >= vbl_start)) {
		vtotal = rdev->mode_info.crtcs[crtc]->base.hwmode.crtc_vtotal;
		*vpos = *vpos - vtotal;
	}

	/* Correct for shifted end of vbl at vbl_end. */
	*vpos = *vpos - vbl_end;

	/* In vblank? */
	if (in_vbl)
		ret |= DRM_SCANOUTPOS_INVBL;

	return ret;
}