/*
 * Copyright 2007-8 Advanced Micro Devices, Inc.
 * Copyright 2008 Red Hat Inc.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
 *
 * Authors: Dave Airlie
 *
 * $FreeBSD: head/sys/dev/drm2/radeon/radeon_display.c 254885 2013-08-25 19:37:15Z dumbbell $
 */
30 #include <uapi_drm/radeon_drm.h>
35 #include <drm/drm_crtc_helper.h>
36 #include <drm/drm_edid.h>
37 #include <linux/err.h>
39 static void avivo_crtc_load_lut(struct drm_crtc
*crtc
)
41 struct radeon_crtc
*radeon_crtc
= to_radeon_crtc(crtc
);
42 struct drm_device
*dev
= crtc
->dev
;
43 struct radeon_device
*rdev
= dev
->dev_private
;
46 DRM_DEBUG_KMS("%d\n", radeon_crtc
->crtc_id
);
47 WREG32(AVIVO_DC_LUTA_CONTROL
+ radeon_crtc
->crtc_offset
, 0);
49 WREG32(AVIVO_DC_LUTA_BLACK_OFFSET_BLUE
+ radeon_crtc
->crtc_offset
, 0);
50 WREG32(AVIVO_DC_LUTA_BLACK_OFFSET_GREEN
+ radeon_crtc
->crtc_offset
, 0);
51 WREG32(AVIVO_DC_LUTA_BLACK_OFFSET_RED
+ radeon_crtc
->crtc_offset
, 0);
53 WREG32(AVIVO_DC_LUTA_WHITE_OFFSET_BLUE
+ radeon_crtc
->crtc_offset
, 0xffff);
54 WREG32(AVIVO_DC_LUTA_WHITE_OFFSET_GREEN
+ radeon_crtc
->crtc_offset
, 0xffff);
55 WREG32(AVIVO_DC_LUTA_WHITE_OFFSET_RED
+ radeon_crtc
->crtc_offset
, 0xffff);
57 WREG32(AVIVO_DC_LUT_RW_SELECT
, radeon_crtc
->crtc_id
);
58 WREG32(AVIVO_DC_LUT_RW_MODE
, 0);
59 WREG32(AVIVO_DC_LUT_WRITE_EN_MASK
, 0x0000003f);
61 WREG8(AVIVO_DC_LUT_RW_INDEX
, 0);
62 for (i
= 0; i
< 256; i
++) {
63 WREG32(AVIVO_DC_LUT_30_COLOR
,
64 (radeon_crtc
->lut_r
[i
] << 20) |
65 (radeon_crtc
->lut_g
[i
] << 10) |
66 (radeon_crtc
->lut_b
[i
] << 0));
69 WREG32(AVIVO_D1GRPH_LUT_SEL
+ radeon_crtc
->crtc_offset
, radeon_crtc
->crtc_id
);
72 static void dce4_crtc_load_lut(struct drm_crtc
*crtc
)
74 struct radeon_crtc
*radeon_crtc
= to_radeon_crtc(crtc
);
75 struct drm_device
*dev
= crtc
->dev
;
76 struct radeon_device
*rdev
= dev
->dev_private
;
79 DRM_DEBUG_KMS("%d\n", radeon_crtc
->crtc_id
);
80 WREG32(EVERGREEN_DC_LUT_CONTROL
+ radeon_crtc
->crtc_offset
, 0);
82 WREG32(EVERGREEN_DC_LUT_BLACK_OFFSET_BLUE
+ radeon_crtc
->crtc_offset
, 0);
83 WREG32(EVERGREEN_DC_LUT_BLACK_OFFSET_GREEN
+ radeon_crtc
->crtc_offset
, 0);
84 WREG32(EVERGREEN_DC_LUT_BLACK_OFFSET_RED
+ radeon_crtc
->crtc_offset
, 0);
86 WREG32(EVERGREEN_DC_LUT_WHITE_OFFSET_BLUE
+ radeon_crtc
->crtc_offset
, 0xffff);
87 WREG32(EVERGREEN_DC_LUT_WHITE_OFFSET_GREEN
+ radeon_crtc
->crtc_offset
, 0xffff);
88 WREG32(EVERGREEN_DC_LUT_WHITE_OFFSET_RED
+ radeon_crtc
->crtc_offset
, 0xffff);
90 WREG32(EVERGREEN_DC_LUT_RW_MODE
+ radeon_crtc
->crtc_offset
, 0);
91 WREG32(EVERGREEN_DC_LUT_WRITE_EN_MASK
+ radeon_crtc
->crtc_offset
, 0x00000007);
93 WREG32(EVERGREEN_DC_LUT_RW_INDEX
+ radeon_crtc
->crtc_offset
, 0);
94 for (i
= 0; i
< 256; i
++) {
95 WREG32(EVERGREEN_DC_LUT_30_COLOR
+ radeon_crtc
->crtc_offset
,
96 (radeon_crtc
->lut_r
[i
] << 20) |
97 (radeon_crtc
->lut_g
[i
] << 10) |
98 (radeon_crtc
->lut_b
[i
] << 0));
102 static void dce5_crtc_load_lut(struct drm_crtc
*crtc
)
104 struct radeon_crtc
*radeon_crtc
= to_radeon_crtc(crtc
);
105 struct drm_device
*dev
= crtc
->dev
;
106 struct radeon_device
*rdev
= dev
->dev_private
;
109 DRM_DEBUG_KMS("%d\n", radeon_crtc
->crtc_id
);
111 WREG32(NI_INPUT_CSC_CONTROL
+ radeon_crtc
->crtc_offset
,
112 (NI_INPUT_CSC_GRPH_MODE(NI_INPUT_CSC_BYPASS
) |
113 NI_INPUT_CSC_OVL_MODE(NI_INPUT_CSC_BYPASS
)));
114 WREG32(NI_PRESCALE_GRPH_CONTROL
+ radeon_crtc
->crtc_offset
,
115 NI_GRPH_PRESCALE_BYPASS
);
116 WREG32(NI_PRESCALE_OVL_CONTROL
+ radeon_crtc
->crtc_offset
,
117 NI_OVL_PRESCALE_BYPASS
);
118 WREG32(NI_INPUT_GAMMA_CONTROL
+ radeon_crtc
->crtc_offset
,
119 (NI_GRPH_INPUT_GAMMA_MODE(NI_INPUT_GAMMA_USE_LUT
) |
120 NI_OVL_INPUT_GAMMA_MODE(NI_INPUT_GAMMA_USE_LUT
)));
122 WREG32(EVERGREEN_DC_LUT_CONTROL
+ radeon_crtc
->crtc_offset
, 0);
124 WREG32(EVERGREEN_DC_LUT_BLACK_OFFSET_BLUE
+ radeon_crtc
->crtc_offset
, 0);
125 WREG32(EVERGREEN_DC_LUT_BLACK_OFFSET_GREEN
+ radeon_crtc
->crtc_offset
, 0);
126 WREG32(EVERGREEN_DC_LUT_BLACK_OFFSET_RED
+ radeon_crtc
->crtc_offset
, 0);
128 WREG32(EVERGREEN_DC_LUT_WHITE_OFFSET_BLUE
+ radeon_crtc
->crtc_offset
, 0xffff);
129 WREG32(EVERGREEN_DC_LUT_WHITE_OFFSET_GREEN
+ radeon_crtc
->crtc_offset
, 0xffff);
130 WREG32(EVERGREEN_DC_LUT_WHITE_OFFSET_RED
+ radeon_crtc
->crtc_offset
, 0xffff);
132 WREG32(EVERGREEN_DC_LUT_RW_MODE
+ radeon_crtc
->crtc_offset
, 0);
133 WREG32(EVERGREEN_DC_LUT_WRITE_EN_MASK
+ radeon_crtc
->crtc_offset
, 0x00000007);
135 WREG32(EVERGREEN_DC_LUT_RW_INDEX
+ radeon_crtc
->crtc_offset
, 0);
136 for (i
= 0; i
< 256; i
++) {
137 WREG32(EVERGREEN_DC_LUT_30_COLOR
+ radeon_crtc
->crtc_offset
,
138 (radeon_crtc
->lut_r
[i
] << 20) |
139 (radeon_crtc
->lut_g
[i
] << 10) |
140 (radeon_crtc
->lut_b
[i
] << 0));
143 WREG32(NI_DEGAMMA_CONTROL
+ radeon_crtc
->crtc_offset
,
144 (NI_GRPH_DEGAMMA_MODE(NI_DEGAMMA_BYPASS
) |
145 NI_OVL_DEGAMMA_MODE(NI_DEGAMMA_BYPASS
) |
146 NI_ICON_DEGAMMA_MODE(NI_DEGAMMA_BYPASS
) |
147 NI_CURSOR_DEGAMMA_MODE(NI_DEGAMMA_BYPASS
)));
148 WREG32(NI_GAMUT_REMAP_CONTROL
+ radeon_crtc
->crtc_offset
,
149 (NI_GRPH_GAMUT_REMAP_MODE(NI_GAMUT_REMAP_BYPASS
) |
150 NI_OVL_GAMUT_REMAP_MODE(NI_GAMUT_REMAP_BYPASS
)));
151 WREG32(NI_REGAMMA_CONTROL
+ radeon_crtc
->crtc_offset
,
152 (NI_GRPH_REGAMMA_MODE(NI_REGAMMA_BYPASS
) |
153 NI_OVL_REGAMMA_MODE(NI_REGAMMA_BYPASS
)));
154 WREG32(NI_OUTPUT_CSC_CONTROL
+ radeon_crtc
->crtc_offset
,
155 (NI_OUTPUT_CSC_GRPH_MODE(NI_OUTPUT_CSC_BYPASS
) |
156 NI_OUTPUT_CSC_OVL_MODE(NI_OUTPUT_CSC_BYPASS
)));
157 /* XXX match this to the depth of the crtc fmt block, move to modeset? */
158 WREG32(0x6940 + radeon_crtc
->crtc_offset
, 0);
162 static void legacy_crtc_load_lut(struct drm_crtc
*crtc
)
164 struct radeon_crtc
*radeon_crtc
= to_radeon_crtc(crtc
);
165 struct drm_device
*dev
= crtc
->dev
;
166 struct radeon_device
*rdev
= dev
->dev_private
;
170 dac2_cntl
= RREG32(RADEON_DAC_CNTL2
);
171 if (radeon_crtc
->crtc_id
== 0)
172 dac2_cntl
&= (uint32_t)~RADEON_DAC2_PALETTE_ACC_CTL
;
174 dac2_cntl
|= RADEON_DAC2_PALETTE_ACC_CTL
;
175 WREG32(RADEON_DAC_CNTL2
, dac2_cntl
);
177 WREG8(RADEON_PALETTE_INDEX
, 0);
178 for (i
= 0; i
< 256; i
++) {
179 WREG32(RADEON_PALETTE_30_DATA
,
180 (radeon_crtc
->lut_r
[i
] << 20) |
181 (radeon_crtc
->lut_g
[i
] << 10) |
182 (radeon_crtc
->lut_b
[i
] << 0));
186 void radeon_crtc_load_lut(struct drm_crtc
*crtc
)
188 struct drm_device
*dev
= crtc
->dev
;
189 struct radeon_device
*rdev
= dev
->dev_private
;
194 if (ASIC_IS_DCE5(rdev
))
195 dce5_crtc_load_lut(crtc
);
196 else if (ASIC_IS_DCE4(rdev
))
197 dce4_crtc_load_lut(crtc
);
198 else if (ASIC_IS_AVIVO(rdev
))
199 avivo_crtc_load_lut(crtc
);
201 legacy_crtc_load_lut(crtc
);
204 /** Sets the color ramps on behalf of fbcon */
205 void radeon_crtc_fb_gamma_set(struct drm_crtc
*crtc
, u16 red
, u16 green
,
208 struct radeon_crtc
*radeon_crtc
= to_radeon_crtc(crtc
);
210 radeon_crtc
->lut_r
[regno
] = red
>> 6;
211 radeon_crtc
->lut_g
[regno
] = green
>> 6;
212 radeon_crtc
->lut_b
[regno
] = blue
>> 6;
215 /** Gets the color ramps on behalf of fbcon */
216 void radeon_crtc_fb_gamma_get(struct drm_crtc
*crtc
, u16
*red
, u16
*green
,
217 u16
*blue
, int regno
)
219 struct radeon_crtc
*radeon_crtc
= to_radeon_crtc(crtc
);
221 *red
= radeon_crtc
->lut_r
[regno
] << 6;
222 *green
= radeon_crtc
->lut_g
[regno
] << 6;
223 *blue
= radeon_crtc
->lut_b
[regno
] << 6;
226 static void radeon_crtc_gamma_set(struct drm_crtc
*crtc
, u16
*red
, u16
*green
,
227 u16
*blue
, uint32_t start
, uint32_t size
)
229 struct radeon_crtc
*radeon_crtc
= to_radeon_crtc(crtc
);
230 int end
= (start
+ size
> 256) ? 256 : start
+ size
, i
;
232 /* userspace palettes are always correct as is */
233 for (i
= start
; i
< end
; i
++) {
234 radeon_crtc
->lut_r
[i
] = red
[i
] >> 6;
235 radeon_crtc
->lut_g
[i
] = green
[i
] >> 6;
236 radeon_crtc
->lut_b
[i
] = blue
[i
] >> 6;
238 radeon_crtc_load_lut(crtc
);
241 static void radeon_crtc_destroy(struct drm_crtc
*crtc
)
243 struct radeon_crtc
*radeon_crtc
= to_radeon_crtc(crtc
);
245 drm_crtc_cleanup(crtc
);
246 drm_free(radeon_crtc
, M_DRM
);
250 * Handle unpin events outside the interrupt handler proper.
252 static void radeon_unpin_work_func(void *arg
, int pending
)
254 struct radeon_unpin_work
*work
= arg
;
257 /* unpin of the old buffer */
258 r
= radeon_bo_reserve(work
->old_rbo
, false);
259 if (likely(r
== 0)) {
260 r
= radeon_bo_unpin(work
->old_rbo
);
261 if (unlikely(r
!= 0)) {
262 DRM_ERROR("failed to unpin buffer after flip\n");
264 radeon_bo_unreserve(work
->old_rbo
);
266 DRM_ERROR("failed to reserve buffer after flip\n");
268 drm_gem_object_unreference_unlocked(&work
->old_rbo
->gem_base
);
269 drm_free(work
, M_DRM
);
272 void radeon_crtc_handle_flip(struct radeon_device
*rdev
, int crtc_id
)
274 struct radeon_crtc
*radeon_crtc
= rdev
->mode_info
.crtcs
[crtc_id
];
275 struct radeon_unpin_work
*work
;
276 struct drm_pending_vblank_event
*e
;
281 lockmgr(&rdev
->ddev
->event_lock
, LK_EXCLUSIVE
);
282 work
= radeon_crtc
->unpin_work
;
284 (work
->fence
&& !radeon_fence_signaled(work
->fence
))) {
285 lockmgr(&rdev
->ddev
->event_lock
, LK_RELEASE
);
288 /* New pageflip, or just completion of a previous one? */
289 if (!radeon_crtc
->deferred_flip_completion
) {
290 /* do the flip (mmio) */
291 update_pending
= radeon_page_flip(rdev
, crtc_id
, work
->new_crtc_base
);
293 /* This is just a completion of a flip queued in crtc
294 * at last invocation. Make sure we go directly to
295 * completion routine.
298 radeon_crtc
->deferred_flip_completion
= 0;
301 /* Has the pageflip already completed in crtc, or is it certain
302 * to complete in this vblank?
304 if (update_pending
&&
305 (DRM_SCANOUTPOS_VALID
& radeon_get_crtc_scanoutpos(rdev
->ddev
, crtc_id
,
307 ((vpos
>= (99 * rdev
->mode_info
.crtcs
[crtc_id
]->base
.hwmode
.crtc_vdisplay
)/100) ||
308 (vpos
< 0 && !ASIC_IS_AVIVO(rdev
)))) {
309 /* crtc didn't flip in this target vblank interval,
310 * but flip is pending in crtc. Based on the current
311 * scanout position we know that the current frame is
312 * (nearly) complete and the flip will (likely)
313 * complete before the start of the next frame.
317 if (update_pending
) {
318 /* crtc didn't flip in this target vblank interval,
319 * but flip is pending in crtc. It will complete it
320 * in next vblank interval, so complete the flip at
323 radeon_crtc
->deferred_flip_completion
= 1;
324 lockmgr(&rdev
->ddev
->event_lock
, LK_RELEASE
);
328 /* Pageflip (will be) certainly completed in this vblank. Clean up. */
329 radeon_crtc
->unpin_work
= NULL
;
331 /* wakeup userspace */
334 e
->event
.sequence
= drm_vblank_count_and_time(rdev
->ddev
, crtc_id
, &now
);
335 e
->event
.tv_sec
= now
.tv_sec
;
336 e
->event
.tv_usec
= now
.tv_usec
;
337 list_add_tail(&e
->base
.link
, &e
->base
.file_priv
->event_list
);
339 wake_up_interruptible(&e
->base
.file_priv
->event_wait
);
340 #endif /* DUMBBELL_WIP */
342 lockmgr(&rdev
->ddev
->event_lock
, LK_RELEASE
);
344 drm_vblank_put(rdev
->ddev
, radeon_crtc
->crtc_id
);
345 radeon_fence_unref(&work
->fence
);
346 radeon_post_page_flip(work
->rdev
, work
->crtc_id
);
347 taskqueue_enqueue(rdev
->tq
, &work
->work
);
350 static int radeon_crtc_page_flip(struct drm_crtc
*crtc
,
351 struct drm_framebuffer
*fb
,
352 struct drm_pending_vblank_event
*event
)
354 struct drm_device
*dev
= crtc
->dev
;
355 struct radeon_device
*rdev
= dev
->dev_private
;
356 struct radeon_crtc
*radeon_crtc
= to_radeon_crtc(crtc
);
357 struct radeon_framebuffer
*old_radeon_fb
;
358 struct radeon_framebuffer
*new_radeon_fb
;
359 struct drm_gem_object
*obj
;
360 struct radeon_bo
*rbo
;
361 struct radeon_unpin_work
*work
;
362 u32 tiling_flags
, pitch_pixels
;
366 work
= kmalloc(sizeof *work
, M_DRM
, M_WAITOK
| M_ZERO
);
372 work
->crtc_id
= radeon_crtc
->crtc_id
;
373 old_radeon_fb
= to_radeon_framebuffer(crtc
->fb
);
374 new_radeon_fb
= to_radeon_framebuffer(fb
);
375 /* schedule unpin of the old buffer */
376 obj
= old_radeon_fb
->obj
;
377 /* take a reference to the old object */
378 drm_gem_object_reference(obj
);
379 rbo
= gem_to_radeon_bo(obj
);
381 obj
= new_radeon_fb
->obj
;
382 rbo
= gem_to_radeon_bo(obj
);
384 lockmgr(&rbo
->tbo
.bdev
->fence_lock
, LK_EXCLUSIVE
);
385 if (rbo
->tbo
.sync_obj
)
386 work
->fence
= radeon_fence_ref(rbo
->tbo
.sync_obj
);
387 lockmgr(&rbo
->tbo
.bdev
->fence_lock
, LK_RELEASE
);
389 TASK_INIT(&work
->work
, 0, radeon_unpin_work_func
, work
);
391 /* We borrow the event spin lock for protecting unpin_work */
392 lockmgr(&dev
->event_lock
, LK_EXCLUSIVE
);
393 if (radeon_crtc
->unpin_work
) {
394 DRM_DEBUG_DRIVER("flip queue: crtc already busy\n");
398 radeon_crtc
->unpin_work
= work
;
399 radeon_crtc
->deferred_flip_completion
= 0;
400 lockmgr(&dev
->event_lock
, LK_RELEASE
);
402 /* pin the new buffer */
403 DRM_DEBUG_DRIVER("flip-ioctl() cur_fbo = %p, cur_bbo = %p\n",
406 r
= radeon_bo_reserve(rbo
, false);
407 if (unlikely(r
!= 0)) {
408 DRM_ERROR("failed to reserve new rbo buffer before flip\n");
411 /* Only 27 bit offset for legacy CRTC */
412 r
= radeon_bo_pin_restricted(rbo
, RADEON_GEM_DOMAIN_VRAM
,
413 ASIC_IS_AVIVO(rdev
) ? 0 : 1 << 27, &base
);
414 if (unlikely(r
!= 0)) {
415 radeon_bo_unreserve(rbo
);
417 DRM_ERROR("failed to pin new rbo buffer before flip\n");
420 radeon_bo_get_tiling_flags(rbo
, &tiling_flags
, NULL
);
421 radeon_bo_unreserve(rbo
);
423 if (!ASIC_IS_AVIVO(rdev
)) {
424 /* crtc offset is from display base addr not FB location */
425 base
-= radeon_crtc
->legacy_display_base_addr
;
426 pitch_pixels
= fb
->pitches
[0] / (fb
->bits_per_pixel
/ 8);
428 if (tiling_flags
& RADEON_TILING_MACRO
) {
429 if (ASIC_IS_R300(rdev
)) {
432 int byteshift
= fb
->bits_per_pixel
>> 4;
433 int tile_addr
= (((crtc
->y
>> 3) * pitch_pixels
+ crtc
->x
) >> (8 - byteshift
)) << 11;
434 base
+= tile_addr
+ ((crtc
->x
<< byteshift
) % 256) + ((crtc
->y
% 8) << 8);
437 int offset
= crtc
->y
* pitch_pixels
+ crtc
->x
;
438 switch (fb
->bits_per_pixel
) {
459 lockmgr(&dev
->event_lock
, LK_EXCLUSIVE
);
460 work
->new_crtc_base
= base
;
461 lockmgr(&dev
->event_lock
, LK_RELEASE
);
466 r
= drm_vblank_get(dev
, radeon_crtc
->crtc_id
);
468 DRM_ERROR("failed to get vblank before flip\n");
472 /* set the proper interrupt */
473 radeon_pre_page_flip(rdev
, radeon_crtc
->crtc_id
);
478 if (unlikely(radeon_bo_reserve(rbo
, false) != 0)) {
479 DRM_ERROR("failed to reserve new rbo in error path\n");
482 if (unlikely(radeon_bo_unpin(rbo
) != 0)) {
483 DRM_ERROR("failed to unpin new rbo in error path\n");
485 radeon_bo_unreserve(rbo
);
488 lockmgr(&dev
->event_lock
, LK_EXCLUSIVE
);
489 radeon_crtc
->unpin_work
= NULL
;
491 lockmgr(&dev
->event_lock
, LK_RELEASE
);
492 drm_gem_object_unreference_unlocked(old_radeon_fb
->obj
);
493 radeon_fence_unref(&work
->fence
);
494 drm_free(work
, M_DRM
);
499 static const struct drm_crtc_funcs radeon_crtc_funcs
= {
500 .cursor_set
= radeon_crtc_cursor_set
,
501 .cursor_move
= radeon_crtc_cursor_move
,
502 .gamma_set
= radeon_crtc_gamma_set
,
503 .set_config
= drm_crtc_helper_set_config
,
504 .destroy
= radeon_crtc_destroy
,
505 .page_flip
= radeon_crtc_page_flip
,
508 static void radeon_crtc_init(struct drm_device
*dev
, int index
)
510 struct radeon_device
*rdev
= dev
->dev_private
;
511 struct radeon_crtc
*radeon_crtc
;
514 radeon_crtc
= kmalloc(sizeof(struct radeon_crtc
) + (RADEONFB_CONN_LIMIT
* sizeof(struct drm_connector
*)),
515 M_DRM
, M_WAITOK
| M_ZERO
);
516 if (radeon_crtc
== NULL
)
519 drm_crtc_init(dev
, &radeon_crtc
->base
, &radeon_crtc_funcs
);
521 drm_mode_crtc_set_gamma_size(&radeon_crtc
->base
, 256);
522 radeon_crtc
->crtc_id
= index
;
523 rdev
->mode_info
.crtcs
[index
] = radeon_crtc
;
526 radeon_crtc
->mode_set
.crtc
= &radeon_crtc
->base
;
527 radeon_crtc
->mode_set
.connectors
= (struct drm_connector
**)(radeon_crtc
+ 1);
528 radeon_crtc
->mode_set
.num_connectors
= 0;
531 for (i
= 0; i
< 256; i
++) {
532 radeon_crtc
->lut_r
[i
] = i
<< 2;
533 radeon_crtc
->lut_g
[i
] = i
<< 2;
534 radeon_crtc
->lut_b
[i
] = i
<< 2;
537 if (rdev
->is_atom_bios
&& (ASIC_IS_AVIVO(rdev
) || radeon_r4xx_atom
))
538 radeon_atombios_init_crtc(dev
, radeon_crtc
);
540 radeon_legacy_init_crtc(dev
, radeon_crtc
);
/*
 * Encoder names indexed by ATOM encoder object id, used by
 * radeon_print_display_setup().
 * NOTE(review): most entries were missing from the mangled source and
 * were restored from the upstream radeon_display.c table (the surviving
 * entries match indices 19-22 and 31) — verify against FreeBSD r254885.
 */
static const char *encoder_names[37] = {
	"NONE",
	"INTERNAL_LVDS",
	"INTERNAL_TMDS1",
	"INTERNAL_TMDS2",
	"INTERNAL_DAC1",
	"INTERNAL_DAC2",
	"INTERNAL_SDVOA",
	"INTERNAL_SDVOB",
	"SI170B",
	"CH7303",
	"CH7301",
	"INTERNAL_DVO1",
	"EXTERNAL_SDVOA",
	"EXTERNAL_SDVOB",
	"TITFP513",
	"INTERNAL_LVTM1",
	"VT1623",
	"HDMI_SI1930",
	"HDMI_INTERNAL",
	"INTERNAL_KLDSCP_TMDS1",
	"INTERNAL_KLDSCP_DVO1",
	"INTERNAL_KLDSCP_DAC1",
	"INTERNAL_KLDSCP_DAC2",
	"SI178",
	"MVPU_FPGA",
	"INTERNAL_DDI",
	"VT1625",
	"HDMI_SI1932",
	"DP_AN9801",
	"DP_DP501",
	"INTERNAL_UNIPHY",
	"INTERNAL_KLDSCP_LVTMA",
	"INTERNAL_UNIPHY1",
	"INTERNAL_UNIPHY2",
	"NUTMEG",
	"TRAVIS",
	"INTERNAL_VCE",
};
/* Hotplug-detect pin names indexed by radeon_connector->hpd.hpd. */
static const char *hpd_names[6] = {
	"HPD1",
	"HPD2",
	"HPD3",
	"HPD4",
	"HPD5",
	"HPD6",
};
592 static void radeon_print_display_setup(struct drm_device
*dev
)
594 struct drm_connector
*connector
;
595 struct radeon_connector
*radeon_connector
;
596 struct drm_encoder
*encoder
;
597 struct radeon_encoder
*radeon_encoder
;
601 DRM_INFO("Radeon Display Connectors\n");
602 list_for_each_entry(connector
, &dev
->mode_config
.connector_list
, head
) {
603 radeon_connector
= to_radeon_connector(connector
);
604 DRM_INFO("Connector %d:\n", i
);
605 DRM_INFO(" %s\n", drm_get_connector_name(connector
));
606 if (radeon_connector
->hpd
.hpd
!= RADEON_HPD_NONE
)
607 DRM_INFO(" %s\n", hpd_names
[radeon_connector
->hpd
.hpd
]);
608 if (radeon_connector
->ddc_bus
) {
609 DRM_INFO(" DDC: 0x%x 0x%x 0x%x 0x%x 0x%x 0x%x 0x%x 0x%x\n",
610 radeon_connector
->ddc_bus
->rec
.mask_clk_reg
,
611 radeon_connector
->ddc_bus
->rec
.mask_data_reg
,
612 radeon_connector
->ddc_bus
->rec
.a_clk_reg
,
613 radeon_connector
->ddc_bus
->rec
.a_data_reg
,
614 radeon_connector
->ddc_bus
->rec
.en_clk_reg
,
615 radeon_connector
->ddc_bus
->rec
.en_data_reg
,
616 radeon_connector
->ddc_bus
->rec
.y_clk_reg
,
617 radeon_connector
->ddc_bus
->rec
.y_data_reg
);
618 if (radeon_connector
->router
.ddc_valid
)
619 DRM_INFO(" DDC Router 0x%x/0x%x\n",
620 radeon_connector
->router
.ddc_mux_control_pin
,
621 radeon_connector
->router
.ddc_mux_state
);
622 if (radeon_connector
->router
.cd_valid
)
623 DRM_INFO(" Clock/Data Router 0x%x/0x%x\n",
624 radeon_connector
->router
.cd_mux_control_pin
,
625 radeon_connector
->router
.cd_mux_state
);
627 if (connector
->connector_type
== DRM_MODE_CONNECTOR_VGA
||
628 connector
->connector_type
== DRM_MODE_CONNECTOR_DVII
||
629 connector
->connector_type
== DRM_MODE_CONNECTOR_DVID
||
630 connector
->connector_type
== DRM_MODE_CONNECTOR_DVIA
||
631 connector
->connector_type
== DRM_MODE_CONNECTOR_HDMIA
||
632 connector
->connector_type
== DRM_MODE_CONNECTOR_HDMIB
)
633 DRM_INFO(" DDC: no ddc bus - possible BIOS bug - please report to xorg-driver-ati@lists.x.org\n");
635 DRM_INFO(" Encoders:\n");
636 list_for_each_entry(encoder
, &dev
->mode_config
.encoder_list
, head
) {
637 radeon_encoder
= to_radeon_encoder(encoder
);
638 devices
= radeon_encoder
->devices
& radeon_connector
->devices
;
640 if (devices
& ATOM_DEVICE_CRT1_SUPPORT
)
641 DRM_INFO(" CRT1: %s\n", encoder_names
[radeon_encoder
->encoder_id
]);
642 if (devices
& ATOM_DEVICE_CRT2_SUPPORT
)
643 DRM_INFO(" CRT2: %s\n", encoder_names
[radeon_encoder
->encoder_id
]);
644 if (devices
& ATOM_DEVICE_LCD1_SUPPORT
)
645 DRM_INFO(" LCD1: %s\n", encoder_names
[radeon_encoder
->encoder_id
]);
646 if (devices
& ATOM_DEVICE_DFP1_SUPPORT
)
647 DRM_INFO(" DFP1: %s\n", encoder_names
[radeon_encoder
->encoder_id
]);
648 if (devices
& ATOM_DEVICE_DFP2_SUPPORT
)
649 DRM_INFO(" DFP2: %s\n", encoder_names
[radeon_encoder
->encoder_id
]);
650 if (devices
& ATOM_DEVICE_DFP3_SUPPORT
)
651 DRM_INFO(" DFP3: %s\n", encoder_names
[radeon_encoder
->encoder_id
]);
652 if (devices
& ATOM_DEVICE_DFP4_SUPPORT
)
653 DRM_INFO(" DFP4: %s\n", encoder_names
[radeon_encoder
->encoder_id
]);
654 if (devices
& ATOM_DEVICE_DFP5_SUPPORT
)
655 DRM_INFO(" DFP5: %s\n", encoder_names
[radeon_encoder
->encoder_id
]);
656 if (devices
& ATOM_DEVICE_DFP6_SUPPORT
)
657 DRM_INFO(" DFP6: %s\n", encoder_names
[radeon_encoder
->encoder_id
]);
658 if (devices
& ATOM_DEVICE_TV1_SUPPORT
)
659 DRM_INFO(" TV1: %s\n", encoder_names
[radeon_encoder
->encoder_id
]);
660 if (devices
& ATOM_DEVICE_CV_SUPPORT
)
661 DRM_INFO(" CV: %s\n", encoder_names
[radeon_encoder
->encoder_id
]);
668 static bool radeon_setup_enc_conn(struct drm_device
*dev
)
670 struct radeon_device
*rdev
= dev
->dev_private
;
674 if (rdev
->is_atom_bios
) {
675 ret
= radeon_get_atom_connector_info_from_supported_devices_table(dev
);
677 ret
= radeon_get_atom_connector_info_from_object_table(dev
);
679 ret
= radeon_get_legacy_connector_info_from_bios(dev
);
681 ret
= radeon_get_legacy_connector_info_from_table(dev
);
684 if (!ASIC_IS_AVIVO(rdev
))
685 ret
= radeon_get_legacy_connector_info_from_table(dev
);
688 radeon_setup_encoder_clones(dev
);
689 radeon_print_display_setup(dev
);
695 int radeon_ddc_get_modes(struct radeon_connector
*radeon_connector
)
697 struct drm_device
*dev
= radeon_connector
->base
.dev
;
698 struct radeon_device
*rdev
= dev
->dev_private
;
701 /* on hw with routers, select right port */
702 if (radeon_connector
->router
.ddc_valid
)
703 radeon_router_select_ddc_port(radeon_connector
);
705 if (radeon_connector_encoder_get_dp_bridge_encoder_id(&radeon_connector
->base
) !=
706 ENCODER_OBJECT_ID_NONE
) {
707 struct radeon_connector_atom_dig
*dig
= radeon_connector
->con_priv
;
710 radeon_connector
->edid
= drm_get_edid(&radeon_connector
->base
,
711 dig
->dp_i2c_bus
->adapter
);
712 } else if ((radeon_connector
->base
.connector_type
== DRM_MODE_CONNECTOR_DisplayPort
) ||
713 (radeon_connector
->base
.connector_type
== DRM_MODE_CONNECTOR_eDP
)) {
714 struct radeon_connector_atom_dig
*dig
= radeon_connector
->con_priv
;
716 if ((dig
->dp_sink_type
== CONNECTOR_OBJECT_ID_DISPLAYPORT
||
717 dig
->dp_sink_type
== CONNECTOR_OBJECT_ID_eDP
) && dig
->dp_i2c_bus
)
718 radeon_connector
->edid
= drm_get_edid(&radeon_connector
->base
,
719 dig
->dp_i2c_bus
->adapter
);
720 else if (radeon_connector
->ddc_bus
&& !radeon_connector
->edid
)
721 radeon_connector
->edid
= drm_get_edid(&radeon_connector
->base
,
722 radeon_connector
->ddc_bus
->adapter
);
724 if (radeon_connector
->ddc_bus
&& !radeon_connector
->edid
)
725 radeon_connector
->edid
= drm_get_edid(&radeon_connector
->base
,
726 radeon_connector
->ddc_bus
->adapter
);
729 if (!radeon_connector
->edid
) {
730 if (rdev
->is_atom_bios
) {
731 /* some laptops provide a hardcoded edid in rom for LCDs */
732 if (((radeon_connector
->base
.connector_type
== DRM_MODE_CONNECTOR_LVDS
) ||
733 (radeon_connector
->base
.connector_type
== DRM_MODE_CONNECTOR_eDP
)))
734 radeon_connector
->edid
= radeon_bios_get_hardcoded_edid(rdev
);
736 /* some servers provide a hardcoded edid in rom for KVMs */
737 radeon_connector
->edid
= radeon_bios_get_hardcoded_edid(rdev
);
739 if (radeon_connector
->edid
) {
740 drm_mode_connector_update_edid_property(&radeon_connector
->base
, radeon_connector
->edid
);
741 ret
= drm_add_edid_modes(&radeon_connector
->base
, radeon_connector
->edid
);
744 drm_mode_connector_update_edid_property(&radeon_connector
->base
, NULL
);
749 static void avivo_get_fb_div(struct radeon_pll
*pll
,
756 u32 tmp
= post_div
* ref_div
;
759 *fb_div
= tmp
/ pll
->reference_freq
;
760 *frac_fb_div
= tmp
% pll
->reference_freq
;
762 if (*fb_div
> pll
->max_feedback_div
)
763 *fb_div
= pll
->max_feedback_div
;
764 else if (*fb_div
< pll
->min_feedback_div
)
765 *fb_div
= pll
->min_feedback_div
;
768 static u32
avivo_get_post_div(struct radeon_pll
*pll
,
771 u32 vco
, post_div
, tmp
;
773 if (pll
->flags
& RADEON_PLL_USE_POST_DIV
)
774 return pll
->post_div
;
776 if (pll
->flags
& RADEON_PLL_PREFER_MINM_OVER_MAXP
) {
777 if (pll
->flags
& RADEON_PLL_IS_LCD
)
778 vco
= pll
->lcd_pll_out_min
;
780 vco
= pll
->pll_out_min
;
782 if (pll
->flags
& RADEON_PLL_IS_LCD
)
783 vco
= pll
->lcd_pll_out_max
;
785 vco
= pll
->pll_out_max
;
788 post_div
= vco
/ target_clock
;
789 tmp
= vco
% target_clock
;
791 if (pll
->flags
& RADEON_PLL_PREFER_MINM_OVER_MAXP
) {
799 if (post_div
> pll
->max_post_div
)
800 post_div
= pll
->max_post_div
;
801 else if (post_div
< pll
->min_post_div
)
802 post_div
= pll
->min_post_div
;
807 #define MAX_TOLERANCE 10
809 void radeon_compute_pll_avivo(struct radeon_pll
*pll
,
817 u32 target_clock
= freq
/ 10;
818 u32 post_div
= avivo_get_post_div(pll
, target_clock
);
819 u32 ref_div
= pll
->min_ref_div
;
820 u32 fb_div
= 0, frac_fb_div
= 0, tmp
;
822 if (pll
->flags
& RADEON_PLL_USE_REF_DIV
)
823 ref_div
= pll
->reference_div
;
825 if (pll
->flags
& RADEON_PLL_USE_FRAC_FB_DIV
) {
826 avivo_get_fb_div(pll
, target_clock
, post_div
, ref_div
, &fb_div
, &frac_fb_div
);
827 frac_fb_div
= (100 * frac_fb_div
) / pll
->reference_freq
;
828 if (frac_fb_div
>= 5) {
830 frac_fb_div
= frac_fb_div
/ 10;
833 if (frac_fb_div
>= 10) {
838 while (ref_div
<= pll
->max_ref_div
) {
839 avivo_get_fb_div(pll
, target_clock
, post_div
, ref_div
,
840 &fb_div
, &frac_fb_div
);
841 if (frac_fb_div
>= (pll
->reference_freq
/ 2))
844 tmp
= (pll
->reference_freq
* fb_div
) / (post_div
* ref_div
);
845 tmp
= (tmp
* 10000) / target_clock
;
847 if (tmp
> (10000 + MAX_TOLERANCE
))
849 else if (tmp
>= (10000 - MAX_TOLERANCE
))
856 *dot_clock_p
= ((pll
->reference_freq
* fb_div
* 10) + (pll
->reference_freq
* frac_fb_div
)) /
857 (ref_div
* post_div
* 10);
859 *frac_fb_div_p
= frac_fb_div
;
860 *ref_div_p
= ref_div
;
861 *post_div_p
= post_div
;
862 DRM_DEBUG_KMS("%d, pll dividers - fb: %d.%d ref: %d, post %d\n",
863 *dot_clock_p
, fb_div
, frac_fb_div
, ref_div
, post_div
);
/*
 * radeon_div - 64/32-bit unsigned division rounded to nearest.
 *
 * Equivalent to the upstream do_div()-based helper: returns
 * (n + d/2) / d, i.e. n/d rounded to the nearest integer.
 * @d must be non-zero.
 */
static inline uint32_t radeon_div(uint64_t n, uint32_t d)
{
	return (uint32_t)((n + d / 2) / d);
}
877 void radeon_compute_pll_legacy(struct radeon_pll
*pll
,
879 uint32_t *dot_clock_p
,
881 uint32_t *frac_fb_div_p
,
883 uint32_t *post_div_p
)
885 uint32_t min_ref_div
= pll
->min_ref_div
;
886 uint32_t max_ref_div
= pll
->max_ref_div
;
887 uint32_t min_post_div
= pll
->min_post_div
;
888 uint32_t max_post_div
= pll
->max_post_div
;
889 uint32_t min_fractional_feed_div
= 0;
890 uint32_t max_fractional_feed_div
= 0;
891 uint32_t best_vco
= pll
->best_vco
;
892 uint32_t best_post_div
= 1;
893 uint32_t best_ref_div
= 1;
894 uint32_t best_feedback_div
= 1;
895 uint32_t best_frac_feedback_div
= 0;
896 uint32_t best_freq
= -1;
897 uint32_t best_error
= 0xffffffff;
898 uint32_t best_vco_diff
= 1;
900 u32 pll_out_min
, pll_out_max
;
902 DRM_DEBUG_KMS("PLL freq %ju %u %u\n", (uintmax_t)freq
, pll
->min_ref_div
, pll
->max_ref_div
);
905 if (pll
->flags
& RADEON_PLL_IS_LCD
) {
906 pll_out_min
= pll
->lcd_pll_out_min
;
907 pll_out_max
= pll
->lcd_pll_out_max
;
909 pll_out_min
= pll
->pll_out_min
;
910 pll_out_max
= pll
->pll_out_max
;
913 if (pll_out_min
> 64800)
916 if (pll
->flags
& RADEON_PLL_USE_REF_DIV
)
917 min_ref_div
= max_ref_div
= pll
->reference_div
;
919 while (min_ref_div
< max_ref_div
-1) {
920 uint32_t mid
= (min_ref_div
+ max_ref_div
) / 2;
921 uint32_t pll_in
= pll
->reference_freq
/ mid
;
922 if (pll_in
< pll
->pll_in_min
)
924 else if (pll_in
> pll
->pll_in_max
)
931 if (pll
->flags
& RADEON_PLL_USE_POST_DIV
)
932 min_post_div
= max_post_div
= pll
->post_div
;
934 if (pll
->flags
& RADEON_PLL_USE_FRAC_FB_DIV
) {
935 min_fractional_feed_div
= pll
->min_frac_feedback_div
;
936 max_fractional_feed_div
= pll
->max_frac_feedback_div
;
939 for (post_div
= max_post_div
; post_div
>= min_post_div
; --post_div
) {
942 if ((pll
->flags
& RADEON_PLL_NO_ODD_POST_DIV
) && (post_div
& 1))
945 /* legacy radeons only have a few post_divs */
946 if (pll
->flags
& RADEON_PLL_LEGACY
) {
947 if ((post_div
== 5) ||
958 for (ref_div
= min_ref_div
; ref_div
<= max_ref_div
; ++ref_div
) {
959 uint32_t feedback_div
, current_freq
= 0, error
, vco_diff
;
960 uint32_t pll_in
= pll
->reference_freq
/ ref_div
;
961 uint32_t min_feed_div
= pll
->min_feedback_div
;
962 uint32_t max_feed_div
= pll
->max_feedback_div
+ 1;
964 if (pll_in
< pll
->pll_in_min
|| pll_in
> pll
->pll_in_max
)
967 while (min_feed_div
< max_feed_div
) {
969 uint32_t min_frac_feed_div
= min_fractional_feed_div
;
970 uint32_t max_frac_feed_div
= max_fractional_feed_div
+ 1;
971 uint32_t frac_feedback_div
;
974 feedback_div
= (min_feed_div
+ max_feed_div
) / 2;
976 tmp
= (uint64_t)pll
->reference_freq
* feedback_div
;
977 vco
= radeon_div(tmp
, ref_div
);
979 if (vco
< pll_out_min
) {
980 min_feed_div
= feedback_div
+ 1;
982 } else if (vco
> pll_out_max
) {
983 max_feed_div
= feedback_div
;
987 while (min_frac_feed_div
< max_frac_feed_div
) {
988 frac_feedback_div
= (min_frac_feed_div
+ max_frac_feed_div
) / 2;
989 tmp
= (uint64_t)pll
->reference_freq
* 10000 * feedback_div
;
990 tmp
+= (uint64_t)pll
->reference_freq
* 1000 * frac_feedback_div
;
991 current_freq
= radeon_div(tmp
, ref_div
* post_div
);
993 if (pll
->flags
& RADEON_PLL_PREFER_CLOSEST_LOWER
) {
994 if (freq
< current_freq
)
997 error
= freq
- current_freq
;
999 error
= abs(current_freq
- freq
);
1000 vco_diff
= abs(vco
- best_vco
);
1002 if ((best_vco
== 0 && error
< best_error
) ||
1004 ((best_error
> 100 && error
< best_error
- 100) ||
1005 (abs(error
- best_error
) < 100 && vco_diff
< best_vco_diff
)))) {
1006 best_post_div
= post_div
;
1007 best_ref_div
= ref_div
;
1008 best_feedback_div
= feedback_div
;
1009 best_frac_feedback_div
= frac_feedback_div
;
1010 best_freq
= current_freq
;
1012 best_vco_diff
= vco_diff
;
1013 } else if (current_freq
== freq
) {
1014 if (best_freq
== -1) {
1015 best_post_div
= post_div
;
1016 best_ref_div
= ref_div
;
1017 best_feedback_div
= feedback_div
;
1018 best_frac_feedback_div
= frac_feedback_div
;
1019 best_freq
= current_freq
;
1021 best_vco_diff
= vco_diff
;
1022 } else if (((pll
->flags
& RADEON_PLL_PREFER_LOW_REF_DIV
) && (ref_div
< best_ref_div
)) ||
1023 ((pll
->flags
& RADEON_PLL_PREFER_HIGH_REF_DIV
) && (ref_div
> best_ref_div
)) ||
1024 ((pll
->flags
& RADEON_PLL_PREFER_LOW_FB_DIV
) && (feedback_div
< best_feedback_div
)) ||
1025 ((pll
->flags
& RADEON_PLL_PREFER_HIGH_FB_DIV
) && (feedback_div
> best_feedback_div
)) ||
1026 ((pll
->flags
& RADEON_PLL_PREFER_LOW_POST_DIV
) && (post_div
< best_post_div
)) ||
1027 ((pll
->flags
& RADEON_PLL_PREFER_HIGH_POST_DIV
) && (post_div
> best_post_div
))) {
1028 best_post_div
= post_div
;
1029 best_ref_div
= ref_div
;
1030 best_feedback_div
= feedback_div
;
1031 best_frac_feedback_div
= frac_feedback_div
;
1032 best_freq
= current_freq
;
1034 best_vco_diff
= vco_diff
;
1037 if (current_freq
< freq
)
1038 min_frac_feed_div
= frac_feedback_div
+ 1;
1040 max_frac_feed_div
= frac_feedback_div
;
1042 if (current_freq
< freq
)
1043 min_feed_div
= feedback_div
+ 1;
1045 max_feed_div
= feedback_div
;
1050 *dot_clock_p
= best_freq
/ 10000;
1051 *fb_div_p
= best_feedback_div
;
1052 *frac_fb_div_p
= best_frac_feedback_div
;
1053 *ref_div_p
= best_ref_div
;
1054 *post_div_p
= best_post_div
;
1055 DRM_DEBUG_KMS("%lld %d, pll dividers - fb: %d.%d ref: %d, post %d\n",
1057 best_freq
/ 1000, best_feedback_div
, best_frac_feedback_div
,
1058 best_ref_div
, best_post_div
);
1062 static void radeon_user_framebuffer_destroy(struct drm_framebuffer
*fb
)
1064 struct radeon_framebuffer
*radeon_fb
= to_radeon_framebuffer(fb
);
1066 if (radeon_fb
->obj
) {
1067 drm_gem_object_unreference_unlocked(radeon_fb
->obj
);
1069 drm_framebuffer_cleanup(fb
);
1070 drm_free(radeon_fb
, M_DRM
);
1073 static int radeon_user_framebuffer_create_handle(struct drm_framebuffer
*fb
,
1074 struct drm_file
*file_priv
,
1075 unsigned int *handle
)
1077 struct radeon_framebuffer
*radeon_fb
= to_radeon_framebuffer(fb
);
1079 return drm_gem_handle_create(file_priv
, radeon_fb
->obj
, handle
);
1082 static const struct drm_framebuffer_funcs radeon_fb_funcs
= {
1083 .destroy
= radeon_user_framebuffer_destroy
,
1084 .create_handle
= radeon_user_framebuffer_create_handle
,
1088 radeon_framebuffer_init(struct drm_device
*dev
,
1089 struct radeon_framebuffer
*rfb
,
1090 struct drm_mode_fb_cmd2
*mode_cmd
,
1091 struct drm_gem_object
*obj
)
1095 ret
= drm_framebuffer_init(dev
, &rfb
->base
, &radeon_fb_funcs
);
1100 drm_helper_mode_fill_fb_struct(&rfb
->base
, mode_cmd
);
1104 static struct drm_framebuffer
*
1105 radeon_user_framebuffer_create(struct drm_device
*dev
,
1106 struct drm_file
*file_priv
,
1107 struct drm_mode_fb_cmd2
*mode_cmd
)
1109 struct drm_gem_object
*obj
;
1110 struct radeon_framebuffer
*radeon_fb
;
1113 obj
= drm_gem_object_lookup(dev
, file_priv
, mode_cmd
->handles
[0]);
1115 dev_err(dev
->dev
, "No GEM object associated to handle 0x%08X, "
1116 "can't create framebuffer\n", mode_cmd
->handles
[0]);
1117 return ERR_PTR(-ENOENT
);
1120 radeon_fb
= kmalloc(sizeof(*radeon_fb
), M_DRM
,
1122 if (radeon_fb
== NULL
) {
1123 drm_gem_object_unreference_unlocked(obj
);
1124 return ERR_PTR(-ENOMEM
);
1127 ret
= radeon_framebuffer_init(dev
, radeon_fb
, mode_cmd
, obj
);
1129 kfree(radeon_fb
, M_DRM
);
1130 drm_gem_object_unreference_unlocked(obj
);
1131 return ERR_PTR(ret
);
1134 return &radeon_fb
->base
;
1137 static void radeon_output_poll_changed(struct drm_device
*dev
)
1139 struct radeon_device
*rdev
= dev
->dev_private
;
1140 radeon_fb_output_poll_changed(rdev
);
1143 static const struct drm_mode_config_funcs radeon_mode_funcs
= {
1144 .fb_create
= radeon_user_framebuffer_create
,
1145 .output_poll_changed
= radeon_output_poll_changed
1148 static struct drm_prop_enum_list radeon_tmds_pll_enum_list
[] =
1153 static struct drm_prop_enum_list radeon_tv_std_enum_list
[] =
1154 { { TV_STD_NTSC
, "ntsc" },
1155 { TV_STD_PAL
, "pal" },
1156 { TV_STD_PAL_M
, "pal-m" },
1157 { TV_STD_PAL_60
, "pal-60" },
1158 { TV_STD_NTSC_J
, "ntsc-j" },
1159 { TV_STD_SCART_PAL
, "scart-pal" },
1160 { TV_STD_PAL_CN
, "pal-cn" },
1161 { TV_STD_SECAM
, "secam" },
1164 static struct drm_prop_enum_list radeon_underscan_enum_list
[] =
1165 { { UNDERSCAN_OFF
, "off" },
1166 { UNDERSCAN_ON
, "on" },
1167 { UNDERSCAN_AUTO
, "auto" },
1170 static int radeon_modeset_create_props(struct radeon_device
*rdev
)
1174 if (rdev
->is_atom_bios
) {
1175 rdev
->mode_info
.coherent_mode_property
=
1176 drm_property_create_range(rdev
->ddev
, 0 , "coherent", 0, 1);
1177 if (!rdev
->mode_info
.coherent_mode_property
)
1181 if (!ASIC_IS_AVIVO(rdev
)) {
1182 sz
= DRM_ARRAY_SIZE(radeon_tmds_pll_enum_list
);
1183 rdev
->mode_info
.tmds_pll_property
=
1184 drm_property_create_enum(rdev
->ddev
, 0,
1186 radeon_tmds_pll_enum_list
, sz
);
1189 rdev
->mode_info
.load_detect_property
=
1190 drm_property_create_range(rdev
->ddev
, 0, "load detection", 0, 1);
1191 if (!rdev
->mode_info
.load_detect_property
)
1194 drm_mode_create_scaling_mode_property(rdev
->ddev
);
1196 sz
= DRM_ARRAY_SIZE(radeon_tv_std_enum_list
);
1197 rdev
->mode_info
.tv_std_property
=
1198 drm_property_create_enum(rdev
->ddev
, 0,
1200 radeon_tv_std_enum_list
, sz
);
1202 sz
= DRM_ARRAY_SIZE(radeon_underscan_enum_list
);
1203 rdev
->mode_info
.underscan_property
=
1204 drm_property_create_enum(rdev
->ddev
, 0,
1206 radeon_underscan_enum_list
, sz
);
1208 rdev
->mode_info
.underscan_hborder_property
=
1209 drm_property_create_range(rdev
->ddev
, 0,
1210 "underscan hborder", 0, 128);
1211 if (!rdev
->mode_info
.underscan_hborder_property
)
1214 rdev
->mode_info
.underscan_vborder_property
=
1215 drm_property_create_range(rdev
->ddev
, 0,
1216 "underscan vborder", 0, 128);
1217 if (!rdev
->mode_info
.underscan_vborder_property
)
1223 void radeon_update_display_priority(struct radeon_device
*rdev
)
1225 /* adjustment options for the display watermarks */
1226 if ((radeon_disp_priority
== 0) || (radeon_disp_priority
> 2)) {
1227 /* set display priority to high for r3xx, rv515 chips
1228 * this avoids flickering due to underflow to the
1229 * display controllers during heavy acceleration.
1230 * Don't force high on rs4xx igp chips as it seems to
1231 * affect the sound card. See kernel bug 15982.
1233 if ((ASIC_IS_R300(rdev
) || (rdev
->family
== CHIP_RV515
)) &&
1234 !(rdev
->flags
& RADEON_IS_IGP
))
1235 rdev
->disp_priority
= 2;
1237 rdev
->disp_priority
= 0;
1239 rdev
->disp_priority
= radeon_disp_priority
;
1244 * Allocate hdmi structs and determine register offsets
1246 static void radeon_afmt_init(struct radeon_device
*rdev
)
1250 for (i
= 0; i
< RADEON_MAX_AFMT_BLOCKS
; i
++)
1251 rdev
->mode_info
.afmt
[i
] = NULL
;
1253 if (ASIC_IS_DCE6(rdev
)) {
1255 } else if (ASIC_IS_DCE4(rdev
)) {
1256 /* DCE4/5 has 6 audio blocks tied to DIG encoders */
1257 /* DCE4.1 has 2 audio blocks tied to DIG encoders */
1258 rdev
->mode_info
.afmt
[0] = kmalloc(sizeof(struct radeon_afmt
),
1261 if (rdev
->mode_info
.afmt
[0]) {
1262 rdev
->mode_info
.afmt
[0]->offset
= EVERGREEN_CRTC0_REGISTER_OFFSET
;
1263 rdev
->mode_info
.afmt
[0]->id
= 0;
1265 rdev
->mode_info
.afmt
[1] = kmalloc(sizeof(struct radeon_afmt
),
1268 if (rdev
->mode_info
.afmt
[1]) {
1269 rdev
->mode_info
.afmt
[1]->offset
= EVERGREEN_CRTC1_REGISTER_OFFSET
;
1270 rdev
->mode_info
.afmt
[1]->id
= 1;
1272 if (!ASIC_IS_DCE41(rdev
)) {
1273 rdev
->mode_info
.afmt
[2] = kmalloc(sizeof(struct radeon_afmt
),
1276 if (rdev
->mode_info
.afmt
[2]) {
1277 rdev
->mode_info
.afmt
[2]->offset
= EVERGREEN_CRTC2_REGISTER_OFFSET
;
1278 rdev
->mode_info
.afmt
[2]->id
= 2;
1280 rdev
->mode_info
.afmt
[3] = kmalloc(sizeof(struct radeon_afmt
),
1283 if (rdev
->mode_info
.afmt
[3]) {
1284 rdev
->mode_info
.afmt
[3]->offset
= EVERGREEN_CRTC3_REGISTER_OFFSET
;
1285 rdev
->mode_info
.afmt
[3]->id
= 3;
1287 rdev
->mode_info
.afmt
[4] = kmalloc(sizeof(struct radeon_afmt
),
1290 if (rdev
->mode_info
.afmt
[4]) {
1291 rdev
->mode_info
.afmt
[4]->offset
= EVERGREEN_CRTC4_REGISTER_OFFSET
;
1292 rdev
->mode_info
.afmt
[4]->id
= 4;
1294 rdev
->mode_info
.afmt
[5] = kmalloc(sizeof(struct radeon_afmt
),
1297 if (rdev
->mode_info
.afmt
[5]) {
1298 rdev
->mode_info
.afmt
[5]->offset
= EVERGREEN_CRTC5_REGISTER_OFFSET
;
1299 rdev
->mode_info
.afmt
[5]->id
= 5;
1302 } else if (ASIC_IS_DCE3(rdev
)) {
1303 /* DCE3.x has 2 audio blocks tied to DIG encoders */
1304 rdev
->mode_info
.afmt
[0] = kmalloc(sizeof(struct radeon_afmt
),
1307 if (rdev
->mode_info
.afmt
[0]) {
1308 rdev
->mode_info
.afmt
[0]->offset
= DCE3_HDMI_OFFSET0
;
1309 rdev
->mode_info
.afmt
[0]->id
= 0;
1311 rdev
->mode_info
.afmt
[1] = kmalloc(sizeof(struct radeon_afmt
),
1314 if (rdev
->mode_info
.afmt
[1]) {
1315 rdev
->mode_info
.afmt
[1]->offset
= DCE3_HDMI_OFFSET1
;
1316 rdev
->mode_info
.afmt
[1]->id
= 1;
1318 } else if (ASIC_IS_DCE2(rdev
)) {
1319 /* DCE2 has at least 1 routable audio block */
1320 rdev
->mode_info
.afmt
[0] = kmalloc(sizeof(struct radeon_afmt
),
1323 if (rdev
->mode_info
.afmt
[0]) {
1324 rdev
->mode_info
.afmt
[0]->offset
= DCE2_HDMI_OFFSET0
;
1325 rdev
->mode_info
.afmt
[0]->id
= 0;
1327 /* r6xx has 2 routable audio blocks */
1328 if (rdev
->family
>= CHIP_R600
) {
1329 rdev
->mode_info
.afmt
[1] = kmalloc(sizeof(struct radeon_afmt
),
1332 if (rdev
->mode_info
.afmt
[1]) {
1333 rdev
->mode_info
.afmt
[1]->offset
= DCE2_HDMI_OFFSET1
;
1334 rdev
->mode_info
.afmt
[1]->id
= 1;
1340 static void radeon_afmt_fini(struct radeon_device
*rdev
)
1344 for (i
= 0; i
< RADEON_MAX_AFMT_BLOCKS
; i
++) {
1345 drm_free(rdev
->mode_info
.afmt
[i
], M_DRM
);
1346 rdev
->mode_info
.afmt
[i
] = NULL
;
1350 int radeon_modeset_init(struct radeon_device
*rdev
)
1355 drm_mode_config_init(rdev
->ddev
);
1356 rdev
->mode_info
.mode_config_initialized
= true;
1358 rdev
->ddev
->mode_config
.funcs
= &radeon_mode_funcs
;
1360 if (ASIC_IS_DCE5(rdev
)) {
1361 rdev
->ddev
->mode_config
.max_width
= 16384;
1362 rdev
->ddev
->mode_config
.max_height
= 16384;
1363 } else if (ASIC_IS_AVIVO(rdev
)) {
1364 rdev
->ddev
->mode_config
.max_width
= 8192;
1365 rdev
->ddev
->mode_config
.max_height
= 8192;
1367 rdev
->ddev
->mode_config
.max_width
= 4096;
1368 rdev
->ddev
->mode_config
.max_height
= 4096;
1371 rdev
->ddev
->mode_config
.preferred_depth
= 24;
1372 rdev
->ddev
->mode_config
.prefer_shadow
= 1;
1374 rdev
->ddev
->mode_config
.fb_base
= rdev
->mc
.aper_base
;
1376 ret
= radeon_modeset_create_props(rdev
);
1381 /* init i2c buses */
1382 radeon_i2c_init(rdev
);
1384 /* check combios for a valid hardcoded EDID - Sun servers */
1385 if (!rdev
->is_atom_bios
) {
1386 /* check for hardcoded EDID in BIOS */
1387 radeon_combios_check_hardcoded_edid(rdev
);
1390 /* allocate crtcs */
1391 for (i
= 0; i
< rdev
->num_crtc
; i
++) {
1392 radeon_crtc_init(rdev
->ddev
, i
);
1395 /* okay we should have all the bios connectors */
1396 ret
= radeon_setup_enc_conn(rdev
->ddev
);
1401 /* init dig PHYs, disp eng pll */
1402 if (rdev
->is_atom_bios
) {
1403 radeon_atom_encoder_init(rdev
);
1404 radeon_atom_disp_eng_pll_init(rdev
);
1407 /* initialize hpd */
1408 radeon_hpd_init(rdev
);
1411 radeon_afmt_init(rdev
);
1413 /* Initialize power management */
1414 radeon_pm_init(rdev
);
1416 radeon_fbdev_init(rdev
);
1417 drm_kms_helper_poll_init(rdev
->ddev
);
1422 void radeon_modeset_fini(struct radeon_device
*rdev
)
1424 radeon_fbdev_fini(rdev
);
1425 drm_free(rdev
->mode_info
.bios_hardcoded_edid
, M_DRM
);
1426 radeon_pm_fini(rdev
);
1428 if (rdev
->mode_info
.mode_config_initialized
) {
1429 radeon_afmt_fini(rdev
);
1430 drm_kms_helper_poll_fini(rdev
->ddev
);
1431 radeon_hpd_fini(rdev
);
1432 DRM_UNLOCK(rdev
->ddev
); /* Work around lock recursion. dumbbell@ */
1433 drm_mode_config_cleanup(rdev
->ddev
);
1434 DRM_LOCK(rdev
->ddev
);
1435 rdev
->mode_info
.mode_config_initialized
= false;
1437 /* free i2c buses */
1438 radeon_i2c_fini(rdev
);
1441 static bool is_hdtv_mode(const struct drm_display_mode
*mode
)
1443 /* try and guess if this is a tv or a monitor */
1444 if ((mode
->vdisplay
== 480 && mode
->hdisplay
== 720) || /* 480p */
1445 (mode
->vdisplay
== 576) || /* 576p */
1446 (mode
->vdisplay
== 720) || /* 720p */
1447 (mode
->vdisplay
== 1080)) /* 1080p */
1453 bool radeon_crtc_scaling_mode_fixup(struct drm_crtc
*crtc
,
1454 const struct drm_display_mode
*mode
,
1455 struct drm_display_mode
*adjusted_mode
)
1457 struct drm_device
*dev
= crtc
->dev
;
1458 struct radeon_device
*rdev
= dev
->dev_private
;
1459 struct drm_encoder
*encoder
;
1460 struct radeon_crtc
*radeon_crtc
= to_radeon_crtc(crtc
);
1461 struct radeon_encoder
*radeon_encoder
;
1462 struct drm_connector
*connector
;
1463 struct radeon_connector
*radeon_connector
;
1465 u32 src_v
= 1, dst_v
= 1;
1466 u32 src_h
= 1, dst_h
= 1;
1468 radeon_crtc
->h_border
= 0;
1469 radeon_crtc
->v_border
= 0;
1471 list_for_each_entry(encoder
, &dev
->mode_config
.encoder_list
, head
) {
1472 if (encoder
->crtc
!= crtc
)
1474 radeon_encoder
= to_radeon_encoder(encoder
);
1475 connector
= radeon_get_connector_for_encoder(encoder
);
1476 radeon_connector
= to_radeon_connector(connector
);
1480 if (radeon_encoder
->rmx_type
== RMX_OFF
)
1481 radeon_crtc
->rmx_type
= RMX_OFF
;
1482 else if (mode
->hdisplay
< radeon_encoder
->native_mode
.hdisplay
||
1483 mode
->vdisplay
< radeon_encoder
->native_mode
.vdisplay
)
1484 radeon_crtc
->rmx_type
= radeon_encoder
->rmx_type
;
1486 radeon_crtc
->rmx_type
= RMX_OFF
;
1487 /* copy native mode */
1488 memcpy(&radeon_crtc
->native_mode
,
1489 &radeon_encoder
->native_mode
,
1490 sizeof(struct drm_display_mode
));
1491 src_v
= crtc
->mode
.vdisplay
;
1492 dst_v
= radeon_crtc
->native_mode
.vdisplay
;
1493 src_h
= crtc
->mode
.hdisplay
;
1494 dst_h
= radeon_crtc
->native_mode
.hdisplay
;
1496 /* fix up for overscan on hdmi */
1497 if (ASIC_IS_AVIVO(rdev
) &&
1498 (!(mode
->flags
& DRM_MODE_FLAG_INTERLACE
)) &&
1499 ((radeon_encoder
->underscan_type
== UNDERSCAN_ON
) ||
1500 ((radeon_encoder
->underscan_type
== UNDERSCAN_AUTO
) &&
1501 drm_detect_hdmi_monitor(radeon_connector
->edid
) &&
1502 is_hdtv_mode(mode
)))) {
1503 if (radeon_encoder
->underscan_hborder
!= 0)
1504 radeon_crtc
->h_border
= radeon_encoder
->underscan_hborder
;
1506 radeon_crtc
->h_border
= (mode
->hdisplay
>> 5) + 16;
1507 if (radeon_encoder
->underscan_vborder
!= 0)
1508 radeon_crtc
->v_border
= radeon_encoder
->underscan_vborder
;
1510 radeon_crtc
->v_border
= (mode
->vdisplay
>> 5) + 16;
1511 radeon_crtc
->rmx_type
= RMX_FULL
;
1512 src_v
= crtc
->mode
.vdisplay
;
1513 dst_v
= crtc
->mode
.vdisplay
- (radeon_crtc
->v_border
* 2);
1514 src_h
= crtc
->mode
.hdisplay
;
1515 dst_h
= crtc
->mode
.hdisplay
- (radeon_crtc
->h_border
* 2);
1519 if (radeon_crtc
->rmx_type
!= radeon_encoder
->rmx_type
) {
1520 /* WARNING: Right now this can't happen but
1521 * in the future we need to check that scaling
1522 * are consistent across different encoder
1523 * (ie all encoder can work with the same
1526 DRM_ERROR("Scaling not consistent across encoder.\n");
1531 if (radeon_crtc
->rmx_type
!= RMX_OFF
) {
1533 a
.full
= dfixed_const(src_v
);
1534 b
.full
= dfixed_const(dst_v
);
1535 radeon_crtc
->vsc
.full
= dfixed_div(a
, b
);
1536 a
.full
= dfixed_const(src_h
);
1537 b
.full
= dfixed_const(dst_h
);
1538 radeon_crtc
->hsc
.full
= dfixed_div(a
, b
);
1540 radeon_crtc
->vsc
.full
= dfixed_const(1);
1541 radeon_crtc
->hsc
.full
= dfixed_const(1);
1547 * Retrieve current video scanout position of crtc on a given gpu.
1549 * \param dev Device to query.
1550 * \param crtc Crtc to query.
1551 * \param *vpos Location where vertical scanout position should be stored.
1552 * \param *hpos Location where horizontal scanout position should go.
1554 * Returns vpos as a positive number while in active scanout area.
1555 * Returns vpos as a negative number inside vblank, counting the number
1556 * of scanlines to go until end of vblank, e.g., -1 means "one scanline
1557 * until start of active scanout / end of vblank."
1559 * \return Flags, or'ed together as follows:
1561 * DRM_SCANOUTPOS_VALID = Query successful.
1562 * DRM_SCANOUTPOS_INVBL = Inside vblank.
1563 * DRM_SCANOUTPOS_ACCURATE = Returned position is accurate. A lack of
1564 * this flag means that returned position may be offset by a constant but
1565 * unknown small number of scanlines wrt. real scanout position.
1568 int radeon_get_crtc_scanoutpos(struct drm_device
*dev
, int crtc
, int *vpos
, int *hpos
)
1570 u32 stat_crtc
= 0, vbl
= 0, position
= 0;
1571 int vbl_start
, vbl_end
, vtotal
, ret
= 0;
1574 struct radeon_device
*rdev
= dev
->dev_private
;
1576 if (ASIC_IS_DCE4(rdev
)) {
1578 vbl
= RREG32(EVERGREEN_CRTC_V_BLANK_START_END
+
1579 EVERGREEN_CRTC0_REGISTER_OFFSET
);
1580 position
= RREG32(EVERGREEN_CRTC_STATUS_POSITION
+
1581 EVERGREEN_CRTC0_REGISTER_OFFSET
);
1582 ret
|= DRM_SCANOUTPOS_VALID
;
1585 vbl
= RREG32(EVERGREEN_CRTC_V_BLANK_START_END
+
1586 EVERGREEN_CRTC1_REGISTER_OFFSET
);
1587 position
= RREG32(EVERGREEN_CRTC_STATUS_POSITION
+
1588 EVERGREEN_CRTC1_REGISTER_OFFSET
);
1589 ret
|= DRM_SCANOUTPOS_VALID
;
1592 vbl
= RREG32(EVERGREEN_CRTC_V_BLANK_START_END
+
1593 EVERGREEN_CRTC2_REGISTER_OFFSET
);
1594 position
= RREG32(EVERGREEN_CRTC_STATUS_POSITION
+
1595 EVERGREEN_CRTC2_REGISTER_OFFSET
);
1596 ret
|= DRM_SCANOUTPOS_VALID
;
1599 vbl
= RREG32(EVERGREEN_CRTC_V_BLANK_START_END
+
1600 EVERGREEN_CRTC3_REGISTER_OFFSET
);
1601 position
= RREG32(EVERGREEN_CRTC_STATUS_POSITION
+
1602 EVERGREEN_CRTC3_REGISTER_OFFSET
);
1603 ret
|= DRM_SCANOUTPOS_VALID
;
1606 vbl
= RREG32(EVERGREEN_CRTC_V_BLANK_START_END
+
1607 EVERGREEN_CRTC4_REGISTER_OFFSET
);
1608 position
= RREG32(EVERGREEN_CRTC_STATUS_POSITION
+
1609 EVERGREEN_CRTC4_REGISTER_OFFSET
);
1610 ret
|= DRM_SCANOUTPOS_VALID
;
1613 vbl
= RREG32(EVERGREEN_CRTC_V_BLANK_START_END
+
1614 EVERGREEN_CRTC5_REGISTER_OFFSET
);
1615 position
= RREG32(EVERGREEN_CRTC_STATUS_POSITION
+
1616 EVERGREEN_CRTC5_REGISTER_OFFSET
);
1617 ret
|= DRM_SCANOUTPOS_VALID
;
1619 } else if (ASIC_IS_AVIVO(rdev
)) {
1621 vbl
= RREG32(AVIVO_D1CRTC_V_BLANK_START_END
);
1622 position
= RREG32(AVIVO_D1CRTC_STATUS_POSITION
);
1623 ret
|= DRM_SCANOUTPOS_VALID
;
1626 vbl
= RREG32(AVIVO_D2CRTC_V_BLANK_START_END
);
1627 position
= RREG32(AVIVO_D2CRTC_STATUS_POSITION
);
1628 ret
|= DRM_SCANOUTPOS_VALID
;
1631 /* Pre-AVIVO: Different encoding of scanout pos and vblank interval. */
1633 /* Assume vbl_end == 0, get vbl_start from
1636 vbl
= (RREG32(RADEON_CRTC_V_TOTAL_DISP
) &
1637 RADEON_CRTC_V_DISP
) >> RADEON_CRTC_V_DISP_SHIFT
;
1638 /* Only retrieve vpos from upper 16 bits, set hpos == 0. */
1639 position
= (RREG32(RADEON_CRTC_VLINE_CRNT_VLINE
) >> 16) & RADEON_CRTC_V_TOTAL
;
1640 stat_crtc
= RREG32(RADEON_CRTC_STATUS
);
1641 if (!(stat_crtc
& 1))
1644 ret
|= DRM_SCANOUTPOS_VALID
;
1647 vbl
= (RREG32(RADEON_CRTC2_V_TOTAL_DISP
) &
1648 RADEON_CRTC_V_DISP
) >> RADEON_CRTC_V_DISP_SHIFT
;
1649 position
= (RREG32(RADEON_CRTC2_VLINE_CRNT_VLINE
) >> 16) & RADEON_CRTC_V_TOTAL
;
1650 stat_crtc
= RREG32(RADEON_CRTC2_STATUS
);
1651 if (!(stat_crtc
& 1))
1654 ret
|= DRM_SCANOUTPOS_VALID
;
1658 /* Decode into vertical and horizontal scanout position. */
1659 *vpos
= position
& 0x1fff;
1660 *hpos
= (position
>> 16) & 0x1fff;
1662 /* Valid vblank area boundaries from gpu retrieved? */
1665 ret
|= DRM_SCANOUTPOS_ACCURATE
;
1666 vbl_start
= vbl
& 0x1fff;
1667 vbl_end
= (vbl
>> 16) & 0x1fff;
1670 /* No: Fake something reasonable which gives at least ok results. */
1671 vbl_start
= rdev
->mode_info
.crtcs
[crtc
]->base
.hwmode
.crtc_vdisplay
;
1675 /* Test scanout position against vblank region. */
1676 if ((*vpos
< vbl_start
) && (*vpos
>= vbl_end
))
1679 /* Check if inside vblank area and apply corrective offsets:
1680 * vpos will then be >=0 in video scanout area, but negative
1681 * within vblank area, counting down the number of lines until
1685 /* Inside "upper part" of vblank area? Apply corrective offset if so: */
1686 if (in_vbl
&& (*vpos
>= vbl_start
)) {
1687 vtotal
= rdev
->mode_info
.crtcs
[crtc
]->base
.hwmode
.crtc_vtotal
;
1688 *vpos
= *vpos
- vtotal
;
1691 /* Correct for shifted end of vbl at vbl_end. */
1692 *vpos
= *vpos
- vbl_end
;
1696 ret
|= DRM_SCANOUTPOS_INVBL
;