2 * Copyright 2007-8 Advanced Micro Devices, Inc.
3 * Copyright 2008 Red Hat Inc.
5 * Permission is hereby granted, free of charge, to any person obtaining a
6 * copy of this software and associated documentation files (the "Software"),
7 * to deal in the Software without restriction, including without limitation
8 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
9 * and/or sell copies of the Software, and to permit persons to whom the
10 * Software is furnished to do so, subject to the following conditions:
12 * The above copyright notice and this permission notice shall be included in
13 * all copies or substantial portions of the Software.
15 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
18 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
19 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
20 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
21 * OTHER DEALINGS IN THE SOFTWARE.
23 * Authors: Dave Airlie
27 #include "radeon_drm.h"
31 #include <asm/div64.h>
33 #include "drm_crtc_helper.h"
36 static int radeon_ddc_dump(struct drm_connector
*connector
);
38 static void avivo_crtc_load_lut(struct drm_crtc
*crtc
)
40 struct radeon_crtc
*radeon_crtc
= to_radeon_crtc(crtc
);
41 struct drm_device
*dev
= crtc
->dev
;
42 struct radeon_device
*rdev
= dev
->dev_private
;
45 DRM_DEBUG_KMS("%d\n", radeon_crtc
->crtc_id
);
46 WREG32(AVIVO_DC_LUTA_CONTROL
+ radeon_crtc
->crtc_offset
, 0);
48 WREG32(AVIVO_DC_LUTA_BLACK_OFFSET_BLUE
+ radeon_crtc
->crtc_offset
, 0);
49 WREG32(AVIVO_DC_LUTA_BLACK_OFFSET_GREEN
+ radeon_crtc
->crtc_offset
, 0);
50 WREG32(AVIVO_DC_LUTA_BLACK_OFFSET_RED
+ radeon_crtc
->crtc_offset
, 0);
52 WREG32(AVIVO_DC_LUTA_WHITE_OFFSET_BLUE
+ radeon_crtc
->crtc_offset
, 0xffff);
53 WREG32(AVIVO_DC_LUTA_WHITE_OFFSET_GREEN
+ radeon_crtc
->crtc_offset
, 0xffff);
54 WREG32(AVIVO_DC_LUTA_WHITE_OFFSET_RED
+ radeon_crtc
->crtc_offset
, 0xffff);
56 WREG32(AVIVO_DC_LUT_RW_SELECT
, radeon_crtc
->crtc_id
);
57 WREG32(AVIVO_DC_LUT_RW_MODE
, 0);
58 WREG32(AVIVO_DC_LUT_WRITE_EN_MASK
, 0x0000003f);
60 WREG8(AVIVO_DC_LUT_RW_INDEX
, 0);
61 for (i
= 0; i
< 256; i
++) {
62 WREG32(AVIVO_DC_LUT_30_COLOR
,
63 (radeon_crtc
->lut_r
[i
] << 20) |
64 (radeon_crtc
->lut_g
[i
] << 10) |
65 (radeon_crtc
->lut_b
[i
] << 0));
68 WREG32(AVIVO_D1GRPH_LUT_SEL
+ radeon_crtc
->crtc_offset
, radeon_crtc
->crtc_id
);
71 static void dce4_crtc_load_lut(struct drm_crtc
*crtc
)
73 struct radeon_crtc
*radeon_crtc
= to_radeon_crtc(crtc
);
74 struct drm_device
*dev
= crtc
->dev
;
75 struct radeon_device
*rdev
= dev
->dev_private
;
78 DRM_DEBUG_KMS("%d\n", radeon_crtc
->crtc_id
);
79 WREG32(EVERGREEN_DC_LUT_CONTROL
+ radeon_crtc
->crtc_offset
, 0);
81 WREG32(EVERGREEN_DC_LUT_BLACK_OFFSET_BLUE
+ radeon_crtc
->crtc_offset
, 0);
82 WREG32(EVERGREEN_DC_LUT_BLACK_OFFSET_GREEN
+ radeon_crtc
->crtc_offset
, 0);
83 WREG32(EVERGREEN_DC_LUT_BLACK_OFFSET_RED
+ radeon_crtc
->crtc_offset
, 0);
85 WREG32(EVERGREEN_DC_LUT_WHITE_OFFSET_BLUE
+ radeon_crtc
->crtc_offset
, 0xffff);
86 WREG32(EVERGREEN_DC_LUT_WHITE_OFFSET_GREEN
+ radeon_crtc
->crtc_offset
, 0xffff);
87 WREG32(EVERGREEN_DC_LUT_WHITE_OFFSET_RED
+ radeon_crtc
->crtc_offset
, 0xffff);
89 WREG32(EVERGREEN_DC_LUT_RW_MODE
+ radeon_crtc
->crtc_offset
, 0);
90 WREG32(EVERGREEN_DC_LUT_WRITE_EN_MASK
+ radeon_crtc
->crtc_offset
, 0x00000007);
92 WREG32(EVERGREEN_DC_LUT_RW_INDEX
+ radeon_crtc
->crtc_offset
, 0);
93 for (i
= 0; i
< 256; i
++) {
94 WREG32(EVERGREEN_DC_LUT_30_COLOR
+ radeon_crtc
->crtc_offset
,
95 (radeon_crtc
->lut_r
[i
] << 20) |
96 (radeon_crtc
->lut_g
[i
] << 10) |
97 (radeon_crtc
->lut_b
[i
] << 0));
101 static void dce5_crtc_load_lut(struct drm_crtc
*crtc
)
103 struct radeon_crtc
*radeon_crtc
= to_radeon_crtc(crtc
);
104 struct drm_device
*dev
= crtc
->dev
;
105 struct radeon_device
*rdev
= dev
->dev_private
;
108 DRM_DEBUG_KMS("%d\n", radeon_crtc
->crtc_id
);
110 WREG32(NI_INPUT_CSC_CONTROL
+ radeon_crtc
->crtc_offset
,
111 (NI_INPUT_CSC_GRPH_MODE(NI_INPUT_CSC_BYPASS
) |
112 NI_INPUT_CSC_OVL_MODE(NI_INPUT_CSC_BYPASS
)));
113 WREG32(NI_PRESCALE_GRPH_CONTROL
+ radeon_crtc
->crtc_offset
,
114 NI_GRPH_PRESCALE_BYPASS
);
115 WREG32(NI_PRESCALE_OVL_CONTROL
+ radeon_crtc
->crtc_offset
,
116 NI_OVL_PRESCALE_BYPASS
);
117 WREG32(NI_INPUT_GAMMA_CONTROL
+ radeon_crtc
->crtc_offset
,
118 (NI_GRPH_INPUT_GAMMA_MODE(NI_INPUT_GAMMA_USE_LUT
) |
119 NI_OVL_INPUT_GAMMA_MODE(NI_INPUT_GAMMA_USE_LUT
)));
121 WREG32(EVERGREEN_DC_LUT_CONTROL
+ radeon_crtc
->crtc_offset
, 0);
123 WREG32(EVERGREEN_DC_LUT_BLACK_OFFSET_BLUE
+ radeon_crtc
->crtc_offset
, 0);
124 WREG32(EVERGREEN_DC_LUT_BLACK_OFFSET_GREEN
+ radeon_crtc
->crtc_offset
, 0);
125 WREG32(EVERGREEN_DC_LUT_BLACK_OFFSET_RED
+ radeon_crtc
->crtc_offset
, 0);
127 WREG32(EVERGREEN_DC_LUT_WHITE_OFFSET_BLUE
+ radeon_crtc
->crtc_offset
, 0xffff);
128 WREG32(EVERGREEN_DC_LUT_WHITE_OFFSET_GREEN
+ radeon_crtc
->crtc_offset
, 0xffff);
129 WREG32(EVERGREEN_DC_LUT_WHITE_OFFSET_RED
+ radeon_crtc
->crtc_offset
, 0xffff);
131 WREG32(EVERGREEN_DC_LUT_RW_MODE
+ radeon_crtc
->crtc_offset
, 0);
132 WREG32(EVERGREEN_DC_LUT_WRITE_EN_MASK
+ radeon_crtc
->crtc_offset
, 0x00000007);
134 WREG32(EVERGREEN_DC_LUT_RW_INDEX
+ radeon_crtc
->crtc_offset
, 0);
135 for (i
= 0; i
< 256; i
++) {
136 WREG32(EVERGREEN_DC_LUT_30_COLOR
+ radeon_crtc
->crtc_offset
,
137 (radeon_crtc
->lut_r
[i
] << 20) |
138 (radeon_crtc
->lut_g
[i
] << 10) |
139 (radeon_crtc
->lut_b
[i
] << 0));
142 WREG32(NI_DEGAMMA_CONTROL
+ radeon_crtc
->crtc_offset
,
143 (NI_GRPH_DEGAMMA_MODE(NI_DEGAMMA_BYPASS
) |
144 NI_OVL_DEGAMMA_MODE(NI_DEGAMMA_BYPASS
) |
145 NI_ICON_DEGAMMA_MODE(NI_DEGAMMA_BYPASS
) |
146 NI_CURSOR_DEGAMMA_MODE(NI_DEGAMMA_BYPASS
)));
147 WREG32(NI_GAMUT_REMAP_CONTROL
+ radeon_crtc
->crtc_offset
,
148 (NI_GRPH_GAMUT_REMAP_MODE(NI_GAMUT_REMAP_BYPASS
) |
149 NI_OVL_GAMUT_REMAP_MODE(NI_GAMUT_REMAP_BYPASS
)));
150 WREG32(NI_REGAMMA_CONTROL
+ radeon_crtc
->crtc_offset
,
151 (NI_GRPH_REGAMMA_MODE(NI_REGAMMA_BYPASS
) |
152 NI_OVL_REGAMMA_MODE(NI_REGAMMA_BYPASS
)));
153 WREG32(NI_OUTPUT_CSC_CONTROL
+ radeon_crtc
->crtc_offset
,
154 (NI_OUTPUT_CSC_GRPH_MODE(NI_OUTPUT_CSC_BYPASS
) |
155 NI_OUTPUT_CSC_OVL_MODE(NI_OUTPUT_CSC_BYPASS
)));
156 /* XXX match this to the depth of the crtc fmt block, move to modeset? */
157 WREG32(0x6940 + radeon_crtc
->crtc_offset
, 0);
161 static void legacy_crtc_load_lut(struct drm_crtc
*crtc
)
163 struct radeon_crtc
*radeon_crtc
= to_radeon_crtc(crtc
);
164 struct drm_device
*dev
= crtc
->dev
;
165 struct radeon_device
*rdev
= dev
->dev_private
;
169 dac2_cntl
= RREG32(RADEON_DAC_CNTL2
);
170 if (radeon_crtc
->crtc_id
== 0)
171 dac2_cntl
&= (uint32_t)~RADEON_DAC2_PALETTE_ACC_CTL
;
173 dac2_cntl
|= RADEON_DAC2_PALETTE_ACC_CTL
;
174 WREG32(RADEON_DAC_CNTL2
, dac2_cntl
);
176 WREG8(RADEON_PALETTE_INDEX
, 0);
177 for (i
= 0; i
< 256; i
++) {
178 WREG32(RADEON_PALETTE_30_DATA
,
179 (radeon_crtc
->lut_r
[i
] << 20) |
180 (radeon_crtc
->lut_g
[i
] << 10) |
181 (radeon_crtc
->lut_b
[i
] << 0));
185 void radeon_crtc_load_lut(struct drm_crtc
*crtc
)
187 struct drm_device
*dev
= crtc
->dev
;
188 struct radeon_device
*rdev
= dev
->dev_private
;
193 if (ASIC_IS_DCE5(rdev
))
194 dce5_crtc_load_lut(crtc
);
195 else if (ASIC_IS_DCE4(rdev
))
196 dce4_crtc_load_lut(crtc
);
197 else if (ASIC_IS_AVIVO(rdev
))
198 avivo_crtc_load_lut(crtc
);
200 legacy_crtc_load_lut(crtc
);
203 /** Sets the color ramps on behalf of fbcon */
204 void radeon_crtc_fb_gamma_set(struct drm_crtc
*crtc
, u16 red
, u16 green
,
207 struct radeon_crtc
*radeon_crtc
= to_radeon_crtc(crtc
);
209 radeon_crtc
->lut_r
[regno
] = red
>> 6;
210 radeon_crtc
->lut_g
[regno
] = green
>> 6;
211 radeon_crtc
->lut_b
[regno
] = blue
>> 6;
214 /** Gets the color ramps on behalf of fbcon */
215 void radeon_crtc_fb_gamma_get(struct drm_crtc
*crtc
, u16
*red
, u16
*green
,
216 u16
*blue
, int regno
)
218 struct radeon_crtc
*radeon_crtc
= to_radeon_crtc(crtc
);
220 *red
= radeon_crtc
->lut_r
[regno
] << 6;
221 *green
= radeon_crtc
->lut_g
[regno
] << 6;
222 *blue
= radeon_crtc
->lut_b
[regno
] << 6;
225 static void radeon_crtc_gamma_set(struct drm_crtc
*crtc
, u16
*red
, u16
*green
,
226 u16
*blue
, uint32_t start
, uint32_t size
)
228 struct radeon_crtc
*radeon_crtc
= to_radeon_crtc(crtc
);
229 int end
= (start
+ size
> 256) ? 256 : start
+ size
, i
;
231 /* userspace palettes are always correct as is */
232 for (i
= start
; i
< end
; i
++) {
233 radeon_crtc
->lut_r
[i
] = red
[i
] >> 6;
234 radeon_crtc
->lut_g
[i
] = green
[i
] >> 6;
235 radeon_crtc
->lut_b
[i
] = blue
[i
] >> 6;
237 radeon_crtc_load_lut(crtc
);
/* drm_crtc_funcs.destroy: tear down the crtc and free our wrapper
 * (allocated with kzalloc in radeon_crtc_init — without the kfree the
 * radeon_crtc would leak). */
static void radeon_crtc_destroy(struct drm_crtc *crtc)
{
	struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);

	drm_crtc_cleanup(crtc);
	kfree(radeon_crtc);
}
249 * Handle unpin events outside the interrupt handler proper.
251 static void radeon_unpin_work_func(struct work_struct
*__work
)
253 struct radeon_unpin_work
*work
=
254 container_of(__work
, struct radeon_unpin_work
, work
);
257 /* unpin of the old buffer */
258 r
= radeon_bo_reserve(work
->old_rbo
, false);
259 if (likely(r
== 0)) {
260 r
= radeon_bo_unpin(work
->old_rbo
);
261 if (unlikely(r
!= 0)) {
262 DRM_ERROR("failed to unpin buffer after flip\n");
264 radeon_bo_unreserve(work
->old_rbo
);
266 DRM_ERROR("failed to reserve buffer after flip\n");
268 drm_gem_object_unreference_unlocked(&work
->old_rbo
->gem_base
);
272 void radeon_crtc_handle_flip(struct radeon_device
*rdev
, int crtc_id
)
274 struct radeon_crtc
*radeon_crtc
= rdev
->mode_info
.crtcs
[crtc_id
];
275 struct radeon_unpin_work
*work
;
276 struct drm_pending_vblank_event
*e
;
282 spin_lock_irqsave(&rdev
->ddev
->event_lock
, flags
);
283 work
= radeon_crtc
->unpin_work
;
285 !radeon_fence_signaled(work
->fence
)) {
286 spin_unlock_irqrestore(&rdev
->ddev
->event_lock
, flags
);
289 /* New pageflip, or just completion of a previous one? */
290 if (!radeon_crtc
->deferred_flip_completion
) {
291 /* do the flip (mmio) */
292 update_pending
= radeon_page_flip(rdev
, crtc_id
, work
->new_crtc_base
);
294 /* This is just a completion of a flip queued in crtc
295 * at last invocation. Make sure we go directly to
296 * completion routine.
299 radeon_crtc
->deferred_flip_completion
= 0;
302 /* Has the pageflip already completed in crtc, or is it certain
303 * to complete in this vblank?
305 if (update_pending
&&
306 (DRM_SCANOUTPOS_VALID
& radeon_get_crtc_scanoutpos(rdev
->ddev
, crtc_id
,
309 (vpos
< (99 * rdev
->mode_info
.crtcs
[crtc_id
]->base
.hwmode
.crtc_vdisplay
)/100)) {
310 /* crtc didn't flip in this target vblank interval,
311 * but flip is pending in crtc. It will complete it
312 * in next vblank interval, so complete the flip at
315 radeon_crtc
->deferred_flip_completion
= 1;
316 spin_unlock_irqrestore(&rdev
->ddev
->event_lock
, flags
);
320 /* Pageflip (will be) certainly completed in this vblank. Clean up. */
321 radeon_crtc
->unpin_work
= NULL
;
323 /* wakeup userspace */
326 e
->event
.sequence
= drm_vblank_count_and_time(rdev
->ddev
, crtc_id
, &now
);
327 e
->event
.tv_sec
= now
.tv_sec
;
328 e
->event
.tv_usec
= now
.tv_usec
;
329 list_add_tail(&e
->base
.link
, &e
->base
.file_priv
->event_list
);
330 wake_up_interruptible(&e
->base
.file_priv
->event_wait
);
332 spin_unlock_irqrestore(&rdev
->ddev
->event_lock
, flags
);
334 drm_vblank_put(rdev
->ddev
, radeon_crtc
->crtc_id
);
335 radeon_fence_unref(&work
->fence
);
336 radeon_post_page_flip(work
->rdev
, work
->crtc_id
);
337 schedule_work(&work
->work
);
340 static int radeon_crtc_page_flip(struct drm_crtc
*crtc
,
341 struct drm_framebuffer
*fb
,
342 struct drm_pending_vblank_event
*event
)
344 struct drm_device
*dev
= crtc
->dev
;
345 struct radeon_device
*rdev
= dev
->dev_private
;
346 struct radeon_crtc
*radeon_crtc
= to_radeon_crtc(crtc
);
347 struct radeon_framebuffer
*old_radeon_fb
;
348 struct radeon_framebuffer
*new_radeon_fb
;
349 struct drm_gem_object
*obj
;
350 struct radeon_bo
*rbo
;
351 struct radeon_fence
*fence
;
352 struct radeon_unpin_work
*work
;
354 u32 tiling_flags
, pitch_pixels
;
358 work
= kzalloc(sizeof *work
, GFP_KERNEL
);
362 r
= radeon_fence_create(rdev
, &fence
);
363 if (unlikely(r
!= 0)) {
365 DRM_ERROR("flip queue: failed to create fence.\n");
370 work
->crtc_id
= radeon_crtc
->crtc_id
;
371 work
->fence
= radeon_fence_ref(fence
);
372 old_radeon_fb
= to_radeon_framebuffer(crtc
->fb
);
373 new_radeon_fb
= to_radeon_framebuffer(fb
);
374 /* schedule unpin of the old buffer */
375 obj
= old_radeon_fb
->obj
;
376 /* take a reference to the old object */
377 drm_gem_object_reference(obj
);
378 rbo
= gem_to_radeon_bo(obj
);
380 INIT_WORK(&work
->work
, radeon_unpin_work_func
);
382 /* We borrow the event spin lock for protecting unpin_work */
383 spin_lock_irqsave(&dev
->event_lock
, flags
);
384 if (radeon_crtc
->unpin_work
) {
385 DRM_DEBUG_DRIVER("flip queue: crtc already busy\n");
389 radeon_crtc
->unpin_work
= work
;
390 radeon_crtc
->deferred_flip_completion
= 0;
391 spin_unlock_irqrestore(&dev
->event_lock
, flags
);
393 /* pin the new buffer */
394 obj
= new_radeon_fb
->obj
;
395 rbo
= gem_to_radeon_bo(obj
);
397 DRM_DEBUG_DRIVER("flip-ioctl() cur_fbo = %p, cur_bbo = %p\n",
400 r
= radeon_bo_reserve(rbo
, false);
401 if (unlikely(r
!= 0)) {
402 DRM_ERROR("failed to reserve new rbo buffer before flip\n");
405 r
= radeon_bo_pin(rbo
, RADEON_GEM_DOMAIN_VRAM
, &base
);
406 if (unlikely(r
!= 0)) {
407 radeon_bo_unreserve(rbo
);
409 DRM_ERROR("failed to pin new rbo buffer before flip\n");
412 radeon_bo_get_tiling_flags(rbo
, &tiling_flags
, NULL
);
413 radeon_bo_unreserve(rbo
);
415 if (!ASIC_IS_AVIVO(rdev
)) {
416 /* crtc offset is from display base addr not FB location */
417 base
-= radeon_crtc
->legacy_display_base_addr
;
418 pitch_pixels
= fb
->pitch
/ (fb
->bits_per_pixel
/ 8);
420 if (tiling_flags
& RADEON_TILING_MACRO
) {
421 if (ASIC_IS_R300(rdev
)) {
424 int byteshift
= fb
->bits_per_pixel
>> 4;
425 int tile_addr
= (((crtc
->y
>> 3) * pitch_pixels
+ crtc
->x
) >> (8 - byteshift
)) << 11;
426 base
+= tile_addr
+ ((crtc
->x
<< byteshift
) % 256) + ((crtc
->y
% 8) << 8);
429 int offset
= crtc
->y
* pitch_pixels
+ crtc
->x
;
430 switch (fb
->bits_per_pixel
) {
451 spin_lock_irqsave(&dev
->event_lock
, flags
);
452 work
->new_crtc_base
= base
;
453 spin_unlock_irqrestore(&dev
->event_lock
, flags
);
458 r
= drm_vblank_get(dev
, radeon_crtc
->crtc_id
);
460 DRM_ERROR("failed to get vblank before flip\n");
464 /* 32 ought to cover us */
465 r
= radeon_ring_lock(rdev
, 32);
467 DRM_ERROR("failed to lock the ring before flip\n");
472 radeon_fence_emit(rdev
, fence
);
473 /* set the proper interrupt */
474 radeon_pre_page_flip(rdev
, radeon_crtc
->crtc_id
);
476 radeon_ring_unlock_commit(rdev
);
481 drm_vblank_put(dev
, radeon_crtc
->crtc_id
);
484 r
= radeon_bo_reserve(rbo
, false);
485 if (unlikely(r
!= 0)) {
486 DRM_ERROR("failed to reserve new rbo in error path\n");
489 r
= radeon_bo_unpin(rbo
);
490 if (unlikely(r
!= 0)) {
491 radeon_bo_unreserve(rbo
);
493 DRM_ERROR("failed to unpin new rbo in error path\n");
496 radeon_bo_unreserve(rbo
);
499 spin_lock_irqsave(&dev
->event_lock
, flags
);
500 radeon_crtc
->unpin_work
= NULL
;
502 drm_gem_object_unreference_unlocked(old_radeon_fb
->obj
);
503 spin_unlock_irqrestore(&dev
->event_lock
, flags
);
504 radeon_fence_unref(&fence
);
510 static const struct drm_crtc_funcs radeon_crtc_funcs
= {
511 .cursor_set
= radeon_crtc_cursor_set
,
512 .cursor_move
= radeon_crtc_cursor_move
,
513 .gamma_set
= radeon_crtc_gamma_set
,
514 .set_config
= drm_crtc_helper_set_config
,
515 .destroy
= radeon_crtc_destroy
,
516 .page_flip
= radeon_crtc_page_flip
,
519 static void radeon_crtc_init(struct drm_device
*dev
, int index
)
521 struct radeon_device
*rdev
= dev
->dev_private
;
522 struct radeon_crtc
*radeon_crtc
;
525 radeon_crtc
= kzalloc(sizeof(struct radeon_crtc
) + (RADEONFB_CONN_LIMIT
* sizeof(struct drm_connector
*)), GFP_KERNEL
);
526 if (radeon_crtc
== NULL
)
529 drm_crtc_init(dev
, &radeon_crtc
->base
, &radeon_crtc_funcs
);
531 drm_mode_crtc_set_gamma_size(&radeon_crtc
->base
, 256);
532 radeon_crtc
->crtc_id
= index
;
533 rdev
->mode_info
.crtcs
[index
] = radeon_crtc
;
536 radeon_crtc
->mode_set
.crtc
= &radeon_crtc
->base
;
537 radeon_crtc
->mode_set
.connectors
= (struct drm_connector
**)(radeon_crtc
+ 1);
538 radeon_crtc
->mode_set
.num_connectors
= 0;
541 for (i
= 0; i
< 256; i
++) {
542 radeon_crtc
->lut_r
[i
] = i
<< 2;
543 radeon_crtc
->lut_g
[i
] = i
<< 2;
544 radeon_crtc
->lut_b
[i
] = i
<< 2;
547 if (rdev
->is_atom_bios
&& (ASIC_IS_AVIVO(rdev
) || radeon_r4xx_atom
))
548 radeon_atombios_init_crtc(dev
, radeon_crtc
);
550 radeon_legacy_init_crtc(dev
, radeon_crtc
);
553 static const char *encoder_names
[36] = {
573 "INTERNAL_KLDSCP_TMDS1",
574 "INTERNAL_KLDSCP_DVO1",
575 "INTERNAL_KLDSCP_DAC1",
576 "INTERNAL_KLDSCP_DAC2",
585 "INTERNAL_KLDSCP_LVTMA",
592 static const char *connector_names
[15] = {
610 static const char *hpd_names
[6] = {
619 static void radeon_print_display_setup(struct drm_device
*dev
)
621 struct drm_connector
*connector
;
622 struct radeon_connector
*radeon_connector
;
623 struct drm_encoder
*encoder
;
624 struct radeon_encoder
*radeon_encoder
;
628 DRM_INFO("Radeon Display Connectors\n");
629 list_for_each_entry(connector
, &dev
->mode_config
.connector_list
, head
) {
630 radeon_connector
= to_radeon_connector(connector
);
631 DRM_INFO("Connector %d:\n", i
);
632 DRM_INFO(" %s\n", connector_names
[connector
->connector_type
]);
633 if (radeon_connector
->hpd
.hpd
!= RADEON_HPD_NONE
)
634 DRM_INFO(" %s\n", hpd_names
[radeon_connector
->hpd
.hpd
]);
635 if (radeon_connector
->ddc_bus
) {
636 DRM_INFO(" DDC: 0x%x 0x%x 0x%x 0x%x 0x%x 0x%x 0x%x 0x%x\n",
637 radeon_connector
->ddc_bus
->rec
.mask_clk_reg
,
638 radeon_connector
->ddc_bus
->rec
.mask_data_reg
,
639 radeon_connector
->ddc_bus
->rec
.a_clk_reg
,
640 radeon_connector
->ddc_bus
->rec
.a_data_reg
,
641 radeon_connector
->ddc_bus
->rec
.en_clk_reg
,
642 radeon_connector
->ddc_bus
->rec
.en_data_reg
,
643 radeon_connector
->ddc_bus
->rec
.y_clk_reg
,
644 radeon_connector
->ddc_bus
->rec
.y_data_reg
);
645 if (radeon_connector
->router
.ddc_valid
)
646 DRM_INFO(" DDC Router 0x%x/0x%x\n",
647 radeon_connector
->router
.ddc_mux_control_pin
,
648 radeon_connector
->router
.ddc_mux_state
);
649 if (radeon_connector
->router
.cd_valid
)
650 DRM_INFO(" Clock/Data Router 0x%x/0x%x\n",
651 radeon_connector
->router
.cd_mux_control_pin
,
652 radeon_connector
->router
.cd_mux_state
);
654 if (connector
->connector_type
== DRM_MODE_CONNECTOR_VGA
||
655 connector
->connector_type
== DRM_MODE_CONNECTOR_DVII
||
656 connector
->connector_type
== DRM_MODE_CONNECTOR_DVID
||
657 connector
->connector_type
== DRM_MODE_CONNECTOR_DVIA
||
658 connector
->connector_type
== DRM_MODE_CONNECTOR_HDMIA
||
659 connector
->connector_type
== DRM_MODE_CONNECTOR_HDMIB
)
660 DRM_INFO(" DDC: no ddc bus - possible BIOS bug - please report to xorg-driver-ati@lists.x.org\n");
662 DRM_INFO(" Encoders:\n");
663 list_for_each_entry(encoder
, &dev
->mode_config
.encoder_list
, head
) {
664 radeon_encoder
= to_radeon_encoder(encoder
);
665 devices
= radeon_encoder
->devices
& radeon_connector
->devices
;
667 if (devices
& ATOM_DEVICE_CRT1_SUPPORT
)
668 DRM_INFO(" CRT1: %s\n", encoder_names
[radeon_encoder
->encoder_id
]);
669 if (devices
& ATOM_DEVICE_CRT2_SUPPORT
)
670 DRM_INFO(" CRT2: %s\n", encoder_names
[radeon_encoder
->encoder_id
]);
671 if (devices
& ATOM_DEVICE_LCD1_SUPPORT
)
672 DRM_INFO(" LCD1: %s\n", encoder_names
[radeon_encoder
->encoder_id
]);
673 if (devices
& ATOM_DEVICE_DFP1_SUPPORT
)
674 DRM_INFO(" DFP1: %s\n", encoder_names
[radeon_encoder
->encoder_id
]);
675 if (devices
& ATOM_DEVICE_DFP2_SUPPORT
)
676 DRM_INFO(" DFP2: %s\n", encoder_names
[radeon_encoder
->encoder_id
]);
677 if (devices
& ATOM_DEVICE_DFP3_SUPPORT
)
678 DRM_INFO(" DFP3: %s\n", encoder_names
[radeon_encoder
->encoder_id
]);
679 if (devices
& ATOM_DEVICE_DFP4_SUPPORT
)
680 DRM_INFO(" DFP4: %s\n", encoder_names
[radeon_encoder
->encoder_id
]);
681 if (devices
& ATOM_DEVICE_DFP5_SUPPORT
)
682 DRM_INFO(" DFP5: %s\n", encoder_names
[radeon_encoder
->encoder_id
]);
683 if (devices
& ATOM_DEVICE_DFP6_SUPPORT
)
684 DRM_INFO(" DFP6: %s\n", encoder_names
[radeon_encoder
->encoder_id
]);
685 if (devices
& ATOM_DEVICE_TV1_SUPPORT
)
686 DRM_INFO(" TV1: %s\n", encoder_names
[radeon_encoder
->encoder_id
]);
687 if (devices
& ATOM_DEVICE_CV_SUPPORT
)
688 DRM_INFO(" CV: %s\n", encoder_names
[radeon_encoder
->encoder_id
]);
695 static bool radeon_setup_enc_conn(struct drm_device
*dev
)
697 struct radeon_device
*rdev
= dev
->dev_private
;
698 struct drm_connector
*drm_connector
;
702 if (rdev
->is_atom_bios
) {
703 ret
= radeon_get_atom_connector_info_from_supported_devices_table(dev
);
705 ret
= radeon_get_atom_connector_info_from_object_table(dev
);
707 ret
= radeon_get_legacy_connector_info_from_bios(dev
);
709 ret
= radeon_get_legacy_connector_info_from_table(dev
);
712 if (!ASIC_IS_AVIVO(rdev
))
713 ret
= radeon_get_legacy_connector_info_from_table(dev
);
716 radeon_setup_encoder_clones(dev
);
717 radeon_print_display_setup(dev
);
718 list_for_each_entry(drm_connector
, &dev
->mode_config
.connector_list
, head
)
719 radeon_ddc_dump(drm_connector
);
725 int radeon_ddc_get_modes(struct radeon_connector
*radeon_connector
)
727 struct drm_device
*dev
= radeon_connector
->base
.dev
;
728 struct radeon_device
*rdev
= dev
->dev_private
;
731 /* on hw with routers, select right port */
732 if (radeon_connector
->router
.ddc_valid
)
733 radeon_router_select_ddc_port(radeon_connector
);
735 if ((radeon_connector
->base
.connector_type
== DRM_MODE_CONNECTOR_DisplayPort
) ||
736 (radeon_connector
->base
.connector_type
== DRM_MODE_CONNECTOR_eDP
)) {
737 struct radeon_connector_atom_dig
*dig
= radeon_connector
->con_priv
;
738 if ((dig
->dp_sink_type
== CONNECTOR_OBJECT_ID_DISPLAYPORT
||
739 dig
->dp_sink_type
== CONNECTOR_OBJECT_ID_eDP
) && dig
->dp_i2c_bus
)
740 radeon_connector
->edid
= drm_get_edid(&radeon_connector
->base
, &dig
->dp_i2c_bus
->adapter
);
742 if (!radeon_connector
->ddc_bus
)
744 if (!radeon_connector
->edid
) {
745 radeon_connector
->edid
= drm_get_edid(&radeon_connector
->base
, &radeon_connector
->ddc_bus
->adapter
);
748 if (!radeon_connector
->edid
) {
749 if (rdev
->is_atom_bios
) {
750 /* some laptops provide a hardcoded edid in rom for LCDs */
751 if (((radeon_connector
->base
.connector_type
== DRM_MODE_CONNECTOR_LVDS
) ||
752 (radeon_connector
->base
.connector_type
== DRM_MODE_CONNECTOR_eDP
)))
753 radeon_connector
->edid
= radeon_bios_get_hardcoded_edid(rdev
);
755 /* some servers provide a hardcoded edid in rom for KVMs */
756 radeon_connector
->edid
= radeon_bios_get_hardcoded_edid(rdev
);
758 if (radeon_connector
->edid
) {
759 drm_mode_connector_update_edid_property(&radeon_connector
->base
, radeon_connector
->edid
);
760 ret
= drm_add_edid_modes(&radeon_connector
->base
, radeon_connector
->edid
);
763 drm_mode_connector_update_edid_property(&radeon_connector
->base
, NULL
);
767 static int radeon_ddc_dump(struct drm_connector
*connector
)
770 struct radeon_connector
*radeon_connector
= to_radeon_connector(connector
);
773 /* on hw with routers, select right port */
774 if (radeon_connector
->router
.ddc_valid
)
775 radeon_router_select_ddc_port(radeon_connector
);
777 if (!radeon_connector
->ddc_bus
)
779 edid
= drm_get_edid(connector
, &radeon_connector
->ddc_bus
->adapter
);
787 static void avivo_get_fb_div(struct radeon_pll
*pll
,
794 u32 tmp
= post_div
* ref_div
;
797 *fb_div
= tmp
/ pll
->reference_freq
;
798 *frac_fb_div
= tmp
% pll
->reference_freq
;
800 if (*fb_div
> pll
->max_feedback_div
)
801 *fb_div
= pll
->max_feedback_div
;
802 else if (*fb_div
< pll
->min_feedback_div
)
803 *fb_div
= pll
->min_feedback_div
;
806 static u32
avivo_get_post_div(struct radeon_pll
*pll
,
809 u32 vco
, post_div
, tmp
;
811 if (pll
->flags
& RADEON_PLL_USE_POST_DIV
)
812 return pll
->post_div
;
814 if (pll
->flags
& RADEON_PLL_PREFER_MINM_OVER_MAXP
) {
815 if (pll
->flags
& RADEON_PLL_IS_LCD
)
816 vco
= pll
->lcd_pll_out_min
;
818 vco
= pll
->pll_out_min
;
820 if (pll
->flags
& RADEON_PLL_IS_LCD
)
821 vco
= pll
->lcd_pll_out_max
;
823 vco
= pll
->pll_out_max
;
826 post_div
= vco
/ target_clock
;
827 tmp
= vco
% target_clock
;
829 if (pll
->flags
& RADEON_PLL_PREFER_MINM_OVER_MAXP
) {
837 if (post_div
> pll
->max_post_div
)
838 post_div
= pll
->max_post_div
;
839 else if (post_div
< pll
->min_post_div
)
840 post_div
= pll
->min_post_div
;
845 #define MAX_TOLERANCE 10
847 void radeon_compute_pll_avivo(struct radeon_pll
*pll
,
855 u32 target_clock
= freq
/ 10;
856 u32 post_div
= avivo_get_post_div(pll
, target_clock
);
857 u32 ref_div
= pll
->min_ref_div
;
858 u32 fb_div
= 0, frac_fb_div
= 0, tmp
;
860 if (pll
->flags
& RADEON_PLL_USE_REF_DIV
)
861 ref_div
= pll
->reference_div
;
863 if (pll
->flags
& RADEON_PLL_USE_FRAC_FB_DIV
) {
864 avivo_get_fb_div(pll
, target_clock
, post_div
, ref_div
, &fb_div
, &frac_fb_div
);
865 frac_fb_div
= (100 * frac_fb_div
) / pll
->reference_freq
;
866 if (frac_fb_div
>= 5) {
868 frac_fb_div
= frac_fb_div
/ 10;
871 if (frac_fb_div
>= 10) {
876 while (ref_div
<= pll
->max_ref_div
) {
877 avivo_get_fb_div(pll
, target_clock
, post_div
, ref_div
,
878 &fb_div
, &frac_fb_div
);
879 if (frac_fb_div
>= (pll
->reference_freq
/ 2))
882 tmp
= (pll
->reference_freq
* fb_div
) / (post_div
* ref_div
);
883 tmp
= (tmp
* 10000) / target_clock
;
885 if (tmp
> (10000 + MAX_TOLERANCE
))
887 else if (tmp
>= (10000 - MAX_TOLERANCE
))
894 *dot_clock_p
= ((pll
->reference_freq
* fb_div
* 10) + (pll
->reference_freq
* frac_fb_div
)) /
895 (ref_div
* post_div
* 10);
897 *frac_fb_div_p
= frac_fb_div
;
898 *ref_div_p
= ref_div
;
899 *post_div_p
= post_div
;
900 DRM_DEBUG_KMS("%d, pll dividers - fb: %d.%d ref: %d, post %d\n",
901 *dot_clock_p
, fb_div
, frac_fb_div
, ref_div
, post_div
);
905 static inline uint32_t radeon_div(uint64_t n
, uint32_t d
)
915 void radeon_compute_pll_legacy(struct radeon_pll
*pll
,
917 uint32_t *dot_clock_p
,
919 uint32_t *frac_fb_div_p
,
921 uint32_t *post_div_p
)
923 uint32_t min_ref_div
= pll
->min_ref_div
;
924 uint32_t max_ref_div
= pll
->max_ref_div
;
925 uint32_t min_post_div
= pll
->min_post_div
;
926 uint32_t max_post_div
= pll
->max_post_div
;
927 uint32_t min_fractional_feed_div
= 0;
928 uint32_t max_fractional_feed_div
= 0;
929 uint32_t best_vco
= pll
->best_vco
;
930 uint32_t best_post_div
= 1;
931 uint32_t best_ref_div
= 1;
932 uint32_t best_feedback_div
= 1;
933 uint32_t best_frac_feedback_div
= 0;
934 uint32_t best_freq
= -1;
935 uint32_t best_error
= 0xffffffff;
936 uint32_t best_vco_diff
= 1;
938 u32 pll_out_min
, pll_out_max
;
940 DRM_DEBUG_KMS("PLL freq %llu %u %u\n", freq
, pll
->min_ref_div
, pll
->max_ref_div
);
943 if (pll
->flags
& RADEON_PLL_IS_LCD
) {
944 pll_out_min
= pll
->lcd_pll_out_min
;
945 pll_out_max
= pll
->lcd_pll_out_max
;
947 pll_out_min
= pll
->pll_out_min
;
948 pll_out_max
= pll
->pll_out_max
;
951 if (pll_out_min
> 64800)
954 if (pll
->flags
& RADEON_PLL_USE_REF_DIV
)
955 min_ref_div
= max_ref_div
= pll
->reference_div
;
957 while (min_ref_div
< max_ref_div
-1) {
958 uint32_t mid
= (min_ref_div
+ max_ref_div
) / 2;
959 uint32_t pll_in
= pll
->reference_freq
/ mid
;
960 if (pll_in
< pll
->pll_in_min
)
962 else if (pll_in
> pll
->pll_in_max
)
969 if (pll
->flags
& RADEON_PLL_USE_POST_DIV
)
970 min_post_div
= max_post_div
= pll
->post_div
;
972 if (pll
->flags
& RADEON_PLL_USE_FRAC_FB_DIV
) {
973 min_fractional_feed_div
= pll
->min_frac_feedback_div
;
974 max_fractional_feed_div
= pll
->max_frac_feedback_div
;
977 for (post_div
= max_post_div
; post_div
>= min_post_div
; --post_div
) {
980 if ((pll
->flags
& RADEON_PLL_NO_ODD_POST_DIV
) && (post_div
& 1))
983 /* legacy radeons only have a few post_divs */
984 if (pll
->flags
& RADEON_PLL_LEGACY
) {
985 if ((post_div
== 5) ||
996 for (ref_div
= min_ref_div
; ref_div
<= max_ref_div
; ++ref_div
) {
997 uint32_t feedback_div
, current_freq
= 0, error
, vco_diff
;
998 uint32_t pll_in
= pll
->reference_freq
/ ref_div
;
999 uint32_t min_feed_div
= pll
->min_feedback_div
;
1000 uint32_t max_feed_div
= pll
->max_feedback_div
+ 1;
1002 if (pll_in
< pll
->pll_in_min
|| pll_in
> pll
->pll_in_max
)
1005 while (min_feed_div
< max_feed_div
) {
1007 uint32_t min_frac_feed_div
= min_fractional_feed_div
;
1008 uint32_t max_frac_feed_div
= max_fractional_feed_div
+ 1;
1009 uint32_t frac_feedback_div
;
1012 feedback_div
= (min_feed_div
+ max_feed_div
) / 2;
1014 tmp
= (uint64_t)pll
->reference_freq
* feedback_div
;
1015 vco
= radeon_div(tmp
, ref_div
);
1017 if (vco
< pll_out_min
) {
1018 min_feed_div
= feedback_div
+ 1;
1020 } else if (vco
> pll_out_max
) {
1021 max_feed_div
= feedback_div
;
1025 while (min_frac_feed_div
< max_frac_feed_div
) {
1026 frac_feedback_div
= (min_frac_feed_div
+ max_frac_feed_div
) / 2;
1027 tmp
= (uint64_t)pll
->reference_freq
* 10000 * feedback_div
;
1028 tmp
+= (uint64_t)pll
->reference_freq
* 1000 * frac_feedback_div
;
1029 current_freq
= radeon_div(tmp
, ref_div
* post_div
);
1031 if (pll
->flags
& RADEON_PLL_PREFER_CLOSEST_LOWER
) {
1032 if (freq
< current_freq
)
1035 error
= freq
- current_freq
;
1037 error
= abs(current_freq
- freq
);
1038 vco_diff
= abs(vco
- best_vco
);
1040 if ((best_vco
== 0 && error
< best_error
) ||
1042 ((best_error
> 100 && error
< best_error
- 100) ||
1043 (abs(error
- best_error
) < 100 && vco_diff
< best_vco_diff
)))) {
1044 best_post_div
= post_div
;
1045 best_ref_div
= ref_div
;
1046 best_feedback_div
= feedback_div
;
1047 best_frac_feedback_div
= frac_feedback_div
;
1048 best_freq
= current_freq
;
1050 best_vco_diff
= vco_diff
;
1051 } else if (current_freq
== freq
) {
1052 if (best_freq
== -1) {
1053 best_post_div
= post_div
;
1054 best_ref_div
= ref_div
;
1055 best_feedback_div
= feedback_div
;
1056 best_frac_feedback_div
= frac_feedback_div
;
1057 best_freq
= current_freq
;
1059 best_vco_diff
= vco_diff
;
1060 } else if (((pll
->flags
& RADEON_PLL_PREFER_LOW_REF_DIV
) && (ref_div
< best_ref_div
)) ||
1061 ((pll
->flags
& RADEON_PLL_PREFER_HIGH_REF_DIV
) && (ref_div
> best_ref_div
)) ||
1062 ((pll
->flags
& RADEON_PLL_PREFER_LOW_FB_DIV
) && (feedback_div
< best_feedback_div
)) ||
1063 ((pll
->flags
& RADEON_PLL_PREFER_HIGH_FB_DIV
) && (feedback_div
> best_feedback_div
)) ||
1064 ((pll
->flags
& RADEON_PLL_PREFER_LOW_POST_DIV
) && (post_div
< best_post_div
)) ||
1065 ((pll
->flags
& RADEON_PLL_PREFER_HIGH_POST_DIV
) && (post_div
> best_post_div
))) {
1066 best_post_div
= post_div
;
1067 best_ref_div
= ref_div
;
1068 best_feedback_div
= feedback_div
;
1069 best_frac_feedback_div
= frac_feedback_div
;
1070 best_freq
= current_freq
;
1072 best_vco_diff
= vco_diff
;
1075 if (current_freq
< freq
)
1076 min_frac_feed_div
= frac_feedback_div
+ 1;
1078 max_frac_feed_div
= frac_feedback_div
;
1080 if (current_freq
< freq
)
1081 min_feed_div
= feedback_div
+ 1;
1083 max_feed_div
= feedback_div
;
1088 *dot_clock_p
= best_freq
/ 10000;
1089 *fb_div_p
= best_feedback_div
;
1090 *frac_fb_div_p
= best_frac_feedback_div
;
1091 *ref_div_p
= best_ref_div
;
1092 *post_div_p
= best_post_div
;
1093 DRM_DEBUG_KMS("%lld %d, pll dividers - fb: %d.%d ref: %d, post %d\n",
1095 best_freq
/ 1000, best_feedback_div
, best_frac_feedback_div
,
1096 best_ref_div
, best_post_div
);
1100 static void radeon_user_framebuffer_destroy(struct drm_framebuffer
*fb
)
1102 struct radeon_framebuffer
*radeon_fb
= to_radeon_framebuffer(fb
);
1104 if (radeon_fb
->obj
) {
1105 drm_gem_object_unreference_unlocked(radeon_fb
->obj
);
1107 drm_framebuffer_cleanup(fb
);
1111 static int radeon_user_framebuffer_create_handle(struct drm_framebuffer
*fb
,
1112 struct drm_file
*file_priv
,
1113 unsigned int *handle
)
1115 struct radeon_framebuffer
*radeon_fb
= to_radeon_framebuffer(fb
);
1117 return drm_gem_handle_create(file_priv
, radeon_fb
->obj
, handle
);
/* framebuffer vtable: ties the generic DRM fb to its radeon GEM backing */
static const struct drm_framebuffer_funcs radeon_fb_funcs = {
	.destroy = radeon_user_framebuffer_destroy,
	.create_handle = radeon_user_framebuffer_create_handle,
};
1126 radeon_framebuffer_init(struct drm_device
*dev
,
1127 struct radeon_framebuffer
*rfb
,
1128 struct drm_mode_fb_cmd
*mode_cmd
,
1129 struct drm_gem_object
*obj
)
1132 drm_framebuffer_init(dev
, &rfb
->base
, &radeon_fb_funcs
);
1133 drm_helper_mode_fill_fb_struct(&rfb
->base
, mode_cmd
);
1136 static struct drm_framebuffer
*
1137 radeon_user_framebuffer_create(struct drm_device
*dev
,
1138 struct drm_file
*file_priv
,
1139 struct drm_mode_fb_cmd
*mode_cmd
)
1141 struct drm_gem_object
*obj
;
1142 struct radeon_framebuffer
*radeon_fb
;
1144 obj
= drm_gem_object_lookup(dev
, file_priv
, mode_cmd
->handle
);
1146 dev_err(&dev
->pdev
->dev
, "No GEM object associated to handle 0x%08X, "
1147 "can't create framebuffer\n", mode_cmd
->handle
);
1148 return ERR_PTR(-ENOENT
);
1151 radeon_fb
= kzalloc(sizeof(*radeon_fb
), GFP_KERNEL
);
1152 if (radeon_fb
== NULL
)
1153 return ERR_PTR(-ENOMEM
);
1155 radeon_framebuffer_init(dev
, radeon_fb
, mode_cmd
, obj
);
1157 return &radeon_fb
->base
;
1160 static void radeon_output_poll_changed(struct drm_device
*dev
)
1162 struct radeon_device
*rdev
= dev
->dev_private
;
1163 radeon_fb_output_poll_changed(rdev
);
/* mode-config hooks: userspace fb creation + output poll notification */
static const struct drm_mode_config_funcs radeon_mode_funcs = {
	.fb_create = radeon_user_framebuffer_create,
	.output_poll_changed = radeon_output_poll_changed
};
/* (value, name) pair used to populate the DRM enum properties below.
 * NOTE(review): member declarations were dropped by the extraction;
 * names reconstructed from the .type/.name accesses in
 * radeon_modeset_create_props() — confirm types against upstream. */
struct drm_prop_enum_list {
	int type;
	char *name;
};
/* TMDS PLL setup source for pre-AVIVO chips.
 * NOTE(review): initializer entries were dropped by the extraction;
 * reconstructed from upstream radeon_display.c — verify. */
static struct drm_prop_enum_list radeon_tmds_pll_enum_list[] =
{	{ 0, "driver" },
	{ 1, "bios" },
};
/* analog TV standards exposed through the "tv standard" enum property */
static struct drm_prop_enum_list radeon_tv_std_enum_list[] =
{	{ TV_STD_NTSC, "ntsc" },
	{ TV_STD_PAL, "pal" },
	{ TV_STD_PAL_M, "pal-m" },
	{ TV_STD_PAL_60, "pal-60" },
	{ TV_STD_NTSC_J, "ntsc-j" },
	{ TV_STD_SCART_PAL, "scart-pal" },
	{ TV_STD_PAL_CN, "pal-cn" },
	{ TV_STD_SECAM, "secam" },
};
/* underscan modes for the "underscan" connector enum property */
static struct drm_prop_enum_list radeon_underscan_enum_list[] =
{	{ UNDERSCAN_OFF, "off" },
	{ UNDERSCAN_ON, "on" },
	{ UNDERSCAN_AUTO, "auto" },
};
/*
 * Create the driver-wide DRM properties that are later attached to
 * connectors (coherent mode, TMDS PLL source, load detection, scaling,
 * TV standard, underscan and its border sizes).
 *
 * Returns 0 on success, -ENOMEM if a drm_property_create() fails.
 *
 * NOTE(review): several lines (the name strings of the range/enum
 * properties, the -ENOMEM early returns, the final return 0) were
 * dropped by the extraction and are reconstructed here from upstream
 * radeon_display.c — verify before relying on them.
 */
static int radeon_modeset_create_props(struct radeon_device *rdev)
{
	int i, sz;

	if (rdev->is_atom_bios) {
		/* HDMI "coherent" mode toggle, boolean encoded as 0..1 range */
		rdev->mode_info.coherent_mode_property =
			drm_property_create(rdev->ddev,
					    DRM_MODE_PROP_RANGE,
					    "coherent", 2);
		if (!rdev->mode_info.coherent_mode_property)
			return -ENOMEM;

		rdev->mode_info.coherent_mode_property->values[0] = 0;
		rdev->mode_info.coherent_mode_property->values[1] = 1;
	}

	if (!ASIC_IS_AVIVO(rdev)) {
		/* pre-AVIVO only: select TMDS PLL setup source */
		sz = ARRAY_SIZE(radeon_tmds_pll_enum_list);
		rdev->mode_info.tmds_pll_property =
			drm_property_create(rdev->ddev,
					    DRM_MODE_PROP_ENUM,
					    "tmds_pll", sz);
		for (i = 0; i < sz; i++) {
			drm_property_add_enum(rdev->mode_info.tmds_pll_property,
					      i,
					      radeon_tmds_pll_enum_list[i].type,
					      radeon_tmds_pll_enum_list[i].name);
		}
	}

	/* force load detection on analog outputs, boolean 0..1 range */
	rdev->mode_info.load_detect_property =
		drm_property_create(rdev->ddev,
				    DRM_MODE_PROP_RANGE,
				    "load detection", 2);
	if (!rdev->mode_info.load_detect_property)
		return -ENOMEM;
	rdev->mode_info.load_detect_property->values[0] = 0;
	rdev->mode_info.load_detect_property->values[1] = 1;

	drm_mode_create_scaling_mode_property(rdev->ddev);

	sz = ARRAY_SIZE(radeon_tv_std_enum_list);
	rdev->mode_info.tv_std_property =
		drm_property_create(rdev->ddev,
				    DRM_MODE_PROP_ENUM,
				    "tv standard", sz);
	for (i = 0; i < sz; i++) {
		drm_property_add_enum(rdev->mode_info.tv_std_property,
				      i,
				      radeon_tv_std_enum_list[i].type,
				      radeon_tv_std_enum_list[i].name);
	}

	sz = ARRAY_SIZE(radeon_underscan_enum_list);
	rdev->mode_info.underscan_property =
		drm_property_create(rdev->ddev,
				    DRM_MODE_PROP_ENUM,
				    "underscan", sz);
	for (i = 0; i < sz; i++) {
		drm_property_add_enum(rdev->mode_info.underscan_property,
				      i,
				      radeon_underscan_enum_list[i].type,
				      radeon_underscan_enum_list[i].name);
	}

	/* manual underscan border sizes, 0..128 pixels per edge */
	rdev->mode_info.underscan_hborder_property =
		drm_property_create(rdev->ddev,
				    DRM_MODE_PROP_RANGE,
				    "underscan hborder", 2);
	if (!rdev->mode_info.underscan_hborder_property)
		return -ENOMEM;
	rdev->mode_info.underscan_hborder_property->values[0] = 0;
	rdev->mode_info.underscan_hborder_property->values[1] = 128;

	rdev->mode_info.underscan_vborder_property =
		drm_property_create(rdev->ddev,
				    DRM_MODE_PROP_RANGE,
				    "underscan vborder", 2);
	if (!rdev->mode_info.underscan_vborder_property)
		return -ENOMEM;
	rdev->mode_info.underscan_vborder_property->values[0] = 0;
	rdev->mode_info.underscan_vborder_property->values[1] = 128;

	return 0;
}
/*
 * Select the display-watermark priority for this chip.
 *
 * radeon_disp_priority is the module parameter: 1 or 2 forces that
 * priority; 0 or any out-of-range value means "auto", where only the
 * flicker-prone r3xx/rv515 (non-IGP) chips get high priority.
 */
void radeon_update_display_priority(struct radeon_device *rdev)
{
	/* adjustment options for the display watermarks */
	if ((radeon_disp_priority == 0) || (radeon_disp_priority > 2)) {
		/* set display priority to high for r3xx, rv515 chips
		 * this avoids flickering due to underflow to the
		 * display controllers during heavy acceleration.
		 * Don't force high on rs4xx igp chips as it seems to
		 * affect the sound card. See kernel bug 15982.
		 */
		if ((ASIC_IS_R300(rdev) || (rdev->family == CHIP_RV515)) &&
		    !(rdev->flags & RADEON_IS_IGP))
			rdev->disp_priority = 2;
		else
			rdev->disp_priority = 0;
	} else
		rdev->disp_priority = radeon_disp_priority;
}
/*
 * Bring up KMS: mode-config limits, driver properties, i2c buses,
 * CRTCs, encoders/connectors, hpd, power management and fbdev.
 * Returns 0 on success or a negative error code.
 *
 * NOTE(review): error-check lines after the two `ret = ...` calls and
 * the final `return 0;` were dropped by the extraction and are
 * reconstructed from upstream — verify.
 */
int radeon_modeset_init(struct radeon_device *rdev)
{
	int i;
	int ret;

	drm_mode_config_init(rdev->ddev);
	rdev->mode_info.mode_config_initialized = true;

	rdev->ddev->mode_config.funcs = (void *)&radeon_mode_funcs;

	/* max scanout size scales with display-controller generation */
	if (ASIC_IS_DCE5(rdev)) {
		rdev->ddev->mode_config.max_width = 16384;
		rdev->ddev->mode_config.max_height = 16384;
	} else if (ASIC_IS_AVIVO(rdev)) {
		rdev->ddev->mode_config.max_width = 8192;
		rdev->ddev->mode_config.max_height = 8192;
	} else {
		rdev->ddev->mode_config.max_width = 4096;
		rdev->ddev->mode_config.max_height = 4096;
	}

	rdev->ddev->mode_config.fb_base = rdev->mc.aper_base;

	ret = radeon_modeset_create_props(rdev);
	if (ret) {
		return ret;
	}

	/* init i2c buses */
	radeon_i2c_init(rdev);

	/* check combios for a valid hardcoded EDID - Sun servers */
	if (!rdev->is_atom_bios) {
		/* check for hardcoded EDID in BIOS */
		radeon_combios_check_hardcoded_edid(rdev);
	}

	/* allocate crtcs */
	for (i = 0; i < rdev->num_crtc; i++) {
		radeon_crtc_init(rdev->ddev, i);
	}

	/* okay we should have all the bios connectors */
	ret = radeon_setup_enc_conn(rdev->ddev);
	/* NOTE(review): setup_enc_conn returns nonzero on success, so
	 * failure here returns 0 — matches upstream, looks intentional */
	if (!ret) {
		return ret;
	}

	/* init dig PHYs on atombios cards */
	if (rdev->is_atom_bios)
		radeon_atom_encoder_init(rdev);

	/* initialize hpd */
	radeon_hpd_init(rdev);

	/* Initialize power management */
	radeon_pm_init(rdev);

	radeon_fbdev_init(rdev);
	drm_kms_helper_poll_init(rdev->ddev);

	return 0;
}
/*
 * Tear down KMS in reverse of radeon_modeset_init(): fbdev first, then
 * pm, then (only if mode config was brought up) polling, hpd and the
 * DRM mode config, and finally the i2c buses.
 */
void radeon_modeset_fini(struct radeon_device *rdev)
{
	radeon_fbdev_fini(rdev);
	/* kfree(NULL) is a no-op, so safe when no EDID was found */
	kfree(rdev->mode_info.bios_hardcoded_edid);
	radeon_pm_fini(rdev);

	if (rdev->mode_info.mode_config_initialized) {
		drm_kms_helper_poll_fini(rdev->ddev);
		radeon_hpd_fini(rdev);
		drm_mode_config_cleanup(rdev->ddev);
		rdev->mode_info.mode_config_initialized = false;
	}
	/* free i2c buses */
	radeon_i2c_fini(rdev);
}
1384 static bool is_hdtv_mode(struct drm_display_mode
*mode
)
1386 /* try and guess if this is a tv or a monitor */
1387 if ((mode
->vdisplay
== 480 && mode
->hdisplay
== 720) || /* 480p */
1388 (mode
->vdisplay
== 576) || /* 576p */
1389 (mode
->vdisplay
== 720) || /* 720p */
1390 (mode
->vdisplay
== 1080)) /* 1080p */
/*
 * CRTC helper fixup: decide the RMX (scaler) mode and the horizontal /
 * vertical scaling factors for @crtc based on the encoders driving it.
 * Also computes HDMI underscan borders on AVIVO parts.
 *
 * Returns true on success, false if two encoders on the same crtc
 * request inconsistent scaling.
 *
 * NOTE(review): the extraction dropped several structural lines (the
 * `bool first` flag and its if/else around the per-encoder body,
 * `continue`, `else` branches, the fixed20_12 locals and the returns);
 * they are reconstructed here from upstream radeon_display.c — verify.
 */
bool radeon_crtc_scaling_mode_fixup(struct drm_crtc *crtc,
				    struct drm_display_mode *mode,
				    struct drm_display_mode *adjusted_mode)
{
	struct drm_device *dev = crtc->dev;
	struct radeon_device *rdev = dev->dev_private;
	struct drm_encoder *encoder;
	struct radeon_crtc *radeon_crtc = to_radeon_crtc(crtc);
	struct radeon_encoder *radeon_encoder;
	struct drm_connector *connector;
	struct radeon_connector *radeon_connector;
	bool first = true;
	u32 src_v = 1, dst_v = 1;
	u32 src_h = 1, dst_h = 1;

	radeon_crtc->h_border = 0;
	radeon_crtc->v_border = 0;

	list_for_each_entry(encoder, &dev->mode_config.encoder_list, head) {
		/* only look at encoders actually driving this crtc */
		if (encoder->crtc != crtc)
			continue;
		radeon_encoder = to_radeon_encoder(encoder);
		connector = radeon_get_connector_for_encoder(encoder);
		radeon_connector = to_radeon_connector(connector);

		if (first) {
			/* set scaling: scale only when the requested mode is
			 * smaller than the panel's native mode */
			if (radeon_encoder->rmx_type == RMX_OFF)
				radeon_crtc->rmx_type = RMX_OFF;
			else if (mode->hdisplay < radeon_encoder->native_mode.hdisplay ||
				 mode->vdisplay < radeon_encoder->native_mode.vdisplay)
				radeon_crtc->rmx_type = radeon_encoder->rmx_type;
			else
				radeon_crtc->rmx_type = RMX_OFF;
			/* copy native mode */
			memcpy(&radeon_crtc->native_mode,
			       &radeon_encoder->native_mode,
				sizeof(struct drm_display_mode));
			src_v = crtc->mode.vdisplay;
			dst_v = radeon_crtc->native_mode.vdisplay;
			src_h = crtc->mode.hdisplay;
			dst_h = radeon_crtc->native_mode.hdisplay;

			/* fix up for overscan on hdmi: shrink the active area
			 * by a border (explicit size, or hdisplay/32 + 16) and
			 * use the full scaler to fit the mode inside it */
			if (ASIC_IS_AVIVO(rdev) &&
			    (!(mode->flags & DRM_MODE_FLAG_INTERLACE)) &&
			    ((radeon_encoder->underscan_type == UNDERSCAN_ON) ||
			     ((radeon_encoder->underscan_type == UNDERSCAN_AUTO) &&
			      drm_detect_hdmi_monitor(radeon_connector->edid) &&
			      is_hdtv_mode(mode)))) {
				if (radeon_encoder->underscan_hborder != 0)
					radeon_crtc->h_border = radeon_encoder->underscan_hborder;
				else
					radeon_crtc->h_border = (mode->hdisplay >> 5) + 16;
				if (radeon_encoder->underscan_vborder != 0)
					radeon_crtc->v_border = radeon_encoder->underscan_vborder;
				else
					radeon_crtc->v_border = (mode->vdisplay >> 5) + 16;
				radeon_crtc->rmx_type = RMX_FULL;
				src_v = crtc->mode.vdisplay;
				dst_v = crtc->mode.vdisplay - (radeon_crtc->v_border * 2);
				src_h = crtc->mode.hdisplay;
				dst_h = crtc->mode.hdisplay - (radeon_crtc->h_border * 2);
			}
			first = false;
		} else {
			if (radeon_crtc->rmx_type != radeon_encoder->rmx_type) {
				/* WARNING: Right now this can't happen but
				 * in the future we need to check that scaling
				 * are consistent across different encoder
				 * (ie all encoder can work with the same
				 *  scaling).
				 */
				DRM_ERROR("Scaling not consistent across encoder.\n");
				return false;
			}
		}
	}
	if (radeon_crtc->rmx_type != RMX_OFF) {
		/* fixed-point (20.12) scale factors = source / destination */
		fixed20_12 a, b;
		a.full = dfixed_const(src_v);
		b.full = dfixed_const(dst_v);
		radeon_crtc->vsc.full = dfixed_div(a, b);
		a.full = dfixed_const(src_h);
		b.full = dfixed_const(dst_h);
		radeon_crtc->hsc.full = dfixed_div(a, b);
	} else {
		radeon_crtc->vsc.full = dfixed_const(1);
		radeon_crtc->hsc.full = dfixed_const(1);
	}
	return true;
}
1490 * Retrieve current video scanout position of crtc on a given gpu.
1492 * \param dev Device to query.
1493 * \param crtc Crtc to query.
1494 * \param *vpos Location where vertical scanout position should be stored.
1495 * \param *hpos Location where horizontal scanout position should go.
1497 * Returns vpos as a positive number while in active scanout area.
1498 * Returns vpos as a negative number inside vblank, counting the number
1499 * of scanlines to go until end of vblank, e.g., -1 means "one scanline
1500 * until start of active scanout / end of vblank."
1502 * \return Flags, or'ed together as follows:
1504 * DRM_SCANOUTPOS_VALID = Query successful.
1505 * DRM_SCANOUTPOS_INVBL = Inside vblank.
1506 * DRM_SCANOUTPOS_ACCURATE = Returned position is accurate. A lack of
1507 * this flag means that returned position may be offset by a constant but
1508 * unknown small number of scanlines wrt. real scanout position.
1511 int radeon_get_crtc_scanoutpos(struct drm_device
*dev
, int crtc
, int *vpos
, int *hpos
)
1513 u32 stat_crtc
= 0, vbl
= 0, position
= 0;
1514 int vbl_start
, vbl_end
, vtotal
, ret
= 0;
1517 struct radeon_device
*rdev
= dev
->dev_private
;
1519 if (ASIC_IS_DCE4(rdev
)) {
1521 vbl
= RREG32(EVERGREEN_CRTC_V_BLANK_START_END
+
1522 EVERGREEN_CRTC0_REGISTER_OFFSET
);
1523 position
= RREG32(EVERGREEN_CRTC_STATUS_POSITION
+
1524 EVERGREEN_CRTC0_REGISTER_OFFSET
);
1525 ret
|= DRM_SCANOUTPOS_VALID
;
1528 vbl
= RREG32(EVERGREEN_CRTC_V_BLANK_START_END
+
1529 EVERGREEN_CRTC1_REGISTER_OFFSET
);
1530 position
= RREG32(EVERGREEN_CRTC_STATUS_POSITION
+
1531 EVERGREEN_CRTC1_REGISTER_OFFSET
);
1532 ret
|= DRM_SCANOUTPOS_VALID
;
1535 vbl
= RREG32(EVERGREEN_CRTC_V_BLANK_START_END
+
1536 EVERGREEN_CRTC2_REGISTER_OFFSET
);
1537 position
= RREG32(EVERGREEN_CRTC_STATUS_POSITION
+
1538 EVERGREEN_CRTC2_REGISTER_OFFSET
);
1539 ret
|= DRM_SCANOUTPOS_VALID
;
1542 vbl
= RREG32(EVERGREEN_CRTC_V_BLANK_START_END
+
1543 EVERGREEN_CRTC3_REGISTER_OFFSET
);
1544 position
= RREG32(EVERGREEN_CRTC_STATUS_POSITION
+
1545 EVERGREEN_CRTC3_REGISTER_OFFSET
);
1546 ret
|= DRM_SCANOUTPOS_VALID
;
1549 vbl
= RREG32(EVERGREEN_CRTC_V_BLANK_START_END
+
1550 EVERGREEN_CRTC4_REGISTER_OFFSET
);
1551 position
= RREG32(EVERGREEN_CRTC_STATUS_POSITION
+
1552 EVERGREEN_CRTC4_REGISTER_OFFSET
);
1553 ret
|= DRM_SCANOUTPOS_VALID
;
1556 vbl
= RREG32(EVERGREEN_CRTC_V_BLANK_START_END
+
1557 EVERGREEN_CRTC5_REGISTER_OFFSET
);
1558 position
= RREG32(EVERGREEN_CRTC_STATUS_POSITION
+
1559 EVERGREEN_CRTC5_REGISTER_OFFSET
);
1560 ret
|= DRM_SCANOUTPOS_VALID
;
1562 } else if (ASIC_IS_AVIVO(rdev
)) {
1564 vbl
= RREG32(AVIVO_D1CRTC_V_BLANK_START_END
);
1565 position
= RREG32(AVIVO_D1CRTC_STATUS_POSITION
);
1566 ret
|= DRM_SCANOUTPOS_VALID
;
1569 vbl
= RREG32(AVIVO_D2CRTC_V_BLANK_START_END
);
1570 position
= RREG32(AVIVO_D2CRTC_STATUS_POSITION
);
1571 ret
|= DRM_SCANOUTPOS_VALID
;
1574 /* Pre-AVIVO: Different encoding of scanout pos and vblank interval. */
1576 /* Assume vbl_end == 0, get vbl_start from
1579 vbl
= (RREG32(RADEON_CRTC_V_TOTAL_DISP
) &
1580 RADEON_CRTC_V_DISP
) >> RADEON_CRTC_V_DISP_SHIFT
;
1581 /* Only retrieve vpos from upper 16 bits, set hpos == 0. */
1582 position
= (RREG32(RADEON_CRTC_VLINE_CRNT_VLINE
) >> 16) & RADEON_CRTC_V_TOTAL
;
1583 stat_crtc
= RREG32(RADEON_CRTC_STATUS
);
1584 if (!(stat_crtc
& 1))
1587 ret
|= DRM_SCANOUTPOS_VALID
;
1590 vbl
= (RREG32(RADEON_CRTC2_V_TOTAL_DISP
) &
1591 RADEON_CRTC_V_DISP
) >> RADEON_CRTC_V_DISP_SHIFT
;
1592 position
= (RREG32(RADEON_CRTC2_VLINE_CRNT_VLINE
) >> 16) & RADEON_CRTC_V_TOTAL
;
1593 stat_crtc
= RREG32(RADEON_CRTC2_STATUS
);
1594 if (!(stat_crtc
& 1))
1597 ret
|= DRM_SCANOUTPOS_VALID
;
1601 /* Decode into vertical and horizontal scanout position. */
1602 *vpos
= position
& 0x1fff;
1603 *hpos
= (position
>> 16) & 0x1fff;
1605 /* Valid vblank area boundaries from gpu retrieved? */
1608 ret
|= DRM_SCANOUTPOS_ACCURATE
;
1609 vbl_start
= vbl
& 0x1fff;
1610 vbl_end
= (vbl
>> 16) & 0x1fff;
1613 /* No: Fake something reasonable which gives at least ok results. */
1614 vbl_start
= rdev
->mode_info
.crtcs
[crtc
]->base
.hwmode
.crtc_vdisplay
;
1618 /* Test scanout position against vblank region. */
1619 if ((*vpos
< vbl_start
) && (*vpos
>= vbl_end
))
1622 /* Check if inside vblank area and apply corrective offsets:
1623 * vpos will then be >=0 in video scanout area, but negative
1624 * within vblank area, counting down the number of lines until
1628 /* Inside "upper part" of vblank area? Apply corrective offset if so: */
1629 if (in_vbl
&& (*vpos
>= vbl_start
)) {
1630 vtotal
= rdev
->mode_info
.crtcs
[crtc
]->base
.hwmode
.crtc_vtotal
;
1631 *vpos
= *vpos
- vtotal
;
1634 /* Correct for shifted end of vbl at vbl_end. */
1635 *vpos
= *vpos
- vbl_end
;
1639 ret
|= DRM_SCANOUTPOS_INVBL
;