2 * Copyright 2007-8 Advanced Micro Devices, Inc.
3 * Copyright 2008 Red Hat Inc.
5 * Permission is hereby granted, free of charge, to any person obtaining a
6 * copy of this software and associated documentation files (the "Software"),
7 * to deal in the Software without restriction, including without limitation
8 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
9 * and/or sell copies of the Software, and to permit persons to whom the
10 * Software is furnished to do so, subject to the following conditions:
12 * The above copyright notice and this permission notice shall be included in
13 * all copies or substantial portions of the Software.
15 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
18 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
19 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
20 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
21 * OTHER DEALINGS IN THE SOFTWARE.
23 * Authors: Dave Airlie
27 #include "radeon_drm.h"
31 #include <asm/div64.h>
33 #include "drm_crtc_helper.h"
36 static int radeon_ddc_dump(struct drm_connector
*connector
);
38 static void avivo_crtc_load_lut(struct drm_crtc
*crtc
)
40 struct radeon_crtc
*radeon_crtc
= to_radeon_crtc(crtc
);
41 struct drm_device
*dev
= crtc
->dev
;
42 struct radeon_device
*rdev
= dev
->dev_private
;
45 DRM_DEBUG_KMS("%d\n", radeon_crtc
->crtc_id
);
46 WREG32(AVIVO_DC_LUTA_CONTROL
+ radeon_crtc
->crtc_offset
, 0);
48 WREG32(AVIVO_DC_LUTA_BLACK_OFFSET_BLUE
+ radeon_crtc
->crtc_offset
, 0);
49 WREG32(AVIVO_DC_LUTA_BLACK_OFFSET_GREEN
+ radeon_crtc
->crtc_offset
, 0);
50 WREG32(AVIVO_DC_LUTA_BLACK_OFFSET_RED
+ radeon_crtc
->crtc_offset
, 0);
52 WREG32(AVIVO_DC_LUTA_WHITE_OFFSET_BLUE
+ radeon_crtc
->crtc_offset
, 0xffff);
53 WREG32(AVIVO_DC_LUTA_WHITE_OFFSET_GREEN
+ radeon_crtc
->crtc_offset
, 0xffff);
54 WREG32(AVIVO_DC_LUTA_WHITE_OFFSET_RED
+ radeon_crtc
->crtc_offset
, 0xffff);
56 WREG32(AVIVO_DC_LUT_RW_SELECT
, radeon_crtc
->crtc_id
);
57 WREG32(AVIVO_DC_LUT_RW_MODE
, 0);
58 WREG32(AVIVO_DC_LUT_WRITE_EN_MASK
, 0x0000003f);
60 WREG8(AVIVO_DC_LUT_RW_INDEX
, 0);
61 for (i
= 0; i
< 256; i
++) {
62 WREG32(AVIVO_DC_LUT_30_COLOR
,
63 (radeon_crtc
->lut_r
[i
] << 20) |
64 (radeon_crtc
->lut_g
[i
] << 10) |
65 (radeon_crtc
->lut_b
[i
] << 0));
68 WREG32(AVIVO_D1GRPH_LUT_SEL
+ radeon_crtc
->crtc_offset
, radeon_crtc
->crtc_id
);
71 static void dce4_crtc_load_lut(struct drm_crtc
*crtc
)
73 struct radeon_crtc
*radeon_crtc
= to_radeon_crtc(crtc
);
74 struct drm_device
*dev
= crtc
->dev
;
75 struct radeon_device
*rdev
= dev
->dev_private
;
78 DRM_DEBUG_KMS("%d\n", radeon_crtc
->crtc_id
);
79 WREG32(EVERGREEN_DC_LUT_CONTROL
+ radeon_crtc
->crtc_offset
, 0);
81 WREG32(EVERGREEN_DC_LUT_BLACK_OFFSET_BLUE
+ radeon_crtc
->crtc_offset
, 0);
82 WREG32(EVERGREEN_DC_LUT_BLACK_OFFSET_GREEN
+ radeon_crtc
->crtc_offset
, 0);
83 WREG32(EVERGREEN_DC_LUT_BLACK_OFFSET_RED
+ radeon_crtc
->crtc_offset
, 0);
85 WREG32(EVERGREEN_DC_LUT_WHITE_OFFSET_BLUE
+ radeon_crtc
->crtc_offset
, 0xffff);
86 WREG32(EVERGREEN_DC_LUT_WHITE_OFFSET_GREEN
+ radeon_crtc
->crtc_offset
, 0xffff);
87 WREG32(EVERGREEN_DC_LUT_WHITE_OFFSET_RED
+ radeon_crtc
->crtc_offset
, 0xffff);
89 WREG32(EVERGREEN_DC_LUT_RW_MODE
+ radeon_crtc
->crtc_offset
, 0);
90 WREG32(EVERGREEN_DC_LUT_WRITE_EN_MASK
+ radeon_crtc
->crtc_offset
, 0x00000007);
92 WREG32(EVERGREEN_DC_LUT_RW_INDEX
+ radeon_crtc
->crtc_offset
, 0);
93 for (i
= 0; i
< 256; i
++) {
94 WREG32(EVERGREEN_DC_LUT_30_COLOR
+ radeon_crtc
->crtc_offset
,
95 (radeon_crtc
->lut_r
[i
] << 20) |
96 (radeon_crtc
->lut_g
[i
] << 10) |
97 (radeon_crtc
->lut_b
[i
] << 0));
101 static void dce5_crtc_load_lut(struct drm_crtc
*crtc
)
103 struct radeon_crtc
*radeon_crtc
= to_radeon_crtc(crtc
);
104 struct drm_device
*dev
= crtc
->dev
;
105 struct radeon_device
*rdev
= dev
->dev_private
;
108 DRM_DEBUG_KMS("%d\n", radeon_crtc
->crtc_id
);
110 WREG32(NI_INPUT_CSC_CONTROL
+ radeon_crtc
->crtc_offset
,
111 (NI_INPUT_CSC_GRPH_MODE(NI_INPUT_CSC_BYPASS
) |
112 NI_INPUT_CSC_OVL_MODE(NI_INPUT_CSC_BYPASS
)));
113 WREG32(NI_PRESCALE_GRPH_CONTROL
+ radeon_crtc
->crtc_offset
,
114 NI_GRPH_PRESCALE_BYPASS
);
115 WREG32(NI_PRESCALE_OVL_CONTROL
+ radeon_crtc
->crtc_offset
,
116 NI_OVL_PRESCALE_BYPASS
);
117 WREG32(NI_INPUT_GAMMA_CONTROL
+ radeon_crtc
->crtc_offset
,
118 (NI_GRPH_INPUT_GAMMA_MODE(NI_INPUT_GAMMA_USE_LUT
) |
119 NI_OVL_INPUT_GAMMA_MODE(NI_INPUT_GAMMA_USE_LUT
)));
121 WREG32(EVERGREEN_DC_LUT_CONTROL
+ radeon_crtc
->crtc_offset
, 0);
123 WREG32(EVERGREEN_DC_LUT_BLACK_OFFSET_BLUE
+ radeon_crtc
->crtc_offset
, 0);
124 WREG32(EVERGREEN_DC_LUT_BLACK_OFFSET_GREEN
+ radeon_crtc
->crtc_offset
, 0);
125 WREG32(EVERGREEN_DC_LUT_BLACK_OFFSET_RED
+ radeon_crtc
->crtc_offset
, 0);
127 WREG32(EVERGREEN_DC_LUT_WHITE_OFFSET_BLUE
+ radeon_crtc
->crtc_offset
, 0xffff);
128 WREG32(EVERGREEN_DC_LUT_WHITE_OFFSET_GREEN
+ radeon_crtc
->crtc_offset
, 0xffff);
129 WREG32(EVERGREEN_DC_LUT_WHITE_OFFSET_RED
+ radeon_crtc
->crtc_offset
, 0xffff);
131 WREG32(EVERGREEN_DC_LUT_RW_MODE
+ radeon_crtc
->crtc_offset
, 0);
132 WREG32(EVERGREEN_DC_LUT_WRITE_EN_MASK
+ radeon_crtc
->crtc_offset
, 0x00000007);
134 WREG32(EVERGREEN_DC_LUT_RW_INDEX
+ radeon_crtc
->crtc_offset
, 0);
135 for (i
= 0; i
< 256; i
++) {
136 WREG32(EVERGREEN_DC_LUT_30_COLOR
+ radeon_crtc
->crtc_offset
,
137 (radeon_crtc
->lut_r
[i
] << 20) |
138 (radeon_crtc
->lut_g
[i
] << 10) |
139 (radeon_crtc
->lut_b
[i
] << 0));
142 WREG32(NI_DEGAMMA_CONTROL
+ radeon_crtc
->crtc_offset
,
143 (NI_GRPH_DEGAMMA_MODE(NI_DEGAMMA_BYPASS
) |
144 NI_OVL_DEGAMMA_MODE(NI_DEGAMMA_BYPASS
) |
145 NI_ICON_DEGAMMA_MODE(NI_DEGAMMA_BYPASS
) |
146 NI_CURSOR_DEGAMMA_MODE(NI_DEGAMMA_BYPASS
)));
147 WREG32(NI_GAMUT_REMAP_CONTROL
+ radeon_crtc
->crtc_offset
,
148 (NI_GRPH_GAMUT_REMAP_MODE(NI_GAMUT_REMAP_BYPASS
) |
149 NI_OVL_GAMUT_REMAP_MODE(NI_GAMUT_REMAP_BYPASS
)));
150 WREG32(NI_REGAMMA_CONTROL
+ radeon_crtc
->crtc_offset
,
151 (NI_GRPH_REGAMMA_MODE(NI_REGAMMA_BYPASS
) |
152 NI_OVL_REGAMMA_MODE(NI_REGAMMA_BYPASS
)));
153 WREG32(NI_OUTPUT_CSC_CONTROL
+ radeon_crtc
->crtc_offset
,
154 (NI_OUTPUT_CSC_GRPH_MODE(NI_OUTPUT_CSC_BYPASS
) |
155 NI_OUTPUT_CSC_OVL_MODE(NI_OUTPUT_CSC_BYPASS
)));
156 /* XXX match this to the depth of the crtc fmt block, move to modeset? */
157 WREG32(0x6940 + radeon_crtc
->crtc_offset
, 0);
161 static void legacy_crtc_load_lut(struct drm_crtc
*crtc
)
163 struct radeon_crtc
*radeon_crtc
= to_radeon_crtc(crtc
);
164 struct drm_device
*dev
= crtc
->dev
;
165 struct radeon_device
*rdev
= dev
->dev_private
;
169 dac2_cntl
= RREG32(RADEON_DAC_CNTL2
);
170 if (radeon_crtc
->crtc_id
== 0)
171 dac2_cntl
&= (uint32_t)~RADEON_DAC2_PALETTE_ACC_CTL
;
173 dac2_cntl
|= RADEON_DAC2_PALETTE_ACC_CTL
;
174 WREG32(RADEON_DAC_CNTL2
, dac2_cntl
);
176 WREG8(RADEON_PALETTE_INDEX
, 0);
177 for (i
= 0; i
< 256; i
++) {
178 WREG32(RADEON_PALETTE_30_DATA
,
179 (radeon_crtc
->lut_r
[i
] << 20) |
180 (radeon_crtc
->lut_g
[i
] << 10) |
181 (radeon_crtc
->lut_b
[i
] << 0));
185 void radeon_crtc_load_lut(struct drm_crtc
*crtc
)
187 struct drm_device
*dev
= crtc
->dev
;
188 struct radeon_device
*rdev
= dev
->dev_private
;
193 if (ASIC_IS_DCE5(rdev
))
194 dce5_crtc_load_lut(crtc
);
195 else if (ASIC_IS_DCE4(rdev
))
196 dce4_crtc_load_lut(crtc
);
197 else if (ASIC_IS_AVIVO(rdev
))
198 avivo_crtc_load_lut(crtc
);
200 legacy_crtc_load_lut(crtc
);
203 /** Sets the color ramps on behalf of fbcon */
204 void radeon_crtc_fb_gamma_set(struct drm_crtc
*crtc
, u16 red
, u16 green
,
207 struct radeon_crtc
*radeon_crtc
= to_radeon_crtc(crtc
);
209 radeon_crtc
->lut_r
[regno
] = red
>> 6;
210 radeon_crtc
->lut_g
[regno
] = green
>> 6;
211 radeon_crtc
->lut_b
[regno
] = blue
>> 6;
214 /** Gets the color ramps on behalf of fbcon */
215 void radeon_crtc_fb_gamma_get(struct drm_crtc
*crtc
, u16
*red
, u16
*green
,
216 u16
*blue
, int regno
)
218 struct radeon_crtc
*radeon_crtc
= to_radeon_crtc(crtc
);
220 *red
= radeon_crtc
->lut_r
[regno
] << 6;
221 *green
= radeon_crtc
->lut_g
[regno
] << 6;
222 *blue
= radeon_crtc
->lut_b
[regno
] << 6;
225 static void radeon_crtc_gamma_set(struct drm_crtc
*crtc
, u16
*red
, u16
*green
,
226 u16
*blue
, uint32_t start
, uint32_t size
)
228 struct radeon_crtc
*radeon_crtc
= to_radeon_crtc(crtc
);
229 int end
= (start
+ size
> 256) ? 256 : start
+ size
, i
;
231 /* userspace palettes are always correct as is */
232 for (i
= start
; i
< end
; i
++) {
233 radeon_crtc
->lut_r
[i
] = red
[i
] >> 6;
234 radeon_crtc
->lut_g
[i
] = green
[i
] >> 6;
235 radeon_crtc
->lut_b
[i
] = blue
[i
] >> 6;
237 radeon_crtc_load_lut(crtc
);
240 static void radeon_crtc_destroy(struct drm_crtc
*crtc
)
242 struct radeon_crtc
*radeon_crtc
= to_radeon_crtc(crtc
);
244 drm_crtc_cleanup(crtc
);
249 * Handle unpin events outside the interrupt handler proper.
251 static void radeon_unpin_work_func(struct work_struct
*__work
)
253 struct radeon_unpin_work
*work
=
254 container_of(__work
, struct radeon_unpin_work
, work
);
257 /* unpin of the old buffer */
258 r
= radeon_bo_reserve(work
->old_rbo
, false);
259 if (likely(r
== 0)) {
260 r
= radeon_bo_unpin(work
->old_rbo
);
261 if (unlikely(r
!= 0)) {
262 DRM_ERROR("failed to unpin buffer after flip\n");
264 radeon_bo_unreserve(work
->old_rbo
);
266 DRM_ERROR("failed to reserve buffer after flip\n");
270 void radeon_crtc_handle_flip(struct radeon_device
*rdev
, int crtc_id
)
272 struct radeon_crtc
*radeon_crtc
= rdev
->mode_info
.crtcs
[crtc_id
];
273 struct radeon_unpin_work
*work
;
274 struct drm_pending_vblank_event
*e
;
280 spin_lock_irqsave(&rdev
->ddev
->event_lock
, flags
);
281 work
= radeon_crtc
->unpin_work
;
283 !radeon_fence_signaled(work
->fence
)) {
284 spin_unlock_irqrestore(&rdev
->ddev
->event_lock
, flags
);
287 /* New pageflip, or just completion of a previous one? */
288 if (!radeon_crtc
->deferred_flip_completion
) {
289 /* do the flip (mmio) */
290 update_pending
= radeon_page_flip(rdev
, crtc_id
, work
->new_crtc_base
);
292 /* This is just a completion of a flip queued in crtc
293 * at last invocation. Make sure we go directly to
294 * completion routine.
297 radeon_crtc
->deferred_flip_completion
= 0;
300 /* Has the pageflip already completed in crtc, or is it certain
301 * to complete in this vblank?
303 if (update_pending
&&
304 (DRM_SCANOUTPOS_VALID
& radeon_get_crtc_scanoutpos(rdev
->ddev
, crtc_id
,
307 (vpos
< (99 * rdev
->mode_info
.crtcs
[crtc_id
]->base
.hwmode
.crtc_vdisplay
)/100)) {
308 /* crtc didn't flip in this target vblank interval,
309 * but flip is pending in crtc. It will complete it
310 * in next vblank interval, so complete the flip at
313 radeon_crtc
->deferred_flip_completion
= 1;
314 spin_unlock_irqrestore(&rdev
->ddev
->event_lock
, flags
);
318 /* Pageflip (will be) certainly completed in this vblank. Clean up. */
319 radeon_crtc
->unpin_work
= NULL
;
321 /* wakeup userspace */
324 e
->event
.sequence
= drm_vblank_count_and_time(rdev
->ddev
, crtc_id
, &now
);
325 e
->event
.tv_sec
= now
.tv_sec
;
326 e
->event
.tv_usec
= now
.tv_usec
;
327 list_add_tail(&e
->base
.link
, &e
->base
.file_priv
->event_list
);
328 wake_up_interruptible(&e
->base
.file_priv
->event_wait
);
330 spin_unlock_irqrestore(&rdev
->ddev
->event_lock
, flags
);
332 drm_vblank_put(rdev
->ddev
, radeon_crtc
->crtc_id
);
333 radeon_fence_unref(&work
->fence
);
334 radeon_post_page_flip(work
->rdev
, work
->crtc_id
);
335 schedule_work(&work
->work
);
338 static int radeon_crtc_page_flip(struct drm_crtc
*crtc
,
339 struct drm_framebuffer
*fb
,
340 struct drm_pending_vblank_event
*event
)
342 struct drm_device
*dev
= crtc
->dev
;
343 struct radeon_device
*rdev
= dev
->dev_private
;
344 struct radeon_crtc
*radeon_crtc
= to_radeon_crtc(crtc
);
345 struct radeon_framebuffer
*old_radeon_fb
;
346 struct radeon_framebuffer
*new_radeon_fb
;
347 struct drm_gem_object
*obj
;
348 struct radeon_bo
*rbo
;
349 struct radeon_fence
*fence
;
350 struct radeon_unpin_work
*work
;
352 u32 tiling_flags
, pitch_pixels
;
356 work
= kzalloc(sizeof *work
, GFP_KERNEL
);
360 r
= radeon_fence_create(rdev
, &fence
);
361 if (unlikely(r
!= 0)) {
363 DRM_ERROR("flip queue: failed to create fence.\n");
368 work
->crtc_id
= radeon_crtc
->crtc_id
;
369 work
->fence
= radeon_fence_ref(fence
);
370 old_radeon_fb
= to_radeon_framebuffer(crtc
->fb
);
371 new_radeon_fb
= to_radeon_framebuffer(fb
);
372 /* schedule unpin of the old buffer */
373 obj
= old_radeon_fb
->obj
;
374 rbo
= gem_to_radeon_bo(obj
);
376 INIT_WORK(&work
->work
, radeon_unpin_work_func
);
378 /* We borrow the event spin lock for protecting unpin_work */
379 spin_lock_irqsave(&dev
->event_lock
, flags
);
380 if (radeon_crtc
->unpin_work
) {
381 spin_unlock_irqrestore(&dev
->event_lock
, flags
);
383 radeon_fence_unref(&fence
);
385 DRM_DEBUG_DRIVER("flip queue: crtc already busy\n");
388 radeon_crtc
->unpin_work
= work
;
389 radeon_crtc
->deferred_flip_completion
= 0;
390 spin_unlock_irqrestore(&dev
->event_lock
, flags
);
392 /* pin the new buffer */
393 obj
= new_radeon_fb
->obj
;
394 rbo
= gem_to_radeon_bo(obj
);
396 DRM_DEBUG_DRIVER("flip-ioctl() cur_fbo = %p, cur_bbo = %p\n",
399 r
= radeon_bo_reserve(rbo
, false);
400 if (unlikely(r
!= 0)) {
401 DRM_ERROR("failed to reserve new rbo buffer before flip\n");
404 r
= radeon_bo_pin(rbo
, RADEON_GEM_DOMAIN_VRAM
, &base
);
405 if (unlikely(r
!= 0)) {
406 radeon_bo_unreserve(rbo
);
408 DRM_ERROR("failed to pin new rbo buffer before flip\n");
411 radeon_bo_get_tiling_flags(rbo
, &tiling_flags
, NULL
);
412 radeon_bo_unreserve(rbo
);
414 if (!ASIC_IS_AVIVO(rdev
)) {
415 /* crtc offset is from display base addr not FB location */
416 base
-= radeon_crtc
->legacy_display_base_addr
;
417 pitch_pixels
= fb
->pitch
/ (fb
->bits_per_pixel
/ 8);
419 if (tiling_flags
& RADEON_TILING_MACRO
) {
420 if (ASIC_IS_R300(rdev
)) {
423 int byteshift
= fb
->bits_per_pixel
>> 4;
424 int tile_addr
= (((crtc
->y
>> 3) * pitch_pixels
+ crtc
->x
) >> (8 - byteshift
)) << 11;
425 base
+= tile_addr
+ ((crtc
->x
<< byteshift
) % 256) + ((crtc
->y
% 8) << 8);
428 int offset
= crtc
->y
* pitch_pixels
+ crtc
->x
;
429 switch (fb
->bits_per_pixel
) {
450 spin_lock_irqsave(&dev
->event_lock
, flags
);
451 work
->new_crtc_base
= base
;
452 spin_unlock_irqrestore(&dev
->event_lock
, flags
);
457 r
= drm_vblank_get(dev
, radeon_crtc
->crtc_id
);
459 DRM_ERROR("failed to get vblank before flip\n");
463 /* 32 ought to cover us */
464 r
= radeon_ring_lock(rdev
, 32);
466 DRM_ERROR("failed to lock the ring before flip\n");
471 radeon_fence_emit(rdev
, fence
);
472 /* set the proper interrupt */
473 radeon_pre_page_flip(rdev
, radeon_crtc
->crtc_id
);
475 radeon_ring_unlock_commit(rdev
);
480 drm_vblank_put(dev
, radeon_crtc
->crtc_id
);
483 r
= radeon_bo_reserve(rbo
, false);
484 if (unlikely(r
!= 0)) {
485 DRM_ERROR("failed to reserve new rbo in error path\n");
488 r
= radeon_bo_unpin(rbo
);
489 if (unlikely(r
!= 0)) {
490 radeon_bo_unreserve(rbo
);
492 DRM_ERROR("failed to unpin new rbo in error path\n");
495 radeon_bo_unreserve(rbo
);
498 spin_lock_irqsave(&dev
->event_lock
, flags
);
499 radeon_crtc
->unpin_work
= NULL
;
500 spin_unlock_irqrestore(&dev
->event_lock
, flags
);
501 radeon_fence_unref(&fence
);
507 static const struct drm_crtc_funcs radeon_crtc_funcs
= {
508 .cursor_set
= radeon_crtc_cursor_set
,
509 .cursor_move
= radeon_crtc_cursor_move
,
510 .gamma_set
= radeon_crtc_gamma_set
,
511 .set_config
= drm_crtc_helper_set_config
,
512 .destroy
= radeon_crtc_destroy
,
513 .page_flip
= radeon_crtc_page_flip
,
516 static void radeon_crtc_init(struct drm_device
*dev
, int index
)
518 struct radeon_device
*rdev
= dev
->dev_private
;
519 struct radeon_crtc
*radeon_crtc
;
522 radeon_crtc
= kzalloc(sizeof(struct radeon_crtc
) + (RADEONFB_CONN_LIMIT
* sizeof(struct drm_connector
*)), GFP_KERNEL
);
523 if (radeon_crtc
== NULL
)
526 drm_crtc_init(dev
, &radeon_crtc
->base
, &radeon_crtc_funcs
);
528 drm_mode_crtc_set_gamma_size(&radeon_crtc
->base
, 256);
529 radeon_crtc
->crtc_id
= index
;
530 rdev
->mode_info
.crtcs
[index
] = radeon_crtc
;
533 radeon_crtc
->mode_set
.crtc
= &radeon_crtc
->base
;
534 radeon_crtc
->mode_set
.connectors
= (struct drm_connector
**)(radeon_crtc
+ 1);
535 radeon_crtc
->mode_set
.num_connectors
= 0;
538 for (i
= 0; i
< 256; i
++) {
539 radeon_crtc
->lut_r
[i
] = i
<< 2;
540 radeon_crtc
->lut_g
[i
] = i
<< 2;
541 radeon_crtc
->lut_b
[i
] = i
<< 2;
544 if (rdev
->is_atom_bios
&& (ASIC_IS_AVIVO(rdev
) || radeon_r4xx_atom
))
545 radeon_atombios_init_crtc(dev
, radeon_crtc
);
547 radeon_legacy_init_crtc(dev
, radeon_crtc
);
550 static const char *encoder_names
[36] = {
570 "INTERNAL_KLDSCP_TMDS1",
571 "INTERNAL_KLDSCP_DVO1",
572 "INTERNAL_KLDSCP_DAC1",
573 "INTERNAL_KLDSCP_DAC2",
582 "INTERNAL_KLDSCP_LVTMA",
589 static const char *connector_names
[15] = {
607 static const char *hpd_names
[6] = {
616 static void radeon_print_display_setup(struct drm_device
*dev
)
618 struct drm_connector
*connector
;
619 struct radeon_connector
*radeon_connector
;
620 struct drm_encoder
*encoder
;
621 struct radeon_encoder
*radeon_encoder
;
625 DRM_INFO("Radeon Display Connectors\n");
626 list_for_each_entry(connector
, &dev
->mode_config
.connector_list
, head
) {
627 radeon_connector
= to_radeon_connector(connector
);
628 DRM_INFO("Connector %d:\n", i
);
629 DRM_INFO(" %s\n", connector_names
[connector
->connector_type
]);
630 if (radeon_connector
->hpd
.hpd
!= RADEON_HPD_NONE
)
631 DRM_INFO(" %s\n", hpd_names
[radeon_connector
->hpd
.hpd
]);
632 if (radeon_connector
->ddc_bus
) {
633 DRM_INFO(" DDC: 0x%x 0x%x 0x%x 0x%x 0x%x 0x%x 0x%x 0x%x\n",
634 radeon_connector
->ddc_bus
->rec
.mask_clk_reg
,
635 radeon_connector
->ddc_bus
->rec
.mask_data_reg
,
636 radeon_connector
->ddc_bus
->rec
.a_clk_reg
,
637 radeon_connector
->ddc_bus
->rec
.a_data_reg
,
638 radeon_connector
->ddc_bus
->rec
.en_clk_reg
,
639 radeon_connector
->ddc_bus
->rec
.en_data_reg
,
640 radeon_connector
->ddc_bus
->rec
.y_clk_reg
,
641 radeon_connector
->ddc_bus
->rec
.y_data_reg
);
642 if (radeon_connector
->router
.ddc_valid
)
643 DRM_INFO(" DDC Router 0x%x/0x%x\n",
644 radeon_connector
->router
.ddc_mux_control_pin
,
645 radeon_connector
->router
.ddc_mux_state
);
646 if (radeon_connector
->router
.cd_valid
)
647 DRM_INFO(" Clock/Data Router 0x%x/0x%x\n",
648 radeon_connector
->router
.cd_mux_control_pin
,
649 radeon_connector
->router
.cd_mux_state
);
651 if (connector
->connector_type
== DRM_MODE_CONNECTOR_VGA
||
652 connector
->connector_type
== DRM_MODE_CONNECTOR_DVII
||
653 connector
->connector_type
== DRM_MODE_CONNECTOR_DVID
||
654 connector
->connector_type
== DRM_MODE_CONNECTOR_DVIA
||
655 connector
->connector_type
== DRM_MODE_CONNECTOR_HDMIA
||
656 connector
->connector_type
== DRM_MODE_CONNECTOR_HDMIB
)
657 DRM_INFO(" DDC: no ddc bus - possible BIOS bug - please report to xorg-driver-ati@lists.x.org\n");
659 DRM_INFO(" Encoders:\n");
660 list_for_each_entry(encoder
, &dev
->mode_config
.encoder_list
, head
) {
661 radeon_encoder
= to_radeon_encoder(encoder
);
662 devices
= radeon_encoder
->devices
& radeon_connector
->devices
;
664 if (devices
& ATOM_DEVICE_CRT1_SUPPORT
)
665 DRM_INFO(" CRT1: %s\n", encoder_names
[radeon_encoder
->encoder_id
]);
666 if (devices
& ATOM_DEVICE_CRT2_SUPPORT
)
667 DRM_INFO(" CRT2: %s\n", encoder_names
[radeon_encoder
->encoder_id
]);
668 if (devices
& ATOM_DEVICE_LCD1_SUPPORT
)
669 DRM_INFO(" LCD1: %s\n", encoder_names
[radeon_encoder
->encoder_id
]);
670 if (devices
& ATOM_DEVICE_DFP1_SUPPORT
)
671 DRM_INFO(" DFP1: %s\n", encoder_names
[radeon_encoder
->encoder_id
]);
672 if (devices
& ATOM_DEVICE_DFP2_SUPPORT
)
673 DRM_INFO(" DFP2: %s\n", encoder_names
[radeon_encoder
->encoder_id
]);
674 if (devices
& ATOM_DEVICE_DFP3_SUPPORT
)
675 DRM_INFO(" DFP3: %s\n", encoder_names
[radeon_encoder
->encoder_id
]);
676 if (devices
& ATOM_DEVICE_DFP4_SUPPORT
)
677 DRM_INFO(" DFP4: %s\n", encoder_names
[radeon_encoder
->encoder_id
]);
678 if (devices
& ATOM_DEVICE_DFP5_SUPPORT
)
679 DRM_INFO(" DFP5: %s\n", encoder_names
[radeon_encoder
->encoder_id
]);
680 if (devices
& ATOM_DEVICE_DFP6_SUPPORT
)
681 DRM_INFO(" DFP6: %s\n", encoder_names
[radeon_encoder
->encoder_id
]);
682 if (devices
& ATOM_DEVICE_TV1_SUPPORT
)
683 DRM_INFO(" TV1: %s\n", encoder_names
[radeon_encoder
->encoder_id
]);
684 if (devices
& ATOM_DEVICE_CV_SUPPORT
)
685 DRM_INFO(" CV: %s\n", encoder_names
[radeon_encoder
->encoder_id
]);
692 static bool radeon_setup_enc_conn(struct drm_device
*dev
)
694 struct radeon_device
*rdev
= dev
->dev_private
;
695 struct drm_connector
*drm_connector
;
699 if (rdev
->is_atom_bios
) {
700 ret
= radeon_get_atom_connector_info_from_supported_devices_table(dev
);
702 ret
= radeon_get_atom_connector_info_from_object_table(dev
);
704 ret
= radeon_get_legacy_connector_info_from_bios(dev
);
706 ret
= radeon_get_legacy_connector_info_from_table(dev
);
709 if (!ASIC_IS_AVIVO(rdev
))
710 ret
= radeon_get_legacy_connector_info_from_table(dev
);
713 radeon_setup_encoder_clones(dev
);
714 radeon_print_display_setup(dev
);
715 list_for_each_entry(drm_connector
, &dev
->mode_config
.connector_list
, head
)
716 radeon_ddc_dump(drm_connector
);
722 int radeon_ddc_get_modes(struct radeon_connector
*radeon_connector
)
724 struct drm_device
*dev
= radeon_connector
->base
.dev
;
725 struct radeon_device
*rdev
= dev
->dev_private
;
728 /* on hw with routers, select right port */
729 if (radeon_connector
->router
.ddc_valid
)
730 radeon_router_select_ddc_port(radeon_connector
);
732 if ((radeon_connector
->base
.connector_type
== DRM_MODE_CONNECTOR_DisplayPort
) ||
733 (radeon_connector
->base
.connector_type
== DRM_MODE_CONNECTOR_eDP
)) {
734 struct radeon_connector_atom_dig
*dig
= radeon_connector
->con_priv
;
735 if ((dig
->dp_sink_type
== CONNECTOR_OBJECT_ID_DISPLAYPORT
||
736 dig
->dp_sink_type
== CONNECTOR_OBJECT_ID_eDP
) && dig
->dp_i2c_bus
)
737 radeon_connector
->edid
= drm_get_edid(&radeon_connector
->base
, &dig
->dp_i2c_bus
->adapter
);
739 if (!radeon_connector
->ddc_bus
)
741 if (!radeon_connector
->edid
) {
742 radeon_connector
->edid
= drm_get_edid(&radeon_connector
->base
, &radeon_connector
->ddc_bus
->adapter
);
745 if (!radeon_connector
->edid
) {
746 if (rdev
->is_atom_bios
) {
747 /* some laptops provide a hardcoded edid in rom for LCDs */
748 if (((radeon_connector
->base
.connector_type
== DRM_MODE_CONNECTOR_LVDS
) ||
749 (radeon_connector
->base
.connector_type
== DRM_MODE_CONNECTOR_eDP
)))
750 radeon_connector
->edid
= radeon_bios_get_hardcoded_edid(rdev
);
752 /* some servers provide a hardcoded edid in rom for KVMs */
753 radeon_connector
->edid
= radeon_bios_get_hardcoded_edid(rdev
);
755 if (radeon_connector
->edid
) {
756 drm_mode_connector_update_edid_property(&radeon_connector
->base
, radeon_connector
->edid
);
757 ret
= drm_add_edid_modes(&radeon_connector
->base
, radeon_connector
->edid
);
760 drm_mode_connector_update_edid_property(&radeon_connector
->base
, NULL
);
764 static int radeon_ddc_dump(struct drm_connector
*connector
)
767 struct radeon_connector
*radeon_connector
= to_radeon_connector(connector
);
770 /* on hw with routers, select right port */
771 if (radeon_connector
->router
.ddc_valid
)
772 radeon_router_select_ddc_port(radeon_connector
);
774 if (!radeon_connector
->ddc_bus
)
776 edid
= drm_get_edid(connector
, &radeon_connector
->ddc_bus
->adapter
);
784 static void avivo_get_fb_div(struct radeon_pll
*pll
,
791 u32 tmp
= post_div
* ref_div
;
794 *fb_div
= tmp
/ pll
->reference_freq
;
795 *frac_fb_div
= tmp
% pll
->reference_freq
;
797 if (*fb_div
> pll
->max_feedback_div
)
798 *fb_div
= pll
->max_feedback_div
;
799 else if (*fb_div
< pll
->min_feedback_div
)
800 *fb_div
= pll
->min_feedback_div
;
803 static u32
avivo_get_post_div(struct radeon_pll
*pll
,
806 u32 vco
, post_div
, tmp
;
808 if (pll
->flags
& RADEON_PLL_USE_POST_DIV
)
809 return pll
->post_div
;
811 if (pll
->flags
& RADEON_PLL_PREFER_MINM_OVER_MAXP
) {
812 if (pll
->flags
& RADEON_PLL_IS_LCD
)
813 vco
= pll
->lcd_pll_out_min
;
815 vco
= pll
->pll_out_min
;
817 if (pll
->flags
& RADEON_PLL_IS_LCD
)
818 vco
= pll
->lcd_pll_out_max
;
820 vco
= pll
->pll_out_max
;
823 post_div
= vco
/ target_clock
;
824 tmp
= vco
% target_clock
;
826 if (pll
->flags
& RADEON_PLL_PREFER_MINM_OVER_MAXP
) {
834 if (post_div
> pll
->max_post_div
)
835 post_div
= pll
->max_post_div
;
836 else if (post_div
< pll
->min_post_div
)
837 post_div
= pll
->min_post_div
;
842 #define MAX_TOLERANCE 10
844 void radeon_compute_pll_avivo(struct radeon_pll
*pll
,
852 u32 target_clock
= freq
/ 10;
853 u32 post_div
= avivo_get_post_div(pll
, target_clock
);
854 u32 ref_div
= pll
->min_ref_div
;
855 u32 fb_div
= 0, frac_fb_div
= 0, tmp
;
857 if (pll
->flags
& RADEON_PLL_USE_REF_DIV
)
858 ref_div
= pll
->reference_div
;
860 if (pll
->flags
& RADEON_PLL_USE_FRAC_FB_DIV
) {
861 avivo_get_fb_div(pll
, target_clock
, post_div
, ref_div
, &fb_div
, &frac_fb_div
);
862 frac_fb_div
= (100 * frac_fb_div
) / pll
->reference_freq
;
863 if (frac_fb_div
>= 5) {
865 frac_fb_div
= frac_fb_div
/ 10;
868 if (frac_fb_div
>= 10) {
873 while (ref_div
<= pll
->max_ref_div
) {
874 avivo_get_fb_div(pll
, target_clock
, post_div
, ref_div
,
875 &fb_div
, &frac_fb_div
);
876 if (frac_fb_div
>= (pll
->reference_freq
/ 2))
879 tmp
= (pll
->reference_freq
* fb_div
) / (post_div
* ref_div
);
880 tmp
= (tmp
* 10000) / target_clock
;
882 if (tmp
> (10000 + MAX_TOLERANCE
))
884 else if (tmp
>= (10000 - MAX_TOLERANCE
))
891 *dot_clock_p
= ((pll
->reference_freq
* fb_div
* 10) + (pll
->reference_freq
* frac_fb_div
)) /
892 (ref_div
* post_div
* 10);
894 *frac_fb_div_p
= frac_fb_div
;
895 *ref_div_p
= ref_div
;
896 *post_div_p
= post_div
;
897 DRM_DEBUG_KMS("%d, pll dividers - fb: %d.%d ref: %d, post %d\n",
898 *dot_clock_p
, fb_div
, frac_fb_div
, ref_div
, post_div
);
902 static inline uint32_t radeon_div(uint64_t n
, uint32_t d
)
912 void radeon_compute_pll_legacy(struct radeon_pll
*pll
,
914 uint32_t *dot_clock_p
,
916 uint32_t *frac_fb_div_p
,
918 uint32_t *post_div_p
)
920 uint32_t min_ref_div
= pll
->min_ref_div
;
921 uint32_t max_ref_div
= pll
->max_ref_div
;
922 uint32_t min_post_div
= pll
->min_post_div
;
923 uint32_t max_post_div
= pll
->max_post_div
;
924 uint32_t min_fractional_feed_div
= 0;
925 uint32_t max_fractional_feed_div
= 0;
926 uint32_t best_vco
= pll
->best_vco
;
927 uint32_t best_post_div
= 1;
928 uint32_t best_ref_div
= 1;
929 uint32_t best_feedback_div
= 1;
930 uint32_t best_frac_feedback_div
= 0;
931 uint32_t best_freq
= -1;
932 uint32_t best_error
= 0xffffffff;
933 uint32_t best_vco_diff
= 1;
935 u32 pll_out_min
, pll_out_max
;
937 DRM_DEBUG_KMS("PLL freq %llu %u %u\n", freq
, pll
->min_ref_div
, pll
->max_ref_div
);
940 if (pll
->flags
& RADEON_PLL_IS_LCD
) {
941 pll_out_min
= pll
->lcd_pll_out_min
;
942 pll_out_max
= pll
->lcd_pll_out_max
;
944 pll_out_min
= pll
->pll_out_min
;
945 pll_out_max
= pll
->pll_out_max
;
948 if (pll_out_min
> 64800)
951 if (pll
->flags
& RADEON_PLL_USE_REF_DIV
)
952 min_ref_div
= max_ref_div
= pll
->reference_div
;
954 while (min_ref_div
< max_ref_div
-1) {
955 uint32_t mid
= (min_ref_div
+ max_ref_div
) / 2;
956 uint32_t pll_in
= pll
->reference_freq
/ mid
;
957 if (pll_in
< pll
->pll_in_min
)
959 else if (pll_in
> pll
->pll_in_max
)
966 if (pll
->flags
& RADEON_PLL_USE_POST_DIV
)
967 min_post_div
= max_post_div
= pll
->post_div
;
969 if (pll
->flags
& RADEON_PLL_USE_FRAC_FB_DIV
) {
970 min_fractional_feed_div
= pll
->min_frac_feedback_div
;
971 max_fractional_feed_div
= pll
->max_frac_feedback_div
;
974 for (post_div
= max_post_div
; post_div
>= min_post_div
; --post_div
) {
977 if ((pll
->flags
& RADEON_PLL_NO_ODD_POST_DIV
) && (post_div
& 1))
980 /* legacy radeons only have a few post_divs */
981 if (pll
->flags
& RADEON_PLL_LEGACY
) {
982 if ((post_div
== 5) ||
993 for (ref_div
= min_ref_div
; ref_div
<= max_ref_div
; ++ref_div
) {
994 uint32_t feedback_div
, current_freq
= 0, error
, vco_diff
;
995 uint32_t pll_in
= pll
->reference_freq
/ ref_div
;
996 uint32_t min_feed_div
= pll
->min_feedback_div
;
997 uint32_t max_feed_div
= pll
->max_feedback_div
+ 1;
999 if (pll_in
< pll
->pll_in_min
|| pll_in
> pll
->pll_in_max
)
1002 while (min_feed_div
< max_feed_div
) {
1004 uint32_t min_frac_feed_div
= min_fractional_feed_div
;
1005 uint32_t max_frac_feed_div
= max_fractional_feed_div
+ 1;
1006 uint32_t frac_feedback_div
;
1009 feedback_div
= (min_feed_div
+ max_feed_div
) / 2;
1011 tmp
= (uint64_t)pll
->reference_freq
* feedback_div
;
1012 vco
= radeon_div(tmp
, ref_div
);
1014 if (vco
< pll_out_min
) {
1015 min_feed_div
= feedback_div
+ 1;
1017 } else if (vco
> pll_out_max
) {
1018 max_feed_div
= feedback_div
;
1022 while (min_frac_feed_div
< max_frac_feed_div
) {
1023 frac_feedback_div
= (min_frac_feed_div
+ max_frac_feed_div
) / 2;
1024 tmp
= (uint64_t)pll
->reference_freq
* 10000 * feedback_div
;
1025 tmp
+= (uint64_t)pll
->reference_freq
* 1000 * frac_feedback_div
;
1026 current_freq
= radeon_div(tmp
, ref_div
* post_div
);
1028 if (pll
->flags
& RADEON_PLL_PREFER_CLOSEST_LOWER
) {
1029 if (freq
< current_freq
)
1032 error
= freq
- current_freq
;
1034 error
= abs(current_freq
- freq
);
1035 vco_diff
= abs(vco
- best_vco
);
1037 if ((best_vco
== 0 && error
< best_error
) ||
1039 ((best_error
> 100 && error
< best_error
- 100) ||
1040 (abs(error
- best_error
) < 100 && vco_diff
< best_vco_diff
)))) {
1041 best_post_div
= post_div
;
1042 best_ref_div
= ref_div
;
1043 best_feedback_div
= feedback_div
;
1044 best_frac_feedback_div
= frac_feedback_div
;
1045 best_freq
= current_freq
;
1047 best_vco_diff
= vco_diff
;
1048 } else if (current_freq
== freq
) {
1049 if (best_freq
== -1) {
1050 best_post_div
= post_div
;
1051 best_ref_div
= ref_div
;
1052 best_feedback_div
= feedback_div
;
1053 best_frac_feedback_div
= frac_feedback_div
;
1054 best_freq
= current_freq
;
1056 best_vco_diff
= vco_diff
;
1057 } else if (((pll
->flags
& RADEON_PLL_PREFER_LOW_REF_DIV
) && (ref_div
< best_ref_div
)) ||
1058 ((pll
->flags
& RADEON_PLL_PREFER_HIGH_REF_DIV
) && (ref_div
> best_ref_div
)) ||
1059 ((pll
->flags
& RADEON_PLL_PREFER_LOW_FB_DIV
) && (feedback_div
< best_feedback_div
)) ||
1060 ((pll
->flags
& RADEON_PLL_PREFER_HIGH_FB_DIV
) && (feedback_div
> best_feedback_div
)) ||
1061 ((pll
->flags
& RADEON_PLL_PREFER_LOW_POST_DIV
) && (post_div
< best_post_div
)) ||
1062 ((pll
->flags
& RADEON_PLL_PREFER_HIGH_POST_DIV
) && (post_div
> best_post_div
))) {
1063 best_post_div
= post_div
;
1064 best_ref_div
= ref_div
;
1065 best_feedback_div
= feedback_div
;
1066 best_frac_feedback_div
= frac_feedback_div
;
1067 best_freq
= current_freq
;
1069 best_vco_diff
= vco_diff
;
1072 if (current_freq
< freq
)
1073 min_frac_feed_div
= frac_feedback_div
+ 1;
1075 max_frac_feed_div
= frac_feedback_div
;
1077 if (current_freq
< freq
)
1078 min_feed_div
= feedback_div
+ 1;
1080 max_feed_div
= feedback_div
;
1085 *dot_clock_p
= best_freq
/ 10000;
1086 *fb_div_p
= best_feedback_div
;
1087 *frac_fb_div_p
= best_frac_feedback_div
;
1088 *ref_div_p
= best_ref_div
;
1089 *post_div_p
= best_post_div
;
1090 DRM_DEBUG_KMS("%lld %d, pll dividers - fb: %d.%d ref: %d, post %d\n",
1092 best_freq
/ 1000, best_feedback_div
, best_frac_feedback_div
,
1093 best_ref_div
, best_post_div
);
1097 static void radeon_user_framebuffer_destroy(struct drm_framebuffer
*fb
)
1099 struct radeon_framebuffer
*radeon_fb
= to_radeon_framebuffer(fb
);
1101 if (radeon_fb
->obj
) {
1102 drm_gem_object_unreference_unlocked(radeon_fb
->obj
);
1104 drm_framebuffer_cleanup(fb
);
1108 static int radeon_user_framebuffer_create_handle(struct drm_framebuffer
*fb
,
1109 struct drm_file
*file_priv
,
1110 unsigned int *handle
)
1112 struct radeon_framebuffer
*radeon_fb
= to_radeon_framebuffer(fb
);
1114 return drm_gem_handle_create(file_priv
, radeon_fb
->obj
, handle
);
1117 static const struct drm_framebuffer_funcs radeon_fb_funcs
= {
1118 .destroy
= radeon_user_framebuffer_destroy
,
1119 .create_handle
= radeon_user_framebuffer_create_handle
,
1123 radeon_framebuffer_init(struct drm_device
*dev
,
1124 struct radeon_framebuffer
*rfb
,
1125 struct drm_mode_fb_cmd
*mode_cmd
,
1126 struct drm_gem_object
*obj
)
1129 drm_framebuffer_init(dev
, &rfb
->base
, &radeon_fb_funcs
);
1130 drm_helper_mode_fill_fb_struct(&rfb
->base
, mode_cmd
);
1133 static struct drm_framebuffer
*
1134 radeon_user_framebuffer_create(struct drm_device
*dev
,
1135 struct drm_file
*file_priv
,
1136 struct drm_mode_fb_cmd
*mode_cmd
)
1138 struct drm_gem_object
*obj
;
1139 struct radeon_framebuffer
*radeon_fb
;
1141 obj
= drm_gem_object_lookup(dev
, file_priv
, mode_cmd
->handle
);
1143 dev_err(&dev
->pdev
->dev
, "No GEM object associated to handle 0x%08X, "
1144 "can't create framebuffer\n", mode_cmd
->handle
);
1145 return ERR_PTR(-ENOENT
);
1148 radeon_fb
= kzalloc(sizeof(*radeon_fb
), GFP_KERNEL
);
1149 if (radeon_fb
== NULL
)
1150 return ERR_PTR(-ENOMEM
);
1152 radeon_framebuffer_init(dev
, radeon_fb
, mode_cmd
, obj
);
1154 return &radeon_fb
->base
;
1157 static void radeon_output_poll_changed(struct drm_device
*dev
)
1159 struct radeon_device
*rdev
= dev
->dev_private
;
1160 radeon_fb_output_poll_changed(rdev
);
1163 static const struct drm_mode_config_funcs radeon_mode_funcs
= {
1164 .fb_create
= radeon_user_framebuffer_create
,
1165 .output_poll_changed
= radeon_output_poll_changed
1168 struct drm_prop_enum_list
{
1173 static struct drm_prop_enum_list radeon_tmds_pll_enum_list
[] =
1178 static struct drm_prop_enum_list radeon_tv_std_enum_list
[] =
1179 { { TV_STD_NTSC
, "ntsc" },
1180 { TV_STD_PAL
, "pal" },
1181 { TV_STD_PAL_M
, "pal-m" },
1182 { TV_STD_PAL_60
, "pal-60" },
1183 { TV_STD_NTSC_J
, "ntsc-j" },
1184 { TV_STD_SCART_PAL
, "scart-pal" },
1185 { TV_STD_PAL_CN
, "pal-cn" },
1186 { TV_STD_SECAM
, "secam" },
1189 static struct drm_prop_enum_list radeon_underscan_enum_list
[] =
1190 { { UNDERSCAN_OFF
, "off" },
1191 { UNDERSCAN_ON
, "on" },
1192 { UNDERSCAN_AUTO
, "auto" },
1195 static int radeon_modeset_create_props(struct radeon_device
*rdev
)
1199 if (rdev
->is_atom_bios
) {
1200 rdev
->mode_info
.coherent_mode_property
=
1201 drm_property_create(rdev
->ddev
,
1202 DRM_MODE_PROP_RANGE
,
1204 if (!rdev
->mode_info
.coherent_mode_property
)
1207 rdev
->mode_info
.coherent_mode_property
->values
[0] = 0;
1208 rdev
->mode_info
.coherent_mode_property
->values
[1] = 1;
1211 if (!ASIC_IS_AVIVO(rdev
)) {
1212 sz
= ARRAY_SIZE(radeon_tmds_pll_enum_list
);
1213 rdev
->mode_info
.tmds_pll_property
=
1214 drm_property_create(rdev
->ddev
,
1217 for (i
= 0; i
< sz
; i
++) {
1218 drm_property_add_enum(rdev
->mode_info
.tmds_pll_property
,
1220 radeon_tmds_pll_enum_list
[i
].type
,
1221 radeon_tmds_pll_enum_list
[i
].name
);
1225 rdev
->mode_info
.load_detect_property
=
1226 drm_property_create(rdev
->ddev
,
1227 DRM_MODE_PROP_RANGE
,
1228 "load detection", 2);
1229 if (!rdev
->mode_info
.load_detect_property
)
1231 rdev
->mode_info
.load_detect_property
->values
[0] = 0;
1232 rdev
->mode_info
.load_detect_property
->values
[1] = 1;
1234 drm_mode_create_scaling_mode_property(rdev
->ddev
);
1236 sz
= ARRAY_SIZE(radeon_tv_std_enum_list
);
1237 rdev
->mode_info
.tv_std_property
=
1238 drm_property_create(rdev
->ddev
,
1241 for (i
= 0; i
< sz
; i
++) {
1242 drm_property_add_enum(rdev
->mode_info
.tv_std_property
,
1244 radeon_tv_std_enum_list
[i
].type
,
1245 radeon_tv_std_enum_list
[i
].name
);
1248 sz
= ARRAY_SIZE(radeon_underscan_enum_list
);
1249 rdev
->mode_info
.underscan_property
=
1250 drm_property_create(rdev
->ddev
,
1253 for (i
= 0; i
< sz
; i
++) {
1254 drm_property_add_enum(rdev
->mode_info
.underscan_property
,
1256 radeon_underscan_enum_list
[i
].type
,
1257 radeon_underscan_enum_list
[i
].name
);
1260 rdev
->mode_info
.underscan_hborder_property
=
1261 drm_property_create(rdev
->ddev
,
1262 DRM_MODE_PROP_RANGE
,
1263 "underscan hborder", 2);
1264 if (!rdev
->mode_info
.underscan_hborder_property
)
1266 rdev
->mode_info
.underscan_hborder_property
->values
[0] = 0;
1267 rdev
->mode_info
.underscan_hborder_property
->values
[1] = 128;
1269 rdev
->mode_info
.underscan_vborder_property
=
1270 drm_property_create(rdev
->ddev
,
1271 DRM_MODE_PROP_RANGE
,
1272 "underscan vborder", 2);
1273 if (!rdev
->mode_info
.underscan_vborder_property
)
1275 rdev
->mode_info
.underscan_vborder_property
->values
[0] = 0;
1276 rdev
->mode_info
.underscan_vborder_property
->values
[1] = 128;
1281 void radeon_update_display_priority(struct radeon_device
*rdev
)
1283 /* adjustment options for the display watermarks */
1284 if ((radeon_disp_priority
== 0) || (radeon_disp_priority
> 2)) {
1285 /* set display priority to high for r3xx, rv515 chips
1286 * this avoids flickering due to underflow to the
1287 * display controllers during heavy acceleration.
1288 * Don't force high on rs4xx igp chips as it seems to
1289 * affect the sound card. See kernel bug 15982.
1291 if ((ASIC_IS_R300(rdev
) || (rdev
->family
== CHIP_RV515
)) &&
1292 !(rdev
->flags
& RADEON_IS_IGP
))
1293 rdev
->disp_priority
= 2;
1295 rdev
->disp_priority
= 0;
1297 rdev
->disp_priority
= radeon_disp_priority
;
1301 int radeon_modeset_init(struct radeon_device
*rdev
)
1306 drm_mode_config_init(rdev
->ddev
);
1307 rdev
->mode_info
.mode_config_initialized
= true;
1309 rdev
->ddev
->mode_config
.funcs
= (void *)&radeon_mode_funcs
;
1311 if (ASIC_IS_DCE5(rdev
)) {
1312 rdev
->ddev
->mode_config
.max_width
= 16384;
1313 rdev
->ddev
->mode_config
.max_height
= 16384;
1314 } else if (ASIC_IS_AVIVO(rdev
)) {
1315 rdev
->ddev
->mode_config
.max_width
= 8192;
1316 rdev
->ddev
->mode_config
.max_height
= 8192;
1318 rdev
->ddev
->mode_config
.max_width
= 4096;
1319 rdev
->ddev
->mode_config
.max_height
= 4096;
1322 rdev
->ddev
->mode_config
.fb_base
= rdev
->mc
.aper_base
;
1324 ret
= radeon_modeset_create_props(rdev
);
1329 /* init i2c buses */
1330 radeon_i2c_init(rdev
);
1332 /* check combios for a valid hardcoded EDID - Sun servers */
1333 if (!rdev
->is_atom_bios
) {
1334 /* check for hardcoded EDID in BIOS */
1335 radeon_combios_check_hardcoded_edid(rdev
);
1338 /* allocate crtcs */
1339 for (i
= 0; i
< rdev
->num_crtc
; i
++) {
1340 radeon_crtc_init(rdev
->ddev
, i
);
1343 /* okay we should have all the bios connectors */
1344 ret
= radeon_setup_enc_conn(rdev
->ddev
);
1350 if (rdev
->is_atom_bios
)
1351 radeon_atom_encoder_init(rdev
);
1353 /* initialize hpd */
1354 radeon_hpd_init(rdev
);
1356 /* Initialize power management */
1357 radeon_pm_init(rdev
);
1359 radeon_fbdev_init(rdev
);
1360 drm_kms_helper_poll_init(rdev
->ddev
);
1365 void radeon_modeset_fini(struct radeon_device
*rdev
)
1367 radeon_fbdev_fini(rdev
);
1368 kfree(rdev
->mode_info
.bios_hardcoded_edid
);
1369 radeon_pm_fini(rdev
);
1371 if (rdev
->mode_info
.mode_config_initialized
) {
1372 drm_kms_helper_poll_fini(rdev
->ddev
);
1373 radeon_hpd_fini(rdev
);
1374 drm_mode_config_cleanup(rdev
->ddev
);
1375 rdev
->mode_info
.mode_config_initialized
= false;
1377 /* free i2c buses */
1378 radeon_i2c_fini(rdev
);
1381 static bool is_hdtv_mode(struct drm_display_mode
*mode
)
1383 /* try and guess if this is a tv or a monitor */
1384 if ((mode
->vdisplay
== 480 && mode
->hdisplay
== 720) || /* 480p */
1385 (mode
->vdisplay
== 576) || /* 576p */
1386 (mode
->vdisplay
== 720) || /* 720p */
1387 (mode
->vdisplay
== 1080)) /* 1080p */
1393 bool radeon_crtc_scaling_mode_fixup(struct drm_crtc
*crtc
,
1394 struct drm_display_mode
*mode
,
1395 struct drm_display_mode
*adjusted_mode
)
1397 struct drm_device
*dev
= crtc
->dev
;
1398 struct radeon_device
*rdev
= dev
->dev_private
;
1399 struct drm_encoder
*encoder
;
1400 struct radeon_crtc
*radeon_crtc
= to_radeon_crtc(crtc
);
1401 struct radeon_encoder
*radeon_encoder
;
1402 struct drm_connector
*connector
;
1403 struct radeon_connector
*radeon_connector
;
1405 u32 src_v
= 1, dst_v
= 1;
1406 u32 src_h
= 1, dst_h
= 1;
1408 radeon_crtc
->h_border
= 0;
1409 radeon_crtc
->v_border
= 0;
1411 list_for_each_entry(encoder
, &dev
->mode_config
.encoder_list
, head
) {
1412 if (encoder
->crtc
!= crtc
)
1414 radeon_encoder
= to_radeon_encoder(encoder
);
1415 connector
= radeon_get_connector_for_encoder(encoder
);
1416 radeon_connector
= to_radeon_connector(connector
);
1420 if (radeon_encoder
->rmx_type
== RMX_OFF
)
1421 radeon_crtc
->rmx_type
= RMX_OFF
;
1422 else if (mode
->hdisplay
< radeon_encoder
->native_mode
.hdisplay
||
1423 mode
->vdisplay
< radeon_encoder
->native_mode
.vdisplay
)
1424 radeon_crtc
->rmx_type
= radeon_encoder
->rmx_type
;
1426 radeon_crtc
->rmx_type
= RMX_OFF
;
1427 /* copy native mode */
1428 memcpy(&radeon_crtc
->native_mode
,
1429 &radeon_encoder
->native_mode
,
1430 sizeof(struct drm_display_mode
));
1431 src_v
= crtc
->mode
.vdisplay
;
1432 dst_v
= radeon_crtc
->native_mode
.vdisplay
;
1433 src_h
= crtc
->mode
.hdisplay
;
1434 dst_h
= radeon_crtc
->native_mode
.hdisplay
;
1436 /* fix up for overscan on hdmi */
1437 if (ASIC_IS_AVIVO(rdev
) &&
1438 (!(mode
->flags
& DRM_MODE_FLAG_INTERLACE
)) &&
1439 ((radeon_encoder
->underscan_type
== UNDERSCAN_ON
) ||
1440 ((radeon_encoder
->underscan_type
== UNDERSCAN_AUTO
) &&
1441 drm_detect_hdmi_monitor(radeon_connector
->edid
) &&
1442 is_hdtv_mode(mode
)))) {
1443 if (radeon_encoder
->underscan_hborder
!= 0)
1444 radeon_crtc
->h_border
= radeon_encoder
->underscan_hborder
;
1446 radeon_crtc
->h_border
= (mode
->hdisplay
>> 5) + 16;
1447 if (radeon_encoder
->underscan_vborder
!= 0)
1448 radeon_crtc
->v_border
= radeon_encoder
->underscan_vborder
;
1450 radeon_crtc
->v_border
= (mode
->vdisplay
>> 5) + 16;
1451 radeon_crtc
->rmx_type
= RMX_FULL
;
1452 src_v
= crtc
->mode
.vdisplay
;
1453 dst_v
= crtc
->mode
.vdisplay
- (radeon_crtc
->v_border
* 2);
1454 src_h
= crtc
->mode
.hdisplay
;
1455 dst_h
= crtc
->mode
.hdisplay
- (radeon_crtc
->h_border
* 2);
1459 if (radeon_crtc
->rmx_type
!= radeon_encoder
->rmx_type
) {
1460 /* WARNING: Right now this can't happen but
1461 * in the future we need to check that scaling
1462 * are consistent across different encoder
1463 * (ie all encoder can work with the same
1466 DRM_ERROR("Scaling not consistent across encoder.\n");
1471 if (radeon_crtc
->rmx_type
!= RMX_OFF
) {
1473 a
.full
= dfixed_const(src_v
);
1474 b
.full
= dfixed_const(dst_v
);
1475 radeon_crtc
->vsc
.full
= dfixed_div(a
, b
);
1476 a
.full
= dfixed_const(src_h
);
1477 b
.full
= dfixed_const(dst_h
);
1478 radeon_crtc
->hsc
.full
= dfixed_div(a
, b
);
1480 radeon_crtc
->vsc
.full
= dfixed_const(1);
1481 radeon_crtc
->hsc
.full
= dfixed_const(1);
1487 * Retrieve current video scanout position of crtc on a given gpu.
1489 * \param dev Device to query.
1490 * \param crtc Crtc to query.
1491 * \param *vpos Location where vertical scanout position should be stored.
1492 * \param *hpos Location where horizontal scanout position should go.
1494 * Returns vpos as a positive number while in active scanout area.
1495 * Returns vpos as a negative number inside vblank, counting the number
1496 * of scanlines to go until end of vblank, e.g., -1 means "one scanline
1497 * until start of active scanout / end of vblank."
1499 * \return Flags, or'ed together as follows:
1501 * DRM_SCANOUTPOS_VALID = Query successful.
1502 * DRM_SCANOUTPOS_INVBL = Inside vblank.
1503 * DRM_SCANOUTPOS_ACCURATE = Returned position is accurate. A lack of
1504 * this flag means that returned position may be offset by a constant but
1505 * unknown small number of scanlines wrt. real scanout position.
1508 int radeon_get_crtc_scanoutpos(struct drm_device
*dev
, int crtc
, int *vpos
, int *hpos
)
1510 u32 stat_crtc
= 0, vbl
= 0, position
= 0;
1511 int vbl_start
, vbl_end
, vtotal
, ret
= 0;
1514 struct radeon_device
*rdev
= dev
->dev_private
;
1516 if (ASIC_IS_DCE4(rdev
)) {
1518 vbl
= RREG32(EVERGREEN_CRTC_V_BLANK_START_END
+
1519 EVERGREEN_CRTC0_REGISTER_OFFSET
);
1520 position
= RREG32(EVERGREEN_CRTC_STATUS_POSITION
+
1521 EVERGREEN_CRTC0_REGISTER_OFFSET
);
1522 ret
|= DRM_SCANOUTPOS_VALID
;
1525 vbl
= RREG32(EVERGREEN_CRTC_V_BLANK_START_END
+
1526 EVERGREEN_CRTC1_REGISTER_OFFSET
);
1527 position
= RREG32(EVERGREEN_CRTC_STATUS_POSITION
+
1528 EVERGREEN_CRTC1_REGISTER_OFFSET
);
1529 ret
|= DRM_SCANOUTPOS_VALID
;
1532 vbl
= RREG32(EVERGREEN_CRTC_V_BLANK_START_END
+
1533 EVERGREEN_CRTC2_REGISTER_OFFSET
);
1534 position
= RREG32(EVERGREEN_CRTC_STATUS_POSITION
+
1535 EVERGREEN_CRTC2_REGISTER_OFFSET
);
1536 ret
|= DRM_SCANOUTPOS_VALID
;
1539 vbl
= RREG32(EVERGREEN_CRTC_V_BLANK_START_END
+
1540 EVERGREEN_CRTC3_REGISTER_OFFSET
);
1541 position
= RREG32(EVERGREEN_CRTC_STATUS_POSITION
+
1542 EVERGREEN_CRTC3_REGISTER_OFFSET
);
1543 ret
|= DRM_SCANOUTPOS_VALID
;
1546 vbl
= RREG32(EVERGREEN_CRTC_V_BLANK_START_END
+
1547 EVERGREEN_CRTC4_REGISTER_OFFSET
);
1548 position
= RREG32(EVERGREEN_CRTC_STATUS_POSITION
+
1549 EVERGREEN_CRTC4_REGISTER_OFFSET
);
1550 ret
|= DRM_SCANOUTPOS_VALID
;
1553 vbl
= RREG32(EVERGREEN_CRTC_V_BLANK_START_END
+
1554 EVERGREEN_CRTC5_REGISTER_OFFSET
);
1555 position
= RREG32(EVERGREEN_CRTC_STATUS_POSITION
+
1556 EVERGREEN_CRTC5_REGISTER_OFFSET
);
1557 ret
|= DRM_SCANOUTPOS_VALID
;
1559 } else if (ASIC_IS_AVIVO(rdev
)) {
1561 vbl
= RREG32(AVIVO_D1CRTC_V_BLANK_START_END
);
1562 position
= RREG32(AVIVO_D1CRTC_STATUS_POSITION
);
1563 ret
|= DRM_SCANOUTPOS_VALID
;
1566 vbl
= RREG32(AVIVO_D2CRTC_V_BLANK_START_END
);
1567 position
= RREG32(AVIVO_D2CRTC_STATUS_POSITION
);
1568 ret
|= DRM_SCANOUTPOS_VALID
;
1571 /* Pre-AVIVO: Different encoding of scanout pos and vblank interval. */
1573 /* Assume vbl_end == 0, get vbl_start from
1576 vbl
= (RREG32(RADEON_CRTC_V_TOTAL_DISP
) &
1577 RADEON_CRTC_V_DISP
) >> RADEON_CRTC_V_DISP_SHIFT
;
1578 /* Only retrieve vpos from upper 16 bits, set hpos == 0. */
1579 position
= (RREG32(RADEON_CRTC_VLINE_CRNT_VLINE
) >> 16) & RADEON_CRTC_V_TOTAL
;
1580 stat_crtc
= RREG32(RADEON_CRTC_STATUS
);
1581 if (!(stat_crtc
& 1))
1584 ret
|= DRM_SCANOUTPOS_VALID
;
1587 vbl
= (RREG32(RADEON_CRTC2_V_TOTAL_DISP
) &
1588 RADEON_CRTC_V_DISP
) >> RADEON_CRTC_V_DISP_SHIFT
;
1589 position
= (RREG32(RADEON_CRTC2_VLINE_CRNT_VLINE
) >> 16) & RADEON_CRTC_V_TOTAL
;
1590 stat_crtc
= RREG32(RADEON_CRTC2_STATUS
);
1591 if (!(stat_crtc
& 1))
1594 ret
|= DRM_SCANOUTPOS_VALID
;
1598 /* Decode into vertical and horizontal scanout position. */
1599 *vpos
= position
& 0x1fff;
1600 *hpos
= (position
>> 16) & 0x1fff;
1602 /* Valid vblank area boundaries from gpu retrieved? */
1605 ret
|= DRM_SCANOUTPOS_ACCURATE
;
1606 vbl_start
= vbl
& 0x1fff;
1607 vbl_end
= (vbl
>> 16) & 0x1fff;
1610 /* No: Fake something reasonable which gives at least ok results. */
1611 vbl_start
= rdev
->mode_info
.crtcs
[crtc
]->base
.hwmode
.crtc_vdisplay
;
1615 /* Test scanout position against vblank region. */
1616 if ((*vpos
< vbl_start
) && (*vpos
>= vbl_end
))
1619 /* Check if inside vblank area and apply corrective offsets:
1620 * vpos will then be >=0 in video scanout area, but negative
1621 * within vblank area, counting down the number of lines until
1625 /* Inside "upper part" of vblank area? Apply corrective offset if so: */
1626 if (in_vbl
&& (*vpos
>= vbl_start
)) {
1627 vtotal
= rdev
->mode_info
.crtcs
[crtc
]->base
.hwmode
.crtc_vtotal
;
1628 *vpos
= *vpos
- vtotal
;
1631 /* Correct for shifted end of vbl at vbl_end. */
1632 *vpos
= *vpos
- vbl_end
;
1636 ret
|= DRM_SCANOUTPOS_INVBL
;