drm/nouveau: Pre-G80 tiling support.
/*
 * Copyright 2005 Stephane Marchesin
 * Copyright 2008 Stuart Bennett
 * All Rights Reserved.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
 * PRECISION INSIGHT AND/OR ITS SUPPLIERS BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
 * DEALINGS IN THE SOFTWARE.
 */

#include <linux/swab.h>
#include "drmP.h"
#include "drm.h"
#include "drm_sarea.h"
#include "drm_crtc_helper.h"
#include <linux/vgaarb.h>

#include "nouveau_drv.h"
#include "nouveau_drm.h"
#include "nv50_display.h"
static int nouveau_stub_init(struct drm_device *dev) { return 0; }
static void nouveau_stub_takedown(struct drm_device *dev) {}
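/* Fill in the per-generation engine function pointers, keyed on the high
 * nibble of the chipset ID (NV04/NV10/NV20/NV30/NV40/NV50 families).  The
 * rest of the driver only calls through dev_priv->engine.<subsys>.<op>,
 * e.g. engine->fb.init(dev), so this table is what selects the hardware
 * backend.  The fb/graph.set_region_tiling hooks on the NV10-NV40 cases
 * appear to be the entry points for the pre-G80 tiling support named in
 * the commit subject.
 */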
static int nouveau_init_engine_ptrs(struct drm_device *dev)
{
	struct drm_nouveau_private *dev_priv = dev->dev_private;
	struct nouveau_engine *engine = &dev_priv->engine;

	switch (dev_priv->chipset & 0xf0) {
	case 0x00:
		engine->instmem.init = nv04_instmem_init;
		engine->instmem.takedown = nv04_instmem_takedown;
		engine->instmem.suspend = nv04_instmem_suspend;
		engine->instmem.resume = nv04_instmem_resume;
		engine->instmem.populate = nv04_instmem_populate;
		engine->instmem.clear = nv04_instmem_clear;
		engine->instmem.bind = nv04_instmem_bind;
		engine->instmem.unbind = nv04_instmem_unbind;
		engine->instmem.prepare_access = nv04_instmem_prepare_access;
		engine->instmem.finish_access = nv04_instmem_finish_access;
		engine->mc.init = nv04_mc_init;
		engine->mc.takedown = nv04_mc_takedown;
		engine->timer.init = nv04_timer_init;
		engine->timer.read = nv04_timer_read;
		engine->timer.takedown = nv04_timer_takedown;
		engine->fb.init = nv04_fb_init;
		engine->fb.takedown = nv04_fb_takedown;
		engine->graph.grclass = nv04_graph_grclass;
		engine->graph.init = nv04_graph_init;
		engine->graph.takedown = nv04_graph_takedown;
		engine->graph.fifo_access = nv04_graph_fifo_access;
		engine->graph.channel = nv04_graph_channel;
		engine->graph.create_context = nv04_graph_create_context;
		engine->graph.destroy_context = nv04_graph_destroy_context;
		engine->graph.load_context = nv04_graph_load_context;
		engine->graph.unload_context = nv04_graph_unload_context;
		engine->fifo.channels = 16;
		engine->fifo.init = nv04_fifo_init;
		engine->fifo.takedown = nouveau_stub_takedown;
		engine->fifo.disable = nv04_fifo_disable;
		engine->fifo.enable = nv04_fifo_enable;
		engine->fifo.reassign = nv04_fifo_reassign;
		engine->fifo.cache_flush = nv04_fifo_cache_flush;
		engine->fifo.cache_pull = nv04_fifo_cache_pull;
		engine->fifo.channel_id = nv04_fifo_channel_id;
		engine->fifo.create_context = nv04_fifo_create_context;
		engine->fifo.destroy_context = nv04_fifo_destroy_context;
		engine->fifo.load_context = nv04_fifo_load_context;
		engine->fifo.unload_context = nv04_fifo_unload_context;
		break;
	case 0x10:
		engine->instmem.init = nv04_instmem_init;
		engine->instmem.takedown = nv04_instmem_takedown;
		engine->instmem.suspend = nv04_instmem_suspend;
		engine->instmem.resume = nv04_instmem_resume;
		engine->instmem.populate = nv04_instmem_populate;
		engine->instmem.clear = nv04_instmem_clear;
		engine->instmem.bind = nv04_instmem_bind;
		engine->instmem.unbind = nv04_instmem_unbind;
		engine->instmem.prepare_access = nv04_instmem_prepare_access;
		engine->instmem.finish_access = nv04_instmem_finish_access;
		engine->mc.init = nv04_mc_init;
		engine->mc.takedown = nv04_mc_takedown;
		engine->timer.init = nv04_timer_init;
		engine->timer.read = nv04_timer_read;
		engine->timer.takedown = nv04_timer_takedown;
		engine->fb.init = nv10_fb_init;
		engine->fb.takedown = nv10_fb_takedown;
		engine->fb.set_region_tiling = nv10_fb_set_region_tiling;
		engine->graph.grclass = nv10_graph_grclass;
		engine->graph.init = nv10_graph_init;
		engine->graph.takedown = nv10_graph_takedown;
		engine->graph.channel = nv10_graph_channel;
		engine->graph.create_context = nv10_graph_create_context;
		engine->graph.destroy_context = nv10_graph_destroy_context;
		engine->graph.fifo_access = nv04_graph_fifo_access;
		engine->graph.load_context = nv10_graph_load_context;
		engine->graph.unload_context = nv10_graph_unload_context;
		engine->graph.set_region_tiling = nv10_graph_set_region_tiling;
		engine->fifo.channels = 32;
		engine->fifo.init = nv10_fifo_init;
		engine->fifo.takedown = nouveau_stub_takedown;
		engine->fifo.disable = nv04_fifo_disable;
		engine->fifo.enable = nv04_fifo_enable;
		engine->fifo.reassign = nv04_fifo_reassign;
		engine->fifo.cache_flush = nv04_fifo_cache_flush;
		engine->fifo.cache_pull = nv04_fifo_cache_pull;
		engine->fifo.channel_id = nv10_fifo_channel_id;
		engine->fifo.create_context = nv10_fifo_create_context;
		engine->fifo.destroy_context = nv10_fifo_destroy_context;
		engine->fifo.load_context = nv10_fifo_load_context;
		engine->fifo.unload_context = nv10_fifo_unload_context;
		break;
	case 0x20:
		engine->instmem.init = nv04_instmem_init;
		engine->instmem.takedown = nv04_instmem_takedown;
		engine->instmem.suspend = nv04_instmem_suspend;
		engine->instmem.resume = nv04_instmem_resume;
		engine->instmem.populate = nv04_instmem_populate;
		engine->instmem.clear = nv04_instmem_clear;
		engine->instmem.bind = nv04_instmem_bind;
		engine->instmem.unbind = nv04_instmem_unbind;
		engine->instmem.prepare_access = nv04_instmem_prepare_access;
		engine->instmem.finish_access = nv04_instmem_finish_access;
		engine->mc.init = nv04_mc_init;
		engine->mc.takedown = nv04_mc_takedown;
		engine->timer.init = nv04_timer_init;
		engine->timer.read = nv04_timer_read;
		engine->timer.takedown = nv04_timer_takedown;
		engine->fb.init = nv10_fb_init;
		engine->fb.takedown = nv10_fb_takedown;
		engine->fb.set_region_tiling = nv10_fb_set_region_tiling;
		engine->graph.grclass = nv20_graph_grclass;
		engine->graph.init = nv20_graph_init;
		engine->graph.takedown = nv20_graph_takedown;
		engine->graph.channel = nv10_graph_channel;
		engine->graph.create_context = nv20_graph_create_context;
		engine->graph.destroy_context = nv20_graph_destroy_context;
		engine->graph.fifo_access = nv04_graph_fifo_access;
		engine->graph.load_context = nv20_graph_load_context;
		engine->graph.unload_context = nv20_graph_unload_context;
		engine->graph.set_region_tiling = nv20_graph_set_region_tiling;
		engine->fifo.channels = 32;
		engine->fifo.init = nv10_fifo_init;
		engine->fifo.takedown = nouveau_stub_takedown;
		engine->fifo.disable = nv04_fifo_disable;
		engine->fifo.enable = nv04_fifo_enable;
		engine->fifo.reassign = nv04_fifo_reassign;
		engine->fifo.cache_flush = nv04_fifo_cache_flush;
		engine->fifo.cache_pull = nv04_fifo_cache_pull;
		engine->fifo.channel_id = nv10_fifo_channel_id;
		engine->fifo.create_context = nv10_fifo_create_context;
		engine->fifo.destroy_context = nv10_fifo_destroy_context;
		engine->fifo.load_context = nv10_fifo_load_context;
		engine->fifo.unload_context = nv10_fifo_unload_context;
		break;
	case 0x30:
		engine->instmem.init = nv04_instmem_init;
		engine->instmem.takedown = nv04_instmem_takedown;
		engine->instmem.suspend = nv04_instmem_suspend;
		engine->instmem.resume = nv04_instmem_resume;
		engine->instmem.populate = nv04_instmem_populate;
		engine->instmem.clear = nv04_instmem_clear;
		engine->instmem.bind = nv04_instmem_bind;
		engine->instmem.unbind = nv04_instmem_unbind;
		engine->instmem.prepare_access = nv04_instmem_prepare_access;
		engine->instmem.finish_access = nv04_instmem_finish_access;
		engine->mc.init = nv04_mc_init;
		engine->mc.takedown = nv04_mc_takedown;
		engine->timer.init = nv04_timer_init;
		engine->timer.read = nv04_timer_read;
		engine->timer.takedown = nv04_timer_takedown;
		engine->fb.init = nv10_fb_init;
		engine->fb.takedown = nv10_fb_takedown;
		engine->fb.set_region_tiling = nv10_fb_set_region_tiling;
		engine->graph.grclass = nv30_graph_grclass;
		engine->graph.init = nv30_graph_init;
		engine->graph.takedown = nv20_graph_takedown;
		engine->graph.fifo_access = nv04_graph_fifo_access;
		engine->graph.channel = nv10_graph_channel;
		engine->graph.create_context = nv20_graph_create_context;
		engine->graph.destroy_context = nv20_graph_destroy_context;
		engine->graph.load_context = nv20_graph_load_context;
		engine->graph.unload_context = nv20_graph_unload_context;
		engine->graph.set_region_tiling = nv20_graph_set_region_tiling;
		engine->fifo.channels = 32;
		engine->fifo.init = nv10_fifo_init;
		engine->fifo.takedown = nouveau_stub_takedown;
		engine->fifo.disable = nv04_fifo_disable;
		engine->fifo.enable = nv04_fifo_enable;
		engine->fifo.reassign = nv04_fifo_reassign;
		engine->fifo.cache_flush = nv04_fifo_cache_flush;
		engine->fifo.cache_pull = nv04_fifo_cache_pull;
		engine->fifo.channel_id = nv10_fifo_channel_id;
		engine->fifo.create_context = nv10_fifo_create_context;
		engine->fifo.destroy_context = nv10_fifo_destroy_context;
		engine->fifo.load_context = nv10_fifo_load_context;
		engine->fifo.unload_context = nv10_fifo_unload_context;
		break;
	case 0x40:
	case 0x60:
		engine->instmem.init = nv04_instmem_init;
		engine->instmem.takedown = nv04_instmem_takedown;
		engine->instmem.suspend = nv04_instmem_suspend;
		engine->instmem.resume = nv04_instmem_resume;
		engine->instmem.populate = nv04_instmem_populate;
		engine->instmem.clear = nv04_instmem_clear;
		engine->instmem.bind = nv04_instmem_bind;
		engine->instmem.unbind = nv04_instmem_unbind;
		engine->instmem.prepare_access = nv04_instmem_prepare_access;
		engine->instmem.finish_access = nv04_instmem_finish_access;
		engine->mc.init = nv40_mc_init;
		engine->mc.takedown = nv40_mc_takedown;
		engine->timer.init = nv04_timer_init;
		engine->timer.read = nv04_timer_read;
		engine->timer.takedown = nv04_timer_takedown;
		engine->fb.init = nv40_fb_init;
		engine->fb.takedown = nv40_fb_takedown;
		engine->fb.set_region_tiling = nv40_fb_set_region_tiling;
		engine->graph.grclass = nv40_graph_grclass;
		engine->graph.init = nv40_graph_init;
		engine->graph.takedown = nv40_graph_takedown;
		engine->graph.fifo_access = nv04_graph_fifo_access;
		engine->graph.channel = nv40_graph_channel;
		engine->graph.create_context = nv40_graph_create_context;
		engine->graph.destroy_context = nv40_graph_destroy_context;
		engine->graph.load_context = nv40_graph_load_context;
		engine->graph.unload_context = nv40_graph_unload_context;
		engine->graph.set_region_tiling = nv40_graph_set_region_tiling;
		engine->fifo.channels = 32;
		engine->fifo.init = nv40_fifo_init;
		engine->fifo.takedown = nouveau_stub_takedown;
		engine->fifo.disable = nv04_fifo_disable;
		engine->fifo.enable = nv04_fifo_enable;
		engine->fifo.reassign = nv04_fifo_reassign;
		engine->fifo.cache_flush = nv04_fifo_cache_flush;
		engine->fifo.cache_pull = nv04_fifo_cache_pull;
		engine->fifo.channel_id = nv10_fifo_channel_id;
		engine->fifo.create_context = nv40_fifo_create_context;
		engine->fifo.destroy_context = nv40_fifo_destroy_context;
		engine->fifo.load_context = nv40_fifo_load_context;
		engine->fifo.unload_context = nv40_fifo_unload_context;
		break;
	case 0x50:
	case 0x80: /* gotta love NVIDIA's consistency.. */
	case 0x90:
	case 0xA0:
		engine->instmem.init = nv50_instmem_init;
		engine->instmem.takedown = nv50_instmem_takedown;
		engine->instmem.suspend = nv50_instmem_suspend;
		engine->instmem.resume = nv50_instmem_resume;
		engine->instmem.populate = nv50_instmem_populate;
		engine->instmem.clear = nv50_instmem_clear;
		engine->instmem.bind = nv50_instmem_bind;
		engine->instmem.unbind = nv50_instmem_unbind;
		engine->instmem.prepare_access = nv50_instmem_prepare_access;
		engine->instmem.finish_access = nv50_instmem_finish_access;
		engine->mc.init = nv50_mc_init;
		engine->mc.takedown = nv50_mc_takedown;
		engine->timer.init = nv04_timer_init;
		engine->timer.read = nv04_timer_read;
		engine->timer.takedown = nv04_timer_takedown;
		engine->fb.init = nouveau_stub_init;
		engine->fb.takedown = nouveau_stub_takedown;
		engine->graph.grclass = nv50_graph_grclass;
		engine->graph.init = nv50_graph_init;
		engine->graph.takedown = nv50_graph_takedown;
		engine->graph.fifo_access = nv50_graph_fifo_access;
		engine->graph.channel = nv50_graph_channel;
		engine->graph.create_context = nv50_graph_create_context;
		engine->graph.destroy_context = nv50_graph_destroy_context;
		engine->graph.load_context = nv50_graph_load_context;
		engine->graph.unload_context = nv50_graph_unload_context;
		engine->fifo.channels = 128;
		engine->fifo.init = nv50_fifo_init;
		engine->fifo.takedown = nv50_fifo_takedown;
		engine->fifo.disable = nv04_fifo_disable;
		engine->fifo.enable = nv04_fifo_enable;
		engine->fifo.reassign = nv04_fifo_reassign;
		engine->fifo.channel_id = nv50_fifo_channel_id;
		engine->fifo.create_context = nv50_fifo_create_context;
		engine->fifo.destroy_context = nv50_fifo_destroy_context;
		engine->fifo.load_context = nv50_fifo_load_context;
		engine->fifo.unload_context = nv50_fifo_unload_context;
		break;
	default:
		NV_ERROR(dev, "NV%02x unsupported\n", dev_priv->chipset);
		return 1;
	}

	return 0;
}
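/* VGA arbiter callback, registered via vga_client_register() in
 * nouveau_card_init() below: it reports which legacy VGA resources this
 * device still decodes.  When "state" is false only the normal IO/MEM
 * ranges are claimed, letting the arbiter route legacy VGA cycles to
 * another GPU.
 */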
static unsigned int
nouveau_vga_set_decode(void *priv, bool state)
{
	if (state)
		return VGA_RSRC_LEGACY_IO | VGA_RSRC_LEGACY_MEM |
		       VGA_RSRC_NORMAL_IO | VGA_RSRC_NORMAL_MEM;
	else
		return VGA_RSRC_NORMAL_IO | VGA_RSRC_NORMAL_MEM;
}
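/* Set up the kernel's own channel (allocated with the magic
 * (struct drm_file *)-2 owner) plus two DMA objects: NvDmaVRAM covering
 * all of VRAM and NvDmaGART covering the GART aperture.  This channel is
 * only created when acceleration isn't blocked and is what in-kernel
 * users (e.g. accelerated fbcon) submit commands through.
 */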
static int
nouveau_card_init_channel(struct drm_device *dev)
{
	struct drm_nouveau_private *dev_priv = dev->dev_private;
	struct nouveau_gpuobj *gpuobj;
	int ret;

	ret = nouveau_channel_alloc(dev, &dev_priv->channel,
				    (struct drm_file *)-2,
				    NvDmaFB, NvDmaTT);
	if (ret)
		return ret;

	gpuobj = NULL;
	ret = nouveau_gpuobj_dma_new(dev_priv->channel, NV_CLASS_DMA_IN_MEMORY,
				     0, nouveau_mem_fb_amount(dev),
				     NV_DMA_ACCESS_RW, NV_DMA_TARGET_VIDMEM,
				     &gpuobj);
	if (ret)
		goto out_err;

	ret = nouveau_gpuobj_ref_add(dev, dev_priv->channel, NvDmaVRAM,
				     gpuobj, NULL);
	if (ret)
		goto out_err;

	gpuobj = NULL;
	ret = nouveau_gpuobj_gart_dma_new(dev_priv->channel, 0,
					  dev_priv->gart_info.aper_size,
					  NV_DMA_ACCESS_RW, &gpuobj, NULL);
	if (ret)
		goto out_err;

	ret = nouveau_gpuobj_ref_add(dev, dev_priv->channel, NvDmaGART,
				     gpuobj, NULL);
	if (ret)
		goto out_err;

	return 0;
out_err:
	nouveau_gpuobj_del(dev, &gpuobj);
	nouveau_channel_free(dev_priv->channel);
	dev_priv->channel = NULL;
	return ret;
}
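/* Main card bring-up.  The order below matters: engine pointers, BIOS
 * tables, instance memory, the memory manager and gpuobjs must all be up
 * before the PMC/PTIMER/PFB/PGRAPH/PFIFO engines are initialised, and
 * only then are IRQs, the kernel channel and the display brought up.
 * Any failure unwinds through the out_* labels in reverse order.
 */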
int
nouveau_card_init(struct drm_device *dev)
{
	struct drm_nouveau_private *dev_priv = dev->dev_private;
	struct nouveau_engine *engine;
	int ret;

	NV_DEBUG(dev, "prev state = %d\n", dev_priv->init_state);

	if (dev_priv->init_state == NOUVEAU_CARD_INIT_DONE)
		return 0;

	vga_client_register(dev->pdev, dev, NULL, nouveau_vga_set_decode);

	/* Initialise internal driver API hooks */
	ret = nouveau_init_engine_ptrs(dev);
	if (ret)
		goto out;
	engine = &dev_priv->engine;
	dev_priv->init_state = NOUVEAU_CARD_INIT_FAILED;

	/* Parse BIOS tables / Run init tables if card not POSTed */
	if (drm_core_check_feature(dev, DRIVER_MODESET)) {
		ret = nouveau_bios_init(dev);
		if (ret)
			goto out;
	}

	ret = nouveau_gpuobj_early_init(dev);
	if (ret)
		goto out_bios;

	/* Initialise instance memory, must happen before mem_init so we
	 * know exactly how much VRAM we're able to use for "normal"
	 * purposes.
	 */
	ret = engine->instmem.init(dev);
	if (ret)
		goto out_gpuobj_early;

	/* Setup the memory manager */
	ret = nouveau_mem_init(dev);
	if (ret)
		goto out_instmem;

	ret = nouveau_gpuobj_init(dev);
	if (ret)
		goto out_mem;

	/* PMC */
	ret = engine->mc.init(dev);
	if (ret)
		goto out_gpuobj;

	/* PTIMER */
	ret = engine->timer.init(dev);
	if (ret)
		goto out_mc;

	/* PFB */
	ret = engine->fb.init(dev);
	if (ret)
		goto out_timer;

	/* PGRAPH */
	ret = engine->graph.init(dev);
	if (ret)
		goto out_fb;

	/* PFIFO */
	ret = engine->fifo.init(dev);
	if (ret)
		goto out_graph;

	/* this calls irq_preinstall, registers the irq handler and
	 * calls irq_postinstall
	 */
	ret = drm_irq_install(dev);
	if (ret)
		goto out_fifo;

	ret = drm_vblank_init(dev, 0);
	if (ret)
		goto out_irq;

	/* what about PVIDEO/PCRTC/PRAMDAC etc? */

	if (!engine->graph.accel_blocked) {
		ret = nouveau_card_init_channel(dev);
		if (ret)
			goto out_irq;
	}

	if (drm_core_check_feature(dev, DRIVER_MODESET)) {
		if (dev_priv->card_type >= NV_50)
			ret = nv50_display_create(dev);
		else
			ret = nv04_display_create(dev);
		if (ret)
			goto out_irq;
	}

	ret = nouveau_backlight_init(dev);
	if (ret)
		NV_ERROR(dev, "Error %d registering backlight\n", ret);

	dev_priv->init_state = NOUVEAU_CARD_INIT_DONE;

	if (drm_core_check_feature(dev, DRIVER_MODESET))
		drm_helper_initial_config(dev);

	return 0;

out_irq:
	drm_irq_uninstall(dev);
out_fifo:
	engine->fifo.takedown(dev);
out_graph:
	engine->graph.takedown(dev);
out_fb:
	engine->fb.takedown(dev);
out_timer:
	engine->timer.takedown(dev);
out_mc:
	engine->mc.takedown(dev);
out_gpuobj:
	nouveau_gpuobj_takedown(dev);
out_mem:
	nouveau_mem_close(dev);
out_instmem:
	engine->instmem.takedown(dev);
out_gpuobj_early:
	nouveau_gpuobj_late_takedown(dev);
out_bios:
	nouveau_bios_takedown(dev);
out:
	vga_client_register(dev->pdev, NULL, NULL, NULL);
	return ret;
}
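/* Teardown mirrors nouveau_card_init(): free the kernel channel, take the
 * engines down in reverse order, release TTM's GART range and the SGDMA
 * state, then drop gpuobjs, the memory manager, instmem, the IRQ handler
 * and the BIOS state before unregistering from the VGA arbiter.
 */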
static void nouveau_card_takedown(struct drm_device *dev)
{
	struct drm_nouveau_private *dev_priv = dev->dev_private;
	struct nouveau_engine *engine = &dev_priv->engine;

	NV_DEBUG(dev, "prev state = %d\n", dev_priv->init_state);

	if (dev_priv->init_state != NOUVEAU_CARD_INIT_DOWN) {
		nouveau_backlight_exit(dev);

		if (dev_priv->channel) {
			nouveau_channel_free(dev_priv->channel);
			dev_priv->channel = NULL;
		}

		engine->fifo.takedown(dev);
		engine->graph.takedown(dev);
		engine->fb.takedown(dev);
		engine->timer.takedown(dev);
		engine->mc.takedown(dev);

		mutex_lock(&dev->struct_mutex);
		ttm_bo_clean_mm(&dev_priv->ttm.bdev, TTM_PL_TT);
		mutex_unlock(&dev->struct_mutex);
		nouveau_sgdma_takedown(dev);

		nouveau_gpuobj_takedown(dev);
		nouveau_mem_close(dev);
		engine->instmem.takedown(dev);

		if (drm_core_check_feature(dev, DRIVER_MODESET))
			drm_irq_uninstall(dev);

		nouveau_gpuobj_late_takedown(dev);
		nouveau_bios_takedown(dev);

		vga_client_register(dev->pdev, NULL, NULL, NULL);

		dev_priv->init_state = NOUVEAU_CARD_INIT_DOWN;
	}
}
/* a client is dying, release everything that was allocated for its
 * file_priv */
void nouveau_preclose(struct drm_device *dev, struct drm_file *file_priv)
{
	nouveau_channel_cleanup(dev, file_priv);
}

/* first module load, set up the mmio/fb mapping */
/* KMS: we need mmio at load time, not when the first drm client opens. */
int nouveau_firstopen(struct drm_device *dev)
{
	return 0;
}
/* if we have an OF card, copy vbios to RAMIN */
static void nouveau_OF_copy_vbios_to_ramin(struct drm_device *dev)
{
#if defined(__powerpc__)
	int size, i;
	const uint32_t *bios;
	struct device_node *dn = pci_device_to_OF_node(dev->pdev);
	if (!dn) {
		NV_INFO(dev, "Unable to get the OF node\n");
		return;
	}

	bios = of_get_property(dn, "NVDA,BMP", &size);
	if (bios) {
		for (i = 0; i < size; i += 4)
			nv_wi32(dev, i, bios[i/4]);
		NV_INFO(dev, "OF bios successfully copied (%d bytes)\n", size);
	} else {
		NV_INFO(dev, "Unable to get the OF bios\n");
	}
#endif
}
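/* Driver load: allocate dev_priv, map the register BAR, detect the
 * chipset from PMC_BOOT_0, map the RAMIN (instance memory) aperture and,
 * under KMS, initialise the card immediately.  On NV10 and later the
 * chipset ID lives in bits 27:20 of PMC_BOOT_0; as a purely hypothetical
 * example, a BOOT_0 value of 0x04400000 would decode to chipset 0x44 and
 * card_type NV_40.
 */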
int nouveau_load(struct drm_device *dev, unsigned long flags)
{
	struct drm_nouveau_private *dev_priv;
	uint32_t reg0;
	resource_size_t mmio_start_offs;

	dev_priv = kzalloc(sizeof(*dev_priv), GFP_KERNEL);
	if (!dev_priv)
		return -ENOMEM;
	dev->dev_private = dev_priv;
	dev_priv->dev = dev;

	dev_priv->flags = flags & NOUVEAU_FLAGS;
	dev_priv->init_state = NOUVEAU_CARD_INIT_DOWN;

	NV_DEBUG(dev, "vendor: 0x%X device: 0x%X class: 0x%X\n",
		 dev->pci_vendor, dev->pci_device, dev->pdev->class);

	dev_priv->acpi_dsm = nouveau_dsm_probe(dev);

	if (dev_priv->acpi_dsm)
		nouveau_hybrid_setup(dev);

	dev_priv->wq = create_workqueue("nouveau");
	if (!dev_priv->wq)
		return -EINVAL;

	/* resource 0 is mmio regs */
	/* resource 1 is linear FB */
	/* resource 2 is RAMIN (mmio regs + 0x1000000) */
	/* resource 6 is bios */

	/* map the mmio regs */
	mmio_start_offs = pci_resource_start(dev->pdev, 0);
	dev_priv->mmio = ioremap(mmio_start_offs, 0x00800000);
	if (!dev_priv->mmio) {
		NV_ERROR(dev, "Unable to initialize the mmio mapping. "
			 "Please report your setup to " DRIVER_EMAIL "\n");
		return -EINVAL;
	}
	NV_DEBUG(dev, "regs mapped ok at 0x%llx\n",
		 (unsigned long long)mmio_start_offs);

#ifdef __BIG_ENDIAN
	/* Put the card in BE mode if it's not */
	if (nv_rd32(dev, NV03_PMC_BOOT_1))
		nv_wr32(dev, NV03_PMC_BOOT_1, 0x00000001);

	DRM_MEMORYBARRIER();
#endif

	/* Time to determine the card architecture */
	reg0 = nv_rd32(dev, NV03_PMC_BOOT_0);

	/* We're dealing with >=NV10 */
	if ((reg0 & 0x0f000000) > 0) {
		/* Bits 27-20 contain the architecture in hex */
		dev_priv->chipset = (reg0 & 0xff00000) >> 20;
	/* NV04 or NV05 */
	} else if ((reg0 & 0xff00fff0) == 0x20004000) {
		dev_priv->chipset = 0x04;
	} else
		dev_priv->chipset = 0xff;

	switch (dev_priv->chipset & 0xf0) {
	case 0x00:
	case 0x10:
	case 0x20:
	case 0x30:
		dev_priv->card_type = dev_priv->chipset & 0xf0;
		break;
	case 0x40:
	case 0x60:
		dev_priv->card_type = NV_40;
		break;
	case 0x50:
	case 0x80:
	case 0x90:
	case 0xa0:
		dev_priv->card_type = NV_50;
		break;
	default:
		NV_INFO(dev, "Unsupported chipset 0x%08x\n", reg0);
		return -EINVAL;
	}

	NV_INFO(dev, "Detected an NV%2x generation card (0x%08x)\n",
		dev_priv->card_type, reg0);

	/* map larger RAMIN aperture on NV40 cards */
	dev_priv->ramin = NULL;
	if (dev_priv->card_type >= NV_40) {
		int ramin_bar = 2;
		if (pci_resource_len(dev->pdev, ramin_bar) == 0)
			ramin_bar = 3;

		dev_priv->ramin_size = pci_resource_len(dev->pdev, ramin_bar);
		dev_priv->ramin = ioremap(
				pci_resource_start(dev->pdev, ramin_bar),
				dev_priv->ramin_size);
		if (!dev_priv->ramin) {
			NV_ERROR(dev, "Failed to init RAMIN mapping, "
				      "limited instance memory available\n");
		}
	}

	/* On older cards (or if the above failed), create a map covering
	 * the BAR0 PRAMIN aperture */
	if (!dev_priv->ramin) {
		dev_priv->ramin_size = 1 * 1024 * 1024;
		dev_priv->ramin = ioremap(mmio_start_offs + NV_RAMIN,
					  dev_priv->ramin_size);
		if (!dev_priv->ramin) {
			NV_ERROR(dev, "Failed to map BAR0 PRAMIN.\n");
			return -ENOMEM;
		}
	}

	nouveau_OF_copy_vbios_to_ramin(dev);

	/* Special flags */
	if (dev->pci_device == 0x01a0)
		dev_priv->flags |= NV_NFORCE;
	else if (dev->pci_device == 0x01f0)
		dev_priv->flags |= NV_NFORCE2;

	/* For kernel modesetting, init card now and bring up fbcon */
	if (drm_core_check_feature(dev, DRIVER_MODESET)) {
		int ret = nouveau_card_init(dev);
		if (ret)
			return ret;
	}

	return 0;
}
static void nouveau_close(struct drm_device *dev)
{
	struct drm_nouveau_private *dev_priv = dev->dev_private;

	/* In the case of an error, dev_priv may not be allocated yet */
	if (dev_priv && dev_priv->card_type)
		nouveau_card_takedown(dev);
}

/* KMS: we need mmio at load time, not when the first drm client opens. */
void nouveau_lastclose(struct drm_device *dev)
{
	if (drm_core_check_feature(dev, DRIVER_MODESET))
		return;

	nouveau_close(dev);
}

int nouveau_unload(struct drm_device *dev)
{
	struct drm_nouveau_private *dev_priv = dev->dev_private;

	if (drm_core_check_feature(dev, DRIVER_MODESET)) {
		if (dev_priv->card_type >= NV_50)
			nv50_display_destroy(dev);
		else
			nv04_display_destroy(dev);
		nouveau_close(dev);
	}

	iounmap(dev_priv->mmio);
	iounmap(dev_priv->ramin);

	kfree(dev_priv);
	dev->dev_private = NULL;
	return 0;
}
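/* ioctl entry points.  CARD_INIT lets userspace trigger the deferred
 * nouveau_card_init() in the non-KMS path (under KMS the card is already
 * initialised at load time and the call returns early), while GETPARAM
 * exposes chipset, bus type and memory layout details to the userspace
 * driver.
 */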
int
nouveau_ioctl_card_init(struct drm_device *dev, void *data,
			struct drm_file *file_priv)
{
	return nouveau_card_init(dev);
}

int nouveau_ioctl_getparam(struct drm_device *dev, void *data,
			   struct drm_file *file_priv)
{
	struct drm_nouveau_private *dev_priv = dev->dev_private;
	struct drm_nouveau_getparam *getparam = data;

	NOUVEAU_CHECK_INITIALISED_WITH_RETURN;

	switch (getparam->param) {
	case NOUVEAU_GETPARAM_CHIPSET_ID:
		getparam->value = dev_priv->chipset;
		break;
	case NOUVEAU_GETPARAM_PCI_VENDOR:
		getparam->value = dev->pci_vendor;
		break;
	case NOUVEAU_GETPARAM_PCI_DEVICE:
		getparam->value = dev->pci_device;
		break;
	case NOUVEAU_GETPARAM_BUS_TYPE:
		if (drm_device_is_agp(dev))
			getparam->value = NV_AGP;
		else if (drm_device_is_pcie(dev))
			getparam->value = NV_PCIE;
		else
			getparam->value = NV_PCI;
		break;
	case NOUVEAU_GETPARAM_FB_PHYSICAL:
		getparam->value = dev_priv->fb_phys;
		break;
	case NOUVEAU_GETPARAM_AGP_PHYSICAL:
		getparam->value = dev_priv->gart_info.aper_base;
		break;
	case NOUVEAU_GETPARAM_PCI_PHYSICAL:
		if (dev->sg) {
			getparam->value = (unsigned long)dev->sg->virtual;
		} else {
			NV_ERROR(dev, "Requested PCIGART address, "
				      "while no PCIGART was created\n");
			return -EINVAL;
		}
		break;
	case NOUVEAU_GETPARAM_FB_SIZE:
		getparam->value = dev_priv->fb_available_size;
		break;
	case NOUVEAU_GETPARAM_AGP_SIZE:
		getparam->value = dev_priv->gart_info.aper_size;
		break;
	case NOUVEAU_GETPARAM_VM_VRAM_BASE:
		getparam->value = dev_priv->vm_vram_base;
		break;
	default:
		NV_ERROR(dev, "unknown parameter %lld\n", getparam->param);
		return -EINVAL;
	}

	return 0;
}

int
nouveau_ioctl_setparam(struct drm_device *dev, void *data,
		       struct drm_file *file_priv)
{
	struct drm_nouveau_setparam *setparam = data;

	NOUVEAU_CHECK_INITIALISED_WITH_RETURN;

	switch (setparam->param) {
	default:
		NV_ERROR(dev, "unknown parameter %lld\n", setparam->param);
		return -EINVAL;
	}

	return 0;
}
/* Wait until (value(reg) & mask) == val, up until timeout has hit */
bool nouveau_wait_until(struct drm_device *dev, uint64_t timeout,
			uint32_t reg, uint32_t mask, uint32_t val)
{
	struct drm_nouveau_private *dev_priv = dev->dev_private;
	struct nouveau_timer_engine *ptimer = &dev_priv->engine.timer;
	uint64_t start = ptimer->read(dev);

	do {
		if ((nv_rd32(dev, reg) & mask) == val)
			return true;
	} while (ptimer->read(dev) - start < timeout);

	return false;
}
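/* Note: the nv_wait() helper used below is, as far as this file is
 * concerned, a convenience wrapper around nouveau_wait_until() with a
 * fixed timeout (see its definition in nouveau_drv.h).
 */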
/* Waits for PGRAPH to go completely idle */
bool nouveau_wait_for_idle(struct drm_device *dev)
{
	if (!nv_wait(NV04_PGRAPH_STATUS, 0xffffffff, 0x00000000)) {
		NV_ERROR(dev, "PGRAPH idle timed out with status 0x%08x\n",
			 nv_rd32(dev, NV04_PGRAPH_STATUS));
		return false;
	}

	return true;
}