/*
 * Copyright 2007 Stephane Marchesin
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * PRECISION INSIGHT AND/OR ITS SUPPLIERS BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
 * DEALINGS IN THE SOFTWARE.
 */

#include "nouveau_drm.h"
#include "nouveau_drv.h"
#include "nouveau_hw.h"
#include "nouveau_util.h"
#include "nouveau_ramht.h"

struct nv04_graph_engine {
	struct nouveau_exec_engine base;
};

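/* MMIO registers that make up a channel's PGRAPH state on NV04/NV05; these
 * are saved to and restored from struct graph_state on a context switch.
 */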
static uint32_t nv04_graph_ctx_regs[] = {
	NV04_PGRAPH_CTX_SWITCH1,
	NV04_PGRAPH_CTX_SWITCH2,
	NV04_PGRAPH_CTX_SWITCH3,
	NV04_PGRAPH_CTX_SWITCH4,
	NV04_PGRAPH_CTX_CACHE1,
	NV04_PGRAPH_CTX_CACHE2,
	NV04_PGRAPH_CTX_CACHE3,
	NV04_PGRAPH_CTX_CACHE4,
	NV04_PGRAPH_DMA_START_0,
	NV04_PGRAPH_DMA_START_1,
	NV04_PGRAPH_DMA_LENGTH,
	NV04_PGRAPH_DMA_PITCH,
	NV04_PGRAPH_BSWIZZLE2,
	NV04_PGRAPH_BSWIZZLE5,
	NV04_PGRAPH_PATT_COLOR0,
	NV04_PGRAPH_PATT_COLOR1,
	NV04_PGRAPH_PATT_COLORRAM+0x00,
	NV04_PGRAPH_PATT_COLORRAM+0x04,
	NV04_PGRAPH_PATT_COLORRAM+0x08,
	NV04_PGRAPH_PATT_COLORRAM+0x0c,
	NV04_PGRAPH_PATT_COLORRAM+0x10,
	NV04_PGRAPH_PATT_COLORRAM+0x14,
	NV04_PGRAPH_PATT_COLORRAM+0x18,
	NV04_PGRAPH_PATT_COLORRAM+0x1c,
	NV04_PGRAPH_PATT_COLORRAM+0x20,
	NV04_PGRAPH_PATT_COLORRAM+0x24,
	NV04_PGRAPH_PATT_COLORRAM+0x28,
	NV04_PGRAPH_PATT_COLORRAM+0x2c,
	NV04_PGRAPH_PATT_COLORRAM+0x30,
	NV04_PGRAPH_PATT_COLORRAM+0x34,
	NV04_PGRAPH_PATT_COLORRAM+0x38,
	NV04_PGRAPH_PATT_COLORRAM+0x3c,
	NV04_PGRAPH_PATT_COLORRAM+0x40,
	NV04_PGRAPH_PATT_COLORRAM+0x44,
	NV04_PGRAPH_PATT_COLORRAM+0x48,
	NV04_PGRAPH_PATT_COLORRAM+0x4c,
	NV04_PGRAPH_PATT_COLORRAM+0x50,
	NV04_PGRAPH_PATT_COLORRAM+0x54,
	NV04_PGRAPH_PATT_COLORRAM+0x58,
	NV04_PGRAPH_PATT_COLORRAM+0x5c,
	NV04_PGRAPH_PATT_COLORRAM+0x60,
	NV04_PGRAPH_PATT_COLORRAM+0x64,
	NV04_PGRAPH_PATT_COLORRAM+0x68,
	NV04_PGRAPH_PATT_COLORRAM+0x6c,
	NV04_PGRAPH_PATT_COLORRAM+0x70,
	NV04_PGRAPH_PATT_COLORRAM+0x74,
	NV04_PGRAPH_PATT_COLORRAM+0x78,
	NV04_PGRAPH_PATT_COLORRAM+0x7c,
	NV04_PGRAPH_PATT_COLORRAM+0x80,
	NV04_PGRAPH_PATT_COLORRAM+0x84,
	NV04_PGRAPH_PATT_COLORRAM+0x88,
	NV04_PGRAPH_PATT_COLORRAM+0x8c,
	NV04_PGRAPH_PATT_COLORRAM+0x90,
	NV04_PGRAPH_PATT_COLORRAM+0x94,
	NV04_PGRAPH_PATT_COLORRAM+0x98,
	NV04_PGRAPH_PATT_COLORRAM+0x9c,
	NV04_PGRAPH_PATT_COLORRAM+0xa0,
	NV04_PGRAPH_PATT_COLORRAM+0xa4,
	NV04_PGRAPH_PATT_COLORRAM+0xa8,
	NV04_PGRAPH_PATT_COLORRAM+0xac,
	NV04_PGRAPH_PATT_COLORRAM+0xb0,
	NV04_PGRAPH_PATT_COLORRAM+0xb4,
	NV04_PGRAPH_PATT_COLORRAM+0xb8,
	NV04_PGRAPH_PATT_COLORRAM+0xbc,
	NV04_PGRAPH_PATT_COLORRAM+0xc0,
	NV04_PGRAPH_PATT_COLORRAM+0xc4,
	NV04_PGRAPH_PATT_COLORRAM+0xc8,
	NV04_PGRAPH_PATT_COLORRAM+0xcc,
	NV04_PGRAPH_PATT_COLORRAM+0xd0,
	NV04_PGRAPH_PATT_COLORRAM+0xd4,
	NV04_PGRAPH_PATT_COLORRAM+0xd8,
	NV04_PGRAPH_PATT_COLORRAM+0xdc,
	NV04_PGRAPH_PATT_COLORRAM+0xe0,
	NV04_PGRAPH_PATT_COLORRAM+0xe4,
	NV04_PGRAPH_PATT_COLORRAM+0xe8,
	NV04_PGRAPH_PATT_COLORRAM+0xec,
	NV04_PGRAPH_PATT_COLORRAM+0xf0,
	NV04_PGRAPH_PATT_COLORRAM+0xf4,
	NV04_PGRAPH_PATT_COLORRAM+0xf8,
	NV04_PGRAPH_PATT_COLORRAM+0xfc,
	NV04_PGRAPH_PATTERN_SHAPE,
	NV04_PGRAPH_BETA_AND,
	NV04_PGRAPH_BETA_PREMULT,
	NV04_PGRAPH_CONTROL0,
	NV04_PGRAPH_CONTROL1,
	NV04_PGRAPH_CONTROL2,
	NV04_PGRAPH_STORED_FMT,
	NV04_PGRAPH_SOURCE_COLOR,
	NV04_PGRAPH_PASSTHRU_0,
	NV04_PGRAPH_PASSTHRU_1,
	NV04_PGRAPH_PASSTHRU_2,
	NV04_PGRAPH_DVD_COLORFMT,
	NV04_PGRAPH_SCALED_FORMAT,
	NV04_PGRAPH_MISC24_0,
	NV04_PGRAPH_MISC24_1,
	NV04_PGRAPH_MISC24_2,
};

struct graph_state {
	uint32_t nv04[ARRAY_SIZE(nv04_graph_ctx_regs)];
};

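/* Figure out which channel, if any, currently owns PGRAPH by looking at
 * CTX_CONTROL/CTX_USER; returns NULL when no valid channel is loaded.
 */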
static struct nouveau_channel *
nv04_graph_channel(struct drm_device *dev)
{
	struct drm_nouveau_private *dev_priv = dev->dev_private;
	int chid = dev_priv->engine.fifo.channels;

	if (nv_rd32(dev, NV04_PGRAPH_CTX_CONTROL) & 0x00010000)
		chid = nv_rd32(dev, NV04_PGRAPH_CTX_USER) >> 24;

	if (chid >= dev_priv->engine.fifo.channels)
		return NULL;

	return dev_priv->channels.ptr[chid];
}

static uint32_t *ctx_reg(struct graph_state *ctx, uint32_t reg)
{
	int i;

	for (i = 0; i < ARRAY_SIZE(nv04_graph_ctx_regs); i++) {
		if (nv04_graph_ctx_regs[i] == reg)
			return &ctx->nv04[i];
	}

	return NULL;
}

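/* Write a channel's saved register set back into PGRAPH and point CTX_USER
 * at that channel.
 */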
static int
nv04_graph_load_context(struct nouveau_channel *chan)
{
	struct graph_state *pgraph_ctx = chan->engctx[NVOBJ_ENGINE_GR];
	struct drm_device *dev = chan->dev;
	uint32_t tmp;
	int i;

	for (i = 0; i < ARRAY_SIZE(nv04_graph_ctx_regs); i++)
		nv_wr32(dev, nv04_graph_ctx_regs[i], pgraph_ctx->nv04[i]);

	nv_wr32(dev, NV04_PGRAPH_CTX_CONTROL, 0x10010100);

	tmp = nv_rd32(dev, NV04_PGRAPH_CTX_USER) & 0x00ffffff;
	nv_wr32(dev, NV04_PGRAPH_CTX_USER, tmp | chan->id << 24);

	tmp = nv_rd32(dev, NV04_PGRAPH_FFINTFC_ST2);
	nv_wr32(dev, NV04_PGRAPH_FFINTFC_ST2, tmp & 0x000fffff);

	return 0;
}

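/* Save the active channel's PGRAPH registers into its graph_state and park
 * CTX_USER on the last (invalid) channel id.
 */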
static int
nv04_graph_unload_context(struct drm_device *dev)
{
	struct drm_nouveau_private *dev_priv = dev->dev_private;
	struct nouveau_channel *chan = NULL;
	struct graph_state *ctx;
	uint32_t tmp;
	int i;

	chan = nv04_graph_channel(dev);
	if (!chan)
		return 0;
	ctx = chan->engctx[NVOBJ_ENGINE_GR];

	for (i = 0; i < ARRAY_SIZE(nv04_graph_ctx_regs); i++)
		ctx->nv04[i] = nv_rd32(dev, nv04_graph_ctx_regs[i]);

	nv_wr32(dev, NV04_PGRAPH_CTX_CONTROL, 0x10000000);
	tmp = nv_rd32(dev, NV04_PGRAPH_CTX_USER) & 0x00ffffff;
	tmp |= (dev_priv->engine.fifo.channels - 1) << 24;
	nv_wr32(dev, NV04_PGRAPH_CTX_USER, tmp);
	return 0;
}

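/* Allocate the software copy of a channel's PGRAPH context; only the saved
 * copy of NV04_PGRAPH_DEBUG_3 needs a non-zero initial value.
 */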
static int
nv04_graph_context_new(struct nouveau_channel *chan, int engine)
{
	struct graph_state *pgraph_ctx;

	NV_DEBUG(chan->dev, "nv04_graph_context_create %d\n", chan->id);

	pgraph_ctx = kzalloc(sizeof(*pgraph_ctx), GFP_KERNEL);
	if (pgraph_ctx == NULL)
		return -ENOMEM;

	*ctx_reg(pgraph_ctx, NV04_PGRAPH_DEBUG_3) = 0xfad4ff31;

	chan->engctx[engine] = pgraph_ctx;
	return 0;
}

static void
nv04_graph_context_del(struct nouveau_channel *chan, int engine)
{
	struct drm_device *dev = chan->dev;
	struct drm_nouveau_private *dev_priv = dev->dev_private;
	struct graph_state *pgraph_ctx = chan->engctx[engine];
	unsigned long flags;

	spin_lock_irqsave(&dev_priv->context_switch_lock, flags);
	nv04_graph_fifo_access(dev, false);

	/* Unload the context if it's the currently active one */
	if (nv04_graph_channel(dev) == chan)
		nv04_graph_unload_context(dev);

	nv04_graph_fifo_access(dev, true);
	spin_unlock_irqrestore(&dev_priv->context_switch_lock, flags);

	/* Free the context resources */
	kfree(pgraph_ctx);
	chan->engctx[engine] = NULL;
}

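/* Build a minimal 4-word graphics object in instance memory for the given
 * class and hook it into the channel's RAMHT under the supplied handle.
 */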
int
nv04_graph_object_new(struct nouveau_channel *chan, int engine,
		      u32 handle, u16 class)
{
	struct drm_device *dev = chan->dev;
	struct nouveau_gpuobj *obj = NULL;
	int ret;

	ret = nouveau_gpuobj_new(dev, chan, 16, 16, NVOBJ_FLAG_ZERO_FREE, &obj);
	if (ret)
		return ret;

#ifdef __BIG_ENDIAN
	nv_wo32(obj, 0x00, 0x00080000 | class);
#else
	nv_wo32(obj, 0x00, class);
#endif
	nv_wo32(obj, 0x04, 0x00000000);
	nv_wo32(obj, 0x08, 0x00000000);
	nv_wo32(obj, 0x0c, 0x00000000);

	ret = nouveau_ramht_insert(chan, handle, obj);
	nouveau_gpuobj_ref(NULL, &obj);
	return ret;
}

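/* Bring PGRAPH out of reset, unmask its interrupts and program the global
 * (non per-channel) state.
 */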
static int
nv04_graph_init(struct drm_device *dev, int engine)
{
	struct drm_nouveau_private *dev_priv = dev->dev_private;
	uint32_t tmp;

	nv_wr32(dev, NV03_PMC_ENABLE, nv_rd32(dev, NV03_PMC_ENABLE) &
		~NV_PMC_ENABLE_PGRAPH);
	nv_wr32(dev, NV03_PMC_ENABLE, nv_rd32(dev, NV03_PMC_ENABLE) |
		 NV_PMC_ENABLE_PGRAPH);

	/* Enable PGRAPH interrupts */
	nv_wr32(dev, NV03_PGRAPH_INTR, 0xFFFFFFFF);
	nv_wr32(dev, NV03_PGRAPH_INTR_EN, 0xFFFFFFFF);

	nv_wr32(dev, NV04_PGRAPH_VALID1, 0);
	nv_wr32(dev, NV04_PGRAPH_VALID2, 0);
	/*nv_wr32(dev, NV04_PGRAPH_DEBUG_0, 0x000001FF);
	nv_wr32(dev, NV04_PGRAPH_DEBUG_0, 0x001FFFFF);*/
	nv_wr32(dev, NV04_PGRAPH_DEBUG_0, 0x1231c000);
	/* 1231C000 blob, 001 haiku */
	/*V_WRITE(NV04_PGRAPH_DEBUG_1, 0xf2d91100);*/
	nv_wr32(dev, NV04_PGRAPH_DEBUG_1, 0x72111100);
	/* 0x72111100 blob, 01 haiku */
	/*nv_wr32(dev, NV04_PGRAPH_DEBUG_2, 0x11d5f870);*/
	nv_wr32(dev, NV04_PGRAPH_DEBUG_2, 0x11d5f071);

	/*nv_wr32(dev, NV04_PGRAPH_DEBUG_3, 0xfad4ff31);*/
	nv_wr32(dev, NV04_PGRAPH_DEBUG_3, 0xf0d4ff31);
	/* haiku and blob 10d4 */

	nv_wr32(dev, NV04_PGRAPH_STATE, 0xFFFFFFFF);
	nv_wr32(dev, NV04_PGRAPH_CTX_CONTROL, 0x10000100);
	tmp = nv_rd32(dev, NV04_PGRAPH_CTX_USER) & 0x00ffffff;
	tmp |= (dev_priv->engine.fifo.channels - 1) << 24;
	nv_wr32(dev, NV04_PGRAPH_CTX_USER, tmp);

	/* These don't belong here, they're part of a per-channel context */
	nv_wr32(dev, NV04_PGRAPH_PATTERN_SHAPE, 0x00000000);
	nv_wr32(dev, NV04_PGRAPH_BETA_AND, 0xFFFFFFFF);

	return 0;
}

static int
nv04_graph_fini(struct drm_device *dev, int engine)
{
	nv04_graph_unload_context(dev);
	nv_wr32(dev, NV03_PGRAPH_INTR_EN, 0x00000000);
	return 0;
}

void
nv04_graph_fifo_access(struct drm_device *dev, bool enabled)
{
	if (enabled)
		nv_wr32(dev, NV04_PGRAPH_FIFO,
			nv_rd32(dev, NV04_PGRAPH_FIFO) | 1);
	else
		nv_wr32(dev, NV04_PGRAPH_FIFO,
			nv_rd32(dev, NV04_PGRAPH_FIFO) & ~1);
}

static int
nv04_graph_mthd_set_ref(struct nouveau_channel *chan,
			u32 class, u32 mthd, u32 data)
{
	atomic_set(&chan->fence.last_sequence_irq, data);
	return 0;
}

int
nv04_graph_mthd_page_flip(struct nouveau_channel *chan,
			  u32 class, u32 mthd, u32 data)
{
	struct drm_device *dev = chan->dev;
	struct nouveau_page_flip_state s;

	if (!nouveau_finish_page_flip(chan, &s))
		nv_set_crtc_base(dev, s.crtc,
				 s.offset + s.y * s.pitch + s.x * s.bpp / 8);

	return 0;
}

/*
 * Software methods, why they are needed, and how they all work:
 *
 * NV04 and NV05 keep most of the state in PGRAPH context itself, but some
 * 2d engine settings are kept inside the grobjs themselves. The grobjs are
 * 3 words long on both. grobj format on NV04 is:
 *
 * word 0:
 *  - bits 0-7: class
 *  - bit 12: color key active
 *  - bit 13: clip rect active
 *  - bit 14: if set, destination surface is swizzled and taken from buffer 5
 *            [set by NV04_SWIZZLED_SURFACE], otherwise it's linear and taken
 *            from buffer 0 [set by NV04_CONTEXT_SURFACES_2D or
 *            NV03_CONTEXT_SURFACE_DST].
 *  - bits 15-17: 2d operation [aka patch config]
 *  - bit 24: patch valid [enables rendering using this object]
 *  - bit 25: surf3d valid [for tex_tri and multitex_tri only]
 * word 1:
 *  - bits 0-1: mono format
 *  - bits 8-13: color format
 *  - bits 16-31: DMA_NOTIFY instance
 * word 2:
 *  - bits 0-15: DMA_A instance
 *  - bits 16-31: DMA_B instance
 *
 * On NV05 it's:
 *
 * word 0:
 *  - bits 0-7: class
 *  - bit 12: color key active
 *  - bit 13: clip rect active
 *  - bit 14: if set, destination surface is swizzled and taken from buffer 5
 *            [set by NV04_SWIZZLED_SURFACE], otherwise it's linear and taken
 *            from buffer 0 [set by NV04_CONTEXT_SURFACES_2D or
 *            NV03_CONTEXT_SURFACE_DST].
 *  - bits 15-17: 2d operation [aka patch config]
 *  - bits 20-22: dither mode
 *  - bit 24: patch valid [enables rendering using this object]
 *  - bit 25: surface_dst/surface_color/surf2d/surf3d valid
 *  - bit 26: surface_src/surface_zeta valid
 *  - bit 27: pattern valid
 *  - bit 28: rop valid
 *  - bit 29: beta1 valid
 *  - bit 30: beta4 valid
 * word 1:
 *  - bits 0-1: mono format
 *  - bits 8-13: color format
 *  - bits 16-31: DMA_NOTIFY instance
 * word 2:
 *  - bits 0-15: DMA_A instance
 *  - bits 16-31: DMA_B instance
 *
 * NV05 will set/unset the relevant valid bits when you poke the relevant
 * object-binding methods with an object of the proper type, or with the
 * NULL type. It'll only allow rendering using the grobj if all needed
 * objects are bound. The needed set of objects depends on the selected
 * operation: for example, a rop object is needed by ROP_AND, but not by
 * SRCCOPY_AND.
 *
 * NV04 doesn't have these methods implemented at all, and doesn't have the
 * relevant bits in the grobj. Instead, it'll allow rendering whenever bit 24
 * is set. So we have to emulate them in software, internally keeping the
 * same bits as NV05 does. Since grobjs are aligned to 16 bytes on nv04 and
 * the last word isn't actually used for anything, we abuse it for this
 * purpose.
 *
 * Actually, NV05 can optionally check bit 24 too, but we disable this since
 * there's no use for it.
 *
 * For unknown reasons, NV04 implements surf3d binding in hardware as an
 * exception. Also for unknown reasons, NV04 doesn't implement the clipping
 * methods on the surf3d object, so we have to emulate them too.
 */

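/*
 * Purely as an illustration of the NV05-style word 0 layout described above
 * (this struct is not used anywhere in the driver and the names are made
 * up), the bits could be pictured roughly like this:
 *
 *	struct nv05_grobj_word0_sketch {
 *		u32 class          : 8;	// bits 0-7
 *		u32 pad0           : 4;
 *		u32 color_key      : 1;	// bit 12, chroma bound
 *		u32 clip_rect      : 1;	// bit 13, clip bound
 *		u32 swizzled_dst   : 1;	// bit 14, swizzled destination surface
 *		u32 operation      : 3;	// bits 15-17, aka patch config
 *		u32 pad1           : 2;
 *		u32 dither_mode    : 3;	// bits 20-22
 *		u32 pad2           : 1;
 *		u32 patch_valid    : 1;	// bit 24, mask 0x01000000
 *		u32 surf_valid     : 1;	// bit 25, mask 0x02000000
 *		u32 surf_src_valid : 1;	// bit 26, mask 0x04000000
 *		u32 pattern_valid  : 1;	// bit 27, mask 0x08000000
 *		u32 rop_valid      : 1;	// bit 28, mask 0x10000000
 *		u32 beta1_valid    : 1;	// bit 29, mask 0x20000000
 *		u32 beta4_valid    : 1;	// bit 30, mask 0x40000000
 *	};
 *
 * The helpers below never use such a struct; they read-modify-write the raw
 * words with the masks listed above. On NV04 the NV05-only valid bits
 * (25-30) are kept in the grobj's otherwise unused fourth word, while bit 24
 * stays in word 0.
 */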
static void
nv04_graph_set_ctx1(struct nouveau_channel *chan, u32 mask, u32 value)
{
	struct drm_device *dev = chan->dev;
	u32 instance = (nv_rd32(dev, NV04_PGRAPH_CTX_SWITCH4) & 0xffff) << 4;
	int subc = (nv_rd32(dev, NV04_PGRAPH_TRAPPED_ADDR) >> 13) & 0x7;
	u32 tmp;

	tmp  = nv_ri32(dev, instance);
	tmp &= ~mask;
	tmp |= value;

	nv_wi32(dev, instance, tmp);
	nv_wr32(dev, NV04_PGRAPH_CTX_SWITCH1, tmp);
	nv_wr32(dev, NV04_PGRAPH_CTX_CACHE1 + (subc<<2), tmp);
}

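/* Update the software valid-bit word of the current grobj and recompute the
 * hardware "patch valid" bit (bit 24) from it, taking the current 2d
 * operation into account.
 */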
static void
nv04_graph_set_ctx_val(struct nouveau_channel *chan, u32 mask, u32 value)
{
	struct drm_device *dev = chan->dev;
	u32 instance = (nv_rd32(dev, NV04_PGRAPH_CTX_SWITCH4) & 0xffff) << 4;
	u32 tmp, ctx1;
	int class, op, valid = 1;

	ctx1 = nv_ri32(dev, instance);
	class = ctx1 & 0xff;
	op = (ctx1 >> 15) & 7;

	tmp = nv_ri32(dev, instance + 0xc);
	tmp &= ~mask;
	tmp |= value;
	nv_wi32(dev, instance + 0xc, tmp);

	/* check for valid surf2d/surf_dst/surf_color */
	if (!(tmp & 0x02000000))
		valid = 0;
	/* check for valid surf_src/surf_zeta */
	if ((class == 0x1f || class == 0x48) && !(tmp & 0x04000000))
		valid = 0;

	switch (op) {
	/* SRCCOPY_AND, SRCCOPY: no extra objects required */
	case 1:
	case 3:
		break;
	/* ROP_AND: requires pattern and rop */
	case 0:
		if (!(tmp & 0x18000000))
			valid = 0;
		break;
	/* BLEND_AND: requires beta1 */
	case 2:
		if (!(tmp & 0x20000000))
			valid = 0;
		break;
	/* SRCCOPY_PREMULT, BLEND_PREMULT: beta4 required */
	case 4:
	case 5:
		if (!(tmp & 0x40000000))
			valid = 0;
		break;
	}

	nv04_graph_set_ctx1(chan, 0x01000000, valid << 24);
}

static int
nv04_graph_mthd_set_operation(struct nouveau_channel *chan,
			      u32 class, u32 mthd, u32 data)
{
	if (data > 5)
		return 1;
	/* Old versions of the objects only accept first three operations. */
	if (data > 2 && class < 0x40)
		return 1;

	nv04_graph_set_ctx1(chan, 0x00038000, data << 15);
	/* changing operation changes set of objects needed for validation */
	nv04_graph_set_ctx_val(chan, 0, 0);
	return 0;
}

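/* NV04 doesn't implement the surf3d clip methods in hardware, so emulate
 * them: decode min/extent from the method data and write the resulting
 * min/max pair straight into the PGRAPH clip registers.
 */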
static int
nv04_graph_mthd_surf3d_clip_h(struct nouveau_channel *chan,
			      u32 class, u32 mthd, u32 data)
{
	uint32_t min = data & 0xffff, max;
	uint32_t w = data >> 16;

	if (min & 0x8000)
		/* too large */
		return 1;
	if (w & 0x8000)
		/* yes, it accepts negative for some reason. */
		w |= 0xffff0000;
	max = min + w;
	max &= 0x3ffff;

	nv_wr32(chan->dev, 0x40053c, min);
	nv_wr32(chan->dev, 0x400544, max);
	return 0;
}

static int
nv04_graph_mthd_surf3d_clip_v(struct nouveau_channel *chan,
			      u32 class, u32 mthd, u32 data)
{
	uint32_t min = data & 0xffff, max;
	uint32_t w = data >> 16;

	if (min & 0x8000)
		/* too large */
		return 1;
	if (w & 0x8000)
		/* yes, it accepts negative for some reason. */
		w |= 0xffff0000;
	max = min + w;
	max &= 0x3ffff;

	nv_wr32(chan->dev, 0x400540, min);
	nv_wr32(chan->dev, 0x400548, max);
	return 0;
}

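/* The bind helpers below emulate the NV05 object-binding methods: each one
 * looks up the class of the object being bound (via its RAMIN instance;
 * "data" is the instance address >> 4) and sets or clears the matching
 * ctx1/ctx_val bits. Binding a NULL object clears the bit.
 */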
static int
nv04_graph_mthd_bind_surf2d(struct nouveau_channel *chan,
			    u32 class, u32 mthd, u32 data)
{
	switch (nv_ri32(chan->dev, data << 4) & 0xff) {
	case 0x30:
		nv04_graph_set_ctx1(chan, 0x00004000, 0);
		nv04_graph_set_ctx_val(chan, 0x02000000, 0);
		return 0;
	case 0x42:
		nv04_graph_set_ctx1(chan, 0x00004000, 0);
		nv04_graph_set_ctx_val(chan, 0x02000000, 0x02000000);
		return 0;
	}
	return 1;
}

static int
nv04_graph_mthd_bind_surf2d_swzsurf(struct nouveau_channel *chan,
				    u32 class, u32 mthd, u32 data)
{
	switch (nv_ri32(chan->dev, data << 4) & 0xff) {
	case 0x30:
		nv04_graph_set_ctx1(chan, 0x00004000, 0);
		nv04_graph_set_ctx_val(chan, 0x02000000, 0);
		return 0;
	case 0x42:
		nv04_graph_set_ctx1(chan, 0x00004000, 0);
		nv04_graph_set_ctx_val(chan, 0x02000000, 0x02000000);
		return 0;
	case 0x52:
		nv04_graph_set_ctx1(chan, 0x00004000, 0x00004000);
		nv04_graph_set_ctx_val(chan, 0x02000000, 0x02000000);
		return 0;
	}
	return 1;
}

static int
nv04_graph_mthd_bind_nv01_patt(struct nouveau_channel *chan,
			       u32 class, u32 mthd, u32 data)
{
	switch (nv_ri32(chan->dev, data << 4) & 0xff) {
	case 0x30:
		nv04_graph_set_ctx_val(chan, 0x08000000, 0);
		return 0;
	case 0x18:
		nv04_graph_set_ctx_val(chan, 0x08000000, 0x08000000);
		return 0;
	}
	return 1;
}

static int
nv04_graph_mthd_bind_nv04_patt(struct nouveau_channel *chan,
			       u32 class, u32 mthd, u32 data)
{
	switch (nv_ri32(chan->dev, data << 4) & 0xff) {
	case 0x30:
		nv04_graph_set_ctx_val(chan, 0x08000000, 0);
		return 0;
	case 0x44:
		nv04_graph_set_ctx_val(chan, 0x08000000, 0x08000000);
		return 0;
	}
	return 1;
}

static int
nv04_graph_mthd_bind_rop(struct nouveau_channel *chan,
			 u32 class, u32 mthd, u32 data)
{
	switch (nv_ri32(chan->dev, data << 4) & 0xff) {
	case 0x30:
		nv04_graph_set_ctx_val(chan, 0x10000000, 0);
		return 0;
	case 0x43:
		nv04_graph_set_ctx_val(chan, 0x10000000, 0x10000000);
		return 0;
	}
	return 1;
}

static int
nv04_graph_mthd_bind_beta1(struct nouveau_channel *chan,
			   u32 class, u32 mthd, u32 data)
{
	switch (nv_ri32(chan->dev, data << 4) & 0xff) {
	case 0x30:
		nv04_graph_set_ctx_val(chan, 0x20000000, 0);
		return 0;
	case 0x12:
		nv04_graph_set_ctx_val(chan, 0x20000000, 0x20000000);
		return 0;
	}
	return 1;
}

static int
nv04_graph_mthd_bind_beta4(struct nouveau_channel *chan,
			   u32 class, u32 mthd, u32 data)
{
	switch (nv_ri32(chan->dev, data << 4) & 0xff) {
	case 0x30:
		nv04_graph_set_ctx_val(chan, 0x40000000, 0);
		return 0;
	case 0x72:
		nv04_graph_set_ctx_val(chan, 0x40000000, 0x40000000);
		return 0;
	}
	return 1;
}

static int
nv04_graph_mthd_bind_surf_dst(struct nouveau_channel *chan,
			      u32 class, u32 mthd, u32 data)
{
	switch (nv_ri32(chan->dev, data << 4) & 0xff) {
	case 0x30:
		nv04_graph_set_ctx_val(chan, 0x02000000, 0);
		return 0;
	case 0x58:
		nv04_graph_set_ctx_val(chan, 0x02000000, 0x02000000);
		return 0;
	}
	return 1;
}

static int
nv04_graph_mthd_bind_surf_src(struct nouveau_channel *chan,
			      u32 class, u32 mthd, u32 data)
{
	switch (nv_ri32(chan->dev, data << 4) & 0xff) {
	case 0x30:
		nv04_graph_set_ctx_val(chan, 0x04000000, 0);
		return 0;
	case 0x59:
		nv04_graph_set_ctx_val(chan, 0x04000000, 0x04000000);
		return 0;
	}
	return 1;
}

static int
nv04_graph_mthd_bind_surf_color(struct nouveau_channel *chan,
				u32 class, u32 mthd, u32 data)
{
	switch (nv_ri32(chan->dev, data << 4) & 0xff) {
	case 0x30:
		nv04_graph_set_ctx_val(chan, 0x02000000, 0);
		return 0;
	case 0x5a:
		nv04_graph_set_ctx_val(chan, 0x02000000, 0x02000000);
		return 0;
	}
	return 1;
}

static int
nv04_graph_mthd_bind_surf_zeta(struct nouveau_channel *chan,
			       u32 class, u32 mthd, u32 data)
{
	switch (nv_ri32(chan->dev, data << 4) & 0xff) {
	case 0x30:
		nv04_graph_set_ctx_val(chan, 0x04000000, 0);
		return 0;
	case 0x5b:
		nv04_graph_set_ctx_val(chan, 0x04000000, 0x04000000);
		return 0;
	}
	return 1;
}

static int
nv04_graph_mthd_bind_clip(struct nouveau_channel *chan,
			  u32 class, u32 mthd, u32 data)
{
	switch (nv_ri32(chan->dev, data << 4) & 0xff) {
	case 0x30:
		nv04_graph_set_ctx1(chan, 0x2000, 0);
		return 0;
	case 0x19:
		nv04_graph_set_ctx1(chan, 0x2000, 0x2000);
		return 0;
	}
	return 1;
}

static int
nv04_graph_mthd_bind_chroma(struct nouveau_channel *chan,
			    u32 class, u32 mthd, u32 data)
{
	switch (nv_ri32(chan->dev, data << 4) & 0xff) {
	case 0x30:
		nv04_graph_set_ctx1(chan, 0x1000, 0);
		return 0;
	/* Yes, for some reason even the old versions of objects
	 * accept 0x57 and not 0x17. Consistency be damned.
	 */
	case 0x57:
		nv04_graph_set_ctx1(chan, 0x1000, 0x1000);
		return 0;
	}
	return 1;
}

static struct nouveau_bitfield nv04_graph_intr[] = {
	{ NV_PGRAPH_INTR_NOTIFY, "NOTIFY" },
	{}
};

static struct nouveau_bitfield nv04_graph_nstatus[] = {
	{ NV04_PGRAPH_NSTATUS_STATE_IN_USE, "STATE_IN_USE" },
	{ NV04_PGRAPH_NSTATUS_INVALID_STATE, "INVALID_STATE" },
	{ NV04_PGRAPH_NSTATUS_BAD_ARGUMENT, "BAD_ARGUMENT" },
	{ NV04_PGRAPH_NSTATUS_PROTECTION_FAULT, "PROTECTION_FAULT" },
	{}
};

struct nouveau_bitfield nv04_graph_nsource[] = {
	{ NV03_PGRAPH_NSOURCE_NOTIFICATION, "NOTIFICATION" },
	{ NV03_PGRAPH_NSOURCE_DATA_ERROR, "DATA_ERROR" },
	{ NV03_PGRAPH_NSOURCE_PROTECTION_ERROR, "PROTECTION_ERROR" },
	{ NV03_PGRAPH_NSOURCE_RANGE_EXCEPTION, "RANGE_EXCEPTION" },
	{ NV03_PGRAPH_NSOURCE_LIMIT_COLOR, "LIMIT_COLOR" },
	{ NV03_PGRAPH_NSOURCE_LIMIT_ZETA, "LIMIT_ZETA" },
	{ NV03_PGRAPH_NSOURCE_ILLEGAL_MTHD, "ILLEGAL_MTHD" },
	{ NV03_PGRAPH_NSOURCE_DMA_R_PROTECTION, "DMA_R_PROTECTION" },
	{ NV03_PGRAPH_NSOURCE_DMA_W_PROTECTION, "DMA_W_PROTECTION" },
	{ NV03_PGRAPH_NSOURCE_FORMAT_EXCEPTION, "FORMAT_EXCEPTION" },
	{ NV03_PGRAPH_NSOURCE_PATCH_EXCEPTION, "PATCH_EXCEPTION" },
	{ NV03_PGRAPH_NSOURCE_STATE_INVALID, "STATE_INVALID" },
	{ NV03_PGRAPH_NSOURCE_DOUBLE_NOTIFY, "DOUBLE_NOTIFY" },
	{ NV03_PGRAPH_NSOURCE_NOTIFY_IN_USE, "NOTIFY_IN_USE" },
	{ NV03_PGRAPH_NSOURCE_METHOD_CNT, "METHOD_CNT" },
	{ NV03_PGRAPH_NSOURCE_BFR_NOTIFICATION, "BFR_NOTIFICATION" },
	{ NV03_PGRAPH_NSOURCE_DMA_VTX_PROTECTION, "DMA_VTX_PROTECTION" },
	{ NV03_PGRAPH_NSOURCE_DMA_WIDTH_A, "DMA_WIDTH_A" },
	{ NV03_PGRAPH_NSOURCE_DMA_WIDTH_B, "DMA_WIDTH_B" },
	{}
};

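/* Called from the PGRAPH interrupt handler when the hardware signals a
 * context switch: save the outgoing channel's state and load the channel
 * that PFIFO has switched to.
 */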
static void
nv04_graph_context_switch(struct drm_device *dev)
{
	struct drm_nouveau_private *dev_priv = dev->dev_private;
	struct nouveau_channel *chan = NULL;
	int chid;

	nouveau_wait_for_idle(dev);

	/* If previous context is valid, we need to save it */
	nv04_graph_unload_context(dev);

	/* Load context for next channel */
	chid = dev_priv->engine.fifo.channel_id(dev);
	chan = dev_priv->channels.ptr[chid];
	if (chan)
		nv04_graph_load_context(chan);
}

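/* PGRAPH interrupt handler: decode the trapped method, let the software
 * method dispatcher handle ILLEGAL_MTHD notifications, perform context
 * switches, and log whatever remains.
 */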
static void
nv04_graph_isr(struct drm_device *dev)
{
	u32 stat;

	while ((stat = nv_rd32(dev, NV03_PGRAPH_INTR))) {
		u32 nsource = nv_rd32(dev, NV03_PGRAPH_NSOURCE);
		u32 nstatus = nv_rd32(dev, NV03_PGRAPH_NSTATUS);
		u32 addr = nv_rd32(dev, NV04_PGRAPH_TRAPPED_ADDR);
		u32 chid = (addr & 0x0f000000) >> 24;
		u32 subc = (addr & 0x0000e000) >> 13;
		u32 mthd = (addr & 0x00001ffc);
		u32 data = nv_rd32(dev, NV04_PGRAPH_TRAPPED_DATA);
		u32 class = nv_rd32(dev, 0x400180 + subc * 4) & 0xff;
		u32 show = stat;

		if (stat & NV_PGRAPH_INTR_NOTIFY) {
			if (nsource & NV03_PGRAPH_NSOURCE_ILLEGAL_MTHD) {
				if (!nouveau_gpuobj_mthd_call2(dev, chid, class, mthd, data))
					show &= ~NV_PGRAPH_INTR_NOTIFY;
			}
		}

		if (stat & NV_PGRAPH_INTR_CONTEXT_SWITCH) {
			nv_wr32(dev, NV03_PGRAPH_INTR, NV_PGRAPH_INTR_CONTEXT_SWITCH);
			stat &= ~NV_PGRAPH_INTR_CONTEXT_SWITCH;
			show &= ~NV_PGRAPH_INTR_CONTEXT_SWITCH;
			nv04_graph_context_switch(dev);
		}

		nv_wr32(dev, NV03_PGRAPH_INTR, stat);
		nv_wr32(dev, NV04_PGRAPH_FIFO, 0x00000001);

		if (show && nouveau_ratelimit()) {
			NV_INFO(dev, "PGRAPH -");
			nouveau_bitfield_print(nv04_graph_intr, show);
			printk(" nsource:");
			nouveau_bitfield_print(nv04_graph_nsource, nsource);
			printk(" nstatus:");
			nouveau_bitfield_print(nv04_graph_nstatus, nstatus);
			printk("\n");
			NV_INFO(dev, "PGRAPH - ch %d/%d class 0x%04x "
				     "mthd 0x%04x data 0x%08x\n",
				chid, subc, class, mthd, data);
		}
	}
}

static void
nv04_graph_destroy(struct drm_device *dev, int engine)
{
	struct nv04_graph_engine *pgraph = nv_engine(dev, engine);

	nouveau_irq_unregister(dev, 12);

	NVOBJ_ENGINE_DEL(dev, GR);
	kfree(pgraph);
}

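/* Create the NV04/NV05 PGRAPH engine: fill in the engine vtable, register
 * it, hook up the PGRAPH interrupt and declare the supported object classes
 * together with their software methods.
 */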
int
nv04_graph_create(struct drm_device *dev)
{
	struct nv04_graph_engine *pgraph;

	pgraph = kzalloc(sizeof(*pgraph), GFP_KERNEL);
	if (!pgraph)
		return -ENOMEM;

	pgraph->base.destroy = nv04_graph_destroy;
	pgraph->base.init = nv04_graph_init;
	pgraph->base.fini = nv04_graph_fini;
	pgraph->base.context_new = nv04_graph_context_new;
	pgraph->base.context_del = nv04_graph_context_del;
	pgraph->base.object_new = nv04_graph_object_new;

	NVOBJ_ENGINE_ADD(dev, GR, &pgraph->base);
	nouveau_irq_register(dev, 12, nv04_graph_isr);

	/* dvd subpicture */
	NVOBJ_CLASS(dev, 0x0038, GR);

	NVOBJ_CLASS(dev, 0x0039, GR);

	NVOBJ_CLASS(dev, 0x004b, GR);
	NVOBJ_MTHD (dev, 0x004b, 0x0184, nv04_graph_mthd_bind_nv01_patt);
	NVOBJ_MTHD (dev, 0x004b, 0x0188, nv04_graph_mthd_bind_rop);
	NVOBJ_MTHD (dev, 0x004b, 0x018c, nv04_graph_mthd_bind_beta1);
	NVOBJ_MTHD (dev, 0x004b, 0x0190, nv04_graph_mthd_bind_surf_dst);
	NVOBJ_MTHD (dev, 0x004b, 0x02fc, nv04_graph_mthd_set_operation);

	NVOBJ_CLASS(dev, 0x004a, GR);
	NVOBJ_MTHD (dev, 0x004a, 0x0188, nv04_graph_mthd_bind_nv04_patt);
	NVOBJ_MTHD (dev, 0x004a, 0x018c, nv04_graph_mthd_bind_rop);
	NVOBJ_MTHD (dev, 0x004a, 0x0190, nv04_graph_mthd_bind_beta1);
	NVOBJ_MTHD (dev, 0x004a, 0x0194, nv04_graph_mthd_bind_beta4);
	NVOBJ_MTHD (dev, 0x004a, 0x0198, nv04_graph_mthd_bind_surf2d);
	NVOBJ_MTHD (dev, 0x004a, 0x02fc, nv04_graph_mthd_set_operation);

	/* nv01 imageblit */
	NVOBJ_CLASS(dev, 0x001f, GR);
	NVOBJ_MTHD (dev, 0x001f, 0x0184, nv04_graph_mthd_bind_chroma);
	NVOBJ_MTHD (dev, 0x001f, 0x0188, nv04_graph_mthd_bind_clip);
	NVOBJ_MTHD (dev, 0x001f, 0x018c, nv04_graph_mthd_bind_nv01_patt);
	NVOBJ_MTHD (dev, 0x001f, 0x0190, nv04_graph_mthd_bind_rop);
	NVOBJ_MTHD (dev, 0x001f, 0x0194, nv04_graph_mthd_bind_beta1);
	NVOBJ_MTHD (dev, 0x001f, 0x0198, nv04_graph_mthd_bind_surf_dst);
	NVOBJ_MTHD (dev, 0x001f, 0x019c, nv04_graph_mthd_bind_surf_src);
	NVOBJ_MTHD (dev, 0x001f, 0x02fc, nv04_graph_mthd_set_operation);

	/* nv04 imageblit */
	NVOBJ_CLASS(dev, 0x005f, GR);
	NVOBJ_MTHD (dev, 0x005f, 0x0184, nv04_graph_mthd_bind_chroma);
	NVOBJ_MTHD (dev, 0x005f, 0x0188, nv04_graph_mthd_bind_clip);
	NVOBJ_MTHD (dev, 0x005f, 0x018c, nv04_graph_mthd_bind_nv04_patt);
	NVOBJ_MTHD (dev, 0x005f, 0x0190, nv04_graph_mthd_bind_rop);
	NVOBJ_MTHD (dev, 0x005f, 0x0194, nv04_graph_mthd_bind_beta1);
	NVOBJ_MTHD (dev, 0x005f, 0x0198, nv04_graph_mthd_bind_beta4);
	NVOBJ_MTHD (dev, 0x005f, 0x019c, nv04_graph_mthd_bind_surf2d);
	NVOBJ_MTHD (dev, 0x005f, 0x02fc, nv04_graph_mthd_set_operation);

	NVOBJ_CLASS(dev, 0x0060, GR);
	NVOBJ_MTHD (dev, 0x0060, 0x0188, nv04_graph_mthd_bind_chroma);
	NVOBJ_MTHD (dev, 0x0060, 0x018c, nv04_graph_mthd_bind_clip);
	NVOBJ_MTHD (dev, 0x0060, 0x0190, nv04_graph_mthd_bind_nv04_patt);
	NVOBJ_MTHD (dev, 0x0060, 0x0194, nv04_graph_mthd_bind_rop);
	NVOBJ_MTHD (dev, 0x0060, 0x0198, nv04_graph_mthd_bind_beta1);
	NVOBJ_MTHD (dev, 0x0060, 0x019c, nv04_graph_mthd_bind_beta4);
	NVOBJ_MTHD (dev, 0x0060, 0x01a0, nv04_graph_mthd_bind_surf2d_swzsurf);
	NVOBJ_MTHD (dev, 0x0060, 0x03e4, nv04_graph_mthd_set_operation);

	NVOBJ_CLASS(dev, 0x0064, GR);

	NVOBJ_CLASS(dev, 0x0021, GR);
	NVOBJ_MTHD (dev, 0x0021, 0x0184, nv04_graph_mthd_bind_chroma);
	NVOBJ_MTHD (dev, 0x0021, 0x0188, nv04_graph_mthd_bind_clip);
	NVOBJ_MTHD (dev, 0x0021, 0x018c, nv04_graph_mthd_bind_nv01_patt);
	NVOBJ_MTHD (dev, 0x0021, 0x0190, nv04_graph_mthd_bind_rop);
	NVOBJ_MTHD (dev, 0x0021, 0x0194, nv04_graph_mthd_bind_beta1);
	NVOBJ_MTHD (dev, 0x0021, 0x0198, nv04_graph_mthd_bind_surf_dst);
	NVOBJ_MTHD (dev, 0x0021, 0x02fc, nv04_graph_mthd_set_operation);

	NVOBJ_CLASS(dev, 0x0061, GR);
	NVOBJ_MTHD (dev, 0x0061, 0x0184, nv04_graph_mthd_bind_chroma);
	NVOBJ_MTHD (dev, 0x0061, 0x0188, nv04_graph_mthd_bind_clip);
	NVOBJ_MTHD (dev, 0x0061, 0x018c, nv04_graph_mthd_bind_nv04_patt);
	NVOBJ_MTHD (dev, 0x0061, 0x0190, nv04_graph_mthd_bind_rop);
	NVOBJ_MTHD (dev, 0x0061, 0x0194, nv04_graph_mthd_bind_beta1);
	NVOBJ_MTHD (dev, 0x0061, 0x0198, nv04_graph_mthd_bind_beta4);
	NVOBJ_MTHD (dev, 0x0061, 0x019c, nv04_graph_mthd_bind_surf2d);
	NVOBJ_MTHD (dev, 0x0061, 0x02fc, nv04_graph_mthd_set_operation);

	NVOBJ_CLASS(dev, 0x0065, GR);

	NVOBJ_CLASS(dev, 0x0036, GR);
	NVOBJ_MTHD (dev, 0x0036, 0x0184, nv04_graph_mthd_bind_chroma);
	NVOBJ_MTHD (dev, 0x0036, 0x0188, nv04_graph_mthd_bind_nv01_patt);
	NVOBJ_MTHD (dev, 0x0036, 0x018c, nv04_graph_mthd_bind_rop);
	NVOBJ_MTHD (dev, 0x0036, 0x0190, nv04_graph_mthd_bind_beta1);
	NVOBJ_MTHD (dev, 0x0036, 0x0194, nv04_graph_mthd_bind_surf_dst);
	NVOBJ_MTHD (dev, 0x0036, 0x02fc, nv04_graph_mthd_set_operation);

	NVOBJ_CLASS(dev, 0x0076, GR);
	NVOBJ_MTHD (dev, 0x0076, 0x0184, nv04_graph_mthd_bind_chroma);
	NVOBJ_MTHD (dev, 0x0076, 0x0188, nv04_graph_mthd_bind_nv04_patt);
	NVOBJ_MTHD (dev, 0x0076, 0x018c, nv04_graph_mthd_bind_rop);
	NVOBJ_MTHD (dev, 0x0076, 0x0190, nv04_graph_mthd_bind_beta1);
	NVOBJ_MTHD (dev, 0x0076, 0x0194, nv04_graph_mthd_bind_beta4);
	NVOBJ_MTHD (dev, 0x0076, 0x0198, nv04_graph_mthd_bind_surf2d);
	NVOBJ_MTHD (dev, 0x0076, 0x02fc, nv04_graph_mthd_set_operation);

	NVOBJ_CLASS(dev, 0x0066, GR);

	NVOBJ_CLASS(dev, 0x0037, GR);
	NVOBJ_MTHD (dev, 0x0037, 0x0188, nv04_graph_mthd_bind_nv01_patt);
	NVOBJ_MTHD (dev, 0x0037, 0x018c, nv04_graph_mthd_bind_rop);
	NVOBJ_MTHD (dev, 0x0037, 0x0190, nv04_graph_mthd_bind_beta1);
	NVOBJ_MTHD (dev, 0x0037, 0x0194, nv04_graph_mthd_bind_surf_dst);
	NVOBJ_MTHD (dev, 0x0037, 0x0304, nv04_graph_mthd_set_operation);

	NVOBJ_CLASS(dev, 0x0077, GR);
	NVOBJ_MTHD (dev, 0x0077, 0x0188, nv04_graph_mthd_bind_nv04_patt);
	NVOBJ_MTHD (dev, 0x0077, 0x018c, nv04_graph_mthd_bind_rop);
	NVOBJ_MTHD (dev, 0x0077, 0x0190, nv04_graph_mthd_bind_beta1);
	NVOBJ_MTHD (dev, 0x0077, 0x0194, nv04_graph_mthd_bind_beta4);
	NVOBJ_MTHD (dev, 0x0077, 0x0198, nv04_graph_mthd_bind_surf2d_swzsurf);
	NVOBJ_MTHD (dev, 0x0077, 0x0304, nv04_graph_mthd_set_operation);

	NVOBJ_CLASS(dev, 0x0030, GR);
	NVOBJ_CLASS(dev, 0x0042, GR);
	NVOBJ_CLASS(dev, 0x0043, GR);
	NVOBJ_CLASS(dev, 0x0012, GR);
	NVOBJ_CLASS(dev, 0x0072, GR);
	NVOBJ_CLASS(dev, 0x0019, GR);
	NVOBJ_CLASS(dev, 0x0018, GR);
	NVOBJ_CLASS(dev, 0x0044, GR);
	NVOBJ_CLASS(dev, 0x0052, GR);

	NVOBJ_CLASS(dev, 0x0053, GR);
	NVOBJ_MTHD (dev, 0x0053, 0x02f8, nv04_graph_mthd_surf3d_clip_h);
	NVOBJ_MTHD (dev, 0x0053, 0x02fc, nv04_graph_mthd_surf3d_clip_v);

	NVOBJ_CLASS(dev, 0x0048, GR);
	NVOBJ_MTHD (dev, 0x0048, 0x0188, nv04_graph_mthd_bind_clip);
	NVOBJ_MTHD (dev, 0x0048, 0x018c, nv04_graph_mthd_bind_surf_color);
	NVOBJ_MTHD (dev, 0x0048, 0x0190, nv04_graph_mthd_bind_surf_zeta);

	NVOBJ_CLASS(dev, 0x0054, GR);
	NVOBJ_CLASS(dev, 0x0055, GR);
	NVOBJ_CLASS(dev, 0x0017, GR);
	NVOBJ_CLASS(dev, 0x0057, GR);
	NVOBJ_CLASS(dev, 0x0058, GR);
	NVOBJ_CLASS(dev, 0x0059, GR);
	NVOBJ_CLASS(dev, 0x005a, GR);
	NVOBJ_CLASS(dev, 0x005b, GR);

	NVOBJ_CLASS(dev, 0x001c, GR);
	NVOBJ_MTHD (dev, 0x001c, 0x0184, nv04_graph_mthd_bind_clip);
	NVOBJ_MTHD (dev, 0x001c, 0x0188, nv04_graph_mthd_bind_nv01_patt);
	NVOBJ_MTHD (dev, 0x001c, 0x018c, nv04_graph_mthd_bind_rop);
	NVOBJ_MTHD (dev, 0x001c, 0x0190, nv04_graph_mthd_bind_beta1);
	NVOBJ_MTHD (dev, 0x001c, 0x0194, nv04_graph_mthd_bind_surf_dst);
	NVOBJ_MTHD (dev, 0x001c, 0x02fc, nv04_graph_mthd_set_operation);

	NVOBJ_CLASS(dev, 0x005c, GR);
	NVOBJ_MTHD (dev, 0x005c, 0x0184, nv04_graph_mthd_bind_clip);
	NVOBJ_MTHD (dev, 0x005c, 0x0188, nv04_graph_mthd_bind_nv04_patt);
	NVOBJ_MTHD (dev, 0x005c, 0x018c, nv04_graph_mthd_bind_rop);
	NVOBJ_MTHD (dev, 0x005c, 0x0190, nv04_graph_mthd_bind_beta1);
	NVOBJ_MTHD (dev, 0x005c, 0x0194, nv04_graph_mthd_bind_beta4);
	NVOBJ_MTHD (dev, 0x005c, 0x0198, nv04_graph_mthd_bind_surf2d);
	NVOBJ_MTHD (dev, 0x005c, 0x02fc, nv04_graph_mthd_set_operation);

	NVOBJ_CLASS(dev, 0x001d, GR);
	NVOBJ_MTHD (dev, 0x001d, 0x0184, nv04_graph_mthd_bind_clip);
	NVOBJ_MTHD (dev, 0x001d, 0x0188, nv04_graph_mthd_bind_nv01_patt);
	NVOBJ_MTHD (dev, 0x001d, 0x018c, nv04_graph_mthd_bind_rop);
	NVOBJ_MTHD (dev, 0x001d, 0x0190, nv04_graph_mthd_bind_beta1);
	NVOBJ_MTHD (dev, 0x001d, 0x0194, nv04_graph_mthd_bind_surf_dst);
	NVOBJ_MTHD (dev, 0x001d, 0x02fc, nv04_graph_mthd_set_operation);

	NVOBJ_CLASS(dev, 0x005d, GR);
	NVOBJ_MTHD (dev, 0x005d, 0x0184, nv04_graph_mthd_bind_clip);
	NVOBJ_MTHD (dev, 0x005d, 0x0188, nv04_graph_mthd_bind_nv04_patt);
	NVOBJ_MTHD (dev, 0x005d, 0x018c, nv04_graph_mthd_bind_rop);
	NVOBJ_MTHD (dev, 0x005d, 0x0190, nv04_graph_mthd_bind_beta1);
	NVOBJ_MTHD (dev, 0x005d, 0x0194, nv04_graph_mthd_bind_beta4);
	NVOBJ_MTHD (dev, 0x005d, 0x0198, nv04_graph_mthd_bind_surf2d);
	NVOBJ_MTHD (dev, 0x005d, 0x02fc, nv04_graph_mthd_set_operation);

	NVOBJ_CLASS(dev, 0x001e, GR);
	NVOBJ_MTHD (dev, 0x001e, 0x0184, nv04_graph_mthd_bind_clip);
	NVOBJ_MTHD (dev, 0x001e, 0x0188, nv04_graph_mthd_bind_nv01_patt);
	NVOBJ_MTHD (dev, 0x001e, 0x018c, nv04_graph_mthd_bind_rop);
	NVOBJ_MTHD (dev, 0x001e, 0x0190, nv04_graph_mthd_bind_beta1);
	NVOBJ_MTHD (dev, 0x001e, 0x0194, nv04_graph_mthd_bind_surf_dst);
	NVOBJ_MTHD (dev, 0x001e, 0x02fc, nv04_graph_mthd_set_operation);

	NVOBJ_CLASS(dev, 0x005e, GR);
	NVOBJ_MTHD (dev, 0x005e, 0x0184, nv04_graph_mthd_bind_clip);
	NVOBJ_MTHD (dev, 0x005e, 0x0188, nv04_graph_mthd_bind_nv04_patt);
	NVOBJ_MTHD (dev, 0x005e, 0x018c, nv04_graph_mthd_bind_rop);
	NVOBJ_MTHD (dev, 0x005e, 0x0190, nv04_graph_mthd_bind_beta1);
	NVOBJ_MTHD (dev, 0x005e, 0x0194, nv04_graph_mthd_bind_beta4);
	NVOBJ_MTHD (dev, 0x005e, 0x0198, nv04_graph_mthd_bind_surf2d);
	NVOBJ_MTHD (dev, 0x005e, 0x02fc, nv04_graph_mthd_set_operation);

	NVOBJ_CLASS(dev, 0x506e, SW);
	NVOBJ_MTHD (dev, 0x506e, 0x0150, nv04_graph_mthd_set_ref);
	NVOBJ_MTHD (dev, 0x506e, 0x0500, nv04_graph_mthd_page_flip);

	return 0;
}