/*
 * Copyright 2010 Christoph Bumiller
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
 * WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF
 * OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
 * SOFTWARE.
 */

#include "pipe/p_defines.h"
#include "util/u_inlines.h"
#include "util/u_transfer.h"

#include "tgsi/tgsi_parse.h"

#include "nvc0_stateobj.h"
#include "nvc0_context.h"

#include "nvc0_3d.xml.h"
#include "nv50/nv50_texture.xml.h"

#include "nouveau/nouveau_gldefs.h"

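/* Convert a gallium PIPE_MASK_* colour mask into the hardware layout,
 * which uses one nibble per component (R, G, B, A from low to high).
 */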
static INLINE uint32_t
nvc0_colormask(unsigned mask)
{
   uint32_t ret = 0;

   if (mask & PIPE_MASK_R)
      ret |= 0x0001;
   if (mask & PIPE_MASK_G)
      ret |= 0x0010;
   if (mask & PIPE_MASK_B)
      ret |= 0x0100;
   if (mask & PIPE_MASK_A)
      ret |= 0x1000;

   return ret;
}

#define NVC0_BLEND_FACTOR_CASE(a, b) \
   case PIPE_BLENDFACTOR_##a: return NV50_3D_BLEND_FACTOR_##b

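/* Translate a gallium PIPE_BLENDFACTOR_* value into the corresponding
 * NV50_3D_BLEND_FACTOR_* hardware enum; unknown values fall back to ZERO.
 */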
static INLINE uint32_t
nvc0_blend_fac(unsigned factor)
{
   switch (factor) {
   NVC0_BLEND_FACTOR_CASE(ONE, ONE);
   NVC0_BLEND_FACTOR_CASE(SRC_COLOR, SRC_COLOR);
   NVC0_BLEND_FACTOR_CASE(SRC_ALPHA, SRC_ALPHA);
   NVC0_BLEND_FACTOR_CASE(DST_ALPHA, DST_ALPHA);
   NVC0_BLEND_FACTOR_CASE(DST_COLOR, DST_COLOR);
   NVC0_BLEND_FACTOR_CASE(SRC_ALPHA_SATURATE, SRC_ALPHA_SATURATE);
   NVC0_BLEND_FACTOR_CASE(CONST_COLOR, CONSTANT_COLOR);
   NVC0_BLEND_FACTOR_CASE(CONST_ALPHA, CONSTANT_ALPHA);
   NVC0_BLEND_FACTOR_CASE(SRC1_COLOR, SRC1_COLOR);
   NVC0_BLEND_FACTOR_CASE(SRC1_ALPHA, SRC1_ALPHA);
   NVC0_BLEND_FACTOR_CASE(ZERO, ZERO);
   NVC0_BLEND_FACTOR_CASE(INV_SRC_COLOR, ONE_MINUS_SRC_COLOR);
   NVC0_BLEND_FACTOR_CASE(INV_SRC_ALPHA, ONE_MINUS_SRC_ALPHA);
   NVC0_BLEND_FACTOR_CASE(INV_DST_ALPHA, ONE_MINUS_DST_ALPHA);
   NVC0_BLEND_FACTOR_CASE(INV_DST_COLOR, ONE_MINUS_DST_COLOR);
   NVC0_BLEND_FACTOR_CASE(INV_CONST_COLOR, ONE_MINUS_CONSTANT_COLOR);
   NVC0_BLEND_FACTOR_CASE(INV_CONST_ALPHA, ONE_MINUS_CONSTANT_ALPHA);
   NVC0_BLEND_FACTOR_CASE(INV_SRC1_COLOR, ONE_MINUS_SRC1_COLOR);
   NVC0_BLEND_FACTOR_CASE(INV_SRC1_ALPHA, ONE_MINUS_SRC1_ALPHA);
   default:
      return NV50_3D_BLEND_FACTOR_ZERO;
   }
}

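/* Build the blend state object. The SB_* macros append 3D-class methods to
 * the stateobj's command buffer (so->state), which the driver emits to the
 * channel when the blend state is later bound and validated.
 */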
static void *
nvc0_blend_state_create(struct pipe_context *pipe,
                        const struct pipe_blend_state *cso)
{
   struct nvc0_blend_stateobj *so = CALLOC_STRUCT(nvc0_blend_stateobj);
   int i;
   int r; /* reference */
   uint32_t ms;
   uint8_t blend_en = 0;
   boolean indep_masks = FALSE;
   boolean indep_funcs = FALSE;

   so->pipe = *cso;

   /* check which states actually have differing values */
   if (cso->independent_blend_enable) {
      for (r = 0; r < 8 && !cso->rt[r].blend_enable; ++r);
      blend_en |= 1 << r;
      for (i = r + 1; i < 8; ++i) {
         if (!cso->rt[i].blend_enable)
            continue;
         blend_en |= 1 << i;
         if (cso->rt[i].rgb_func != cso->rt[r].rgb_func ||
             cso->rt[i].rgb_src_factor != cso->rt[r].rgb_src_factor ||
             cso->rt[i].rgb_dst_factor != cso->rt[r].rgb_dst_factor ||
             cso->rt[i].alpha_func != cso->rt[r].alpha_func ||
             cso->rt[i].alpha_src_factor != cso->rt[r].alpha_src_factor ||
             cso->rt[i].alpha_dst_factor != cso->rt[r].alpha_dst_factor) {
            indep_funcs = TRUE;
            break;
         }
      }
      for (; i < 8; ++i)
         blend_en |= (cso->rt[i].blend_enable ? 1 : 0) << i;

      for (i = 1; i < 8; ++i) {
         if (cso->rt[i].colormask != cso->rt[0].colormask) {
            indep_masks = TRUE;
            break;
         }
      }
   } else {
      r = 0;
      if (cso->rt[0].blend_enable)
         blend_en = 0xff;
   }

   if (cso->logicop_enable) {
      SB_BEGIN_3D(so, LOGIC_OP_ENABLE, 2);
      SB_DATA    (so, 1);
      SB_DATA    (so, nvgl_logicop_func(cso->logicop_func));

      SB_IMMED_3D(so, MACRO_BLEND_ENABLES, 0);
   } else {
      SB_IMMED_3D(so, LOGIC_OP_ENABLE, 0);

      SB_IMMED_3D(so, BLEND_INDEPENDENT, indep_funcs);
      SB_IMMED_3D(so, MACRO_BLEND_ENABLES, blend_en);
      if (indep_funcs) {
         for (i = 0; i < 8; ++i) {
            if (cso->rt[i].blend_enable) {
               SB_BEGIN_3D(so, IBLEND_EQUATION_RGB(i), 6);
               SB_DATA    (so, nvgl_blend_eqn(cso->rt[i].rgb_func));
               SB_DATA    (so, nvc0_blend_fac(cso->rt[i].rgb_src_factor));
               SB_DATA    (so, nvc0_blend_fac(cso->rt[i].rgb_dst_factor));
               SB_DATA    (so, nvgl_blend_eqn(cso->rt[i].alpha_func));
               SB_DATA    (so, nvc0_blend_fac(cso->rt[i].alpha_src_factor));
               SB_DATA    (so, nvc0_blend_fac(cso->rt[i].alpha_dst_factor));
            }
         }
      } else
      if (blend_en) {
         SB_BEGIN_3D(so, BLEND_EQUATION_RGB, 5);
         SB_DATA    (so, nvgl_blend_eqn(cso->rt[r].rgb_func));
         SB_DATA    (so, nvc0_blend_fac(cso->rt[r].rgb_src_factor));
         SB_DATA    (so, nvc0_blend_fac(cso->rt[r].rgb_dst_factor));
         SB_DATA    (so, nvgl_blend_eqn(cso->rt[r].alpha_func));
         SB_DATA    (so, nvc0_blend_fac(cso->rt[r].alpha_src_factor));
         SB_BEGIN_3D(so, BLEND_FUNC_DST_ALPHA, 1);
         SB_DATA    (so, nvc0_blend_fac(cso->rt[r].alpha_dst_factor));
      }

      SB_IMMED_3D(so, COLOR_MASK_COMMON, !indep_masks);
      if (indep_masks) {
         SB_BEGIN_3D(so, COLOR_MASK(0), 8);
         for (i = 0; i < 8; ++i)
            SB_DATA(so, nvc0_colormask(cso->rt[i].colormask));
      } else {
         SB_BEGIN_3D(so, COLOR_MASK(0), 1);
         SB_DATA    (so, nvc0_colormask(cso->rt[0].colormask));
      }
   }

   ms = 0;
   if (cso->alpha_to_coverage)
      ms |= NVC0_3D_MULTISAMPLE_CTRL_ALPHA_TO_COVERAGE;
   if (cso->alpha_to_one)
      ms |= NVC0_3D_MULTISAMPLE_CTRL_ALPHA_TO_ONE;

   SB_BEGIN_3D(so, MULTISAMPLE_CTRL, 1);
   SB_DATA    (so, ms);

   assert(so->size <= (sizeof(so->state) / sizeof(so->state[0])));
   return so;
}

static void
nvc0_blend_state_bind(struct pipe_context *pipe, void *hwcso)
{
   struct nvc0_context *nvc0 = nvc0_context(pipe);

   nvc0->blend = hwcso;
   nvc0->dirty |= NVC0_NEW_BLEND;
}

static void
nvc0_blend_state_delete(struct pipe_context *pipe, void *hwcso)
{
   FREE(hwcso);
}

/* NOTE: ignoring line_last_pixel, using FALSE (set on screen init) */
static void *
nvc0_rasterizer_state_create(struct pipe_context *pipe,
                             const struct pipe_rasterizer_state *cso)
{
   struct nvc0_rasterizer_stateobj *so;
   uint32_t reg;

   so = CALLOC_STRUCT(nvc0_rasterizer_stateobj);
   if (!so)
      return NULL;
   so->pipe = *cso;

   /* Scissor enables are handled in the scissor state; we do not want to
    * always emit 16 commands, one for each scissor rectangle, here.
    */

   SB_BEGIN_3D(so, SHADE_MODEL, 1);
   SB_DATA    (so, cso->flatshade ? NVC0_3D_SHADE_MODEL_FLAT :
                                    NVC0_3D_SHADE_MODEL_SMOOTH);
   SB_IMMED_3D(so, PROVOKING_VERTEX_LAST, !cso->flatshade_first);
   SB_IMMED_3D(so, VERTEX_TWO_SIDE_ENABLE, cso->light_twoside);

   SB_IMMED_3D(so, VERT_COLOR_CLAMP_EN, cso->clamp_vertex_color);
   SB_BEGIN_3D(so, FRAG_COLOR_CLAMP_EN, 1);
   SB_DATA    (so, cso->clamp_fragment_color ? 0x11111111 : 0x00000000);

   SB_IMMED_3D(so, MULTISAMPLE_ENABLE, cso->multisample);

   SB_IMMED_3D(so, LINE_SMOOTH_ENABLE, cso->line_smooth);
   if (cso->line_smooth)
      SB_BEGIN_3D(so, LINE_WIDTH_SMOOTH, 1);
   else
      SB_BEGIN_3D(so, LINE_WIDTH_ALIASED, 1);
   SB_DATA    (so, fui(cso->line_width));

   SB_IMMED_3D(so, LINE_STIPPLE_ENABLE, cso->line_stipple_enable);
   if (cso->line_stipple_enable) {
      SB_BEGIN_3D(so, LINE_STIPPLE_PATTERN, 1);
      SB_DATA    (so, (cso->line_stipple_pattern << 8) |
                      cso->line_stipple_factor);
   }

   SB_IMMED_3D(so, VP_POINT_SIZE_EN, cso->point_size_per_vertex);
   if (!cso->point_size_per_vertex) {
      SB_BEGIN_3D(so, POINT_SIZE, 1);
      SB_DATA    (so, fui(cso->point_size));
   }

   reg = (cso->sprite_coord_mode == PIPE_SPRITE_COORD_UPPER_LEFT) ?
      NVC0_3D_POINT_COORD_REPLACE_COORD_ORIGIN_UPPER_LEFT :
      NVC0_3D_POINT_COORD_REPLACE_COORD_ORIGIN_LOWER_LEFT;

   SB_BEGIN_3D(so, POINT_COORD_REPLACE, 1);
   SB_DATA    (so, ((cso->sprite_coord_enable & 0xff) << 3) | reg);
   SB_IMMED_3D(so, POINT_SPRITE_ENABLE, cso->point_quad_rasterization);
   SB_IMMED_3D(so, POINT_SMOOTH_ENABLE, cso->point_smooth);

   SB_BEGIN_3D(so, MACRO_POLYGON_MODE_FRONT, 1);
   SB_DATA    (so, nvgl_polygon_mode(cso->fill_front));
   SB_BEGIN_3D(so, MACRO_POLYGON_MODE_BACK, 1);
   SB_DATA    (so, nvgl_polygon_mode(cso->fill_back));
   SB_IMMED_3D(so, POLYGON_SMOOTH_ENABLE, cso->poly_smooth);

   SB_BEGIN_3D(so, CULL_FACE_ENABLE, 3);
   SB_DATA    (so, cso->cull_face != PIPE_FACE_NONE);
   SB_DATA    (so, cso->front_ccw ? NVC0_3D_FRONT_FACE_CCW :
                                    NVC0_3D_FRONT_FACE_CW);
   switch (cso->cull_face) {
   case PIPE_FACE_FRONT_AND_BACK:
      SB_DATA(so, NVC0_3D_CULL_FACE_FRONT_AND_BACK);
      break;
   case PIPE_FACE_FRONT:
      SB_DATA(so, NVC0_3D_CULL_FACE_FRONT);
      break;
   case PIPE_FACE_BACK:
   default:
      SB_DATA(so, NVC0_3D_CULL_FACE_BACK);
      break;
   }

   SB_IMMED_3D(so, POLYGON_STIPPLE_ENABLE, cso->poly_stipple_enable);
   SB_BEGIN_3D(so, POLYGON_OFFSET_POINT_ENABLE, 3);
   SB_DATA    (so, cso->offset_point);
   SB_DATA    (so, cso->offset_line);
   SB_DATA    (so, cso->offset_tri);

   if (cso->offset_point || cso->offset_line || cso->offset_tri) {
      SB_BEGIN_3D(so, POLYGON_OFFSET_FACTOR, 1);
      SB_DATA    (so, fui(cso->offset_scale));
      SB_BEGIN_3D(so, POLYGON_OFFSET_UNITS, 1);
      SB_DATA    (so, fui(cso->offset_units * 2.0f));
      SB_BEGIN_3D(so, POLYGON_OFFSET_CLAMP, 1);
      SB_DATA    (so, fui(cso->offset_clamp));
   }

   if (cso->depth_clip)
      reg = NVC0_3D_VIEW_VOLUME_CLIP_CTRL_UNK1_UNK1;
   else
      reg =
         NVC0_3D_VIEW_VOLUME_CLIP_CTRL_UNK1_UNK1 |
         NVC0_3D_VIEW_VOLUME_CLIP_CTRL_DEPTH_CLAMP_NEAR |
         NVC0_3D_VIEW_VOLUME_CLIP_CTRL_DEPTH_CLAMP_FAR |
         NVC0_3D_VIEW_VOLUME_CLIP_CTRL_UNK12_UNK2;

   SB_BEGIN_3D(so, VIEW_VOLUME_CLIP_CTRL, 1);
   SB_DATA    (so, reg);

   assert(so->size <= (sizeof(so->state) / sizeof(so->state[0])));
   return (void *)so;
}

static void
nvc0_rasterizer_state_bind(struct pipe_context *pipe, void *hwcso)
{
   struct nvc0_context *nvc0 = nvc0_context(pipe);

   nvc0->rast = hwcso;
   nvc0->dirty |= NVC0_NEW_RASTERIZER;
}

static void
nvc0_rasterizer_state_delete(struct pipe_context *pipe, void *hwcso)
{
   FREE(hwcso);
}

static void *
nvc0_zsa_state_create(struct pipe_context *pipe,
                      const struct pipe_depth_stencil_alpha_state *cso)
{
   struct nvc0_zsa_stateobj *so = CALLOC_STRUCT(nvc0_zsa_stateobj);

   so->pipe = *cso;

   SB_IMMED_3D(so, DEPTH_TEST_ENABLE, cso->depth.enabled);
   if (cso->depth.enabled) {
      SB_IMMED_3D(so, DEPTH_WRITE_ENABLE, cso->depth.writemask);
      SB_BEGIN_3D(so, DEPTH_TEST_FUNC, 1);
      SB_DATA    (so, nvgl_comparison_op(cso->depth.func));
   }

   if (cso->stencil[0].enabled) {
      SB_BEGIN_3D(so, STENCIL_ENABLE, 5);
      SB_DATA    (so, 1);
      SB_DATA    (so, nvgl_stencil_op(cso->stencil[0].fail_op));
      SB_DATA    (so, nvgl_stencil_op(cso->stencil[0].zfail_op));
      SB_DATA    (so, nvgl_stencil_op(cso->stencil[0].zpass_op));
      SB_DATA    (so, nvgl_comparison_op(cso->stencil[0].func));
      SB_BEGIN_3D(so, STENCIL_FRONT_FUNC_MASK, 2);
      SB_DATA    (so, cso->stencil[0].valuemask);
      SB_DATA    (so, cso->stencil[0].writemask);
   } else {
      SB_IMMED_3D(so, STENCIL_ENABLE, 0);
   }

   if (cso->stencil[1].enabled) {
      assert(cso->stencil[0].enabled);
      SB_BEGIN_3D(so, STENCIL_TWO_SIDE_ENABLE, 5);
      SB_DATA    (so, 1);
      SB_DATA    (so, nvgl_stencil_op(cso->stencil[1].fail_op));
      SB_DATA    (so, nvgl_stencil_op(cso->stencil[1].zfail_op));
      SB_DATA    (so, nvgl_stencil_op(cso->stencil[1].zpass_op));
      SB_DATA    (so, nvgl_comparison_op(cso->stencil[1].func));
      SB_BEGIN_3D(so, STENCIL_BACK_MASK, 2);
      SB_DATA    (so, cso->stencil[1].writemask);
      SB_DATA    (so, cso->stencil[1].valuemask);
   } else
   if (cso->stencil[0].enabled) {
      SB_IMMED_3D(so, STENCIL_TWO_SIDE_ENABLE, 0);
   }

   SB_IMMED_3D(so, ALPHA_TEST_ENABLE, cso->alpha.enabled);
   if (cso->alpha.enabled) {
      SB_BEGIN_3D(so, ALPHA_TEST_REF, 2);
      SB_DATA    (so, fui(cso->alpha.ref_value));
      SB_DATA    (so, nvgl_comparison_op(cso->alpha.func));
   }

   assert(so->size <= (sizeof(so->state) / sizeof(so->state[0])));
   return (void *)so;
}

static void
nvc0_zsa_state_bind(struct pipe_context *pipe, void *hwcso)
{
   struct nvc0_context *nvc0 = nvc0_context(pipe);

   nvc0->zsa = hwcso;
   nvc0->dirty |= NVC0_NEW_ZSA;
}

static void
nvc0_zsa_state_delete(struct pipe_context *pipe, void *hwcso)
{
   FREE(hwcso);
}

/* ====================== SAMPLERS AND TEXTURES ================================
 */

#define NV50_TSC_WRAP_CASE(n) \
   case PIPE_TEX_WRAP_##n: return NV50_TSC_WRAP_##n

static INLINE unsigned
nv50_tsc_wrap_mode(unsigned wrap)
{
   switch (wrap) {
   NV50_TSC_WRAP_CASE(REPEAT);
   NV50_TSC_WRAP_CASE(MIRROR_REPEAT);
   NV50_TSC_WRAP_CASE(CLAMP_TO_EDGE);
   NV50_TSC_WRAP_CASE(CLAMP_TO_BORDER);
   NV50_TSC_WRAP_CASE(CLAMP);
   NV50_TSC_WRAP_CASE(MIRROR_CLAMP_TO_EDGE);
   NV50_TSC_WRAP_CASE(MIRROR_CLAMP_TO_BORDER);
   NV50_TSC_WRAP_CASE(MIRROR_CLAMP);
   default:
      NOUVEAU_ERR("unknown wrap mode: %d\n", wrap);
      return NV50_TSC_WRAP_REPEAT;
   }
}

static void
nvc0_sampler_state_delete(struct pipe_context *pipe, void *hwcso)
{
   unsigned s, i;

   for (s = 0; s < 5; ++s)
      for (i = 0; i < nvc0_context(pipe)->num_samplers[s]; ++i)
         if (nvc0_context(pipe)->samplers[s][i] == hwcso)
            nvc0_context(pipe)->samplers[s][i] = NULL;

   nvc0_screen_tsc_free(nvc0_context(pipe)->screen, nv50_tsc_entry(hwcso));

   FREE(hwcso);
}

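/* Bind sampler states for one shader stage. The stage index s matches the
 * wrappers below: 0 for vertex, 3 for geometry and 4 for fragment samplers.
 * Entries that change are marked in samplers_dirty for revalidation, and
 * replaced entries are unlocked via nvc0_screen_tsc_unlock().
 */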
static INLINE void
nvc0_stage_sampler_states_bind(struct nvc0_context *nvc0, int s,
                               unsigned nr, void **hwcso)
{
   unsigned i;

   for (i = 0; i < nr; ++i) {
      struct nv50_tsc_entry *old = nvc0->samplers[s][i];

      if (hwcso[i] == old)
         continue;
      nvc0->samplers_dirty[s] |= 1 << i;

      nvc0->samplers[s][i] = nv50_tsc_entry(hwcso[i]);
      if (old)
         nvc0_screen_tsc_unlock(nvc0->screen, old);
   }
   for (; i < nvc0->num_samplers[s]; ++i) {
      if (nvc0->samplers[s][i]) {
         nvc0_screen_tsc_unlock(nvc0->screen, nvc0->samplers[s][i]);
         nvc0->samplers[s][i] = NULL;
      }
   }

   nvc0->num_samplers[s] = nr;

   nvc0->dirty |= NVC0_NEW_SAMPLERS;
}

static void
nvc0_vp_sampler_states_bind(struct pipe_context *pipe, unsigned nr, void **s)
{
   nvc0_stage_sampler_states_bind(nvc0_context(pipe), 0, nr, s);
}

static void
nvc0_fp_sampler_states_bind(struct pipe_context *pipe, unsigned nr, void **s)
{
   nvc0_stage_sampler_states_bind(nvc0_context(pipe), 4, nr, s);
}

static void
nvc0_gp_sampler_states_bind(struct pipe_context *pipe, unsigned nr, void **s)
{
   nvc0_stage_sampler_states_bind(nvc0_context(pipe), 3, nr, s);
}

/* NOTE: only called when not referenced anywhere, won't be bound */
static void
nvc0_sampler_view_destroy(struct pipe_context *pipe,
                          struct pipe_sampler_view *view)
{
   pipe_resource_reference(&view->texture, NULL);

   nvc0_screen_tic_free(nvc0_context(pipe)->screen, nv50_tic_entry(view));

   FREE(nv50_tic_entry(view));
}

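/* Set the texture (TIC) views for one shader stage. Views that change are
 * marked in textures_dirty, the old entries are dropped from the 3D buffer
 * context and unlocked, and views beyond the new count are released.
 */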
static INLINE void
nvc0_stage_set_sampler_views(struct nvc0_context *nvc0, int s,
                             unsigned nr,
                             struct pipe_sampler_view **views)
{
   unsigned i;

   for (i = 0; i < nr; ++i) {
      struct nv50_tic_entry *old = nv50_tic_entry(nvc0->textures[s][i]);

      if (views[i] == nvc0->textures[s][i])
         continue;
      nvc0->textures_dirty[s] |= 1 << i;

      if (old) {
         nouveau_bufctx_reset(nvc0->bufctx_3d, NVC0_BIND_TEX(s, i));
         nvc0_screen_tic_unlock(nvc0->screen, old);
      }

      pipe_sampler_view_reference(&nvc0->textures[s][i], views[i]);
   }

   for (i = nr; i < nvc0->num_textures[s]; ++i) {
      struct nv50_tic_entry *old = nv50_tic_entry(nvc0->textures[s][i]);
      if (old) {
         nouveau_bufctx_reset(nvc0->bufctx_3d, NVC0_BIND_TEX(s, i));
         nvc0_screen_tic_unlock(nvc0->screen, old);
         pipe_sampler_view_reference(&nvc0->textures[s][i], NULL);
      }
   }

   nvc0->num_textures[s] = nr;

   nvc0->dirty |= NVC0_NEW_TEXTURES;
}

static void
nvc0_vp_set_sampler_views(struct pipe_context *pipe,
                          unsigned nr,
                          struct pipe_sampler_view **views)
{
   nvc0_stage_set_sampler_views(nvc0_context(pipe), 0, nr, views);
}

static void
nvc0_fp_set_sampler_views(struct pipe_context *pipe,
                          unsigned nr,
                          struct pipe_sampler_view **views)
{
   nvc0_stage_set_sampler_views(nvc0_context(pipe), 4, nr, views);
}

static void
nvc0_gp_set_sampler_views(struct pipe_context *pipe,
                          unsigned nr,
                          struct pipe_sampler_view **views)
{
   nvc0_stage_set_sampler_views(nvc0_context(pipe), 3, nr, views);
}

/* ============================= SHADERS =======================================
 */

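/* Common constructor for vertex, fragment and geometry programs: only the
 * TGSI tokens and stream output info are copied here; translation to
 * hardware code does not happen in this constructor but is deferred until
 * the program is validated.
 */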
static void *
nvc0_sp_state_create(struct pipe_context *pipe,
                     const struct pipe_shader_state *cso, unsigned type)
{
   struct nvc0_program *prog;

   prog = CALLOC_STRUCT(nvc0_program);
   if (!prog)
      return NULL;

   prog->type = type;

   if (cso->tokens)
      prog->pipe.tokens = tgsi_dup_tokens(cso->tokens);

   if (cso->stream_output.num_outputs)
      prog->pipe.stream_output = cso->stream_output;

   return (void *)prog;
}

static void
nvc0_sp_state_delete(struct pipe_context *pipe, void *hwcso)
{
   struct nvc0_program *prog = (struct nvc0_program *)hwcso;

   nvc0_program_destroy(nvc0_context(pipe), prog);

   FREE((void *)prog->pipe.tokens);
   FREE(prog);
}

static void *
nvc0_vp_state_create(struct pipe_context *pipe,
                     const struct pipe_shader_state *cso)
{
   return nvc0_sp_state_create(pipe, cso, PIPE_SHADER_VERTEX);
}

static void
nvc0_vp_state_bind(struct pipe_context *pipe, void *hwcso)
{
   struct nvc0_context *nvc0 = nvc0_context(pipe);

   nvc0->vertprog = hwcso;
   nvc0->dirty |= NVC0_NEW_VERTPROG;
}

static void *
nvc0_fp_state_create(struct pipe_context *pipe,
                     const struct pipe_shader_state *cso)
{
   return nvc0_sp_state_create(pipe, cso, PIPE_SHADER_FRAGMENT);
}

static void
nvc0_fp_state_bind(struct pipe_context *pipe, void *hwcso)
{
   struct nvc0_context *nvc0 = nvc0_context(pipe);

   nvc0->fragprog = hwcso;
   nvc0->dirty |= NVC0_NEW_FRAGPROG;
}

static void *
nvc0_gp_state_create(struct pipe_context *pipe,
                     const struct pipe_shader_state *cso)
{
   return nvc0_sp_state_create(pipe, cso, PIPE_SHADER_GEOMETRY);
}

static void
nvc0_gp_state_bind(struct pipe_context *pipe, void *hwcso)
{
   struct nvc0_context *nvc0 = nvc0_context(pipe);

   nvc0->gmtyprog = hwcso;
   nvc0->dirty |= NVC0_NEW_GMTYPROG;
}

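/* Bind a constant buffer for the given shader stage and slot. A buffer may
 * either be backed by a pipe_resource or point directly at user memory;
 * sizes of resource-backed buffers are rounded up to a multiple of 0x100.
 * PIPE_SHADER_COMPUTE is ignored here.
 */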
static void
nvc0_set_constant_buffer(struct pipe_context *pipe, uint shader, uint index,
                         struct pipe_constant_buffer *cb)
{
   struct nvc0_context *nvc0 = nvc0_context(pipe);
   struct pipe_resource *res = cb ? cb->buffer : NULL;
   const unsigned s = nvc0_shader_stage(shader);
   const unsigned i = index;

   if (shader == PIPE_SHADER_COMPUTE)
      return;

   if (nvc0->constbuf[s][i].user)
      nvc0->constbuf[s][i].u.buf = NULL;
   else
   if (nvc0->constbuf[s][i].u.buf)
      nouveau_bufctx_reset(nvc0->bufctx_3d, NVC0_BIND_CB(s, i));

   pipe_resource_reference(&nvc0->constbuf[s][i].u.buf, res);

   nvc0->constbuf[s][i].user = (cb && cb->user_buffer) ? TRUE : FALSE;
   if (nvc0->constbuf[s][i].user) {
      nvc0->constbuf[s][i].u.data = cb->user_buffer;
      nvc0->constbuf[s][i].size = cb->buffer_size;
   } else
   if (cb) {
      nvc0->constbuf[s][i].offset = cb->buffer_offset;
      nvc0->constbuf[s][i].size = align(cb->buffer_size, 0x100);
   }

   nvc0->constbuf_dirty[s] |= 1 << i;

   nvc0->dirty |= NVC0_NEW_CONSTBUF;
}

/* =============================================================================
 */

static void
nvc0_set_blend_color(struct pipe_context *pipe,
                     const struct pipe_blend_color *bcol)
{
   struct nvc0_context *nvc0 = nvc0_context(pipe);

   nvc0->blend_colour = *bcol;
   nvc0->dirty |= NVC0_NEW_BLEND_COLOUR;
}

static void
nvc0_set_stencil_ref(struct pipe_context *pipe,
                     const struct pipe_stencil_ref *sr)
{
   struct nvc0_context *nvc0 = nvc0_context(pipe);

   nvc0->stencil_ref = *sr;
   nvc0->dirty |= NVC0_NEW_STENCIL_REF;
}

static void
nvc0_set_clip_state(struct pipe_context *pipe,
                    const struct pipe_clip_state *clip)
{
   struct nvc0_context *nvc0 = nvc0_context(pipe);

   memcpy(nvc0->clip.ucp, clip->ucp, sizeof(clip->ucp));

   nvc0->dirty |= NVC0_NEW_CLIP;
}

static void
nvc0_set_sample_mask(struct pipe_context *pipe, unsigned sample_mask)
{
   struct nvc0_context *nvc0 = nvc0_context(pipe);

   nvc0->sample_mask = sample_mask;
   nvc0->dirty |= NVC0_NEW_SAMPLE_MASK;
}

static void
nvc0_set_framebuffer_state(struct pipe_context *pipe,
                           const struct pipe_framebuffer_state *fb)
{
   struct nvc0_context *nvc0 = nvc0_context(pipe);
   unsigned i;

   nouveau_bufctx_reset(nvc0->bufctx_3d, NVC0_BIND_FB);

   for (i = 0; i < fb->nr_cbufs; ++i)
      pipe_surface_reference(&nvc0->framebuffer.cbufs[i], fb->cbufs[i]);
   for (; i < nvc0->framebuffer.nr_cbufs; ++i)
      pipe_surface_reference(&nvc0->framebuffer.cbufs[i], NULL);

   nvc0->framebuffer.nr_cbufs = fb->nr_cbufs;

   nvc0->framebuffer.width = fb->width;
   nvc0->framebuffer.height = fb->height;

   pipe_surface_reference(&nvc0->framebuffer.zsbuf, fb->zsbuf);

   nvc0->dirty |= NVC0_NEW_FRAMEBUFFER;
}

static void
nvc0_set_polygon_stipple(struct pipe_context *pipe,
                         const struct pipe_poly_stipple *stipple)
{
   struct nvc0_context *nvc0 = nvc0_context(pipe);

   nvc0->stipple = *stipple;
   nvc0->dirty |= NVC0_NEW_STIPPLE;
}

static void
nvc0_set_scissor_state(struct pipe_context *pipe,
                       const struct pipe_scissor_state *scissor)
{
   struct nvc0_context *nvc0 = nvc0_context(pipe);

   nvc0->scissor = *scissor;
   nvc0->dirty |= NVC0_NEW_SCISSOR;
}

static void
nvc0_set_viewport_state(struct pipe_context *pipe,
                        const struct pipe_viewport_state *vpt)
{
   struct nvc0_context *nvc0 = nvc0_context(pipe);

   nvc0->viewport = *vpt;
   nvc0->dirty |= NVC0_NEW_VIEWPORT;
}

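/* Set the vertex buffer bindings. Buffers that point at user memory are
 * tracked in the vbo_user bitmask, and user buffers with a stride of 0
 * (every vertex fetches the same data) are additionally recorded in
 * constant_vbos. NVC0_NEW_ARRAYS is only set when something actually
 * changed.
 */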
static void
nvc0_set_vertex_buffers(struct pipe_context *pipe,
                        unsigned count,
                        const struct pipe_vertex_buffer *vb)
{
   struct nvc0_context *nvc0 = nvc0_context(pipe);
   uint32_t constant_vbos = 0;
   unsigned i;

   nvc0->vbo_user = 0;

   if (count != nvc0->num_vtxbufs) {
      for (i = 0; i < count; ++i) {
         pipe_resource_reference(&nvc0->vtxbuf[i].buffer, vb[i].buffer);
         if (vb[i].user_buffer) {
            nvc0->vbo_user |= 1 << i;
            nvc0->vtxbuf[i].user_buffer = vb[i].user_buffer;
            if (!vb[i].stride)
               constant_vbos |= 1 << i;
         } else {
            nvc0->vtxbuf[i].buffer_offset = vb[i].buffer_offset;
         }
         nvc0->vtxbuf[i].stride = vb[i].stride;
      }
      for (; i < nvc0->num_vtxbufs; ++i)
         pipe_resource_reference(&nvc0->vtxbuf[i].buffer, NULL);

      nvc0->num_vtxbufs = count;
      nvc0->dirty |= NVC0_NEW_ARRAYS;
   } else {
      for (i = 0; i < count; ++i) {
         if (vb[i].user_buffer) {
            nvc0->vtxbuf[i].user_buffer = vb[i].user_buffer;
            nvc0->vbo_user |= 1 << i;
            if (!vb[i].stride)
               constant_vbos |= 1 << i;
            assert(!vb[i].buffer);
         }
         if (nvc0->vtxbuf[i].buffer == vb[i].buffer &&
             nvc0->vtxbuf[i].buffer_offset == vb[i].buffer_offset &&
             nvc0->vtxbuf[i].stride == vb[i].stride)
            continue;
         pipe_resource_reference(&nvc0->vtxbuf[i].buffer, vb[i].buffer);
         nvc0->vtxbuf[i].buffer_offset = vb[i].buffer_offset;
         nvc0->vtxbuf[i].stride = vb[i].stride;
         nvc0->dirty |= NVC0_NEW_ARRAYS;
      }
   }
   if (constant_vbos != nvc0->constant_vbos) {
      nvc0->constant_vbos = constant_vbos;
      nvc0->dirty |= NVC0_NEW_ARRAYS;
   }

   if (nvc0->dirty & NVC0_NEW_ARRAYS)
      nouveau_bufctx_reset(nvc0->bufctx_3d, NVC0_BIND_VTX);
}

static void
nvc0_set_index_buffer(struct pipe_context *pipe,
                      const struct pipe_index_buffer *ib)
{
   struct nvc0_context *nvc0 = nvc0_context(pipe);

   if (nvc0->idxbuf.buffer)
      nouveau_bufctx_reset(nvc0->bufctx_3d, NVC0_BIND_IDX);

   if (ib) {
      pipe_resource_reference(&nvc0->idxbuf.buffer, ib->buffer);
      nvc0->idxbuf.index_size = ib->index_size;
      if (ib->buffer) {
         nvc0->idxbuf.offset = ib->offset;
         nvc0->dirty |= NVC0_NEW_IDXBUF;
      } else {
         nvc0->idxbuf.user_buffer = ib->user_buffer;
         nvc0->dirty &= ~NVC0_NEW_IDXBUF;
      }
   } else {
      nvc0->dirty &= ~NVC0_NEW_IDXBUF;
      pipe_resource_reference(&nvc0->idxbuf.buffer, NULL);
   }
}

static void
nvc0_vertex_state_bind(struct pipe_context *pipe, void *hwcso)
{
   struct nvc0_context *nvc0 = nvc0_context(pipe);

   nvc0->vertex = hwcso;
   nvc0->dirty |= NVC0_NEW_VERTEX;
}

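/* Create a stream output (transform feedback) target. A driver query of
 * type NVC0_QUERY_TFB_BUFFER_OFFSET is attached to each target, presumably
 * so the buffer offset written by the GPU can be saved and restored later
 * (see nvc0_so_target_save_offset below).
 */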
static struct pipe_stream_output_target *
nvc0_so_target_create(struct pipe_context *pipe,
                      struct pipe_resource *res,
                      unsigned offset, unsigned size)
{
   struct nvc0_so_target *targ = MALLOC_STRUCT(nvc0_so_target);
   if (!targ)
      return NULL;

   targ->pq = pipe->create_query(pipe, NVC0_QUERY_TFB_BUFFER_OFFSET);
   if (!targ->pq) {
      FREE(targ);
      return NULL;
   }
   targ->clean = TRUE;

   targ->pipe.buffer_size = size;
   targ->pipe.buffer_offset = offset;
   targ->pipe.context = pipe;
   targ->pipe.buffer = NULL;
   pipe_resource_reference(&targ->pipe.buffer, res);
   pipe_reference_init(&targ->pipe.reference, 1);

   return &targ->pipe;
}

static void
nvc0_so_target_destroy(struct pipe_context *pipe,
                       struct pipe_stream_output_target *ptarg)
{
   struct nvc0_so_target *targ = nvc0_so_target(ptarg);
   pipe->destroy_query(pipe, targ->pq);
   pipe_resource_reference(&targ->pipe.buffer, NULL);
   FREE(targ);
}

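/* Bind up to 4 transform feedback targets. Targets that are replaced or
 * unbound at the tail have their current buffer offset saved via
 * nvc0_so_target_save_offset, and targets bound without their append_mask
 * bit set are marked clean so writing does not resume at the saved offset.
 */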
static void
nvc0_set_transform_feedback_targets(struct pipe_context *pipe,
                                    unsigned num_targets,
                                    struct pipe_stream_output_target **targets,
                                    unsigned append_mask)
{
   struct nvc0_context *nvc0 = nvc0_context(pipe);
   unsigned i;
   boolean serialize = TRUE;

   assert(num_targets <= 4);

   for (i = 0; i < num_targets; ++i) {
      if (nvc0->tfbbuf[i] == targets[i] && (append_mask & (1 << i)))
         continue;
      nvc0->tfbbuf_dirty |= 1 << i;

      if (nvc0->tfbbuf[i] && nvc0->tfbbuf[i] != targets[i])
         nvc0_so_target_save_offset(pipe, nvc0->tfbbuf[i], i, &serialize);

      if (targets[i] && !(append_mask & (1 << i)))
         nvc0_so_target(targets[i])->clean = TRUE;

      pipe_so_target_reference(&nvc0->tfbbuf[i], targets[i]);
   }
   for (; i < nvc0->num_tfbbufs; ++i) {
      nvc0->tfbbuf_dirty |= 1 << i;
      nvc0_so_target_save_offset(pipe, nvc0->tfbbuf[i], i, &serialize);
      pipe_so_target_reference(&nvc0->tfbbuf[i], NULL);
   }
   nvc0->num_tfbbufs = num_targets;

   if (nvc0->tfbbuf_dirty)
      nvc0->dirty |= NVC0_NEW_TFB_TARGETS;
}

void
nvc0_init_state_functions(struct nvc0_context *nvc0)
{
   struct pipe_context *pipe = &nvc0->base.pipe;

   pipe->create_blend_state = nvc0_blend_state_create;
   pipe->bind_blend_state = nvc0_blend_state_bind;
   pipe->delete_blend_state = nvc0_blend_state_delete;

   pipe->create_rasterizer_state = nvc0_rasterizer_state_create;
   pipe->bind_rasterizer_state = nvc0_rasterizer_state_bind;
   pipe->delete_rasterizer_state = nvc0_rasterizer_state_delete;

   pipe->create_depth_stencil_alpha_state = nvc0_zsa_state_create;
   pipe->bind_depth_stencil_alpha_state = nvc0_zsa_state_bind;
   pipe->delete_depth_stencil_alpha_state = nvc0_zsa_state_delete;

   pipe->create_sampler_state = nv50_sampler_state_create;
   pipe->delete_sampler_state = nvc0_sampler_state_delete;
   pipe->bind_vertex_sampler_states = nvc0_vp_sampler_states_bind;
   pipe->bind_fragment_sampler_states = nvc0_fp_sampler_states_bind;
   pipe->bind_geometry_sampler_states = nvc0_gp_sampler_states_bind;

   pipe->create_sampler_view = nvc0_create_sampler_view;
   pipe->sampler_view_destroy = nvc0_sampler_view_destroy;
   pipe->set_vertex_sampler_views = nvc0_vp_set_sampler_views;
   pipe->set_fragment_sampler_views = nvc0_fp_set_sampler_views;
   pipe->set_geometry_sampler_views = nvc0_gp_set_sampler_views;

   pipe->create_vs_state = nvc0_vp_state_create;
   pipe->create_fs_state = nvc0_fp_state_create;
   pipe->create_gs_state = nvc0_gp_state_create;
   pipe->bind_vs_state = nvc0_vp_state_bind;
   pipe->bind_fs_state = nvc0_fp_state_bind;
   pipe->bind_gs_state = nvc0_gp_state_bind;
   pipe->delete_vs_state = nvc0_sp_state_delete;
   pipe->delete_fs_state = nvc0_sp_state_delete;
   pipe->delete_gs_state = nvc0_sp_state_delete;

   pipe->set_blend_color = nvc0_set_blend_color;
   pipe->set_stencil_ref = nvc0_set_stencil_ref;
   pipe->set_clip_state = nvc0_set_clip_state;
   pipe->set_sample_mask = nvc0_set_sample_mask;
   pipe->set_constant_buffer = nvc0_set_constant_buffer;
   pipe->set_framebuffer_state = nvc0_set_framebuffer_state;
   pipe->set_polygon_stipple = nvc0_set_polygon_stipple;
   pipe->set_scissor_state = nvc0_set_scissor_state;
   pipe->set_viewport_state = nvc0_set_viewport_state;

   pipe->create_vertex_elements_state = nvc0_vertex_state_create;
   pipe->delete_vertex_elements_state = nvc0_vertex_state_delete;
   pipe->bind_vertex_elements_state = nvc0_vertex_state_bind;

   pipe->set_vertex_buffers = nvc0_set_vertex_buffers;
   pipe->set_index_buffer = nvc0_set_index_buffer;

   pipe->create_stream_output_target = nvc0_so_target_create;
   pipe->stream_output_target_destroy = nvc0_so_target_destroy;
   pipe->set_stream_output_targets = nvc0_set_transform_feedback_targets;
}