/*
 * Copyright 2010 Christoph Bumiller
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
 */

#include "pipe/p_defines.h"
#include "util/u_framebuffer.h"
#include "util/u_upload_mgr.h"

#include "nvc0/nvc0_context.h"
#include "nvc0/nvc0_screen.h"
#include "nvc0/nvc0_resource.h"

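/* pipe_context::flush: hand the caller a reference to the screen's current
 * fence if one was requested, then kick the push buffer.  The fence itself
 * is advanced from the kick_notify callback below.
 */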
static void
nvc0_flush(struct pipe_context *pipe,
           struct pipe_fence_handle **fence,
           unsigned flags)
{
   struct nvc0_context *nvc0 = nvc0_context(pipe);
   struct nouveau_screen *screen = &nvc0->screen->base;

   if (fence)
      nouveau_fence_ref(screen->fence.current, (struct nouveau_fence **)fence);

   PUSH_KICK(nvc0->base.pushbuf); /* fencing handled in kick_notify */

   nouveau_context_update_frame_stats(&nvc0->base);
}

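/* pipe_context::texture_barrier: serialize the 3D engine and flush the
 * texture cache so prior render-target writes become visible to texturing.
 */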
static void
nvc0_texture_barrier(struct pipe_context *pipe, unsigned flags)
{
   struct nouveau_pushbuf *push = nvc0_context(pipe)->base.pushbuf;

   IMMED_NVC0(push, NVC0_3D(SERIALIZE), 0);
   IMMED_NVC0(push, NVC0_3D(TEX_CACHE_CTL), 0);
}

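/* pipe_context::memory_barrier: for persistently mapped vertex/constant
 * buffers just mark the corresponding state dirty so it gets revalidated;
 * otherwise emit the serialize and cache flushes requested by the flags.
 */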
static void
nvc0_memory_barrier(struct pipe_context *pipe, unsigned flags)
{
   struct nvc0_context *nvc0 = nvc0_context(pipe);
   struct nouveau_pushbuf *push = nvc0->base.pushbuf;
   int i, s;

   if (flags & PIPE_BARRIER_MAPPED_BUFFER) {
      for (i = 0; i < nvc0->num_vtxbufs; ++i) {
         if (!nvc0->vtxbuf[i].buffer.resource && !nvc0->vtxbuf[i].is_user_buffer)
            continue;
         if (nvc0->vtxbuf[i].buffer.resource->flags & PIPE_RESOURCE_FLAG_MAP_PERSISTENT)
            nvc0->base.vbo_dirty = true;
      }

      for (s = 0; s < 5 && !nvc0->cb_dirty; ++s) {
         uint32_t valid = nvc0->constbuf_valid[s];

         while (valid && !nvc0->cb_dirty) {
            const unsigned i = ffs(valid) - 1;
            struct pipe_resource *res;

            valid &= ~(1 << i);
            if (nvc0->constbuf[s][i].user)
               continue;

            res = nvc0->constbuf[s][i].u.buf;
            if (!res)
               continue;

            if (res->flags & PIPE_RESOURCE_FLAG_MAP_PERSISTENT)
               nvc0->cb_dirty = true;
         }
      }
   } else {
      /* Pretty much any writing by shaders needs a serialize after
       * it. Especially when moving between 3d and compute pipelines, but even
       * without that.
       */
      IMMED_NVC0(push, NVC0_3D(SERIALIZE), 0);
   }

   /* If we're going to texture from a buffer/image written by a shader, we
    * must flush the texture cache.
    */
   if (flags & PIPE_BARRIER_TEXTURE)
      IMMED_NVC0(push, NVC0_3D(TEX_CACHE_CTL), 0);

   if (flags & PIPE_BARRIER_CONSTANT_BUFFER)
      nvc0->cb_dirty = true;
   if (flags & (PIPE_BARRIER_VERTEX_BUFFER | PIPE_BARRIER_INDEX_BUFFER))
      nvc0->base.vbo_dirty = true;
}

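/* pipe_context::emit_string_marker: push the marker string as NOP method
 * data on the 3D subchannel so it appears in captured command streams.
 */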
static void
nvc0_emit_string_marker(struct pipe_context *pipe, const char *str, int len)
{
   struct nouveau_pushbuf *push = nvc0_context(pipe)->base.pushbuf;
   int string_words = len / 4;
   int data_words;

   if (len <= 0)
      return;
   string_words = MIN2(string_words, NV04_PFIFO_MAX_PACKET_LEN);
   if (string_words == NV04_PFIFO_MAX_PACKET_LEN)
      data_words = string_words;
   else
      data_words = string_words + !!(len & 3);
   BEGIN_NIC0(push, SUBC_3D(NV04_GRAPH_NOP), data_words);
   if (string_words)
      PUSH_DATAp(push, str, string_words);
   if (string_words != data_words) {
      int data = 0;
      memcpy(&data, &str[string_words * 4], len & 3);
      PUSH_DATA (push, data);
   }
}

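/* Release every resource reference the context still holds: bufctxs,
 * framebuffer state, vertex/constant/shader buffers, textures, images,
 * surfaces, transform feedback targets and compute globals.
 */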
static void
nvc0_context_unreference_resources(struct nvc0_context *nvc0)
{
   unsigned s, i;

   nouveau_bufctx_del(&nvc0->bufctx_3d);
   nouveau_bufctx_del(&nvc0->bufctx);
   nouveau_bufctx_del(&nvc0->bufctx_cp);

   util_unreference_framebuffer_state(&nvc0->framebuffer);

   for (i = 0; i < nvc0->num_vtxbufs; ++i)
      pipe_vertex_buffer_unreference(&nvc0->vtxbuf[i]);

   for (s = 0; s < 6; ++s) {
      for (i = 0; i < nvc0->num_textures[s]; ++i)
         pipe_sampler_view_reference(&nvc0->textures[s][i], NULL);

      for (i = 0; i < NVC0_MAX_PIPE_CONSTBUFS; ++i)
         if (!nvc0->constbuf[s][i].user)
            pipe_resource_reference(&nvc0->constbuf[s][i].u.buf, NULL);

      for (i = 0; i < NVC0_MAX_BUFFERS; ++i)
         pipe_resource_reference(&nvc0->buffers[s][i].buffer, NULL);

      for (i = 0; i < NVC0_MAX_IMAGES; ++i) {
         pipe_resource_reference(&nvc0->images[s][i].resource, NULL);
         if (nvc0->screen->base.class_3d >= GM107_3D_CLASS)
            pipe_sampler_view_reference(&nvc0->images_tic[s][i], NULL);
      }
   }

   for (s = 0; s < 2; ++s) {
      for (i = 0; i < NVC0_MAX_SURFACE_SLOTS; ++i)
         pipe_surface_reference(&nvc0->surfaces[s][i], NULL);
   }

   for (i = 0; i < nvc0->num_tfbbufs; ++i)
      pipe_so_target_reference(&nvc0->tfbbuf[i], NULL);

   for (i = 0; i < nvc0->global_residents.size / sizeof(struct pipe_resource *);
        ++i) {
      struct pipe_resource **res = util_dynarray_element(
         &nvc0->global_residents, struct pipe_resource *, i);
      pipe_resource_reference(res, NULL);
   }
   util_dynarray_fini(&nvc0->global_residents);

   if (nvc0->tcp_empty)
      nvc0->base.pipe.delete_tcs_state(&nvc0->base.pipe, nvc0->tcp_empty);
}

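/* pipe_context::destroy: save the current state for reuse by the next
 * context, flush pending work without revalidating resources, then drop
 * everything the context owns.
 */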
static void
nvc0_destroy(struct pipe_context *pipe)
{
   struct nvc0_context *nvc0 = nvc0_context(pipe);

   if (nvc0->screen->cur_ctx == nvc0) {
      nvc0->screen->cur_ctx = NULL;
      nvc0->screen->save_state = nvc0->state;
      nvc0->screen->save_state.tfb = NULL;
   }

   if (nvc0->base.pipe.stream_uploader)
      u_upload_destroy(nvc0->base.pipe.stream_uploader);

   /* Unset bufctx, we don't want to revalidate any resources after the flush.
    * Other contexts will always set their bufctx again on action calls.
    */
   nouveau_pushbuf_bufctx(nvc0->base.pushbuf, NULL);
   nouveau_pushbuf_kick(nvc0->base.pushbuf, nvc0->base.pushbuf->channel);

   nvc0_context_unreference_resources(nvc0);
   nvc0_blitctx_destroy(nvc0);

   list_for_each_entry_safe(struct nvc0_resident, pos, &nvc0->tex_head, list) {
      list_del(&pos->list);
      free(pos);
   }

   list_for_each_entry_safe(struct nvc0_resident, pos, &nvc0->img_head, list) {
      list_del(&pos->list);
      free(pos);
   }

   nouveau_context_destroy(&nvc0->base);
}

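/* Default kick_notify callback: emit/update fences on every push buffer
 * submission and remember that the current context's state was flushed.
 */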
void
nvc0_default_kick_notify(struct nouveau_pushbuf *push)
{
   struct nvc0_screen *screen = push->user_priv;

   if (screen) {
      nouveau_fence_next(&screen->base);
      nouveau_fence_update(&screen->base, true);
      if (screen->cur_ctx)
         screen->cur_ctx->state.flushed = true;
      NOUVEAU_DRV_STAT(&screen->base, pushbuf_count, 1);
   }
}

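/* Called when a resource's backing storage changes: mark every binding that
 * references it dirty and reset the matching bufctx entries.  ref counts the
 * known references; the walk stops early once it reaches zero.
 */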
static int
nvc0_invalidate_resource_storage(struct nouveau_context *ctx,
                                 struct pipe_resource *res,
                                 int ref)
{
   struct nvc0_context *nvc0 = nvc0_context(&ctx->pipe);
   unsigned s, i;

   if (res->bind & PIPE_BIND_RENDER_TARGET) {
      for (i = 0; i < nvc0->framebuffer.nr_cbufs; ++i) {
         if (nvc0->framebuffer.cbufs[i] &&
             nvc0->framebuffer.cbufs[i]->texture == res) {
            nvc0->dirty_3d |= NVC0_NEW_3D_FRAMEBUFFER;
            nouveau_bufctx_reset(nvc0->bufctx_3d, NVC0_BIND_3D_FB);
            if (!--ref)
               return ref;
         }
      }
   }
   if (res->bind & PIPE_BIND_DEPTH_STENCIL) {
      if (nvc0->framebuffer.zsbuf &&
          nvc0->framebuffer.zsbuf->texture == res) {
         nvc0->dirty_3d |= NVC0_NEW_3D_FRAMEBUFFER;
         nouveau_bufctx_reset(nvc0->bufctx_3d, NVC0_BIND_3D_FB);
         if (!--ref)
            return ref;
      }
   }

   if (res->target == PIPE_BUFFER) {
      for (i = 0; i < nvc0->num_vtxbufs; ++i) {
         if (nvc0->vtxbuf[i].buffer.resource == res) {
            nvc0->dirty_3d |= NVC0_NEW_3D_ARRAYS;
            nouveau_bufctx_reset(nvc0->bufctx_3d, NVC0_BIND_3D_VTX);
            if (!--ref)
               return ref;
         }
      }

      for (s = 0; s < 6; ++s) {
         for (i = 0; i < nvc0->num_textures[s]; ++i) {
            if (nvc0->textures[s][i] &&
                nvc0->textures[s][i]->texture == res) {
               nvc0->textures_dirty[s] |= 1 << i;
               if (unlikely(s == 5)) {
                  nvc0->dirty_cp |= NVC0_NEW_CP_TEXTURES;
                  nouveau_bufctx_reset(nvc0->bufctx_cp, NVC0_BIND_CP_TEX(i));
               } else {
                  nvc0->dirty_3d |= NVC0_NEW_3D_TEXTURES;
                  nouveau_bufctx_reset(nvc0->bufctx_3d, NVC0_BIND_3D_TEX(s, i));
               }
               if (!--ref)
                  return ref;
            }
         }
      }

      for (s = 0; s < 6; ++s) {
         for (i = 0; i < NVC0_MAX_PIPE_CONSTBUFS; ++i) {
            if (!(nvc0->constbuf_valid[s] & (1 << i)))
               continue;
            if (!nvc0->constbuf[s][i].user &&
                nvc0->constbuf[s][i].u.buf == res) {
               nvc0->constbuf_dirty[s] |= 1 << i;
               if (unlikely(s == 5)) {
                  nvc0->dirty_cp |= NVC0_NEW_CP_CONSTBUF;
                  nouveau_bufctx_reset(nvc0->bufctx_cp, NVC0_BIND_CP_CB(i));
               } else {
                  nvc0->dirty_3d |= NVC0_NEW_3D_CONSTBUF;
                  nouveau_bufctx_reset(nvc0->bufctx_3d, NVC0_BIND_3D_CB(s, i));
               }
               if (!--ref)
                  return ref;
            }
         }
      }

      for (s = 0; s < 6; ++s) {
         for (i = 0; i < NVC0_MAX_BUFFERS; ++i) {
            if (nvc0->buffers[s][i].buffer == res) {
               nvc0->buffers_dirty[s] |= 1 << i;
               if (unlikely(s == 5)) {
                  nvc0->dirty_cp |= NVC0_NEW_CP_BUFFERS;
                  nouveau_bufctx_reset(nvc0->bufctx_cp, NVC0_BIND_CP_BUF);
               } else {
                  nvc0->dirty_3d |= NVC0_NEW_3D_BUFFERS;
                  nouveau_bufctx_reset(nvc0->bufctx_3d, NVC0_BIND_3D_BUF);
               }
               if (!--ref)
                  return ref;
            }
         }
      }

      for (s = 0; s < 6; ++s) {
         for (i = 0; i < NVC0_MAX_IMAGES; ++i) {
            if (nvc0->images[s][i].resource == res) {
               nvc0->images_dirty[s] |= 1 << i;
               if (unlikely(s == 5)) {
                  nvc0->dirty_cp |= NVC0_NEW_CP_SURFACES;
                  nouveau_bufctx_reset(nvc0->bufctx_cp, NVC0_BIND_CP_SUF);
               } else {
                  nvc0->dirty_3d |= NVC0_NEW_3D_SURFACES;
                  nouveau_bufctx_reset(nvc0->bufctx_3d, NVC0_BIND_3D_SUF);
               }
            }
            if (!--ref)
               return ref;
         }
      }
   }

   return ref;
}

static void
nvc0_context_get_sample_position(struct pipe_context *, unsigned, unsigned,
                                 float *);

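/* Create an nvc0 pipe_context: set up buffer contexts and the pipe/state
 * function tables, then pin the permanently resident screen buffers into
 * the 3D and compute bufctxs.
 */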
struct pipe_context *
nvc0_create(struct pipe_screen *pscreen, void *priv, unsigned ctxflags)
{
   struct nvc0_screen *screen = nvc0_screen(pscreen);
   struct nvc0_context *nvc0;
   struct pipe_context *pipe;
   int ret;
   uint32_t flags;

   nvc0 = CALLOC_STRUCT(nvc0_context);
   if (!nvc0)
      return NULL;
   pipe = &nvc0->base.pipe;

   if (!nvc0_blitctx_create(nvc0))
      goto out_err;

   nvc0->base.pushbuf = screen->base.pushbuf;
   nvc0->base.client = screen->base.client;

   ret = nouveau_bufctx_new(screen->base.client, 2, &nvc0->bufctx);
   if (!ret)
      ret = nouveau_bufctx_new(screen->base.client, NVC0_BIND_3D_COUNT,
                               &nvc0->bufctx_3d);
   if (!ret)
      ret = nouveau_bufctx_new(screen->base.client, NVC0_BIND_CP_COUNT,
                               &nvc0->bufctx_cp);
   if (ret)
      goto out_err;

   nvc0->screen = screen;
   nvc0->base.screen = &screen->base;

   pipe->screen = pscreen;
   pipe->priv = priv;
   pipe->stream_uploader = u_upload_create_default(pipe);
   if (!pipe->stream_uploader)
      goto out_err;
   pipe->const_uploader = pipe->stream_uploader;

   pipe->destroy = nvc0_destroy;

   pipe->draw_vbo = nvc0_draw_vbo;
   pipe->clear = nvc0_clear;
   pipe->launch_grid = (nvc0->screen->base.class_3d >= NVE4_3D_CLASS) ?
      nve4_launch_grid : nvc0_launch_grid;

   pipe->flush = nvc0_flush;
   pipe->texture_barrier = nvc0_texture_barrier;
   pipe->memory_barrier = nvc0_memory_barrier;
   pipe->get_sample_position = nvc0_context_get_sample_position;
   pipe->emit_string_marker = nvc0_emit_string_marker;

   nouveau_context_init(&nvc0->base);
   nvc0_init_query_functions(nvc0);
   nvc0_init_surface_functions(nvc0);
   nvc0_init_state_functions(nvc0);
   nvc0_init_transfer_functions(nvc0);
   nvc0_init_resource_functions(pipe);
   if (nvc0->screen->base.class_3d >= NVE4_3D_CLASS)
      nvc0_init_bindless_functions(pipe);

   list_inithead(&nvc0->tex_head);
   list_inithead(&nvc0->img_head);

   nvc0->base.invalidate_resource_storage = nvc0_invalidate_resource_storage;

   pipe->create_video_codec = nvc0_create_decoder;
   pipe->create_video_buffer = nvc0_video_buffer_create;

   /* shader builtin library is per-screen, but we need a context for m2mf */
   nvc0_program_library_upload(nvc0);
   nvc0_program_init_tcp_empty(nvc0);
   if (!nvc0->tcp_empty)
      goto out_err;
   /* set the empty tctl prog on next draw in case one is never set */
   nvc0->dirty_3d |= NVC0_NEW_3D_TCTLPROG;

   /* Do not bind the COMPUTE driver constbuf at screen initialization because
    * CBs are aliased between 3D and COMPUTE, but make sure it will be bound if
    * a grid is launched later. */
   nvc0->dirty_cp |= NVC0_NEW_CP_DRIVERCONST;

   /* now that there are no more opportunities for errors, set the current
    * context if there isn't already one.
    */
   if (!screen->cur_ctx) {
      nvc0->state = screen->save_state;
      screen->cur_ctx = nvc0;
      nouveau_pushbuf_bufctx(screen->base.pushbuf, nvc0->bufctx);
   }
   screen->base.pushbuf->kick_notify = nvc0_default_kick_notify;

   /* add permanently resident buffers to bufctxs */

   flags = NV_VRAM_DOMAIN(&screen->base) | NOUVEAU_BO_RD;

   BCTX_REFN_bo(nvc0->bufctx_3d, 3D_TEXT, flags, screen->text);
   BCTX_REFN_bo(nvc0->bufctx_3d, 3D_SCREEN, flags, screen->uniform_bo);
   BCTX_REFN_bo(nvc0->bufctx_3d, 3D_SCREEN, flags, screen->txc);
   if (screen->compute) {
      BCTX_REFN_bo(nvc0->bufctx_cp, CP_TEXT, flags, screen->text);
      BCTX_REFN_bo(nvc0->bufctx_cp, CP_SCREEN, flags, screen->uniform_bo);
      BCTX_REFN_bo(nvc0->bufctx_cp, CP_SCREEN, flags, screen->txc);
   }

   flags = NV_VRAM_DOMAIN(&screen->base) | NOUVEAU_BO_RDWR;

   if (screen->poly_cache)
      BCTX_REFN_bo(nvc0->bufctx_3d, 3D_SCREEN, flags, screen->poly_cache);
   if (screen->compute)
      BCTX_REFN_bo(nvc0->bufctx_cp, CP_SCREEN, flags, screen->tls);

   flags = NOUVEAU_BO_GART | NOUVEAU_BO_WR;

   BCTX_REFN_bo(nvc0->bufctx_3d, 3D_SCREEN, flags, screen->fence.bo);
   BCTX_REFN_bo(nvc0->bufctx, FENCE, flags, screen->fence.bo);
   if (screen->compute)
      BCTX_REFN_bo(nvc0->bufctx_cp, CP_SCREEN, flags, screen->fence.bo);

   nvc0->base.scratch.bo_size = 2 << 20;

   memset(nvc0->tex_handles, ~0, sizeof(nvc0->tex_handles));

   util_dynarray_init(&nvc0->global_residents, NULL);

   return pipe;

out_err:
   if (nvc0) {
      if (pipe->stream_uploader)
         u_upload_destroy(pipe->stream_uploader);
      if (nvc0->bufctx_3d)
         nouveau_bufctx_del(&nvc0->bufctx_3d);
      if (nvc0->bufctx_cp)
         nouveau_bufctx_del(&nvc0->bufctx_cp);
      if (nvc0->bufctx)
         nouveau_bufctx_del(&nvc0->bufctx);
      FREE(nvc0->blit);
      FREE(nvc0);
   }
   return NULL;
}

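/* Walk the bufctx's pending list (or the current list when called at flush
 * time) and validate each attached nv04_resource with the access flags
 * stored in the bufref, so it is fenced against the current submission.
 */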
void
nvc0_bufctx_fence(struct nvc0_context *nvc0, struct nouveau_bufctx *bufctx,
                  bool on_flush)
{
   struct nouveau_list *list = on_flush ? &bufctx->current : &bufctx->pending;
   struct nouveau_list *it;
   NOUVEAU_DRV_STAT_IFD(unsigned count = 0);

   for (it = list->next; it != list; it = it->next) {
      struct nouveau_bufref *ref = (struct nouveau_bufref *)it;
      struct nv04_resource *res = ref->priv;
      if (res)
         nvc0_resource_validate(res, (unsigned)ref->priv_data);
      NOUVEAU_DRV_STAT_IFD(count++);
   }
   NOUVEAU_DRV_STAT(&nvc0->screen->base, resource_validate_count, count);
}

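/* Return the fixed sample positions for the given sample count as pairs of
 * 4-bit x/y sub-pixel coordinates (in 1/16th pixel units).
 */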
const void *
nvc0_get_sample_locations(unsigned sample_count)
{
   static const uint8_t ms1[1][2] = { { 0x8, 0x8 } };
   static const uint8_t ms2[2][2] = {
      { 0x4, 0x4 }, { 0xc, 0xc } }; /* surface coords (0,0), (1,0) */
   static const uint8_t ms4[4][2] = {
      { 0x6, 0x2 }, { 0xe, 0x6 },   /* (0,0), (1,0) */
      { 0x2, 0xa }, { 0xa, 0xe } }; /* (0,1), (1,1) */
   static const uint8_t ms8[8][2] = {
      { 0x1, 0x7 }, { 0x5, 0x3 },   /* (0,0), (1,0) */
      { 0x3, 0xd }, { 0x7, 0xb },   /* (0,1), (1,1) */
      { 0x9, 0x5 }, { 0xf, 0x1 },   /* (2,0), (3,0) */
      { 0xb, 0xf }, { 0xd, 0x9 } }; /* (2,1), (3,1) */
#if 0
   /* NOTE: there are alternative modes for MS2 and MS8, currently not used */
   static const uint8_t ms8_alt[8][2] = {
      { 0x9, 0x5 }, { 0x7, 0xb },   /* (2,0), (1,1) */
      { 0xd, 0x9 }, { 0x5, 0x3 },   /* (3,1), (1,0) */
      { 0x3, 0xd }, { 0x1, 0x7 },   /* (0,1), (0,0) */
      { 0xb, 0xf }, { 0xf, 0x1 } }; /* (2,1), (3,0) */
#endif

   const uint8_t (*ptr)[2];

   switch (sample_count) {
   case 0:
   case 1: ptr = ms1; break;
   case 2: ptr = ms2; break;
   case 4: ptr = ms4; break;
   case 8: ptr = ms8; break;
   default:
      assert(0);
      return NULL; /* bad sample count -> undefined locations */
   }
   return ptr;
}

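/* pipe_context::get_sample_position: convert the 4-bit sample locations
 * above into floating-point pixel offsets.
 */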
static void
nvc0_context_get_sample_position(struct pipe_context *pipe,
                                 unsigned sample_count, unsigned sample_index,
                                 float *xy)
{
   const uint8_t (*ptr)[2];

   ptr = nvc0_get_sample_locations(sample_count);
   if (!ptr)
      return;

   xy[0] = ptr[sample_index][0] * 0.0625f;
   xy[1] = ptr[sample_index][1] * 0.0625f;
}