/*
 * Copyright 2010 Christoph Bumiller
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
 */

#include "pipe/p_defines.h"
#include "util/u_framebuffer.h"
#include "util/u_upload_mgr.h"

#include "nv50/nv50_context.h"
#include "nv50/nv50_screen.h"
#include "nv50/nv50_resource.h"

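/* pipe_context::flush: give the caller a reference to the screen's current
 * fence (if one was requested) and kick off the accumulated pushbuf. */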
static void
nv50_flush(struct pipe_context *pipe,
           struct pipe_fence_handle **fence,
           unsigned flags)
{
   struct nouveau_screen *screen = nouveau_screen(pipe->screen);

   if (fence)
      nouveau_fence_ref(screen->fence.current, (struct nouveau_fence **)fence);

   PUSH_KICK(screen->pushbuf);

   nouveau_context_update_frame_stats(nouveau_context(pipe));
}

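/* pipe_context::texture_barrier: serialize the 3D engine and flush the
 * texture cache so that subsequent texturing sees prior render-target
 * writes. */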
static void
nv50_texture_barrier(struct pipe_context *pipe, unsigned flags)
{
   struct nouveau_pushbuf *push = nv50_context(pipe)->base.pushbuf;

   BEGIN_NV04(push, SUBC_3D(NV50_GRAPH_SERIALIZE), 1);
   PUSH_DATA (push, 0);
   BEGIN_NV04(push, NV50_3D(TEX_CACHE_CTL), 1);
   PUSH_DATA (push, 0x20);
}

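/* pipe_context::memory_barrier: for persistently mapped buffers, mark the
 * affected vertex/constant buffer bindings dirty so they get re-validated;
 * other barrier types serialize the 3D engine, and PIPE_BARRIER_TEXTURE
 * additionally flushes the texture cache. */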
static void
nv50_memory_barrier(struct pipe_context *pipe, unsigned flags)
{
   struct nv50_context *nv50 = nv50_context(pipe);
   struct nouveau_pushbuf *push = nv50->base.pushbuf;
   int i, s;

   if (flags & PIPE_BARRIER_MAPPED_BUFFER) {
      for (i = 0; i < nv50->num_vtxbufs; ++i) {
         if (!nv50->vtxbuf[i].buffer.resource && !nv50->vtxbuf[i].is_user_buffer)
            continue;
         if (nv50->vtxbuf[i].buffer.resource->flags & PIPE_RESOURCE_FLAG_MAP_PERSISTENT)
            nv50->base.vbo_dirty = true;
      }

      for (s = 0; s < NV50_MAX_3D_SHADER_STAGES && !nv50->cb_dirty; ++s) {
         uint32_t valid = nv50->constbuf_valid[s];

         while (valid && !nv50->cb_dirty) {
            const unsigned i = ffs(valid) - 1;
            struct pipe_resource *res;

            valid &= ~(1 << i);
            if (nv50->constbuf[s][i].user)
               continue;

            res = nv50->constbuf[s][i].u.buf;
            if (!res)
               continue;

            if (res->flags & PIPE_RESOURCE_FLAG_MAP_PERSISTENT)
               nv50->cb_dirty = true;
         }
      }
   } else {
      BEGIN_NV04(push, SUBC_3D(NV50_GRAPH_SERIALIZE), 1);
      PUSH_DATA (push, 0);
   }

   /* If we're going to texture from a buffer/image written by a shader, we
    * must flush the texture cache.
    */
   if (flags & PIPE_BARRIER_TEXTURE) {
      BEGIN_NV04(push, NV50_3D(TEX_CACHE_CTL), 1);
      PUSH_DATA (push, 0x20);
   }

   if (flags & PIPE_BARRIER_CONSTANT_BUFFER)
      nv50->cb_dirty = true;
   if (flags & (PIPE_BARRIER_VERTEX_BUFFER | PIPE_BARRIER_INDEX_BUFFER))
      nv50->base.vbo_dirty = true;
}

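/* pipe_context::emit_string_marker: copy the string into NOP methods on the
 * 3D subchannel (non-incrementing write), zero-padding the final partial
 * dword, so the marker ends up embedded in the command stream. */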
static void
nv50_emit_string_marker(struct pipe_context *pipe, const char *str, int len)
{
   struct nouveau_pushbuf *push = nv50_context(pipe)->base.pushbuf;
   int string_words = len / 4;
   int data_words;

   if (len <= 0)
      return;
   string_words = MIN2(string_words, NV04_PFIFO_MAX_PACKET_LEN);
   if (string_words == NV04_PFIFO_MAX_PACKET_LEN)
      data_words = string_words;
   else
      data_words = string_words + !!(len & 3);
   BEGIN_NI04(push, SUBC_3D(NV04_GRAPH_NOP), data_words);
   if (string_words)
      PUSH_DATAp(push, str, string_words);
   if (string_words != data_words) {
      int data = 0;
      memcpy(&data, &str[string_words * 4], len & 3);
      PUSH_DATA (push, data);
   }
}

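/* Pushbuf kick notification: start a new fence, update fence completion
 * state, and note that the currently bound context's state has been
 * flushed. */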
void
nv50_default_kick_notify(struct nouveau_pushbuf *push)
{
   struct nv50_screen *screen = push->user_priv;

   if (screen) {
      nouveau_fence_next(&screen->base);
      nouveau_fence_update(&screen->base, true);
      if (screen->cur_ctx)
         screen->cur_ctx->state.flushed = true;
   }
}

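/* Drop all references the context holds: bufctx objects, framebuffer state,
 * vertex buffers, sampler views, constant buffers and global compute
 * resources. */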
static void
nv50_context_unreference_resources(struct nv50_context *nv50)
{
   unsigned s, i;

   nouveau_bufctx_del(&nv50->bufctx_3d);
   nouveau_bufctx_del(&nv50->bufctx);
   nouveau_bufctx_del(&nv50->bufctx_cp);

   util_unreference_framebuffer_state(&nv50->framebuffer);

   assert(nv50->num_vtxbufs <= PIPE_MAX_ATTRIBS);
   for (i = 0; i < nv50->num_vtxbufs; ++i)
      pipe_vertex_buffer_unreference(&nv50->vtxbuf[i]);

   for (s = 0; s < NV50_MAX_SHADER_STAGES; ++s) {
      assert(nv50->num_textures[s] <= PIPE_MAX_SAMPLERS);
      for (i = 0; i < nv50->num_textures[s]; ++i)
         pipe_sampler_view_reference(&nv50->textures[s][i], NULL);

      for (i = 0; i < NV50_MAX_PIPE_CONSTBUFS; ++i)
         if (!nv50->constbuf[s][i].user)
            pipe_resource_reference(&nv50->constbuf[s][i].u.buf, NULL);
   }

   for (i = 0; i < nv50->global_residents.size / sizeof(struct pipe_resource *);
        ++i) {
      struct pipe_resource **res = util_dynarray_element(
         &nv50->global_residents, struct pipe_resource *, i);
      pipe_resource_reference(res, NULL);
   }
   util_dynarray_fini(&nv50->global_residents);
}

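/* pipe_context::destroy: detach from the screen (saving the current state for
 * a later context), kick any pending commands, then drop all references. */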
static void
nv50_destroy(struct pipe_context *pipe)
{
   struct nv50_context *nv50 = nv50_context(pipe);

   if (nv50->screen->cur_ctx == nv50) {
      nv50->screen->cur_ctx = NULL;
      /* Save off the state in case another context gets created */
      nv50->screen->save_state = nv50->state;
   }

   if (nv50->base.pipe.stream_uploader)
      u_upload_destroy(nv50->base.pipe.stream_uploader);

   nouveau_pushbuf_bufctx(nv50->base.pushbuf, NULL);
   nouveau_pushbuf_kick(nv50->base.pushbuf, nv50->base.pushbuf->channel);

   nv50_context_unreference_resources(nv50);

   FREE(nv50->blit);

   nouveau_context_destroy(&nv50->base);
}

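/* nouveau_context::invalidate_resource_storage hook: when a resource's
 * backing storage is replaced, mark every binding that references it dirty
 * and reset the matching bufctx slots. `ref` is the caller's count of
 * outstanding references; once it reaches zero no more bindings can match,
 * so we return early. */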
static int
nv50_invalidate_resource_storage(struct nouveau_context *ctx,
                                 struct pipe_resource *res,
                                 int ref)
{
   struct nv50_context *nv50 = nv50_context(&ctx->pipe);
   unsigned bind = res->bind ? res->bind : PIPE_BIND_VERTEX_BUFFER;
   unsigned s, i;

   if (bind & PIPE_BIND_RENDER_TARGET) {
      assert(nv50->framebuffer.nr_cbufs <= PIPE_MAX_COLOR_BUFS);
      for (i = 0; i < nv50->framebuffer.nr_cbufs; ++i) {
         if (nv50->framebuffer.cbufs[i] &&
             nv50->framebuffer.cbufs[i]->texture == res) {
            nv50->dirty_3d |= NV50_NEW_3D_FRAMEBUFFER;
            nouveau_bufctx_reset(nv50->bufctx_3d, NV50_BIND_3D_FB);
            if (!--ref)
               return ref;
         }
      }
   }
   if (bind & PIPE_BIND_DEPTH_STENCIL) {
      if (nv50->framebuffer.zsbuf &&
          nv50->framebuffer.zsbuf->texture == res) {
         nv50->dirty_3d |= NV50_NEW_3D_FRAMEBUFFER;
         nouveau_bufctx_reset(nv50->bufctx_3d, NV50_BIND_3D_FB);
         if (!--ref)
            return ref;
      }
   }

   if (bind & (PIPE_BIND_VERTEX_BUFFER |
               PIPE_BIND_INDEX_BUFFER |
               PIPE_BIND_CONSTANT_BUFFER |
               PIPE_BIND_STREAM_OUTPUT |
               PIPE_BIND_SAMPLER_VIEW)) {

      assert(nv50->num_vtxbufs <= PIPE_MAX_ATTRIBS);
      for (i = 0; i < nv50->num_vtxbufs; ++i) {
         if (nv50->vtxbuf[i].buffer.resource == res) {
            nv50->dirty_3d |= NV50_NEW_3D_ARRAYS;
            nouveau_bufctx_reset(nv50->bufctx_3d, NV50_BIND_3D_VERTEX);
            if (!--ref)
               return ref;
         }
      }

      for (s = 0; s < NV50_MAX_SHADER_STAGES; ++s) {
         assert(nv50->num_textures[s] <= PIPE_MAX_SAMPLERS);
         for (i = 0; i < nv50->num_textures[s]; ++i) {
            if (nv50->textures[s][i] &&
                nv50->textures[s][i]->texture == res) {
               if (unlikely(s == NV50_SHADER_STAGE_COMPUTE)) {
                  nv50->dirty_cp |= NV50_NEW_CP_TEXTURES;
                  nouveau_bufctx_reset(nv50->bufctx_cp, NV50_BIND_CP_TEXTURES);
               } else {
                  nv50->dirty_3d |= NV50_NEW_3D_TEXTURES;
                  nouveau_bufctx_reset(nv50->bufctx_3d, NV50_BIND_3D_TEXTURES);
               }
               if (!--ref)
                  return ref;
            }
         }
      }

      for (s = 0; s < NV50_MAX_SHADER_STAGES; ++s) {
         for (i = 0; i < NV50_MAX_PIPE_CONSTBUFS; ++i) {
            if (!(nv50->constbuf_valid[s] & (1 << i)))
               continue;
            if (!nv50->constbuf[s][i].user &&
                nv50->constbuf[s][i].u.buf == res) {
               nv50->constbuf_dirty[s] |= 1 << i;
               if (unlikely(s == NV50_SHADER_STAGE_COMPUTE)) {
                  nv50->dirty_cp |= NV50_NEW_CP_CONSTBUF;
                  nouveau_bufctx_reset(nv50->bufctx_cp, NV50_BIND_CP_CB(i));
               } else {
                  nv50->dirty_3d |= NV50_NEW_3D_CONSTBUF;
                  nouveau_bufctx_reset(nv50->bufctx_3d, NV50_BIND_3D_CB(s, i));
               }
               if (!--ref)
                  return ref;
            }
         }
      }
   }

   return ref;
}

static void
nv50_context_get_sample_position(struct pipe_context *, unsigned, unsigned,
                                 float *);

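/* Context creation: allocate the nv50_context, fill in the pipe_context
 * vtable, create the buffer contexts, pick the video decoding path based on
 * the chipset, and pin the screen-wide buffers every command stream needs. */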
struct pipe_context *
nv50_create(struct pipe_screen *pscreen, void *priv, unsigned ctxflags)
{
   struct nv50_screen *screen = nv50_screen(pscreen);
   struct nv50_context *nv50;
   struct pipe_context *pipe;
   int ret;
   uint32_t flags;

   nv50 = CALLOC_STRUCT(nv50_context);
   if (!nv50)
      return NULL;
   pipe = &nv50->base.pipe;

   if (!nv50_blitctx_create(nv50))
      goto out_err;

   nv50->base.pushbuf = screen->base.pushbuf;
   nv50->base.client = screen->base.client;

   ret = nouveau_bufctx_new(screen->base.client, 2, &nv50->bufctx);
   if (!ret)
      ret = nouveau_bufctx_new(screen->base.client, NV50_BIND_3D_COUNT,
                               &nv50->bufctx_3d);
   if (!ret)
      ret = nouveau_bufctx_new(screen->base.client, NV50_BIND_CP_COUNT,
                               &nv50->bufctx_cp);
   if (ret)
      goto out_err;

   nv50->base.screen    = &screen->base;
   nv50->base.copy_data = nv50_m2mf_copy_linear;
   nv50->base.push_data = nv50_sifc_linear_u8;
   nv50->base.push_cb   = nv50_cb_push;

   nv50->screen = screen;
   pipe->screen = pscreen;
   pipe->priv = priv;
   pipe->stream_uploader = u_upload_create_default(pipe);
   if (!pipe->stream_uploader)
      goto out_err;
   pipe->const_uploader = pipe->stream_uploader;

   pipe->destroy = nv50_destroy;

   pipe->draw_vbo = nv50_draw_vbo;
   pipe->clear = nv50_clear;
   pipe->launch_grid = nv50_launch_grid;

   pipe->flush = nv50_flush;
   pipe->texture_barrier = nv50_texture_barrier;
   pipe->memory_barrier = nv50_memory_barrier;
   pipe->get_sample_position = nv50_context_get_sample_position;
   pipe->emit_string_marker = nv50_emit_string_marker;

   if (!screen->cur_ctx) {
      /* Restore the last context's state here, normally handled during
       * context switch
       */
      nv50->state = screen->save_state;
      screen->cur_ctx = nv50;
      nouveau_pushbuf_bufctx(screen->base.pushbuf, nv50->bufctx);
   }
   nv50->base.pushbuf->kick_notify = nv50_default_kick_notify;

   nouveau_context_init(&nv50->base);
   nv50_init_query_functions(nv50);
   nv50_init_surface_functions(nv50);
   nv50_init_state_functions(nv50);
   nv50_init_resource_functions(pipe);

   nv50->base.invalidate_resource_storage = nv50_invalidate_resource_storage;

   if (screen->base.device->chipset < 0x84 ||
       debug_get_bool_option("NOUVEAU_PMPEG", false)) {
      /* PMPEG */
      nouveau_context_init_vdec(&nv50->base);
   } else if (screen->base.device->chipset < 0x98 ||
              screen->base.device->chipset == 0xa0) {
      /* VP2 */
      pipe->create_video_codec = nv84_create_decoder;
      pipe->create_video_buffer = nv84_video_buffer_create;
   } else {
      /* VP3/4 */
      pipe->create_video_codec = nv98_create_decoder;
      pipe->create_video_buffer = nv98_video_buffer_create;
   }

   flags = NOUVEAU_BO_VRAM | NOUVEAU_BO_RD;

   BCTX_REFN_bo(nv50->bufctx_3d, 3D_SCREEN, flags, screen->code);
   BCTX_REFN_bo(nv50->bufctx_3d, 3D_SCREEN, flags, screen->uniforms);
   BCTX_REFN_bo(nv50->bufctx_3d, 3D_SCREEN, flags, screen->txc);
   BCTX_REFN_bo(nv50->bufctx_3d, 3D_SCREEN, flags, screen->stack_bo);
   if (screen->compute) {
      BCTX_REFN_bo(nv50->bufctx_cp, CP_SCREEN, flags, screen->code);
      BCTX_REFN_bo(nv50->bufctx_cp, CP_SCREEN, flags, screen->uniforms);
      BCTX_REFN_bo(nv50->bufctx_cp, CP_SCREEN, flags, screen->txc);
      BCTX_REFN_bo(nv50->bufctx_cp, CP_SCREEN, flags, screen->stack_bo);
   }

   flags = NOUVEAU_BO_GART | NOUVEAU_BO_WR;

   BCTX_REFN_bo(nv50->bufctx_3d, 3D_SCREEN, flags, screen->fence.bo);
   BCTX_REFN_bo(nv50->bufctx, FENCE, flags, screen->fence.bo);
   if (screen->compute)
      BCTX_REFN_bo(nv50->bufctx_cp, CP_SCREEN, flags, screen->fence.bo);

   nv50->base.scratch.bo_size = 2 << 20;

   util_dynarray_init(&nv50->global_residents, NULL);

   // Make sure that the first TSC entry has SRGB conversion bit set, since we
   // use it as a fallback.
   if (!screen->tsc.entries[0])
      nv50_upload_tsc0(nv50);

   // And mark samplers as dirty so that the first slot would get bound to the
   // zero entry if it's not otherwise set.
   nv50->dirty_3d |= NV50_NEW_3D_SAMPLERS;

   return pipe;

out_err:
   if (pipe->stream_uploader)
      u_upload_destroy(pipe->stream_uploader);
   if (nv50->bufctx_3d)
      nouveau_bufctx_del(&nv50->bufctx_3d);
   if (nv50->bufctx_cp)
      nouveau_bufctx_del(&nv50->bufctx_cp);
   if (nv50->bufctx)
      nouveau_bufctx_del(&nv50->bufctx);
   FREE(nv50->blit);
   FREE(nv50);
   return NULL;
}

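/* Walk the bufctx's bufref list (current at flush time, pending otherwise)
 * and pass each attached nv04_resource to nv50_resource_validate along with
 * the access flags stashed in priv_data. */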
void
nv50_bufctx_fence(struct nouveau_bufctx *bufctx, bool on_flush)
{
   struct nouveau_list *list = on_flush ? &bufctx->current : &bufctx->pending;
   struct nouveau_list *it;

   for (it = list->next; it != list; it = it->next) {
      struct nouveau_bufref *ref = (struct nouveau_bufref *)it;
      struct nv04_resource *res = ref->priv;
      if (res)
         nv50_resource_validate(res, (unsigned)ref->priv_data);
   }
}

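/* Report the sub-pixel location of a sample: positions are encoded on a 16x16
 * grid within the pixel, hence the final 1/16 (0.0625) scaling. */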
static void
nv50_context_get_sample_position(struct pipe_context *pipe,
                                 unsigned sample_count, unsigned sample_index,
                                 float *xy)
{
   static const uint8_t ms1[1][2] = { { 0x8, 0x8 } };
   static const uint8_t ms2[2][2] = {
      { 0x4, 0x4 }, { 0xc, 0xc } }; /* surface coords (0,0), (1,0) */
   static const uint8_t ms4[4][2] = {
      { 0x6, 0x2 }, { 0xe, 0x6 },   /* (0,0), (1,0) */
      { 0x2, 0xa }, { 0xa, 0xe } }; /* (0,1), (1,1) */
   static const uint8_t ms8[8][2] = {
      { 0x1, 0x7 }, { 0x5, 0x3 },   /* (0,0), (1,0) */
      { 0x3, 0xd }, { 0x7, 0xb },   /* (0,1), (1,1) */
      { 0x9, 0x5 }, { 0xf, 0x1 },   /* (2,0), (3,0) */
      { 0xb, 0xf }, { 0xd, 0x9 } }; /* (2,1), (3,1) */
#if 0
   /* NOTE: there are alternative modes for MS2 and MS8, currently not used */
   static const uint8_t ms8_alt[8][2] = {
      { 0x9, 0x5 }, { 0x7, 0xb },   /* (2,0), (1,1) */
      { 0xd, 0x9 }, { 0x5, 0x3 },   /* (3,1), (1,0) */
      { 0x3, 0xd }, { 0x1, 0x7 },   /* (0,1), (0,0) */
      { 0xb, 0xf }, { 0xf, 0x1 } }; /* (2,1), (3,0) */
#endif

   const uint8_t (*ptr)[2];

   switch (sample_count) {
   case 0:
   case 1: ptr = ms1; break;
   case 2: ptr = ms2; break;
   case 4: ptr = ms4; break;
   case 8: ptr = ms8; break;
   default:
      assert(0);
      return; /* bad sample count -> undefined locations */
   }
   xy[0] = ptr[sample_index][0] * 0.0625f;
   xy[1] = ptr[sample_index][1] * 0.0625f;
}
489