/*
 * Copyright 2010 Christoph Bumiller
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
 */

#include "pipe/p_defines.h"
#include "util/u_framebuffer.h"
#include "util/u_upload_mgr.h"

#include "nv50/nv50_context.h"
#include "nv50/nv50_screen.h"
#include "nv50/nv50_resource.h"

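/* pipe_context::flush: hand the caller a reference to the screen's current
 * fence (if requested) and kick the pushbuf out to the kernel.
 */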
static void
nv50_flush(struct pipe_context *pipe,
           struct pipe_fence_handle **fence,
           unsigned flags)
{
   struct nouveau_screen *screen = nouveau_screen(pipe->screen);

   if (fence)
      nouveau_fence_ref(screen->fence.current, (struct nouveau_fence **)fence);

   PUSH_KICK(screen->pushbuf);

   nouveau_context_update_frame_stats(nouveau_context(pipe));
}

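/* pipe_context::texture_barrier: serialize the 3D engine and flush the
 * texture cache so subsequent texture fetches see prior rendering.
 */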
static void
nv50_texture_barrier(struct pipe_context *pipe, unsigned flags)
{
   struct nouveau_pushbuf *push = nv50_context(pipe)->base.pushbuf;

   BEGIN_NV04(push, SUBC_3D(NV50_GRAPH_SERIALIZE), 1);
   PUSH_DATA (push, 0);
   BEGIN_NV04(push, NV50_3D(TEX_CACHE_CTL), 1);
   PUSH_DATA (push, 0x20);
}

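/* pipe_context::memory_barrier: for persistently mapped buffers, flag the
 * bound vertex and constant buffers as dirty so they are re-validated
 * before the next draw.
 */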
static void
nv50_memory_barrier(struct pipe_context *pipe, unsigned flags)
{
   struct nv50_context *nv50 = nv50_context(pipe);
   int i, s;

   if (flags & PIPE_BARRIER_MAPPED_BUFFER) {
      for (i = 0; i < nv50->num_vtxbufs; ++i) {
         if (!nv50->vtxbuf[i].buffer.resource && !nv50->vtxbuf[i].is_user_buffer)
            continue;
         if (nv50->vtxbuf[i].buffer.resource->flags & PIPE_RESOURCE_FLAG_MAP_PERSISTENT)
            nv50->base.vbo_dirty = true;
      }

      for (s = 0; s < 3 && !nv50->cb_dirty; ++s) {
         uint32_t valid = nv50->constbuf_valid[s];

         while (valid && !nv50->cb_dirty) {
            const unsigned i = ffs(valid) - 1;
            struct pipe_resource *res;

            valid &= ~(1 << i);
            if (nv50->constbuf[s][i].user)
               continue;

            res = nv50->constbuf[s][i].u.buf;
            if (!res)
               continue;

            if (res->flags & PIPE_RESOURCE_FLAG_MAP_PERSISTENT)
               nv50->cb_dirty = true;
         }
      }
   }
}

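/* pipe_context::emit_string_marker: write the debug string into the command
 * stream as the payload of a graphics NOP method so it is visible in
 * captured pushbuffers.
 */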
static void
nv50_emit_string_marker(struct pipe_context *pipe, const char *str, int len)
{
   struct nouveau_pushbuf *push = nv50_context(pipe)->base.pushbuf;
   int string_words = len / 4;
   int data_words;

   if (len <= 0)
      return;
   string_words = MIN2(string_words, NV04_PFIFO_MAX_PACKET_LEN);
   if (string_words == NV04_PFIFO_MAX_PACKET_LEN)
      data_words = string_words;
   else
      data_words = string_words + !!(len & 3);
   BEGIN_NI04(push, SUBC_3D(NV04_GRAPH_NOP), data_words);
   if (string_words)
      PUSH_DATAp(push, str, string_words);
   if (string_words != data_words) {
      int data = 0;
      memcpy(&data, &str[string_words * 4], len & 3);
      PUSH_DATA (push, data);
   }
}

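/* Pushbuf kick notification: emit/update fences and remember that the
 * current context's state has been flushed.
 */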
void
nv50_default_kick_notify(struct nouveau_pushbuf *push)
{
   struct nv50_screen *screen = push->user_priv;

   if (screen) {
      nouveau_fence_next(&screen->base);
      nouveau_fence_update(&screen->base, true);
      if (screen->cur_ctx)
         screen->cur_ctx->state.flushed = true;
   }
}

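/* Drop every resource reference held in the context's bound state
 * (framebuffer, vertex buffers, textures, constant buffers, globals).
 */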
static void
nv50_context_unreference_resources(struct nv50_context *nv50)
{
   unsigned s, i;

   nouveau_bufctx_del(&nv50->bufctx_3d);
   nouveau_bufctx_del(&nv50->bufctx);
   nouveau_bufctx_del(&nv50->bufctx_cp);

   util_unreference_framebuffer_state(&nv50->framebuffer);

   assert(nv50->num_vtxbufs <= PIPE_MAX_ATTRIBS);
   for (i = 0; i < nv50->num_vtxbufs; ++i)
      pipe_vertex_buffer_unreference(&nv50->vtxbuf[i]);

   for (s = 0; s < 3; ++s) {
      assert(nv50->num_textures[s] <= PIPE_MAX_SAMPLERS);
      for (i = 0; i < nv50->num_textures[s]; ++i)
         pipe_sampler_view_reference(&nv50->textures[s][i], NULL);

      for (i = 0; i < NV50_MAX_PIPE_CONSTBUFS; ++i)
         if (!nv50->constbuf[s][i].user)
            pipe_resource_reference(&nv50->constbuf[s][i].u.buf, NULL);
   }

   for (i = 0; i < nv50->global_residents.size / sizeof(struct pipe_resource *);
        ++i) {
      struct pipe_resource **res = util_dynarray_element(
         &nv50->global_residents, struct pipe_resource *, i);
      pipe_resource_reference(res, NULL);
   }
   util_dynarray_fini(&nv50->global_residents);
}

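/* pipe_context::destroy: if this is the active context, save its state on
 * the screen for reuse; then flush outstanding commands and release
 * everything the context owns.
 */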
static void
nv50_destroy(struct pipe_context *pipe)
{
   struct nv50_context *nv50 = nv50_context(pipe);

   if (nv50->screen->cur_ctx == nv50) {
      nv50->screen->cur_ctx = NULL;
      /* Save off the state in case another context gets created */
      nv50->screen->save_state = nv50->state;
   }

   if (nv50->base.pipe.stream_uploader)
      u_upload_destroy(nv50->base.pipe.stream_uploader);

   nouveau_pushbuf_bufctx(nv50->base.pushbuf, NULL);
   nouveau_pushbuf_kick(nv50->base.pushbuf, nv50->base.pushbuf->channel);

   nv50_context_unreference_resources(nv50);

   FREE(nv50->blit);

   nouveau_context_destroy(&nv50->base);
}

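/* Called when a resource's backing storage changes: mark every binding that
 * references it as dirty so it gets re-emitted, decrementing @ref per hit
 * and returning early once no references remain to be accounted for.
 */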
static int
nv50_invalidate_resource_storage(struct nouveau_context *ctx,
                                 struct pipe_resource *res,
                                 int ref)
{
   struct nv50_context *nv50 = nv50_context(&ctx->pipe);
   unsigned bind = res->bind ? res->bind : PIPE_BIND_VERTEX_BUFFER;
   unsigned s, i;

   if (bind & PIPE_BIND_RENDER_TARGET) {
      assert(nv50->framebuffer.nr_cbufs <= PIPE_MAX_COLOR_BUFS);
      for (i = 0; i < nv50->framebuffer.nr_cbufs; ++i) {
         if (nv50->framebuffer.cbufs[i] &&
             nv50->framebuffer.cbufs[i]->texture == res) {
            nv50->dirty_3d |= NV50_NEW_3D_FRAMEBUFFER;
            nouveau_bufctx_reset(nv50->bufctx_3d, NV50_BIND_3D_FB);
            if (!--ref)
               return ref;
         }
      }
   }
   if (bind & PIPE_BIND_DEPTH_STENCIL) {
      if (nv50->framebuffer.zsbuf &&
          nv50->framebuffer.zsbuf->texture == res) {
         nv50->dirty_3d |= NV50_NEW_3D_FRAMEBUFFER;
         nouveau_bufctx_reset(nv50->bufctx_3d, NV50_BIND_3D_FB);
         if (!--ref)
            return ref;
      }
   }

   if (bind & (PIPE_BIND_VERTEX_BUFFER |
               PIPE_BIND_INDEX_BUFFER |
               PIPE_BIND_CONSTANT_BUFFER |
               PIPE_BIND_STREAM_OUTPUT |
               PIPE_BIND_SAMPLER_VIEW)) {

      assert(nv50->num_vtxbufs <= PIPE_MAX_ATTRIBS);
      for (i = 0; i < nv50->num_vtxbufs; ++i) {
         if (nv50->vtxbuf[i].buffer.resource == res) {
            nv50->dirty_3d |= NV50_NEW_3D_ARRAYS;
            nouveau_bufctx_reset(nv50->bufctx_3d, NV50_BIND_3D_VERTEX);
            if (!--ref)
               return ref;
         }
      }

      for (s = 0; s < 3; ++s) {
         assert(nv50->num_textures[s] <= PIPE_MAX_SAMPLERS);
         for (i = 0; i < nv50->num_textures[s]; ++i) {
            if (nv50->textures[s][i] &&
                nv50->textures[s][i]->texture == res) {
               nv50->dirty_3d |= NV50_NEW_3D_TEXTURES;
               nouveau_bufctx_reset(nv50->bufctx_3d, NV50_BIND_3D_TEXTURES);
               if (!--ref)
                  return ref;
            }
         }
      }

      for (s = 0; s < 3; ++s) {
         for (i = 0; i < NV50_MAX_PIPE_CONSTBUFS; ++i) {
            if (!(nv50->constbuf_valid[s] & (1 << i)))
               continue;
            if (!nv50->constbuf[s][i].user &&
                nv50->constbuf[s][i].u.buf == res) {
               nv50->dirty_3d |= NV50_NEW_3D_CONSTBUF;
               nv50->constbuf_dirty[s] |= 1 << i;
               nouveau_bufctx_reset(nv50->bufctx_3d, NV50_BIND_3D_CB(s, i));
               if (!--ref)
                  return ref;
            }
         }
      }
   }

   return ref;
}

static void
nv50_context_get_sample_position(struct pipe_context *, unsigned, unsigned,
                                 float *);

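/* Create an nv50 context: set up buffer contexts and uploaders, hook up the
 * pipe entry points, pick the video decoding path for the chipset, and
 * reference the screen's global buffers in the 3D/compute bufctx.
 */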
struct pipe_context *
nv50_create(struct pipe_screen *pscreen, void *priv, unsigned ctxflags)
{
   struct nv50_screen *screen = nv50_screen(pscreen);
   struct nv50_context *nv50;
   struct pipe_context *pipe;
   int ret;
   uint32_t flags;

   nv50 = CALLOC_STRUCT(nv50_context);
   if (!nv50)
      return NULL;
   pipe = &nv50->base.pipe;

   if (!nv50_blitctx_create(nv50))
      goto out_err;

   nv50->base.pushbuf = screen->base.pushbuf;
   nv50->base.client = screen->base.client;

   ret = nouveau_bufctx_new(screen->base.client, 2, &nv50->bufctx);
   if (!ret)
      ret = nouveau_bufctx_new(screen->base.client, NV50_BIND_3D_COUNT,
                               &nv50->bufctx_3d);
   if (!ret)
      ret = nouveau_bufctx_new(screen->base.client, NV50_BIND_CP_COUNT,
                               &nv50->bufctx_cp);
   if (ret)
      goto out_err;

   nv50->base.screen = &screen->base;
   nv50->base.copy_data = nv50_m2mf_copy_linear;
   nv50->base.push_data = nv50_sifc_linear_u8;
   nv50->base.push_cb = nv50_cb_push;

   nv50->screen = screen;
   pipe->screen = pscreen;
   pipe->priv = priv;
   pipe->stream_uploader = u_upload_create_default(pipe);
   if (!pipe->stream_uploader)
      goto out_err;
   pipe->const_uploader = pipe->stream_uploader;

   pipe->destroy = nv50_destroy;

   pipe->draw_vbo = nv50_draw_vbo;
   pipe->clear = nv50_clear;
   pipe->launch_grid = nv50_launch_grid;

   pipe->flush = nv50_flush;
   pipe->texture_barrier = nv50_texture_barrier;
   pipe->memory_barrier = nv50_memory_barrier;
   pipe->get_sample_position = nv50_context_get_sample_position;
   pipe->emit_string_marker = nv50_emit_string_marker;

   if (!screen->cur_ctx) {
      /* Restore the last context's state here, normally handled during
       * context switch
       */
      nv50->state = screen->save_state;
      screen->cur_ctx = nv50;
      nouveau_pushbuf_bufctx(screen->base.pushbuf, nv50->bufctx);
   }
   nv50->base.pushbuf->kick_notify = nv50_default_kick_notify;

   nouveau_context_init(&nv50->base);
   nv50_init_query_functions(nv50);
   nv50_init_surface_functions(nv50);
   nv50_init_state_functions(nv50);
   nv50_init_resource_functions(pipe);

   nv50->base.invalidate_resource_storage = nv50_invalidate_resource_storage;

   if (screen->base.device->chipset < 0x84 ||
       debug_get_bool_option("NOUVEAU_PMPEG", false)) {
      /* PMPEG */
      nouveau_context_init_vdec(&nv50->base);
   } else if (screen->base.device->chipset < 0x98 ||
              screen->base.device->chipset == 0xa0) {
      /* VP2 */
      pipe->create_video_codec = nv84_create_decoder;
      pipe->create_video_buffer = nv84_video_buffer_create;
   } else {
      /* VP3/4 */
      pipe->create_video_codec = nv98_create_decoder;
      pipe->create_video_buffer = nv98_video_buffer_create;
   }

   flags = NOUVEAU_BO_VRAM | NOUVEAU_BO_RD;

   BCTX_REFN_bo(nv50->bufctx_3d, 3D_SCREEN, flags, screen->code);
   BCTX_REFN_bo(nv50->bufctx_3d, 3D_SCREEN, flags, screen->uniforms);
   BCTX_REFN_bo(nv50->bufctx_3d, 3D_SCREEN, flags, screen->txc);
   BCTX_REFN_bo(nv50->bufctx_3d, 3D_SCREEN, flags, screen->stack_bo);
   if (screen->compute) {
      BCTX_REFN_bo(nv50->bufctx_cp, CP_SCREEN, flags, screen->code);
      BCTX_REFN_bo(nv50->bufctx_cp, CP_SCREEN, flags, screen->txc);
      BCTX_REFN_bo(nv50->bufctx_cp, CP_SCREEN, flags, screen->stack_bo);
   }

   flags = NOUVEAU_BO_GART | NOUVEAU_BO_WR;

   BCTX_REFN_bo(nv50->bufctx_3d, 3D_SCREEN, flags, screen->fence.bo);
   BCTX_REFN_bo(nv50->bufctx, FENCE, flags, screen->fence.bo);
   if (screen->compute)
      BCTX_REFN_bo(nv50->bufctx_cp, CP_SCREEN, flags, screen->fence.bo);

   nv50->base.scratch.bo_size = 2 << 20;

   util_dynarray_init(&nv50->global_residents, NULL);

   // Make sure that the first TSC entry has SRGB conversion bit set, since we
   // use it as a fallback.
   if (!screen->tsc.entries[0])
      nv50_upload_tsc0(nv50);

   // And mark samplers as dirty so that the first slot would get bound to the
   // zero entry if it's not otherwise set.
   nv50->dirty_3d |= NV50_NEW_3D_SAMPLERS;

   return pipe;

out_err:
   if (pipe->stream_uploader)
      u_upload_destroy(pipe->stream_uploader);
   if (nv50->bufctx_3d)
      nouveau_bufctx_del(&nv50->bufctx_3d);
   if (nv50->bufctx_cp)
      nouveau_bufctx_del(&nv50->bufctx_cp);
   if (nv50->bufctx)
      nouveau_bufctx_del(&nv50->bufctx);
   FREE(nv50->blit);
   FREE(nv50);
   return NULL;
}

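/* Fence/validate every resource referenced by @bufctx: walk the 'current'
 * list when called at flush time, the 'pending' list otherwise, passing
 * each attached nv04_resource its stored usage flags.
 */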
void
nv50_bufctx_fence(struct nouveau_bufctx *bufctx, bool on_flush)
{
   struct nouveau_list *list = on_flush ? &bufctx->current : &bufctx->pending;
   struct nouveau_list *it;

   for (it = list->next; it != list; it = it->next) {
      struct nouveau_bufref *ref = (struct nouveau_bufref *)it;
      struct nv04_resource *res = ref->priv;
      if (res)
         nv50_resource_validate(res, (unsigned)ref->priv_data);
   }
}

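/* pipe_context::get_sample_position: report the fixed MSAA sample positions;
 * the tables store x/y in 1/16ths of a pixel.
 */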
static void
nv50_context_get_sample_position(struct pipe_context *pipe,
                                 unsigned sample_count, unsigned sample_index,
                                 float *xy)
{
   static const uint8_t ms1[1][2] = { { 0x8, 0x8 } };
   static const uint8_t ms2[2][2] = {
      { 0x4, 0x4 }, { 0xc, 0xc } }; /* surface coords (0,0), (1,0) */
   static const uint8_t ms4[4][2] = {
      { 0x6, 0x2 }, { 0xe, 0x6 }, /* (0,0), (1,0) */
      { 0x2, 0xa }, { 0xa, 0xe } }; /* (0,1), (1,1) */
   static const uint8_t ms8[8][2] = {
      { 0x1, 0x7 }, { 0x5, 0x3 }, /* (0,0), (1,0) */
      { 0x3, 0xd }, { 0x7, 0xb }, /* (0,1), (1,1) */
      { 0x9, 0x5 }, { 0xf, 0x1 }, /* (2,0), (3,0) */
      { 0xb, 0xf }, { 0xd, 0x9 } }; /* (2,1), (3,1) */
#if 0
   /* NOTE: there are alternative modes for MS2 and MS8, currently not used */
   static const uint8_t ms8_alt[8][2] = {
      { 0x9, 0x5 }, { 0x7, 0xb }, /* (2,0), (1,1) */
      { 0xd, 0x9 }, { 0x5, 0x3 }, /* (3,1), (1,0) */
      { 0x3, 0xd }, { 0x1, 0x7 }, /* (0,1), (0,0) */
      { 0xb, 0xf }, { 0xf, 0x1 } }; /* (2,1), (3,0) */
#endif

   const uint8_t (*ptr)[2];

   switch (sample_count) {
   case 0:
   case 1: ptr = ms1; break;
   case 2: ptr = ms2; break;
   case 4: ptr = ms4; break;
   case 8: ptr = ms8; break;
   default:
      assert(0);
      return; /* bad sample count -> undefined locations */
   }
   xy[0] = ptr[sample_index][0] * 0.0625f;
   xy[1] = ptr[sample_index][1] * 0.0625f;
}