/*
 * Copyright 2011 Joakim Sindholt <opensource@zhasha.com>
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * on the rights to use, copy, modify, merge, publish, distribute, sub
 * license, and/or sell copies of the Software, and to permit persons to whom
 * the Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHOR(S) AND/OR THEIR SUPPLIERS BE LIABLE FOR ANY CLAIM,
 * DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
 * OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
 * USE OR OTHER DEALINGS IN THE SOFTWARE. */

#include "iunknown.h"
#include "surface9.h"
#include "device9.h"

/* for marking dirty */
#include "basetexture9.h"
#include "texture9.h"
#include "cubetexture9.h"

#include "nine_helpers.h"
#include "nine_pipe.h"
#include "nine_dump.h"
#include "nine_memory_helper.h"
#include "nine_state.h"

#include "pipe/p_context.h"
#include "pipe/p_screen.h"
#include "pipe/p_state.h"

#include "util/u_math.h"
#include "util/u_inlines.h"
#include "util/u_surface.h"

#define DBG_CHANNEL DBG_SURFACE

static void
NineSurface9_CreatePipeSurfaces( struct NineSurface9 *This );

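/* Constructor: translate the D3D9 surface description into a pipe_resource
 * template, allocate system-memory or internal staging storage when needed,
 * construct the base resource and create the render-target/depth-stencil
 * pipe surfaces. */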
HRESULT
NineSurface9_ctor( struct NineSurface9 *This,
                   struct NineUnknownParams *pParams,
                   struct NineUnknown *pContainer,
                   struct pipe_resource *pResource,
                   struct nine_allocation *user_buffer,
                   uint8_t TextureType,
                   unsigned Level,
                   unsigned Layer,
                   D3DSURFACE_DESC *pDesc )
{
    HRESULT hr;
    bool allocate = !pContainer && pDesc->Format != D3DFMT_NULL;
    D3DMULTISAMPLE_TYPE multisample_type;

    DBG("This=%p pDevice=%p pResource=%p Level=%u Layer=%u pDesc=%p\n",
        This, pParams->device, pResource, Level, Layer, pDesc);

    /* Mark this as a special surface held by another internal resource. */
    pParams->container = pContainer;
    This->base.base.device = pParams->device; /* Early fill this field in case of failure */
    /* Make sure there's a Desc */
    assert(pDesc);

    assert(allocate || pResource || user_buffer ||
           pDesc->Format == D3DFMT_NULL);
    assert(!allocate || (!pResource && !user_buffer));
    assert(!pResource || !user_buffer);
    assert(!user_buffer || pDesc->Pool != D3DPOOL_DEFAULT);
    assert(!pResource || pDesc->Pool == D3DPOOL_DEFAULT);
    /* Allocation only from create_zs_or_rt_surface with params 0 0 0 */
    assert(!allocate || (Level == 0 && Layer == 0 && TextureType == 0));

    This->data = user_buffer;

    multisample_type = pDesc->MultiSampleType;

    /* Map MultiSampleQuality to MultiSampleType */
    hr = d3dmultisample_type_check(pParams->device->screen,
                                   pDesc->Format,
                                   &multisample_type,
                                   pDesc->MultiSampleQuality,
                                   NULL);
    if (FAILED(hr)) {
        return hr;
    }

    /* TODO: this is (except width and height) duplicated from the
     * container info (in the pContainer case). Some refactoring is
     * needed to avoid the duplication. */
    This->base.info.screen = pParams->device->screen;
    This->base.info.target = PIPE_TEXTURE_2D;
    This->base.info.width0 = pDesc->Width;
    This->base.info.height0 = pDesc->Height;
    This->base.info.depth0 = 1;
    This->base.info.last_level = 0;
    This->base.info.array_size = 1;
    This->base.info.nr_samples = multisample_type;
    This->base.info.nr_storage_samples = multisample_type;
    This->base.info.usage = PIPE_USAGE_DEFAULT;
    This->base.info.bind = PIPE_BIND_SAMPLER_VIEW; /* StretchRect */

    if (pDesc->Usage & D3DUSAGE_RENDERTARGET) {
        This->base.info.bind |= PIPE_BIND_RENDER_TARGET;
    } else if (pDesc->Usage & D3DUSAGE_DEPTHSTENCIL) {
        if (!depth_stencil_format(pDesc->Format))
            return D3DERR_INVALIDCALL;
        This->base.info.bind = d3d9_get_pipe_depth_format_bindings(pDesc->Format);
        if (TextureType)
            This->base.info.bind |= PIPE_BIND_SAMPLER_VIEW;
    }

    This->base.info.flags = 0;
    This->base.info.format = d3d9_to_pipe_format_checked(This->base.info.screen,
                                                         pDesc->Format,
                                                         This->base.info.target,
                                                         This->base.info.nr_samples,
                                                         This->base.info.bind,
                                                         FALSE,
                                                         pDesc->Pool == D3DPOOL_SCRATCH);

    if (This->base.info.format == PIPE_FORMAT_NONE && pDesc->Format != D3DFMT_NULL)
        return D3DERR_INVALIDCALL;

    if (allocate && compressed_format(pDesc->Format)) {
        const unsigned w = util_format_get_blockwidth(This->base.info.format);
        const unsigned h = util_format_get_blockheight(This->base.info.format);

        /* Note: in the !allocate case, the test could fail (lower levels of a texture) */
        user_assert(!(pDesc->Width % w) && !(pDesc->Height % h), D3DERR_INVALIDCALL);
    }

    /* Get true format */
    This->format_internal = d3d9_to_pipe_format_checked(This->base.info.screen,
                                                        pDesc->Format,
                                                        This->base.info.target,
                                                        This->base.info.nr_samples,
                                                        This->base.info.bind,
                                                        FALSE,
                                                        TRUE);
    if (This->base.info.format != This->format_internal ||
        /* DYNAMIC textures require the same stride as RAM buffers.
         * The workaround stores a copy in RAM for locks. It eats more virtual
         * address space, but that is compensated by the use of shmem. */
        (pParams->device->workarounds.dynamic_texture_workaround &&
         pDesc->Pool == D3DPOOL_DEFAULT && pDesc->Usage & D3DUSAGE_DYNAMIC)) {
        This->data_internal = nine_allocate(pParams->device->allocator,
            nine_format_get_level_alloc_size(This->format_internal,
                                             pDesc->Width,
                                             pDesc->Height,
                                             0));
        if (!This->data_internal)
            return E_OUTOFMEMORY;
        This->stride_internal = nine_format_get_stride(This->format_internal,
                                                       pDesc->Width);
    }

    if ((allocate && pDesc->Pool != D3DPOOL_DEFAULT) || pDesc->Format == D3DFMT_NULL) {
        /* RAM buffer with no parent: the surface has to allocate the storage itself. */
        assert(!user_buffer);
        This->data = nine_allocate(pParams->device->allocator,
            nine_format_get_level_alloc_size(This->base.info.format,
                                             pDesc->Width,
                                             pDesc->Height,
                                             0));
        if (!This->data)
            return E_OUTOFMEMORY;
    }

    hr = NineResource9_ctor(&This->base, pParams, pResource,
                            allocate && (pDesc->Pool == D3DPOOL_DEFAULT),
                            D3DRTYPE_SURFACE, pDesc->Pool, pDesc->Usage);

    if (FAILED(hr))
        return hr;

    This->transfer = NULL;

    This->texture = TextureType;
    This->level = Level;
    This->level_actual = Level;
    This->layer = Layer;
    This->desc = *pDesc;

    This->stride = nine_format_get_stride(This->base.info.format, pDesc->Width);

    if (This->base.resource && (pDesc->Usage & D3DUSAGE_DYNAMIC))
        This->base.resource->flags |= NINE_RESOURCE_FLAG_LOCKABLE;

    if (This->base.resource && (pDesc->Usage & (D3DUSAGE_RENDERTARGET | D3DUSAGE_DEPTHSTENCIL)))
        NineSurface9_CreatePipeSurfaces(This);

    /* TODO: investigate what else exactly needs to be cleared */
    if (This->base.resource && (pDesc->Usage & D3DUSAGE_RENDERTARGET))
        nine_context_clear_render_target(pParams->device, This, 0, 0, 0, pDesc->Width, pDesc->Height);

    NineSurface9_Dump(This);

    return D3D_OK;
}

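/* Destructor: unmap any outstanding transfer, flush pending uploads, release
 * the pipe_surface references and free the system-memory and internal
 * staging buffers owned by the surface. */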
void
NineSurface9_dtor( struct NineSurface9 *This )
{
    bool is_worker = nine_context_is_worker(This->base.base.device);
    DBG("This=%p\n", This);

    if (This->transfer) {
        struct pipe_context *pipe = nine_context_get_pipe_multithread(This->base.base.device);
        pipe->texture_unmap(pipe, This->transfer);
        This->transfer = NULL;
    }

    /* Note: the following condition cannot happen currently, since we
     * refcount the surface in the functions increasing
     * pending_uploads_counter. */
    if (p_atomic_read(&This->pending_uploads_counter))
        nine_csmt_process(This->base.base.device);

    pipe_surface_reference(&This->surface[0], NULL);
    pipe_surface_reference(&This->surface[1], NULL);

    if (!is_worker && This->lock_count && (This->data_internal || This->data)) {
        /* In the is_worker case, nine_free_worker will handle it. */
        nine_pointer_strongrelease(This->base.base.device->allocator,
                                   This->data_internal ? This->data_internal : This->data);
    }

    /* Release system memory when we have to manage it (no parent) */
    if (This->data) {
        if (is_worker)
            nine_free_worker(This->base.base.device->allocator, This->data);
        else
            nine_free(This->base.base.device->allocator, This->data);
    }
    if (This->data_internal) {
        if (is_worker)
            nine_free_worker(This->base.base.device->allocator, This->data_internal);
        else
            nine_free(This->base.base.device->allocator, This->data_internal);
    }
    NineResource9_dtor(&This->base);
}

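/* Create the two pipe_surfaces used when rendering to this surface:
 * surface[0] uses the resource format and surface[1] its sRGB variant
 * (falling back to the linear format when sRGB is not supported). */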
static void
NineSurface9_CreatePipeSurfaces( struct NineSurface9 *This )
{
    struct pipe_context *pipe;
    struct pipe_screen *screen = NineDevice9_GetScreen(This->base.base.device);
    struct pipe_resource *resource = This->base.resource;
    struct pipe_surface templ;
    enum pipe_format srgb_format;

    assert(This->desc.Pool == D3DPOOL_DEFAULT);
    assert(resource);

    srgb_format = util_format_srgb(resource->format);
    if (srgb_format == PIPE_FORMAT_NONE ||
        !screen->is_format_supported(screen, srgb_format,
                                     resource->target, 0, 0, resource->bind))
        srgb_format = resource->format;

    memset(&templ, 0, sizeof(templ));
    templ.format = resource->format;
    templ.u.tex.level = This->level;
    templ.u.tex.first_layer = This->layer;
    templ.u.tex.last_layer = This->layer;

    pipe = nine_context_get_pipe_acquire(This->base.base.device);

    This->surface[0] = pipe->create_surface(pipe, resource, &templ);

    memset(&templ, 0, sizeof(templ));
    templ.format = srgb_format;
    templ.u.tex.level = This->level;
    templ.u.tex.first_layer = This->layer;
    templ.u.tex.last_layer = This->layer;

    This->surface[1] = pipe->create_surface(pipe, resource, &templ);

    nine_context_get_pipe_release(This->base.base.device);

    assert(This->surface[0]); /* TODO: Handle failure */
    assert(This->surface[1]);
}

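/* Debug helper: print the surface description and, when the surface belongs
 * to a texture, recurse into the parent base texture dump. */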
#if defined(DEBUG) || !defined(NDEBUG)
void
NineSurface9_Dump( struct NineSurface9 *This )
{
    struct NineBaseTexture9 *tex;
    GUID id = IID_IDirect3DBaseTexture9;
    REFIID ref = &id;

    DBG("\nNineSurface9(%p->%p/%p): Pool=%s Type=%s Usage=%s\n"
        "Dims=%ux%u Format=%s Stride=%u Lockable=%i\n"
        "Level=%u(%u), Layer=%u\n", This, This->base.resource, This->data,
        nine_D3DPOOL_to_str(This->desc.Pool),
        nine_D3DRTYPE_to_str(This->desc.Type),
        nine_D3DUSAGE_to_str(This->desc.Usage),
        This->desc.Width, This->desc.Height,
        d3dformat_to_string(This->desc.Format), This->stride,
        This->base.resource &&
        (This->base.resource->flags & NINE_RESOURCE_FLAG_LOCKABLE),
        This->level, This->level_actual, This->layer);

    if (!This->base.base.container)
        return;
    NineUnknown_QueryInterface(This->base.base.container, ref, (void **)&tex);
    if (tex) {
        NineBaseTexture9_Dump(tex);
        NineUnknown_Release(NineUnknown(tex));
    }
}
#endif /* DEBUG || !NDEBUG */

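/* D3D9 GetContainer: return the parent container, or the device itself for
 * stand-alone surfaces (offscreen plain, render target, depth/stencil). */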
HRESULT NINE_WINAPI
NineSurface9_GetContainer( struct NineSurface9 *This,
                           REFIID riid,
                           void **ppContainer )
{
    HRESULT hr;
    char guid_str[64];

    DBG("This=%p riid=%p id=%s ppContainer=%p\n",
        This, riid, riid ? GUID_sprintf(guid_str, riid) : "", ppContainer);

    (void)guid_str;

    if (!ppContainer) return E_POINTER;

    /* Use device for OffscreenPlainSurface, DepthStencilSurface and RenderTarget */
    hr = NineUnknown_QueryInterface(NineUnknown(This)->container ?
        NineUnknown(This)->container : &NineUnknown(This)->device->base,
        riid, ppContainer);
    if (FAILED(hr))
        DBG("QueryInterface FAILED!\n");
    return hr;
}

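/* Propagate dirtiness to the parent texture: mark MANAGED textures for
 * re-upload and AUTOGENMIPMAP textures for mipmap regeneration. */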
void
NineSurface9_MarkContainerDirty( struct NineSurface9 *This )
{
    if (This->texture) {
        struct NineBaseTexture9 *tex =
            NineBaseTexture9(This->base.base.container);
        assert(tex);
        assert(This->texture == D3DRTYPE_TEXTURE ||
               This->texture == D3DRTYPE_CUBETEXTURE);
        if (This->base.pool == D3DPOOL_MANAGED)
            tex->managed.dirty = TRUE;
        else
        if (This->base.usage & D3DUSAGE_AUTOGENMIPMAP)
            tex->dirty_mip = TRUE;

        BASETEX_REGISTER_UPDATE(tex);
    }
}

HRESULT NINE_WINAPI
NineSurface9_GetDesc( struct NineSurface9 *This,
                      D3DSURFACE_DESC *pDesc )
{
    user_assert(pDesc != NULL, E_POINTER);
    DBG("This=%p pDesc=%p\n", This, pDesc);
    *pDesc = This->desc;
    return D3D_OK;
}

/* Add the dirty rect to the parent texture */
inline void
NineSurface9_AddDirtyRect( struct NineSurface9 *This,
                           const struct pipe_box *box )
{
    RECT dirty_rect;

    DBG("This=%p box=%p\n", This, box);

    assert (This->base.pool != D3DPOOL_MANAGED ||
            This->texture == D3DRTYPE_CUBETEXTURE ||
            This->texture == D3DRTYPE_TEXTURE);

    if (This->base.pool == D3DPOOL_DEFAULT)
        return;

    /* Add a dirty rect to level 0 of the parent texture */
    dirty_rect.left = box->x << This->level_actual;
    dirty_rect.right = dirty_rect.left + (box->width << This->level_actual);
    dirty_rect.top = box->y << This->level_actual;
    dirty_rect.bottom = dirty_rect.top + (box->height << This->level_actual);

    if (This->texture == D3DRTYPE_TEXTURE) {
        struct NineTexture9 *tex =
            NineTexture9(This->base.base.container);

        NineTexture9_AddDirtyRect(tex, &dirty_rect);
    } else if (This->texture == D3DRTYPE_CUBETEXTURE) {
        struct NineCubeTexture9 *ctex =
            NineCubeTexture9(This->base.base.container);

        NineCubeTexture9_AddDirtyRect(ctex, This->layer, &dirty_rect);
    }
}

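/* Byte offset of coordinate (x, y) in a linear system-memory layout with the
 * given format and stride (block-compressed formats are handled through the
 * block dimensions). */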
static inline unsigned
NineSurface9_GetSystemMemOffset(enum pipe_format format, unsigned stride,
                                int x, int y)
{
    unsigned x_offset = util_format_get_stride(format, x);

    y = util_format_get_nblocksy(format, y);

    return y * stride + x_offset;
}

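/* D3D9 LockRect: map the surface for CPU access. Surfaces backed by system
 * memory (or by the internal staging copy) return a pointer into that
 * buffer; DEFAULT pool surfaces map the underlying pipe_resource. */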
HRESULT NINE_WINAPI
NineSurface9_LockRect( struct NineSurface9 *This,
                       D3DLOCKED_RECT *pLockedRect,
                       const RECT *pRect,
                       DWORD Flags )
{
    struct pipe_resource *resource = This->base.resource;
    struct pipe_context *pipe;
    struct pipe_box box;
    unsigned usage;

    DBG("This=%p pLockedRect=%p pRect=%p[%u..%u,%u..%u] Flags=%s\n", This,
        pLockedRect, pRect,
        pRect ? pRect->left : 0, pRect ? pRect->right : 0,
        pRect ? pRect->top : 0, pRect ? pRect->bottom : 0,
        nine_D3DLOCK_to_str(Flags));
    NineSurface9_Dump(This);

    /* check if it's already locked */
    user_assert(This->lock_count == 0, D3DERR_INVALIDCALL);

    /* set pBits to NULL after the lock_count check */
    user_assert(pLockedRect, E_POINTER);
    pLockedRect->pBits = NULL;

#ifdef NINE_STRICT
    user_assert(This->base.pool != D3DPOOL_DEFAULT ||
                (resource && (resource->flags & NINE_RESOURCE_FLAG_LOCKABLE)),
                D3DERR_INVALIDCALL);
#endif
    user_assert(!((Flags & D3DLOCK_DISCARD) && (Flags & D3DLOCK_READONLY)),
                D3DERR_INVALIDCALL);

    user_assert(This->desc.MultiSampleType == D3DMULTISAMPLE_NONE,
                D3DERR_INVALIDCALL);

    if (pRect && This->desc.Pool == D3DPOOL_DEFAULT &&
        util_format_is_compressed(This->base.info.format)) {
        const unsigned w = util_format_get_blockwidth(This->base.info.format);
        const unsigned h = util_format_get_blockheight(This->base.info.format);
        user_assert((pRect->left == 0 && pRect->right == This->desc.Width &&
                     pRect->top == 0 && pRect->bottom == This->desc.Height) ||
                    (!(pRect->left % w) && !(pRect->right % w) &&
                     !(pRect->top % h) && !(pRect->bottom % h)),
                    D3DERR_INVALIDCALL);
    }

    if (Flags & D3DLOCK_DISCARD) {
        usage = PIPE_MAP_WRITE | PIPE_MAP_DISCARD_RANGE;
    } else {
        usage = (Flags & D3DLOCK_READONLY) ?
            PIPE_MAP_READ : PIPE_MAP_READ_WRITE;
    }
    if (Flags & D3DLOCK_DONOTWAIT)
        usage |= PIPE_MAP_DONTBLOCK;

    if (pRect) {
        /* Windows XP accepts invalid locking rectangles, Windows 7 rejects
         * them. Use the Windows XP behaviour for now. */
        rect_to_pipe_box(&box, pRect);
    } else {
        u_box_origin_2d(This->desc.Width, This->desc.Height, &box);
    }
    box.z = This->layer;

    user_warn(This->desc.Format == D3DFMT_NULL);

    if (p_atomic_read(&This->pending_uploads_counter))
        nine_csmt_process(This->base.base.device);

    if (This->data_internal || This->data) {
        enum pipe_format format = This->base.info.format;
        unsigned stride = This->stride;
        uint8_t *data = nine_get_pointer(This->base.base.device->allocator,
                                         This->data_internal ? This->data_internal : This->data);
        if (This->data_internal) {
            format = This->format_internal;
            stride = This->stride_internal;
        }
        /* ATI1 and ATI2 need special handling, because of a d3d9 bug.
         * We must advertise to the application as if it is uncompressed
         * and bpp 8, and the app has a workaround to work with the fact
         * that it is actually compressed. */
        if (is_ATI1_ATI2(format)) {
            pLockedRect->Pitch = This->desc.Width;
            pLockedRect->pBits = data + box.y * This->desc.Width + box.x;
        } else {
            pLockedRect->Pitch = stride;
            pLockedRect->pBits = data +
                NineSurface9_GetSystemMemOffset(format,
                                                stride,
                                                box.x,
                                                box.y);
        }
        DBG("returning system memory %p\n", pLockedRect->pBits);
    } else {
        bool no_refs = !p_atomic_read(&This->base.base.bind) &&
            !(This->base.base.container && p_atomic_read(&This->base.base.container->bind));
        DBG("mapping pipe_resource %p (level=%u usage=%x)\n",
            resource, This->level, usage);

        /* if the object is not bound internally, there can't be any pending
         * operation with the surface in the queue */
        if (no_refs)
            pipe = nine_context_get_pipe_acquire(This->base.base.device);
        else
            pipe = NineDevice9_GetPipe(This->base.base.device);
        pLockedRect->pBits = pipe->texture_map(pipe, resource,
                                               This->level, usage, &box,
                                               &This->transfer);
        if (no_refs)
            nine_context_get_pipe_release(This->base.base.device);
        if (!This->transfer) {
            DBG("texture_map failed\n");
            if (Flags & D3DLOCK_DONOTWAIT)
                return D3DERR_WASSTILLDRAWING;
            return D3DERR_INVALIDCALL;
        }
        pLockedRect->Pitch = This->transfer->stride;
    }

    if (!(Flags & (D3DLOCK_NO_DIRTY_UPDATE | D3DLOCK_READONLY))) {
        NineSurface9_MarkContainerDirty(This);
        NineSurface9_AddDirtyRect(This, &box);
    }

    ++This->lock_count;
    return D3D_OK;
}

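/* D3D9 UnlockRect: unmap the transfer and, when an internal staging copy is
 * in use, either convert it back into the system-memory buffer or upload it
 * to the GPU resource. */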
HRESULT NINE_WINAPI
NineSurface9_UnlockRect( struct NineSurface9 *This )
{
    struct pipe_box dst_box, src_box;
    struct pipe_context *pipe;
    DBG("This=%p lock_count=%u\n", This, This->lock_count);
    user_assert(This->lock_count, D3DERR_INVALIDCALL);
    if (This->transfer) {
        pipe = nine_context_get_pipe_acquire(This->base.base.device);
        pipe->texture_unmap(pipe, This->transfer);
        nine_context_get_pipe_release(This->base.base.device);
        This->transfer = NULL;
    }
    --This->lock_count;

    if (This->data_internal) {
        nine_pointer_weakrelease(This->base.base.device->allocator, This->data_internal);
        if (This->data) {
            (void) util_format_translate(This->base.info.format,
                                         nine_get_pointer(This->base.base.device->allocator, This->data),
                                         This->stride,
                                         0, 0,
                                         This->format_internal,
                                         nine_get_pointer(This->base.base.device->allocator, This->data_internal),
                                         This->stride_internal,
                                         0, 0,
                                         This->desc.Width, This->desc.Height);
            nine_pointer_weakrelease(This->base.base.device->allocator, This->data);
            nine_pointer_strongrelease(This->base.base.device->allocator, This->data_internal);
        } else {
            u_box_2d_zslice(0, 0, This->layer,
                            This->desc.Width, This->desc.Height, &dst_box);
            u_box_2d_zslice(0, 0, 0,
                            This->desc.Width, This->desc.Height, &src_box);

            nine_context_box_upload(This->base.base.device,
                                    &This->pending_uploads_counter,
                                    (struct NineUnknown *)This,
                                    This->base.resource,
                                    This->level,
                                    &dst_box,
                                    This->format_internal,
                                    nine_get_pointer(This->base.base.device->allocator, This->data_internal),
                                    This->stride_internal,
                                    0, /* depth = 1 */
                                    &src_box);
            nine_pointer_delayedstrongrelease(This->base.base.device->allocator,
                                              This->data_internal,
                                              &This->pending_uploads_counter);
        }
    } else if (This->data) {
        nine_pointer_weakrelease(This->base.base.device->allocator, This->data);
    }

    return D3D_OK;
}

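/* GetDC and ReleaseDC are not implemented; both entry points are stubbed
 * out. */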
HRESULT NINE_WINAPI
NineSurface9_GetDC( struct NineSurface9 *This,
                    HDC *phdc )
{
    STUB(D3DERR_INVALIDCALL);
}

HRESULT NINE_WINAPI
NineSurface9_ReleaseDC( struct NineSurface9 *This,
                        HDC hdc )
{
    STUB(D3DERR_INVALIDCALL);
}

IDirect3DSurface9Vtbl NineSurface9_vtable = {
    (void *)NineUnknown_QueryInterface,
    (void *)NineUnknown_AddRef,
    (void *)NineUnknown_Release,
    (void *)NineUnknown_GetDevice, /* actually part of Resource9 iface */
    (void *)NineUnknown_SetPrivateData,
    (void *)NineUnknown_GetPrivateData,
    (void *)NineUnknown_FreePrivateData,
    (void *)NineResource9_SetPriority,
    (void *)NineResource9_GetPriority,
    (void *)NineResource9_PreLoad,
    (void *)NineResource9_GetType,
    (void *)NineSurface9_GetContainer,
    (void *)NineSurface9_GetDesc,
    (void *)NineSurface9_LockRect,
    (void *)NineSurface9_UnlockRect,
    (void *)NineSurface9_GetDC,
    (void *)NineSurface9_ReleaseDC
};

/* When this function is called, we have already checked that
 * the copy regions fit the surfaces. */
void
NineSurface9_CopyMemToDefault( struct NineSurface9 *This,
                               struct NineSurface9 *From,
                               const POINT *pDestPoint,
                               const RECT *pSourceRect )
{
    struct pipe_resource *r_dst = This->base.resource;
    struct pipe_box dst_box, src_box;
    int src_x, src_y, dst_x, dst_y, copy_width, copy_height;

    assert(This->base.pool == D3DPOOL_DEFAULT &&
           From->base.pool == D3DPOOL_SYSTEMMEM);

    if (pDestPoint) {
        dst_x = pDestPoint->x;
        dst_y = pDestPoint->y;
    } else {
        dst_x = 0;
        dst_y = 0;
    }

    if (pSourceRect) {
        src_x = pSourceRect->left;
        src_y = pSourceRect->top;
        copy_width = pSourceRect->right - pSourceRect->left;
        copy_height = pSourceRect->bottom - pSourceRect->top;
    } else {
        src_x = 0;
        src_y = 0;
        copy_width = From->desc.Width;
        copy_height = From->desc.Height;
    }

    u_box_2d_zslice(dst_x, dst_y, This->layer,
                    copy_width, copy_height, &dst_box);
    u_box_2d_zslice(src_x, src_y, 0,
                    copy_width, copy_height, &src_box);

    if (This->data_internal) {
        (void) util_format_translate(This->format_internal,
                                     nine_get_pointer(This->base.base.device->allocator, This->data_internal),
                                     This->stride_internal,
                                     dst_x, dst_y,
                                     From->base.info.format,
                                     nine_get_pointer(This->base.base.device->allocator, From->data),
                                     From->stride,
                                     src_x, src_y,
                                     copy_width, copy_height);
        nine_pointer_weakrelease(This->base.base.device->allocator, From->data);
        nine_pointer_strongrelease(This->base.base.device->allocator, This->data_internal);
    }

    nine_context_box_upload(This->base.base.device,
                            &From->pending_uploads_counter,
                            (struct NineUnknown *)From,
                            r_dst,
                            This->level,
                            &dst_box,
                            From->base.info.format,
                            nine_get_pointer(This->base.base.device->allocator, From->data),
                            From->stride,
                            0, /* depth = 1 */
                            &src_box);
    nine_pointer_delayedstrongrelease(This->base.base.device->allocator,
                                      From->data,
                                      &From->pending_uploads_counter);

    if (From->texture == D3DRTYPE_TEXTURE) {
        struct NineTexture9 *tex =
            NineTexture9(From->base.base.container);
        /* D3DPOOL_SYSTEMMEM with buffer content passed
         * from the user: execute the upload right now.
         * It might be enough to delay the upload
         * until the surface refcount is 0, but the
         * bind refcount may not be 0, and thus the dtor
         * is not executed (and doesn't trigger the
         * pending_uploads_counter check). */
        if (!tex->managed_buffer)
            nine_csmt_process(This->base.base.device);
    }

    NineSurface9_MarkContainerDirty(This);
}

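/* Read a DEFAULT pool surface back into this SYSTEMMEM surface: map the
 * source resource for reading and copy it into the system-memory buffer. */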
void
NineSurface9_CopyDefaultToMem( struct NineSurface9 *This,
                               struct NineSurface9 *From )
{
    struct pipe_context *pipe;
    struct pipe_resource *r_src = From->base.resource;
    struct pipe_transfer *transfer;
    struct pipe_box src_box;
    uint8_t *p_dst;
    const uint8_t *p_src;

    assert(This->base.pool == D3DPOOL_SYSTEMMEM &&
           From->base.pool == D3DPOOL_DEFAULT);

    assert(This->desc.Width == From->desc.Width);
    assert(This->desc.Height == From->desc.Height);

    u_box_origin_2d(This->desc.Width, This->desc.Height, &src_box);
    src_box.z = From->layer;

    if (p_atomic_read(&This->pending_uploads_counter))
        nine_csmt_process(This->base.base.device);

    pipe = NineDevice9_GetPipe(This->base.base.device);
    p_src = pipe->texture_map(pipe, r_src, From->level,
                              PIPE_MAP_READ,
                              &src_box, &transfer);
    p_dst = nine_get_pointer(This->base.base.device->allocator, This->data);

    assert (p_src && p_dst);

    util_copy_rect(p_dst, This->base.info.format,
                   This->stride, 0, 0,
                   This->desc.Width, This->desc.Height,
                   p_src,
                   transfer->stride, 0, 0);

    pipe->texture_unmap(pipe, transfer);

    nine_pointer_weakrelease(This->base.base.device->allocator, This->data);
}


/* Fortunately, rendering to a MANAGED surface is not permitted, so we will
 * never have to do the reverse, i.e. download the surface.
 */
HRESULT
NineSurface9_UploadSelf( struct NineSurface9 *This,
                         const struct pipe_box *damaged )
{
    struct pipe_resource *res = This->base.resource;
    struct pipe_box box;

    DBG("This=%p damaged=%p\n", This, damaged);

    assert(This->base.pool == D3DPOOL_MANAGED);

    if (damaged) {
        box = *damaged;
        box.z = This->layer;
        box.depth = 1;
    } else {
        box.x = 0;
        box.y = 0;
        box.z = This->layer;
        box.width = This->desc.Width;
        box.height = This->desc.Height;
        box.depth = 1;
    }

    nine_context_box_upload(This->base.base.device,
                            &This->pending_uploads_counter,
                            (struct NineUnknown *)This,
                            res,
                            This->level,
                            &box,
                            res->format,
                            nine_get_pointer(This->base.base.device->allocator, This->data),
                            This->stride,
                            0, /* depth = 1 */
                            &box);
    nine_pointer_delayedstrongrelease(This->base.base.device->allocator,
                                      This->data,
                                      &This->pending_uploads_counter);

    return D3D_OK;
}


/* Currently nine_context uses the NineSurface9 fields when the surface is a
 * render target. Any modification requires the pending commands using the
 * surface to be executed. If the bind count is 0, there are no pending
 * commands. */
#define PROCESS_IF_BOUND(surf) \
    if (surf->base.base.bind) \
        nine_csmt_process(surf->base.base.device);

void
NineSurface9_SetResource( struct NineSurface9 *This,
                          struct pipe_resource *resource, unsigned level )
{
    /* No need to call PROCESS_IF_BOUND, because SetResource is used only
     * for MANAGED textures, and they are not render targets. */
    assert(This->base.pool == D3DPOOL_MANAGED);
    This->level = level;
    pipe_resource_reference(&This->base.resource, resource);
}

void
NineSurface9_SetMultiSampleType( struct NineSurface9 *This,
                                 D3DMULTISAMPLE_TYPE mst )
{
    PROCESS_IF_BOUND(This);
    This->desc.MultiSampleType = mst;
}

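/* Replace the backing resource with one of possibly different size and
 * refresh the cached dimensions, sample counts, stride and pipe surfaces. */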
void
NineSurface9_SetResourceResize( struct NineSurface9 *This,
                                struct pipe_resource *resource )
{
    assert(This->level == 0 && This->level_actual == 0);
    assert(!This->lock_count);
    assert(This->desc.Pool == D3DPOOL_DEFAULT);
    assert(!This->texture);

    PROCESS_IF_BOUND(This);
    pipe_resource_reference(&This->base.resource, resource);

    This->desc.Width = This->base.info.width0 = resource->width0;
    This->desc.Height = This->base.info.height0 = resource->height0;
    This->base.info.nr_samples = resource->nr_samples;
    This->base.info.nr_storage_samples = resource->nr_storage_samples;

    This->stride = nine_format_get_stride(This->base.info.format,
                                          This->desc.Width);

    pipe_surface_reference(&This->surface[0], NULL);
    pipe_surface_reference(&This->surface[1], NULL);
    NineSurface9_CreatePipeSurfaces(This);
}


static const GUID *NineSurface9_IIDs[] = {
    &IID_IDirect3DSurface9,
    &IID_IDirect3DResource9,
    &IID_IUnknown,
    NULL
};

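/* Allocate and construct a NineSurface9 as a child of pDevice, returning it
 * in ppOut. */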
HRESULT
NineSurface9_new( struct NineDevice9 *pDevice,
                  struct NineUnknown *pContainer,
                  struct pipe_resource *pResource,
                  struct nine_allocation *user_buffer,
                  uint8_t TextureType,
                  unsigned Level,
                  unsigned Layer,
                  D3DSURFACE_DESC *pDesc,
                  struct NineSurface9 **ppOut )
{
    NINE_DEVICE_CHILD_NEW(Surface9, ppOut, pDevice, /* args */
                          pContainer, pResource, user_buffer,
                          TextureType, Level, Layer, pDesc);
}