/*
 * Copyright (c) 2016 Etnaviv Project
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sub license,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the
 * next paragraph) shall be included in all copies or substantial portions
 * of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
 * DEALINGS IN THE SOFTWARE.
 *
 * Authors:
 *    Christian Gmeiner <christian.gmeiner@gmail.com>
 */

#include "etnaviv_format.h"

#include "hw/common_3d.xml.h"
#include "hw/state.xml.h"
#include "hw/state_3d.xml.h"

#include "pipe/p_defines.h"
#include "util/compiler.h"

/* Specifies the table of all the formats and their features. Also supplies
 * the helpers that look up various data in those tables.
 */

/* Per-pipe-format capability record: the hardware encodings used when a
 * format serves as a vertex attribute (FE), a texture (TE) or a render
 * target (PE).  A field holding the driver's no-match sentinel means the
 * format is unsupported for that use.
 */
struct etna_format {
   unsigned vtx;  /* FE_DATA_TYPE_* vertex fetch encoding, or ETNA_NO_MATCH */
   unsigned tex;  /* TEXTURE_FORMAT_* sampler encoding, or ETNA_NO_MATCH */
   unsigned pe;   /* PE_FORMAT_* RT encoding; may carry the PE_FORMAT_RB_SWAP flag */
   bool present;  /* true if this table entry was explicitly initialized */
};

/* Sentinel for "no PE (render target) format".  Parenthesized so the
 * expansion is safe inside any expression context.
 */
#define PE_FORMAT_NONE (~0)

/* The low 7 bits of the .pe field hold the hardware PE format; bit 7 is a
 * software flag requesting a red/blue channel swap in the PE config.
 */
#define PE_FORMAT_MASK        0x7f
#define PE_FORMAT(x)          ((x) & PE_FORMAT_MASK)
#define PE_FORMAT_RB_SWAP     0x80

/* BGR-ordered variants are expressed as the RGB hardware format plus the
 * RB swap flag.
 */
#define PE_FORMAT_X8B8G8R8    (PE_FORMAT_X8R8G8B8 | PE_FORMAT_RB_SWAP)
#define PE_FORMAT_A8B8G8R8    (PE_FORMAT_A8R8G8B8 | PE_FORMAT_RB_SWAP)

#define TS_SAMPLER_FORMAT_NONE      ETNA_NO_MATCH

/* Table-entry helper: format usable as both vertex attribute and texture.
 * The macro arguments are token-pasted onto the FE_DATA_TYPE_ /
 * TEXTURE_FORMAT_ / PE_FORMAT_ prefixes from the hardware headers.
 */
#define VT(pipe, vtxfmt, texfmt, rsfmt)          \
   [PIPE_FORMAT_##pipe] = {                               \
      .vtx = FE_DATA_TYPE_##vtxfmt, \
      .tex = TEXTURE_FORMAT_##texfmt,                     \
      .pe = PE_FORMAT_##rsfmt,                            \
      .present = 1,                                       \
   }

/* Table-entry helper: texture-only format (no vertex fetch support). */
#define _T(pipe, fmt, rsfmt) \
   [PIPE_FORMAT_##pipe] = {        \
      .vtx = ETNA_NO_MATCH,        \
      .tex = TEXTURE_FORMAT_##fmt, \
      .pe = PE_FORMAT_##rsfmt,     \
      .present = 1,                \
   }

/* Table-entry helper: vertex-only format (no sampling support). */
#define V_(pipe, fmt, rsfmt)                           \
   [PIPE_FORMAT_##pipe] = {                            \
      .vtx = FE_DATA_TYPE_##fmt, \
      .tex = ETNA_NO_MATCH,                            \
      .pe = PE_FORMAT_##rsfmt,                         \
      .present = 1,                                    \
   }

85 static struct etna_format formats[PIPE_FORMAT_COUNT] = {
86    /* 8-bit */
87    VT(R8_UNORM,   UNSIGNED_BYTE, L8,                        R8),
88    VT(R8_SNORM,   BYTE,          EXT_R8_SNORM | EXT_FORMAT, NONE),
89    VT(R8_UINT,    BYTE_I,        EXT_R8I | EXT_FORMAT,      R8I),
90    VT(R8_SINT,    BYTE_I,        EXT_R8I | EXT_FORMAT,      R8I),
91    V_(R8_USCALED, UNSIGNED_BYTE, NONE),
92    V_(R8_SSCALED, BYTE,          NONE),
93 
94    _T(A8_UNORM, A8, NONE),
95    _T(L8_UNORM, L8, NONE),
96    _T(I8_UNORM, I8, NONE),
97 
98    /* 16-bit */
99    V_(R16_UNORM,   UNSIGNED_SHORT, NONE),
100    V_(R16_SNORM,   SHORT,          NONE),
101    VT(R16_UINT,    SHORT_I,        EXT_R16I | EXT_FORMAT, R16I),
102    VT(R16_SINT,    SHORT_I,        EXT_R16I | EXT_FORMAT, R16I),
103    V_(R16_USCALED, UNSIGNED_SHORT, NONE),
104    V_(R16_SSCALED, SHORT,          NONE),
105    VT(R16_FLOAT,   HALF_FLOAT,     EXT_R16F | EXT_FORMAT, R16F),
106 
107    _T(B4G4R4A4_UNORM, A4R4G4B4, A4R4G4B4),
108    _T(B4G4R4X4_UNORM, X4R4G4B4, X4R4G4B4),
109 
110    _T(L8A8_UNORM, A8L8, NONE),
111 
112    _T(Z16_UNORM,      D16,      NONE),
113    _T(B5G6R5_UNORM,   R5G6B5,   R5G6B5),
114    _T(B5G5R5A1_UNORM, A1R5G5B5, A1R5G5B5),
115    _T(B5G5R5X1_UNORM, X1R5G5B5, X1R5G5B5),
116 
117    VT(R8G8_UNORM,   UNSIGNED_BYTE,  EXT_G8R8 | EXT_FORMAT,       G8R8),
118    VT(R8G8_SNORM,   BYTE,           EXT_G8R8_SNORM | EXT_FORMAT, NONE),
119    VT(R8G8_UINT,    BYTE_I,         EXT_G8R8I | EXT_FORMAT,      G8R8I),
120    VT(R8G8_SINT,    BYTE_I,         EXT_G8R8I | EXT_FORMAT,      G8R8I),
121    V_(R8G8_USCALED, UNSIGNED_BYTE,  NONE),
122    V_(R8G8_SSCALED, BYTE,           NONE),
123 
124    /* 24-bit */
125    V_(R8G8B8_UNORM,   UNSIGNED_BYTE, NONE),
126    V_(R8G8B8_SNORM,   BYTE,          NONE),
127    V_(R8G8B8_UINT,    BYTE_I,        NONE),
128    V_(R8G8B8_SINT,    BYTE_I,        NONE),
129    V_(R8G8B8_USCALED, UNSIGNED_BYTE, NONE),
130    V_(R8G8B8_SSCALED, BYTE,          NONE),
131 
132    /* 32-bit */
133    V_(R32_UNORM,   UNSIGNED_INT, NONE),
134    V_(R32_SNORM,   INT,          NONE),
135    VT(R32_SINT,    FLOAT,        EXT_R32F | EXT_FORMAT, R32F),
136    VT(R32_UINT,    FLOAT,        EXT_R32F | EXT_FORMAT, R32F),
137    V_(R32_USCALED, UNSIGNED_INT, NONE),
138    V_(R32_SSCALED, INT,          NONE),
139    VT(R32_FLOAT,   FLOAT,        EXT_R32F | EXT_FORMAT, R32F),
140    V_(R32_FIXED,   FIXED,        NONE),
141 
142    V_(R16G16_UNORM,   UNSIGNED_SHORT, NONE),
143    V_(R16G16_SNORM,   SHORT,          NONE),
144    VT(R16G16_UINT,    SHORT_I,        EXT_G16R16I | EXT_FORMAT, G16R16I),
145    VT(R16G16_SINT,    SHORT_I,        EXT_G16R16I | EXT_FORMAT, G16R16I),
146    V_(R16G16_USCALED, UNSIGNED_SHORT, NONE),
147    V_(R16G16_SSCALED, SHORT,          NONE),
148    VT(R16G16_FLOAT,   HALF_FLOAT,     EXT_G16R16F | EXT_FORMAT, G16R16F),
149 
150    V_(A8B8G8R8_UNORM,   UNSIGNED_BYTE, NONE),
151 
152    VT(R8G8B8A8_UNORM,   UNSIGNED_BYTE, A8B8G8R8, A8B8G8R8),
153    VT(R8G8B8A8_SNORM,   BYTE,          EXT_A8B8G8R8_SNORM | EXT_FORMAT, NONE),
154    _T(R8G8B8X8_UNORM,   X8B8G8R8,      X8B8G8R8),
155    _T(R8G8B8X8_SNORM,                  EXT_X8B8G8R8_SNORM | EXT_FORMAT, NONE),
156    VT(R8G8B8A8_UINT,    BYTE_I,        EXT_A8B8G8R8I | EXT_FORMAT,      A8B8G8R8I),
157    VT(R8G8B8A8_SINT,    BYTE_I,        EXT_A8B8G8R8I | EXT_FORMAT,      A8B8G8R8I),
158    V_(R8G8B8A8_USCALED, UNSIGNED_BYTE, A8B8G8R8),
159    V_(R8G8B8A8_SSCALED, BYTE,          A8B8G8R8),
160 
161    _T(B8G8R8A8_UNORM, A8R8G8B8, A8R8G8B8),
162    _T(B8G8R8X8_UNORM, X8R8G8B8, X8R8G8B8),
163 
164    VT(R10G10B10A2_UNORM,   UNSIGNED_INT_2_10_10_10_REV, EXT_A2B10G10R10 | EXT_FORMAT, A2B10G10R10),
165    _T(R10G10B10X2_UNORM,                                EXT_A2B10G10R10 | EXT_FORMAT, A2B10G10R10),
166    V_(R10G10B10A2_SNORM,   INT_2_10_10_10_REV,          NONE),
167    _T(R10G10B10A2_UINT,                                 EXT_A2B10G10R10UI | EXT_FORMAT, A2B10G10R10UI),
168    V_(R10G10B10A2_USCALED, UNSIGNED_INT_2_10_10_10_REV, NONE),
169    V_(R10G10B10A2_SSCALED, INT_2_10_10_10_REV,          NONE),
170 
171    _T(X8Z24_UNORM,       D24X8, NONE),
172    _T(S8_UINT_Z24_UNORM, D24X8, NONE),
173 
174    _T(R9G9B9E5_FLOAT,  E5B9G9R9,                    NONE),
175    _T(R11G11B10_FLOAT, EXT_B10G11R11F | EXT_FORMAT, B10G11R11F),
176 
177    /* 48-bit */
178    V_(R16G16B16_UNORM,   UNSIGNED_SHORT, NONE),
179    V_(R16G16B16_SNORM,   SHORT,          NONE),
180    V_(R16G16B16_UINT,    SHORT_I,        NONE),
181    V_(R16G16B16_SINT,    SHORT_I,        NONE),
182    V_(R16G16B16_USCALED, UNSIGNED_SHORT, NONE),
183    V_(R16G16B16_SSCALED, SHORT,          NONE),
184    V_(R16G16B16_FLOAT,   HALF_FLOAT,     NONE),
185 
186    /* 64-bit */
187    V_(R16G16B16A16_UNORM,   UNSIGNED_SHORT, NONE),
188    V_(R16G16B16A16_SNORM,   SHORT,          NONE),
189    VT(R16G16B16A16_UINT,    SHORT_I,        EXT_A16B16G16R16I | EXT_FORMAT, A16B16G16R16I),
190    VT(R16G16B16A16_SINT,    SHORT_I,        EXT_A16B16G16R16I | EXT_FORMAT, A16B16G16R16I),
191    V_(R16G16B16A16_USCALED, UNSIGNED_SHORT, NONE),
192    V_(R16G16B16A16_SSCALED, SHORT,          NONE),
193    VT(R16G16B16A16_FLOAT,   HALF_FLOAT,     EXT_A16B16G16R16F | EXT_FORMAT, A16B16G16R16F),
194 
195    V_(R32G32_UNORM,   UNSIGNED_INT, NONE),
196    V_(R32G32_SNORM,   INT,          NONE),
197    VT(R32G32_UINT,    FLOAT,        EXT_G32R32F | EXT_FORMAT, G32R32F),
198    VT(R32G32_SINT,    FLOAT,        EXT_G32R32F | EXT_FORMAT, G32R32F),
199    V_(R32G32_USCALED, UNSIGNED_INT, NONE),
200    V_(R32G32_SSCALED, INT,          NONE),
201    VT(R32G32_FLOAT,   FLOAT,        EXT_G32R32F | EXT_FORMAT, G32R32F),
202    V_(R32G32_FIXED,   FIXED,        NONE),
203 
204    /* 96-bit */
205    V_(R32G32B32_UNORM,   UNSIGNED_INT, NONE),
206    V_(R32G32B32_SNORM,   INT,          NONE),
207    V_(R32G32B32_UINT,    FLOAT,        NONE),
208    V_(R32G32B32_SINT,    FLOAT,        NONE),
209    V_(R32G32B32_USCALED, UNSIGNED_INT, NONE),
210    V_(R32G32B32_SSCALED, INT,          NONE),
211    V_(R32G32B32_FLOAT,   FLOAT,        NONE),
212    V_(R32G32B32_FIXED,   FIXED,        NONE),
213 
214    /* 128-bit */
215    V_(R32G32B32A32_UNORM,   UNSIGNED_INT, NONE),
216    V_(R32G32B32A32_SNORM,   INT,          NONE),
217    V_(R32G32B32A32_UINT,    FLOAT,        NONE),
218    V_(R32G32B32A32_SINT,    FLOAT,        NONE),
219    V_(R32G32B32A32_USCALED, UNSIGNED_INT, NONE),
220    V_(R32G32B32A32_SSCALED, INT,          NONE),
221    V_(R32G32B32A32_FLOAT,   FLOAT,        NONE),
222    V_(R32G32B32A32_FIXED,   FIXED,        NONE),
223 
224    /* compressed */
225    _T(ETC1_RGB8, ETC1, NONE),
226 
227    _T(DXT1_RGB,  DXT1,      NONE),
228    _T(DXT1_RGBA, DXT1,      NONE),
229    _T(DXT3_RGBA, DXT2_DXT3, NONE),
230    _T(DXT5_RGBA, DXT4_DXT5, NONE),
231 
232    _T(ETC2_RGB8,       EXT_NONE | EXT_FORMAT,                          NONE), /* Extd. format NONE doubles as ETC2_RGB8 */
233    _T(ETC2_RGB8A1,     EXT_RGB8_PUNCHTHROUGH_ALPHA1_ETC2 | EXT_FORMAT, NONE),
234    _T(ETC2_RGBA8,      EXT_RGBA8_ETC2_EAC | EXT_FORMAT,                NONE),
235    _T(ETC2_R11_UNORM,  EXT_R11_EAC | EXT_FORMAT,                       NONE),
236    _T(ETC2_R11_SNORM,  EXT_SIGNED_R11_EAC | EXT_FORMAT,                NONE),
237    _T(ETC2_RG11_UNORM, EXT_RG11_EAC | EXT_FORMAT,                      NONE),
238    _T(ETC2_RG11_SNORM, EXT_SIGNED_RG11_EAC | EXT_FORMAT,               NONE),
239 
240    _T(ASTC_4x4,        ASTC_RGBA_4x4 | ASTC_FORMAT,                    NONE),
241    _T(ASTC_5x4,        ASTC_RGBA_5x4 | ASTC_FORMAT,                    NONE),
242    _T(ASTC_5x5,        ASTC_RGBA_5x5 | ASTC_FORMAT,                    NONE),
243    _T(ASTC_6x5,        ASTC_RGBA_6x5 | ASTC_FORMAT,                    NONE),
244    _T(ASTC_6x6,        ASTC_RGBA_6x6 | ASTC_FORMAT,                    NONE),
245    _T(ASTC_8x5,        ASTC_RGBA_8x5 | ASTC_FORMAT,                    NONE),
246    _T(ASTC_8x6,        ASTC_RGBA_8x6 | ASTC_FORMAT,                    NONE),
247    _T(ASTC_8x8,        ASTC_RGBA_8x8 | ASTC_FORMAT,                    NONE),
248    _T(ASTC_10x5,       ASTC_RGBA_10x5 | ASTC_FORMAT,                   NONE),
249    _T(ASTC_10x6,       ASTC_RGBA_10x6 | ASTC_FORMAT,                   NONE),
250    _T(ASTC_10x8,       ASTC_RGBA_10x8 | ASTC_FORMAT,                   NONE),
251    _T(ASTC_10x10,      ASTC_RGBA_10x10 | ASTC_FORMAT,                  NONE),
252    _T(ASTC_12x10,      ASTC_RGBA_12x10 | ASTC_FORMAT,                  NONE),
253    _T(ASTC_12x12,      ASTC_RGBA_12x12 | ASTC_FORMAT,                  NONE),
254 
255    /* YUV */
256    _T(YUYV, YUY2, YUY2),
257    _T(UYVY, UYVY, NONE),
258 };
259 
260 uint32_t
translate_texture_format(enum pipe_format fmt)261 translate_texture_format(enum pipe_format fmt)
262 {
263    fmt = util_format_linear(fmt);
264 
265    if (!formats[fmt].present)
266       return ETNA_NO_MATCH;
267 
268    return formats[fmt].tex;
269 }
270 
271 bool
texture_use_int_filter(const struct pipe_sampler_view * sv,const struct pipe_sampler_state * ss,bool tex_desc)272 texture_use_int_filter(const struct pipe_sampler_view *sv,
273                        const struct pipe_sampler_state *ss,
274                        bool tex_desc)
275 {
276    switch (sv->target) {
277    case PIPE_TEXTURE_1D_ARRAY:
278    case PIPE_TEXTURE_2D_ARRAY:
279       if (tex_desc)
280          break;
281       FALLTHROUGH;
282    case PIPE_TEXTURE_3D:
283       return false;
284    default:
285       break;
286    }
287 
288    /* only unorm formats can use int filter */
289    if (!util_format_is_unorm(sv->format))
290       return false;
291 
292    if (util_format_is_srgb(sv->format))
293       return false;
294 
295    if (util_format_description(sv->format)->layout == UTIL_FORMAT_LAYOUT_ASTC)
296       return false;
297 
298    if (ss->max_anisotropy > 1)
299       return false;
300 
301    switch (sv->format) {
302    /* apparently D16 can't use int filter but D24 can */
303    case PIPE_FORMAT_Z16_UNORM:
304    case PIPE_FORMAT_R10G10B10A2_UNORM:
305    case PIPE_FORMAT_R10G10B10X2_UNORM:
306    case PIPE_FORMAT_ETC2_R11_UNORM:
307    case PIPE_FORMAT_ETC2_RG11_UNORM:
308       return false;
309    default:
310       return true;
311    }
312 }
313 
314 bool
texture_format_needs_swiz(enum pipe_format fmt)315 texture_format_needs_swiz(enum pipe_format fmt)
316 {
317    return util_format_linear(fmt) == PIPE_FORMAT_R8_UNORM;
318 }
319 
320 uint32_t
get_texture_swiz(enum pipe_format fmt,unsigned swizzle_r,unsigned swizzle_g,unsigned swizzle_b,unsigned swizzle_a)321 get_texture_swiz(enum pipe_format fmt, unsigned swizzle_r,
322                  unsigned swizzle_g, unsigned swizzle_b, unsigned swizzle_a)
323 {
324    unsigned char swiz[4] = {
325       swizzle_r, swizzle_g, swizzle_b, swizzle_a,
326    };
327 
328    if (util_format_linear(fmt) == PIPE_FORMAT_R8_UNORM) {
329       /* R8 is emulated with L8, needs yz channels set to zero */
330       for (unsigned i = 0; i < 4; i++) {
331          if (swiz[i] == PIPE_SWIZZLE_Y || swiz[i] == PIPE_SWIZZLE_Z)
332             swiz[i] = PIPE_SWIZZLE_0;
333       }
334    }
335 
336    /* PIPE_SWIZZLE_ maps 1:1 to TEXTURE_SWIZZLE_ */
337    STATIC_ASSERT(PIPE_SWIZZLE_X == TEXTURE_SWIZZLE_RED);
338    STATIC_ASSERT(PIPE_SWIZZLE_Y == TEXTURE_SWIZZLE_GREEN);
339    STATIC_ASSERT(PIPE_SWIZZLE_Z == TEXTURE_SWIZZLE_BLUE);
340    STATIC_ASSERT(PIPE_SWIZZLE_W == TEXTURE_SWIZZLE_ALPHA);
341    STATIC_ASSERT(PIPE_SWIZZLE_0 == TEXTURE_SWIZZLE_ZERO);
342    STATIC_ASSERT(PIPE_SWIZZLE_1 == TEXTURE_SWIZZLE_ONE);
343 
344    return VIVS_TE_SAMPLER_CONFIG1_SWIZZLE_R(swiz[0]) |
345           VIVS_TE_SAMPLER_CONFIG1_SWIZZLE_G(swiz[1]) |
346           VIVS_TE_SAMPLER_CONFIG1_SWIZZLE_B(swiz[2]) |
347           VIVS_TE_SAMPLER_CONFIG1_SWIZZLE_A(swiz[3]);
348 }
349 
350 uint32_t
translate_pe_format(enum pipe_format fmt)351 translate_pe_format(enum pipe_format fmt)
352 {
353    fmt = util_format_linear(fmt);
354 
355    if (!formats[fmt].present)
356       return ETNA_NO_MATCH;
357 
358    if (formats[fmt].pe == ETNA_NO_MATCH)
359       return ETNA_NO_MATCH;
360 
361    return PE_FORMAT(formats[fmt].pe);
362 }
363 
364 int
translate_pe_format_rb_swap(enum pipe_format fmt)365 translate_pe_format_rb_swap(enum pipe_format fmt)
366 {
367    fmt = util_format_linear(fmt);
368    assert(formats[fmt].present);
369 
370    return formats[fmt].pe & PE_FORMAT_RB_SWAP;
371 }
372 
373 /* Return type flags for vertex element format */
374 uint32_t
translate_vertex_format_type(enum pipe_format fmt)375 translate_vertex_format_type(enum pipe_format fmt)
376 {
377    if (!formats[fmt].present)
378       return ETNA_NO_MATCH;
379 
380    return formats[fmt].vtx;
381 }
382