/*
 * Copyright © 2015 Intel Corporation
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
 * IN THE SOFTWARE.
 *
 * Authors:
 *    Jason Ekstrand (jason@jlekstrand.net)
 *
 */

#ifndef _VTN_PRIVATE_H_
#define _VTN_PRIVATE_H_

#include <setjmp.h>

#include "nir/nir.h"
#include "nir/nir_builder.h"
#include "util/u_dynarray.h"
#include "nir_spirv.h"
#include "spirv.h"

struct vtn_builder;
struct vtn_decoration;

void vtn_log(struct vtn_builder *b, enum nir_spirv_debug_level level,
             size_t spirv_offset, const char *message);

void vtn_logf(struct vtn_builder *b, enum nir_spirv_debug_level level,
              size_t spirv_offset, const char *fmt, ...) PRINTFLIKE(4, 5);

#define vtn_info(...) vtn_logf(b, NIR_SPIRV_DEBUG_LEVEL_INFO, 0, __VA_ARGS__)

void _vtn_warn(struct vtn_builder *b, const char *file, unsigned line,
               const char *fmt, ...) PRINTFLIKE(4, 5);
#define vtn_warn(...) _vtn_warn(b, __FILE__, __LINE__, __VA_ARGS__)
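
/* Usage sketch (not part of the upstream header): vtn_info() and vtn_warn()
 * expand against a local `b` pointer, so they are meant to be called from
 * code that already has the vtn_builder in scope, e.g.
 *
 *    if (!supported)
 *       vtn_warn("Unsupported SPIR-V capability: %u", cap);
 *
 * The condition and the `cap` variable above are purely illustrative.
 */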

/** Fail SPIR-V parsing
 *
 * This function logs an error and then bails out of the shader compile using
 * longjmp.  This being safe relies on three things:
 *
 *  1) We must guarantee that setjmp is called after allocating the builder
 *     and setting up b->debug (so that logging works) but before any errors
 *     have a chance to occur.
 *
 *  2) While doing the SPIR-V -> NIR conversion, we need to be careful to
 *     ensure that all heap allocations happen through ralloc and are parented
 *     to the builder.  This way they will get properly cleaned up on error.
 *
 *  3) We must ensure that _vtn_fail is never called while a mutex lock or a
 *     reference to any other resource is held, with the exception of ralloc
 *     objects which are parented to the builder.
 *
 * So long as these three things continue to hold, we can easily longjmp back
 * to spirv_to_nir(), clean up the builder, and return NULL.
 */
void _vtn_fail(struct vtn_builder *b, const char *file, unsigned line,
               const char *fmt, ...) NORETURN PRINTFLIKE(4, 5);
#define vtn_fail(...) _vtn_fail(b, __FILE__, __LINE__, __VA_ARGS__)

/** Fail if the given expression evaluates to true */
#define vtn_fail_if(expr, ...) \
   do { \
      if (unlikely(expr)) \
         vtn_fail(__VA_ARGS__); \
   } while (0)
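
/* A minimal sketch of the contract described above (illustrative only; the
 * real setup lives in spirv_to_nir(), and the failure message is
 * hypothetical):
 *
 *    if (setjmp(b->fail_jump)) {
 *       ralloc_free(b);   // everything is parented to the builder
 *       return NULL;      // bail out of the compile
 *    }
 *    ...
 *    vtn_fail_if(count < 3, "Instruction needs at least 3 words, got %u",
 *                count);
 */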

/** Assert that a condition is true and, if it isn't, vtn_fail
 *
 * This macro is transitional only and should not be used in new code.  Use
 * vtn_fail_if and provide a real message instead.
 */
#define vtn_assert(expr) \
   do { \
      if (!likely(expr)) \
         vtn_fail("%s", #expr); \
   } while (0)

enum vtn_value_type {
   vtn_value_type_invalid = 0,
   vtn_value_type_undef,
   vtn_value_type_string,
   vtn_value_type_decoration_group,
   vtn_value_type_type,
   vtn_value_type_constant,
   vtn_value_type_pointer,
   vtn_value_type_function,
   vtn_value_type_block,
   vtn_value_type_ssa,
   vtn_value_type_extension,
   vtn_value_type_image_pointer,
   vtn_value_type_sampled_image,
};

enum vtn_branch_type {
   vtn_branch_type_none,
   vtn_branch_type_switch_break,
   vtn_branch_type_switch_fallthrough,
   vtn_branch_type_loop_break,
   vtn_branch_type_loop_continue,
   vtn_branch_type_discard,
   vtn_branch_type_return,
};

enum vtn_cf_node_type {
   vtn_cf_node_type_block,
   vtn_cf_node_type_if,
   vtn_cf_node_type_loop,
   vtn_cf_node_type_switch,
};

struct vtn_cf_node {
   struct list_head link;
   enum vtn_cf_node_type type;
};

struct vtn_loop {
   struct vtn_cf_node node;

   /* The main body of the loop */
   struct list_head body;

   /* The "continue" part of the loop.  This gets executed after the body
    * and is where you go when you hit a continue.
    */
   struct list_head cont_body;

   SpvLoopControlMask control;
};

struct vtn_if {
   struct vtn_cf_node node;

   uint32_t condition;

   enum vtn_branch_type then_type;
   struct list_head then_body;

   enum vtn_branch_type else_type;
   struct list_head else_body;

   SpvSelectionControlMask control;
};

struct vtn_case {
   struct list_head link;

   struct list_head body;

   /* The block that starts this case */
   struct vtn_block *start_block;

   /* The fallthrough case, if any */
   struct vtn_case *fallthrough;

   /* The uint32_t values that map to this case */
   struct util_dynarray values;

   /* True if this is the default case */
   bool is_default;

   /* Initialized to false; used when sorting the list of cases */
   bool visited;
};

struct vtn_switch {
   struct vtn_cf_node node;

   uint32_t selector;

   struct list_head cases;
};
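
/* The cases list links struct vtn_case nodes through their `link` field.  A
 * sketch of walking it, given a hypothetical `struct vtn_switch *swtch` and
 * assuming the list_for_each_entry() helper from Mesa's util/list.h is
 * available via the nir includes:
 *
 *    list_for_each_entry(struct vtn_case, cse, &swtch->cases, link) {
 *       if (cse->is_default)
 *          continue;
 *       // ... compare the selector against the values in cse->values ...
 *    }
 */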

struct vtn_block {
   struct vtn_cf_node node;

   /** A pointer to the label instruction */
   const uint32_t *label;

   /** A pointer to the merge instruction (or NULL if none exists) */
   const uint32_t *merge;

   /** A pointer to the branch instruction that ends this block */
   const uint32_t *branch;

   enum vtn_branch_type branch_type;

   /** Points to the loop that this block starts (if it starts a loop) */
   struct vtn_loop *loop;

   /** Points to the switch case started by this block (if any) */
   struct vtn_case *switch_case;

   /** Every block ends in a nop intrinsic so that we can find it again */
   nir_intrinsic_instr *end_nop;
};

struct vtn_function {
   struct exec_node node;

   bool referenced;
   bool emitted;

   nir_function_impl *impl;
   struct vtn_block *start_block;

   struct list_head body;

   const uint32_t *end;

   SpvFunctionControlMask control;
};

typedef bool (*vtn_instruction_handler)(struct vtn_builder *, uint32_t,
                                        const uint32_t *, unsigned);

void vtn_build_cfg(struct vtn_builder *b, const uint32_t *words,
                   const uint32_t *end);
void vtn_function_emit(struct vtn_builder *b, struct vtn_function *func,
                       vtn_instruction_handler instruction_handler);

const uint32_t *
vtn_foreach_instruction(struct vtn_builder *b, const uint32_t *start,
                        const uint32_t *end, vtn_instruction_handler handler);
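
/* A handler receives the opcode, a pointer to the instruction's words, and
 * the word count for that instruction.  A hypothetical handler might look
 * like this sketch (the opcode handled and the error message are
 * illustrative, not prescriptive):
 *
 *    static bool
 *    handle_my_ops(struct vtn_builder *b, uint32_t opcode,
 *                  const uint32_t *w, unsigned count)
 *    {
 *       switch (opcode) {
 *       case SpvOpNop:
 *          return true;
 *       default:
 *          vtn_fail("Unhandled opcode");
 *       }
 *    }
 */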

struct vtn_ssa_value {
   union {
      nir_ssa_def *def;
      struct vtn_ssa_value **elems;
   };

   /* For matrices, if this is non-NULL, then this value is actually the
    * transpose of some other value.  The value that `transposed` points to
    * always dominates this value.
    */
   struct vtn_ssa_value *transposed;

   const struct glsl_type *type;
};

enum vtn_base_type {
   vtn_base_type_void,
   vtn_base_type_scalar,
   vtn_base_type_vector,
   vtn_base_type_matrix,
   vtn_base_type_array,
   vtn_base_type_struct,
   vtn_base_type_pointer,
   vtn_base_type_image,
   vtn_base_type_sampler,
   vtn_base_type_sampled_image,
   vtn_base_type_function,
};

struct vtn_type {
   enum vtn_base_type base_type;

   const struct glsl_type *type;

   /* The SPIR-V id of the given type. */
   uint32_t id;

   /* Specifies the length of complex types.
    *
    * For Workgroup pointers, this is the size of the referenced type.
    */
   unsigned length;

   /* for arrays, matrices and pointers, the array stride */
   unsigned stride;

   union {
      /* Members for scalar, vector, and array-like types */
      struct {
         /* for arrays, the vtn_type for the elements of the array */
         struct vtn_type *array_element;

         /* for matrices, whether the matrix is stored row-major */
         bool row_major:1;

         /* Whether this type, or a parent type, has been decorated as a
          * builtin
          */
         bool is_builtin:1;

         /* Which built-in to use */
         SpvBuiltIn builtin;
      };

      /* Members for struct types */
      struct {
         /* for structures, the vtn_type for each member */
         struct vtn_type **members;

         /* for structs, the offset of each member */
         unsigned *offsets;

         /* for structs, whether it was decorated as a "non-SSBO-like" block */
         bool block:1;

         /* for structs, whether it was decorated as an "SSBO-like" block */
         bool buffer_block:1;

         /* for structs with block == true, whether this is a builtin block
          * (i.e. a block that contains only builtins).
          */
         bool builtin_block:1;
      };

      /* Members for pointer types */
      struct {
         /* For pointers, the vtn_type for the dereferenced type */
         struct vtn_type *deref;

         /* Storage class for pointers */
         SpvStorageClass storage_class;

         /* Required alignment for pointers */
         uint32_t align;
      };

      /* Members for image types */
      struct {
         /* For images, indicates whether it's sampled or storage */
         bool sampled;

         /* Image format for image_load_store type images */
         unsigned image_format;

         /* Access qualifier for storage images */
         SpvAccessQualifier access_qualifier;
      };

      /* Members for sampled image types */
      struct {
         /* For sampled images, the image type */
         struct vtn_type *image;
      };

      /* Members for function types */
      struct {
         /* For functions, the vtn_type for each parameter */
         struct vtn_type **params;

         /* Return type for functions */
         struct vtn_type *return_type;
      };
   };
};
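
/* Which union members are valid depends on base_type.  An illustrative (and
 * deliberately incomplete) sketch with a hypothetical helper, not a function
 * defined in this file:
 *
 *    static unsigned
 *    vtn_type_child_count(const struct vtn_type *t)
 *    {
 *       switch (t->base_type) {
 *       case vtn_base_type_array:
 *       case vtn_base_type_struct:
 *          return t->length;   // array elements or struct members
 *       case vtn_base_type_pointer:
 *          return 1;           // the dereferenced type, t->deref
 *       default:
 *          return 0;
 *       }
 *    }
 */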

bool vtn_types_compatible(struct vtn_builder *b,
                          struct vtn_type *t1, struct vtn_type *t2);

struct vtn_variable;

enum vtn_access_mode {
   vtn_access_mode_id,
   vtn_access_mode_literal,
};

struct vtn_access_link {
   enum vtn_access_mode mode;
   uint32_t id;
};

struct vtn_access_chain {
   uint32_t length;

   /** Whether or not to treat the base pointer as an array.  This is only
    * true if this access chain came from an OpPtrAccessChain.
    */
   bool ptr_as_array;

   /** Struct elements and array offsets.
    *
    * This is declared as an array of length 1 so that it can conveniently be
    * created on the stack, but the real length is given by the length field.
    */
   struct vtn_access_link link[1];
};
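
/* Because link[] is declared with length 1, a heap-allocated chain needs
 * extra space for any additional links.  A sketch of how one might be
 * allocated, assuming length >= 1 and ralloc_size() from Mesa's ralloc
 * allocator (which this code uses for builder-parented allocations):
 *
 *    struct vtn_access_chain *chain =
 *       ralloc_size(b, sizeof(*chain) +
 *                      (length - 1) * sizeof(chain->link[0]));
 *    chain->length = length;
 *    chain->ptr_as_array = false;
 */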

enum vtn_variable_mode {
   vtn_variable_mode_local,
   vtn_variable_mode_global,
   vtn_variable_mode_param,
   vtn_variable_mode_ubo,
   vtn_variable_mode_ssbo,
   vtn_variable_mode_push_constant,
   vtn_variable_mode_image,
   vtn_variable_mode_sampler,
   vtn_variable_mode_workgroup,
   vtn_variable_mode_input,
   vtn_variable_mode_output,
};

struct vtn_pointer {
   /** The variable mode for the referenced data */
   enum vtn_variable_mode mode;

   /** The dereferenced type of this pointer */
   struct vtn_type *type;

   /** The pointer type of this pointer
    *
    * This may be NULL for some temporary pointers constructed as part of a
    * large load, store, or copy.  It MUST be valid for all pointers which are
    * stored as SPIR-V SSA values.
    */
   struct vtn_type *ptr_type;

   /** The referenced variable, if known
    *
    * This field may be NULL if the pointer uses a (block_index, offset) pair
    * instead of an access chain.
    */
   struct vtn_variable *var;

   /** An access chain describing how to get from var to the referenced data
    *
    * This field may be NULL if the pointer references the entire variable or
    * if a (block_index, offset) pair is used instead of an access chain.
    */
   struct vtn_access_chain *chain;

   /** A (block_index, offset) pair representing a UBO or SSBO position. */
   struct nir_ssa_def *block_index;
   struct nir_ssa_def *offset;
};
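
/* A vtn_pointer therefore has two possible representations: a variable plus
 * an optional access chain, or a (block_index, offset) pair.  A hedged
 * sketch of telling them apart based purely on the field comments above
 * (this helper is hypothetical, not something defined in this file):
 *
 *    static bool
 *    pointer_uses_offsets(const struct vtn_pointer *ptr)
 *    {
 *       return ptr->block_index != NULL || ptr->offset != NULL;
 *    }
 */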

struct vtn_variable {
   enum vtn_variable_mode mode;

   struct vtn_type *type;

   unsigned descriptor_set;
   unsigned binding;
   unsigned input_attachment_index;
   bool patch;

   nir_variable *var;
   nir_variable **members;

   int shared_location;

   /**
    * Some early released versions of GLSLang implemented all function calls
    * by making copies of all parameters into temporary variables and passing
    * those variables into the function.  It even did so for samplers and
    * images, which violates the SPIR-V spec.  Unfortunately, two games (Talos
    * Principle and Doom) shipped with this old version of GLSLang and also
    * happen to pass samplers into functions.  Talos Principle received an
    * update fairly shortly after release with an updated GLSLang.  Doom, on
    * the other hand, has never received an update, so we need to work around
    * this GLSLang issue in SPIR-V -> NIR.  Hopefully, we can drop this hack
    * at some point in the future.
    */
   struct vtn_pointer *copy_prop_sampler;
};

struct vtn_image_pointer {
   struct vtn_pointer *image;
   nir_ssa_def *coord;
   nir_ssa_def *sample;
};

struct vtn_sampled_image {
   struct vtn_type *type;
   struct vtn_pointer *image; /* Image or array of images */
   struct vtn_pointer *sampler; /* Sampler */
};

struct vtn_value {
   enum vtn_value_type value_type;
   const char *name;
   struct vtn_decoration *decoration;
   struct vtn_type *type;
   union {
      void *ptr;
      char *str;
      nir_constant *constant;
      struct vtn_pointer *pointer;
      struct vtn_image_pointer *image;
      struct vtn_sampled_image *sampled_image;
      struct vtn_function *func;
      struct vtn_block *block;
      struct vtn_ssa_value *ssa;
      vtn_instruction_handler ext_handler;
   };
};

#define VTN_DEC_DECORATION -1
#define VTN_DEC_EXECUTION_MODE -2
#define VTN_DEC_STRUCT_MEMBER0 0

struct vtn_decoration {
   struct vtn_decoration *next;

   /* Specifies how to apply this decoration.  Negative values represent a
    * decoration or execution mode. (See the VTN_DEC_ #defines above.)
    * Non-negative values specify that it applies to a structure member.
    */
   int scope;

   const uint32_t *literals;
   struct vtn_value *group;

   union {
      SpvDecoration decoration;
      SpvExecutionMode exec_mode;
   };
};

struct vtn_builder {
   nir_builder nb;

   /* Used by vtn_fail to jump back to the beginning of SPIR-V compilation */
   jmp_buf fail_jump;

   const uint32_t *spirv;
   size_t spirv_word_count;

   nir_shader *shader;
   const struct spirv_to_nir_options *options;
   struct vtn_block *block;

   /* Current offset, file, line, and column.  Useful for debugging.  Set
    * automatically by vtn_foreach_instruction.
    */
   size_t spirv_offset;
   char *file;
   int line, col;

   /*
    * In SPIR-V, constants are global, whereas in NIR, the load_const
    * instruction we use is per-function.  So while we parse each function, we
    * keep a hash table of the constants we've resolved to nir_ssa_defs so
    * far, and we lazily resolve them when we see them used in a function.
    */
   struct hash_table *const_table;

   /*
    * Map from phi instructions (pointer to the start of the instruction)
    * to the variable corresponding to it.
    */
   struct hash_table *phi_table;

   unsigned num_specializations;
   struct nir_spirv_specialization *specializations;

   unsigned value_id_bound;
   struct vtn_value *values;

   gl_shader_stage entry_point_stage;
   const char *entry_point_name;
   struct vtn_value *entry_point;
   bool origin_upper_left;
   bool pixel_center_integer;

   struct vtn_function *func;
   struct exec_list functions;

   /* Current function parameter index */
   unsigned func_param_idx;

   bool has_loop_continue;
};

nir_ssa_def *
vtn_pointer_to_ssa(struct vtn_builder *b, struct vtn_pointer *ptr);
struct vtn_pointer *
vtn_pointer_from_ssa(struct vtn_builder *b, nir_ssa_def *ssa,
                     struct vtn_type *ptr_type);

static inline struct vtn_value *
vtn_untyped_value(struct vtn_builder *b, uint32_t value_id)
{
   vtn_fail_if(value_id >= b->value_id_bound,
               "SPIR-V id %u is out-of-bounds", value_id);
   return &b->values[value_id];
}

static inline struct vtn_value *
vtn_push_value(struct vtn_builder *b, uint32_t value_id,
               enum vtn_value_type value_type)
{
   struct vtn_value *val = vtn_untyped_value(b, value_id);

   vtn_fail_if(val->value_type != vtn_value_type_invalid,
               "SPIR-V id %u has already been written by another instruction",
               value_id);

   val->value_type = value_type;
   return &b->values[value_id];
}

static inline struct vtn_value *
vtn_push_ssa(struct vtn_builder *b, uint32_t value_id,
             struct vtn_type *type, struct vtn_ssa_value *ssa)
{
   struct vtn_value *val;
   if (type->base_type == vtn_base_type_pointer) {
      val = vtn_push_value(b, value_id, vtn_value_type_pointer);
      val->pointer = vtn_pointer_from_ssa(b, ssa->def, type);
   } else {
      val = vtn_push_value(b, value_id, vtn_value_type_ssa);
      val->ssa = ssa;
   }
   return val;
}

static inline struct vtn_value *
vtn_value(struct vtn_builder *b, uint32_t value_id,
          enum vtn_value_type value_type)
{
   struct vtn_value *val = vtn_untyped_value(b, value_id);
   vtn_fail_if(val->value_type != value_type,
               "SPIR-V id %u is the wrong kind of value", value_id);
   return val;
}
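
/* Typical use of these helpers when handling a value-producing instruction
 * (a sketch; for most such SPIR-V instructions w[1] is the result-type id
 * and w[2] is the result id):
 *
 *    struct vtn_type *type = vtn_value(b, w[1], vtn_value_type_type)->type;
 *    struct vtn_ssa_value *ssa = vtn_create_ssa_value(b, type->type);
 *    // ... fill in ssa->def or ssa->elems ...
 *    vtn_push_ssa(b, w[2], type, ssa);
 */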

bool
vtn_set_instruction_result_type(struct vtn_builder *b, SpvOp opcode,
                                const uint32_t *w, unsigned count);

struct vtn_ssa_value *vtn_ssa_value(struct vtn_builder *b, uint32_t value_id);

struct vtn_ssa_value *vtn_create_ssa_value(struct vtn_builder *b,
                                           const struct glsl_type *type);

struct vtn_ssa_value *vtn_ssa_transpose(struct vtn_builder *b,
                                        struct vtn_ssa_value *src);

nir_ssa_def *vtn_vector_extract(struct vtn_builder *b, nir_ssa_def *src,
                                unsigned index);
nir_ssa_def *vtn_vector_extract_dynamic(struct vtn_builder *b, nir_ssa_def *src,
                                        nir_ssa_def *index);
nir_ssa_def *vtn_vector_insert(struct vtn_builder *b, nir_ssa_def *src,
                               nir_ssa_def *insert, unsigned index);
nir_ssa_def *vtn_vector_insert_dynamic(struct vtn_builder *b, nir_ssa_def *src,
                                       nir_ssa_def *insert, nir_ssa_def *index);

nir_deref_var *vtn_nir_deref(struct vtn_builder *b, uint32_t id);

struct vtn_pointer *vtn_pointer_for_variable(struct vtn_builder *b,
                                             struct vtn_variable *var,
                                             struct vtn_type *ptr_type);

nir_deref_var *vtn_pointer_to_deref(struct vtn_builder *b,
                                    struct vtn_pointer *ptr);
nir_ssa_def *
vtn_pointer_to_offset(struct vtn_builder *b, struct vtn_pointer *ptr,
                      nir_ssa_def **index_out, unsigned *end_idx_out);

struct vtn_ssa_value *vtn_local_load(struct vtn_builder *b, nir_deref_var *src);

void vtn_local_store(struct vtn_builder *b, struct vtn_ssa_value *src,
                     nir_deref_var *dest);

struct vtn_ssa_value *
vtn_variable_load(struct vtn_builder *b, struct vtn_pointer *src);

void vtn_variable_store(struct vtn_builder *b, struct vtn_ssa_value *src,
                        struct vtn_pointer *dest);

void vtn_handle_variables(struct vtn_builder *b, SpvOp opcode,
                          const uint32_t *w, unsigned count);


typedef void (*vtn_decoration_foreach_cb)(struct vtn_builder *,
                                          struct vtn_value *,
                                          int member,
                                          const struct vtn_decoration *,
                                          void *);

void vtn_foreach_decoration(struct vtn_builder *b, struct vtn_value *value,
                            vtn_decoration_foreach_cb cb, void *data);
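
/* A sketch of a decoration callback (hypothetical; `member` is negative when
 * the decoration applies to the value itself rather than to a struct member,
 * matching the VTN_DEC_ convention above):
 *
 *    static void
 *    count_builtins_cb(struct vtn_builder *b, struct vtn_value *val,
 *                      int member, const struct vtn_decoration *dec,
 *                      void *data)
 *    {
 *       if (dec->decoration == SpvDecorationBuiltIn)
 *          (*(unsigned *)data)++;
 *    }
 *
 *    unsigned num_builtins = 0;
 *    vtn_foreach_decoration(b, val, count_builtins_cb, &num_builtins);
 */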

typedef void (*vtn_execution_mode_foreach_cb)(struct vtn_builder *,
                                              struct vtn_value *,
                                              const struct vtn_decoration *,
                                              void *);

void vtn_foreach_execution_mode(struct vtn_builder *b, struct vtn_value *value,
                                vtn_execution_mode_foreach_cb cb, void *data);

nir_op vtn_nir_alu_op_for_spirv_opcode(struct vtn_builder *b,
                                       SpvOp opcode, bool *swap,
                                       nir_alu_type src, nir_alu_type dst);

void vtn_handle_alu(struct vtn_builder *b, SpvOp opcode,
                    const uint32_t *w, unsigned count);

bool vtn_handle_glsl450_instruction(struct vtn_builder *b, uint32_t ext_opcode,
                                    const uint32_t *words, unsigned count);

static inline uint32_t
vtn_align_u32(uint32_t v, uint32_t a)
{
   assert(a != 0 && a == (a & -a));
   return (v + a - 1) & ~(a - 1);
}

static inline uint64_t
vtn_u64_literal(const uint32_t *w)
{
   return (uint64_t)w[1] << 32 | w[0];
}

#endif /* _VTN_PRIVATE_H_ */