/*
 * Copyright 2017 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can
 * be found in the LICENSE file.
 *
 */

#ifndef SKC_RASTER_BUILDER_CL_12_ONCE
#define SKC_RASTER_BUILDER_CL_12_ONCE

//
//
//

#include "types.h"
#include "macros.h"
#include "common.h"

//
// FIXME -- these magic numbers will be replaced with tile.h constants
// although they're probably universal across all devices
//
// FIXME -- NEED TO EVALUATE IF THIS DISTRIBUTION OF BITS IS GOING TO
// BE TOO SMALL -- plenty of room to jiggle these bits
//

#define SKC_CMD_RASTERIZE_BITS_TRANSFORM  12
#define SKC_CMD_RASTERIZE_BITS_CLIP       12
#define SKC_CMD_RASTERIZE_BITS_COHORT      8

SKC_STATIC_ASSERT(SKC_CMD_RASTERIZE_BITS_TRANSFORM == SKC_CMD_FILL_BITS_TRANSFORM);
SKC_STATIC_ASSERT(SKC_CMD_RASTERIZE_BITS_CLIP      == SKC_CMD_FILL_BITS_CLIP);
SKC_STATIC_ASSERT(SKC_CMD_RASTERIZE_BITS_COHORT    == SKC_CMD_FILL_BITS_COHORT);

//
// device-side rasterization cmd
//

union skc_cmd_rasterize
{
  skc_ulong  u64;

  skc_uint2  u32v2;

  struct {
    //
    // Unlike anywhere else in the pipeline, the nodeword index points
    // "inside" of a path node (with word resolution).  This means
    // there is up to 16 GB of 32-bit word addressing in a unified
    // block pool:
    //
    //   "16GB ought to be enough for anyone" -- ASM 5/30/17
    //
    skc_uint nodeword;
#if defined(__OPENCL_C_VERSION__)
    skc_uint tcc;
#else
    skc_uint transform : SKC_CMD_RASTERIZE_BITS_TRANSFORM;
    skc_uint clip      : SKC_CMD_RASTERIZE_BITS_CLIP;
    skc_uint cohort    : SKC_CMD_RASTERIZE_BITS_COHORT;
#endif
  };
};

SKC_STATIC_ASSERT(sizeof(union skc_cmd_rasterize) == sizeof(skc_uint2));

//
//
//

#define SKC_CMD_RASTERIZE_HI_OFFSET_COHORT  (SKC_CMD_RASTERIZE_BITS_TRANSFORM + SKC_CMD_RASTERIZE_BITS_CLIP)
#define SKC_CMD_RASTERIZE_MASK_COHORT(c)    ((c).u32v2.hi & SKC_BITS_TO_MASK_AT(SKC_CMD_RASTERIZE_BITS_COHORT,SKC_CMD_RASTERIZE_HI_OFFSET_COHORT))

#define SKC_CMD_RASTERIZE_GET_TRANSFORM(c)  ((c).u32v2.hi & SKC_BITS_TO_MASK(SKC_CMD_RASTERIZE_BITS_TRANSFORM))
#define SKC_CMD_RASTERIZE_GET_CLIP(c)       SKC_BFE((c).tcc,SKC_CMD_RASTERIZE_BITS_CLIP,SKC_CMD_RASTERIZE_BITS_TRANSFORM)
#define SKC_CMD_RASTERIZE_GET_COHORT(c)     ((c).u32v2.hi >> SKC_CMD_RASTERIZE_HI_OFFSET_COHORT)
// SKC_BFE((c).tcc,SKC_CMD_RASTERIZE_BITS_COHORT,SKC_CMD_RASTERIZE_HI_OFFSET_COHORT)

//
//
//

#define SKC_TTSK_SIZE_COHORT  (1 << SKC_CMD_RASTERIZE_BITS_COHORT)
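
//
// NOTE: the helper below is an illustrative sketch and is not part of
// the original header -- it shows how a host-side encoder *might* pack
// the transform, clip and cohort indices into the hi word of a
// rasterize cmd so that the SKC_CMD_RASTERIZE_GET_*() macros above
// recover them on the device side.  The function name is hypothetical
// and it assumes the host-side skc_uint2 exposes .lo/.hi members (as
// the device vector type does).
//

#if !defined(__OPENCL_C_VERSION__)

static
void
skc_cmd_rasterize_pack(union skc_cmd_rasterize * const cmd,
                       skc_uint                  const nodeword,  // word index into the block pool
                       skc_uint                  const transform, // < 2^SKC_CMD_RASTERIZE_BITS_TRANSFORM
                       skc_uint                  const clip,      // < 2^SKC_CMD_RASTERIZE_BITS_CLIP
                       skc_uint                  const cohort)    // < 2^SKC_CMD_RASTERIZE_BITS_COHORT
{
  cmd->u32v2.lo = nodeword;

  // hi word layout (lsb to msb): transform:12 | clip:12 | cohort:8
  cmd->u32v2.hi = transform                                      |
                  (clip   << SKC_CMD_RASTERIZE_BITS_TRANSFORM)   |
                  (cohort << SKC_CMD_RASTERIZE_HI_OFFSET_COHORT);

  // round trip: SKC_CMD_RASTERIZE_GET_COHORT(*cmd) == cohort
}

#endif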

//
// COHORT META DATA
//

union skc_raster_cohort_meta_in
{
  skc_uint4  u32v4;

  struct {
    skc_uint  blocks; // # of rk blocks
    skc_uint  offset; // start of rk span
    skc_uint  pk;     // # of pk keys
    skc_uint  rk;     // # of rk keys
  };
};

union skc_raster_cohort_meta_out
{
  skc_uint4  u32v4;

  struct {
    skc_uint  blocks; // # of blocks in raster -- initially just rk blocks
    skc_uint  offset; // start of rk span
    skc_uint  nodes;  // # of nodes in raster -- necessary for walking
    skc_uint  keys;   // # of rk & pk keys -- initially just rk
  };
};

union skc_raster_cohort_meta_inout
{
  union skc_raster_cohort_meta_in   in;
  union skc_raster_cohort_meta_out  out;
};

//
// followed by one word for the offset
//

struct skc_raster_cohort_meta
{
  union skc_raster_cohort_meta_inout  inout[SKC_TTSK_SIZE_COHORT];
  skc_uint                            reads[SKC_TTSK_SIZE_COHORT]; // starting ring reads -- [0] is raster head
};

#define SKC_RASTER_COHORT_META_OFFSET_READS  (SKC_OFFSET_OF(struct skc_raster_cohort_meta,reads) / sizeof(skc_uint))

//
// COHORT ATOMICS
//

struct skc_raster_cohort_atomic
{
  // rasterization input
  skc_uint cmds;

  // rasterization output
  skc_uint keys;

  // block pool base -- idea here is to perform one atomic allocation
  // skc_uint bp_base;
};

#define SKC_RASTER_COHORT_ATOMIC_OFFSET_CMDS       0
#define SKC_RASTER_COHORT_ATOMIC_OFFSET_KEYS       1

#define SKC_RASTER_COHORT_ATOMIC_OFFSET_CMDS_CALC  (SKC_OFFSET_OF(struct skc_raster_cohort_atomic,cmds) / sizeof(skc_uint))
#define SKC_RASTER_COHORT_ATOMIC_OFFSET_KEYS_CALC  (SKC_OFFSET_OF(struct skc_raster_cohort_atomic,keys) / sizeof(skc_uint))

SKC_STATIC_ASSERT(SKC_RASTER_COHORT_ATOMIC_OFFSET_CMDS == SKC_RASTER_COHORT_ATOMIC_OFFSET_CMDS_CALC); // verify
SKC_STATIC_ASSERT(SKC_RASTER_COHORT_ATOMIC_OFFSET_KEYS == SKC_RASTER_COHORT_ATOMIC_OFFSET_KEYS_CALC); // verify

//
//
//

#endif

//
//
//
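
/*
 * NOTE: illustrative only and not part of the original header.  After
 * the rasterization kernels have run, a host-side reader might pull
 * the two cohort counters out of a mapped copy of the atomics buffer
 * using the word offsets defined above -- the "atomics" pointer name
 * is hypothetical:
 *
 *   skc_uint const * const atomics = ...; // mapped skc_raster_cohort_atomic
 *   skc_uint const         cmds    = atomics[SKC_RASTER_COHORT_ATOMIC_OFFSET_CMDS];
 *   skc_uint const         keys    = atomics[SKC_RASTER_COHORT_ATOMIC_OFFSET_KEYS];
 */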