// Copyright 2019 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     https://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

use byteorder::{LittleEndian, WriteBytesExt};

use crate::bitwidth::BitWidth;
use crate::bitwidth::BitWidth::*;
use crate::flexbuffer_type::FlexBufferType;
use crate::flexbuffer_type::FlexBufferType::*;

/// Internal representation of FlexBuffer types and data before writing.
/// These get placed on the builder's stack and are eventually committed.
#[derive(Debug, Clone, Copy, PartialEq)]
pub enum Value {
    // Inline types
    Null,
    Int(i64),
    UInt(u64),
    Float(f64),
    Bool(bool),
    /// Null-terminated C string. Only used with `Map`s.
    Key(usize),
    /// The other ~20 or so types.
    Reference {
        address: usize,
        child_width: BitWidth,
        fxb_type: FlexBufferType,
    },
}

macro_rules! new_typed_vector {
    ($name: ident, $v2: ident, $v3: ident, $v4: ident, $vn: ident) => {
        /// Returns a typed vector, fixed length if possible.
        /// Address and child width are zero initialized and must be set.
        pub fn $name(n: usize) -> Value {
            let address = 0;
            let child_width = W8;
            match n {
                2 => Value::Reference {
                    address,
                    child_width,
                    fxb_type: $v2,
                },
                3 => Value::Reference {
                    address,
                    child_width,
                    fxb_type: $v3,
                },
                4 => Value::Reference {
                    address,
                    child_width,
                    fxb_type: $v4,
                },
                _ => Value::Reference {
                    address,
                    child_width,
                    fxb_type: $vn,
                },
            }
        }
    };
}
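// The macro above generates the constructors `new_int_vector`, `new_uint_vector`,
// and `new_float_vector` used below. As a rough sketch of the behavior:
// `new_int_vector(2)` yields a `Value::Reference` with `fxb_type: VectorInt2`,
// lengths 3 and 4 map to `VectorInt3`/`VectorInt4`, and any other length falls
// back to the variable-length `VectorInt`. The address and child width start as
// placeholders (0 and `W8`) until the builder fills them in.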

impl Value {
    pub fn new_vector() -> Self {
        Value::Reference {
            address: 0,
            child_width: W8,
            fxb_type: Vector,
        }
    }
    pub fn new_map() -> Self {
        Value::Reference {
            address: 0,
            child_width: W8,
            fxb_type: Map,
        }
    }
    new_typed_vector!(
        new_int_vector,
        VectorInt2,
        VectorInt3,
        VectorInt4,
        VectorInt
    );
    new_typed_vector!(
        new_uint_vector,
        VectorUInt2,
        VectorUInt3,
        VectorUInt4,
        VectorUInt
    );
    new_typed_vector!(
        new_float_vector,
        VectorFloat2,
        VectorFloat3,
        VectorFloat4,
        VectorFloat
    );
    pub fn fxb_type(&self) -> FlexBufferType {
        match *self {
            Value::Null => Null,
            Value::Int(_) => Int,
            Value::UInt(_) => UInt,
            Value::Float(_) => Float,
            Value::Bool(_) => Bool,
            Value::Key(_) => Key,
            Value::Reference { fxb_type, .. } => fxb_type,
        }
    }
    pub fn is_fixed_length_vector(&self) -> bool {
        self.fxb_type().is_fixed_length_vector()
    }
    pub fn is_inline(&self) -> bool {
        self.fxb_type().is_inline()
    }
    pub fn is_reference(&self) -> bool {
        !self.is_inline()
    }
    pub fn is_key(&self) -> bool {
        if let Value::Key(_) = self {
            true
        } else {
            false
        }
    }
    pub fn is_typed_vector_or_map(&self) -> bool {
        if let Value::Reference { fxb_type, .. } = self {
            fxb_type.is_heterogenous()
        } else {
            false
        }
    }
    pub fn prefix_length(&self) -> usize {
        if self.is_fixed_length_vector() || self.is_inline() {
            return 0;
        }
        if let Value::Reference { fxb_type, .. } = self {
            if *fxb_type == Map {
                return 3;
            }
        }
        1
    }
    pub fn set_fxb_type_or_panic(&mut self, new_type: FlexBufferType) {
        if let Value::Reference { fxb_type, .. } = self {
            *fxb_type = new_type;
        } else {
            panic!("`set_fxb_type_or_panic` called on {:?}", self)
        }
    }
    pub fn set_child_width_or_panic(&mut self, new_width: BitWidth) {
        if let Value::Reference { child_width, .. } = self {
            *child_width = new_width;
        } else {
            panic!("`set_child_width_or_panic` called on {:?}", self);
        }
    }
    pub fn get_address(&self) -> Option<usize> {
        if let Value::Reference { address, .. } | Value::Key(address) = self {
            Some(*address)
        } else {
            None
        }
    }
    pub fn set_address_or_panic(&mut self, new_address: usize) {
        if let Value::Reference { address, .. } | Value::Key(address) = self {
            *address = new_address;
        } else {
            panic!("`set_address_or_panic` called on {:?}", self);
        }
    }
    /// For inline types, the width of the value to be stored.
    /// For reference types, the width of the data referred to.
    /// Note that Key types always refer to 8 bit data.
    pub fn width_or_child_width(&self) -> BitWidth {
        match *self {
            Value::Int(x) => x.into(),
            Value::UInt(x) => x.into(),
            Value::Float(x) => x.into(),
            Value::Key(_) | Value::Bool(_) | Value::Null => W8,
            Value::Reference { child_width, .. } => child_width,
        }
    }
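    // Example for `width_or_child_width` (illustrative; assumes the `Into<BitWidth>`
    // impls pick the smallest width that can hold the value): `Value::Int(1)` fits
    // in `W8`, while `Value::Int(300)` needs `W16`.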
    pub fn relative_address(self, written_at: usize) -> Option<Value> {
        self.get_address().map(|address| {
            let offset = written_at
                .checked_sub(address)
                .expect("Error: References may only refer backwards in buffer.");
            Value::UInt(offset as u64)
        })
    }
    /// Computes the minimum required width of `value` when stored in a vector
    /// starting at `vector_start` at index `idx` (this index includes the prefix).
    /// `Value::Reference{..}` variants require location information because
    /// offsets are relative.
    pub fn width_in_vector(self, vector_start: usize, idx: usize) -> BitWidth {
        match self {
            Value::Bool(_) => W8,
            Value::Null => W8,
            Value::Int(x) => x.into(),
            Value::UInt(x) => x.into(),
            Value::Float(x) => x.into(),
            _ => {
                debug_assert!(self.is_reference());
                for &width in BitWidth::iter() {
                    // Each candidate element width occupies 1, 2, 4, or 8 bytes.
                    let bytes = 1 << width as usize;
                    let alignment = (bytes - vector_start % bytes) % bytes;
                    let written_at = vector_start + alignment + idx * bytes;
                    // This match must always succeed.
                    if let Some(Value::UInt(offset)) = self.relative_address(written_at) {
                        if BitWidth::from(offset) == width {
                            return width;
                        }
                    }
                }
                unreachable!()
            }
        }
    }
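    // Worked example for `width_in_vector` (illustrative only): with
    // `vector_start = 5` and `idx = 0`, trying `W8` gives `bytes = 1` and
    // `alignment = 0`, so the element would be written at offset 5. If the
    // backwards offset to the referenced data then fits in one byte, `W8` is
    // returned; otherwise the loop tries the wider widths in turn.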
    pub fn packed_type(self, parent_width: BitWidth) -> u8 {
        let width = if self.is_inline() {
            std::cmp::max(parent_width, self.width_or_child_width())
        } else {
            self.width_or_child_width()
        };
        (self.fxb_type() as u8) << 2 | width as u8
    }
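    // As the expression above shows, a packed type byte stores the FlexBuffer
    // type in its upper six bits and the (child) width in its lower two; an
    // inline value keeps its own width only if that is larger than the
    // parent's.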
}

pub fn find_vector_type<'a, T>(mut values: T) -> Value
where
    T: std::iter::Iterator<Item = &'a Value>,
{
    let first = values.next();
    if first.is_none() {
        return Value::new_vector();
    }
    let mut len = 1;
    let init = first.unwrap().fxb_type();
    for v in values {
        if v.fxb_type() != init {
            return Value::new_vector();
        }
        len += 1;
    }
    let vector_type = match init {
        Bool => VectorBool,
        UInt => return Value::new_uint_vector(len),
        Int => return Value::new_int_vector(len),
        Float => return Value::new_float_vector(len),
        Key => VectorKey,
        // Note that VectorString is deprecated for writing
        _ => return Value::new_vector(),
    };
    Value::Reference {
        address: 0,
        child_width: W8,
        fxb_type: vector_type,
    }
}
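// Illustrative examples of `find_vector_type`: `[Value::Int(1), Value::Int(2)]`
// maps to `VectorInt2`, a mix of `Int` and `UInt` elements falls back to the
// untyped `Vector`, and an empty iterator also yields `Value::new_vector()`.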

#[inline]
pub fn store_value(buffer: &mut Vec<u8>, mut value: Value, width: BitWidth) {
    // Remap to number types.
    use Value::*;
    if let Some(offset) = value.relative_address(buffer.len()) {
        value = offset;
    } else {
        value = match value {
            Bool(x) => UInt(x.into()),
            Null => UInt(0), // Should this be 0 bytes?
            _ => value,
        }
    }
    let write_result = match (value, width) {
        (UInt(x), W8) => buffer.write_u8(x as u8),
        (UInt(x), W16) => buffer.write_u16::<LittleEndian>(x as u16),
        (UInt(x), W32) => buffer.write_u32::<LittleEndian>(x as u32),
        (UInt(x), W64) => buffer.write_u64::<LittleEndian>(x),
        (Int(x), W8) => buffer.write_i8(x as i8),
        (Int(x), W16) => buffer.write_i16::<LittleEndian>(x as i16),
        (Int(x), W32) => buffer.write_i32::<LittleEndian>(x as i32),
        (Int(x), W64) => buffer.write_i64::<LittleEndian>(x),
        (Float(x), W32) => buffer.write_f32::<LittleEndian>(x as f32),
        (Float(x), W64) => buffer.write_f64::<LittleEndian>(x),
        (Float(_), _) => unreachable!("Error: Flatbuffers does not support 8 and 16 bit floats."),
        _ => unreachable!("Variant not considered: {:?}", value),
    };
    write_result.unwrap_or_else(|err| {
        panic!(
            "Error writing value {:?} with width {:?}: {:?}",
            value, width, err
        )
    });
}
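
// A minimal sanity-check sketch (illustrative; it assumes this module is
// reachable from the crate's normal `cargo test` run and only exercises
// behavior visible in this file).
#[cfg(test)]
mod value_sketch_tests {
    use super::*;
    use crate::bitwidth::BitWidth::*;
    use crate::flexbuffer_type::FlexBufferType::*;

    #[test]
    fn store_uint_is_little_endian() {
        let mut buffer = vec![];
        store_value(&mut buffer, Value::UInt(0x0102), W16);
        assert_eq!(buffer, vec![0x02, 0x01]);
    }

    #[test]
    fn homogenous_ints_become_a_fixed_length_typed_vector() {
        let values = [Value::Int(1), Value::Int(2)];
        let v = find_vector_type(values.iter());
        assert_eq!(v.fxb_type(), VectorInt2);
    }
}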