1 // Copyright 2023 Google LLC
2 //
3 // Licensed under the Apache License, Version 2.0 (the "License");
4 // you may not use this file except in compliance with the License.
5 // You may obtain a copy of the License at
6 //
7 // https://www.apache.org/licenses/LICENSE-2.0
8 //
9 // Unless required by applicable law or agreed to in writing, software
10 // distributed under the License is distributed on an "AS IS" BASIS,
11 // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 // See the License for the specific language governing permissions and
13 // limitations under the License.
14
15 //! Rust compiler backend.
16
17 use crate::{ast, lint};
18 use quote::{format_ident, quote};
19 use std::collections::BTreeSet;
20 use std::path::Path;
21 use syn::LitInt;
22
23 use crate::analyzer::ast as analyzer_ast;
24
25 mod parser;
26 mod preamble;
27 mod serializer;
28 mod types;
29
30 use parser::FieldParser;
31 use serializer::FieldSerializer;
32
33 #[cfg(not(tm_mainline_prod))]
34 pub use heck::ToUpperCamelCase;
35
36 #[cfg(tm_mainline_prod)]
37 pub trait ToUpperCamelCase {
38     fn to_upper_camel_case(&self) -> String;
39 }
40
41 #[cfg(tm_mainline_prod)]
42 impl ToUpperCamelCase for str {
43     fn to_upper_camel_case(&self) -> String {
44 use heck::CamelCase;
45 let camel_case = self.to_camel_case();
46 if camel_case.is_empty() {
47 camel_case
48 } else {
49             // PDL identifiers are a-zA-Z0-9, so we're dealing with
50             // simple ASCII text.
51 format!("{}{}", &camel_case[..1].to_ascii_uppercase(), &camel_case[1..])
52 }
53 }
54 }
55
56 /// Generate a block of code.
57 ///
58 /// Like `quote!`, but the code block will be followed by an empty
59 /// line. This makes the generated code more readable.
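///
/// # Example
///
/// A minimal sketch of how the macro is used (illustrative only):
///
/// ```ignore
/// let getter = quote_block! {
///     fn get_x(&self) -> u8 { self.x }
/// };
/// // The trailing blank line separates consecutive blocks in the
/// // final generated file.
/// assert!(getter.ends_with("\n\n"));
/// ```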
60 #[macro_export]
61 macro_rules! quote_block {
62 ($($tt:tt)*) => {
63 format!("{}\n\n", ::quote::quote!($($tt)*))
64 }
65 }
66
67 /// Generate a bit-mask which selects the `n` least significant bits.
68 ///
69 /// Literal integers in Rust default to the `i32` type. For this
70 /// reason, if `n` is larger than 31, a suffix is added to the
71 /// `LitInt` returned. This should either be `u64` or `usize`
72 /// depending on where the result is used.
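///
/// # Example
///
/// A rough sketch of the literals produced (illustrative only):
///
/// ```ignore
/// assert_eq!(mask_bits(5, "u64").to_string(), "0x1f");
/// assert_eq!(mask_bits(36, "u64").to_string(), "0xf_ffff_ffff_u64");
/// ```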
73 pub fn mask_bits(n: usize, suffix: &str) -> syn::LitInt {
74 let suffix = if n > 31 { format!("_{suffix}") } else { String::new() };
75 // Format the hex digits as 0x1111_2222_3333_usize.
76 let hex_digits = format!("{:x}", (1u64 << n) - 1)
77 .as_bytes()
78 .rchunks(4)
79 .rev()
80 .map(|chunk| std::str::from_utf8(chunk).unwrap())
81 .collect::<Vec<&str>>()
82 .join("_");
83 syn::parse_str::<syn::LitInt>(&format!("0x{hex_digits}{suffix}")).unwrap()
84 }
85
86 fn generate_packet_size_getter<'a>(
87 scope: &lint::Scope<'a>,
88 fields: impl Iterator<Item = &'a analyzer_ast::Field>,
89 is_packet: bool,
90 ) -> (usize, proc_macro2::TokenStream) {
91 let mut constant_width = 0;
92 let mut dynamic_widths = Vec::new();
93
94 for field in fields {
95 if let Some(width) = scope.get_field_width(field, false) {
96 constant_width += width;
97 continue;
98 }
99
100 let decl = scope.get_field_declaration(field);
101 dynamic_widths.push(match &field.desc {
102 ast::FieldDesc::Payload { .. } | ast::FieldDesc::Body { .. } => {
103 if is_packet {
104 quote! {
105 self.child.get_total_size()
106 }
107 } else {
108 quote! {
109 self.payload.len()
110 }
111 }
112 }
113 ast::FieldDesc::Typedef { id, .. } => {
114 let id = format_ident!("{id}");
115 quote!(self.#id.get_size())
116 }
117 ast::FieldDesc::Array { id, width, .. } => {
118 let id = format_ident!("{id}");
119 match &decl {
120 Some(analyzer_ast::Decl {
121 desc: ast::DeclDesc::Struct { .. } | ast::DeclDesc::CustomField { .. },
122 ..
123 }) => {
124 quote! {
125 self.#id.iter().map(|elem| elem.get_size()).sum::<usize>()
126 }
127 }
128 Some(analyzer_ast::Decl { desc: ast::DeclDesc::Enum { .. }, .. }) => {
129 let width = syn::Index::from(
130 scope.get_decl_width(decl.unwrap(), false).unwrap() / 8,
131 );
132 let mul_width = (width.index > 1).then(|| quote!(* #width));
133 quote! {
134 self.#id.len() #mul_width
135 }
136 }
137 _ => {
138 let width = syn::Index::from(width.unwrap() / 8);
139 let mul_width = (width.index > 1).then(|| quote!(* #width));
140 quote! {
141 self.#id.len() #mul_width
142 }
143 }
144 }
145 }
146 _ => panic!("Unsupported field type: {field:?}"),
147 });
148 }
149
150 if constant_width > 0 {
151 let width = syn::Index::from(constant_width / 8);
152 dynamic_widths.insert(0, quote!(#width));
153 }
154 if dynamic_widths.is_empty() {
155 dynamic_widths.push(quote!(0))
156 }
157
158 (
159 constant_width,
160 quote! {
161 #(#dynamic_widths)+*
162 },
163 )
164 }
165
166 fn top_level_packet<'a>(scope: &lint::Scope<'a>, packet_name: &'a str) -> &'a analyzer_ast::Decl {
167 let mut decl = scope.typedef[packet_name];
168 while let ast::DeclDesc::Packet { parent_id: Some(parent_id), .. }
169 | ast::DeclDesc::Struct { parent_id: Some(parent_id), .. } = &decl.desc
170 {
171 decl = scope.typedef[parent_id];
172 }
173 decl
174 }
175
176 /// Find all constrained fields in children of `id`.
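///
/// For example, with the `Parent`/`Child`/`GrandChild` hierarchy used in
/// the unit tests below, `find_constrained_fields(&scope, "Parent")`
/// returns the fields `a`, `b` and `c`.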
177 fn find_constrained_fields<'a>(
178 scope: &'a lint::Scope<'a>,
179 id: &'a str,
180 ) -> Vec<&'a analyzer_ast::Field> {
181 let mut fields = Vec::new();
182 let mut field_names = BTreeSet::new();
183 let mut children = scope.iter_children(id).collect::<Vec<_>>();
184
185 while let Some(child) = children.pop() {
186 if let ast::DeclDesc::Packet { id, constraints, .. }
187 | ast::DeclDesc::Struct { id, constraints, .. } = &child.desc
188 {
189 let packet_scope = &scope.scopes[&scope.typedef[id]];
190 for constraint in constraints {
191 if field_names.insert(&constraint.id) {
192 fields.push(packet_scope.all_fields[&constraint.id]);
193 }
194 }
195 children.extend(scope.iter_children(id).collect::<Vec<_>>());
196 }
197 }
198
199 fields
200 }
201
202 /// Find parent fields which are constrained in child packets.
203 ///
204 /// These are the fields which need to be passed in when parsing
205 /// an `id` packet, since their values are needed for one or more
206 /// child packets.
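///
/// # Example
///
/// With the `Parent`/`Child`/`GrandChild` hierarchy from the unit tests
/// below, parsing a `Child` needs the parent fields `b` and `c`, since
/// they are constrained by `GrandChild` and `GrandGrandChild`:
///
/// ```ignore
/// let ids: Vec<_> = find_constrained_parent_fields(&scope, "Child")
///     .map(|field| field.id().unwrap())
///     .collect();
/// assert_eq!(ids, ["b", "c"]);
/// ```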
207 fn find_constrained_parent_fields<'a>(
208 scope: &'a lint::Scope<'a>,
209 id: &'a str,
210 ) -> impl Iterator<Item = &'a analyzer_ast::Field> {
211 let packet_scope = &scope.scopes[&scope.typedef[id]];
212 find_constrained_fields(scope, id).into_iter().filter(|field| {
213 let id = field.id().unwrap();
214 packet_scope.all_fields.contains_key(id) && packet_scope.get_packet_field(id).is_none()
215 })
216 }
217
218 /// Generate the declaration and implementation for a data struct.
219 ///
220 /// This struct will hold the data for a packet or a struct. It knows
221 /// how to parse and serialize its own fields.
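///
/// # Example
///
/// For a packet `Foo`, the generated items look roughly like this
/// sketch (field lists and bodies elided):
///
/// ```ignore
/// pub struct FooData { /* fields, plus a `child`/`payload` slot when needed */ }
///
/// impl FooData {
///     fn conforms(bytes: &[u8]) -> bool { /* fixed-width length check */ }
///     fn parse(bytes: &[u8]) -> Result<Self> { /* ... */ }
///     fn write_to(&self, buffer: &mut BytesMut) { /* ... */ }
///     fn get_size(&self) -> usize { /* ... */ }
/// }
/// ```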
222 fn generate_data_struct(
223 scope: &lint::Scope<'_>,
224 endianness: ast::EndiannessValue,
225 id: &str,
226 ) -> (proc_macro2::TokenStream, proc_macro2::TokenStream) {
227 let decl = scope.typedef[id];
228 let packet_scope = &scope.scopes[&decl];
229 let is_packet = matches!(&decl.desc, ast::DeclDesc::Packet { .. });
230
231 let span = format_ident!("bytes");
232 let serializer_span = format_ident!("buffer");
233 let mut field_parser = FieldParser::new(scope, endianness, id, &span);
234 let mut field_serializer = FieldSerializer::new(scope, endianness, id, &serializer_span);
235 for field in packet_scope.iter_fields() {
236 field_parser.add(field);
237 field_serializer.add(field);
238 }
239 field_parser.done();
240
241 let (parse_arg_names, parse_arg_types) = if is_packet {
242 let fields = find_constrained_parent_fields(scope, id).collect::<Vec<_>>();
243 let names = fields.iter().map(|f| format_ident!("{}", f.id().unwrap())).collect::<Vec<_>>();
244 let types = fields.iter().map(|f| types::rust_type(f)).collect::<Vec<_>>();
245 (names, types)
246 } else {
247 (Vec::new(), Vec::new()) // No extra arguments to parse in structs.
248 };
249
250 let (constant_width, packet_size) =
251 generate_packet_size_getter(scope, packet_scope.iter_fields(), is_packet);
252 let conforms = if constant_width == 0 {
253 quote! { true }
254 } else {
255 let constant_width = syn::Index::from(constant_width / 8);
256 quote! { #span.len() >= #constant_width }
257 };
258
259 let visibility = if is_packet { quote!() } else { quote!(pub) };
260 let has_payload = packet_scope.get_payload_field().is_some();
261 let has_children = scope.iter_children(id).next().is_some();
262
263 let struct_name = if is_packet { format_ident!("{id}Data") } else { format_ident!("{id}") };
264 let fields_with_ids =
265 packet_scope.iter_fields().filter(|f| f.id().is_some()).collect::<Vec<_>>();
266 let mut field_names =
267 fields_with_ids.iter().map(|f| format_ident!("{}", f.id().unwrap())).collect::<Vec<_>>();
268 let mut field_types = fields_with_ids.iter().map(|f| types::rust_type(f)).collect::<Vec<_>>();
269 if has_children || has_payload {
270 if is_packet {
271 field_names.push(format_ident!("child"));
272 let field_type = format_ident!("{id}DataChild");
273 field_types.push(quote!(#field_type));
274 } else {
275 field_names.push(format_ident!("payload"));
276 field_types.push(quote!(Vec<u8>));
277 }
278 }
279
280 let data_struct_decl = quote! {
281 #[derive(Debug, Clone, PartialEq, Eq)]
282 #[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
283 pub struct #struct_name {
284 #(#visibility #field_names: #field_types,)*
285 }
286 };
287
288 let data_struct_impl = quote! {
289 impl #struct_name {
290 fn conforms(#span: &[u8]) -> bool {
291 #conforms
292 }
293
294 #visibility fn parse(
295 #span: &[u8] #(, #parse_arg_names: #parse_arg_types)*
296 ) -> Result<Self> {
297 let mut cell = Cell::new(#span);
298 let packet = Self::parse_inner(&mut cell #(, #parse_arg_names)*)?;
299 // TODO(mgeisler): communicate back to user if !cell.get().is_empty()?
300 Ok(packet)
301 }
302
303 fn parse_inner(
304 mut #span: &mut Cell<&[u8]> #(, #parse_arg_names: #parse_arg_types)*
305 ) -> Result<Self> {
306 #field_parser
307 Ok(Self {
308 #(#field_names,)*
309 })
310 }
311
312 fn write_to(&self, buffer: &mut BytesMut) {
313 #field_serializer
314 }
315
316 fn get_total_size(&self) -> usize {
317 self.get_size()
318 }
319
320 fn get_size(&self) -> usize {
321 #packet_size
322 }
323 }
324 };
325
326 (data_struct_decl, data_struct_impl)
327 }
328
329 /// Find all parents from `id`.
330 ///
331 /// This includes the `Decl` for `id` itself.
332 fn find_parents<'a>(scope: &lint::Scope<'a>, id: &str) -> Vec<&'a analyzer_ast::Decl> {
333 let mut decl = scope.typedef[id];
334 let mut parents = vec![decl];
335 while let ast::DeclDesc::Packet { parent_id: Some(parent_id), .. }
336 | ast::DeclDesc::Struct { parent_id: Some(parent_id), .. } = &decl.desc
337 {
338 decl = scope.typedef[parent_id];
339 parents.push(decl);
340 }
341 parents.reverse();
342 parents
343 }
344
345 /// Turn the constraint into a value (such as `10` or
346 /// `SomeEnum::Foo`).
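///
/// # Example
///
/// ```ignore
/// // Scalar constraint `a = 10`                    ->  quote!(10)
/// // Enum constraint `b = B` on an `Enum16` field  ->  quote!(Enum16::B)
/// ```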
347 pub fn constraint_to_value(
348 packet_scope: &lint::PacketScope<'_>,
349 constraint: &ast::Constraint,
350 ) -> proc_macro2::TokenStream {
351 match constraint {
352 ast::Constraint { value: Some(value), .. } => {
353 let value = proc_macro2::Literal::usize_unsuffixed(*value);
354 quote!(#value)
355 }
356 // TODO(mgeisler): include type_id in `ast::Constraint` and
357 // drop the packet_scope argument.
358 ast::Constraint { tag_id: Some(tag_id), .. } => {
359 let type_id = match &packet_scope.all_fields[&constraint.id].desc {
360 ast::FieldDesc::Typedef { type_id, .. } => format_ident!("{type_id}"),
361 _ => unreachable!("Invalid constraint: {constraint:?}"),
362 };
363 let tag_id = format_ident!("{}", tag_id.to_upper_camel_case());
364 quote!(#type_id::#tag_id)
365 }
366 _ => unreachable!("Invalid constraint: {constraint:?}"),
367 }
368 }
369
370 /// Generate code for a `ast::Decl::Packet`.
371 fn generate_packet_decl(
372 scope: &lint::Scope<'_>,
373 endianness: ast::EndiannessValue,
374 id: &str,
375 ) -> proc_macro2::TokenStream {
376 let packet_scope = &scope.scopes[&scope.typedef[id]];
377
378 let top_level = top_level_packet(scope, id);
379 let top_level_id = top_level.id().unwrap();
380 let top_level_packet = format_ident!("{top_level_id}");
381 let top_level_data = format_ident!("{top_level_id}Data");
382 let top_level_id_lower = format_ident!("{}", top_level_id.to_lowercase());
383
384 // TODO(mgeisler): use the convert_case crate to convert between
385 // `FooBar` and `foo_bar` in the code below.
386 let span = format_ident!("bytes");
387 let id_lower = format_ident!("{}", id.to_lowercase());
388 let id_packet = format_ident!("{id}");
389 let id_child = format_ident!("{id}Child");
390 let id_data_child = format_ident!("{id}DataChild");
391 let id_builder = format_ident!("{id}Builder");
392
393 let parents = find_parents(scope, id);
394 let parent_ids = parents.iter().map(|p| p.id().unwrap()).collect::<Vec<_>>();
395 let parent_shifted_ids = parent_ids.iter().skip(1).map(|id| format_ident!("{id}"));
396 let parent_lower_ids =
397 parent_ids.iter().map(|id| format_ident!("{}", id.to_lowercase())).collect::<Vec<_>>();
398 let parent_shifted_lower_ids = parent_lower_ids.iter().skip(1).collect::<Vec<_>>();
399 let parent_packet = parent_ids.iter().map(|id| format_ident!("{id}"));
400 let parent_data = parent_ids.iter().map(|id| format_ident!("{id}Data"));
401 let parent_data_child = parent_ids.iter().map(|id| format_ident!("{id}DataChild"));
402
403 let all_fields = {
404 let mut fields = packet_scope.all_fields.values().collect::<Vec<_>>();
405 fields.sort_by_key(|f| f.id());
406 fields
407 };
408 let all_field_names =
409 all_fields.iter().map(|f| format_ident!("{}", f.id().unwrap())).collect::<Vec<_>>();
410 let all_field_types = all_fields.iter().map(|f| types::rust_type(f)).collect::<Vec<_>>();
411 let all_field_borrows =
412 all_fields.iter().map(|f| types::rust_borrow(f, scope)).collect::<Vec<_>>();
413 let all_field_getter_names = all_field_names.iter().map(|id| format_ident!("get_{id}"));
414 let all_field_self_field = all_fields.iter().map(|f| {
415 for (parent, parent_id) in parents.iter().zip(parent_lower_ids.iter()) {
416 if scope.scopes[parent].iter_fields().any(|ff| ff.id() == f.id()) {
417 return quote!(self.#parent_id);
418 }
419 }
420 unreachable!("Could not find {f:?} in parent chain");
421 });
422
423 let unconstrained_fields = all_fields
424 .iter()
425 .filter(|f| !packet_scope.all_constraints.contains_key(f.id().unwrap()))
426 .collect::<Vec<_>>();
427 let unconstrained_field_names = unconstrained_fields
428 .iter()
429 .map(|f| format_ident!("{}", f.id().unwrap()))
430 .collect::<Vec<_>>();
431 let unconstrained_field_types = unconstrained_fields.iter().map(|f| types::rust_type(f));
432
433 let rev_parents = parents.iter().rev().collect::<Vec<_>>();
434 let builder_assignments = rev_parents.iter().enumerate().map(|(idx, parent)| {
435 let parent_id = parent.id().unwrap();
436 let parent_id_lower = format_ident!("{}", parent_id.to_lowercase());
437 let parent_data = format_ident!("{parent_id}Data");
438 let parent_data_child = format_ident!("{parent_id}DataChild");
439 let parent_packet_scope = &scope.scopes[&scope.typedef[parent_id]];
440
441 let named_fields = {
442 let mut names =
443 parent_packet_scope.iter_fields().filter_map(ast::Field::id).collect::<Vec<_>>();
444 names.sort_unstable();
445 names
446 };
447
448 let mut field = named_fields.iter().map(|id| format_ident!("{id}")).collect::<Vec<_>>();
449 let mut value = named_fields
450 .iter()
451 .map(|&id| match packet_scope.all_constraints.get(id) {
452 Some(constraint) => constraint_to_value(packet_scope, constraint),
453 None => {
454 let id = format_ident!("{id}");
455 quote!(self.#id)
456 }
457 })
458 .collect::<Vec<_>>();
459
460 if parent_packet_scope.get_payload_field().is_some() {
461 field.push(format_ident!("child"));
462 if idx == 0 {
463                 // Top-most parent: the child is simply created
464                 // from our payload.
465 value.push(quote! {
466 match self.payload {
467 None => #parent_data_child::None,
468 Some(bytes) => #parent_data_child::Payload(bytes),
469 }
470 });
471 } else {
472 // Child is created from the previous parent.
473 let prev_parent_id = rev_parents[idx - 1].id().unwrap();
474 let prev_parent_id_lower = format_ident!("{}", prev_parent_id.to_lowercase());
475 let prev_parent_id = format_ident!("{prev_parent_id}");
476 value.push(quote! {
477 #parent_data_child::#prev_parent_id(#prev_parent_id_lower)
478 });
479 }
480 }
481
482 quote! {
483 let #parent_id_lower = Arc::new(#parent_data {
484 #(#field: #value,)*
485 });
486 }
487 });
488
489 let children = scope.iter_children(id).collect::<Vec<_>>();
490 let has_payload = packet_scope.get_payload_field().is_some();
491 let has_children_or_payload = !children.is_empty() || has_payload;
492 let child =
493 children.iter().map(|child| format_ident!("{}", child.id().unwrap())).collect::<Vec<_>>();
494 let child_data = child.iter().map(|child| format_ident!("{child}Data")).collect::<Vec<_>>();
495 let get_payload = (children.is_empty() && has_payload).then(|| {
496 quote! {
497 pub fn get_payload(&self) -> &[u8] {
498 match &self.#id_lower.child {
499 #id_data_child::Payload(bytes) => &bytes,
500 #id_data_child::None => &[],
501 }
502 }
503 }
504 });
505 let child_declaration = has_children_or_payload.then(|| {
506 quote! {
507 #[derive(Debug, Clone, PartialEq, Eq)]
508 #[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
509 pub enum #id_data_child {
510 #(#child(Arc<#child_data>),)*
511 Payload(Bytes),
512 None,
513 }
514
515 impl #id_data_child {
516 fn get_total_size(&self) -> usize {
517 match self {
518 #(#id_data_child::#child(value) => value.get_total_size(),)*
519 #id_data_child::Payload(bytes) => bytes.len(),
520 #id_data_child::None => 0,
521 }
522 }
523 }
524
525 #[derive(Debug, Clone, PartialEq, Eq)]
526 #[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
527 pub enum #id_child {
528 #(#child(#child),)*
529 Payload(Bytes),
530 None,
531 }
532 }
533 });
534 let specialize = has_children_or_payload.then(|| {
535 quote! {
536 pub fn specialize(&self) -> #id_child {
537 match &self.#id_lower.child {
538 #(
539 #id_data_child::#child(_) =>
540 #id_child::#child(#child::new(self.#top_level_id_lower.clone()).unwrap()),
541 )*
542 #id_data_child::Payload(payload) => #id_child::Payload(payload.clone()),
543 #id_data_child::None => #id_child::None,
544 }
545 }
546 }
547 });
548
549 let builder_payload_field = has_children_or_payload.then(|| {
550 quote! {
551 pub payload: Option<Bytes>
552 }
553 });
554
555 let ancestor_packets =
556 parent_ids[..parent_ids.len() - 1].iter().map(|id| format_ident!("{id}"));
557 let impl_from_and_try_from = (top_level_id != id).then(|| {
558 quote! {
559 #(
560 impl From<#id_packet> for #ancestor_packets {
561 fn from(packet: #id_packet) -> #ancestor_packets {
562 #ancestor_packets::new(packet.#top_level_id_lower).unwrap()
563 }
564 }
565 )*
566
567 impl TryFrom<#top_level_packet> for #id_packet {
568 type Error = Error;
569 fn try_from(packet: #top_level_packet) -> Result<#id_packet> {
570 #id_packet::new(packet.#top_level_id_lower)
571 }
572 }
573 }
574 });
575
576 let (data_struct_decl, data_struct_impl) = generate_data_struct(scope, endianness, id);
577
578 quote! {
579 #child_declaration
580
581 #data_struct_decl
582
583 #[derive(Debug, Clone, PartialEq, Eq)]
584 #[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
585 pub struct #id_packet {
586 #(
587 #[cfg_attr(feature = "serde", serde(flatten))]
588 #parent_lower_ids: Arc<#parent_data>,
589 )*
590 }
591
592 #[derive(Debug)]
593 #[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
594 pub struct #id_builder {
595 #(pub #unconstrained_field_names: #unconstrained_field_types,)*
596 #builder_payload_field
597 }
598
599 #data_struct_impl
600
601 impl Packet for #id_packet {
602 fn to_bytes(self) -> Bytes {
603 let mut buffer = BytesMut::with_capacity(self.#top_level_id_lower.get_size());
604 self.#top_level_id_lower.write_to(&mut buffer);
605 buffer.freeze()
606 }
607
608 fn to_vec(self) -> Vec<u8> {
609 self.to_bytes().to_vec()
610 }
611 }
612
613 impl From<#id_packet> for Bytes {
614 fn from(packet: #id_packet) -> Self {
615 packet.to_bytes()
616 }
617 }
618
619 impl From<#id_packet> for Vec<u8> {
620 fn from(packet: #id_packet) -> Self {
621 packet.to_vec()
622 }
623 }
624
625 #impl_from_and_try_from
626
627 impl #id_packet {
628 pub fn parse(#span: &[u8]) -> Result<Self> {
629 let mut cell = Cell::new(#span);
630 let packet = Self::parse_inner(&mut cell)?;
631 // TODO(mgeisler): communicate back to user if !cell.get().is_empty()?
632 Ok(packet)
633 }
634
635 fn parse_inner(mut bytes: &mut Cell<&[u8]>) -> Result<Self> {
636 let data = #top_level_data::parse_inner(&mut bytes)?;
637 Self::new(Arc::new(data))
638 }
639
640 #specialize
641
642 fn new(#top_level_id_lower: Arc<#top_level_data>) -> Result<Self> {
643 #(
644 let #parent_shifted_lower_ids = match &#parent_lower_ids.child {
645 #parent_data_child::#parent_shifted_ids(value) => value.clone(),
646 _ => return Err(Error::InvalidChildError {
647 expected: stringify!(#parent_data_child::#parent_shifted_ids),
648 actual: format!("{:?}", &#parent_lower_ids.child),
649 }),
650 };
651 )*
652 Ok(Self { #(#parent_lower_ids),* })
653 }
654
655 #(pub fn #all_field_getter_names(&self) -> #all_field_borrows #all_field_types {
656 #all_field_borrows #all_field_self_field.as_ref().#all_field_names
657 })*
658
659 #get_payload
660
661 fn write_to(&self, buffer: &mut BytesMut) {
662 self.#id_lower.write_to(buffer)
663 }
664
665 pub fn get_size(&self) -> usize {
666 self.#top_level_id_lower.get_size()
667 }
668 }
669
670 impl #id_builder {
671 pub fn build(self) -> #id_packet {
672 #(#builder_assignments;)*
673 #id_packet::new(#top_level_id_lower).unwrap()
674 }
675 }
676
677 #(
678 impl From<#id_builder> for #parent_packet {
679 fn from(builder: #id_builder) -> #parent_packet {
680 builder.build().into()
681 }
682 }
683 )*
684 }
685 }
686
687 /// Generate code for a `ast::Decl::Struct`.
688 fn generate_struct_decl(
689 scope: &lint::Scope<'_>,
690 endianness: ast::EndiannessValue,
691 id: &str,
692 ) -> proc_macro2::TokenStream {
693 let (struct_decl, struct_impl) = generate_data_struct(scope, endianness, id);
694 quote! {
695 #struct_decl
696 #struct_impl
697 }
698 }
699
700 /// Generate an enum declaration.
701 ///
702 /// # Arguments
703 /// * `id` - Enum identifier.
704 /// * `tags` - List of enum tags.
705 /// * `width` - Declared width of the enum, in bits.
706 /// * `open` - Whether to generate an open or closed enum. Open enums have
707 /// an additional Unknown case for unmatched values. Complete
708 /// enums (where the full range of values is covered) are
709 /// automatically closed.
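///
/// # Example
///
/// An incomplete enum generated as open gains a catch-all variant
/// (sketch of the generated declaration):
///
/// ```ignore
/// // enum Foo : 8 { A = 1, B = 2 }
/// pub enum Foo {
///     A,
///     B,
///     Unknown(Private<u8>),
/// }
/// ```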
710 fn generate_enum_decl(
711 id: &str,
712 tags: &[ast::Tag],
713 width: usize,
714 open: bool,
715 ) -> proc_macro2::TokenStream {
716 // Determine if the enum is complete, i.e. all values in the backing
717 // integer range have a matching tag in the original declaration.
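    // For example, with `max = 7`, the tags `A = 0` and `B = 1..7` cover
    // the whole range 0..=7, so the enum is complete.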
718 fn enum_is_complete(tags: &[ast::Tag], max: usize) -> bool {
719 let mut ranges = tags
720 .iter()
721 .map(|tag| match tag {
722 ast::Tag::Value(tag) => (tag.value, tag.value),
723 ast::Tag::Range(tag) => tag.range.clone().into_inner(),
724 })
725 .collect::<Vec<_>>();
726 ranges.sort_unstable();
727 ranges.first().unwrap().0 == 0
728 && ranges.last().unwrap().1 == max
729 && ranges.windows(2).all(|window| {
730 if let [left, right] = window {
731 left.1 == right.0 - 1
732 } else {
733 false
734 }
735 })
736 }
737
738 // Determine if the enum is primitive, i.e. does not contain any
739 // tag range.
740 fn enum_is_primitive(tags: &[ast::Tag]) -> bool {
741 tags.iter().all(|tag| matches!(tag, ast::Tag::Value(_)))
742 }
743
744 // Return the maximum value for the scalar type.
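    // For example, `scalar_max(3)` is 7, while `scalar_max(64)` saturates
    // to `usize::MAX` on a 64-bit host.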
745 fn scalar_max(width: usize) -> usize {
746 if width >= usize::BITS as usize {
747 usize::MAX
748 } else {
749 (1 << width) - 1
750 }
751 }
752
753     // Format an enum tag identifier to Rust upper camel case.
754 fn format_tag_ident(id: &str) -> proc_macro2::TokenStream {
755 let id = format_ident!("{}", id.to_upper_camel_case());
756 quote! { #id }
757 }
758
759     // Format a constant value as a hexadecimal constant.
760 fn format_value(value: usize) -> LitInt {
761 syn::parse_str::<syn::LitInt>(&format!("{:#x}", value)).unwrap()
762 }
763
764 // Backing type for the enum.
765 let backing_type = types::Integer::new(width);
766 let backing_type_str = proc_macro2::Literal::string(&format!("u{}", backing_type.width));
767 let range_max = scalar_max(width);
768 let is_complete = enum_is_complete(tags, scalar_max(width));
769 let is_primitive = enum_is_primitive(tags);
770 let name = format_ident!("{id}");
771
772 // Generate the variant cases for the enum declaration.
773 // Tags declared in ranges are flattened in the same declaration.
774 let use_variant_values = is_primitive && (is_complete || !open);
775 let repr_u64 = use_variant_values.then(|| quote! { #[repr(u64)] });
776 let mut variants = vec![];
777 for tag in tags.iter() {
778 match tag {
779 ast::Tag::Value(tag) if use_variant_values => {
780 let id = format_tag_ident(&tag.id);
781 let value = format_value(tag.value);
782 variants.push(quote! { #id = #value })
783 }
784 ast::Tag::Value(tag) => variants.push(format_tag_ident(&tag.id)),
785 ast::Tag::Range(tag) => {
786 variants.extend(tag.tags.iter().map(|tag| format_tag_ident(&tag.id)));
787 let id = format_tag_ident(&tag.id);
788 variants.push(quote! { #id(Private<#backing_type>) })
789 }
790 }
791 }
792
793 // Generate the cases for parsing the enum value from an integer.
794 let mut from_cases = vec![];
795 for tag in tags.iter() {
796 match tag {
797 ast::Tag::Value(tag) => {
798 let id = format_tag_ident(&tag.id);
799 let value = format_value(tag.value);
800 from_cases.push(quote! { #value => Ok(#name::#id) })
801 }
802 ast::Tag::Range(tag) => {
803 from_cases.extend(tag.tags.iter().map(|tag| {
804 let id = format_tag_ident(&tag.id);
805 let value = format_value(tag.value);
806 quote! { #value => Ok(#name::#id) }
807 }));
808 let id = format_tag_ident(&tag.id);
809 let start = format_value(*tag.range.start());
810 let end = format_value(*tag.range.end());
811 from_cases.push(quote! { #start ..= #end => Ok(#name::#id(Private(value))) })
812 }
813 }
814 }
815
816 // Generate the cases for serializing the enum value to an integer.
817 let mut into_cases = vec![];
818 for tag in tags.iter() {
819 match tag {
820 ast::Tag::Value(tag) => {
821 let id = format_tag_ident(&tag.id);
822 let value = format_value(tag.value);
823 into_cases.push(quote! { #name::#id => #value })
824 }
825 ast::Tag::Range(tag) => {
826 into_cases.extend(tag.tags.iter().map(|tag| {
827 let id = format_tag_ident(&tag.id);
828 let value = format_value(tag.value);
829 quote! { #name::#id => #value }
830 }));
831 let id = format_tag_ident(&tag.id);
832 into_cases.push(quote! { #name::#id(Private(value)) => *value })
833 }
834 }
835 }
836
837 // Generate a default case if the enum is open and incomplete.
838 if !is_complete && open {
839 variants.push(quote! { Unknown(Private<#backing_type>) });
840         from_cases.push(quote! { 0..=#range_max => Ok(#name::Unknown(Private(value))) });
841 into_cases.push(quote! { #name::Unknown(Private(value)) => *value });
842 }
843
844     // Generate an error case if the enum width is smaller than the backing
845     // type width, or if the enum is both closed and incomplete.
846 if backing_type.width != width || (!is_complete && !open) {
847 from_cases.push(quote! { _ => Err(value) });
848 }
849
850 // Derive other Into<uN> and Into<iN> implementations from the explicit
851 // implementation, where the type is larger than the backing type.
852 let derived_signed_into_types = [8, 16, 32, 64]
853 .into_iter()
854 .filter(|w| *w > width)
855 .map(|w| syn::parse_str::<syn::Type>(&format!("i{}", w)).unwrap());
856 let derived_unsigned_into_types = [8, 16, 32, 64]
857 .into_iter()
858 .filter(|w| *w >= width && *w != backing_type.width)
859 .map(|w| syn::parse_str::<syn::Type>(&format!("u{}", w)).unwrap());
860 let derived_into_types = derived_signed_into_types.chain(derived_unsigned_into_types);
861
862 quote! {
863 #repr_u64
864 #[derive(Debug, Clone, Copy, Hash, Eq, PartialEq)]
865 #[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
866 #[cfg_attr(feature = "serde", serde(try_from = #backing_type_str, into = #backing_type_str))]
867 pub enum #name {
868 #(#variants,)*
869 }
870
871 impl TryFrom<#backing_type> for #name {
872 type Error = #backing_type;
873 fn try_from(value: #backing_type) -> std::result::Result<Self, Self::Error> {
874 match value {
875 #(#from_cases,)*
876 }
877 }
878 }
879
880 impl From<&#name> for #backing_type {
881 fn from(value: &#name) -> Self {
882 match value {
883 #(#into_cases,)*
884 }
885 }
886 }
887
888 impl From<#name> for #backing_type {
889 fn from(value: #name) -> Self {
890 (&value).into()
891 }
892 }
893
894 #(impl From<#name> for #derived_into_types {
895 fn from(value: #name) -> Self {
896 #backing_type::from(value) as Self
897 }
898 })*
899 }
900 }
901
902 fn generate_decl(
903 scope: &lint::Scope<'_>,
904 file: &analyzer_ast::File,
905 decl: &analyzer_ast::Decl,
906 ) -> String {
907 match &decl.desc {
908 ast::DeclDesc::Packet { id, .. } => {
909 generate_packet_decl(scope, file.endianness.value, id).to_string()
910 }
911 ast::DeclDesc::Struct { id, parent_id: None, .. } => {
912 // TODO(mgeisler): handle structs with parents. We could
913 // generate code for them, but the code is not useful
914 // since it would require the caller to unpack everything
915 // manually. We either need to change the API, or
916 // implement the recursive (de)serialization.
917 generate_struct_decl(scope, file.endianness.value, id).to_string()
918 }
919 ast::DeclDesc::Enum { id, tags, width } => {
920 generate_enum_decl(id, tags, *width, false).to_string()
921 }
922 _ => todo!("unsupported Decl::{:?}", decl),
923 }
924 }
925
926 /// Generate Rust code from an AST.
927 ///
928 /// The code is not formatted; pipe it through `rustfmt` to get
929 /// readable source code.
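///
/// # Example
///
/// A minimal end-to-end sketch mirroring the unit tests below
/// (`source_text` stands in for the PDL input):
///
/// ```ignore
/// let mut db = ast::SourceDatabase::new();
/// let file = parse_inline(&mut db, String::from("stdin"), source_text).unwrap();
/// let file = analyzer::analyze(&file).unwrap();
/// let rust_code = generate(&db, &file);
/// ```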
930 pub fn generate(sources: &ast::SourceDatabase, file: &analyzer_ast::File) -> String {
931 let mut code = String::new();
932
933 let source = sources.get(file.file).expect("could not read source");
934 code.push_str(&preamble::generate(Path::new(source.name())));
935
936 let scope = lint::Scope::new(file);
937 for decl in &file.declarations {
938 code.push_str(&generate_decl(&scope, file, decl));
939 code.push_str("\n\n");
940 }
941
942 code
943 }
944
945 #[cfg(test)]
946 mod tests {
947 use super::*;
948 use crate::analyzer;
949 use crate::ast;
950 use crate::parser::parse_inline;
951 use crate::test_utils::{assert_snapshot_eq, rustfmt};
952 use paste::paste;
953
954 /// Parse a string fragment as a PDL file.
955 ///
956 /// # Panics
957 ///
958 /// Panics on parse errors.
959     pub fn parse_str(text: &str) -> analyzer_ast::File {
960 let mut db = ast::SourceDatabase::new();
961 let file =
962 parse_inline(&mut db, String::from("stdin"), String::from(text)).expect("parse error");
963 analyzer::analyze(&file).expect("analyzer error")
964 }
965
966 #[track_caller]
967     fn assert_iter_eq<T: std::cmp::PartialEq + std::fmt::Debug>(
968 left: impl IntoIterator<Item = T>,
969 right: impl IntoIterator<Item = T>,
970 ) {
971 assert_eq!(left.into_iter().collect::<Vec<T>>(), right.into_iter().collect::<Vec<T>>());
972 }
973
974 #[test]
975     fn test_find_constrained_parent_fields() {
976 let code = "
977 little_endian_packets
978 packet Parent {
979 a: 8,
980 b: 8,
981 c: 8,
982 _payload_,
983 }
984 packet Child: Parent(a = 10) {
985 x: 8,
986 _payload_,
987 }
988 packet GrandChild: Child(b = 20) {
989 y: 8,
990 _payload_,
991 }
992 packet GrandGrandChild: GrandChild(c = 30) {
993 z: 8,
994 }
995 ";
996 let file = parse_str(code);
997 let scope = lint::Scope::new(&file);
998 let find_fields =
999 |id| find_constrained_parent_fields(&scope, id).map(|field| field.id().unwrap());
1000 assert_iter_eq(find_fields("Parent"), vec![]);
1001 assert_iter_eq(find_fields("Child"), vec!["b", "c"]);
1002 assert_iter_eq(find_fields("GrandChild"), vec!["c"]);
1003 assert_iter_eq(find_fields("GrandGrandChild"), vec![]);
1004 }
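
    // Companion check for `find_parents` and `top_level_packet`: parents
    // are returned top-most first and include the declaration itself.
    #[test]
    fn test_find_parents() {
        let code = "
        little_endian_packets
        packet Parent {
            a: 8,
            b: 8,
            _payload_,
        }
        packet Child: Parent(a = 10) {
            x: 8,
            _payload_,
        }
        packet GrandChild: Child(b = 20) {
            y: 8,
        }
        ";
        let file = parse_str(code);
        let scope = lint::Scope::new(&file);
        let parents = find_parents(&scope, "GrandChild");
        assert_iter_eq(
            parents.iter().map(|decl| decl.id().unwrap()),
            vec!["Parent", "Child", "GrandChild"],
        );
        assert_eq!(top_level_packet(&scope, "GrandChild").id(), Some("Parent"));
    }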
1005
1006 /// Create a unit test for the given PDL `code`.
1007 ///
1008 /// The unit test will compare the generated Rust code for all
1009 /// declarations with previously saved snapshots. The snapshots
1010 /// are read from `"tests/generated/{name}_{endianness}_{id}.rs"`
1011     /// where `id` is taken from the declaration.
1012 ///
1013 /// When adding new tests or modifying existing ones, use
1014 /// `UPDATE_SNAPSHOTS=1 cargo test` to automatically populate the
1015 /// snapshots with the expected output.
1016 ///
1017     /// The `code` cannot have an endianness declaration; instead, you
1018     /// must supply either `little_endian` or `big_endian` as
1019     /// `endianness`.
1020 macro_rules! make_pdl_test {
1021 ($name:ident, $code:expr, $endianness:ident) => {
1022 paste! {
1023 #[test]
1024 fn [< test_ $name _ $endianness >]() {
1025 let name = stringify!($name);
1026 let endianness = stringify!($endianness);
1027 let code = format!("{endianness}_packets\n{}", $code);
1028 let mut db = ast::SourceDatabase::new();
1029 let file = parse_inline(&mut db, String::from("test"), code).unwrap();
1030 let file = analyzer::analyze(&file).unwrap();
1031 let actual_code = generate(&db, &file);
1032 assert_snapshot_eq(
1033 &format!("tests/generated/{name}_{endianness}.rs"),
1034 &rustfmt(&actual_code),
1035 );
1036 }
1037 }
1038 };
1039 }
1040
1041     /// Create little- and big-endian tests for the given PDL `code`.
1042 ///
1043 /// The `code` cannot have an endianness declaration: we will
1044 /// automatically generate unit tests for both
1045 /// "little_endian_packets" and "big_endian_packets".
1046 macro_rules! test_pdl {
1047 ($name:ident, $code:expr $(,)?) => {
1048 make_pdl_test!($name, $code, little_endian);
1049 make_pdl_test!($name, $code, big_endian);
1050 };
1051 }
1052
1053 test_pdl!(packet_decl_empty, "packet Foo {}");
1054
1055 test_pdl!(packet_decl_8bit_scalar, " packet Foo { x: 8 }");
1056 test_pdl!(packet_decl_24bit_scalar, "packet Foo { x: 24 }");
1057 test_pdl!(packet_decl_64bit_scalar, "packet Foo { x: 64 }");
1058
1059 test_pdl!(
1060 enum_declaration,
1061 r#"
1062 // Should generate unknown case.
1063 enum IncompleteTruncated : 3 {
1064 A = 0,
1065 B = 1,
1066 }
1067
1068 // Should generate unknown case.
1069 enum IncompleteTruncatedWithRange : 3 {
1070 A = 0,
1071 B = 1..6 {
1072 X = 1,
1073 Y = 2,
1074 }
1075 }
1076
1077 // Should generate unreachable case.
1078 enum CompleteTruncated : 3 {
1079 A = 0,
1080 B = 1,
1081 C = 2,
1082 D = 3,
1083 E = 4,
1084 F = 5,
1085 G = 6,
1086 H = 7,
1087 }
1088
1089 // Should generate unreachable case.
1090 enum CompleteTruncatedWithRange : 3 {
1091 A = 0,
1092 B = 1..7 {
1093 X = 1,
1094 Y = 2,
1095 }
1096 }
1097
1098 // Should generate no unknown or unreachable case.
1099 enum CompleteWithRange : 8 {
1100 A = 0,
1101 B = 1,
1102 C = 2..255,
1103 }
1104 "#
1105 );
1106
1107 test_pdl!(
1108 packet_decl_simple_scalars,
1109 r#"
1110 packet Foo {
1111 x: 8,
1112 y: 16,
1113 z: 24,
1114 }
1115 "#
1116 );
1117
1118 test_pdl!(
1119 packet_decl_complex_scalars,
1120 r#"
1121 packet Foo {
1122 a: 3,
1123 b: 8,
1124 c: 5,
1125 d: 24,
1126 e: 12,
1127 f: 4,
1128 }
1129 "#,
1130 );
1131
1132 // Test that we correctly mask a byte-sized value in the middle of
1133 // a chunk.
1134 test_pdl!(
1135 packet_decl_mask_scalar_value,
1136 r#"
1137 packet Foo {
1138 a: 2,
1139 b: 24,
1140 c: 6,
1141 }
1142 "#,
1143 );
1144
1145 test_pdl!(
1146 struct_decl_complex_scalars,
1147 r#"
1148 struct Foo {
1149 a: 3,
1150 b: 8,
1151 c: 5,
1152 d: 24,
1153 e: 12,
1154 f: 4,
1155 }
1156 "#,
1157 );
1158
1159 test_pdl!(packet_decl_8bit_enum, " enum Foo : 8 { A = 1, B = 2 } packet Bar { x: Foo }");
1160 test_pdl!(packet_decl_24bit_enum, "enum Foo : 24 { A = 1, B = 2 } packet Bar { x: Foo }");
1161 test_pdl!(packet_decl_64bit_enum, "enum Foo : 64 { A = 1, B = 2 } packet Bar { x: Foo }");
1162
1163 test_pdl!(
1164 packet_decl_mixed_scalars_enums,
1165 "
1166 enum Enum7 : 7 {
1167 A = 1,
1168 B = 2,
1169 }
1170
1171 enum Enum9 : 9 {
1172 A = 1,
1173 B = 2,
1174 }
1175
1176 packet Foo {
1177 x: Enum7,
1178 y: 5,
1179 z: Enum9,
1180 w: 3,
1181 }
1182 "
1183 );
1184
1185 test_pdl!(packet_decl_8bit_scalar_array, " packet Foo { x: 8[3] }");
1186 test_pdl!(packet_decl_24bit_scalar_array, "packet Foo { x: 24[5] }");
1187 test_pdl!(packet_decl_64bit_scalar_array, "packet Foo { x: 64[7] }");
1188
1189 test_pdl!(
1190 packet_decl_8bit_enum_array,
1191 "enum Foo : 8 { FOO_BAR = 1, BAZ = 2 } packet Bar { x: Foo[3] }"
1192 );
1193 test_pdl!(
1194 packet_decl_24bit_enum_array,
1195 "enum Foo : 24 { FOO_BAR = 1, BAZ = 2 } packet Bar { x: Foo[5] }"
1196 );
1197 test_pdl!(
1198 packet_decl_64bit_enum_array,
1199 "enum Foo : 64 { FOO_BAR = 1, BAZ = 2 } packet Bar { x: Foo[7] }"
1200 );
1201
1202 test_pdl!(
1203 packet_decl_array_dynamic_count,
1204 "
1205 packet Foo {
1206 _count_(x): 5,
1207 padding: 3,
1208 x: 24[]
1209 }
1210 "
1211 );
1212
1213 test_pdl!(
1214 packet_decl_array_dynamic_size,
1215 "
1216 packet Foo {
1217 _size_(x): 5,
1218 padding: 3,
1219 x: 24[]
1220 }
1221 "
1222 );
1223
1224 test_pdl!(
1225 packet_decl_array_unknown_element_width_dynamic_size,
1226 "
1227 struct Foo {
1228 _count_(a): 40,
1229 a: 16[],
1230 }
1231
1232 packet Bar {
1233 _size_(x): 40,
1234 x: Foo[],
1235 }
1236 "
1237 );
1238
1239 test_pdl!(
1240 packet_decl_array_unknown_element_width_dynamic_count,
1241 "
1242 struct Foo {
1243 _count_(a): 40,
1244 a: 16[],
1245 }
1246
1247 packet Bar {
1248 _count_(x): 40,
1249 x: Foo[],
1250 }
1251 "
1252 );
1253
1254 test_pdl!(
1255 packet_decl_reserved_field,
1256 "
1257 packet Foo {
1258 _reserved_: 40,
1259 }
1260 "
1261 );
1262
1263 test_pdl!(
1264 packet_decl_fixed_scalar_field,
1265 "
1266 packet Foo {
1267 _fixed_ = 7 : 7,
1268 b: 57,
1269 }
1270 "
1271 );
1272
1273 test_pdl!(
1274 packet_decl_fixed_enum_field,
1275 "
1276 enum Enum7 : 7 {
1277 A = 1,
1278 B = 2,
1279 }
1280
1281 packet Foo {
1282 _fixed_ = A : Enum7,
1283 b: 57,
1284 }
1285 "
1286 );
1287
1288 test_pdl!(
1289 packet_decl_payload_field_variable_size,
1290 "
1291 packet Foo {
1292 a: 8,
1293 _size_(_payload_): 8,
1294 _payload_,
1295 b: 16,
1296 }
1297 "
1298 );
1299
1300 test_pdl!(
1301 packet_decl_payload_field_unknown_size,
1302 "
1303 packet Foo {
1304 a: 24,
1305 _payload_,
1306 }
1307 "
1308 );
1309
1310 test_pdl!(
1311 packet_decl_payload_field_unknown_size_terminal,
1312 "
1313 packet Foo {
1314 _payload_,
1315 a: 24,
1316 }
1317 "
1318 );
1319
1320 test_pdl!(
1321 packet_decl_child_packets,
1322 "
1323 enum Enum16 : 16 {
1324 A = 1,
1325 B = 2,
1326 }
1327
1328 packet Foo {
1329 a: 8,
1330 b: Enum16,
1331 _size_(_payload_): 8,
1332 _payload_
1333 }
1334
1335 packet Bar : Foo (a = 100) {
1336 x: 8,
1337 }
1338
1339 packet Baz : Foo (b = B) {
1340 y: 16,
1341 }
1342 "
1343 );
1344
1345 test_pdl!(
1346 packet_decl_grand_children,
1347 "
1348 enum Enum16 : 16 {
1349 A = 1,
1350 B = 2,
1351 }
1352
1353 packet Parent {
1354 foo: Enum16,
1355 bar: Enum16,
1356 baz: Enum16,
1357 _size_(_payload_): 8,
1358 _payload_
1359 }
1360
1361 packet Child : Parent (foo = A) {
1362 quux: Enum16,
1363 _payload_,
1364 }
1365
1366 packet GrandChild : Child (bar = A, quux = A) {
1367 _body_,
1368 }
1369
1370 packet GrandGrandChild : GrandChild (baz = A) {
1371 _body_,
1372 }
1373 "
1374 );
1375
1376 // TODO(mgeisler): enable this test when we have an approach to
1377 // struct fields with parents.
1378 //
1379 // test_pdl!(
1380 // struct_decl_child_structs,
1381 // "
1382 // enum Enum16 : 16 {
1383 // A = 1,
1384 // B = 2,
1385 // }
1386 //
1387 // struct Foo {
1388 // a: 8,
1389 // b: Enum16,
1390 // _size_(_payload_): 8,
1391 // _payload_
1392 // }
1393 //
1394 // struct Bar : Foo (a = 100) {
1395 // x: 8,
1396 // }
1397 //
1398 // struct Baz : Foo (b = B) {
1399 // y: 16,
1400 // }
1401 // "
1402 // );
1403 //
1404 // test_pdl!(
1405 // struct_decl_grand_children,
1406 // "
1407 // enum Enum16 : 16 {
1408 // A = 1,
1409 // B = 2,
1410 // }
1411 //
1412 // struct Parent {
1413 // foo: Enum16,
1414 // bar: Enum16,
1415 // baz: Enum16,
1416 // _size_(_payload_): 8,
1417 // _payload_
1418 // }
1419 //
1420 // struct Child : Parent (foo = A) {
1421 // quux: Enum16,
1422 // _payload_,
1423 // }
1424 //
1425 // struct GrandChild : Child (bar = A, quux = A) {
1426 // _body_,
1427 // }
1428 //
1429 // struct GrandGrandChild : GrandChild (baz = A) {
1430 // _body_,
1431 // }
1432 // "
1433 // );
1434 }
1435