1 use crate::version;
2 use anyhow::{bail, Result};
3 use indexmap::IndexMap;
4 use quote::quote;
5 use std::collections::BTreeMap;
6 use std::fs;
7 use std::path::{Path, PathBuf};
8 use syn::parse::{Error, Parser};
9 use syn::{
10 parse_quote, Attribute, Data, DataEnum, DataStruct, DeriveInput, Fields, GenericArgument,
11 Ident, Item, PathArguments, TypeMacro, TypePath, TypeTuple, Visibility,
12 };
13 use syn_codegen as types;
14 use thiserror::Error;
15
// Entry point of the syn source tree, relative to the codegen crate.
const SYN_CRATE_ROOT: &str = "../src/lib.rs";
// Source file containing the `export_token_macro` / `Token!` definition
// from which the token lookup table is built.
const TOKEN_SRC: &str = "../src/token.rs";
// Generated submodules that must be skipped while walking the source tree.
const IGNORED_MODS: &[&str] = &["fold", "visit", "visit_mut"];
// Types picked up from plain `struct` items rather than ast_* macro calls.
const EXTRA_TYPES: &[&str] = &["Lifetime"];

// NOTE: BTreeMap is used here instead of HashMap to have deterministic output.
type ItemLookup = BTreeMap<Ident, AstItem>;
type TokenLookup = BTreeMap<String, String>;
24
25 /// Parse the contents of `src` and return a list of AST types.
parse() -> Result<types::Definitions>26 pub fn parse() -> Result<types::Definitions> {
27 let mut item_lookup = BTreeMap::new();
28 load_file(SYN_CRATE_ROOT, &[], &mut item_lookup)?;
29
30 let token_lookup = load_token_file(TOKEN_SRC)?;
31
32 let version = version::get()?;
33
34 let types = item_lookup
35 .values()
36 .map(|item| introspect_item(item, &item_lookup, &token_lookup))
37 .collect();
38
39 let tokens = token_lookup
40 .into_iter()
41 .map(|(name, ty)| (ty, name))
42 .collect();
43
44 Ok(types::Definitions {
45 version,
46 types,
47 tokens,
48 })
49 }
50
/// Data extracted from syn source
pub struct AstItem {
    /// The item re-parsed as a derive-style input (a struct or enum body).
    ast: DeriveInput,
    /// `#[cfg(...)]` attributes gating this item in the syn crate.
    features: Vec<Attribute>,
}
56
introspect_item(item: &AstItem, items: &ItemLookup, tokens: &TokenLookup) -> types::Node57 fn introspect_item(item: &AstItem, items: &ItemLookup, tokens: &TokenLookup) -> types::Node {
58 let features = introspect_features(&item.features);
59
60 match &item.ast.data {
61 Data::Enum(ref data) => types::Node {
62 ident: item.ast.ident.to_string(),
63 features,
64 data: types::Data::Enum(introspect_enum(data, items, tokens)),
65 exhaustive: !data.variants.iter().any(|v| is_doc_hidden(&v.attrs)),
66 },
67 Data::Struct(ref data) => types::Node {
68 ident: item.ast.ident.to_string(),
69 features,
70 data: {
71 if data.fields.iter().all(|f| is_pub(&f.vis)) {
72 types::Data::Struct(introspect_struct(data, items, tokens))
73 } else {
74 types::Data::Private
75 }
76 },
77 exhaustive: true,
78 },
79 Data::Union(..) => panic!("Union not supported"),
80 }
81 }
82
introspect_enum(item: &DataEnum, items: &ItemLookup, tokens: &TokenLookup) -> types::Variants83 fn introspect_enum(item: &DataEnum, items: &ItemLookup, tokens: &TokenLookup) -> types::Variants {
84 item.variants
85 .iter()
86 .filter_map(|variant| {
87 if is_doc_hidden(&variant.attrs) {
88 return None;
89 }
90 let fields = match &variant.fields {
91 Fields::Unnamed(fields) => fields
92 .unnamed
93 .iter()
94 .map(|field| introspect_type(&field.ty, items, tokens))
95 .collect(),
96 Fields::Unit => vec![],
97 _ => panic!("Enum representation not supported"),
98 };
99 Some((variant.ident.to_string(), fields))
100 })
101 .collect()
102 }
103
introspect_struct(item: &DataStruct, items: &ItemLookup, tokens: &TokenLookup) -> types::Fields104 fn introspect_struct(item: &DataStruct, items: &ItemLookup, tokens: &TokenLookup) -> types::Fields {
105 match &item.fields {
106 Fields::Named(fields) => fields
107 .named
108 .iter()
109 .map(|field| {
110 (
111 field.ident.as_ref().unwrap().to_string(),
112 introspect_type(&field.ty, items, tokens),
113 )
114 })
115 .collect(),
116 Fields::Unit => IndexMap::new(),
117 _ => panic!("Struct representation not supported"),
118 }
119 }
120
introspect_type(item: &syn::Type, items: &ItemLookup, tokens: &TokenLookup) -> types::Type121 fn introspect_type(item: &syn::Type, items: &ItemLookup, tokens: &TokenLookup) -> types::Type {
122 match item {
123 syn::Type::Path(TypePath {
124 qself: None,
125 ref path,
126 }) => {
127 let last = path.segments.last().unwrap();
128 let string = last.ident.to_string();
129
130 match string.as_str() {
131 "Option" => {
132 let nested = introspect_type(first_arg(&last.arguments), items, tokens);
133 types::Type::Option(Box::new(nested))
134 }
135 "Punctuated" => {
136 let nested = introspect_type(first_arg(&last.arguments), items, tokens);
137 let punct = match introspect_type(last_arg(&last.arguments), items, tokens) {
138 types::Type::Token(s) => s,
139 _ => panic!(),
140 };
141
142 types::Type::Punctuated(types::Punctuated {
143 element: Box::new(nested),
144 punct,
145 })
146 }
147 "Vec" => {
148 let nested = introspect_type(first_arg(&last.arguments), items, tokens);
149 types::Type::Vec(Box::new(nested))
150 }
151 "Box" => {
152 let nested = introspect_type(first_arg(&last.arguments), items, tokens);
153 types::Type::Box(Box::new(nested))
154 }
155 "Brace" | "Bracket" | "Paren" | "Group" => types::Type::Group(string),
156 "TokenStream" | "Literal" | "Ident" | "Span" => types::Type::Ext(string),
157 "String" | "u32" | "usize" | "bool" => types::Type::Std(string),
158 "Await" => types::Type::Token("Await".to_string()),
159 _ => {
160 if items.get(&last.ident).is_some() || last.ident == "Reserved" {
161 types::Type::Syn(string)
162 } else {
163 unimplemented!("{}", string);
164 }
165 }
166 }
167 }
168 syn::Type::Tuple(TypeTuple { ref elems, .. }) => {
169 let tys = elems
170 .iter()
171 .map(|ty| introspect_type(&ty, items, tokens))
172 .collect();
173 types::Type::Tuple(tys)
174 }
175 syn::Type::Macro(TypeMacro { ref mac })
176 if mac.path.segments.last().unwrap().ident == "Token" =>
177 {
178 let content = mac.tokens.to_string();
179 let ty = tokens.get(&content).unwrap().to_string();
180
181 types::Type::Token(ty)
182 }
183 _ => panic!("{}", quote!(#item).to_string()),
184 }
185 }
186
introspect_features(attrs: &[Attribute]) -> types::Features187 fn introspect_features(attrs: &[Attribute]) -> types::Features {
188 let mut ret = types::Features::default();
189
190 for attr in attrs {
191 if !attr.path.is_ident("cfg") {
192 continue;
193 }
194
195 let features = parsing::parse_features.parse2(attr.tokens.clone()).unwrap();
196
197 if ret.any.is_empty() {
198 ret = features;
199 } else if ret.any.len() < features.any.len() {
200 assert!(ret.any.iter().all(|f| features.any.contains(f)));
201 } else {
202 assert!(features.any.iter().all(|f| ret.any.contains(f)));
203 ret = features;
204 }
205 }
206
207 ret
208 }
209
is_pub(vis: &Visibility) -> bool210 fn is_pub(vis: &Visibility) -> bool {
211 match vis {
212 Visibility::Public(_) => true,
213 _ => false,
214 }
215 }
216
is_doc_hidden(attrs: &[Attribute]) -> bool217 fn is_doc_hidden(attrs: &[Attribute]) -> bool {
218 for attr in attrs {
219 if attr.path.is_ident("doc") {
220 if parsing::parse_doc_hidden_attr
221 .parse2(attr.tokens.clone())
222 .is_ok()
223 {
224 return true;
225 }
226 }
227 }
228 false
229 }
230
first_arg(params: &PathArguments) -> &syn::Type231 fn first_arg(params: &PathArguments) -> &syn::Type {
232 let data = match *params {
233 PathArguments::AngleBracketed(ref data) => data,
234 _ => panic!("Expected at least 1 type argument here"),
235 };
236
237 match *data
238 .args
239 .first()
240 .expect("Expected at least 1 type argument here")
241 {
242 GenericArgument::Type(ref ty) => ty,
243 _ => panic!("Expected at least 1 type argument here"),
244 }
245 }
246
last_arg(params: &PathArguments) -> &syn::Type247 fn last_arg(params: &PathArguments) -> &syn::Type {
248 let data = match *params {
249 PathArguments::AngleBracketed(ref data) => data,
250 _ => panic!("Expected at least 1 type argument here"),
251 };
252
253 match *data
254 .args
255 .last()
256 .expect("Expected at least 1 type argument here")
257 {
258 GenericArgument::Type(ref ty) => ty,
259 _ => panic!("Expected at least 1 type argument here"),
260 }
261 }
262
// Recursive-descent parsers for the bodies of syn's internal macros
// (ast_struct!, ast_enum!, ast_enum_of_structs!, the Token! table) and for
// the handful of attributes the codegen cares about. Each function consumes
// its ParseStream in strict source order.
mod parsing {
    use super::{AstItem, TokenLookup};
    use proc_macro2::{TokenStream, TokenTree};
    use quote::quote;
    use std::collections::{BTreeMap, BTreeSet};
    use syn::parse::{ParseStream, Parser, Result};
    use syn::{
        braced, bracketed, parenthesized, parse_quote, token, Attribute, Ident, LitStr, Path, Token,
    };
    use syn_codegen as types;

    // Looks ahead (without consuming) for a `#tag` marker at the head of input.
    fn peek_tag(input: ParseStream, tag: &str) -> bool {
        let ahead = input.fork();
        ahead.parse::<Token![#]>().is_ok()
            && ahead
                .parse::<Ident>()
                .map(|ident| ident == tag)
                .unwrap_or(false)
    }

    // Parses #full - returns #[cfg(feature = "full")] if it is present, and
    // nothing otherwise.
    fn full(input: ParseStream) -> Vec<Attribute> {
        if peek_tag(input, "full") {
            input.parse::<Token![#]>().unwrap();
            input.parse::<Ident>().unwrap();
            vec![parse_quote!(#[cfg(feature = "full")])]
        } else {
            vec![]
        }
    }

    // Parses a simple AstStruct without the `pub struct` prefix.
    fn ast_struct_inner(input: ParseStream) -> Result<AstItem> {
        let ident: Ident = input.parse()?;
        let features = full(input);
        let rest: TokenStream = input.parse()?;
        Ok(AstItem {
            // Reassemble and re-parse the remainder as a derive input.
            ast: syn::parse2(quote! {
                pub struct #ident #rest
            })?,
            features,
        })
    }

    // Parses the body of an ast_struct! macro invocation.
    pub fn ast_struct(input: ParseStream) -> Result<AstItem> {
        input.call(Attribute::parse_outer)?;
        input.parse::<Token![pub]>()?;
        input.parse::<Token![struct]>()?;
        let res = input.call(ast_struct_inner)?;
        Ok(res)
    }

    // Parses and consumes a `#no_visit` marker if present.
    fn no_visit(input: ParseStream) -> bool {
        if peek_tag(input, "no_visit") {
            input.parse::<Token![#]>().unwrap();
            input.parse::<Ident>().unwrap();
            true
        } else {
            false
        }
    }

    // Parses the body of an ast_enum! macro invocation. Returns None for
    // enums tagged #no_visit, which are excluded from the definitions.
    pub fn ast_enum(input: ParseStream) -> Result<Option<AstItem>> {
        input.call(Attribute::parse_outer)?;
        input.parse::<Token![pub]>()?;
        input.parse::<Token![enum]>()?;
        let ident: Ident = input.parse()?;
        let no_visit = no_visit(input);
        let rest: TokenStream = input.parse()?;
        Ok(if no_visit {
            None
        } else {
            Some(AstItem {
                ast: syn::parse2(quote! {
                    pub enum #ident #rest
                })?,
                features: vec![],
            })
        })
    }

    // A single variant of an ast_enum_of_structs!
    struct EosVariant {
        attrs: Vec<Attribute>,
        name: Ident,
        member: Option<Path>,
    }
    // Parses one `Variant(path::To::Member),` entry; the parenthesized
    // member path is optional.
    fn eos_variant(input: ParseStream) -> Result<EosVariant> {
        let attrs = input.call(Attribute::parse_outer)?;
        let variant: Ident = input.parse()?;
        let member = if input.peek(token::Paren) {
            let content;
            parenthesized!(content in input);
            let path: Path = content.parse()?;
            Some(path)
        } else {
            None
        };
        input.parse::<Token![,]>()?;
        Ok(EosVariant {
            attrs,
            name: variant,
            member,
        })
    }

    // Parses the body of an ast_enum_of_structs! macro invocation and
    // reassembles its variants into a plain enum definition.
    pub fn ast_enum_of_structs(input: ParseStream) -> Result<AstItem> {
        input.call(Attribute::parse_outer)?;
        input.parse::<Token![pub]>()?;
        input.parse::<Token![enum]>()?;
        let ident: Ident = input.parse()?;

        let content;
        braced!(content in input);
        let mut variants = Vec::new();
        while !content.is_empty() {
            variants.push(content.call(eos_variant)?);
        }

        // An optional trailing marker ident; only this one value is allowed.
        if let Some(ident) = input.parse::<Option<Ident>>()? {
            assert_eq!(ident, "do_not_generate_to_tokens");
        }

        let enum_item = {
            let variants = variants.iter().map(|v| {
                let attrs = &v.attrs;
                let name = &v.name;
                match v.member {
                    Some(ref member) => quote!(#(#attrs)* #name(#member)),
                    None => quote!(#(#attrs)* #name),
                }
            });
            parse_quote! {
                pub enum #ident {
                    #(#variants),*
                }
            }
        };
        Ok(AstItem {
            ast: enum_item,
            features: vec![],
        })
    }

    // Custom keywords recognized while parsing the token macro and doc attrs.
    mod kw {
        syn::custom_keyword!(hidden);
        syn::custom_keyword!(macro_rules);
        syn::custom_keyword!(Token);
    }

    // Parses the body of the export_token_macro invocation in src/token.rs
    // and builds a map from a token's source text to the final path segment
    // of its expansion (the token type name).
    pub fn parse_token_macro(input: ParseStream) -> Result<TokenLookup> {
        // Skip the macro's leading argument and the `=>` separator.
        input.parse::<TokenTree>()?;
        input.parse::<Token![=>]>()?;

        let definition;
        braced!(definition in input);
        definition.call(Attribute::parse_outer)?;
        definition.parse::<kw::macro_rules>()?;
        definition.parse::<Token![!]>()?;
        definition.parse::<kw::Token>()?;

        let rules;
        braced!(rules in definition);
        input.parse::<Token![;]>()?;

        let mut tokens = BTreeMap::new();
        while !rules.is_empty() {
            if rules.peek(Token![$]) {
                // A rule starting with `$` is the special `await` rule.
                rules.parse::<Token![$]>()?;
                rules.parse::<TokenTree>()?;
                rules.parse::<Token![*]>()?;
                tokens.insert("await".to_owned(), "Await".to_owned());
            } else {
                // Ordinary rule: `[token text] => { $path::To::Type };`
                let pattern;
                bracketed!(pattern in rules);
                let token = pattern.parse::<TokenStream>()?.to_string();
                rules.parse::<Token![=>]>()?;
                let expansion;
                braced!(expansion in rules);
                rules.parse::<Token![;]>()?;
                expansion.parse::<Token![$]>()?;
                let path: Path = expansion.parse()?;
                let ty = path.segments.last().unwrap().ident.to_string();
                tokens.insert(token, ty.to_string());
            }
        }
        Ok(tokens)
    }

    // Parses one `feature = "..."` pair and returns the feature name.
    fn parse_feature(input: ParseStream) -> Result<String> {
        let i: Ident = input.parse()?;
        assert_eq!(i, "feature");

        input.parse::<Token![=]>()?;
        let s = input.parse::<LitStr>()?;

        Ok(s.value())
    }

    // Parses the argument tokens of a #[cfg(...)] attribute into a feature
    // set. Accepts `(feature = "...")` or `(any(feature = "...", ...))`.
    pub fn parse_features(input: ParseStream) -> Result<types::Features> {
        let mut features = BTreeSet::new();

        let level_1;
        parenthesized!(level_1 in input);

        // Peek at the first ident to decide which form this is.
        let i: Ident = level_1.fork().parse()?;

        if i == "any" {
            level_1.parse::<Ident>()?;

            let level_2;
            parenthesized!(level_2 in level_1);

            while !level_2.is_empty() {
                features.insert(parse_feature(&level_2)?);

                if !level_2.is_empty() {
                    level_2.parse::<Token![,]>()?;
                }
            }
        } else if i == "feature" {
            features.insert(parse_feature(&level_1)?);
            assert!(level_1.is_empty());
        } else {
            panic!("{:?}", i);
        }

        assert!(input.is_empty());

        Ok(types::Features { any: features })
    }

    // Extracts the string value of a #[path = "..."] attribute, if present.
    pub fn path_attr(attrs: &[Attribute]) -> Result<Option<LitStr>> {
        for attr in attrs {
            if attr.path.is_ident("path") {
                fn parser(input: ParseStream) -> Result<LitStr> {
                    input.parse::<Token![=]>()?;
                    input.parse()
                }
                let filename = parser.parse2(attr.tokens.clone())?;
                return Ok(Some(filename));
            }
        }
        Ok(None)
    }

    // Succeeds when the attribute body parses as `(hidden)`, i.e. the
    // attribute was #[doc(hidden)].
    pub fn parse_doc_hidden_attr(input: ParseStream) -> Result<()> {
        let content;
        parenthesized!(content in input);
        content.parse::<kw::hidden>()?;
        Ok(())
    }
}
517
clone_features(features: &[Attribute]) -> Vec<Attribute>518 fn clone_features(features: &[Attribute]) -> Vec<Attribute> {
519 features.iter().map(|attr| parse_quote!(#attr)).collect()
520 }
521
get_features(attrs: &[Attribute], base: &[Attribute]) -> Vec<Attribute>522 fn get_features(attrs: &[Attribute], base: &[Attribute]) -> Vec<Attribute> {
523 let mut ret = clone_features(base);
524
525 for attr in attrs {
526 if attr.path.is_ident("cfg") {
527 ret.push(parse_quote!(#attr));
528 }
529 }
530
531 ret
532 }
533
/// Error that pins a syn parse failure to a file, line, and column.
#[derive(Error, Debug)]
#[error("{path}:{line}:{column}: {error}")]
struct LoadFileError {
    /// Path of the source file that failed to load.
    path: PathBuf,
    /// Line of the parse error, taken from the error's span.
    line: usize,
    /// Column of the parse error (stored already shifted to 1-based).
    column: usize,
    /// The underlying syn parse error.
    error: Error,
}
542
load_file<P: AsRef<Path>>( name: P, features: &[Attribute], lookup: &mut ItemLookup, ) -> Result<()>543 fn load_file<P: AsRef<Path>>(
544 name: P,
545 features: &[Attribute],
546 lookup: &mut ItemLookup,
547 ) -> Result<()> {
548 let error = match do_load_file(&name, features, lookup).err() {
549 None => return Ok(()),
550 Some(error) => error,
551 };
552
553 let error = error.downcast::<Error>()?;
554 let span = error.span().start();
555
556 bail!(LoadFileError {
557 path: name.as_ref().to_owned(),
558 line: span.line,
559 column: span.column + 1,
560 error,
561 })
562 }
563
// Walk one source file: recurse into its (non-generated, non-inline)
// submodules, collect every ast_struct!/ast_enum!/ast_enum_of_structs!
// invocation into `lookup`, and pick up the few plain structs listed in
// EXTRA_TYPES. `features` is the #[cfg(...)] set inherited from parent
// modules and is threaded down through the recursion.
fn do_load_file<P: AsRef<Path>>(
    name: P,
    features: &[Attribute],
    lookup: &mut ItemLookup,
) -> Result<()> {
    let name = name.as_ref();
    let parent = name.parent().expect("no parent path");

    // Parse the file
    let src = fs::read_to_string(name)?;
    let file = syn::parse_file(&src)?;

    // Collect all of the interesting AstItems declared in this file or submodules.
    'items: for item in file.items {
        match item {
            Item::Mod(item) => {
                // Don't inspect inline modules.
                if item.content.is_some() {
                    continue;
                }

                // We don't want to try to load the generated rust files and
                // parse them, so we ignore them here.
                for name in IGNORED_MODS {
                    if item.ident == name {
                        continue 'items;
                    }
                }

                // Lookup any #[cfg()] attributes on the module and add them to
                // the feature set.
                //
                // The derive module is weird because it is built with either
                // `full` or `derive` but exported only under `derive`.
                let features = if item.ident == "derive" {
                    vec![parse_quote!(#[cfg(feature = "derive")])]
                } else {
                    get_features(&item.attrs, features)
                };

                // Look up the submodule file, and recursively parse it.
                // Only handles same-directory .rs file submodules for now.
                let filename = match parsing::path_attr(&item.attrs)? {
                    Some(filename) => filename.value(),
                    None => format!("{}.rs", item.ident),
                };
                let path = parent.join(filename);
                load_file(path, &features, lookup)?;
            }
            Item::Macro(item) => {
                // Lookup any #[cfg()] attributes directly on the macro
                // invocation, and add them to the feature set.
                let features = get_features(&item.attrs, features);

                // Try to parse the AstItem declaration out of the item.
                let tts = item.mac.tokens.clone();
                let found = if item.mac.path.is_ident("ast_struct") {
                    Some(parsing::ast_struct.parse2(tts)?)
                } else if item.mac.path.is_ident("ast_enum") {
                    parsing::ast_enum.parse2(tts)?
                } else if item.mac.path.is_ident("ast_enum_of_structs") {
                    Some(parsing::ast_enum_of_structs.parse2(tts)?)
                } else {
                    continue;
                };

                // Record our features on the parsed AstItems.
                if let Some(mut item) = found {
                    // `Reserved` is a placeholder type, not part of the API.
                    if item.ast.ident != "Reserved" {
                        item.features.extend(clone_features(&features));
                        lookup.insert(item.ast.ident.clone(), item);
                    }
                }
            }
            Item::Struct(item) => {
                // Plain structs are only collected when whitelisted.
                let ident = item.ident;
                if EXTRA_TYPES.contains(&&ident.to_string()[..]) {
                    lookup.insert(
                        ident.clone(),
                        AstItem {
                            ast: DeriveInput {
                                ident,
                                vis: item.vis,
                                attrs: item.attrs,
                                generics: item.generics,
                                data: Data::Struct(DataStruct {
                                    fields: item.fields,
                                    struct_token: item.struct_token,
                                    semi_token: item.semi_token,
                                }),
                            },
                            features: clone_features(features),
                        },
                    );
                }
            }
            _ => {}
        }
    }
    Ok(())
}
665
load_token_file<P: AsRef<Path>>(name: P) -> Result<TokenLookup>666 fn load_token_file<P: AsRef<Path>>(name: P) -> Result<TokenLookup> {
667 let name = name.as_ref();
668 let src = fs::read_to_string(name)?;
669 let file = syn::parse_file(&src)?;
670 for item in file.items {
671 if let Item::Macro(item) = item {
672 match item.ident {
673 Some(ref i) if i == "export_token_macro" => {}
674 _ => continue,
675 }
676 let tokens = item.mac.parse_body_with(parsing::parse_token_macro)?;
677 return Ok(tokens);
678 }
679 }
680
681 panic!("failed to parse Token macro")
682 }
683