use codespan_reporting::diagnostic;
use codespan_reporting::files;
use serde::Serialize;
use std::fmt;
use std::ops;

/// File identifier.
/// References a source file in the source database.
pub type FileId = usize;

/// Source database.
/// Stores the source file contents for reference.
pub type SourceDatabase = files::SimpleFiles<String, String>;

#[derive(Debug, Copy, Clone, Serialize, PartialEq, Eq, PartialOrd, Ord)]
pub struct SourceLocation {
    /// Byte offset into the file (counted from zero).
    pub offset: usize,
    /// Line number (counted from zero).
    pub line: usize,
    /// Column number (counted from zero).
    pub column: usize,
}

#[derive(Debug, Clone, Serialize)]
pub struct SourceRange {
    pub file: FileId,
    pub start: SourceLocation,
    pub end: SourceLocation,
}

#[derive(Debug, Serialize)]
#[serde(tag = "kind", rename = "comment")]
pub struct Comment {
    pub loc: SourceRange,
    pub text: String,
}

#[derive(Debug, Serialize)]
#[serde(rename_all = "snake_case")]
pub enum EndiannessValue {
    LittleEndian,
    BigEndian,
}

#[derive(Debug, Serialize)]
#[serde(tag = "kind", rename = "endianness_declaration")]
pub struct Endianness {
    pub loc: SourceRange,
    pub value: EndiannessValue,
}

#[derive(Debug, Serialize)]
#[serde(tag = "kind")]
pub enum Expr {
    #[serde(rename = "identifier")]
    Identifier { loc: SourceRange, name: String },
    #[serde(rename = "integer")]
    Integer { loc: SourceRange, value: usize },
    #[serde(rename = "unary_expr")]
    Unary { loc: SourceRange, op: String, operand: Box<Expr> },
    #[serde(rename = "binary_expr")]
    Binary { loc: SourceRange, op: String, operands: Box<(Expr, Expr)> },
}

#[derive(Debug, Serialize)]
#[serde(tag = "kind", rename = "tag")]
pub struct Tag {
    pub id: String,
    pub loc: SourceRange,
    pub value: usize,
}

#[derive(Debug, Serialize)]
#[serde(tag = "kind", rename = "constraint")]
pub struct Constraint {
    pub id: String,
    pub loc: SourceRange,
    pub value: Expr,
}

#[derive(Debug, Serialize)]
#[serde(tag = "kind")]
pub enum Field {
    #[serde(rename = "checksum_field")]
    Checksum { loc: SourceRange, field_id: String },
    #[serde(rename = "padding_field")]
    Padding { loc: SourceRange, width: usize },
    #[serde(rename = "size_field")]
    Size { loc: SourceRange, field_id: String, width: usize },
    #[serde(rename = "count_field")]
    Count { loc: SourceRange, field_id: String, width: usize },
    #[serde(rename = "body_field")]
    Body { loc: SourceRange },
    #[serde(rename = "payload_field")]
    Payload { loc: SourceRange, size_modifier: Option<String> },
    #[serde(rename = "fixed_field")]
    Fixed {
        loc: SourceRange,
        width: Option<usize>,
        value: Option<usize>,
        enum_id: Option<String>,
        tag_id: Option<String>,
    },
    #[serde(rename = "reserved_field")]
    Reserved { loc: SourceRange, width: usize },
    #[serde(rename = "array_field")]
    Array {
        loc: SourceRange,
        id: String,
        width: Option<usize>,
        type_id: Option<String>,
        size_modifier: Option<String>,
        size: Option<usize>,
    },
    #[serde(rename = "scalar_field")]
    Scalar { loc: SourceRange, id: String, width: usize },
    #[serde(rename = "typedef_field")]
    Typedef { loc: SourceRange, id: String, type_id: String },
    #[serde(rename = "group_field")]
    Group { loc: SourceRange, group_id: String, constraints: Vec<Constraint> },
}
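// A minimal sketch (an illustrative addition, not part of the original file)
// of how the serde attributes above shape the serialized output: internally
// tagged types gain a "kind" discriminant next to their own fields. The module
// name and sample values are made up for the example, and it assumes
// `serde_json` is available as a dev-dependency, which this file does not
// establish on its own.
#[cfg(test)]
mod serialization_examples {
    use super::*;

    #[test]
    fn field_serializes_with_kind_tag() {
        let loc = SourceRange {
            file: 0,
            start: SourceLocation { offset: 0, line: 0, column: 0 },
            end: SourceLocation { offset: 6, line: 0, column: 6 },
        };
        let field = Field::Scalar { loc, id: "opcode".to_owned(), width: 8 };
        let json = serde_json::to_value(&field).unwrap();

        // `#[serde(tag = "kind")]` plus the variant rename surface as a
        // "kind" field alongside the variant's own fields.
        assert_eq!(json["kind"], "scalar_field");
        assert_eq!(json["id"], "opcode");
        assert_eq!(json["width"], 8);
    }
}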
#[derive(Debug, Serialize)]
#[serde(tag = "kind", rename = "test_case")]
pub struct TestCase {
    pub loc: SourceRange,
    pub input: String,
}

#[derive(Debug, Serialize)]
#[serde(tag = "kind")]
pub enum Decl {
    #[serde(rename = "checksum_declaration")]
    Checksum { id: String, loc: SourceRange, function: String, width: usize },
    #[serde(rename = "custom_field_declaration")]
    CustomField { id: String, loc: SourceRange, width: Option<usize>, function: String },
    #[serde(rename = "enum_declaration")]
    Enum { id: String, loc: SourceRange, tags: Vec<Tag>, width: usize },
    #[serde(rename = "packet_declaration")]
    Packet {
        id: String,
        loc: SourceRange,
        constraints: Vec<Constraint>,
        fields: Vec<Field>,
        parent_id: Option<String>,
    },
    #[serde(rename = "struct_declaration")]
    Struct {
        id: String,
        loc: SourceRange,
        constraints: Vec<Constraint>,
        fields: Vec<Field>,
        parent_id: Option<String>,
    },
    #[serde(rename = "group_declaration")]
    Group { id: String, loc: SourceRange, fields: Vec<Field> },
    #[serde(rename = "test_declaration")]
    Test { loc: SourceRange, type_id: String, test_cases: Vec<TestCase> },
}

#[derive(Debug, Serialize)]
pub struct Grammar {
    pub version: String,
    pub file: FileId,
    pub comments: Vec<Comment>,
    pub endianness: Option<Endianness>,
    pub declarations: Vec<Decl>,
}

impl SourceLocation {
    /// Construct a new source location.
    ///
    /// The `line_starts` slice contains the byte offsets at which new lines
    /// start in the file. The first element should thus be `0`, since
    /// every file has at least one line starting at offset `0`.
    pub fn new(offset: usize, line_starts: &[usize]) -> SourceLocation {
        let mut loc = SourceLocation { offset, line: 0, column: offset };
        for (line, start) in line_starts.iter().enumerate() {
            if *start > offset {
                break;
            }
            loc = SourceLocation { offset, line, column: offset - start };
        }
        loc
    }
}

impl SourceRange {
    pub fn primary(&self) -> diagnostic::Label<FileId> {
        diagnostic::Label::primary(self.file, self.start.offset..self.end.offset)
    }

    pub fn secondary(&self) -> diagnostic::Label<FileId> {
        diagnostic::Label::secondary(self.file, self.start.offset..self.end.offset)
    }
}

impl fmt::Display for SourceRange {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        if self.start.line == self.end.line {
            write!(f, "{}:{}-{}", self.start.line, self.start.column, self.end.column)
        } else {
            write!(
                f,
                "{}:{}-{}:{}",
                self.start.line, self.start.column, self.end.line, self.end.column
            )
        }
    }
}

impl ops::Add<SourceRange> for SourceRange {
    type Output = SourceRange;

    fn add(self, rhs: SourceRange) -> SourceRange {
        assert!(self.file == rhs.file);
        SourceRange {
            file: self.file,
            start: self.start.min(rhs.start),
            end: self.end.max(rhs.end),
        }
    }
}

impl Grammar {
    pub fn new(file: FileId) -> Grammar {
        Grammar {
            version: "1,0".to_owned(),
            comments: vec![],
            endianness: None,
            declarations: vec![],
            file,
        }
    }
}
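// A minimal sketch (an illustrative addition, not part of the original file)
// of how `SourceLocation::new`, the `Display` impl, and `SourceRange::primary`
// fit together when reporting a diagnostic with `codespan_reporting`. The
// module name, file name, and source text are made up for the example.
#[cfg(test)]
mod diagnostic_examples {
    use super::*;

    #[test]
    fn source_range_feeds_a_diagnostic_label() {
        // Register a small source file to obtain a valid FileId.
        let mut db = SourceDatabase::new();
        let file = db.add("example.pdl".to_owned(), "little_endian_packets\n".to_owned());

        // A single line, so the only line start is at offset 0.
        let line_starts = &[0];
        let range = SourceRange {
            file,
            start: SourceLocation::new(0, line_starts),
            end: SourceLocation::new(21, line_starts),
        };
        assert_eq!(range.to_string(), "0:0-21");

        // The primary label spans the byte range of the declaration and can
        // be attached directly to a diagnostic.
        let _diagnostic = diagnostic::Diagnostic::error()
            .with_message("example diagnostic")
            .with_labels(vec![range.primary()]);
    }
}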
impl Decl {
    pub fn loc(&self) -> &SourceRange {
        match self {
            Decl::Checksum { loc, .. }
            | Decl::CustomField { loc, .. }
            | Decl::Enum { loc, .. }
            | Decl::Packet { loc, .. }
            | Decl::Struct { loc, .. }
            | Decl::Group { loc, .. }
            | Decl::Test { loc, .. } => loc,
        }
    }

    pub fn id(&self) -> Option<&String> {
        match self {
            Decl::Test { .. } => None,
            Decl::Checksum { id, .. }
            | Decl::CustomField { id, .. }
            | Decl::Enum { id, .. }
            | Decl::Packet { id, .. }
            | Decl::Struct { id, .. }
            | Decl::Group { id, .. } => Some(id),
        }
    }
}

impl Field {
    pub fn loc(&self) -> &SourceRange {
        match self {
            Field::Checksum { loc, .. }
            | Field::Padding { loc, .. }
            | Field::Size { loc, .. }
            | Field::Count { loc, .. }
            | Field::Body { loc, .. }
            | Field::Payload { loc, .. }
            | Field::Fixed { loc, .. }
            | Field::Reserved { loc, .. }
            | Field::Array { loc, .. }
            | Field::Scalar { loc, .. }
            | Field::Typedef { loc, .. }
            | Field::Group { loc, .. } => loc,
        }
    }

    pub fn id(&self) -> Option<&String> {
        match self {
            Field::Checksum { .. }
            | Field::Padding { .. }
            | Field::Size { .. }
            | Field::Count { .. }
            | Field::Body { .. }
            | Field::Payload { .. }
            | Field::Fixed { .. }
            | Field::Reserved { .. }
            | Field::Group { .. } => None,
            Field::Array { id, .. } | Field::Scalar { id, .. } | Field::Typedef { id, .. } => {
                Some(id)
            }
        }
    }
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn source_location_new() {
        let line_starts = &[0, 20, 80, 120, 150];
        assert_eq!(
            SourceLocation::new(0, line_starts),
            SourceLocation { offset: 0, line: 0, column: 0 }
        );
        assert_eq!(
            SourceLocation::new(10, line_starts),
            SourceLocation { offset: 10, line: 0, column: 10 }
        );
        assert_eq!(
            SourceLocation::new(50, line_starts),
            SourceLocation { offset: 50, line: 1, column: 30 }
        );
        assert_eq!(
            SourceLocation::new(100, line_starts),
            SourceLocation { offset: 100, line: 2, column: 20 }
        );
        assert_eq!(
            SourceLocation::new(1000, line_starts),
            SourceLocation { offset: 1000, line: 4, column: 850 }
        );
    }

    #[test]
    fn source_location_new_no_crash_with_empty_line_starts() {
        let loc = SourceLocation::new(100, &[]);
        assert_eq!(loc, SourceLocation { offset: 100, line: 0, column: 100 });
    }
}
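// A minimal sketch (an illustrative addition, not part of the original file)
// of how the `Decl::id` accessor supports collecting declared names from a
// `Grammar`. The module name and sample declarations are made up; the
// `dummy_loc` helper is hypothetical and exists only for this example.
#[cfg(test)]
mod accessor_examples {
    use super::*;

    fn dummy_loc() -> SourceRange {
        let loc = SourceLocation { offset: 0, line: 0, column: 0 };
        SourceRange { file: 0, start: loc, end: loc }
    }

    #[test]
    fn collect_declaration_ids() {
        let mut grammar = Grammar::new(0);
        grammar.declarations.push(Decl::Enum {
            id: "Opcode".to_owned(),
            loc: dummy_loc(),
            tags: vec![],
            width: 8,
        });
        grammar.declarations.push(Decl::Test {
            loc: dummy_loc(),
            type_id: "Packet".to_owned(),
            test_cases: vec![],
        });

        // Test declarations are anonymous, so only "Opcode" is collected.
        let ids: Vec<&str> =
            grammar.declarations.iter().filter_map(Decl::id).map(String::as_str).collect();
        assert_eq!(ids, ["Opcode"]);
    }
}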