// Copyright 2019 The Chromium OS Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

// https://android.googlesource.com/platform/system/core/+/7b444f0/libsparse/sparse_format.h

use std::collections::BTreeMap;
use std::fmt::{self, Display};
use std::fs::File;
use std::io::{self, ErrorKind, Read, Seek, SeekFrom};
use std::mem;

use crate::DiskGetLen;
use base::{
    AsRawDescriptor, FileAllocate, FileReadWriteAtVolatile, FileSetLen, FileSync, PunchHole,
    RawDescriptor, WriteZeroesAt,
};
use data_model::{DataInit, Le16, Le32, VolatileSlice};
use remain::sorted;

#[sorted]
#[derive(Debug)]
pub enum Error {
    InvalidMagicHeader,
    InvalidSpecification(String),
    ReadSpecificationError(io::Error),
}

impl Display for Error {
    #[remain::check]
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        use self::Error::*;

        #[sorted]
        match self {
            InvalidMagicHeader => write!(f, "invalid magic header for android sparse format"),
            InvalidSpecification(s) => write!(f, "invalid specification: \"{}\"", s),
            ReadSpecificationError(e) => write!(f, "failed to read specification: \"{}\"", e),
        }
    }
}

pub type Result<T> = std::result::Result<T, Error>;

pub const SPARSE_HEADER_MAGIC: u32 = 0xed26ff3a;
const MAJOR_VERSION: u16 = 1;

#[repr(C)]
#[derive(Clone, Copy, Debug)]
struct SparseHeader {
    magic: Le32,          /* SPARSE_HEADER_MAGIC */
    major_version: Le16,  /* (0x1) - reject images with higher major versions */
    minor_version: Le16,  /* (0x0) - allow images with higher minor versions */
    file_hdr_sz: Le16,    /* 28 bytes for first revision of the file format */
    chunk_hdr_size: Le16, /* 12 bytes for first revision of the file format */
    blk_sz: Le32,         /* block size in bytes, must be a multiple of 4 (4096) */
    total_blks: Le32,     /* total blocks in the non-sparse output image */
    total_chunks: Le32,   /* total chunks in the sparse input image */
    image_checksum: Le32, /* CRC32 checksum of the original data, counting "don't care" */
                          /* as 0. Standard 802.3 polynomial, use a Public Domain */
                          /* table implementation */
}

unsafe impl DataInit for SparseHeader {}

const CHUNK_TYPE_RAW: u16 = 0xCAC1;
const CHUNK_TYPE_FILL: u16 = 0xCAC2;
const CHUNK_TYPE_DONT_CARE: u16 = 0xCAC3;
const CHUNK_TYPE_CRC32: u16 = 0xCAC4;

#[repr(C)]
#[derive(Clone, Copy, Debug)]
struct ChunkHeader {
    chunk_type: Le16, /* 0xCAC1 -> raw; 0xCAC2 -> fill; 0xCAC3 -> don't care */
    reserved1: u16,
    chunk_sz: Le32,   /* in blocks in output image */
    total_sz: Le32,   /* in bytes of chunk input file including chunk header and data */
}

unsafe impl DataInit for ChunkHeader {}

#[derive(Clone, Debug, PartialEq, Eq)]
enum Chunk {
    Raw(u64), // Offset into the file
    Fill(Vec<u8>),
    DontCare,
}

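/// A parsed chunk together with the number of bytes it occupies in the expanded
/// (non-sparse) output image: `chunk_sz` blocks times the image block size.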
#[derive(Clone, Debug, PartialEq, Eq)]
struct ChunkWithSize {
    chunk: Chunk,
    expanded_size: u64,
}

/* Following a Raw or Fill or CRC32 chunk is data.
 * For a Raw chunk, it's the data in chunk_sz * blk_sz.
 * For a Fill chunk, it's 4 bytes of the fill data.
 * For a CRC32 chunk, it's 4 bytes of CRC32
 */
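/// A read-only disk image in the Android sparse format. Chunks are stored in a
/// map keyed by their starting offset in the expanded image, and their data is
/// materialized lazily at read time.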
#[derive(Debug)]
pub struct AndroidSparse {
    file: File,
    total_size: u64,
    chunks: BTreeMap<u64, ChunkWithSize>,
}

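/// Reads one chunk header (and any inline chunk data) from `input` and turns it
/// into a `ChunkWithSize`. Raw chunk data is skipped over and recorded by file
/// offset; fill data is read into memory. CRC32 chunks are not validated and
/// yield `Ok(None)`.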
fn parse_chunk<T: Read + Seek>(
    mut input: &mut T,
    chunk_hdr_size: u64,
    blk_sz: u64,
) -> Result<Option<ChunkWithSize>> {
    let current_offset = input
        .seek(SeekFrom::Current(0))
        .map_err(Error::ReadSpecificationError)?;
    let chunk_header =
        ChunkHeader::from_reader(&mut input).map_err(Error::ReadSpecificationError)?;
    let chunk = match chunk_header.chunk_type.to_native() {
        CHUNK_TYPE_RAW => {
            input
                .seek(SeekFrom::Current(
                    chunk_header.total_sz.to_native() as i64 - chunk_hdr_size as i64,
                ))
                .map_err(Error::ReadSpecificationError)?;
            Chunk::Raw(current_offset + chunk_hdr_size)
        }
        CHUNK_TYPE_FILL => {
            if chunk_header.total_sz == chunk_hdr_size as u32 {
                return Err(Error::InvalidSpecification(
                    "Fill chunk did not have any data to fill".to_string(),
                ));
            }
            let fill_size = chunk_header.total_sz.to_native() as u64 - chunk_hdr_size;
            let mut fill_bytes = vec![0u8; fill_size as usize];
            input
                .read_exact(&mut fill_bytes)
                .map_err(Error::ReadSpecificationError)?;
            Chunk::Fill(fill_bytes)
        }
        CHUNK_TYPE_DONT_CARE => Chunk::DontCare,
        CHUNK_TYPE_CRC32 => return Ok(None), // TODO(schuffelen): Validate crc32s in input
        unknown_type => {
            return Err(Error::InvalidSpecification(format!(
                "Chunk had invalid type, was {:x}",
                unknown_type
            )))
        }
    };
    let expanded_size = chunk_header.chunk_sz.to_native() as u64 * blk_sz;
    Ok(Some(ChunkWithSize {
        chunk,
        expanded_size,
    }))
}

impl AndroidSparse {
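    /// Parses an Android sparse image from `file`, validating the magic value,
    /// major version, and chunk header size before reading the chunk table.
    ///
    /// A minimal caller-side sketch (the path name is illustrative, not part of
    /// this module):
    ///
    /// ```text
    /// let file = File::open("disk.sparse.img")?;
    /// let image = AndroidSparse::from_file(file)?;
    /// let expanded_len = image.get_len()?;
    /// ```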
    pub fn from_file(mut file: File) -> Result<AndroidSparse> {
        file.seek(SeekFrom::Start(0))
            .map_err(Error::ReadSpecificationError)?;
        let sparse_header =
            SparseHeader::from_reader(&mut file).map_err(Error::ReadSpecificationError)?;
        if sparse_header.magic != SPARSE_HEADER_MAGIC {
            return Err(Error::InvalidSpecification(format!(
                "Header did not match magic constant. Expected {:x}, was {:x}",
                SPARSE_HEADER_MAGIC,
                sparse_header.magic.to_native()
            )));
        } else if sparse_header.major_version != MAJOR_VERSION {
            return Err(Error::InvalidSpecification(format!(
                "Header major version did not match. Expected {}, was {}",
                MAJOR_VERSION,
                sparse_header.major_version.to_native(),
            )));
        } else if (sparse_header.chunk_hdr_size.to_native() as usize)
            < mem::size_of::<ChunkHeader>()
        {
            return Err(Error::InvalidSpecification(format!(
                "Chunk header size does not fit chunk header struct, expected >={}, was {}",
                mem::size_of::<ChunkHeader>(),
                sparse_header.chunk_hdr_size.to_native()
            )));
        }
        let header_size = sparse_header.chunk_hdr_size.to_native() as u64;
        let block_size = sparse_header.blk_sz.to_native() as u64;
        let chunks = (0..sparse_header.total_chunks.to_native())
            .filter_map(|_| parse_chunk(&mut file, header_size, block_size).transpose())
            .collect::<Result<Vec<ChunkWithSize>>>()?;
        let total_size =
            sparse_header.total_blks.to_native() as u64 * sparse_header.blk_sz.to_native() as u64;
        AndroidSparse::from_parts(file, total_size, chunks)
    }

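    /// Builds the expanded-offset -> chunk map from the parsed chunk list and
    /// checks that the chunk sizes add up to the size promised by the header.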
    fn from_parts(file: File, size: u64, chunks: Vec<ChunkWithSize>) -> Result<AndroidSparse> {
        let mut chunks_map: BTreeMap<u64, ChunkWithSize> = BTreeMap::new();
        let mut expanded_location: u64 = 0;
        for chunk_with_size in chunks {
            let size = chunk_with_size.expanded_size;
            if chunks_map
                .insert(expanded_location, chunk_with_size)
                .is_some()
            {
                return Err(Error::InvalidSpecification(format!(
                    "Two chunks were at {}",
                    expanded_location
                )));
            }
            expanded_location += size;
        }
        let image = AndroidSparse {
            file,
            total_size: size,
            chunks: chunks_map,
        };
        let calculated_len: u64 = image.chunks.iter().map(|x| x.1.expanded_size).sum();
        if calculated_len != size {
            return Err(Error::InvalidSpecification(format!(
                "Header promised size {}, chunks added up to {}",
                size, calculated_len
            )));
        }
        Ok(image)
    }
}

impl DiskGetLen for AndroidSparse {
    fn get_len(&self) -> io::Result<u64> {
        Ok(self.total_size)
    }
}

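// The expanded image is exposed read-only: resizing, hole punching, zero
// writes, and allocation below all return `PermissionDenied`.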
impl FileSetLen for AndroidSparse {
    fn set_len(&self, _len: u64) -> io::Result<()> {
        Err(io::Error::new(
            ErrorKind::PermissionDenied,
            "unsupported operation",
        ))
    }
}

impl FileSync for AndroidSparse {
    fn fsync(&mut self) -> io::Result<()> {
        Ok(())
    }
}

impl PunchHole for AndroidSparse {
    fn punch_hole(&mut self, _offset: u64, _length: u64) -> io::Result<()> {
        Err(io::Error::new(
            ErrorKind::PermissionDenied,
            "unsupported operation",
        ))
    }
}

impl WriteZeroesAt for AndroidSparse {
    fn write_zeroes_at(&mut self, _offset: u64, _length: usize) -> io::Result<usize> {
        Err(io::Error::new(
            ErrorKind::PermissionDenied,
            "unsupported operation",
        ))
    }
}

impl AsRawDescriptor for AndroidSparse {
    fn as_raw_descriptor(&self) -> RawDescriptor {
        self.file.as_raw_descriptor()
    }
}

impl FileAllocate for AndroidSparse {
    fn allocate(&mut self, _offset: u64, _length: u64) -> io::Result<()> {
        Err(io::Error::new(
            ErrorKind::PermissionDenied,
            "unsupported operation",
        ))
    }
}

// Performs reads up to the chunk boundary.
impl FileReadWriteAtVolatile for AndroidSparse {
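    // Locates the chunk containing `offset` via a range query on the BTreeMap
    // (the greatest chunk start <= offset), then serves the read from that chunk
    // only, truncating the slice at the chunk boundary.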
    fn read_at_volatile(&mut self, slice: VolatileSlice, offset: u64) -> io::Result<usize> {
        let found_chunk = self.chunks.range(..=offset).next_back();
        let (
            chunk_start,
            ChunkWithSize {
                chunk,
                expanded_size,
            },
        ) = found_chunk.ok_or_else(|| {
            io::Error::new(
                ErrorKind::UnexpectedEof,
                format!("no chunk for offset {}", offset),
            )
        })?;
        let chunk_offset = offset - chunk_start;
        let chunk_size = *expanded_size;
        let subslice = if chunk_offset + (slice.size() as u64) > chunk_size {
            slice
                .sub_slice(0, (chunk_size - chunk_offset) as usize)
                .map_err(|e| io::Error::new(ErrorKind::InvalidData, format!("{:?}", e)))?
        } else {
            slice
        };
        match chunk {
            Chunk::DontCare => {
                subslice.write_bytes(0);
                Ok(subslice.size() as usize)
            }
            Chunk::Raw(file_offset) => self
                .file
                .read_at_volatile(subslice, *file_offset + chunk_offset),
            Chunk::Fill(fill_bytes) => {
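                // The fill pattern repeats every `fill_bytes.len()` bytes, so a
                // read starting mid-chunk begins partway through the pattern.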
                let chunk_offset_mod = chunk_offset % fill_bytes.len() as u64;
                let filled_memory: Vec<u8> = fill_bytes
                    .iter()
                    .cloned()
                    .cycle()
                    .skip(chunk_offset_mod as usize)
                    .take(subslice.size() as usize)
                    .collect();
                subslice.copy_from(&filled_memory);
                Ok(subslice.size() as usize)
            }
        }
    }

    fn write_at_volatile(&mut self, _slice: VolatileSlice, _offset: u64) -> io::Result<usize> {
        Err(io::Error::new(
            ErrorKind::PermissionDenied,
            "unsupported operation",
        ))
    }
}

#[cfg(test)]
mod tests {
    use super::*;
    use std::io::{Cursor, Write};
    use tempfile::tempfile;

    const CHUNK_SIZE: usize = mem::size_of::<ChunkHeader>();

    #[test]
    fn parse_raw() {
        let chunk_raw = ChunkHeader {
            chunk_type: CHUNK_TYPE_RAW.into(),
            reserved1: 0,
            chunk_sz: 1.into(),
            total_sz: (CHUNK_SIZE as u32 + 123).into(),
        };
        let header_bytes = chunk_raw.as_slice();
        let mut chunk_bytes: Vec<u8> = Vec::new();
        chunk_bytes.extend_from_slice(header_bytes);
        chunk_bytes.extend_from_slice(&[0u8; 123]);
        let mut chunk_cursor = Cursor::new(chunk_bytes);
        let chunk = parse_chunk(&mut chunk_cursor, CHUNK_SIZE as u64, 123)
            .expect("Failed to parse")
            .expect("Failed to determine chunk type");
        let expected_chunk = ChunkWithSize {
            chunk: Chunk::Raw(CHUNK_SIZE as u64),
            expanded_size: 123,
        };
        assert_eq!(expected_chunk, chunk);
    }

    #[test]
    fn parse_dont_care() {
        let chunk_raw = ChunkHeader {
            chunk_type: CHUNK_TYPE_DONT_CARE.into(),
            reserved1: 0,
            chunk_sz: 100.into(),
            total_sz: (CHUNK_SIZE as u32).into(),
        };
        let header_bytes = chunk_raw.as_slice();
        let mut chunk_cursor = Cursor::new(header_bytes);
        let chunk = parse_chunk(&mut chunk_cursor, CHUNK_SIZE as u64, 123)
            .expect("Failed to parse")
            .expect("Failed to determine chunk type");
        let expected_chunk = ChunkWithSize {
            chunk: Chunk::DontCare,
            expanded_size: 12300,
        };
        assert_eq!(expected_chunk, chunk);
    }

    #[test]
    fn parse_fill() {
        let chunk_raw = ChunkHeader {
            chunk_type: CHUNK_TYPE_FILL.into(),
            reserved1: 0,
            chunk_sz: 100.into(),
            total_sz: (CHUNK_SIZE as u32 + 4).into(),
        };
        let header_bytes = chunk_raw.as_slice();
        let mut chunk_bytes: Vec<u8> = Vec::new();
        chunk_bytes.extend_from_slice(header_bytes);
        chunk_bytes.extend_from_slice(&[123u8; 4]);
        let mut chunk_cursor = Cursor::new(chunk_bytes);
        let chunk = parse_chunk(&mut chunk_cursor, CHUNK_SIZE as u64, 123)
            .expect("Failed to parse")
            .expect("Failed to determine chunk type");
        let expected_chunk = ChunkWithSize {
            chunk: Chunk::Fill(vec![123, 123, 123, 123]),
            expanded_size: 12300,
        };
        assert_eq!(expected_chunk, chunk);
    }

    #[test]
    fn parse_crc32() {
        let chunk_raw = ChunkHeader {
            chunk_type: CHUNK_TYPE_CRC32.into(),
            reserved1: 0,
            chunk_sz: 0.into(),
            total_sz: (CHUNK_SIZE as u32 + 4).into(),
        };
        let header_bytes = chunk_raw.as_slice();
        let mut chunk_bytes: Vec<u8> = Vec::new();
        chunk_bytes.extend_from_slice(header_bytes);
        chunk_bytes.extend_from_slice(&[123u8; 4]);
        let mut chunk_cursor = Cursor::new(chunk_bytes);
        let chunk =
            parse_chunk(&mut chunk_cursor, CHUNK_SIZE as u64, 123).expect("Failed to parse");
        assert_eq!(None, chunk);
    }

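    // Builds an AndroidSparse backed by an empty temp file whose total size is
    // the sum of the chunks' expanded sizes.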
    fn test_image(chunks: Vec<ChunkWithSize>) -> AndroidSparse {
        let file = tempfile().expect("failed to create tempfile");
        let size = chunks.iter().map(|x| x.expanded_size).sum();
        AndroidSparse::from_parts(file, size, chunks).expect("Could not create image")
    }

    #[test]
    fn read_dontcare() {
        let chunks = vec![ChunkWithSize {
            chunk: Chunk::DontCare,
            expanded_size: 100,
        }];
        let mut image = test_image(chunks);
        let mut input_memory = [55u8; 100];
        image
            .read_exact_at_volatile(VolatileSlice::new(&mut input_memory[..]), 0)
            .expect("Could not read");
        let expected = [0u8; 100];
        assert_eq!(&expected[..], &input_memory[..]);
    }

    #[test]
    fn read_fill_simple() {
        let chunks = vec![ChunkWithSize {
            chunk: Chunk::Fill(vec![10, 20]),
            expanded_size: 8,
        }];
        let mut image = test_image(chunks);
        let mut input_memory = [55u8; 8];
        image
            .read_exact_at_volatile(VolatileSlice::new(&mut input_memory[..]), 0)
            .expect("Could not read");
        let expected = [10, 20, 10, 20, 10, 20, 10, 20];
        assert_eq!(&expected[..], &input_memory[..]);
    }

    #[test]
    fn read_fill_edges() {
        let chunks = vec![ChunkWithSize {
            chunk: Chunk::Fill(vec![10, 20, 30]),
            expanded_size: 8,
        }];
        let mut image = test_image(chunks);
        let mut input_memory = [55u8; 6];
        image
            .read_exact_at_volatile(VolatileSlice::new(&mut input_memory[..]), 1)
            .expect("Could not read");
        let expected = [20, 30, 10, 20, 30, 10];
        assert_eq!(&expected[..], &input_memory[..]);
    }

    #[test]
    fn read_fill_offset_edges() {
        let chunks = vec![
            ChunkWithSize {
                chunk: Chunk::DontCare,
                expanded_size: 20,
            },
            ChunkWithSize {
                chunk: Chunk::Fill(vec![10, 20, 30]),
                expanded_size: 100,
            },
        ];
        let mut image = test_image(chunks);
        let mut input_memory = [55u8; 7];
        image
            .read_exact_at_volatile(VolatileSlice::new(&mut input_memory[..]), 39)
            .expect("Could not read");
        let expected = [20, 30, 10, 20, 30, 10, 20];
        assert_eq!(&expected[..], &input_memory[..]);
    }

    #[test]
    fn read_raw() {
        let chunks = vec![ChunkWithSize {
            chunk: Chunk::Raw(0),
            expanded_size: 100,
        }];
        let mut image = test_image(chunks);
        write!(image.file, "hello").expect("Failed to write into internal file");
        let mut input_memory = [55u8; 5];
        image
            .read_exact_at_volatile(VolatileSlice::new(&mut input_memory[..]), 0)
            .expect("Could not read");
        let expected = [104, 101, 108, 108, 111];
        assert_eq!(&expected[..], &input_memory[..]);
    }

    #[test]
    fn read_two_fills() {
        let chunks = vec![
            ChunkWithSize {
                chunk: Chunk::Fill(vec![10, 20]),
                expanded_size: 4,
            },
            ChunkWithSize {
                chunk: Chunk::Fill(vec![30, 40]),
                expanded_size: 4,
            },
        ];
        let mut image = test_image(chunks);
        let mut input_memory = [55u8; 8];
        image
            .read_exact_at_volatile(VolatileSlice::new(&mut input_memory[..]), 0)
            .expect("Could not read");
        let expected = [10, 20, 10, 20, 30, 40, 30, 40];
        assert_eq!(&expected[..], &input_memory[..]);
    }
}