// Copyright 2020 The Chromium OS Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

use crate::{wrap_descriptor, AsRawDescriptor, MappedRegion, MmapError, Protection, SharedMemory};
use data_model::volatile_memory::*;
use data_model::DataInit;
use std::fs::File;
use sys_util::MemoryMapping as SysUtilMmap;

pub type Result<T> = std::result::Result<T, MmapError>;

/// See [MemoryMapping](sys_util::MemoryMapping) for struct- and method-level
/// documentation.
#[derive(Debug)]
pub struct MemoryMapping {
    mapping: SysUtilMmap,
}

impl MemoryMapping {
    pub fn write_slice(&self, buf: &[u8], offset: usize) -> Result<usize> {
        self.mapping.write_slice(buf, offset)
    }

    pub fn read_slice(&self, buf: &mut [u8], offset: usize) -> Result<usize> {
        self.mapping.read_slice(buf, offset)
    }

    pub fn write_obj<T: DataInit>(&self, val: T, offset: usize) -> Result<()> {
        self.mapping.write_obj(val, offset)
    }

    pub fn read_obj<T: DataInit>(&self, offset: usize) -> Result<T> {
        self.mapping.read_obj(offset)
    }

    pub fn msync(&self) -> Result<()> {
        self.mapping.msync()
    }

    pub fn use_hugepages(&self) -> Result<()> {
        self.mapping.use_hugepages()
    }

    pub fn read_to_memory(
        &self,
        mem_offset: usize,
        src: &dyn AsRawDescriptor,
        count: usize,
    ) -> Result<()> {
        self.mapping
            .read_to_memory(mem_offset, &wrap_descriptor(src), count)
    }

    pub fn write_from_memory(
        &self,
        mem_offset: usize,
        dst: &dyn AsRawDescriptor,
        count: usize,
    ) -> Result<()> {
        self.mapping
            .write_from_memory(mem_offset, &wrap_descriptor(dst), count)
    }
}

pub trait Unix {
    fn remove_range(&self, mem_offset: usize, count: usize) -> Result<()>;
}

impl Unix for MemoryMapping {
    fn remove_range(&self, mem_offset: usize, count: usize) -> Result<()> {
        self.mapping.remove_range(mem_offset, count)
    }
}

pub trait MemoryMappingBuilderUnix<'a> {
    fn from_descriptor(self, descriptor: &'a dyn AsRawDescriptor) -> MemoryMappingBuilder;
}

pub struct MemoryMappingBuilder<'a> {
    descriptor: Option<&'a dyn AsRawDescriptor>,
    size: usize,
    offset: Option<u64>,
    protection: Option<Protection>,
    populate: bool,
}

impl<'a> MemoryMappingBuilderUnix<'a> for MemoryMappingBuilder<'a> {
    /// Build the memory mapping backed by the specified descriptor.
    ///
    /// Default: Create a new memory mapping.
    fn from_descriptor(mut self, descriptor: &'a dyn AsRawDescriptor) -> MemoryMappingBuilder {
        self.descriptor = Some(descriptor);
        self
    }
}

/// Builds a MemoryMapping object from the specified arguments.
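///
/// A minimal illustrative sketch (not part of the original source) of how the
/// builder below is typically chained; `shm` stands in for a hypothetical
/// `SharedMemory` created elsewhere.
///
/// ```ignore
/// // 1 MiB anonymous mapping with the default read/write protection.
/// let anon = MemoryMappingBuilder::new(1 << 20).build()?;
///
/// // Map the first 4 KiB of an existing SharedMemory region.
/// let shared = MemoryMappingBuilder::new(4096)
///     .from_shared_memory(&shm)
///     .offset(0)
///     .build()?;
/// ```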
impl<'a> MemoryMappingBuilder<'a> {
    /// Creates a new builder specifying the size of the memory region in bytes.
    pub fn new(size: usize) -> MemoryMappingBuilder<'a> {
        MemoryMappingBuilder {
            descriptor: None,
            size,
            offset: None,
            protection: None,
            populate: false,
        }
    }

    /// Build the memory mapping backed by the specified File.
    ///
    /// Default: Create a new memory mapping.
    ///
    /// Note: this is a forward-looking interface to accommodate platforms that
    /// require special handling for file-backed mappings.
    #[allow(unused_mut)]
    pub fn from_file(mut self, file: &'a File) -> MemoryMappingBuilder {
        self.descriptor = Some(file as &dyn AsRawDescriptor);
        self
    }

    /// Build the memory mapping backed by the specified SharedMemory.
    ///
    /// Default: Create a new memory mapping.
    pub fn from_shared_memory(mut self, shm: &'a SharedMemory) -> MemoryMappingBuilder {
        self.descriptor = Some(shm as &dyn AsRawDescriptor);
        self
    }

    /// Offset in bytes from the beginning of the mapping to start the mmap.
    ///
    /// Default: No offset
    pub fn offset(mut self, offset: u64) -> MemoryMappingBuilder<'a> {
        self.offset = Some(offset);
        self
    }

    /// Protection (e.g. readable/writable) of the memory region.
    ///
    /// Default: Read/write
    pub fn protection(mut self, protection: Protection) -> MemoryMappingBuilder<'a> {
        self.protection = Some(protection);
        self
    }

    /// Request that the mapped pages are pre-populated.
    ///
    /// Default: Do not populate
    pub fn populate(mut self) -> MemoryMappingBuilder<'a> {
        self.populate = true;
        self
    }

    /// Build a MemoryMapping from the provided options.
    pub fn build(self) -> Result<MemoryMapping> {
        match self.descriptor {
            None => {
                if self.populate {
                    // Population not supported for new mmaps
                    return Err(MmapError::InvalidArgument);
                }
                MemoryMappingBuilder::wrap(SysUtilMmap::new_protection(
                    self.size,
                    self.protection.unwrap_or_else(Protection::read_write),
                ))
            }
            Some(descriptor) => {
                MemoryMappingBuilder::wrap(SysUtilMmap::from_fd_offset_protection_populate(
                    &wrap_descriptor(descriptor),
                    self.size,
                    self.offset.unwrap_or(0),
                    self.protection.unwrap_or_else(Protection::read_write),
                    self.populate,
                ))
            }
        }
    }

    /// Build a MemoryMapping from the provided options at a fixed address. Note this
    /// is a separate function from build in order to isolate unsafe behavior.
    ///
    /// # Safety
    ///
    /// Function should not be called before the caller unmaps any mmap'd regions already
    /// present at `(addr..addr+size)`. If another MemoryMapping object holds the same
    /// address space, the destructors of those objects will conflict and the space could
    /// be unmapped while still in use.
    
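    ///
    /// An illustrative sketch of a call site (`addr` and `shm` are hypothetical
    /// values supplied by the caller, not part of the original source):
    ///
    /// ```ignore
    /// // SAFETY: the caller reserved `addr..addr + 4096` and no other live
    /// // MemoryMapping covers that range.
    /// let fixed = unsafe {
    ///     MemoryMappingBuilder::new(4096)
    ///         .from_shared_memory(&shm)
    ///         .build_fixed(addr)
    /// }?;
    /// ```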
    pub unsafe fn build_fixed(self, addr: *mut u8) -> Result<MemoryMapping> {
        if self.populate {
            // Population not supported for fixed mapping.
            return Err(MmapError::InvalidArgument);
        }
        match self.descriptor {
            None => MemoryMappingBuilder::wrap(SysUtilMmap::new_protection_fixed(
                addr,
                self.size,
                self.protection.unwrap_or_else(Protection::read_write),
            )),
            Some(descriptor) => {
                MemoryMappingBuilder::wrap(SysUtilMmap::from_fd_offset_protection_fixed(
                    addr,
                    &wrap_descriptor(descriptor),
                    self.size,
                    self.offset.unwrap_or(0),
                    self.protection.unwrap_or_else(Protection::read_write),
                ))
            }
        }
    }

    fn wrap(result: Result<SysUtilMmap>) -> Result<MemoryMapping> {
        result.map(|mapping| MemoryMapping { mapping })
    }
}

impl VolatileMemory for MemoryMapping {
    fn get_slice(&self, offset: usize, count: usize) -> VolatileMemoryResult<VolatileSlice> {
        self.mapping.get_slice(offset, count)
    }
}

// Safe because it exclusively forwards calls to a safe implementation.
unsafe impl MappedRegion for MemoryMapping {
    fn as_ptr(&self) -> *mut u8 {
        self.mapping.as_ptr()
    }

    fn size(&self) -> usize {
        self.mapping.size()
    }
}
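
// A minimal illustrative test (not part of the original file): it builds an
// anonymous mapping via MemoryMappingBuilder and round-trips a value through
// the MemoryMapping wrappers. Assumes data_model implements DataInit for u32.
#[cfg(test)]
mod builder_example_tests {
    use super::*;

    #[test]
    fn anonymous_mapping_round_trip() {
        // 4 KiB anonymous mapping with the default read/write protection.
        let mapping = MemoryMappingBuilder::new(4096)
            .build()
            .expect("failed to create anonymous mapping");

        // Write an object at offset 8 and read it back.
        mapping.write_obj(0xdead_beef_u32, 8).unwrap();
        assert_eq!(mapping.read_obj::<u32>(8).unwrap(), 0xdead_beef_u32);
    }
}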