// Copyright 2020 The Chromium OS Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

use crate::descriptor::AsRawDescriptor;
use crate::{
    platform::MemoryMapping as SysUtilMmap, MappedRegion, MemoryMappingArena, MmapError,
    Protection, SharedMemory,
};
use data_model::{volatile_memory::*, DataInit};
use std::fs::File;

pub type Result<T> = std::result::Result<T, MmapError>;

/// See [MemoryMapping](crate::platform::MemoryMapping) for struct- and method-level
/// documentation.
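///
/// A minimal usage sketch (illustrative; assumes `MemoryMappingBuilder` is in scope and
/// that `u64` implements `DataInit`):
///
/// ```ignore
/// // Map 8 KiB of anonymous memory, then round-trip a u64 through it.
/// let mapping = MemoryMappingBuilder::new(8192).build()?;
/// mapping.write_obj(0xdead_beef_u64, 0)?;
/// let value: u64 = mapping.read_obj(0)?;
/// assert_eq!(value, 0xdead_beef);
/// ```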
#[derive(Debug)]
pub struct MemoryMapping {
    mapping: SysUtilMmap,
}

impl MemoryMapping {
    pub fn write_slice(&self, buf: &[u8], offset: usize) -> Result<usize> {
        self.mapping.write_slice(buf, offset)
    }

    pub fn read_slice(&self, buf: &mut [u8], offset: usize) -> Result<usize> {
        self.mapping.read_slice(buf, offset)
    }

    pub fn write_obj<T: DataInit>(&self, val: T, offset: usize) -> Result<()> {
        self.mapping.write_obj(val, offset)
    }

    pub fn read_obj<T: DataInit>(&self, offset: usize) -> Result<T> {
        self.mapping.read_obj(offset)
    }

    pub fn msync(&self) -> Result<()> {
        self.mapping.msync()
    }

    pub fn use_hugepages(&self) -> Result<()> {
        self.mapping.use_hugepages()
    }

    pub fn read_to_memory(
        &self,
        mem_offset: usize,
        src: &dyn AsRawDescriptor,
        count: usize,
    ) -> Result<()> {
        self.mapping.read_to_memory(mem_offset, src, count)
    }

    pub fn write_from_memory(
        &self,
        mem_offset: usize,
        dst: &dyn AsRawDescriptor,
        count: usize,
    ) -> Result<()> {
        self.mapping.write_from_memory(mem_offset, dst, count)
    }
}

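/// Unix-specific extensions to `MemoryMapping`.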
pub trait Unix {
    fn remove_range(&self, mem_offset: usize, count: usize) -> Result<()>;
}

impl Unix for MemoryMapping {
    fn remove_range(&self, mem_offset: usize, count: usize) -> Result<()> {
        self.mapping.remove_range(mem_offset, count)
    }
}

pub trait MemoryMappingBuilderUnix<'a> {
    #[allow(clippy::wrong_self_convention)]
    fn from_descriptor(self, descriptor: &'a dyn AsRawDescriptor) -> MemoryMappingBuilder;
}

pub struct MemoryMappingBuilder<'a> {
    descriptor: Option<&'a dyn AsRawDescriptor>,
    size: usize,
    offset: Option<u64>,
    protection: Option<Protection>,
    populate: bool,
}

impl<'a> MemoryMappingBuilderUnix<'a> for MemoryMappingBuilder<'a> {
    /// Build the memory mapping backed by the specified descriptor.
    ///
    /// Default: Create a new anonymous memory mapping.
    #[allow(clippy::wrong_self_convention)]
    fn from_descriptor(mut self, descriptor: &'a dyn AsRawDescriptor) -> MemoryMappingBuilder {
        self.descriptor = Some(descriptor);
        self
    }
}

/// Builds a MemoryMapping object from the specified arguments.
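///
/// A sketch of mapping an existing shared memory region with explicit options
/// (illustrative; `len` and `shm` are placeholders for a real size and `SharedMemory`):
///
/// ```ignore
/// let mapping = MemoryMappingBuilder::new(len)
///     .from_shared_memory(&shm)
///     .offset(0)
///     .protection(Protection::read_write())
///     .build()?;
/// ```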
impl<'a> MemoryMappingBuilder<'a> {
    /// Creates a new builder specifying the size of the memory region in bytes.
    pub fn new(size: usize) -> MemoryMappingBuilder<'a> {
        MemoryMappingBuilder {
            descriptor: None,
            size,
            offset: None,
            protection: None,
            populate: false,
        }
    }

    /// Build the memory mapping backed by the specified File.
    ///
    /// Default: Create a new anonymous memory mapping.
    ///
    /// Note: this is a forward-looking interface to accommodate platforms that
    /// require special handling for file-backed mappings.
    #[allow(clippy::wrong_self_convention, unused_mut)]
    pub fn from_file(mut self, file: &'a File) -> MemoryMappingBuilder {
        self.descriptor = Some(file as &dyn AsRawDescriptor);
        self
    }

    /// Build the memory mapping backed by the specified SharedMemory.
    ///
    /// Default: Create a new anonymous memory mapping.
    pub fn from_shared_memory(mut self, shm: &'a SharedMemory) -> MemoryMappingBuilder {
        self.descriptor = Some(shm as &dyn AsRawDescriptor);
        self
    }

    /// Offset in bytes into the backing descriptor at which to start the mapping.
    ///
    /// Default: No offset
    pub fn offset(mut self, offset: u64) -> MemoryMappingBuilder<'a> {
        self.offset = Some(offset);
        self
    }

    /// Protection (e.g. readable/writable) of the memory region.
    ///
    /// Default: Read/write
    pub fn protection(mut self, protection: Protection) -> MemoryMappingBuilder<'a> {
        self.protection = Some(protection);
        self
    }

    /// Request that the mapped pages be pre-populated.
    ///
    /// Default: Do not populate
    pub fn populate(mut self) -> MemoryMappingBuilder<'a> {
        self.populate = true;
        self
    }

    /// Build a MemoryMapping from the provided options.
    pub fn build(self) -> Result<MemoryMapping> {
        match self.descriptor {
            None => {
                if self.populate {
                    // Population not supported for new mmaps
                    return Err(MmapError::InvalidArgument);
                }
                MemoryMappingBuilder::wrap(SysUtilMmap::new_protection(
                    self.size,
                    self.protection.unwrap_or_else(Protection::read_write),
                ))
            }
            Some(descriptor) => {
                MemoryMappingBuilder::wrap(SysUtilMmap::from_fd_offset_protection_populate(
                    descriptor,
                    self.size,
                    self.offset.unwrap_or(0),
                    self.protection.unwrap_or_else(Protection::read_write),
                    self.populate,
                ))
            }
        }
    }

    /// Build a MemoryMapping from the provided options at a fixed address. Note that this
    /// is a separate function from `build` in order to isolate unsafe behavior.
    ///
    /// # Safety
    ///
    /// This function must not be called until the caller has unmapped any mmap'd regions
    /// already present at `(addr..addr+size)`. If another MemoryMapping object holds the
    /// same address space, the destructors of those objects will conflict and the space
    /// could be unmapped while still in use.
    pub unsafe fn build_fixed(self, addr: *mut u8) -> Result<MemoryMapping> {
        if self.populate {
            // Population not supported for fixed mapping.
            return Err(MmapError::InvalidArgument);
        }
        match self.descriptor {
            None => MemoryMappingBuilder::wrap(SysUtilMmap::new_protection_fixed(
                addr,
                self.size,
                self.protection.unwrap_or_else(Protection::read_write),
            )),
            Some(descriptor) => {
                MemoryMappingBuilder::wrap(SysUtilMmap::from_fd_offset_protection_fixed(
                    addr,
                    descriptor,
                    self.size,
                    self.offset.unwrap_or(0),
                    self.protection.unwrap_or_else(Protection::read_write),
                ))
            }
        }
    }

    fn wrap(result: Result<SysUtilMmap>) -> Result<MemoryMapping> {
        result.map(|mapping| MemoryMapping { mapping })
    }
}

impl VolatileMemory for MemoryMapping {
    fn get_slice(&self, offset: usize, count: usize) -> VolatileMemoryResult<VolatileSlice> {
        self.mapping.get_slice(offset, count)
    }
}

// Safe because it exclusively forwards calls to a safe implementation.
unsafe impl MappedRegion for MemoryMapping {
    fn as_ptr(&self) -> *mut u8 {
        self.mapping.as_ptr()
    }

    fn size(&self) -> usize {
        self.mapping.size()
    }
}

impl From<MemoryMapping> for MemoryMappingArena {
    fn from(mmap: MemoryMapping) -> Self {
        MemoryMappingArena::from(mmap.mapping)
    }
}