use std::ffi::OsStr;
use std::fs;
use std::path::{Path, PathBuf};
use std::process::Command;

use super::build_sysroot::STDLIB_SRC;
use super::path::{Dirs, RelPath};
use super::rustc_info::get_default_sysroot;
use super::utils::{
    copy_dir_recursively, git_command, remove_dir_if_exists, retry_spawn_and_wait, spawn_and_wait,
};

pub(crate) fn prepare(dirs: &Dirs) {
    RelPath::DOWNLOAD.ensure_exists(dirs);
    super::tests::RAND_REPO.fetch(dirs);
    super::tests::REGEX_REPO.fetch(dirs);
    super::tests::PORTABLE_SIMD_REPO.fetch(dirs);
}

pub(crate) fn prepare_stdlib(dirs: &Dirs, rustc: &Path) {
    let sysroot_src_orig = get_default_sysroot(rustc).join("lib/rustlib/src/rust");
    assert!(sysroot_src_orig.exists());

    apply_patches(dirs, "stdlib", &sysroot_src_orig, &STDLIB_SRC.to_path(dirs));

    std::fs::write(
        STDLIB_SRC.to_path(dirs).join("Cargo.toml"),
        r#"
[workspace]
members = ["./library/sysroot"]

[patch.crates-io]
rustc-std-workspace-core = { path = "./library/rustc-std-workspace-core" }
rustc-std-workspace-alloc = { path = "./library/rustc-std-workspace-alloc" }
rustc-std-workspace-std = { path = "./library/rustc-std-workspace-std" }

# Mandatory for correctly compiling compiler-builtins
[profile.dev.package.compiler_builtins]
debug-assertions = false
overflow-checks = false
codegen-units = 10000

[profile.release.package.compiler_builtins]
debug-assertions = false
overflow-checks = false
codegen-units = 10000
"#,
    )
    .unwrap();

    let source_lockfile = RelPath::PATCHES.to_path(dirs).join("stdlib-lock.toml");
    let target_lockfile = STDLIB_SRC.to_path(dirs).join("Cargo.lock");
    fs::copy(source_lockfile, target_lockfile).unwrap();
}

pub(crate) struct GitRepo {
    url: GitRepoUrl,
    rev: &'static str,
    content_hash: &'static str,
    patch_name: &'static str,
}

enum GitRepoUrl {
    Github { user: &'static str, repo: &'static str },
}

// Note: This uses a hasher which is not cryptographically secure. This is fine as the hash is meant
// to protect against accidental modification and outdated downloads, not against manipulation.
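// The resulting u64 is compared against `GitRepo::content_hash` after being formatted as a
// 16-digit hex string (see `GitRepo::fetch` below). When updating a pinned revision, the new
// expected value can be obtained by temporarily printing `format!("{:016x}", hash_dir(&download_dir))`
// for the freshly downloaded sources.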
fn hash_file(file: &std::path::Path) -> u64 {
    let contents = std::fs::read(file).unwrap();
    #[allow(deprecated)]
    let mut hasher = std::hash::SipHasher::new();
    std::hash::Hash::hash(&contents, &mut hasher);
    std::hash::Hasher::finish(&hasher)
}

fn hash_dir(dir: &std::path::Path) -> u64 {
    let mut sub_hashes = std::collections::BTreeMap::new();
    for entry in std::fs::read_dir(dir).unwrap() {
        let entry = entry.unwrap();
        if entry.file_type().unwrap().is_dir() {
            sub_hashes
                .insert(entry.file_name().to_str().unwrap().to_owned(), hash_dir(&entry.path()));
        } else {
            sub_hashes
                .insert(entry.file_name().to_str().unwrap().to_owned(), hash_file(&entry.path()));
        }
    }
    #[allow(deprecated)]
    let mut hasher = std::hash::SipHasher::new();
    std::hash::Hash::hash(&sub_hashes, &mut hasher);
    std::hash::Hasher::finish(&hasher)
}

impl GitRepo {
    pub(crate) const fn github(
        user: &'static str,
        repo: &'static str,
        rev: &'static str,
        content_hash: &'static str,
        patch_name: &'static str,
    ) -> GitRepo {
        GitRepo { url: GitRepoUrl::Github { user, repo }, rev, content_hash, patch_name }
    }
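
    // Illustrative example only; the values below are placeholders, not a real pinned
    // repository. The actual constants built with this constructor live elsewhere, e.g.
    // `super::tests::RAND_REPO`:
    //
    //     static EXAMPLE_REPO: GitRepo = GitRepo::github(
    //         "example-user",     // GitHub user or organization
    //         "example-repo",     // repository name
    //         "0123456789abcdef", // git revision to download
    //         "0000000000000000", // expected `hash_dir` output for the downloaded sources
    //         "example-repo",     // prefix of the patch and lockfile names
    //     );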

    fn download_dir(&self, dirs: &Dirs) -> PathBuf {
        match self.url {
            GitRepoUrl::Github { user: _, repo } => RelPath::DOWNLOAD.join(repo).to_path(dirs),
        }
    }

    pub(crate) const fn source_dir(&self) -> RelPath {
        match self.url {
            GitRepoUrl::Github { user: _, repo } => RelPath::BUILD.join(repo),
        }
    }

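    // Downloads the pinned revision into the download directory unless an existing download
    // already matches `content_hash`. Afterwards the vendored `<patch_name>-lock.toml` (if
    // present) is copied in as `Cargo.lock`, and the content hash is checked again, exiting
    // with an error on mismatch.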
    pub(crate) fn fetch(&self, dirs: &Dirs) {
        let download_dir = self.download_dir(dirs);

        if download_dir.exists() {
            let actual_hash = format!("{:016x}", hash_dir(&download_dir));
            if actual_hash == self.content_hash {
                println!("[FRESH] {}", download_dir.display());
                return;
            } else {
                println!(
                    "Mismatched content hash for {download_dir}: {actual_hash} != {content_hash}. Downloading again.",
                    download_dir = download_dir.display(),
                    content_hash = self.content_hash,
                );
            }
        }

        match self.url {
            GitRepoUrl::Github { user, repo } => {
                clone_repo_shallow_github(dirs, &download_dir, user, repo, self.rev);
            }
        }

        let source_lockfile =
            RelPath::PATCHES.to_path(dirs).join(format!("{}-lock.toml", self.patch_name));
        let target_lockfile = download_dir.join("Cargo.lock");
        if source_lockfile.exists() {
            fs::copy(source_lockfile, target_lockfile).unwrap();
        } else {
            assert!(target_lockfile.exists());
        }

        let actual_hash = format!("{:016x}", hash_dir(&download_dir));
        if actual_hash != self.content_hash {
            println!(
                "Download of {download_dir} failed with mismatched content hash: {actual_hash} != {content_hash}",
                download_dir = download_dir.display(),
                content_hash = self.content_hash,
            );
            std::process::exit(1);
        }
    }

    pub(crate) fn patch(&self, dirs: &Dirs) {
        apply_patches(
            dirs,
            self.patch_name,
            &self.download_dir(dirs),
            &self.source_dir().to_path(dirs),
        );
    }
}

#[allow(dead_code)]
fn clone_repo(download_dir: &Path, repo: &str, rev: &str) {
    eprintln!("[CLONE] {}", repo);
    // Ignore exit code as the repo may already have been checked out
    git_command(None, "clone").arg(repo).arg(download_dir).spawn().unwrap().wait().unwrap();

    let mut clean_cmd = git_command(download_dir, "checkout");
    clean_cmd.arg("--").arg(".");
    spawn_and_wait(clean_cmd);

    let mut checkout_cmd = git_command(download_dir, "checkout");
    checkout_cmd.arg("-q").arg(rev);
    spawn_and_wait(checkout_cmd);

    std::fs::remove_dir_all(download_dir.join(".git")).unwrap();
}

fn clone_repo_shallow_github(dirs: &Dirs, download_dir: &Path, user: &str, repo: &str, rev: &str) {
    if cfg!(windows) {
        // Older Windows versions don't have tar or curl by default. Fall back to using git.
        clone_repo(download_dir, &format!("https://github.com/{}/{}.git", user, repo), rev);
        return;
    }

    let archive_url = format!("https://github.com/{}/{}/archive/{}.tar.gz", user, repo, rev);
    let archive_file = RelPath::DOWNLOAD.to_path(dirs).join(format!("{}.tar.gz", rev));
    let archive_dir = RelPath::DOWNLOAD.to_path(dirs).join(format!("{}-{}", repo, rev));

    eprintln!("[DOWNLOAD] {}/{} from {}", user, repo, archive_url);

    // Remove previous results if they exist
    let _ = std::fs::remove_file(&archive_file);
    let _ = std::fs::remove_dir_all(&archive_dir);
    let _ = std::fs::remove_dir_all(&download_dir);

    // Download the .tar.gz archive
    let mut download_cmd = Command::new("curl");
    download_cmd
        .arg("--max-time")
        .arg("600")
        .arg("-y")
        .arg("30")
        .arg("-Y")
        .arg("10")
        .arg("--connect-timeout")
        .arg("30")
        .arg("--continue-at")
        .arg("-")
        .arg("--location")
        .arg("--output")
        .arg(&archive_file)
        .arg(archive_url);
    retry_spawn_and_wait(5, download_cmd);

    // Unpack the tar archive
    let mut unpack_cmd = Command::new("tar");
    unpack_cmd.arg("xf").arg(&archive_file).current_dir(RelPath::DOWNLOAD.to_path(dirs));
    spawn_and_wait(unpack_cmd);

    // Rename the unpacked dir to the expected name
    std::fs::rename(archive_dir, &download_dir).unwrap();

    // Cleanup
    std::fs::remove_file(archive_file).unwrap();
}

fn init_git_repo(repo_dir: &Path) {
    let mut git_init_cmd = git_command(repo_dir, "init");
    git_init_cmd.arg("-q");
    spawn_and_wait(git_init_cmd);

    let mut git_add_cmd = git_command(repo_dir, "add");
    git_add_cmd.arg(".");
    spawn_and_wait(git_add_cmd);

    let mut git_commit_cmd = git_command(repo_dir, "commit");
    git_commit_cmd.arg("-m").arg("Initial commit").arg("-q");
    spawn_and_wait(git_commit_cmd);
}

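// Collects all `*.patch` files belonging to `crate_name`. Patch file names are expected to
// look like `<prefix>-<crate_name>....patch`: the filter below splits the file name on the
// first `-` and keeps it if the remainder starts with `crate_name`. The sorted order of the
// paths determines the order in which the patches are applied.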
fn get_patches(dirs: &Dirs, crate_name: &str) -> Vec<PathBuf> {
    let mut patches: Vec<_> = fs::read_dir(RelPath::PATCHES.to_path(dirs))
        .unwrap()
        .map(|entry| entry.unwrap().path())
        .filter(|path| path.extension() == Some(OsStr::new("patch")))
        .filter(|path| {
            path.file_name()
                .unwrap()
                .to_str()
                .unwrap()
                .split_once("-")
                .unwrap()
                .1
                .starts_with(crate_name)
        })
        .collect();
    patches.sort();
    patches
}

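// Copies `source_dir` into a freshly created `target_dir` (only the `library` subdirectory
// when preparing the stdlib), initializes a git repository there and applies all patches for
// `crate_name` via `git am`. Passing "<none>" as `crate_name` skips the patching step.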
pub(crate) fn apply_patches(dirs: &Dirs, crate_name: &str, source_dir: &Path, target_dir: &Path) {
    // FIXME avoid copy and patch if src, patches and target are unchanged

    eprintln!("[COPY] {crate_name} source");

    remove_dir_if_exists(target_dir);
    fs::create_dir_all(target_dir).unwrap();
    if crate_name == "stdlib" {
        fs::create_dir(target_dir.join("library")).unwrap();
        copy_dir_recursively(&source_dir.join("library"), &target_dir.join("library"));
    } else {
        copy_dir_recursively(source_dir, target_dir);
    }

    init_git_repo(target_dir);

    if crate_name == "<none>" {
        return;
    }

    for patch in get_patches(dirs, crate_name) {
        eprintln!(
            "[PATCH] {:?} <- {:?}",
            target_dir.file_name().unwrap(),
            patch.file_name().unwrap()
        );
        let mut apply_patch_cmd = git_command(target_dir, "am");
        apply_patch_cmd.arg(patch).arg("-q");
        spawn_and_wait(apply_patch_cmd);
    }
}