use std::{
    collections::HashSet,
    path::{Path, PathBuf},
};

use xshell::Shell;

#[cfg(not(feature = "in-rust-tree"))]
use xshell::cmd;

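// Checks that the workspace is formatted with the stable toolchain's rustfmt.
// If `cargo fmt -- --check` fails, the tree is reformatted in place so the fix
// is one commit away, but the test still fails on the original check result.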
#[cfg(not(feature = "in-rust-tree"))]
#[test]
fn check_code_formatting() {
    let sh = &Shell::new().unwrap();
    sh.change_dir(sourcegen::project_root());

    let out = cmd!(sh, "rustup run stable rustfmt --version").read().unwrap();
    if !out.contains("stable") {
        panic!(
            "Failed to run rustfmt from toolchain 'stable'. \
             Please run `rustup component add rustfmt --toolchain stable` to install it.",
        )
    }

    let res = cmd!(sh, "rustup run stable cargo fmt -- --check").run();
    if res.is_err() {
        let _ = cmd!(sh, "rustup run stable cargo fmt").run();
    }
    res.unwrap()
}

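// Ensures that `docs/dev/lsp-extensions.md` stays in sync with
// `crates/rust-analyzer/src/lsp_ext.rs`: the doc embeds a hash of the source
// file, which must be updated whenever the protocol extensions change.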
#[test]
fn check_lsp_extensions_docs() {
    let sh = &Shell::new().unwrap();

    let expected_hash = {
        let lsp_ext_rs = sh
            .read_file(sourcegen::project_root().join("crates/rust-analyzer/src/lsp_ext.rs"))
            .unwrap();
        stable_hash(lsp_ext_rs.as_str())
    };

    let actual_hash = {
        let lsp_extensions_md =
            sh.read_file(sourcegen::project_root().join("docs/dev/lsp-extensions.md")).unwrap();
        let text = lsp_extensions_md
            .lines()
            .find_map(|line| line.strip_prefix("lsp_ext.rs hash:"))
            .unwrap()
            .trim();
        u64::from_str_radix(text, 16).unwrap()
    };

    if actual_hash != expected_hash {
        panic!(
            "
lsp_ext.rs was changed without touching lsp-extensions.md.

Expected hash: {expected_hash:x}
Actual hash: {actual_hash:x}

Please adjust docs/dev/lsp-extensions.md.
"
        )
    }
}

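// Walks every file under `crates/` and applies the per-file tidy checks below.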
#[test]
fn files_are_tidy() {
    let sh = &Shell::new().unwrap();

    let files = sourcegen::list_files(&sourcegen::project_root().join("crates"));

    let mut tidy_docs = TidyDocs::default();
    let mut tidy_marks = TidyMarks::default();
    for path in files {
        let extension = path.extension().unwrap_or_default().to_str().unwrap_or_default();
        match extension {
            "rs" => {
                let text = sh.read_file(&path).unwrap();
                check_todo(&path, &text);
                check_dbg(&path, &text);
                check_test_attrs(&path, &text);
                check_trailing_ws(&path, &text);
                tidy_docs.visit(&path, &text);
                tidy_marks.visit(&path, &text);
            }
            "toml" => {
                let text = sh.read_file(&path).unwrap();
                check_cargo_toml(&path, text);
            }
            _ => (),
        }
    }

    tidy_docs.finish();
    tidy_marks.finish();
}

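// Lints Cargo.toml files: `[section]` headers must be bare (no comments or
// trailing content), and in-workspace (`path = ...`) dependencies must carry a
// `version`, except under `dev-dependencies`, where they must not.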
fn check_cargo_toml(path: &Path, text: String) {
    let mut section = None;
    for (line_no, text) in text.lines().enumerate() {
        let text = text.trim();
        if text.starts_with('[') {
            if !text.ends_with(']') {
                panic!(
                    "\nplease don't add comments or trailing whitespace in section lines.\n\
                     {}:{}\n",
                    path.display(),
                    line_no + 1
                )
            }
            section = Some(text);
            continue;
        }
        let text: String = text.split_whitespace().collect();
        if !text.contains("path=") {
            continue;
        }
        match section {
            Some(s) if s.contains("dev-dependencies") => {
                if text.contains("version") {
                    panic!(
                        "\ncargo internal dev-dependencies should not have a version.\n\
                         {}:{}\n",
                        path.display(),
                        line_no + 1
                    );
                }
            }
            Some(s) if s.contains("dependencies") => {
                if !text.contains("version") {
                    panic!(
                        "\ncargo internal dependencies should have a version.\n\
                         {}:{}\n",
                        path.display(),
                        line_no + 1
                    );
                }
            }
            _ => {}
        }
    }
}

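// Compares the set of license strings reported by `cargo metadata` against the
// allow-list below and prints the difference whenever a dependency introduces
// a new license string (or an expected one disappears).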
#[cfg(not(feature = "in-rust-tree"))]
#[test]
fn check_licenses() {
    let sh = &Shell::new().unwrap();

    let expected = "
(MIT OR Apache-2.0) AND Unicode-DFS-2016
0BSD OR MIT OR Apache-2.0
Apache-2.0
Apache-2.0 OR BSL-1.0
Apache-2.0 OR MIT
Apache-2.0 WITH LLVM-exception OR Apache-2.0 OR MIT
Apache-2.0/MIT
BSD-3-Clause
BlueOak-1.0.0 OR MIT OR Apache-2.0
CC0-1.0 OR Artistic-2.0
ISC
MIT
MIT / Apache-2.0
MIT OR Apache-2.0
MIT OR Apache-2.0 OR Zlib
MIT OR Zlib OR Apache-2.0
MIT/Apache-2.0
Unlicense OR MIT
Unlicense/MIT
Zlib OR Apache-2.0 OR MIT
"
    .lines()
    .filter(|it| !it.is_empty())
    .collect::<Vec<_>>();

    let meta = cmd!(sh, "cargo metadata --format-version 1").read().unwrap();
    let mut licenses = meta
        .split(|c| c == ',' || c == '{' || c == '}')
        .filter(|it| it.contains(r#""license""#))
        .map(|it| it.trim())
        .map(|it| it[r#""license":"#.len()..].trim_matches('"'))
        .collect::<Vec<_>>();
    licenses.sort_unstable();
    licenses.dedup();
    if licenses != expected {
        let mut diff = String::new();

        diff.push_str("New Licenses:\n");
        for &l in licenses.iter() {
            if !expected.contains(&l) {
                diff += &format!(" {l}\n")
            }
        }

        diff.push_str("\nMissing Licenses:\n");
        for &l in expected.iter() {
            if !licenses.contains(&l) {
                diff += &format!(" {l}\n")
            }
        }

        panic!("different set of licenses!\n{diff}");
    }
    assert_eq!(licenses, expected);
}

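// Forbids TODO markers and `todo!()` macros in committed code (FIXME is the
// accepted spelling), except in the allow-listed files below.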
fn check_todo(path: &Path, text: &str) {
    let need_todo = &[
        // This file itself obviously needs to use todo (<- like this!).
        "tests/tidy.rs",
        // Some of our assists generate `todo!()`.
        "handlers/add_turbo_fish.rs",
        "handlers/generate_function.rs",
        "handlers/add_missing_match_arms.rs",
        "handlers/replace_derive_with_manual_impl.rs",
        // To support generating `todo!()` in assists, we have `expr_todo()` in
        // `ast::make`.
        "ast/make.rs",
        // The documentation in string literals may contain anything for its own purposes
        "ide-db/src/generated/lints.rs",
        "ide-assists/src/utils/gen_trait_fn_body.rs",
        "ide-assists/src/tests/generated.rs",
        // The tests for missing fields
        "ide-diagnostics/src/handlers/missing_fields.rs",
    ];
    if need_todo.iter().any(|p| path.ends_with(p)) {
        return;
    }
    if text.contains("TODO") || text.contains("TOOD") || text.contains("todo!") {
        // Generated by an assist
        if text.contains("${0:todo!()}") {
            return;
        }

        panic!(
            "\nTODO markers or todo! macros should not be committed to the master branch,\n\
             use FIXME instead\n\
             {}\n",
            path.display(),
        )
    }
}

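// Forbids committed `dbg!` invocations outside the allow-listed files below.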
fn check_dbg(path: &Path, text: &str) {
    let need_dbg = &[
        // This file itself obviously needs to use dbg.
        "slow-tests/tidy.rs",
        // Assists to remove `dbg!()`
        "handlers/remove_dbg.rs",
        // We have .dbg postfix
        "ide-completion/src/completions/postfix.rs",
        "ide-completion/src/completions/keyword.rs",
        "ide-completion/src/tests/proc_macros.rs",
        // The documentation in string literals may contain anything for its own purposes
        "ide-completion/src/lib.rs",
        "ide-db/src/generated/lints.rs",
        // test for doc test for remove_dbg
        "src/tests/generated.rs",
        // `expect!` string can contain `dbg!` (due to .dbg postfix)
        "ide-completion/src/tests/special.rs",
    ];
    if need_dbg.iter().any(|p| path.ends_with(p)) {
        return;
    }
    if text.contains("dbg!") {
        panic!(
            "\ndbg! macros should not be committed to the master branch,\n\
             {}\n",
            path.display(),
        )
    }
}

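// Forbids `#[ignore]` and `#[should_panic]` tests outside the allow-listed
// files, pointing offenders at the relevant style guide sections.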
fn check_test_attrs(path: &Path, text: &str) {
    let ignore_rule =
        "https://github.com/rust-lang/rust-analyzer/blob/master/docs/dev/style.md#ignore";
    let need_ignore: &[&str] = &[
        // This file.
        "slow-tests/tidy.rs",
        // Special case to run `#[ignore]` tests.
        "ide/src/runnables.rs",
        // A legit test which needs to be ignored, as it takes too long to run
        // :(
        "hir-def/src/nameres/collector.rs",
        // Long sourcegen test to generate lint completions.
        "ide-db/src/tests/sourcegen_lints.rs",
        // Obviously needs ignore.
        "ide-assists/src/handlers/toggle_ignore.rs",
        // See above.
        "ide-assists/src/tests/generated.rs",
    ];
    if text.contains("#[ignore") && !need_ignore.iter().any(|p| path.ends_with(p)) {
        panic!("\ndon't `#[ignore]` tests, see:\n\n {ignore_rule}\n\n {}\n", path.display(),)
    }

    let panic_rule =
        "https://github.com/rust-lang/rust-analyzer/blob/master/docs/dev/style.md#should_panic";
    let need_panic: &[&str] = &[
        // This file.
        "slow-tests/tidy.rs",
        "test-utils/src/fixture.rs",
    ];
    if text.contains("#[should_panic") && !need_panic.iter().any(|p| path.ends_with(p)) {
        panic!(
            "\ndon't add `#[should_panic]` tests, see:\n\n {}\n\n {}\n",
            panic_rule,
            path.display(),
        )
    }
}

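// Rejects trailing whitespace on any line, skipping `test_data` directories.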
fn check_trailing_ws(path: &Path, text: &str) {
    if is_exclude_dir(path, &["test_data"]) {
        return;
    }
    for (line_number, line) in text.lines().enumerate() {
        if line.chars().last().map(char::is_whitespace) == Some(true) {
            panic!("Trailing whitespace in {} at line {}", path.display(), line_number + 1)
        }
    }
}

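// Collects modules that are missing a `//!` module-level doc comment, as well
// as modules whose doc comment still contains a FIXME.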
#[derive(Default)]
struct TidyDocs {
    missing_docs: Vec<String>,
    contains_fixme: Vec<PathBuf>,
}

impl TidyDocs {
    fn visit(&mut self, path: &Path, text: &str) {
        // Tests and diagnostic fixes don't need module level comments.
        if is_exclude_dir(path, &["tests", "test_data", "fixes", "grammar"]) {
            return;
        }

        if is_exclude_file(path) {
            return;
        }

        let first_line = match text.lines().next() {
            Some(it) => it,
            None => return,
        };

        if first_line.starts_with("//!") {
            if first_line.contains("FIXME") {
                self.contains_fixme.push(path.to_path_buf());
            }
        } else {
            if text.contains("// Feature:")
                || text.contains("// Assist:")
                || text.contains("// Diagnostic:")
            {
                return;
            }
            self.missing_docs.push(path.display().to_string());
        }

        fn is_exclude_file(d: &Path) -> bool {
            let file_names = ["tests.rs", "famous_defs_fixture.rs"];

            d.file_name()
                .unwrap_or_default()
                .to_str()
                .map(|f_n| file_names.iter().any(|name| *name == f_n))
                .unwrap_or(false)
        }
    }

    fn finish(self) {
        if !self.missing_docs.is_empty() {
            panic!(
                "\nMissing docs strings\n\n\
                 modules:\n{}\n\n",
                self.missing_docs.join("\n")
            )
        }

        for path in self.contains_fixme {
            panic!("FIXME doc in a fully-documented crate: {}", path.display())
        }
    }
}

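// Returns true if any ancestor directory of `p` (relative to the project root,
// excluding the file name itself) is in `dirs_to_exclude`.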
fn is_exclude_dir(p: &Path, dirs_to_exclude: &[&str]) -> bool {
    p.strip_prefix(sourcegen::project_root())
        .unwrap()
        .components()
        .rev()
        .skip(1)
        .filter_map(|it| it.as_os_str().to_str())
        .any(|it| dirs_to_exclude.contains(&it))
}

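// Tracks test marks: every `hit!(name)` must be paired with a matching
// `check!(name)` (or `check_count!(name)`), and vice versa.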
#[derive(Default)]
struct TidyMarks {
    hits: HashSet<String>,
    checks: HashSet<String>,
}

impl TidyMarks {
    fn visit(&mut self, _path: &Path, text: &str) {
        find_marks(&mut self.hits, text, "hit");
        find_marks(&mut self.checks, text, "check");
        find_marks(&mut self.checks, text, "check_count");
    }

    fn finish(self) {
        assert!(!self.hits.is_empty());

        let diff: Vec<_> =
            self.hits.symmetric_difference(&self.checks).map(|it| it.as_str()).collect();

        if !diff.is_empty() {
            panic!("unpaired marks: {diff:?}")
        }
    }
}

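// Hashes `text` with the deprecated `SipHasher`, whose default keys are fixed,
// so the result is reproducible across runs; `\r` is stripped first so CRLF
// and LF checkouts hash identically.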
#[allow(deprecated)]
fn stable_hash(text: &str) -> u64 {
    use std::hash::{Hash, Hasher, SipHasher};

    let text = text.replace('\r', "");
    let mut hasher = SipHasher::default();
    text.hash(&mut hasher);
    hasher.finish()
}

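// Scans `text` for `mark!(name)` invocations (e.g. `hit!(my_mark)`) and
// inserts each mark name into `set`.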
fn find_marks(set: &mut HashSet<String>, text: &str, mark: &str) {
    let mut text = text;
    let mut prev_text = "";
    while text != prev_text {
        prev_text = text;
        if let Some(idx) = text.find(mark) {
            text = &text[idx + mark.len()..];
            if let Some(stripped_text) = text.strip_prefix("!(") {
                text = stripped_text.trim_start();
                if let Some(idx2) = text.find(|c: char| !(c.is_alphanumeric() || c == '_')) {
                    let mark_text = &text[..idx2];
                    set.insert(mark_text.to_string());
                    text = &text[idx2..];
                }
            }
        }
    }
}