cargo_tarpaulin/source_analysis/mod.rs

use crate::config::{Config, RunType};
use crate::path_utils::{get_source_walker, is_source_file};
use lazy_static::lazy_static;
use proc_macro2::{Span, TokenStream};
use quote::ToTokens;
use regex::Regex;
use serde::{Deserialize, Serialize};
use std::cell::RefCell;
use std::collections::{HashMap, HashSet};
use std::ffi::OsStr;
use std::fs::File;
use std::io::{self, BufRead, BufReader, Read};
use std::path::{Path, PathBuf};
use syn::spanned::Spanned;
use syn::*;
use tracing::{debug, trace, warn};
use walkdir::WalkDir;

mod attributes;
mod expressions;
mod items;
mod macros;
mod statements;
#[cfg(test)]
mod tests;

pub(crate) mod prelude {
    pub(crate) use super::*;
    pub(crate) use attributes::*;
    pub(crate) use macros::*;
}

/// A set of lines in a file, used when marking lines as ignorable.
#[derive(Copy, Clone, Debug, Eq, PartialEq, Hash)]
pub enum Lines {
    /// Every line in the file.
    All,
    /// A single (1-indexed) line.
    Line(usize),
}

/// Result of source analysis for a single file.
#[derive(Clone, Debug, Default)]
pub struct LineAnalysis {
    /// Lines that should be ignored when reporting coverage.
    pub ignore: HashSet<Lines>,
    /// Lines that should be reported as coverable.
    pub cover: HashSet<usize>,
    /// Maps a physical line to the logical line its coverage should be
    /// attributed to.
    pub logical_lines: HashMap<usize, usize>,
    /// Number of lines in the file; lines beyond this are ignored.
    max_line: usize,
    /// Function names mapped to their (start, end) line spans.
    pub functions: HashMap<String, (usize, usize)>,
}

/// Per-file context used while walking the syntax tree.
pub(crate) struct Context<'a> {
    /// Program config.
    config: &'a Config,
    /// Contents of the source file being analysed.
    file_contents: &'a str,
    /// Path of the source file being analysed.
    file: &'a Path,
    /// Modules (files or directories) found during analysis that should be
    /// excluded; interior mutability lets them be collected while the rest of
    /// the context is borrowed immutably.
    ignore_mods: RefCell<HashSet<PathBuf>>,
    /// Stack of enclosing item names used to build qualified function names.
    pub(crate) symbol_stack: RefCell<Vec<String>>,
}

pub(crate) struct StackGuard<'a>(&'a RefCell<Vec<String>>);

impl<'a> Drop for StackGuard<'a> {
    fn drop(&mut self) {
        self.0.borrow_mut().pop();
    }
}

impl<'a> Context<'a> {
    pub(crate) fn push_to_symbol_stack(&self, mut ident: String) -> StackGuard<'_> {
        if !(ident.starts_with("<") && ident.ends_with(">")) {
            ident = ident.replace(' ', "");
        }
        self.symbol_stack.borrow_mut().push(ident);
        StackGuard(&self.symbol_stack)
    }

    pub(crate) fn get_qualified_name(&self) -> String {
        let stack = self.symbol_stack.borrow();
        let name = stack.join("::");
        debug!("Found function: {}", name);
        name
    }
}

/// Query interface over per-file analysis results.
pub trait SourceAnalysisQuery {
    /// Returns true if the given (1-indexed) line of the file should be
    /// ignored for coverage.
    fn should_ignore(&self, path: &Path, l: &usize) -> bool;
    /// Maps a physical line to the logical line its coverage should be
    /// attributed to; the path and line are returned unchanged if no mapping
    /// exists.
    fn normalise(&self, path: &Path, l: usize) -> (PathBuf, usize);
}

#[derive(Copy, Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash)]
pub(crate) enum SubResult {
    /// No special handling required.
    Ok,
    /// Overrides any other result when results are combined.
    Definite,
    /// The analysed code is unreachable.
    Unreachable,
}

impl std::ops::AddAssign for SubResult {
    // Combination precedence: `Definite` wins over `Unreachable`, which wins
    // over `Ok`.
    fn add_assign(&mut self, other: Self) {
        if *self == Self::Definite || other == Self::Definite {
            *self = Self::Definite;
        } else if *self == Self::Unreachable || other == Self::Unreachable {
            *self = Self::Unreachable;
        } else {
            *self = Self::Ok;
        }
    }
}

impl std::ops::Add for SubResult {
    type Output = Self;

    fn add(mut self, rhs: Self) -> Self::Output {
        self += rhs;
        self
    }
}

impl SubResult {
    pub fn is_reachable(&self) -> bool {
        *self != Self::Unreachable
    }

    pub fn is_unreachable(&self) -> bool {
        !self.is_reachable()
    }
}
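
// Illustrative sketch of how results combine under the precedence above:
//
//     assert_eq!(SubResult::Ok + SubResult::Unreachable, SubResult::Unreachable);
//     assert_eq!(SubResult::Unreachable + SubResult::Definite, SubResult::Definite);
//     assert!((SubResult::Ok + SubResult::Definite).is_reachable());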

impl SourceAnalysisQuery for HashMap<PathBuf, LineAnalysis> {
    fn should_ignore(&self, path: &Path, l: &usize) -> bool {
        if self.contains_key(path) {
            self.get(path).unwrap().should_ignore(*l)
        } else {
            false
        }
    }

    fn normalise(&self, path: &Path, l: usize) -> (PathBuf, usize) {
        let pb = path.to_path_buf();
        match self.get(path) {
            Some(s) => match s.logical_lines.get(&l) {
                Some(o) => (pb, *o),
                _ => (pb, l),
            },
            _ => (pb, l),
        }
    }
}
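
// Illustrative sketch of `normalise` (the path below is just an example): if
// the analysis for `src/lib.rs` has a `logical_lines` entry mapping 12 -> 10,
// then `map.normalise(Path::new("src/lib.rs"), 12)` returns the path with
// line 10, while a line with no mapping is returned unchanged.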

impl LineAnalysis {
    /// Creates an empty `LineAnalysis`.
    fn new() -> Self {
        Default::default()
    }

    /// Creates a `LineAnalysis` with `max_line` set to the number of lines in
    /// the file.
    fn new_from_file(path: &Path) -> io::Result<Self> {
        let file = BufReader::new(File::open(path)?);
        Ok(Self {
            max_line: file.lines().count(),
            ..Default::default()
        })
    }

    /// Ignores every line in the file, clearing any existing results.
    pub fn ignore_all(&mut self) {
        self.ignore.clear();
        self.cover.clear();
        self.ignore.insert(Lines::All);
    }

    /// Adds the lines spanned by the given tokens to the ignore set.
    pub fn ignore_tokens<T>(&mut self, tokens: T)
    where
        T: ToTokens,
    {
        for token in tokens.into_token_stream() {
            self.ignore_span(token.span());
        }
    }

    /// Adds the lines of the provided span to the ignore set, removing them
    /// from the cover set if present.
    pub fn ignore_span(&mut self, span: Span) {
        // Nothing to do if the whole file is already ignored.
        if !self.ignore.contains(&Lines::All) {
            for i in span.start().line..=span.end().line {
                self.ignore.insert(Lines::Line(i));
                if self.cover.contains(&i) {
                    self.cover.remove(&i);
                }
            }
        }
    }

    /// Marks the lines spanned by each token in the stream as coverable.
    pub fn cover_token_stream(&mut self, tokens: TokenStream, contents: Option<&str>) {
        for token in tokens {
            self.cover_span(token.span(), contents);
        }
    }

    /// Marks the lines of the provided span as coverable. If the file contents
    /// are supplied, lines detected as comments are skipped.
    pub fn cover_span(&mut self, span: Span, contents: Option<&str>) {
        let mut useful_lines: HashSet<usize> = HashSet::new();
        if let Some(c) = contents {
            lazy_static! {
                static ref SINGLE_LINE: Regex = Regex::new(r"\s*//").unwrap();
            }
            const MULTI_START: &str = "/*";
            const MULTI_END: &str = "*/";
            let len = span.end().line - span.start().line;
            let mut is_comment = false;
            for (i, line) in c.lines().enumerate().skip(span.start().line - 1).take(len) {
                let is_code = if line.contains(MULTI_START) {
                    if !line.contains(MULTI_END) {
                        is_comment = true;
                    }
                    false
                } else if is_comment {
                    if line.contains(MULTI_END) {
                        is_comment = false;
                    }
                    false
                } else {
                    true
                };
                if is_code && !SINGLE_LINE.is_match(line) {
                    useful_lines.insert(i + 1);
                }
            }
        }
        for i in span.start().line..=span.end().line {
            if !self.ignore.contains(&Lines::Line(i)) && useful_lines.contains(&i) {
                self.cover.insert(i);
            }
        }
    }

    /// Returns true if the line should be ignored: it is explicitly ignored,
    /// the whole file is ignored, or it lies past the end of the file.
    pub fn should_ignore(&self, line: usize) -> bool {
        self.ignore.contains(&Lines::Line(line))
            || self.ignore.contains(&Lines::All)
            || (self.max_line > 0 && self.max_line < line)
    }

    /// Adds the given lines to the ignore set, removing them from the cover
    /// set if present.
    fn add_to_ignore(&mut self, lines: impl IntoIterator<Item = usize>) {
        if !self.ignore.contains(&Lines::All) {
            for l in lines {
                self.ignore.insert(Lines::Line(l));
                if self.cover.contains(&l) {
                    self.cover.remove(&l);
                }
            }
        }
    }
}
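
// Illustrative sketch (hypothetical line number) of how the two sets interact:
// ignoring a line always removes it from the cover set, and `cover_span` never
// covers a line that is already ignored.
//
//     let mut analysis = LineAnalysis::new();
//     analysis.cover.insert(5);
//     analysis.add_to_ignore([5]);
//     assert!(analysis.should_ignore(5));
//     assert!(!analysis.cover.contains(&5));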

impl Function {
    fn new(name: &str, span: (usize, usize)) -> Self {
        Self {
            name: name.to_string(),
            start: span.0 as u64,
            end: span.1 as u64,
        }
    }
}

/// A function in the source code, with the lines it spans.
#[derive(Clone, Debug, Default, Eq, PartialEq, Ord, PartialOrd, Deserialize, Serialize)]
pub struct Function {
    pub name: String,
    pub start: u64,
    pub end: u64,
}

/// Line analysis results for every source file in the project.
#[derive(Default)]
pub struct SourceAnalysis {
    pub lines: HashMap<PathBuf, LineAnalysis>,
    ignored_modules: Vec<PathBuf>,
}

impl SourceAnalysis {
    pub fn new() -> Self {
        Default::default()
    }

    pub fn create_function_map(&self) -> HashMap<PathBuf, Vec<Function>> {
        self.lines
            .iter()
            .map(|(file, analysis)| {
                let mut functions: Vec<Function> = analysis
                    .functions
                    .iter()
                    .map(|(function, span)| Function::new(function, *span))
                    .collect();
                functions.sort_unstable_by(|a, b| a.start.cmp(&b.start));
                (file.to_path_buf(), functions)
            })
            .collect()
    }

    pub fn get_line_analysis(&mut self, path: PathBuf) -> &mut LineAnalysis {
        self.lines
            .entry(path.clone())
            .or_insert_with(|| LineAnalysis::new_from_file(&path).unwrap_or_default())
    }

    fn is_ignored_module(&self, path: &Path) -> bool {
        self.ignored_modules.iter().any(|x| path.starts_with(x))
    }

    pub fn get_analysis(config: &Config) -> Self {
        let mut result = Self::new();
        let mut ignored_files: HashSet<PathBuf> = HashSet::new();
        let root = config.root();

        for e in get_source_walker(config) {
            if !ignored_files.contains(e.path()) {
                result.analyse_package(e.path(), &root, config, &mut ignored_files);
            } else {
                let mut analysis = LineAnalysis::new();
                analysis.ignore_all();
                result.lines.insert(e.path().to_path_buf(), analysis);
                ignored_files.remove(e.path());
            }
        }
        for e in &ignored_files {
            let mut analysis = LineAnalysis::new();
            analysis.ignore_all();
            result.lines.insert(e.clone(), analysis);
        }
        result.debug_printout(config);

        result
    }
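
    // Illustrative sketch of the intended call pattern (the config, path and
    // line number below are placeholders):
    //
    //     let analysis = SourceAnalysis::get_analysis(&config);
    //     let functions = analysis.create_function_map();
    //     if analysis.lines.should_ignore(Path::new("src/main.rs"), &3) {
    //         // line 3 of src/main.rs is excluded from coverage
    //     }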

    /// Analyses a single source file, filling in the line analysis for it and
    /// recording any files that should be filtered out entirely.
    fn analyse_package(
        &mut self,
        path: &Path,
        root: &Path,
        config: &Config,
        filtered_files: &mut HashSet<PathBuf>,
    ) {
        if let Some(file) = path.to_str() {
            let skip_cause_test = !config.include_tests() && path.starts_with(root.join("tests"));
            let skip_cause_example = path.starts_with(root.join("examples"))
                && !config.run_types.contains(&RunType::Examples);
            if (skip_cause_test || skip_cause_example) || self.is_ignored_module(path) {
                let mut analysis = LineAnalysis::new();
                analysis.ignore_all();
                self.lines.insert(path.to_path_buf(), analysis);
            } else {
                let file = File::open(file);
                if let Ok(mut file) = file {
                    let mut content = String::new();
                    let res = file.read_to_string(&mut content);
                    if let Err(e) = res {
                        warn!(
                            "Unable to read file into string, skipping source analysis: {}",
                            e
                        );
                        return;
                    }
                    let file = parse_file(&content);
                    if let Ok(file) = file {
                        let ctx = Context {
                            config,
                            file_contents: &content,
                            file: path,
                            ignore_mods: RefCell::new(HashSet::new()),
                            symbol_stack: RefCell::new(vec![]),
                        };
                        if self.check_attr_list(&file.attrs, &ctx) {
                            self.find_ignorable_lines(&ctx);
                            self.process_items(&file.items, &ctx);

                            let mut ignored_files = ctx.ignore_mods.into_inner();
                            for f in ignored_files.drain() {
                                if f.is_file() {
                                    filtered_files.insert(f);
                                } else {
                                    let walker = WalkDir::new(f).into_iter();
                                    for e in walker
                                        .filter_map(std::result::Result::ok)
                                        .filter(is_source_file)
                                    {
                                        filtered_files.insert(e.path().to_path_buf());
                                    }
                                }
                            }
                            maybe_ignore_first_line(path, &mut self.lines);
                        } else {
                            // The top-level attributes indicate this file shouldn't be analysed;
                            // if it is the root of a module, ignore the rest of the module too.
                            let bad_module =
                                match (path.parent(), path.file_name().map(OsStr::to_string_lossy))
                                {
                                    (Some(p), Some(n)) => {
                                        if n == "lib.rs" || n == "mod.rs" {
                                            Some(p.to_path_buf())
                                        } else {
                                            let ignore = p.join(n.trim_end_matches(".rs"));
                                            if ignore.exists() && ignore.is_dir() {
                                                Some(ignore)
                                            } else {
                                                None
                                            }
                                        }
                                    }
                                    _ => None,
                                };
                            // Ignore anything already analysed under this module and remember it
                            // so later files in the module are skipped.
                            if let Some(module) = bad_module {
                                self.lines
                                    .iter_mut()
                                    .filter(|(k, _)| k.starts_with(module.as_path()))
                                    .for_each(|(_, v)| v.ignore_all());
                                self.ignored_modules.push(module);
                            }
                            let analysis = self.get_line_analysis(path.to_path_buf());
                            analysis.ignore_span(file.span());
                        }
                    }
                }
            }
        }
    }

    pub(crate) fn find_ignorable_lines(&mut self, ctx: &Context) {
        // Lines that start with a `//` comment or contain only brackets,
        // braces, parentheses, whitespace and simple punctuation.
        lazy_static! {
            static ref IGNORABLE: Regex =
                Regex::new(r"^((\s*//)|([\[\]\{\}\(\)\s;\?,/]*$))").unwrap();
        }
        let analysis = self.get_line_analysis(ctx.file.to_path_buf());
        let lines = ctx
            .file_contents
            .lines()
            .enumerate()
            .filter(|&(_, x)| IGNORABLE.is_match(x))
            .map(|(i, _)| i + 1);
        analysis.add_to_ignore(lines);

        // `} else {` lines contain no coverable code either.
        let lines = ctx
            .file_contents
            .lines()
            .enumerate()
            .filter(|&(_, x)| {
                let mut x = x.to_string();
                x.retain(|c| !c.is_whitespace());
                x == "}else{"
            })
            .map(|(i, _)| i + 1);
        analysis.add_to_ignore(lines);
    }
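
    // For illustration, given a file containing (hypothetically):
    //
    //     1 | fn foo() -> u32 {
    //     2 |     // add one
    //     3 |     1 + 1
    //     4 | }
    //
    // lines 2 and 4 would be added to the ignore set by the pass above.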

    pub(crate) fn visit_generics(&mut self, generics: &Generics, ctx: &Context) {
        if let Some(ref wh) = generics.where_clause {
            let analysis = self.get_line_analysis(ctx.file.to_path_buf());
            analysis.ignore_tokens(wh);
        }
    }

    #[cfg(not(tarpaulin_include))]
    /// Prints the ignorable and coverable lines of each analysed file when
    /// debug output is enabled in the config.
    pub fn debug_printout(&self, config: &Config) {
        if config.debug {
            for (path, analysis) in &self.lines {
                trace!(
                    "Source analysis for {}",
                    config.strip_base_dir(path).display()
                );
                let mut lines = Vec::new();
                for l in &analysis.ignore {
                    match l {
                        Lines::All => {
                            lines.clear();
                            trace!("All lines are ignorable");
                            break;
                        }
                        Lines::Line(i) => {
                            lines.push(i);
                        }
                    }
                }
                if !lines.is_empty() {
                    lines.sort();
                    trace!("Ignorable lines: {:?}", lines);
                    lines.clear();
                }
                for c in &analysis.cover {
                    lines.push(c);
                }

                if !lines.is_empty() {
                    lines.sort();
                    trace!("Coverable lines: {:?}", lines);
                }
            }
        }
    }
}

fn maybe_ignore_first_line(file: &Path, result: &mut HashMap<PathBuf, LineAnalysis>) {
    // Mark the first line of the file as ignorable unless it starts a `pub`
    // or `fn` item.
    if let Ok(f) = File::open(file) {
        let read_file = BufReader::new(f);
        if let Some(Ok(first)) = read_file.lines().next() {
            if !(first.starts_with("pub") || first.starts_with("fn")) {
                let file = file.to_path_buf();
                let line_analysis = result.entry(file).or_default();
                line_analysis.add_to_ignore([1]);
            }
        }
    }
}