ethers_solc/resolver/mod.rs

1//! Resolution of the entire dependency graph for a project.
2//!
3//! This module implements the core logic in taking all contracts of a project and creating a
4//! resolved graph with applied remappings for all source contracts.
5//!
//! Some constraints we're working with when resolving contracts:
//!
//!   1. Each file can contain several source units and can have any number of imports/dependencies
//!      (using the term interchangeably). Each dependency can declare a version range that it is
//!      compatible with via its solidity version pragma.
//!   2. A dependency can be imported from any directory, see `Remappings`. For example, the
//!      remapping `@openzeppelin/=lib/openzeppelin-contracts/` (an illustrative entry) makes
//!      `import "@openzeppelin/contracts/token/ERC20/ERC20.sol"` resolve under `lib/`.
13//!
//! Finding all dependencies is fairly simple: we do a DFS starting from the source
//! contracts.
16//!
17//! ## Solc version auto-detection
18//!
19//! Solving a constraint graph is an NP-hard problem. The algorithm for finding the "best" solution
20//! makes several assumptions and tries to find a version of "Solc" that is compatible with all
21//! source files.
22//!
//! The algorithm employed here is fairly simple: for every source file we do a DFS and collect
//! the set of Solc versions that the file and all of its imports are compatible with, and then we
//! try to find a single Solc version that is compatible with all the files. This is effectively
//! the intersection of all version sets.
27//!
28//! We always try to activate the highest (installed) solc version first. Uninstalled solc is only
29//! used if this version is the only compatible version for a single file or in the intersection of
30//! all version sets.
31//!
//! This leads to finding the optimal version, if there is one. If there is no single Solc version
//! that is compatible with all sources and their imports, this becomes a much harder problem,
//! because there is no longer an unambiguous "best" solution. In that case we simply choose the
//! latest (installed) Solc version per source and try to minimize the number of Solc versions used.
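//!
//! As a rough, self-contained sketch of that intersection step (not the exact implementation;
//! `pick_version` and its inputs are hypothetical):
//!
//! ```ignore
//! use std::collections::HashSet;
//! use semver::Version;
//!
//! /// Intersects the per-file candidate sets and returns the highest remaining version,
//! /// preferring a version that is already installed.
//! fn pick_version(
//!     per_file_candidates: Vec<HashSet<Version>>,
//!     installed: &HashSet<Version>,
//! ) -> Option<Version> {
//!     let mut sets = per_file_candidates.into_iter();
//!     let mut common = sets.next()?;
//!     for set in sets {
//!         common.retain(|v| set.contains(v));
//!     }
//!     let mut versions: Vec<Version> = common.into_iter().collect();
//!     versions.sort();
//!     // prefer the highest installed version, fall back to the highest candidate overall
//!     versions.iter().rev().find(|v| installed.contains(*v)).or_else(|| versions.last()).cloned()
//! }
//! ```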
36//!
37//! ## Performance
38//!
39//! Note that this is a relatively performance-critical portion of the ethers-solc preprocessing.
40//! The data that needs to be processed is proportional to the size of the dependency
41//! graph, which can, depending on the project, often be quite large.
42//!
43//! Note that, unlike the solidity compiler, we work with the filesystem, where we have to resolve
44//! remappings and follow relative paths. We're also limiting the nodes in the graph to solidity
45//! files, since we're only interested in their
46//! [version pragma](https://docs.soliditylang.org/en/develop/layout-of-source-files.html#version-pragma),
47//! which is defined on a per source file basis.
48
49use crate::{error::Result, utils, IncludePaths, ProjectPathsConfig, SolcError, Source, Sources};
50use parse::{SolData, SolDataUnit, SolImport};
51use rayon::prelude::*;
52use semver::VersionReq;
53use std::{
54    collections::{HashMap, HashSet, VecDeque},
55    fmt, io,
56    path::{Path, PathBuf},
57};
58
59mod parse;
60mod tree;
61
62use crate::utils::find_case_sensitive_existing_file;
63pub use parse::SolImportAlias;
64pub use tree::{print, Charset, TreeOptions};
65
/// The underlying edges of the graph, which only contain the raw relationship data.
67///
68/// This is kept separate from the `Graph` as the `Node`s get consumed when the `Solc` to `Sources`
69/// set is determined.
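///
/// As an illustrative example (hypothetical indices): if `nodes[0]` imports `nodes[1]` and
/// `nodes[2]`, while `nodes[1]` imports only `nodes[2]`, the adjacency list looks like:
///
/// ```ignore
/// let edges: Vec<Vec<usize>> = vec![
///     vec![1, 2], // outgoing edges (imports) of nodes[0]
///     vec![2],    // outgoing edges of nodes[1]
///     vec![],     // nodes[2] imports nothing
/// ];
/// ```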
70#[derive(Debug)]
71pub struct GraphEdges {
72    /// The indices of `edges` correspond to the `nodes`. That is, `edges[0]`
73    /// is the set of outgoing edges for `nodes[0]`.
74    edges: Vec<Vec<usize>>,
    /// maps the path of a solidity file to its index, for fast lookup
76    indices: HashMap<PathBuf, usize>,
77    /// reverse of `indices` for reverse lookup
78    rev_indices: HashMap<usize, PathBuf>,
79    /// the identified version requirement of a file
80    versions: HashMap<usize, Option<VersionReq>>,
81    /// the extracted data from the source file
82    data: HashMap<usize, SolData>,
    /// the number of input files we started with; the input files are `nodes[..num_input_files]`
    ///
    /// Combined with `indices`, this lets us determine whether a file was originally added to the
    /// graph as an input or was added later as a resolved import, see [`Self::is_input_file()`]
88    num_input_files: usize,
89    /// tracks all imports that we failed to resolve for a file
90    unresolved_imports: HashSet<(PathBuf, PathBuf)>,
91    /// tracks additional include paths resolved by scanning all imports of the graph
92    ///
    /// Absolute imports, like `import "src/Contract.sol"`, are possible, but they don't play
    /// nice with the standard-json import format, since the VFS won't be able to resolve
    /// "src/Contract.sol" without help via `--include-path`.
96    #[allow(unused)]
97    resolved_solc_include_paths: IncludePaths,
98}
99
100impl GraphEdges {
101    /// How many files are source files
102    pub fn num_source_files(&self) -> usize {
103        self.num_input_files
104    }
105
106    /// Returns an iterator over all file indices
107    pub fn files(&self) -> impl Iterator<Item = usize> + '_ {
108        0..self.edges.len()
109    }
110
111    /// Returns an iterator over all source file indices
112    pub fn source_files(&self) -> impl Iterator<Item = usize> + '_ {
113        0..self.num_input_files
114    }
115
116    /// Returns an iterator over all library files
117    pub fn library_files(&self) -> impl Iterator<Item = usize> + '_ {
118        self.files().skip(self.num_input_files)
119    }
120
121    /// Returns all additional `--include-paths`
122    pub fn include_paths(&self) -> &IncludePaths {
123        &self.resolved_solc_include_paths
124    }
125
126    /// Returns all imports that we failed to resolve
127    pub fn unresolved_imports(&self) -> &HashSet<(PathBuf, PathBuf)> {
128        &self.unresolved_imports
129    }
130
    /// Returns the list of node indices the given node points to, i.e. its direct imports.
132    pub fn imported_nodes(&self, from: usize) -> &[usize] {
133        &self.edges[from]
134    }
135
136    /// Returns an iterator that yields all imports of a node and all their imports
137    pub fn all_imported_nodes(&self, from: usize) -> impl Iterator<Item = usize> + '_ {
138        NodesIter::new(from, self).skip(1)
139    }
140
141    /// Returns all files imported by the given file
142    pub fn imports(&self, file: impl AsRef<Path>) -> HashSet<&PathBuf> {
143        if let Some(start) = self.indices.get(file.as_ref()).copied() {
144            NodesIter::new(start, self).skip(1).map(move |idx| &self.rev_indices[&idx]).collect()
145        } else {
146            HashSet::new()
147        }
148    }
149
150    /// Returns the id of the given file
151    pub fn node_id(&self, file: impl AsRef<Path>) -> usize {
152        self.indices[file.as_ref()]
153    }
154
155    /// Returns the path of the given node
156    pub fn node_path(&self, id: usize) -> &PathBuf {
157        &self.rev_indices[&id]
158    }
159
160    /// Returns true if the `file` was originally included when the graph was first created and not
161    /// added when all `imports` were resolved
162    pub fn is_input_file(&self, file: impl AsRef<Path>) -> bool {
163        if let Some(idx) = self.indices.get(file.as_ref()).copied() {
164            idx < self.num_input_files
165        } else {
166            false
167        }
168    }
169
170    /// Returns the `VersionReq` for the given file
171    pub fn version_requirement(&self, file: impl AsRef<Path>) -> Option<&VersionReq> {
172        self.indices
173            .get(file.as_ref())
174            .and_then(|idx| self.versions.get(idx))
175            .and_then(|v| v.as_ref())
176    }
177
178    /// Returns those library files that will be required as `linkReferences` by the given file
179    ///
    /// This is a preprocessing function that attempts to resolve the libraries that the given
    /// solidity `file` will be required to link against, restricting the list to libraries
    /// that won't be inlined.
183    ///
184    /// See also `parse::SolLibrary`.
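    ///
    /// A minimal usage sketch (the path is a placeholder):
    ///
    /// ```ignore
    /// let (_, edges) = graph.into_sources();
    /// // all library files that "src/Contract.sol" will need as link references
    /// let libs = edges.get_link_references("src/Contract.sol");
    /// ```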
185    pub fn get_link_references(&self, file: impl AsRef<Path>) -> HashSet<&PathBuf> {
186        let mut link_references = HashSet::new();
187        for import in self.all_imported_nodes(self.node_id(file)) {
188            let data = &self.data[&import];
189            if data.has_link_references() {
190                link_references.insert(&self.rev_indices[&import]);
191            }
192        }
193        link_references
194    }
195}
196
197/// Represents a fully-resolved solidity dependency graph. Each node in the graph
198/// is a file and edges represent dependencies between them.
199/// See also <https://docs.soliditylang.org/en/latest/layout-of-source-files.html?highlight=import#importing-other-source-files>
200#[derive(Debug)]
201pub struct Graph {
202    /// all nodes in the project, a `Node` represents a single file
203    nodes: Vec<Node>,
204    /// relationship of the nodes
205    edges: GraphEdges,
206    /// the root of the project this graph represents
207    #[allow(unused)]
208    root: PathBuf,
209}
210
211impl Graph {
212    /// Print the graph to `StdOut`
213    pub fn print(&self) {
214        self.print_with_options(Default::default())
215    }
216
217    /// Print the graph to `StdOut` using the provided `TreeOptions`
218    pub fn print_with_options(&self, opts: TreeOptions) {
219        let stdout = io::stdout();
220        let mut out = stdout.lock();
221        tree::print(self, &opts, &mut out).expect("failed to write to stdout.")
222    }
223
    /// Returns the list of node indices the given node points to, i.e. its direct imports.
225    pub fn imported_nodes(&self, from: usize) -> &[usize] {
226        self.edges.imported_nodes(from)
227    }
228
229    /// Returns an iterator that yields all imports of a node and all their imports
230    pub fn all_imported_nodes(&self, from: usize) -> impl Iterator<Item = usize> + '_ {
231        self.edges.all_imported_nodes(from)
232    }
233
234    /// Returns `true` if the given node has any outgoing edges.
235    pub(crate) fn has_outgoing_edges(&self, index: usize) -> bool {
236        !self.edges.edges[index].is_empty()
237    }
238
239    /// Returns all the resolved files and their index in the graph
240    pub fn files(&self) -> &HashMap<PathBuf, usize> {
241        &self.edges.indices
242    }
243
244    /// Gets a node by index.
245    ///
246    /// # Panics
247    ///
248    /// if the `index` node id is not included in the graph
249    pub fn node(&self, index: usize) -> &Node {
250        &self.nodes[index]
251    }
252
253    pub(crate) fn display_node(&self, index: usize) -> DisplayNode {
254        DisplayNode { node: self.node(index), root: &self.root }
255    }
256
257    /// Returns an iterator that yields all nodes of the dependency tree that the given node id
258    /// spans, starting with the node itself.
259    ///
260    /// # Panics
261    ///
262    /// if the `start` node id is not included in the graph
263    pub fn node_ids(&self, start: usize) -> impl Iterator<Item = usize> + '_ {
264        NodesIter::new(start, &self.edges)
265    }
266
267    /// Same as `Self::node_ids` but returns the actual `Node`
268    pub fn nodes(&self, start: usize) -> impl Iterator<Item = &Node> + '_ {
269        self.node_ids(start).map(move |idx| self.node(idx))
270    }
271
272    fn split(self) -> (Vec<(PathBuf, Source)>, GraphEdges) {
273        let Graph { nodes, mut edges, .. } = self;
274        // need to move the extracted data to the edges, essentially splitting the node so we have
275        // access to the data at a later stage in the compile pipeline
276        let mut sources = Vec::new();
277        for (idx, node) in nodes.into_iter().enumerate() {
278            let Node { path, source, data } = node;
279            sources.push((path, source));
280            edges.data.insert(idx, data);
281        }
282
283        (sources, edges)
284    }
285
286    /// Consumes the `Graph`, effectively splitting the `nodes` and the `GraphEdges` off and
287    /// returning the `nodes` converted to `Sources`
288    pub fn into_sources(self) -> (Sources, GraphEdges) {
289        let (sources, edges) = self.split();
290        (sources.into_iter().collect(), edges)
291    }
292
293    /// Returns an iterator that yields only those nodes that represent input files.
    /// See `Self::resolve_sources`. This won't yield any resolved library nodes.
296    pub fn input_nodes(&self) -> impl Iterator<Item = &Node> {
297        self.nodes.iter().take(self.edges.num_input_files)
298    }
299
300    /// Returns all files imported by the given file
301    pub fn imports(&self, path: impl AsRef<Path>) -> HashSet<&PathBuf> {
302        self.edges.imports(path)
303    }
304
305    /// Resolves a number of sources within the given config
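    ///
    /// A minimal usage sketch (paths are placeholders; assumes the usual crate-root re-exports):
    ///
    /// ```ignore
    /// use ethers_solc::{Graph, ProjectPathsConfig, Source, Sources};
    ///
    /// let paths = ProjectPathsConfig::hardhat("/path/to/project")?;
    /// // resolve an explicit set of sources instead of everything under the source folder
    /// let mut sources = Sources::new();
    /// sources.insert("contracts/Greeter.sol".into(), Source::read("contracts/Greeter.sol")?);
    /// let graph = Graph::resolve_sources(&paths, sources)?;
    /// ```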
306    pub fn resolve_sources(paths: &ProjectPathsConfig, sources: Sources) -> Result<Graph> {
        /// Checks if the given target path was already resolved; if so, adds its id to the list
        /// of resolved imports. If it hasn't been resolved yet, queues the file for
        /// processing.
310        fn add_node(
311            unresolved: &mut VecDeque<(PathBuf, Node)>,
312            index: &mut HashMap<PathBuf, usize>,
313            resolved_imports: &mut Vec<usize>,
314            target: PathBuf,
315        ) -> Result<()> {
316            if let Some(idx) = index.get(&target).copied() {
317                resolved_imports.push(idx);
318            } else {
319                // imported file is not part of the input files
320                let node = Node::read(&target)?;
321                unresolved.push_back((target.clone(), node));
322                let idx = index.len();
323                index.insert(target, idx);
324                resolved_imports.push(idx);
325            }
326            Ok(())
327        }
328
329        // we start off by reading all input files, which includes all solidity files from the
330        // source and test folder
331        let mut unresolved: VecDeque<(PathBuf, Node)> = sources
332            .into_par_iter()
333            .map(|(path, source)| {
334                let data = SolData::parse(source.as_ref(), &path);
335                (path.clone(), Node { path, source, data })
336            })
337            .collect();
338
339        // identifiers of all resolved files
340        let mut index: HashMap<_, _> =
341            unresolved.iter().enumerate().map(|(idx, (p, _))| (p.clone(), idx)).collect();
342
343        let num_input_files = unresolved.len();
344
345        // contains the files and their dependencies
346        let mut nodes = Vec::with_capacity(unresolved.len());
347        let mut edges = Vec::with_capacity(unresolved.len());
348
349        // tracks additional paths that should be used with `--include-path`, these are libraries
350        // that use absolute imports like `import "src/Contract.sol"`
351        let mut resolved_solc_include_paths = IncludePaths::default();
352
353        // keep track of all unique paths that we failed to resolve to not spam the reporter with
354        // the same path
355        let mut unresolved_imports = HashSet::new();
356
357        // now we need to resolve all imports for the source file and those imported from other
358        // locations
359        while let Some((path, node)) = unresolved.pop_front() {
360            let mut resolved_imports = Vec::with_capacity(node.data.imports.len());
361            // parent directory of the current file
362            let cwd = match path.parent() {
363                Some(inner) => inner,
364                None => continue,
365            };
366
367            for import in node.data.imports.iter() {
368                let import_path = import.data().path();
369                match paths.resolve_import_and_include_paths(
370                    cwd,
371                    import_path,
372                    &mut resolved_solc_include_paths,
373                ) {
374                    Ok(import) => {
375                        add_node(&mut unresolved, &mut index, &mut resolved_imports, import)
376                            .map_err(|err| {
377                                match err {
378                                    err @ SolcError::ResolveCaseSensitiveFileName { .. } |
379                                    err @ SolcError::Resolve(_) => {
380                                        // make the error more helpful by providing additional
381                                        // context
382                                        SolcError::FailedResolveImport(
383                                            Box::new(err),
384                                            node.path.clone(),
385                                            import_path.clone(),
386                                        )
387                                    }
388                                    _ => err,
389                                }
390                            })?
391                    }
392                    Err(err) => {
393                        unresolved_imports.insert((import_path.to_path_buf(), node.path.clone()));
394                        tracing::trace!(
395                            "failed to resolve import component \"{:?}\" for {:?}",
396                            err,
397                            node.path
398                        )
399                    }
400                };
401            }
402
403            nodes.push(node);
404            edges.push(resolved_imports);
405        }
406
407        if !unresolved_imports.is_empty() {
408            // notify on all unresolved imports
409            crate::report::unresolved_imports(
410                &unresolved_imports
411                    .iter()
412                    .map(|(i, f)| (i.as_path(), f.as_path()))
413                    .collect::<Vec<_>>(),
414                &paths.remappings,
415            );
416        }
417
418        let edges = GraphEdges {
419            edges,
420            rev_indices: index.iter().map(|(k, v)| (*v, k.clone())).collect(),
421            indices: index,
422            num_input_files,
423            versions: nodes
424                .iter()
425                .enumerate()
426                .map(|(idx, node)| (idx, node.data.version_req.clone()))
427                .collect(),
428            data: Default::default(),
429            unresolved_imports,
430            resolved_solc_include_paths,
431        };
432        Ok(Graph { nodes, edges, root: paths.root.clone() })
433    }
434
435    /// Resolves the dependencies of a project's source contracts
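    ///
    /// A minimal usage sketch (the project root is a placeholder; assumes the usual crate-root
    /// re-exports):
    ///
    /// ```ignore
    /// use ethers_solc::{Graph, ProjectPathsConfig};
    ///
    /// let paths = ProjectPathsConfig::dapptools("/path/to/project")?;
    /// let graph = Graph::resolve(&paths)?;
    /// // print the resolved dependency tree to stdout
    /// graph.print();
    /// ```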
436    pub fn resolve(paths: &ProjectPathsConfig) -> Result<Graph> {
437        Self::resolve_sources(paths, paths.read_input_files()?)
438    }
439}
440
441#[cfg(all(feature = "svm-solc", not(target_arch = "wasm32")))]
442impl Graph {
443    /// Consumes the nodes of the graph and returns all input files together with their appropriate
444    /// version and the edges of the graph
445    ///
446    /// First we determine the compatible version for each input file (from sources and test folder,
447    /// see `Self::resolve`) and then we add all resolved library imports.
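    ///
    /// Rough shape of the flow (illustrative; `project` is assumed to be a configured
    /// [`crate::Project`]):
    ///
    /// ```ignore
    /// let (versioned_sources, edges) = graph.into_sources_by_version(false)?;
    /// // maps each selected solc installation to its version and the sources to compile with it
    /// let sources_by_solc = versioned_sources.get(&project)?;
    /// ```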
448    pub fn into_sources_by_version(self, offline: bool) -> Result<(VersionedSources, GraphEdges)> {
        /// Inserts the imports of the given node into the sources map.
        ///
        /// Consider the following graph:
        /// `A(<=0.8.10) imports C(>0.4.0)` and `B(0.8.11) imports C(>0.4.0)`,
        /// where `C` is a library import; in this case we assign `C` only to the first input file.
        /// Strictly speaking, it's not required to include imports in the solc `CompilerInput`,
        /// since solc would pick them up on its own, but we add them so that we can create a
        /// corresponding cache entry for them as well. This could still be optimized.
456        fn insert_imports(
457            idx: usize,
458            all_nodes: &mut HashMap<usize, (PathBuf, Source)>,
459            sources: &mut Sources,
460            edges: &[Vec<usize>],
461            processed_sources: &mut HashSet<usize>,
462        ) {
463            // iterate over all dependencies not processed yet
464            for dep in edges[idx].iter().copied() {
                // keep track of processed dependencies; if the dep was already in the set we
                // have processed it before
467                if !processed_sources.insert(dep) {
468                    continue
469                }
470
471                // library import
472                if let Some((path, source)) = all_nodes.get(&dep).cloned() {
473                    sources.insert(path, source);
474                    insert_imports(dep, all_nodes, sources, edges, processed_sources);
475                }
476            }
477        }
478
479        let versioned_nodes = self.get_input_node_versions(offline)?;
480        let (nodes, edges) = self.split();
481
482        let mut versioned_sources = HashMap::with_capacity(versioned_nodes.len());
483
484        let mut all_nodes = nodes.into_iter().enumerate().collect::<HashMap<_, _>>();
485
486        // determine the `Sources` set for each solc version
487        for (version, input_node_indices) in versioned_nodes {
488            let mut sources = Sources::new();
489
490            // all input nodes will be processed
491            let mut processed_sources = input_node_indices.iter().copied().collect();
492
493            // we only process input nodes (from sources, tests for example)
494            for idx in input_node_indices {
                // insert the input node into the sources set
                let (path, source) = all_nodes.get(&idx).cloned().expect("node is present. qed");
497                sources.insert(path, source);
498                insert_imports(
499                    idx,
500                    &mut all_nodes,
501                    &mut sources,
502                    &edges.edges,
503                    &mut processed_sources,
504                );
505            }
506            versioned_sources.insert(version, sources);
507        }
508        Ok((
509            VersionedSources {
510                inner: versioned_sources,
511                offline,
512                resolved_solc_include_paths: edges.resolved_solc_include_paths.clone(),
513            },
514            edges,
515        ))
516    }
517
518    /// Writes the list of imported files into the given formatter:
519    ///
520    /// ```text
521    /// path/to/a.sol (<version>) imports:
522    ///     path/to/b.sol (<version>)
523    ///     path/to/c.sol (<version>)
524    ///     ...
525    /// ```
526    fn format_imports_list<W: std::fmt::Write>(
527        &self,
528        idx: usize,
529        f: &mut W,
530    ) -> std::result::Result<(), std::fmt::Error> {
531        let node = self.node(idx);
532        write!(f, "{} ", utils::source_name(&node.path, &self.root).display())?;
533        node.data.fmt_version(f)?;
534        write!(f, " imports:")?;
535        for dep in self.node_ids(idx).skip(1) {
536            let dep = self.node(dep);
537            write!(f, "\n    {} ", utils::source_name(&dep.path, &self.root).display())?;
538            dep.data.fmt_version(f)?;
539        }
540
541        Ok(())
542    }
543
544    /// Filters incompatible versions from the `candidates`.
545    fn retain_compatible_versions(&self, idx: usize, candidates: &mut Vec<&crate::SolcVersion>) {
546        let nodes: HashSet<_> = self.node_ids(idx).collect();
547        for node in nodes {
548            if let Some(req) = &self.node(node).data.version_req {
549                candidates.retain(|v| req.matches(v.as_ref()));
550            }
551            if candidates.is_empty() {
552                // nothing to filter anymore
553                return
554            }
555        }
556    }
557
558    /// Ensures that all files are compatible with all of their imports.
559    pub fn ensure_compatible_imports(&self, offline: bool) -> Result<()> {
560        self.get_input_node_versions(offline)?;
561        Ok(())
562    }
563
564    /// Returns a map of versions together with the input nodes that are compatible with that
565    /// version.
566    ///
    /// This will essentially do a DFS on all input sources and their transitive imports and
    /// check that all of them can be compiled with the version stated in the input file.
    ///
    /// Returns an error message with __all__ input files that don't have compatible imports.
    ///
    /// This also attempts to prefer local installations over remotely available versions.
    /// If `offline` is set to `true`, then only already installed versions are considered.
574    fn get_input_node_versions(
575        &self,
576        offline: bool,
577    ) -> Result<HashMap<crate::SolcVersion, Vec<usize>>> {
578        use crate::Solc;
579
580        tracing::trace!("resolving input node versions");
        // this is likely called by an application and will eventually be printed, so we don't exit
        // on the first error; instead we gather all errors and return a bundled error message
583        let mut errors = Vec::new();
        // we also don't want duplicate error diagnostics
585        let mut erroneous_nodes = HashSet::with_capacity(self.edges.num_input_files);
586
587        // the sorted list of all versions
588        let all_versions = if offline { Solc::installed_versions() } else { Solc::all_versions() };
589
590        // stores all versions and their nodes that can be compiled
591        let mut versioned_nodes = HashMap::new();
592
593        // stores all files and the versions they're compatible with
594        let mut all_candidates = Vec::with_capacity(self.edges.num_input_files);
595        // walking through the node's dep tree and filtering the versions along the way
596        for idx in 0..self.edges.num_input_files {
597            let mut candidates = all_versions.iter().collect::<Vec<_>>();
598            // remove all incompatible versions from the candidates list by checking the node and
599            // all its imports
600            self.retain_compatible_versions(idx, &mut candidates);
601
602            if candidates.is_empty() && !erroneous_nodes.contains(&idx) {
603                // check if the version is even valid
604                let node = self.node(idx);
605                if let Err(version_err) = node.check_available_version(&all_versions, offline) {
606                    let f = utils::source_name(&node.path, &self.root).display();
607                    errors.push(format!("Encountered invalid solc version in {f}: {version_err}"));
608                } else {
609                    let mut msg = String::new();
610                    self.format_imports_list(idx, &mut msg).unwrap();
611                    errors.push(format!("Found incompatible Solidity versions:\n{msg}"));
612                }
613
614                erroneous_nodes.insert(idx);
615            } else {
616                // found viable candidates, pick the most recent version that's already installed
617                let candidate =
618                    if let Some(pos) = candidates.iter().rposition(|v| v.is_installed()) {
619                        candidates[pos]
620                    } else {
621                        candidates.last().expect("not empty; qed.")
622                    }
623                    .clone();
624
625                // also store all possible candidates to optimize the set
626                all_candidates.push((idx, candidates.into_iter().collect::<HashSet<_>>()));
627
628                versioned_nodes.entry(candidate).or_insert_with(|| Vec::with_capacity(1)).push(idx);
629            }
630        }
631
632        // detected multiple versions but there might still exist a single version that satisfies
633        // all sources
634        if versioned_nodes.len() > 1 {
635            versioned_nodes = Self::resolve_multiple_versions(all_candidates);
636        }
637
638        if versioned_nodes.len() == 1 {
639            tracing::trace!(
640                "found exact solc version for all sources  \"{}\"",
641                versioned_nodes.keys().next().unwrap()
642            );
643        }
644
645        if errors.is_empty() {
646            tracing::trace!(
647                "resolved {} versions {:?}",
648                versioned_nodes.len(),
649                versioned_nodes.keys()
650            );
651            Ok(versioned_nodes)
652        } else {
653            tracing::error!("failed to resolve versions");
654            Err(SolcError::msg(errors.join("\n")))
655        }
656    }
657
    /// Tries to find the "best" assignment of versions to nodes, see [Solc version
    /// auto-detection](#solc-version-auto-detection).
660    ///
661    /// This is a bit inefficient but is fine, the max. number of versions is ~80 and there's
662    /// a high chance that the number of source files is <50, even for larger projects.
663    fn resolve_multiple_versions(
664        all_candidates: Vec<(usize, HashSet<&crate::SolcVersion>)>,
665    ) -> HashMap<crate::SolcVersion, Vec<usize>> {
        // returns the intersection as a sorted list of versions
667        fn intersection<'a>(
668            mut sets: Vec<&HashSet<&'a crate::SolcVersion>>,
669        ) -> Vec<&'a crate::SolcVersion> {
670            if sets.is_empty() {
671                return Vec::new()
672            }
673
674            let mut result = sets.pop().cloned().expect("not empty; qed.");
675            if !sets.is_empty() {
676                result.retain(|item| sets.iter().all(|set| set.contains(item)));
677            }
678
679            let mut v = result.into_iter().collect::<Vec<_>>();
680            v.sort_unstable();
681            v
682        }
683
        /// Removes and returns the highest version that is installed.
        /// If the candidate set only contains uninstalled versions, then this removes and returns
        /// the highest uninstalled version.
687        fn remove_candidate(candidates: &mut Vec<&crate::SolcVersion>) -> crate::SolcVersion {
688            debug_assert!(!candidates.is_empty());
689
690            if let Some(pos) = candidates.iter().rposition(|v| v.is_installed()) {
691                candidates.remove(pos)
692            } else {
693                candidates.pop().expect("not empty; qed.")
694            }
695            .clone()
696        }
697
698        let all_sets = all_candidates.iter().map(|(_, versions)| versions).collect();
699
700        // find all versions that satisfy all nodes
701        let mut intersection = intersection(all_sets);
702        if !intersection.is_empty() {
703            let exact_version = remove_candidate(&mut intersection);
704            let all_nodes = all_candidates.into_iter().map(|(node, _)| node).collect();
705            tracing::trace!(
706                "resolved solc version compatible with all sources  \"{}\"",
707                exact_version
708            );
709            return HashMap::from([(exact_version, all_nodes)])
710        }
711
712        // no version satisfies all nodes
713        let mut versioned_nodes: HashMap<crate::SolcVersion, Vec<usize>> = HashMap::new();
714
715        // try to minimize the set of versions, this is guaranteed to lead to `versioned_nodes.len()
716        // > 1` as no solc version exists that can satisfy all sources
717        for (node, versions) in all_candidates {
718            // need to sort them again
719            let mut versions = versions.into_iter().collect::<Vec<_>>();
720            versions.sort_unstable();
721
722            let candidate =
723                if let Some(idx) = versions.iter().rposition(|v| versioned_nodes.contains_key(v)) {
724                    // use a version that's already in the set
725                    versions.remove(idx).clone()
726                } else {
727                    // use the highest version otherwise
728                    remove_candidate(&mut versions)
729                };
730
731            versioned_nodes.entry(candidate).or_insert_with(|| Vec::with_capacity(1)).push(node);
732        }
733
734        tracing::trace!(
735            "no solc version can satisfy all source files, resolved multiple versions  \"{:?}\"",
736            versioned_nodes.keys()
737        );
738
739        versioned_nodes
740    }
741}
742
743/// An iterator over a node and its dependencies
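///
/// Yields the start node first and then walks its imports breadth-first. Typically obtained via
/// [`Graph::node_ids`] or [`GraphEdges::all_imported_nodes`], e.g. (illustrative):
///
/// ```ignore
/// // ids of all nodes reachable from input file 0, including the file itself
/// let reachable: Vec<usize> = graph.node_ids(0).collect();
/// ```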
744#[derive(Debug)]
745pub struct NodesIter<'a> {
746    /// stack of nodes
747    stack: VecDeque<usize>,
748    visited: HashSet<usize>,
749    graph: &'a GraphEdges,
750}
751
752impl<'a> NodesIter<'a> {
753    fn new(start: usize, graph: &'a GraphEdges) -> Self {
754        Self { stack: VecDeque::from([start]), visited: HashSet::new(), graph }
755    }
756}
757
758impl<'a> Iterator for NodesIter<'a> {
759    type Item = usize;
760    fn next(&mut self) -> Option<Self::Item> {
761        let node = self.stack.pop_front()?;
762
763        if self.visited.insert(node) {
764            // push the node's direct dependencies to the stack if we haven't visited it already
765            self.stack.extend(self.graph.imported_nodes(node).iter().copied());
766        }
767        Some(node)
768    }
769}
770
771/// Container type for solc versions and their compatible sources
772#[cfg(all(feature = "svm-solc", not(target_arch = "wasm32")))]
773#[derive(Debug)]
774pub struct VersionedSources {
775    resolved_solc_include_paths: IncludePaths,
776    inner: HashMap<crate::SolcVersion, Sources>,
777    offline: bool,
778}
779
780#[cfg(all(feature = "svm-solc", not(target_arch = "wasm32")))]
781impl VersionedSources {
782    /// Resolves or installs the corresponding `Solc` installation.
783    ///
    /// This will also configure the following solc arguments:
785    ///    - `allowed_paths`
786    ///    - `base_path`
787    pub fn get<T: crate::ArtifactOutput>(
788        self,
789        project: &crate::Project<T>,
790    ) -> Result<std::collections::BTreeMap<crate::Solc, (semver::Version, Sources)>> {
791        use crate::Solc;
792        // we take the installer lock here to ensure installation checking is done in sync
793        #[cfg(any(test, feature = "tests"))]
794        let _lock = crate::compile::take_solc_installer_lock();
795
796        let mut sources_by_version = std::collections::BTreeMap::new();
797        for (version, sources) in self.inner {
798            let solc = if !version.is_installed() {
799                if self.offline {
800                    return Err(SolcError::msg(format!(
801                        "missing solc \"{version}\" installation in offline mode"
802                    )))
803                } else {
804                    // install missing solc
805                    Solc::blocking_install(version.as_ref())?
806                }
807            } else {
808                // find installed svm
809                Solc::find_svm_installed_version(version.to_string())?.ok_or_else(|| {
810                    SolcError::msg(format!("solc \"{version}\" should have been installed"))
811                })?
812            };
813
814            if self.offline {
815                tracing::trace!(
816                    "skip verifying solc checksum for {} in offline mode",
817                    solc.solc.display()
818                );
819            } else {
820                tracing::trace!("verifying solc checksum for {}", solc.solc.display());
821                if let Err(err) = solc.verify_checksum() {
822                    tracing::trace!(?err, "corrupted solc version, redownloading  \"{}\"", version);
823                    Solc::blocking_install(version.as_ref())?;
824                    tracing::trace!("reinstalled solc: \"{}\"", version);
825                }
826            }
827
828            let version = solc.version()?;
829
830            // this will configure the `Solc` executable and its arguments
831            let solc = project.configure_solc_with_version(
832                solc,
833                Some(version.clone()),
834                self.resolved_solc_include_paths.clone(),
835            );
836            sources_by_version.insert(solc, (version, sources));
837        }
838        Ok(sources_by_version)
839    }
840}
841
842#[derive(Debug)]
843pub struct Node {
    /// path of the solidity file
845    path: PathBuf,
846    /// content of the solidity file
847    source: Source,
848    /// parsed data
849    data: SolData,
850}
851
852impl Node {
853    /// Reads the content of the file and returns a [Node] containing relevant information
854    pub fn read(file: impl AsRef<Path>) -> Result<Self> {
855        let file = file.as_ref();
856        let source = Source::read(file).map_err(|err| {
857            let exists = err.path().exists();
858            if !exists && err.path().is_symlink() {
859                SolcError::ResolveBadSymlink(err)
860            } else {
                // This is an additional check useful on OSes that have case-sensitive paths, see also
                // <https://docs.soliditylang.org/en/v0.8.17/path-resolution.html#import-callback>
862                if !exists {
863                    // check if there exists a file with different case
864                    if let Some(existing_file) = find_case_sensitive_existing_file(file) {
865                        SolcError::ResolveCaseSensitiveFileName { error: err, existing_file }
866                    } else {
867                        SolcError::Resolve(err)
868                    }
869                } else {
870                    SolcError::Resolve(err)
871                }
872            }
873        })?;
874        let data = SolData::parse(source.as_ref(), file);
875        Ok(Self { path: file.to_path_buf(), source, data })
876    }
877
878    pub fn content(&self) -> &str {
879        &self.source.content
880    }
881
882    pub fn imports(&self) -> &Vec<SolDataUnit<SolImport>> {
883        &self.data.imports
884    }
885
886    pub fn version(&self) -> &Option<SolDataUnit<String>> {
887        &self.data.version
888    }
889
890    pub fn experimental(&self) -> &Option<SolDataUnit<String>> {
891        &self.data.experimental
892    }
893
894    pub fn license(&self) -> &Option<SolDataUnit<String>> {
895        &self.data.license
896    }
897
898    pub fn unpack(&self) -> (&PathBuf, &Source) {
899        (&self.path, &self.source)
900    }
901
902    /// Checks that the file's version is even available.
903    ///
    /// This returns an error if the file's version is invalid semver, or is not yet available,
    /// e.g. `0.8.20` when the highest available version is `0.8.19`.
906    #[cfg(all(feature = "svm-solc", not(target_arch = "wasm32")))]
907    fn check_available_version(
908        &self,
909        all_versions: &[crate::SolcVersion],
910        offline: bool,
911    ) -> std::result::Result<(), SourceVersionError> {
912        let Some(data) = &self.data.version else { return Ok(()) };
913        let v = data.data();
914
915        let req = crate::Solc::version_req(v)
916            .map_err(|err| SourceVersionError::InvalidVersion(v.to_string(), err))?;
917
918        if !all_versions.iter().any(|v| req.matches(v.as_ref())) {
919            return if offline {
920                Err(SourceVersionError::NoMatchingVersionOffline(req))
921            } else {
922                Err(SourceVersionError::NoMatchingVersion(req))
923            }
924        }
925
926        Ok(())
927    }
928}
929
930/// Helper type for formatting a node
931pub(crate) struct DisplayNode<'a> {
932    node: &'a Node,
933    root: &'a PathBuf,
934}
935
936impl<'a> fmt::Display for DisplayNode<'a> {
937    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
938        let path = utils::source_name(&self.node.path, self.root);
939        write!(f, "{}", path.display())?;
940        if let Some(ref v) = self.node.data.version {
941            write!(f, " {}", v.data())?;
942        }
943        Ok(())
944    }
945}
946
947/// Errors thrown when checking the solc version of a file
948#[derive(Debug, thiserror::Error)]
949#[allow(unused)]
950enum SourceVersionError {
951    #[error("Failed to parse solidity version {0}: {1}")]
952    InvalidVersion(String, SolcError),
953    #[error("No solc version exists that matches the version requirement: {0}")]
954    NoMatchingVersion(VersionReq),
955    #[error("No solc version installed that matches the version requirement: {0}")]
956    NoMatchingVersionOffline(VersionReq),
957}
958
959#[cfg(test)]
960mod tests {
961    use super::*;
962
963    #[test]
964    fn can_resolve_hardhat_dependency_graph() {
965        let root = PathBuf::from(env!("CARGO_MANIFEST_DIR")).join("test-data/hardhat-sample");
966        let paths = ProjectPathsConfig::hardhat(root).unwrap();
967
968        let graph = Graph::resolve(&paths).unwrap();
969
970        assert_eq!(graph.edges.num_input_files, 1);
971        assert_eq!(graph.files().len(), 2);
972
973        assert_eq!(
974            graph.files().clone(),
975            HashMap::from([
976                (paths.sources.join("Greeter.sol"), 0),
977                (paths.root.join("node_modules/hardhat/console.sol"), 1),
978            ])
979        );
980    }
981
982    #[test]
983    fn can_resolve_dapp_dependency_graph() {
984        let root = PathBuf::from(env!("CARGO_MANIFEST_DIR")).join("test-data/dapp-sample");
985        let paths = ProjectPathsConfig::dapptools(root).unwrap();
986
987        let graph = Graph::resolve(&paths).unwrap();
988
989        assert_eq!(graph.edges.num_input_files, 2);
990        assert_eq!(graph.files().len(), 3);
991        assert_eq!(
992            graph.files().clone(),
993            HashMap::from([
994                (paths.sources.join("Dapp.sol"), 0),
995                (paths.sources.join("Dapp.t.sol"), 1),
996                (paths.root.join("lib/ds-test/src/test.sol"), 2),
997            ])
998        );
999
1000        let dapp_test = graph.node(1);
1001        assert_eq!(dapp_test.path, paths.sources.join("Dapp.t.sol"));
1002        assert_eq!(
1003            dapp_test.data.imports.iter().map(|i| i.data().path()).collect::<Vec<&PathBuf>>(),
1004            vec![&PathBuf::from("ds-test/test.sol"), &PathBuf::from("./Dapp.sol")]
1005        );
1006        assert_eq!(graph.imported_nodes(1).to_vec(), vec![2, 0]);
1007    }
1008
1009    #[test]
1010    #[cfg(not(target_os = "windows"))]
1011    fn can_print_dapp_sample_graph() {
1012        let root = PathBuf::from(env!("CARGO_MANIFEST_DIR")).join("test-data/dapp-sample");
1013        let paths = ProjectPathsConfig::dapptools(root).unwrap();
1014        let graph = Graph::resolve(&paths).unwrap();
1015        let mut out = Vec::<u8>::new();
1016        tree::print(&graph, &Default::default(), &mut out).unwrap();
1017
1018        assert_eq!(
1019            "
1020src/Dapp.sol >=0.6.6
1021src/Dapp.t.sol >=0.6.6
1022├── lib/ds-test/src/test.sol >=0.4.23
1023└── src/Dapp.sol >=0.6.6
1024"
1025            .trim_start()
1026            .as_bytes()
1027            .to_vec(),
1028            out
1029        );
1030    }
1031
1032    #[test]
1033    #[cfg(not(target_os = "windows"))]
1034    fn can_print_hardhat_sample_graph() {
1035        let root = PathBuf::from(env!("CARGO_MANIFEST_DIR")).join("test-data/hardhat-sample");
1036        let paths = ProjectPathsConfig::hardhat(root).unwrap();
1037        let graph = Graph::resolve(&paths).unwrap();
1038        let mut out = Vec::<u8>::new();
1039        tree::print(&graph, &Default::default(), &mut out).unwrap();
1040        assert_eq!(
1041            "
1042contracts/Greeter.sol >=0.6.0
1043└── node_modules/hardhat/console.sol >= 0.4.22 <0.9.0
1044"
1045            .trim_start()
1046            .as_bytes()
1047            .to_vec(),
1048            out
1049        );
1050    }
1051}