1use crate::{
3 artifacts::Sources,
4 config::{ProjectPaths, SolcConfig},
5 error::{Result, SolcError},
6 filter::{FilteredSource, FilteredSourceInfo, FilteredSources},
7 resolver::GraphEdges,
8 utils, ArtifactFile, ArtifactOutput, Artifacts, ArtifactsMap, OutputContext, Project,
9 ProjectPathsConfig, Source,
10};
11use semver::Version;
12use serde::{de::DeserializeOwned, Deserialize, Serialize};
13use std::{
14 collections::{
15 btree_map::{BTreeMap, Entry},
16 hash_map, BTreeSet, HashMap, HashSet,
17 },
18 fs::{self},
19 io::Write,
20 path::{Path, PathBuf},
21 time::{Duration, UNIX_EPOCH},
22};
23
/// The format version string embedded in every cache file's `_format` field.
const ETHERS_FORMAT_VERSION: &str = "ethers-rs-sol-cache-3";

/// The file name of the default solidity files cache file.
pub const SOLIDITY_FILES_CACHE_FILENAME: &str = "solidity-files-cache.json";
33
/// A cache file in the format of `solidity-files-cache.json`.
///
/// Maps each cached source file to a [`CacheEntry`] that tracks the file's
/// metadata and the artifacts compiled from it.
#[derive(Clone, Debug, Eq, PartialEq, Serialize, Deserialize)]
pub struct SolFilesCache {
    /// Cache format identifier; see [`ETHERS_FORMAT_VERSION`].
    #[serde(rename = "_format")]
    pub format: String,
    /// The project paths the cache was created with.
    pub paths: ProjectPaths,
    /// All cached source files and their entries.
    pub files: BTreeMap<PathBuf, CacheEntry>,
}
43
impl SolFilesCache {
    /// Creates a new cache with the given entries and project paths, tagged
    /// with the current [`ETHERS_FORMAT_VERSION`].
    pub fn new(files: BTreeMap<PathBuf, CacheEntry>, paths: ProjectPaths) -> Self {
        Self { format: ETHERS_FORMAT_VERSION.to_string(), files, paths }
    }

    /// Returns `true` if the cache contains no entries.
    pub fn is_empty(&self) -> bool {
        self.files.is_empty()
    }

    /// Returns the number of cached source files.
    pub fn len(&self) -> usize {
        self.files.len()
    }

    /// Returns the total number of artifact files tracked across all entries.
    pub fn artifacts_len(&self) -> usize {
        self.entries().map(|entry| entry.artifacts().count()).sum()
    }

    /// Iterates over all cache entries.
    pub fn entries(&self) -> impl Iterator<Item = &CacheEntry> {
        self.files.values()
    }

    /// Returns the cache entry for the given source file, if any.
    pub fn entry(&self, file: impl AsRef<Path>) -> Option<&CacheEntry> {
        self.files.get(file.as_ref())
    }

    /// Returns a mutable reference to the cache entry for the given source
    /// file, if any.
    pub fn entry_mut(&mut self, file: impl AsRef<Path>) -> Option<&mut CacheEntry> {
        self.files.get_mut(file.as_ref())
    }

    /// Reads and deserializes the cache JSON file at `path`.
    ///
    /// # Errors
    ///
    /// Fails if the file does not exist or cannot be deserialized.
    #[tracing::instrument(skip_all, name = "sol-files-cache::read")]
    pub fn read(path: impl AsRef<Path>) -> Result<Self> {
        let path = path.as_ref();
        tracing::trace!("reading solfiles cache at {}", path.display());
        let cache: SolFilesCache = utils::read_json_file(path)?;
        tracing::trace!("read cache \"{}\" with {} entries", cache.format, cache.files.len());
        Ok(cache)
    }

    /// Reads the cache and converts stored paths to absolute form: entry keys
    /// are joined with the project root, artifact paths with the artifacts dir.
    pub fn read_joined(paths: &ProjectPathsConfig) -> Result<Self> {
        let mut cache = SolFilesCache::read(&paths.cache)?;
        cache.join_entries(&paths.root).join_artifacts_files(&paths.artifacts);
        Ok(cache)
    }

    /// Writes the cache as pretty-printed JSON to `path`, creating missing
    /// parent directories first.
    pub fn write(&self, path: impl AsRef<Path>) -> Result<()> {
        let path = path.as_ref();
        utils::create_parent_dir_all(path)?;
        let file = fs::File::create(path).map_err(|err| SolcError::io(err, path))?;
        tracing::trace!(
            "writing cache with {} entries to json file: \"{}\"",
            self.len(),
            path.display()
        );
        // buffer the many small writes `to_writer_pretty` produces
        let mut writer = std::io::BufWriter::with_capacity(1024 * 256, file);
        serde_json::to_writer_pretty(&mut writer, self)?;
        // flush explicitly so write errors surface instead of being swallowed on drop
        writer.flush().map_err(|e| SolcError::io(e, path))?;
        tracing::trace!("cache file located: \"{}\"", path.display());
        Ok(())
    }

    /// Prefixes all entry keys with `root`, turning relative keys absolute.
    pub fn join_entries(&mut self, root: impl AsRef<Path>) -> &mut Self {
        let root = root.as_ref();
        self.files = std::mem::take(&mut self.files)
            .into_iter()
            .map(|(path, entry)| (root.join(path), entry))
            .collect();
        self
    }

    /// Strips the `base` prefix from all entry keys; keys not under `base`
    /// are left unchanged.
    pub fn strip_entries_prefix(&mut self, base: impl AsRef<Path>) -> &mut Self {
        let base = base.as_ref();
        self.files = std::mem::take(&mut self.files)
            .into_iter()
            .map(|(path, entry)| (path.strip_prefix(base).map(Into::into).unwrap_or(path), entry))
            .collect();
        self
    }

    /// Prefixes the artifact file paths of every entry with `base`.
    pub fn join_artifacts_files(&mut self, base: impl AsRef<Path>) -> &mut Self {
        let base = base.as_ref();
        self.files.values_mut().for_each(|entry| entry.join_artifacts_files(base));
        self
    }

    /// Strips the `base` prefix from the artifact file paths of every entry.
    pub fn strip_artifact_files_prefixes(&mut self, base: impl AsRef<Path>) -> &mut Self {
        let base = base.as_ref();
        self.files.values_mut().for_each(|entry| entry.strip_artifact_files_prefixes(base));
        self
    }

    /// Removes entries whose source file no longer exists on disk.
    pub fn remove_missing_files(&mut self) {
        tracing::trace!("remove non existing files from cache");
        self.files.retain(|file, _| {
            let exists = file.exists();
            if !exists {
                tracing::trace!("remove {} from cache", file.display());
            }
            exists
        })
    }

    /// Returns `true` if every artifact file of every entry exists on disk.
    pub fn all_artifacts_exist(&self) -> bool {
        self.files.values().all(|entry| entry.all_artifacts_exist())
    }

    /// Returns the cache with all entry keys rewritten to their source name
    /// relative to `base` (see `utils::source_name`).
    pub fn with_stripped_file_prefixes(mut self, base: impl AsRef<Path>) -> Self {
        let base = base.as_ref();
        self.files = self
            .files
            .into_iter()
            .map(|(f, e)| (utils::source_name(&f, base).to_path_buf(), e))
            .collect();
        self
    }

    /// Looks up the cached artifact path for `contract_name` in the entry of
    /// `contract_file`, if both exist.
    pub fn find_artifact_path(
        &self,
        contract_file: impl AsRef<Path>,
        contract_name: impl AsRef<str>,
    ) -> Option<&PathBuf> {
        let entry = self.entry(contract_file)?;
        entry.find_artifact_path(contract_name)
    }

    /// Reads and deserializes the artifact of `contract_name` compiled from
    /// `contract_file`.
    ///
    /// # Errors
    ///
    /// Returns [`SolcError::ArtifactNotFound`] if no artifact path is cached,
    /// or an I/O/deserialization error if reading the file fails.
    pub fn read_artifact<Artifact: DeserializeOwned>(
        &self,
        contract_file: impl AsRef<Path>,
        contract_name: impl AsRef<str>,
    ) -> Result<Artifact> {
        let contract_file = contract_file.as_ref();
        let contract_name = contract_name.as_ref();

        let artifact_path =
            self.find_artifact_path(contract_file, contract_name).ok_or_else(|| {
                SolcError::ArtifactNotFound(contract_file.to_path_buf(), contract_name.to_string())
            })?;

        utils::read_json_file(artifact_path)
    }

    /// Reads all cached artifacts from disk, deserializing files in parallel
    /// via rayon.
    pub fn read_artifacts<Artifact: DeserializeOwned + Send + Sync>(
        &self,
    ) -> Result<Artifacts<Artifact>> {
        use rayon::prelude::*;

        let artifacts = self
            .files
            .par_iter()
            .map(|(file, entry)| {
                let file_name = format!("{}", file.display());
                entry.read_artifact_files().map(|files| (file_name, files))
            })
            .collect::<Result<ArtifactsMap<_>>>()?;
        Ok(Artifacts(artifacts))
    }

    /// Retains only entries matching the given `(file, versions)` pairs.
    ///
    /// Entries not listed in `files` are removed; remaining entries drop all
    /// artifacts whose version is not listed for that file. Entries without
    /// any artifacts are always kept.
    pub fn retain<'a, I, V>(&mut self, files: I)
    where
        I: IntoIterator<Item = (&'a Path, V)>,
        V: IntoIterator<Item = &'a Version>,
    {
        let mut files: HashMap<_, _> = files.into_iter().collect();

        self.files.retain(|file, entry| {
            if entry.artifacts.is_empty() {
                // keep entries that registered no artifacts
                // NOTE(review): presumably sources that produced no contracts — confirm
                return true
            }

            if let Some(versions) = files.remove(file.as_path()) {
                entry.retain_versions(versions);
            } else {
                return false
            }
            !entry.artifacts.is_empty()
        });
    }

    /// Merges the given entries into the cache; for files already present the
    /// artifacts are merged instead of replaced.
    pub fn extend<I>(&mut self, entries: I)
    where
        I: IntoIterator<Item = (PathBuf, CacheEntry)>,
    {
        for (file, entry) in entries.into_iter() {
            match self.files.entry(file) {
                Entry::Vacant(e) => {
                    e.insert(entry);
                }
                Entry::Occupied(mut other) => {
                    other.get_mut().merge_artifacts(entry);
                }
            }
        }
    }
}
368
#[cfg(feature = "async")]
impl SolFilesCache {
    /// Asynchronously reads and deserializes the cache file at `path`.
    pub async fn async_read(path: impl AsRef<Path>) -> Result<Self> {
        let path = path.as_ref();
        match tokio::fs::read_to_string(path).await {
            Ok(contents) => Ok(serde_json::from_str(&contents)?),
            Err(err) => Err(SolcError::io(err, path)),
        }
    }

    /// Asynchronously serializes the cache as pretty-printed JSON and writes
    /// it to `path`.
    pub async fn async_write(&self, path: impl AsRef<Path>) -> Result<()> {
        let json = serde_json::to_vec_pretty(self)?;
        tokio::fs::write(path.as_ref(), json)
            .await
            .map_err(|err| SolcError::io(err, path.as_ref()))
    }
}
385
386impl Default for SolFilesCache {
387 fn default() -> Self {
388 SolFilesCache {
389 format: ETHERS_FORMAT_VERSION.to_string(),
390 files: Default::default(),
391 paths: Default::default(),
392 }
393 }
394}
395
396impl<'a> From<&'a ProjectPathsConfig> for SolFilesCache {
397 fn from(config: &'a ProjectPathsConfig) -> Self {
398 let paths = config.paths_relative();
399 SolFilesCache::new(Default::default(), paths)
400 }
401}
402
/// A cache entry for a single source file.
#[derive(Clone, Debug, Eq, PartialEq, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct CacheEntry {
    /// Last modification time of the source file, in milliseconds since the
    /// unix epoch (see [`CacheEntry::read_last_modification_date`]).
    pub last_modification_date: u64,
    /// Content hash of the source file, used for dirty checking.
    pub content_hash: String,
    /// The file's source name (path relative to the project root).
    pub source_name: PathBuf,
    /// The solc configuration the file was compiled with.
    pub solc_config: SolcConfig,
    /// Source names of all files this file imports.
    pub imports: BTreeSet<PathBuf>,
    /// The version requirement derived for this file, if any.
    pub version_requirement: Option<String>,
    /// Maps artifact (contract) name to its artifact file path, per compiler
    /// version the contract was compiled with.
    pub artifacts: BTreeMap<String, BTreeMap<Version, PathBuf>>,
}
436
impl CacheEntry {
    /// Returns the last modification time as a [`Duration`] since the unix epoch.
    pub fn last_modified(&self) -> Duration {
        Duration::from_millis(self.last_modification_date)
    }

    /// Returns the artifact path of the first (lowest) cached version for
    /// `contract_name`, if any.
    pub fn find_artifact_path(&self, contract_name: impl AsRef<str>) -> Option<&PathBuf> {
        self.artifacts.get(contract_name.as_ref())?.iter().next().map(|(_, p)| p)
    }

    /// Reads the file's last modification time from its filesystem metadata,
    /// in milliseconds since the unix epoch.
    ///
    /// # Errors
    ///
    /// Fails if the metadata or modification time can't be read.
    pub fn read_last_modification_date(file: impl AsRef<Path>) -> Result<u64> {
        let file = file.as_ref();
        let last_modification_date = fs::metadata(file)
            .map_err(|err| SolcError::io(err, file.to_path_buf()))?
            .modified()
            .map_err(|err| SolcError::io(err, file.to_path_buf()))?
            .duration_since(UNIX_EPOCH)
            .map_err(SolcError::msg)?
            .as_millis() as u64;
        Ok(last_modification_date)
    }

    /// Reads and deserializes all artifact files tracked by this entry,
    /// grouped by artifact name.
    fn read_artifact_files<Artifact: DeserializeOwned>(
        &self,
    ) -> Result<BTreeMap<String, Vec<ArtifactFile<Artifact>>>> {
        let mut artifacts = BTreeMap::new();
        for (artifact_name, versioned_files) in self.artifacts.iter() {
            let mut files = Vec::with_capacity(versioned_files.len());
            for (version, file) in versioned_files {
                let artifact: Artifact = utils::read_json_file(file)?;
                files.push(ArtifactFile { artifact, file: file.clone(), version: version.clone() });
            }
            artifacts.insert(artifact_name.clone(), files);
        }
        Ok(artifacts)
    }

    /// Records the `(version, path)` pairs of the given artifacts under their
    /// artifact names; names with an empty artifact list are skipped.
    pub(crate) fn insert_artifacts<'a, I, T: 'a>(&mut self, artifacts: I)
    where
        I: IntoIterator<Item = (&'a String, Vec<&'a ArtifactFile<T>>)>,
    {
        for (name, artifacts) in artifacts.into_iter().filter(|(_, a)| !a.is_empty()) {
            let entries: BTreeMap<_, _> = artifacts
                .into_iter()
                .map(|artifact| (artifact.version.clone(), artifact.file.clone()))
                .collect();
            self.artifacts.insert(name.clone(), entries);
        }
    }

    /// Merges `other`'s artifacts into this entry; when both entries track the
    /// same artifact name, the per-version maps are merged.
    fn merge_artifacts(&mut self, other: CacheEntry) {
        for (name, artifacts) in other.artifacts {
            match self.artifacts.entry(name) {
                Entry::Vacant(entry) => {
                    entry.insert(artifacts);
                }
                Entry::Occupied(mut entry) => {
                    entry.get_mut().extend(artifacts);
                }
            }
        }
    }

    /// Keeps only artifacts compiled with one of the given versions, dropping
    /// artifact names whose version map ends up empty.
    pub fn retain_versions<'a, I>(&mut self, versions: I)
    where
        I: IntoIterator<Item = &'a Version>,
    {
        let versions = versions.into_iter().collect::<HashSet<_>>();
        self.artifacts.retain(|_, artifacts| {
            artifacts.retain(|version, _| versions.contains(version));
            !artifacts.is_empty()
        })
    }

    /// Returns `true` if any artifact of this entry was compiled with the
    /// given version.
    pub fn contains_version(&self, version: &Version) -> bool {
        self.artifacts_versions().any(|(v, _)| v == version)
    }

    /// Iterates over all `(version, artifact path)` pairs of this entry.
    pub fn artifacts_versions(&self) -> impl Iterator<Item = (&Version, &PathBuf)> {
        self.artifacts.values().flatten()
    }

    /// Returns the artifact path of `contract` for exactly `version`, if cached.
    pub fn find_artifact(&self, contract: &str, version: &Version) -> Option<&PathBuf> {
        self.artifacts.get(contract).and_then(|files| files.get(version))
    }

    /// Iterates over the artifact paths compiled with the given version.
    pub fn artifacts_for_version<'a>(
        &'a self,
        version: &'a Version,
    ) -> impl Iterator<Item = &'a PathBuf> + 'a {
        self.artifacts_versions().filter_map(move |(ver, file)| (ver == version).then_some(file))
    }

    /// Iterates over all artifact file paths of this entry.
    pub fn artifacts(&self) -> impl Iterator<Item = &PathBuf> {
        self.artifacts.values().flat_map(BTreeMap::values)
    }

    /// Mutable iterator over all artifact file paths of this entry.
    pub fn artifacts_mut(&mut self) -> impl Iterator<Item = &mut PathBuf> {
        self.artifacts.values_mut().flat_map(BTreeMap::values_mut)
    }

    /// Returns `true` if every artifact file of this entry exists on disk.
    pub fn all_artifacts_exist(&self) -> bool {
        self.artifacts().all(|p| p.exists())
    }

    /// Prefixes all artifact file paths with `base`.
    pub fn join_artifacts_files(&mut self, base: impl AsRef<Path>) {
        let base = base.as_ref();
        self.artifacts_mut().for_each(|p| *p = base.join(&*p))
    }

    /// Strips the `base` prefix from all artifact file paths; paths not under
    /// `base` are left unchanged.
    pub fn strip_artifact_files_prefixes(&mut self, base: impl AsRef<Path>) {
        let base = base.as_ref();
        self.artifacts_mut().for_each(|p| {
            if let Ok(rem) = p.strip_prefix(base) {
                *p = rem.to_path_buf();
            }
        })
    }
}
580
/// Bookkeeping state used while determining which files need to be recompiled
/// and which cached artifacts can be reused.
#[derive(Debug)]
pub(crate) struct ArtifactsCacheInner<'a, T: ArtifactOutput> {
    /// The file cache that was read from disk (or freshly created).
    pub cache: SolFilesCache,
    /// All artifacts that were read from the existing cache.
    pub cached_artifacts: Artifacts<T::Artifact>,
    /// The import graph of all sources.
    pub edges: GraphEdges,
    /// The project this cache belongs to.
    pub project: &'a Project<T>,
    /// Sources selected for this compile run, with the versions each compiles
    /// under.
    pub filtered: HashMap<PathBuf, (Source, HashSet<Version>)>,
    /// Freshly created cache entries for dirty files, inserted into the cache
    /// when the run is consumed.
    pub dirty_source_files: HashMap<PathBuf, (CacheEntry, HashSet<Version>)>,
    /// Content hashes of all sources, keyed by file path.
    pub content_hashes: HashMap<PathBuf, String>,
}
613
614impl<'a, T: ArtifactOutput> ArtifactsCacheInner<'a, T> {
615 fn create_cache_entry(&self, file: &Path, source: &Source) -> CacheEntry {
617 let imports = self
618 .edges
619 .imports(file)
620 .into_iter()
621 .map(|import| utils::source_name(import, self.project.root()).to_path_buf())
622 .collect();
623
624 let entry = CacheEntry {
625 last_modification_date: CacheEntry::read_last_modification_date(file)
626 .unwrap_or_default(),
627 content_hash: source.content_hash(),
628 source_name: utils::source_name(file, self.project.root()).into(),
629 solc_config: self.project.solc_config.clone(),
630 imports,
631 version_requirement: self.edges.version_requirement(file).map(|v| v.to_string()),
632 artifacts: Default::default(),
634 };
635
636 entry
637 }
638
639 fn insert_new_cache_entry(&mut self, file: &Path, source: &Source, version: Version) {
643 if let Some((_, versions)) = self.dirty_source_files.get_mut(file) {
644 versions.insert(version);
645 } else {
646 let entry = self.create_cache_entry(file, source);
647 self.dirty_source_files.insert(file.to_path_buf(), (entry, HashSet::from([version])));
648 }
649 }
650
651 fn insert_filtered_source(&mut self, file: PathBuf, source: Source, version: Version) {
653 match self.filtered.entry(file) {
654 hash_map::Entry::Occupied(mut entry) => {
655 entry.get_mut().1.insert(version);
656 }
657 hash_map::Entry::Vacant(entry) => {
658 entry.insert((source, HashSet::from([version])));
659 }
660 }
661 }
662
663 fn filter(&mut self, sources: Sources, version: &Version) -> FilteredSources {
680 let mut imports_of_dirty = HashSet::new();
682
683 let mut dirty_sources = BTreeMap::new();
685 let mut clean_sources = Vec::with_capacity(sources.len());
686 let mut memo = HashMap::with_capacity(sources.len());
687 for (file, source) in sources {
688 let source = self.filter_source(file, source, version, &mut memo);
689 if source.dirty {
690 imports_of_dirty.extend(self.edges.all_imported_nodes(source.idx));
692 dirty_sources.insert(source.file, FilteredSource::Dirty(source.source));
693 } else {
694 clean_sources.push(source);
695 }
696 }
697
698 for (file, filtered) in dirty_sources.iter() {
700 self.insert_new_cache_entry(file, filtered.source(), version.clone());
701 }
702
703 for clean_source in clean_sources {
704 let FilteredSourceInfo { file, source, idx, .. } = clean_source;
705 if imports_of_dirty.contains(&idx) {
706 dirty_sources.insert(file.clone(), FilteredSource::Clean(source.clone()));
708 }
709 self.insert_filtered_source(file, source, version.clone());
710 }
711
712 dirty_sources.into()
713 }
714
715 fn filter_source(
717 &self,
718 file: PathBuf,
719 source: Source,
720 version: &Version,
721 memo: &mut HashMap<PathBuf, bool>,
722 ) -> FilteredSourceInfo {
723 let idx = self.edges.node_id(&file);
724 let dirty = self.is_dirty(&file, version, memo, true);
725 FilteredSourceInfo { file, source, idx, dirty }
726 }
727
728 #[tracing::instrument(level = "trace", skip_all, fields(file = %file.display(), version = %version))]
730 fn is_dirty(
731 &self,
732 file: &Path,
733 version: &Version,
734 memo: &mut HashMap<PathBuf, bool>,
735 check_imports: bool,
736 ) -> bool {
737 match memo.get(file) {
738 Some(&dirty) => {
739 tracing::trace!(dirty, "memoized");
740 dirty
741 }
742 None => {
743 let dirty = self.is_dirty_impl(file, version) ||
745 (check_imports &&
746 self.edges
747 .imports(file)
748 .iter()
749 .any(|file| self.is_dirty(file, version, memo, false)));
750 memo.insert(file.to_path_buf(), dirty);
751 dirty
752 }
753 }
754 }
755
756 fn is_dirty_impl(&self, file: &Path, version: &Version) -> bool {
757 let Some(hash) = self.content_hashes.get(file) else {
758 tracing::trace!("missing cache entry");
759 return true
760 };
761
762 let Some(entry) = self.cache.entry(file) else {
763 tracing::trace!("missing content hash");
764 return true
765 };
766
767 if entry.content_hash != *hash {
768 tracing::trace!("content hash changed");
769 return true
770 }
771
772 if self.project.solc_config != entry.solc_config {
773 tracing::trace!("solc config changed");
774 return true
775 }
776
777 if entry.artifacts.is_empty() {
781 tracing::trace!("no artifacts");
782 return false
783 }
784
785 if !entry.contains_version(version) {
786 tracing::trace!("missing linked artifacts",);
787 return true
788 }
789
790 if entry.artifacts_for_version(version).any(|artifact_path| {
791 let missing_artifact = !self.cached_artifacts.has_artifact(artifact_path);
792 if missing_artifact {
793 tracing::trace!("missing artifact \"{}\"", artifact_path.display());
794 }
795 missing_artifact
796 }) {
797 return true
798 }
799
800 false
802 }
803
804 fn fill_hashes(&mut self, sources: &Sources) {
806 for (file, source) in sources {
807 if let hash_map::Entry::Vacant(entry) = self.content_hashes.entry(file.clone()) {
808 entry.insert(source.content_hash());
809 }
810 }
811 }
812}
813
/// Abstraction over the disk cache: either a no-op variant used when caching
/// is disabled, or a full cache backed by [`ArtifactsCacheInner`].
#[allow(clippy::large_enum_variant)]
#[derive(Debug)]
pub(crate) enum ArtifactsCache<'a, T: ArtifactOutput> {
    /// Caching is disabled; only the import graph and project are kept.
    Ephemeral(GraphEdges, &'a Project<T>),
    /// Caching is enabled.
    Cached(ArtifactsCacheInner<'a, T>),
}
823
impl<'a, T: ArtifactOutput> ArtifactsCache<'a, T> {
    /// Creates the cache for a compile run: a populated `Cached` variant when
    /// the project has caching enabled, otherwise `Ephemeral`.
    pub fn new(project: &'a Project<T>, edges: GraphEdges) -> Result<Self> {
        // Returns the cache read from disk if it exists and its paths still
        // match the project's; otherwise a fresh empty cache.
        fn get_cache<T: ArtifactOutput>(
            project: &Project<T>,
            invalidate_cache: bool,
        ) -> SolFilesCache {
            let paths = project.paths.paths_relative();

            if !invalidate_cache && project.cache_path().exists() {
                if let Ok(cache) = SolFilesCache::read_joined(&project.paths) {
                    if cache.paths == paths {
                        // paths unchanged: the cache is reusable
                        return cache
                    }
                }
            }

            SolFilesCache::new(Default::default(), paths)
        }

        let cache = if project.cached {
            // discard the existing cache when there are unresolved imports
            // NOTE(review): presumably because the graph is incomplete — confirm
            let invalidate_cache = !edges.unresolved_imports().is_empty();

            let mut cache = get_cache(project, invalidate_cache);

            cache.remove_missing_files();

            // read all cached artifacts; on failure fall back to an empty set
            let cached_artifacts = if project.paths.artifacts.exists() {
                tracing::trace!("reading artifacts from cache...");
                let artifacts = cache.read_artifacts::<T::Artifact>().unwrap_or_default();
                tracing::trace!("read {} artifacts from cache", artifacts.artifact_files().count());
                artifacts
            } else {
                Default::default()
            };

            let cache = ArtifactsCacheInner {
                cache,
                cached_artifacts,
                edges,
                project,
                filtered: Default::default(),
                dirty_source_files: Default::default(),
                content_hashes: Default::default(),
            };

            ArtifactsCache::Cached(cache)
        } else {
            ArtifactsCache::Ephemeral(edges, project)
        };

        Ok(cache)
    }

    /// Returns the import graph.
    pub fn graph(&self) -> &GraphEdges {
        match self {
            ArtifactsCache::Ephemeral(graph, _) => graph,
            ArtifactsCache::Cached(inner) => &inner.edges,
        }
    }

    // test-only accessor for the inner cached state
    #[cfg(test)]
    #[allow(unused)]
    #[doc(hidden)]
    pub fn as_cached(&self) -> Option<&ArtifactsCacheInner<'a, T>> {
        match self {
            ArtifactsCache::Ephemeral(_, _) => None,
            ArtifactsCache::Cached(cached) => Some(cached),
        }
    }

    /// Returns the output context derived from the cache contents; empty when
    /// ephemeral.
    pub fn output_ctx(&self) -> OutputContext {
        match self {
            ArtifactsCache::Ephemeral(_, _) => Default::default(),
            ArtifactsCache::Cached(inner) => OutputContext::new(&inner.cache),
        }
    }

    /// Returns the project this cache was created for.
    pub fn project(&self) -> &'a Project<T> {
        match self {
            ArtifactsCache::Ephemeral(_, project) => project,
            ArtifactsCache::Cached(cache) => cache.project,
        }
    }

    /// Fills in missing content hashes for `sources`; no-op when ephemeral.
    pub fn fill_content_hashes(&mut self, sources: &Sources) {
        match self {
            ArtifactsCache::Ephemeral(_, _) => {}
            ArtifactsCache::Cached(cache) => cache.fill_hashes(sources),
        }
    }

    /// Filters out sources that are up to date; when ephemeral, all sources
    /// are compiled.
    pub fn filter(&mut self, sources: Sources, version: &Version) -> FilteredSources {
        match self {
            ArtifactsCache::Ephemeral(_, _) => sources.into(),
            ArtifactsCache::Cached(cache) => cache.filter(sources, version),
        }
    }

    /// Finalizes the compile run: reconciles the freshly written artifacts
    /// with the cached ones, optionally persists the updated cache file, and
    /// returns the cached artifacts that are still valid.
    pub fn consume(
        self,
        written_artifacts: &Artifacts<T::Artifact>,
        write_to_disk: bool,
    ) -> Result<Artifacts<T::Artifact>> {
        match self {
            ArtifactsCache::Ephemeral(_, _) => {
                tracing::trace!("no cache configured, ephemeral");
                Ok(Default::default())
            }
            ArtifactsCache::Cached(cache) => {
                let ArtifactsCacheInner {
                    mut cache,
                    mut cached_artifacts,
                    mut dirty_source_files,
                    filtered,
                    project,
                    ..
                } = cache;

                // keep only entries for files that were part of this run
                cache.retain(filtered.iter().map(|(p, (_, v))| (p.as_path(), v)));

                for (file, written_artifacts) in written_artifacts.as_ref() {
                    let file_path = Path::new(&file);
                    // attach the written artifacts to the file's new cache entry,
                    // restricted to the versions the file was compiled with
                    if let Some((cache_entry, versions)) = dirty_source_files.get_mut(file_path) {
                        cache_entry.insert_artifacts(written_artifacts.iter().map(
                            |(name, artifacts)| {
                                let artifacts = artifacts
                                    .iter()
                                    .filter(|artifact| versions.contains(&artifact.version))
                                    .collect::<Vec<_>>();
                                (name, artifacts)
                            },
                        ));
                    }

                    // purge cached artifacts superseded by newly written ones
                    if let Some((f, mut cached)) = cached_artifacts.0.remove_entry(file) {
                        tracing::trace!("checking {} for obsolete cached artifact entries", file);
                        cached.retain(|name, cached_artifacts| {
                            if let Some(written_files) = written_artifacts.get(name) {
                                cached_artifacts.retain(|f| {
                                    // keep a cached artifact only if no written
                                    // artifact has its version AND its version is
                                    // still relevant for this file
                                    let retain = written_files
                                        .iter()
                                        .all(|other| other.version != f.version) && filtered.get(
                                        &PathBuf::from(file)).map(|(_, versions)| {
                                        versions.contains(&f.version)
                                    }).unwrap_or_default();
                                    if !retain {
                                        tracing::trace!(
                                            "purging obsolete cached artifact {:?} for contract {} and version {}",
                                            f.file,
                                            name,
                                            f.version
                                        );
                                    }
                                    retain
                                });
                                return !cached_artifacts.is_empty()
                            }
                            false
                        });

                        if !cached.is_empty() {
                            cached_artifacts.0.insert(f, cached);
                        }
                    }
                }

                // merge the new entries for dirty files into the cache file
                cache
                    .extend(dirty_source_files.into_iter().map(|(file, (entry, _))| (file, entry)));

                if write_to_disk {
                    // store entry keys relative to the project root and artifact
                    // paths relative to the artifacts directory
                    cache
                        .strip_entries_prefix(project.root())
                        .strip_artifact_files_prefixes(project.artifacts_path());
                    cache.write(project.cache_path())?;
                }

                Ok(cached_artifacts)
            }
        }
    }
}