feat(coverage): caching for coverage #9366

Draft · wants to merge 25 commits into master from yash/fix-8840
Changes from 13 commits
Commits (25)
39c5922
fix(`coverage`): write coverage artifacts to separate dir
yash-atreya Nov 20, 2024
215ac18
Merge branch 'master' into yash/fix-8840
yash-atreya Nov 21, 2024
20b09e8
adjust cache path for coverage
yash-atreya Nov 21, 2024
581cf4c
partial test
yash-atreya Nov 21, 2024
a0d7e19
enable cache
yash-atreya Nov 21, 2024
c59364a
don't assume recompilation
yash-atreya Nov 21, 2024
66cfb3d
fix(`coverage`): dont recompile when cache exists
yash-atreya Nov 21, 2024
730b26d
Merge branch 'master' into yash/fix-8840
yash-atreya Nov 21, 2024
0eadcfa
account for cached compiled artifacts at once
yash-atreya Nov 22, 2024
9902adf
test with multi solc versions
yash-atreya Nov 22, 2024
0433863
fix(`forge clean`): remove cache/coverage dir
yash-atreya Nov 22, 2024
e90a354
add test
klkvr Nov 22, 2024
7d639e6
fix(`coverage`): account for `build_id` in source identification (#9…
yash-atreya Nov 27, 2024
2b1af19
fix
yash-atreya Nov 29, 2024
498e427
rm version as key in report
yash-atreya Nov 29, 2024
06f01dc
rm version key from ContractId and report.items
yash-atreya Nov 29, 2024
38db689
fix: scale anchor.items_id
yash-atreya Nov 29, 2024
a6edce9
fix: rm dependence of coverage on version
yash-atreya Nov 29, 2024
8297739
fix: respect artifact.build_id while processing hitmaps
yash-atreya Nov 29, 2024
13d3fd7
fix: respect element idx
yash-atreya Nov 29, 2024
973cfe0
key source_ids by version and path
yash-atreya Dec 2, 2024
2eaab46
Merge branch 'master' into yash/fix-8840
yash-atreya Dec 2, 2024
1624587
nit
yash-atreya Dec 2, 2024
860ff71
Merge branch 'master' into yash/fix-8840
yash-atreya Dec 13, 2024
54be7b9
Merge branch 'master' into yash/fix-8840
yash-atreya Dec 20, 2024
2 changes: 2 additions & 0 deletions crates/config/src/lib.rs
@@ -985,6 +985,8 @@ impl Config {
};
remove_test_dir(&self.fuzz.failure_persist_dir);
remove_test_dir(&self.invariant.failure_persist_dir);
remove_test_dir(&Some(self.cache_path.clone().join("coverage")));
remove_test_dir(&Some(self.cache_path.clone()));

// Remove snapshot directory.
let snapshot_dir = project.root().join(&self.snapshots);
41 changes: 33 additions & 8 deletions crates/evm/coverage/src/analysis.rs
@@ -1,15 +1,16 @@
use super::{CoverageItem, CoverageItemKind, SourceLocation};
use alloy_primitives::map::HashMap;
use core::fmt;
use foundry_common::TestFunctionExt;
use foundry_compilers::artifacts::ast::{self, Ast, Node, NodeType};
use rayon::prelude::*;
use std::sync::Arc;
use std::{borrow::Cow, sync::Arc};

/// A visitor that walks the AST of a single contract and finds coverage items.
#[derive(Clone, Debug)]
pub struct ContractVisitor<'a> {
/// The source ID of the contract.
source_id: usize,
source_id: SourceIdentifier,
/// The source code that contains the AST being walked.
source: &'a str,

@@ -26,7 +27,7 @@ pub struct ContractVisitor<'a> {
}

impl<'a> ContractVisitor<'a> {
pub fn new(source_id: usize, source: &'a str, contract_name: &'a Arc<str>) -> Self {
pub fn new(source_id: SourceIdentifier, source: &'a str, contract_name: &'a Arc<str>) -> Self {
Self { source_id, source, contract_name, branch_id: 0, last_line: 0, items: Vec::new() }
}

@@ -473,7 +474,7 @@ impl<'a> ContractVisitor<'a> {
let loc_start =
self.source.char_indices().map(|(i, _)| i).nth(loc.start).unwrap_or_default();
SourceLocation {
source_id: self.source_id,
source_id: self.source_id.clone(),
contract_name: self.contract_name.clone(),
start: loc.start as u32,
length: loc.length.map(|x| x as u32),
@@ -538,7 +539,7 @@ impl<'a> SourceAnalyzer<'a> {
.sources
.sources
.par_iter()
.flat_map_iter(|(&source_id, SourceFile { source, ast })| {
.flat_map_iter(|(source_id, SourceFile { source, ast })| {
ast.nodes.iter().map(move |node| {
if !matches!(node.node_type, NodeType::ContractDefinition) {
return Ok(vec![]);
@@ -556,7 +557,7 @@ impl<'a> SourceAnalyzer<'a> {
.attribute("name")
.ok_or_else(|| eyre::eyre!("Contract has no name"))?;

let mut visitor = ContractVisitor::new(source_id, source, &name);
let mut visitor = ContractVisitor::new(source_id.clone(), source, &name);
visitor.visit_contract(node)?;
let mut items = visitor.items;

@@ -583,7 +584,31 @@ impl<'a> SourceAnalyzer<'a> {
#[derive(Debug, Default)]
pub struct SourceFiles<'a> {
/// The versioned sources.
pub sources: HashMap<usize, SourceFile<'a>>,
/// Keyed by build_id and source_id.
pub sources: HashMap<SourceIdentifier, SourceFile<'a>>,
}

/// Serves as a unique identifier for sources across multiple compiler runs.
#[derive(Clone, Debug, PartialEq, Eq, Hash)]
pub struct SourceIdentifier {
/// The source ID, which is unique for each source file within a compilation job but may not
/// be unique across different jobs.
pub source_id: usize,
/// The artifact build ID, which is the same for all sources within a single compilation job
/// but is always unique across different jobs.
pub build_id: String,
}

impl fmt::Display for SourceIdentifier {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "source_id={} build_id={}", self.source_id, self.build_id,)
}
}

impl SourceIdentifier {
pub fn new(source_id: usize, build_id: String) -> Self {
Self { source_id, build_id }
}
}

/// The source code and AST of a file.
@@ -592,5 +617,5 @@ pub struct SourceFile<'a> {
/// The source code.
pub source: String,
/// The AST of the source code.
pub ast: &'a Ast,
pub ast: Cow<'a, Ast>,
}
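Context for the change above: a numeric `source_id` is only unique within a single compilation job, so the new `SourceIdentifier` pairs it with the job's `build_id` to form a stable map key across runs. A minimal standalone sketch of that keying (hypothetical build IDs and file names, independent of the foundry crates):

```rust
use std::collections::HashMap;

// Mirrors the struct added in analysis.rs; the build IDs below are made up.
#[derive(Clone, Debug, PartialEq, Eq, Hash)]
struct SourceIdentifier {
    source_id: usize,
    build_id: String,
}

fn main() {
    let mut sources: HashMap<SourceIdentifier, &str> = HashMap::new();
    // The same numeric source_id (0) coming from two different builds stays distinct.
    sources.insert(SourceIdentifier { source_id: 0, build_id: "build-a".into() }, "Counter.sol");
    sources.insert(SourceIdentifier { source_id: 0, build_id: "build-b".into() }, "Token.sol");
    assert_eq!(sources.len(), 2);
}
```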
10 changes: 7 additions & 3 deletions crates/evm/coverage/src/anchors.rs
@@ -1,3 +1,5 @@
use crate::analysis::SourceIdentifier;

use super::{CoverageItem, CoverageItemKind, ItemAnchor, SourceLocation};
use alloy_primitives::map::{DefaultHashBuilder, HashMap, HashSet};
use eyre::ensure;
@@ -11,12 +13,13 @@ pub fn find_anchors(
source_map: &SourceMap,
ic_pc_map: &IcPcMap,
items: &[CoverageItem],
items_by_source_id: &HashMap<usize, Vec<usize>>,
items_by_source_id: &HashMap<SourceIdentifier, Vec<usize>>,
source_id: &SourceIdentifier,
) -> Vec<ItemAnchor> {
let mut seen = HashSet::with_hasher(DefaultHashBuilder::default());
source_map
.iter()
.filter_map(|element| items_by_source_id.get(&(element.index()? as usize)))
.filter_map(|_element| items_by_source_id.get(source_id))
.flatten()
.filter_map(|&item_id| {
if !seen.insert(item_id) {
@@ -171,7 +174,8 @@ pub fn find_anchor_branch(
/// Calculates whether `element` is within the range of the target `location`.
fn is_in_source_range(element: &SourceElement, location: &SourceLocation) -> bool {
// Source IDs must match.
let source_ids_match = element.index().is_some_and(|a| a as usize == location.source_id);
let source_ids_match =
element.index().is_some_and(|a| a as usize == location.source_id.source_id);
if !source_ids_match {
return false;
}
23 changes: 12 additions & 11 deletions crates/evm/coverage/src/lib.rs
@@ -12,6 +12,7 @@ extern crate foundry_common;
extern crate tracing;

use alloy_primitives::{map::HashMap, Bytes, B256};
use analysis::SourceIdentifier;
use eyre::{Context, Result};
use foundry_compilers::artifacts::sourcemap::SourceMap;
use semver::Version;
@@ -36,9 +37,9 @@ pub use inspector::CoverageCollector;
#[derive(Clone, Debug, Default)]
pub struct CoverageReport {
/// A map of source IDs to the source path.
pub source_paths: HashMap<(Version, usize), PathBuf>,
pub source_paths: HashMap<(Version, SourceIdentifier), PathBuf>,
/// A map of source paths to source IDs.
pub source_paths_to_ids: HashMap<(Version, PathBuf), usize>,
pub source_paths_to_ids: HashMap<(Version, PathBuf), SourceIdentifier>,
/// All coverage items for the codebase, keyed by the compiler version.
pub items: HashMap<Version, Vec<CoverageItem>>,
/// All item anchors for the codebase, keyed by their contract ID.
@@ -51,14 +52,14 @@ pub struct CoverageReport {

impl CoverageReport {
/// Add a source file path.
pub fn add_source(&mut self, version: Version, source_id: usize, path: PathBuf) {
self.source_paths.insert((version.clone(), source_id), path.clone());
pub fn add_source(&mut self, version: Version, source_id: SourceIdentifier, path: PathBuf) {
self.source_paths.insert((version.clone(), source_id.clone()), path.clone());
self.source_paths_to_ids.insert((version, path), source_id);
}

/// Get the source ID for a specific source file path.
pub fn get_source_id(&self, version: Version, path: PathBuf) -> Option<usize> {
self.source_paths_to_ids.get(&(version, path)).copied()
pub fn get_source_id(&self, version: Version, path: PathBuf) -> Option<SourceIdentifier> {
self.source_paths_to_ids.get(&(version, path)).cloned()
}

/// Add the source maps.
@@ -89,7 +90,7 @@ impl CoverageReport {
for (version, items) in self.items.iter() {
for item in items {
let Some(path) =
self.source_paths.get(&(version.clone(), item.loc.source_id)).cloned()
self.source_paths.get(&(version.clone(), item.loc.source_id.clone())).cloned()
else {
continue;
};
@@ -107,7 +108,7 @@ impl CoverageReport {
for (version, items) in self.items.iter() {
for item in items {
let Some(path) =
self.source_paths.get(&(version.clone(), item.loc.source_id)).cloned()
self.source_paths.get(&(version.clone(), item.loc.source_id.clone())).cloned()
else {
continue;
};
@@ -160,7 +161,7 @@ impl CoverageReport {
self.items.retain(|version, items| {
items.retain(|item| {
self.source_paths
.get(&(version.clone(), item.loc.source_id))
.get(&(version.clone(), item.loc.source_id.clone()))
.map(|path| filter(path))
.unwrap_or(false)
});
@@ -259,7 +260,7 @@ impl HitMap {
#[derive(Clone, Debug, PartialEq, Eq, Hash)]
pub struct ContractId {
pub version: Version,
pub source_id: usize,
pub source_id: SourceIdentifier,
pub contract_name: Arc<str>,
}

@@ -348,7 +349,7 @@ impl Display for CoverageItem {
#[derive(Clone, Debug)]
pub struct SourceLocation {
/// The source ID.
pub source_id: usize,
pub source_id: SourceIdentifier,
/// The contract this source range is in.
pub contract_name: Arc<str>,
/// Start byte in the source code.
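Related note on the report maps above: `SourceIdentifier` owns a `String` and is not `Copy`, so lookups such as `get_source_id` now return a clone rather than a copy. A small sketch of the `(Version, PathBuf) -> SourceIdentifier` lookup, assuming the `semver` crate and hypothetical values:

```rust
use semver::Version;
use std::{collections::HashMap, path::PathBuf};

#[derive(Clone, Debug, PartialEq, Eq, Hash)]
struct SourceIdentifier {
    source_id: usize,
    build_id: String,
}

fn main() {
    let mut source_paths_to_ids: HashMap<(Version, PathBuf), SourceIdentifier> = HashMap::new();
    let version = Version::parse("0.8.23").unwrap();
    let id = SourceIdentifier { source_id: 3, build_id: "build-a".into() };
    source_paths_to_ids.insert((version.clone(), PathBuf::from("src/Counter.sol")), id);

    // `.cloned()` replaces the previous `.copied()` because the identifier is not `Copy`.
    let found = source_paths_to_ids.get(&(version, PathBuf::from("src/Counter.sol"))).cloned();
    assert_eq!(found.map(|id| id.source_id), Some(3));
}
```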
83 changes: 65 additions & 18 deletions crates/forge/bin/cmd/coverage.rs
@@ -4,7 +4,7 @@ use clap::{Parser, ValueEnum, ValueHint};
use eyre::{Context, Result};
use forge::{
coverage::{
analysis::{SourceAnalysis, SourceAnalyzer, SourceFile, SourceFiles},
analysis::{SourceAnalysis, SourceAnalyzer, SourceFile, SourceFiles, SourceIdentifier},
anchors::find_anchors,
BytecodeReporter, ContractId, CoverageReport, CoverageReporter, DebugReporter, ItemAnchor,
LcovReporter, SummaryReporter,
@@ -23,6 +23,7 @@ use foundry_config::{Config, SolcReq};
use rayon::prelude::*;
use semver::Version;
use std::{
borrow::Cow,
path::{Path, PathBuf},
sync::Arc,
};
@@ -92,7 +93,28 @@ impl CoverageArgs {
/// Builds the project.
fn build(&self, config: &Config) -> Result<(Project, ProjectCompileOutput)> {
// Set up the project
let mut project = config.create_project(false, false)?;
let mut project = config.create_project(config.cache, false)?;

// Set a different artifacts path for coverage: `out/coverage`.
// This avoids overwriting the artifacts of the main build, which may be built with
// different optimizer settings or --via-ir. Optimizer settings are disabled for
// coverage builds.
let coverage_artifacts_path = project.artifacts_path().join("coverage");
project.paths.artifacts = coverage_artifacts_path.clone();
project.paths.build_infos = coverage_artifacts_path.join("build-info");

// Set a different compiler cache path for coverage. `cache/coverage`.
let cache_file = project
.paths
.cache
.components()
.last()
.ok_or_else(|| eyre::eyre!("Cache path is empty"))?;

let cache_dir =
project.paths.cache.parent().ok_or_else(|| eyre::eyre!("Cache path is empty"))?;
project.paths.cache = cache_dir.join("coverage").join(cache_file);

if self.ir_minimum {
// print warning message
sh_warn!("{}", concat!(
@@ -124,6 +146,8 @@ impl CoverageArgs {
project.settings.solc.via_ir = None;
}

sh_warn!("optimizer settings have been disabled for accurate coverage reports")?;

let output = ProjectCompiler::default()
.compile(&project)?
.with_stripped_file_prefixes(project.root());
@@ -135,32 +159,43 @@ impl CoverageArgs {
#[instrument(name = "prepare", skip_all)]
fn prepare(&self, project: &Project, output: &ProjectCompileOutput) -> Result<CoverageReport> {
let mut report = CoverageReport::default();

// Collect source files.
let project_paths = &project.paths;
let mut versioned_sources = HashMap::<Version, SourceFiles<'_>>::default();
for (path, source_file, version) in output.output().sources.sources_with_version() {
report.add_source(version.clone(), source_file.id as usize, path.clone());

// Account for both cached and freshly compiled sources.
for (id, artifact) in output.artifact_ids() {
// Filter out dependencies
if !self.include_libs && project_paths.has_library_ancestor(path) {
if !self.include_libs && project_paths.has_library_ancestor(&id.source) {
continue;
}

if let Some(ast) = &source_file.ast {
let file = project_paths.root.join(path);
let version = id.version;
let build_id = id.build_id;
let source_file = if let Some(source_file) = artifact.source_file() {
source_file
} else {
sh_warn!("ast source file not found for {}", id.source.display())?;
continue;
};

let identifier = SourceIdentifier::new(source_file.id as usize, build_id.clone());
report.add_source(version.clone(), identifier.clone(), id.source.clone());

if let Some(ast) = source_file.ast {
let file = project_paths.root.join(id.source);
trace!(root=?project_paths.root, ?file, "reading source file");

let source = SourceFile {
ast,
ast: Cow::Owned(ast),
source: fs::read_to_string(&file)
.wrap_err("Could not read source code for analysis")?,
};

versioned_sources
.entry(version.clone())
.or_default()
.sources
.insert(source_file.id as usize, source);
.insert(identifier, source);
}
}

@@ -185,17 +220,23 @@ impl CoverageArgs {
);

for (item_id, item) in source_analysis.items.iter().enumerate() {
items_by_source_id.entry(item.loc.source_id).or_default().push(item_id);
items_by_source_id.entry(item.loc.source_id.clone()).or_default().push(item_id);
}

let anchors = artifacts
.par_iter()
.filter(|artifact| artifact.contract_id.version == *version)
.map(|artifact| {
let creation_code_anchors =
artifact.creation.find_anchors(&source_analysis, &items_by_source_id);
let deployed_code_anchors =
artifact.deployed.find_anchors(&source_analysis, &items_by_source_id);
let creation_code_anchors = artifact.creation.find_anchors(
&source_analysis,
&items_by_source_id,
&artifact.contract_id.source_id,
);
let deployed_code_anchors = artifact.deployed.find_anchors(
&source_analysis,
&items_by_source_id,
&artifact.contract_id.source_id,
);
(artifact.contract_id.clone(), (creation_code_anchors, deployed_code_anchors))
})
.collect::<Vec<_>>();
@@ -353,7 +394,11 @@ pub struct ArtifactData {
}

impl ArtifactData {
pub fn new(id: &ArtifactId, source_id: usize, artifact: &impl Artifact) -> Option<Self> {
pub fn new(
id: &ArtifactId,
source_id: SourceIdentifier,
artifact: &impl Artifact,
) -> Option<Self> {
Some(Self {
contract_id: ContractId {
version: id.version.clone(),
@@ -398,14 +443,16 @@ impl BytecodeData {
pub fn find_anchors(
&self,
source_analysis: &SourceAnalysis,
items_by_source_id: &HashMap<usize, Vec<usize>>,
items_by_source_id: &HashMap<SourceIdentifier, Vec<usize>>,
source_id: &SourceIdentifier,
) -> Vec<ItemAnchor> {
find_anchors(
&self.bytecode,
&self.source_map,
&self.ic_pc_map,
&source_analysis.items,
items_by_source_id,
source_id,
)
}
}
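To illustrate the path relocation in `build()` above: the coverage run writes artifacts to `out/coverage` and its compiler cache to `cache/coverage/<cache file>`, so the main build's outputs are left untouched. A standalone sketch of just the path handling (hypothetical inputs; the real code derives these from the project's configured paths and uses `components().last()` for the cache file name):

```rust
use std::path::PathBuf;

fn coverage_paths(artifacts: PathBuf, cache: PathBuf) -> Option<(PathBuf, PathBuf)> {
    // Artifacts: out -> out/coverage (build infos likewise move to out/coverage/build-info).
    let coverage_artifacts = artifacts.join("coverage");

    // Cache: cache/solidity-files-cache.json -> cache/coverage/solidity-files-cache.json.
    let cache_file = cache.file_name()?.to_owned();
    let cache_dir = cache.parent()?;
    let coverage_cache = cache_dir.join("coverage").join(cache_file);

    Some((coverage_artifacts, coverage_cache))
}

fn main() {
    let (artifacts, cache) = coverage_paths(
        PathBuf::from("out"),
        PathBuf::from("cache/solidity-files-cache.json"),
    )
    .unwrap();
    assert_eq!(artifacts, PathBuf::from("out/coverage"));
    assert_eq!(cache, PathBuf::from("cache/coverage/solidity-files-cache.json"));
}
```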