diff --git a/compilation/platforms/crytic_compile.go b/compilation/platforms/crytic_compile.go index 6039df26..77d43245 100644 --- a/compilation/platforms/crytic_compile.go +++ b/compilation/platforms/crytic_compile.go @@ -204,7 +204,7 @@ func (c *CryticCompilationConfig) Compile() ([]types.Compilation, string, error) } // Retrieve the source unit ID - sourceUnitId := ast.GetSourceUnitID() + sourceUnitId := types.GetSrcMapSourceUnitID(ast.Src) compilation.SourcePathToArtifact[sourcePath] = types.SourceArtifact{ // TODO: Our types.AST is not the same as the original AST but we could parse it and avoid using "any" Ast: source.AST, diff --git a/compilation/platforms/solc.go b/compilation/platforms/solc.go index 4ceef747..068cbeb7 100644 --- a/compilation/platforms/solc.go +++ b/compilation/platforms/solc.go @@ -145,7 +145,7 @@ func (s *SolcCompilationConfig) Compile() ([]types.Compilation, string, error) { } // Get the source unit ID - sourceUnitId := ast.GetSourceUnitID() + sourceUnitId := types.GetSrcMapSourceUnitID(ast.Src) // Construct our compiled source object compilation.SourcePathToArtifact[sourcePath] = types.SourceArtifact{ // TODO our types.AST is not the same as the original AST but we could parse it and avoid using "any" diff --git a/compilation/types/ast.go b/compilation/types/ast.go index f6b21612..b1dd251c 100644 --- a/compilation/types/ast.go +++ b/compilation/types/ast.go @@ -20,24 +20,84 @@ const ( // Node interface represents a generic AST node type Node interface { + // GetNodeType returns solc's node type e.g. FunctionDefinition, ContractDefinition. 
GetNodeType() string } +// FunctionDefinition is the function definition node +type FunctionDefinition struct { + // NodeType represents the node type (currently we only evaluate source unit node types) + NodeType string `json:"nodeType"` + // Src is the source file for this AST + Src string `json:"src"` + Name string `json:"name,omitempty"` +} + +func (s FunctionDefinition) GetNodeType() string { + return s.NodeType +} + // ContractDefinition is the contract definition node type ContractDefinition struct { - // NodeType represents the AST node type (note that it will always be a contract definition) + // NodeType represents the node type (currently we only evaluate source unit node types) NodeType string `json:"nodeType"` + // Nodes is a list of Nodes within the AST + Nodes []Node `json:"nodes"` + // Src is the source file for this AST + Src string `json:"src"` // CanonicalName is the name of the contract definition CanonicalName string `json:"canonicalName,omitempty"` // Kind is a ContractKind that represents what type of contract definition this is (contract, interface, or library) Kind ContractKind `json:"contractKind,omitempty"` } -// GetNodeType implements the Node interface and returns the node type for the contract definition func (s ContractDefinition) GetNodeType() string { return s.NodeType } +func (c *ContractDefinition) UnmarshalJSON(data []byte) error { + // Unmarshal the top-level AST into our own representation. 
Defer the unmarshaling of all the individual nodes until later + type Alias ContractDefinition + aux := &struct { + Nodes []json.RawMessage `json:"nodes"` + + *Alias + }{ + Alias: (*Alias)(c), + } + + if err := json.Unmarshal(data, &aux); err != nil { + return err + } + + // Iterate through all the nodes of the contract definition + for _, nodeData := range aux.Nodes { + // Unmarshal the node data to retrieve the node type + var nodeType struct { + NodeType string `json:"nodeType"` + } + if err := json.Unmarshal(nodeData, &nodeType); err != nil { + return err + } + + // Unmarshal the contents of the node based on the node type + switch nodeType.NodeType { + case "FunctionDefinition": + // If this is a function definition, unmarshal it + var functionDefinition FunctionDefinition + if err := json.Unmarshal(nodeData, &functionDefinition); err != nil { + return err + } + c.Nodes = append(c.Nodes, functionDefinition) + default: + continue + } + } + + return nil + +} + // AST is the abstract syntax tree type AST struct { // NodeType represents the node type (currently we only evaluate source unit node types) @@ -48,7 +108,6 @@ type AST struct { Src string `json:"src"` } -// UnmarshalJSON unmarshals from JSON func (a *AST) UnmarshalJSON(data []byte) error { // Unmarshal the top-level AST into our own representation. Defer the unmarshaling of all the individual nodes until later type Alias AST @@ -62,11 +121,6 @@ func (a *AST) UnmarshalJSON(data []byte) error { return err } - // Check if nodeType is "SourceUnit". 
Return early otherwise - if aux.NodeType != "SourceUnit" { - return nil - } - // Iterate through all the nodes of the source unit for _, nodeData := range aux.Nodes { // Unmarshal the node data to retrieve the node type @@ -78,7 +132,6 @@ func (a *AST) UnmarshalJSON(data []byte) error { } // Unmarshal the contents of the node based on the node type - var node Node switch nodeType.NodeType { case "ContractDefinition": // If this is a contract definition, unmarshal it @@ -86,23 +139,30 @@ func (a *AST) UnmarshalJSON(data []byte) error { if err := json.Unmarshal(nodeData, &contractDefinition); err != nil { return err } - node = contractDefinition + a.Nodes = append(a.Nodes, contractDefinition) + + case "FunctionDefinition": + // If this is a function definition, unmarshal it + var functionDefinition FunctionDefinition + if err := json.Unmarshal(nodeData, &functionDefinition); err != nil { + return err + } + a.Nodes = append(a.Nodes, functionDefinition) + // TODO: Add cases for other node types as needed default: continue } - // Append the node - a.Nodes = append(a.Nodes, node) } return nil } -// GetSourceUnitID returns the source unit ID based on the source of the AST -func (a *AST) GetSourceUnitID() int { +// GetSrcMapSourceUnitID returns the source unit ID based on the source of the AST +func GetSrcMapSourceUnitID(src string) int { re := regexp.MustCompile(`[0-9]*:[0-9]*:([0-9]*)`) - sourceUnitCandidates := re.FindStringSubmatch(a.Src) + sourceUnitCandidates := re.FindStringSubmatch(src) if len(sourceUnitCandidates) == 2 { // FindStringSubmatch includes the whole match as the first element sourceUnit, err := strconv.Atoi(sourceUnitCandidates[1]) @@ -112,3 +172,33 @@ func (a *AST) GetSourceUnitID() int { } return -1 } + +// GetSrcMapStart returns the byte offset where the function definition starts in the source file +func GetSrcMapStart(src string) int { + // 95:42:0 returns 95 + re := regexp.MustCompile(`([0-9]*):[0-9]*:[0-9]*`) + startCandidates := 
re.FindStringSubmatch(src) + + if len(startCandidates) == 2 { // FindStringSubmatch includes the whole match as the first element + start, err := strconv.Atoi(startCandidates[1]) + if err == nil { + return start + } + } + return -1 +} + +// GetSrcMapLength returns the length of the function definition in bytes +func GetSrcMapLength(src string) int { + // 95:42:0 returns 42 + re := regexp.MustCompile(`[0-9]*:([0-9]*):[0-9]*`) + endCandidates := re.FindStringSubmatch(src) + + if len(endCandidates) == 2 { // FindStringSubmatch includes the whole match as the first element + end, err := strconv.Atoi(endCandidates[1]) + if err == nil { + return end + } + } + return -1 +} diff --git a/docs/src/coverage_reports.md b/docs/src/coverage_reports.md index cd24b564..10920207 100644 --- a/docs/src/coverage_reports.md +++ b/docs/src/coverage_reports.md @@ -1,3 +1,44 @@ # Coverage Reports -WIP +## Generating HTML Report from LCOV + +Enable coverage reporting by setting the `corpusDirectory` key in the configuration file and setting the `coverageFormats` key to `["lcov", "html"]`. + +```json +{ + "corpusDirectory": "corpus", + "coverageFormats": ["lcov", "html"] +} +``` + +### Install lcov and genhtml + +Linux: + +```bash +apt-get install lcov +``` + +MacOS: + +```bash +brew install lcov +``` + +### Generate HTML Report + +```bash + +genhtml corpus/coverage/lcov.info --output-dir corpus --rc derive_function_end_line=0 +``` + +> [!WARNING] +> **The `derive_function_end_line` flag is required to prevent the `genhtml` tool from crashing when processing the Solidity source code.** + +Open the `corpus/index.html` file in your browser or follow the steps to use VSCode below. + +### View Coverage Report in VSCode with Coverage Gutters + +Install the [Coverage Gutters](https://marketplace.visualstudio.com/items?itemName=ryanluker.vscode-coverage-gutters) extension. + +Then, right click in a project file and select `Coverage Gutters: Display Coverage`. 
diff --git a/docs/src/project_configuration/fuzzing_config.md b/docs/src/project_configuration/fuzzing_config.md index b2bf537d..5adcd067 100644 --- a/docs/src/project_configuration/fuzzing_config.md +++ b/docs/src/project_configuration/fuzzing_config.md @@ -55,6 +55,13 @@ The fuzzing configuration defines the parameters for the fuzzing campaign. can then be re-used/mutated by the fuzzer during the next fuzzing campaign. - **Default**: "" +### `coverageFormats` + +- **Type**: [String] (e.g. `["lcov"]`) +- **Description**: The coverage reports to generate after the fuzzing campaign has completed. The coverage reports are saved + in the `coverage` directory within `crytic-export/` or `corpusDirectory` if configured. +- **Default**: `["lcov", "html"]` + ### `targetContracts` - **Type**: [String] (e.g. `[FirstContract, SecondContract, ThirdContract]`) diff --git a/fuzzing/config/config.go b/fuzzing/config/config.go index e488238c..1ccbfb61 100644 --- a/fuzzing/config/config.go +++ b/fuzzing/config/config.go @@ -3,6 +3,7 @@ package config import ( "encoding/json" "errors" + "fmt" "math/big" "os" @@ -60,6 +61,9 @@ type FuzzingConfig struct { // CoverageEnabled describes whether to use coverage-guided fuzzing CoverageEnabled bool `json:"coverageEnabled"` + // CoverageFormats indicate which reports to generate: "lcov" and "html" are supported. 
+ CoverageFormats []string `json:"coverageFormats"` + // TargetContracts are the target contracts for fuzz testing TargetContracts []string `json:"targetContracts"` @@ -391,6 +395,15 @@ func (p *ProjectConfig) Validate() error { } } + // The coverage report format must be either "lcov" or "html" + if p.Fuzzing.CoverageFormats != nil { + for _, report := range p.Fuzzing.CoverageFormats { + if report != "lcov" && report != "html" { + return fmt.Errorf("project configuration must specify only valid coverage reports (lcov, html): %s", report) + } + } + } + // Ensure that the log level is a valid one level, err := zerolog.ParseLevel(p.Logging.Level.String()) if err != nil || level == zerolog.FatalLevel { diff --git a/fuzzing/config/config_defaults.go b/fuzzing/config/config_defaults.go index 10f45dc1..38532a03 100644 --- a/fuzzing/config/config_defaults.go +++ b/fuzzing/config/config_defaults.go @@ -46,6 +46,7 @@ func GetDefaultProjectConfig(platform string) (*ProjectConfig, error) { ConstructorArgs: map[string]map[string]any{}, CorpusDirectory: "", CoverageEnabled: true, + CoverageFormats: []string{"html", "lcov"}, SenderAddresses: []string{ "0x10000", "0x20000", diff --git a/fuzzing/config/gen_fuzzing_config.go b/fuzzing/config/gen_fuzzing_config.go index 6a2784b7..47f780c5 100644 --- a/fuzzing/config/gen_fuzzing_config.go +++ b/fuzzing/config/gen_fuzzing_config.go @@ -23,6 +23,7 @@ func (f FuzzingConfig) MarshalJSON() ([]byte, error) { CallSequenceLength int `json:"callSequenceLength"` CorpusDirectory string `json:"corpusDirectory"` CoverageEnabled bool `json:"coverageEnabled"` + CoverageFormats []string `json:"coverageFormats"` TargetContracts []string `json:"targetContracts"` PredeployedContracts map[string]string `json:"predeployedContracts"` TargetContractsBalances []*hexutil.Big `json:"targetContractsBalances"` @@ -45,6 +46,7 @@ func (f FuzzingConfig) MarshalJSON() ([]byte, error) { enc.CallSequenceLength = f.CallSequenceLength enc.CorpusDirectory = 
f.CorpusDirectory enc.CoverageEnabled = f.CoverageEnabled + enc.CoverageFormats = f.CoverageFormats enc.TargetContracts = f.TargetContracts enc.PredeployedContracts = f.PredeployedContracts if f.TargetContractsBalances != nil { @@ -76,6 +78,7 @@ func (f *FuzzingConfig) UnmarshalJSON(input []byte) error { CallSequenceLength *int `json:"callSequenceLength"` CorpusDirectory *string `json:"corpusDirectory"` CoverageEnabled *bool `json:"coverageEnabled"` + CoverageFormats []string `json:"coverageFormats"` TargetContracts []string `json:"targetContracts"` PredeployedContracts map[string]string `json:"predeployedContracts"` TargetContractsBalances []*hexutil.Big `json:"targetContractsBalances"` @@ -117,6 +120,9 @@ func (f *FuzzingConfig) UnmarshalJSON(input []byte) error { if dec.CoverageEnabled != nil { f.CoverageEnabled = *dec.CoverageEnabled } + if dec.CoverageFormats != nil { + f.CoverageFormats = dec.CoverageFormats + } if dec.TargetContracts != nil { f.TargetContracts = dec.TargetContracts } diff --git a/fuzzing/coverage/report_generation.go b/fuzzing/coverage/report_generation.go index c9bc2da1..b5125ea7 100644 --- a/fuzzing/coverage/report_generation.go +++ b/fuzzing/coverage/report_generation.go @@ -3,14 +3,14 @@ package coverage import ( _ "embed" "fmt" - "github.com/crytic/medusa/compilation/types" - "github.com/crytic/medusa/utils" "html/template" "math" "os" "path/filepath" "strconv" "time" + + "github.com/crytic/medusa/utils" ) var ( @@ -18,26 +18,8 @@ var ( htmlReportTemplate []byte ) -// GenerateReport takes a set of CoverageMaps and compilations, and produces a coverage report using them, detailing -// all source mapped ranges of the source files which were covered or not. -// Returns an error if one occurred. -func GenerateReport(compilations []types.Compilation, coverageMaps *CoverageMaps, htmlReportPath string) error { - // Perform source analysis. 
- sourceAnalysis, err := AnalyzeSourceCoverage(compilations, coverageMaps) - if err != nil { - return err - } - - // Finally, export the report data we analyzed. - if htmlReportPath != "" { - err = exportCoverageReport(sourceAnalysis, htmlReportPath) - } - return err -} - -// exportCoverageReport takes a previously performed source analysis and generates an HTML coverage report from it. -// Returns an error if one occurs. -func exportCoverageReport(sourceAnalysis *SourceAnalysis, outputPath string) error { +// WriteHTMLReport takes a previously performed source analysis and generates an HTML coverage report from it. +func WriteHTMLReport(sourceAnalysis *SourceAnalysis, reportDir string) (string, error) { // Define mappings onto some useful variables/functions. functionMap := template.FuncMap{ "timeNow": time.Now, @@ -79,21 +61,21 @@ func exportCoverageReport(sourceAnalysis *SourceAnalysis, outputPath string) err // Parse our HTML template tmpl, err := template.New("coverage_report.html").Funcs(functionMap).Parse(string(htmlReportTemplate)) if err != nil { - return fmt.Errorf("could not export report, failed to parse report template: %v", err) + return "", fmt.Errorf("could not export report, failed to parse report template: %v", err) } - // If the parent directory doesn't exist, create it. - parentDirectory := filepath.Dir(outputPath) - err = utils.MakeDirectory(parentDirectory) + // If the directory doesn't exist, create it. + err = utils.MakeDirectory(reportDir) if err != nil { - return err + return "", err } // Create our report file - file, err := os.Create(outputPath) + htmlReportPath := filepath.Join(reportDir, "coverage_report.html") + file, err := os.Create(htmlReportPath) if err != nil { _ = file.Close() - return fmt.Errorf("could not export report, failed to open file for writing: %v", err) + return "", fmt.Errorf("could not export report, failed to open file for writing: %v", err) } // Execute the template and write it back to file. 
@@ -102,5 +84,26 @@ func exportCoverageReport(sourceAnalysis *SourceAnalysis, outputPath string) err if err == nil { err = fileCloseErr } - return err + return htmlReportPath, err +} + +// WriteLCOVReport takes a previously performed source analysis and generates an LCOV report from it. +func WriteLCOVReport(sourceAnalysis *SourceAnalysis, reportDir string) (string, error) { + // Generate the LCOV report. + lcovReport := sourceAnalysis.GenerateLCOVReport() + + // If the directory doesn't exist, create it. + err := utils.MakeDirectory(reportDir) + if err != nil { + return "", err + } + + // Write the LCOV report to a file. + lcovReportPath := filepath.Join(reportDir, "lcov.info") + err = os.WriteFile(lcovReportPath, []byte(lcovReport), 0644) + if err != nil { + return "", fmt.Errorf("could not export LCOV report: %v", err) + } + + return lcovReportPath, nil } diff --git a/fuzzing/coverage/source_analysis.go b/fuzzing/coverage/source_analysis.go index 7705d2c1..98c4bc75 100644 --- a/fuzzing/coverage/source_analysis.go +++ b/fuzzing/coverage/source_analysis.go @@ -2,6 +2,7 @@ package coverage import ( "bytes" + "encoding/json" "fmt" "sort" @@ -55,13 +56,78 @@ func (s *SourceAnalysis) CoveredLineCount() int { return count } +// GenerateLCOVReport generates an LCOV report from the source analysis. 
+// The spec of the format is here https://github.com/linux-test-project/lcov/blob/07a1127c2b4390abf4a516e9763fb28a956a9ce4/man/geninfo.1#L989 +func (s *SourceAnalysis) GenerateLCOVReport() string { + var linesHit, linesInstrumented int + var buffer bytes.Buffer + buffer.WriteString("TN:\n") + for _, file := range s.SortedFiles() { + // SF:<absolute path to the source file> + buffer.WriteString(fmt.Sprintf("SF:%s\n", file.Path)) + for idx, line := range file.Lines { + if line.IsActive { + // DA:<line number>,<execution count> + if line.IsCovered { + buffer.WriteString(fmt.Sprintf("DA:%d,%d\n", idx+1, line.SuccessHitCount)) + linesHit++ + } else { + buffer.WriteString(fmt.Sprintf("DA:%d,%d\n", idx+1, 0)) + } + linesInstrumented++ + } + } + // FN:<line number of function start>,<function name> + // FNDA:<execution count>,<function name> + for _, fn := range file.Functions { + byteStart := types.GetSrcMapStart(fn.Src) + length := types.GetSrcMapLength(fn.Src) + + startLine := sort.Search(len(file.CumulativeOffsetByLine), func(i int) bool { + return file.CumulativeOffsetByLine[i] > byteStart + }) + endLine := sort.Search(len(file.CumulativeOffsetByLine), func(i int) bool { + return file.CumulativeOffsetByLine[i] > byteStart+length + }) + + // We are treating any line hit in the definition as a hit for the function. + hit := 0 + for i := startLine; i < endLine; i++ { + // index is zero based, line numbers are 1 based + if file.Lines[i-1].IsActive && file.Lines[i-1].IsCovered { + hit = 1 + } + + } + + // TODO: handle fallback, receive, and constructor + if fn.Name != "" { + buffer.WriteString(fmt.Sprintf("FN:%d,%s\n", startLine, fn.Name)) + buffer.WriteString(fmt.Sprintf("FNDA:%d,%s\n", hit, fn.Name)) + } + + } + buffer.WriteString("end_of_record\n") + } + + return buffer.String() +} + // SourceFileAnalysis describes coverage information for a given source file. type SourceFileAnalysis struct { // Path describes the file path of the source file. This is kept here for access during report generation. Path string + // CumulativeOffsetByLine describes the cumulative byte offset for each line in the source file. 
+ // For example, for a file with 5 lines, the list might look like: [0, 45, 98, 132, 189], where each number is the byte offset of the line's starting position + // This allows us to quickly determine which line a given byte offset falls within using a binary search. + CumulativeOffsetByLine []int + // Lines describes information about a given source line and its coverage. Lines []*SourceLineAnalysis + + // Functions is a list of functions defined in the source file + Functions []*types.FunctionDefinition } // ActiveLineCount returns the count of lines that are marked executable/active within the source file. @@ -130,13 +196,50 @@ func AnalyzeSourceCoverage(compilations []types.Compilation, coverageMaps *Cover return nil, fmt.Errorf("could not perform source code analysis, code was not cached for '%v'", sourcePath) } + lines, cumulativeOffset := parseSourceLines(compilation.SourceCode[sourcePath]) + funcs := make([]*types.FunctionDefinition, 0) + + var ast types.AST + b, err := json.Marshal(compilation.SourcePathToArtifact[sourcePath].Ast) + if err != nil { + return nil, fmt.Errorf("could not encode AST from sources: %v", err) + } + err = json.Unmarshal(b, &ast) + if err != nil { + return nil, fmt.Errorf("could not parse AST from sources: %v", err) + } + + for _, node := range ast.Nodes { + + if node.GetNodeType() == "FunctionDefinition" { + fn := node.(types.FunctionDefinition) + funcs = append(funcs, &fn) + } + if node.GetNodeType() == "ContractDefinition" { + contract := node.(types.ContractDefinition) + if contract.Kind == types.ContractKindInterface { + continue + } + for _, subNode := range contract.Nodes { + if subNode.GetNodeType() == "FunctionDefinition" { + fn := subNode.(types.FunctionDefinition) + funcs = append(funcs, &fn) + } + } + } + + } + // Obtain the parsed source code lines for this source. 
if _, ok := sourceAnalysis.Files[sourcePath]; !ok { sourceAnalysis.Files[sourcePath] = &SourceFileAnalysis{ - Path: sourcePath, - Lines: parseSourceLines(compilation.SourceCode[sourcePath]), + Path: sourcePath, + CumulativeOffsetByLine: cumulativeOffset, + Lines: lines, + Functions: funcs, } } + } } @@ -231,25 +334,26 @@ // Obtain the source file this element maps to. if sourceFile, ok := sourceAnalysis.Files[sourcePath]; ok { // Mark all lines which fall within this range. - matchedSourceLine := false - for _, sourceLine := range sourceFile.Lines { - // Check if the line is within range - if sourceMapElement.Offset >= sourceLine.Start && sourceMapElement.Offset < sourceLine.End { - // Mark the line active/executable. - sourceLine.IsActive = true - - // Set its coverage state and increment hit counts - sourceLine.SuccessHitCount += succHitCount - sourceLine.RevertHitCount += revertHitCount - sourceLine.IsCovered = sourceLine.IsCovered || sourceLine.SuccessHitCount > 0 - sourceLine.IsCoveredReverted = sourceLine.IsCoveredReverted || sourceLine.RevertHitCount > 0 - - // Indicate we matched a source line, so when we stop matching sequentially, we know we can exit - // early. - matchedSourceLine = true - } else if matchedSourceLine { - break - } + start := sourceMapElement.Offset + + startLine := sort.Search(len(sourceFile.CumulativeOffsetByLine), func(i int) bool { + return sourceFile.CumulativeOffsetByLine[i] > start + }) + + // index is zero based, line numbers are 1 based + sourceLine := sourceFile.Lines[startLine-1] + + // Check if the line is within range + if sourceMapElement.Offset < sourceLine.End { + // Mark the line active/executable. 
+ sourceLine.IsActive = true + + // Set its coverage state and increment hit counts + sourceLine.SuccessHitCount += succHitCount + sourceLine.RevertHitCount += revertHitCount + sourceLine.IsCovered = sourceLine.IsCovered || sourceLine.SuccessHitCount > 0 + sourceLine.IsCoveredReverted = sourceLine.IsCoveredReverted || sourceLine.RevertHitCount > 0 + } } else { return fmt.Errorf("could not perform source code analysis, missing source '%v'", sourcePath) @@ -298,10 +402,11 @@ func filterSourceMaps(compilation types.Compilation, sourceMap types.SourceMap) // parseSourceLines splits the provided source code into SourceLineAnalysis objects. // Returns the SourceLineAnalysis objects. -func parseSourceLines(sourceCode []byte) []*SourceLineAnalysis { +func parseSourceLines(sourceCode []byte) ([]*SourceLineAnalysis, []int) { // Create our lines and a variable to track where our current line start offset is. var lines []*SourceLineAnalysis var lineStart int + var cumulativeOffset []int // Split the source code on new line characters sourceCodeLinesBytes := bytes.Split(sourceCode, []byte("\n")) @@ -317,9 +422,10 @@ func parseSourceLines(sourceCode []byte) []*SourceLineAnalysis { IsCovered: false, IsCoveredReverted: false, }) + cumulativeOffset = append(cumulativeOffset, int(lineStart)) lineStart = lineEnd } // Return the resulting lines - return lines + return lines, cumulativeOffset } diff --git a/fuzzing/fuzzer.go b/fuzzing/fuzzer.go index 0b9b1500..4d862dc0 100644 --- a/fuzzing/fuzzer.go +++ b/fuzzing/fuzzer.go @@ -409,6 +409,7 @@ func chainSetupFromCompilations(fuzzer *Fuzzer, testChain *chain.TestChain) (*ex fuzzer.config.Fuzzing.TargetContracts = []string{contract.Name()} found = true } else { + // TODO list options for the user to choose from return nil, fmt.Errorf("specify target contract(s)") } } @@ -836,13 +837,33 @@ func (f *Fuzzer) Start() error { f.printExitingResults() // Finally, generate our coverage report if we have set a valid corpus directory. 
- if err == nil && f.config.Fuzzing.CorpusDirectory != "" { - coverageReportPath := filepath.Join(f.config.Fuzzing.CorpusDirectory, "coverage_report.html") - err = coverage.GenerateReport(f.compilations, f.corpus.CoverageMaps(), coverageReportPath) + if err == nil && len(f.config.Fuzzing.CoverageFormats) > 0 { + // Write to the default directory if we have no corpus directory set. + coverageReportDir := filepath.Join("crytic-export", "coverage") + if f.config.Fuzzing.CorpusDirectory != "" { + coverageReportDir = filepath.Join(f.config.Fuzzing.CorpusDirectory, "coverage") + } + sourceAnalysis, err := coverage.AnalyzeSourceCoverage(f.compilations, f.corpus.CoverageMaps()) + if err != nil { - f.logger.Error("Failed to generate coverage report", err) + f.logger.Error("Failed to analyze source coverage", err) } else { - f.logger.Info("Coverage report saved to file: ", colors.Bold, coverageReportPath, colors.Reset) + var path string + for _, reportType := range f.config.Fuzzing.CoverageFormats { + switch reportType { + case "html": + path, err = coverage.WriteHTMLReport(sourceAnalysis, coverageReportDir) + case "lcov": + path, err = coverage.WriteLCOVReport(sourceAnalysis, coverageReportDir) + default: + err = fmt.Errorf("unsupported coverage report type: %s", reportType) + } + if err != nil { + f.logger.Error(fmt.Sprintf("Failed to generate %s coverage report", reportType), err) + } else { + f.logger.Info(fmt.Sprintf("%s report(s) saved to: %s", reportType, path), colors.Bold, colors.Reset) + } + } } }