Create a script to generate a benchmark comparison of dart2wasm and dart2js (#8195)
1 parent 6ca9458, commit ec20232
Showing 5 changed files with 453 additions and 77 deletions.
79 changes: 79 additions & 0 deletions
packages/devtools_app/benchmark/scripts/args.dart
@@ -0,0 +1,79 @@
// Copyright 2024 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

import 'package:args/args.dart';

/// A base class for handling arguments for benchmark scripts.
abstract class BenchmarkArgsBase {
  late final ArgParser argParser;
  late final ArgResults argResults;

  static const _saveToFileOption = 'save-to-file';
  static const _baselineOption = 'baseline';
  static const _averageOfOption = 'average-of';

  int get averageOf => int.parse(argResults[_averageOfOption]);
  String? get saveToFileLocation => argResults[_saveToFileOption];
  String? get baselineLocation => argResults[_baselineOption];

  /// Initializes [argParser] and parses [args] into [argResults].
  void init(List<String> args, {required ArgParser parser}) {
    argParser = parser;
    argResults = argParser.parse(args);
  }
}

/// Extension methods to add [ArgParser] options for benchmark scripts.
extension BenchmarkArgsExtension on ArgParser {
  void addSaveToFileOption(BenchmarkResultsOutputType type) {
    addOption(
      BenchmarkArgument.saveToFile.flagName,
      help: 'Saves the benchmark results to a ${type.name} file at the '
          'provided path (absolute).',
      valueHelp: '/Users/me/Downloads/output.${type.name}',
    );
  }

  void addAverageOfOption() {
    addOption(
      BenchmarkArgument.averageOf.flagName,
      defaultsTo: '1',
      help: 'The number of times to run the benchmark. The returned results '
          'will be the average of all the benchmark runs when this value is '
          'greater than 1.',
      valueHelp: '5',
    );
  }

  void addBaselineOption({String? additionalHelp}) {
    addOption(
      BenchmarkArgument.baseline.flagName,
      help: 'The baseline benchmark data to compare the test benchmark run to. '
          '${additionalHelp ?? ''}',
      valueHelp: '/Users/me/Downloads/baseline.json',
    );
  }
}

/// The possible argument names for benchmark script [ArgParser]s.
enum BenchmarkArgument {
  averageOf(flagName: 'average-of'),
  baseline,
  browser,
  saveToFile(flagName: 'save-to-file'),
  test,
  wasm;

  const BenchmarkArgument({String? flagName}) : _flagName = flagName;

  String get flagName => _flagName ?? name;

  final String? _flagName;
}

/// The file types that benchmark results may be written to.
enum BenchmarkResultsOutputType {
  csv,
  json,
}
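The helpers above are meant to be composed by individual benchmark scripts: a script subclasses BenchmarkArgsBase, builds an ArgParser with the extension methods, and reads the typed getters. Below is a minimal sketch of that pattern; it is not part of this commit, assumes it lives alongside args.dart, and simply mirrors the _Args class added in dart2wasm_performance_diff.dart further down.

```dart
// Hypothetical example script (not in this commit): demonstrates consuming
// the helpers from args.dart. Assumes this file sits next to args.dart.
import 'package:args/args.dart';

import 'args.dart';

class _ExampleArgs extends BenchmarkArgsBase {
  _ExampleArgs(List<String> args) {
    init(
      args,
      parser: ArgParser()
        // Results are written to a CSV file when --save-to-file is passed.
        ..addSaveToFileOption(BenchmarkResultsOutputType.csv)
        // --average-of defaults to '1', so averageOf always parses.
        ..addAverageOfOption()
        ..addBaselineOption(),
    );
  }
}

void main(List<String> args) {
  final scriptArgs = _ExampleArgs(args);
  final baseline = scriptArgs.baselineLocation ?? '<none>';
  final output = scriptArgs.saveToFileLocation ?? '<not saved to file>';
  print('Averaging over ${scriptArgs.averageOf} run(s).');
  print('Baseline: $baseline');
  print('Output: $output');
}
```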
245 changes: 245 additions & 0 deletions
packages/devtools_app/benchmark/scripts/dart2wasm_performance_diff.dart
@@ -0,0 +1,245 @@
// Copyright 2024 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

import 'dart:convert';
import 'dart:io';

import 'package:args/args.dart';
import 'package:intl/intl.dart';
import 'package:path/path.dart' as p;
import 'package:web_benchmarks/analysis.dart';

import 'args.dart';
import 'run_benchmarks.dart';
import 'utils.dart';

/// This is a helper script to perform a comparison of dart2wasm performance
/// with dart2js performance for the full set of DevTools benchmark tests.
///
/// This script:
///
/// 1. Runs the benchmarks with dart2js, averaging the data over a fixed number
///    of test runs (defaults to 1 and can be set using the '--average-of' arg).
///    The average dart2js data is used as baseline data for step #3.
/// 2. Runs the benchmarks with dart2wasm, averaging the data over a fixed
///    number of test runs (defaults to 1 and can be set using the
///    '--average-of' arg). The average dart2wasm data is used as the test data
///    for step #3.
/// 3. Computes the delta between the dart2wasm results (step #2) and the
///    dart2js results (step #1).
/// 4. Outputs the data to a .csv file that can be easily opened in a
///    spreadsheet application (Google Sheets, Excel, etc.) for viewing. By
///    default, this file will be saved to the
///    '~/Downloads/devtools_benchmark_data/' directory with an automatically
///    generated file name. A different output path can be specified using
///    the '--save-to-file' arg.
///
/// Optionally, a previously generated benchmark data file can be used for
/// either or both of the dart2js baseline data (normally generated from step
/// #1) and the dart2wasm test data (normally generated from step #2). To use
/// an existing benchmark run, specify the absolute path to the benchmark data
/// using the '--baseline' or '--test' arguments.
///
/// Example usage:
/// * dart run benchmark/scripts/dart2wasm_performance_diff.dart
///
/// Example usage that averages benchmark results over 5 runs:
/// * dart run benchmark/scripts/dart2wasm_performance_diff.dart --average-of=5
///
/// Example usage that diffs against an existing baseline:
/// * dart run benchmark/scripts/dart2wasm_performance_diff.dart --baseline=/Users/me/Downloads/baseline_run.json
void main(List<String> args) async {
  if (!Directory.current.path.contains('devtools_app')) {
    stderr.writeln(
      'This script must be run from the devtools_app/ directory or one of its '
      'sub-directories.',
    );
    return;
  }

  if (args.isNotEmpty && args.first == '-h') {
    stdout.writeln(_Args._buildArgParser().usage);
    return;
  }

  final scriptArgs = _Args(args);
  final averageOf = scriptArgs.averageOf;

  // Run new benchmarks or parse existing results, and compute the delta.
  final baseline = await runBenchmarkOrUseExisting(
    scriptArgs.baselineLocation,
    averageOf: averageOf,
    useWasm: false,
  );
  final test = await runBenchmarkOrUseExisting(
    scriptArgs.testLocation,
    averageOf: averageOf,
    useWasm: true,
  );
  final delta = computeDelta(baseline, test);

  // Generate the CSV file and download it.
  final csvBuilder = CsvBuilder(saveToLocation: scriptArgs.saveToFileLocation)
    ..writeHeaders(averageOf: averageOf);
  delta.toCsvLines().forEach(csvBuilder.writeLine);
  csvBuilder.download();
}

class CsvBuilder {
  CsvBuilder({this.saveToLocation})
      : assert(saveToLocation == null || saveToLocation.endsWith('.csv'));

  final _sb = StringBuffer();

  final String? saveToLocation;

  void writeHeaders({required int averageOf}) {
    writeLines([
      'Flutter DevTools performance benchmarks diff: dart2wasm diffed against dart2js.',
      'Benchmark results were averaged over $averageOf benchmark run(s).',
      '',
      'These results were auto-generated by a script:',
      'https://github.com/flutter/devtools/blob/master/packages/devtools_app/benchmark/scripts/dart2wasm_performance_diff.dart',
      '',
    ]);

    // Write the Flutter and DevTools commit hash for the benchmark run.
    // TODO(kenz): automatically detect these and write them to the CSV.
    const flutter = '<enter manually by running \'flutter --version\'>';
    const devtools = '<enter manually by running \'git log\'>';
    writeLines(
      [
        'Version info:',
        'Flutter: $flutter',
        'DevTools: $devtools',
        '',
        'Results:',
      ],
    );

    // Write the headers.
    writeLine(
      [
        'Benchmark Name',
        'Metric',
        'Value (micros)',
        'Delta (micros)',
        'Delta (%)',
      ],
    );
  }

  void writeLine(List<String> content) {
    _sb.writeln(convertToCsvLine(content));
  }

  void writeLines(List<String> lines) {
    for (final line in lines) {
      writeLine([line]);
    }
  }

  /// Downloads the current [CsvBuilder] content [_sb] to disk.
  void download() {
    Uri? saveToUri = saveToLocation != null ? Uri.parse(saveToLocation!) : null;
    if (saveToUri == null) {
      final time = DateTime.now();
      final timestamp = DateFormat('yyyy_MM_dd-HH_mm_ss').format(time);
      final fileName = p.join(
        'devtools_benchmark_data',
        'dart2wasm_performance_diff_$timestamp.csv',
      );

      // Try to use the Downloads directory.
      // TODO(kenz): make this code possible to run on Windows if necessary.
      final currentDirectoryParts = Directory.current.uri.pathSegments;
      final downloadsDir = Directory.fromUri(
        Uri.parse(
          // We need the leading slash so that this is an absolute path.
          '/${p.join(
            currentDirectoryParts[0],
            currentDirectoryParts[1],
            'Downloads',
          )}',
        ),
      );
      if (downloadsDir.existsSync()) {
        saveToUri = Uri.parse(p.join(downloadsDir.uri.path, fileName));
      } else {
        stderr.writeln(
          'Warning: could not locate the \'Downloads\' directory at '
          '${downloadsDir.path}. Saving results to the system temp directory '
          'instead.',
        );
        saveToUri = Uri.parse(p.join(Directory.systemTemp.uri.path, fileName));
      }
    }

    final file = File.fromUri(saveToUri)..createSync(recursive: true);
    file.writeAsStringSync(_sb.toString(), flush: true);

    stdout
      ..writeln('Wrote dart2wasm performance diff to ${file.absolute.path}.')
      ..writeln(
        'Open the file in a spreadsheet application '
        '(Google Sheets, Excel, etc.) for viewing.',
      );
  }
}

Future<BenchmarkResults> runBenchmarkOrUseExisting(
  String? existingBenchmarkLocation, {
  required int averageOf,
  required bool useWasm,
}) async {
  if (existingBenchmarkLocation == null) {
    // Run the benchmark [averageOf] times and take the average.
    return await runBenchmarks(
      averageOf: averageOf,
      useWasm: useWasm,
      useBrowser: false,
    );
  } else {
    final existingBenchmarkFile = checkFileExists(existingBenchmarkLocation);
    if (existingBenchmarkFile == null) {
      throw const FileSystemException('Benchmark file does not exist.');
    } else {
      return BenchmarkResults.parse(
        jsonDecode(existingBenchmarkFile.readAsStringSync()),
      );
    }
  }
}

class _Args extends BenchmarkArgsBase {
  _Args(List<String> args) {
    init(args, parser: _buildArgParser());
  }

  String? get testLocation => argResults[BenchmarkArgument.test.flagName];

  static ArgParser _buildArgParser() {
    return ArgParser()
      ..addSaveToFileOption(BenchmarkResultsOutputType.csv)
      ..addAverageOfOption()
      ..addBaselineOption(
        additionalHelp:
            'When specified, this script will use the benchmark data at '
            'the specified path as the baseline instead of generating a new '
            'benchmark run for the dart2js baseline. This file path should '
            'point to a JSON file that was created by running the '
            '`run_benchmarks.dart` script for the dart2js build of DevTools.',
      )
      ..addOption(
        BenchmarkArgument.test.flagName,
        help: 'The test benchmark data (dart2wasm) to use for this performance'
            ' diff. When specified, this script will use the benchmark data at '
            'the specified path instead of generating a new benchmark run for '
            'the dart2wasm data. This file path should point to a JSON file that '
            'was created by running the `run_benchmarks.dart` script with the '
            '`--wasm` flag.',
        valueHelp: '/Users/me/Downloads/dart2wasm_data.json',
      );
  }
}
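Because every flag above can also be pointed at existing data, the whole diff can be produced without running any new benchmarks: when both --baseline and --test are supplied, runBenchmarkOrUseExisting parses the JSON files instead of calling runBenchmarks, and the script only computes the delta and writes the CSV. A hedged example invocation, with placeholder paths in the style of the valueHelp strings above:

* dart run benchmark/scripts/dart2wasm_performance_diff.dart --baseline=/Users/me/Downloads/baseline.json --test=/Users/me/Downloads/dart2wasm_data.json --save-to-file=/Users/me/Downloads/output.csv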