sc-2990 extract proto files from source worktree (#6)
saraiva132 authored Apr 25, 2022
1 parent 00685bb commit 22b251e
Showing 6 changed files with 127 additions and 23 deletions.
2 changes: 1 addition & 1 deletion src/cache.rs
@@ -13,7 +13,7 @@ pub trait RepositoryCache {
 }
 
 pub struct ProtofetchCache {
-    location: PathBuf,
+    pub location: PathBuf,
 }
 
 #[derive(Error, Debug)]
6 changes: 5 additions & 1 deletion src/cli/args.rs
@@ -24,8 +24,12 @@ pub enum Command {
         #[clap(short, long)]
         ///forces re-creation of lock file
         force_lock: bool,
-        #[clap(short, long, default_value = "proto_src")]
+        ///Name of the dependencies source files directory
+        #[clap(short, long, default_value = "dependencies")]
         source_output_directory: String,
+        ///Name of the proto files directory
+        #[clap(short, long, default_value = "proto_src")]
+        proto_output_directory: String,
     },
     ///Creates a lock file based on toml configuration file
     Lock,
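For readers less familiar with clap's derive syntax, here is a minimal stand-alone sketch of how the two output flags behave after this change (a sketch assuming clap 3.x with the derive feature; the struct name and main function are illustrative, not part of this repository):

use clap::Parser;

// Illustrative reproduction of the Fetch flags above.
#[derive(Parser, Debug)]
struct FetchArgs {
    ///forces re-creation of lock file
    #[clap(short, long)]
    force_lock: bool,
    ///Name of the dependencies source files directory
    #[clap(short, long, default_value = "dependencies")]
    source_output_directory: String,
    ///Name of the proto files directory
    #[clap(short, long, default_value = "proto_src")]
    proto_output_directory: String,
}

fn main() {
    // With no flags given, the defaults apply:
    // source_output_directory = "dependencies",
    // proto_output_directory = "proto_src".
    // clap derives the short forms from the first letter: -f, -s and -p.
    let args = FetchArgs::parse();
    println!("{:?}", args);
}

In other words, fetch now keeps dependency checkouts in a dependencies directory (resolved under the cache, see src/cli/command_handlers.rs below) and emits only the .proto files into proto_src, where the old single-directory behaviour put the full checkouts in proto_src.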
12 changes: 9 additions & 3 deletions src/cli/command_handlers.rs
@@ -18,16 +18,22 @@ pub fn do_fetch(
     cache: &ProtofetchCache,
     conf_path: &Path,
     lockfile_path: &Path,
-    out_dir: &Path,
+    dependencies_out_dir: &Path,
+    proto_output_directory: &Path,
 ) -> Result<(), Box<dyn Error>> {
     let lockfile = if force_lock || !lockfile_path.exists() {
         do_lock(cache, conf_path, lockfile_path)?
     } else {
         // read from file
         LockFile::from_file(lockfile_path)?
     };
 
-    fetch::fetch(cache, &lockfile, out_dir)?;
+    let dependencies_out_dir = cache.location.join(dependencies_out_dir);
+    fetch::fetch(
+        cache,
+        &lockfile,
+        &dependencies_out_dir,
+        proto_output_directory,
+    )?;
 
     Ok(())
 }
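A behavioural detail worth noting: the dependencies directory is now resolved relative to the cache location rather than the working directory, which is also why the location field of ProtofetchCache was made pub in src/cache.rs above. A minimal sketch of the path arithmetic, using an illustrative cache path:

use std::path::PathBuf;

fn main() {
    // Illustrative value; in protofetch this is ProtofetchCache::location.
    let cache_location = PathBuf::from("/home/user/.protofetch/cache");

    // Mirrors `cache.location.join(dependencies_out_dir)` in do_fetch:
    let deps = cache_location.join("dependencies");
    assert_eq!(deps, PathBuf::from("/home/user/.protofetch/cache/dependencies"));

    // Caveat of Path::join: an absolute right-hand side replaces the base,
    // so an absolute --source-output-directory would escape the cache.
    assert_eq!(cache_location.join("/tmp/deps"), PathBuf::from("/tmp/deps"));
}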
100 changes: 88 additions & 12 deletions src/fetch.rs
@@ -1,4 +1,9 @@
-use std::{collections::HashMap, path::Path, str::Utf8Error};
+use std::{
+    collections::HashMap,
+    fs,
+    path::{Path, PathBuf},
+    str::Utf8Error,
+};
 
 use crate::{
     cache::{CacheError, RepositoryCache},
@@ -21,6 +26,8 @@ pub enum FetchError {
     Parsing(#[from] crate::model::ParseError),
     #[error("Bad output dir {0}")]
     BadOutputDir(String),
+    #[error("Bad file path {0}")]
+    BadFilePath(String),
     #[error("Error while processing protobuf repository: {0}")]
     ProtoRepoError(#[from] crate::proto_repository::ProtoRepoError),
     #[error("IO error: {0}")]
@@ -85,39 +92,107 @@ pub fn lock<Cache: RepositoryCache>(
 pub fn fetch<Cache: RepositoryCache>(
     cache: &Cache,
     lockfile: &LockFile,
-    out_dir: &Path,
+    dependencies_out_dir: &Path,
+    proto_output_directory: &Path,
 ) -> Result<(), FetchError> {
-    debug!("Fetching dependencies source files...");
-    let out_dir = lockfile
+    info!("Fetching dependencies source files...");
+    let proto_out_dir = lockfile
         .proto_out_dir
         .as_ref()
         .map(Path::new)
-        .unwrap_or(out_dir);
+        .unwrap_or(proto_output_directory);
 
-    if !out_dir.exists() {
-        std::fs::create_dir(out_dir)?;
+    if !dependencies_out_dir.exists() {
+        std::fs::create_dir_all(dependencies_out_dir)?;
     }
 
-    if out_dir.is_dir() {
+    if dependencies_out_dir.is_dir() {
         for dep in &lockfile.dependencies {
             let repo = cache.clone_or_update(&dep.coordinate)?;
-            let work_tree_res =
-                repo.create_worktrees(&dep.name, &lockfile.module_name, &dep.commit_hash, out_dir);
+            let work_tree_res = repo.create_worktrees(
+                &dep.name,
+                &lockfile.module_name,
+                &dep.commit_hash,
+                dependencies_out_dir,
+            );
             if let Err(err) = work_tree_res {
                 error!("Error while trying to create worktrees {err}. \
                 Most likely the worktree sources have been deleted but the worktree metadata has not. \
                 Please delete the cache and run protofetch fetch again.")
             }
         }
 
+        //Copy proto files to actual target
+        copy_proto_files(proto_out_dir, dependencies_out_dir, lockfile)?;
         Ok(())
     } else {
         Err(FetchError::BadOutputDir(
-            out_dir.to_str().unwrap_or("").to_string(),
+            dependencies_out_dir.to_str().unwrap_or("").to_string(),
         ))
     }
 }
 
+pub fn copy_proto_files(
+    proto_out_dir: &Path,
+    source_out_dir: &Path,
+    lockfile: &LockFile,
+) -> Result<(), FetchError> {
+    info!("Copying proto files...");
+    if !proto_out_dir.exists() {
+        std::fs::create_dir_all(proto_out_dir)?;
+    }
+
+    for dep in &lockfile.dependencies {
+        debug!("Copying proto files for {}", dep.name.as_str());
+        let dep_dir = source_out_dir.join(&dep.name);
+        for file in dep_dir.read_dir()? {
+            let path = file?.path();
+            let proto_files = find_proto_files(path.as_path())?;
+            for proto_file_source in proto_files {
+                trace!(
+                    "Copying proto file {}",
+                    &proto_file_source.to_string_lossy()
+                );
+                let proto_src = proto_file_source.strip_prefix(&dep_dir).map_err(|_err| {
+                    FetchError::BadOutputDir(format!(
+                        "Could not create proto source path in {}. Wrong base dir {}",
+                        proto_file_source.to_string_lossy(),
+                        dep_dir.to_string_lossy()
+                    ))
+                })?;
+                let proto_out_dist = proto_out_dir.join(&proto_src);
+                let prefix = proto_out_dist.parent().ok_or_else(|| {
+                    FetchError::BadFilePath(format!(
+                        "Bad parent dest file for {}",
+                        &proto_out_dist.to_string_lossy()
+                    ))
+                })?;
+                std::fs::create_dir_all(prefix)?;
+                fs::copy(proto_file_source.as_path(), proto_out_dist.as_path())?;
+            }
+        }
+    }
+    Ok(())
+}
+
+fn find_proto_files(dir: &Path) -> Result<Vec<PathBuf>, FetchError> {
+    let mut files: Vec<PathBuf> = Vec::new();
+    if dir.is_dir() {
+        for entry in fs::read_dir(dir)? {
+            let entry = entry?;
+            let path = entry.path();
+            if path.is_dir() {
+                let rec_call = find_proto_files(&path)?;
+                files.append(&mut rec_call.clone());
+            } else if let Some(extension) = path.extension() {
+                if extension == "proto" {
+                    files.push(path);
                }
            }
        }
    }
    Ok(files)
}

 //TODO: Make sure we get the last version. Getting the biggest string is extremely error prone.
 // Use semver
 fn resolve_conflicts(dep_map: HashMap<Coordinate, Vec<Revision>>) -> HashMap<Coordinate, Revision> {
@@ -170,6 +245,7 @@ fn remove_duplicates() {
         "github.com".to_string(),
         "test".to_string(),
         "test".to_string(),
+        crate::model::protofetch::Protocol::Https,
     );
     input.insert(coordinate.clone(), vec![
         Revision::Arbitrary {
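The heart of copy_proto_files is a path rewrite: find_proto_files recursively collects every *.proto under the dependency's worktree, then strip_prefix removes the worktree base so the remainder can be re-rooted under proto_out_dir, preserving the directory layout that proto imports rely on. A minimal sketch of just that path math, with illustrative paths and no I/O:

use std::path::{Path, PathBuf};

fn main() {
    // Illustrative inputs; the real values come from the lockfile and
    // from find_proto_files walking the checked-out worktree.
    let dep_dir = Path::new("dependencies/my_dep");
    let proto_out_dir = Path::new("proto_src");
    let source = Path::new("dependencies/my_dep/com/acme/events.proto");

    // Mirrors `proto_file_source.strip_prefix(&dep_dir)` above:
    let relative = source
        .strip_prefix(dep_dir)
        .expect("proto file must live under the dependency dir");
    assert_eq!(relative, Path::new("com/acme/events.proto"));

    // Mirrors `proto_out_dir.join(&proto_src)`: the layout is preserved.
    let dest: PathBuf = proto_out_dir.join(relative);
    assert_eq!(dest, PathBuf::from("proto_src/com/acme/events.proto"));

    // The real code then creates dest's parent dirs and fs::copy's the file.
}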
14 changes: 12 additions & 2 deletions src/main.rs
@@ -30,9 +30,19 @@ fn run() -> Result<(), Box<dyn Error>> {
         cli::args::Command::Fetch {
             force_lock,
             source_output_directory,
+            proto_output_directory,
         } => {
-            let out_dir = Path::new(&source_output_directory);
-            command_handlers::do_fetch(force_lock, &cache, module_path, lockfile_path, out_dir)
+            let dependencies_out_dir = Path::new(&source_output_directory);
+            let proto_output_directory = Path::new(&proto_output_directory);
+
+            command_handlers::do_fetch(
+                force_lock,
+                &cache,
+                module_path,
+                lockfile_path,
+                dependencies_out_dir,
+                proto_output_directory,
+            )
         }
         cli::args::Command::Lock => {
             command_handlers::do_lock(&cache, module_path, lockfile_path)?;
16 changes: 12 additions & 4 deletions src/model/protofetch.rs
@@ -344,6 +344,7 @@ fn load_valid_file_one_dep() {
     let str = r#"
 name = "test_file"
 description = "this is a description"
+proto_out_dir= "./path/to/proto"
 [dependency1]
 protocol = "https"
 url = "github.com/org/repo"
@@ -352,7 +353,7 @@ description = "this is a description"
     let expected = Descriptor {
         name: "test_file".to_string(),
         description: Some("this is a description".to_string()),
-        proto_out_dir: None,
+        proto_out_dir: Some("./path/to/proto".to_string()),
         dependencies: vec![Dependency {
             name: "dependency1".to_string(),
             coordinate: Coordinate {
@@ -373,6 +374,8 @@
 fn load_valid_file_multiple_dep() {
     let str = r#"
 name = "test_file"
+proto_out_dir= "./path/to/proto"
+
 [dependency1]
 protocol = "https"
 url = "github.com/org/repo"
@@ -389,7 +392,7 @@ name = "test_file"
     let mut expected = Descriptor {
         name: "test_file".to_string(),
         description: None,
-        proto_out_dir: None,
+        proto_out_dir: Some("./path/to/proto".to_string()),
         dependencies: vec![
             Dependency {
                 name: "dependency1".to_string(),
@@ -437,11 +440,14 @@ name = "test_file"
 
 #[test]
 fn load_file_no_deps() {
-    let str = r#"name = "test_file""#;
+    let str = r#"
+name = "test_file"
+proto_out_dir = "./path/to/proto"
+"#;
     let expected = Descriptor {
         name: "test_file".to_string(),
         description: None,
-        proto_out_dir: None,
+        proto_out_dir: Some("./path/to/proto".to_string()),
         dependencies: vec![],
     };
     assert_eq!(Descriptor::from_toml_str(str).unwrap(), expected);
@@ -451,6 +457,7 @@
 fn load_invalid_protocol() {
     let str = r#"
 name = "test_file"
+proto_out_dir = "./path/to/proto"
 [dependency1]
 protocol = "ftp"
 url = "github.com/org/repo"
@@ -463,6 +470,7 @@
 fn load_invalid_url() {
     let str = r#"
 name = "test_file"
+proto_out_dir = "./path/to/proto"
 [dependency1]
 protocol = "ftp"
 url = "github.com/org"
