diff --git a/.github/workflows/micro-benchmark.yml b/.github/workflows/micro-benchmark.yml index cac3d1114..d26db189e 100644 --- a/.github/workflows/micro-benchmark.yml +++ b/.github/workflows/micro-benchmark.yml @@ -32,7 +32,7 @@ jobs: shared-key: micro-benchmark - name: Compile - run: cargo build --release -p pacquet_micro_benchmark + run: cargo build --release --bin=micro-benchmark - name: Sleep for CPU cooldown shell: bash @@ -48,7 +48,7 @@ jobs: ref: ${{ github.event.pull_request.head.sha }} - name: Compile - run: cargo build --release -p pacquet_micro_benchmark + run: cargo build --release --bin=micro-benchmark - name: Sleep for CPU cooldown shell: bash diff --git a/Cargo.lock b/Cargo.lock index c1b7ea161..3eb1fe9a6 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -90,6 +90,21 @@ dependencies = [ "serde_json", ] +[[package]] +name = "assert_cmd" +version = "2.0.12" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "88903cb14723e4d4003335bb7f8a14f27691649105346a0f0957466c096adfe6" +dependencies = [ + "anstyle", + "bstr", + "doc-comment", + "predicates", + "predicates-core", + "predicates-tree", + "wait-timeout", +] + [[package]] name = "async-recursion" version = "1.0.5" @@ -98,7 +113,7 @@ checksum = "5fd55a5ba1179988837d24ab4c7cc8ed6efdeff578ede0416b4225a5fca35bd0" dependencies = [ "proc-macro2", "quote", - "syn 2.0.28", + "syn", ] [[package]] @@ -165,6 +180,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6798148dccfbff0fae41c7574d2fa8f1ef3492fba0face179de5d8d447d67b05" dependencies = [ "memchr", + "regex-automata 0.3.4", "serde", ] @@ -262,7 +278,7 @@ dependencies = [ "heck", "proc-macro2", "quote", - "syn 2.0.28", + "syn", ] [[package]] @@ -288,6 +304,12 @@ dependencies = [ "windows-sys 0.48.0", ] +[[package]] +name = "command-extra" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b1b4fe32ea3f2a8d975b6c5cdd3f02ac358f471ca24dbb18d7a4ca58b3193d2d" + 
[[package]] name = "console" version = "0.15.7" @@ -302,9 +324,12 @@ dependencies = [ [[package]] name = "convert_case" -version = "0.4.0" +version = "0.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6245d59a3e82a7fc217c5828a6692dbc6dfb63a0c8c90495621f7b9d79704a0e" +checksum = "ec182b0ca2f35d8fc196cf3404988fd8b8c739a4d270ff118a398feb0cbec1ca" +dependencies = [ + "unicode-segmentation", +] [[package]] name = "core-foundation" @@ -427,15 +452,24 @@ dependencies = [ [[package]] name = "derive_more" -version = "0.99.17" +version = "1.0.0-beta.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4fb810d30a7c1953f91334de7244731fc3f3c10d7fe163338a35b9f640960321" +checksum = "f1335e0609db169713d97c340dd769773c6c63cd953c8fcf1063043fd3d6dd11" +dependencies = [ + "derive_more-impl", +] + +[[package]] +name = "derive_more-impl" +version = "1.0.0-beta.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "df541e0e2a8069352be228ce4b85a1da6f59bfd325e56f57e4b241babbc3f832" dependencies = [ "convert_case", "proc-macro2", "quote", - "rustc_version", - "syn 1.0.109", + "syn", + "unicode-xid", ] [[package]] @@ -444,6 +478,12 @@ version = "0.1.13" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "56254986775e3233ffa9c4d7d3faaf6d36a2c09d30b20687e9f88bc8bafc16c8" +[[package]] +name = "difflib" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6184e33543162437515c2e2b48714794e37845ec9851711914eec9d308f6ebe8" + [[package]] name = "digest" version = "0.10.7" @@ -454,6 +494,18 @@ dependencies = [ "crypto-common", ] +[[package]] +name = "doc-comment" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fea41bba32d969b513997752735605054bc0dfa92b4c56bf1189f2e174be7a10" + +[[package]] +name = "dunce" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" 
+checksum = "56ce8c6da7551ec6c462cbaf3bfbc75131ebbfa1c944aeaa9dab51ca1c5f0c3b" + [[package]] name = "either" version = "1.9.0" @@ -606,7 +658,7 @@ checksum = "89ca545a94061b6365f2c7355b4b32bd20df3ff95f02da9329b34ccc3bd6ee72" dependencies = [ "proc-macro2", "quote", - "syn 2.0.28", + "syn", ] [[package]] @@ -1025,7 +1077,7 @@ checksum = "49e7bc1560b95a3c4a25d03de42fe76ca718ab92d1a22a55b9b4cf67b3ae635c" dependencies = [ "proc-macro2", "quote", - "syn 2.0.28", + "syn", ] [[package]] @@ -1193,7 +1245,7 @@ checksum = "a948666b637a0f465e8564c73e89d4dde00d72d4d473cc972f390fc3dcee7d9c" dependencies = [ "proc-macro2", "quote", - "syn 2.0.28", + "syn", ] [[package]] @@ -1246,65 +1298,73 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c1b04fb49957986fdce4d6ee7a65027d55d4b6d2265e5848bbb507b58ccfdb6f" [[package]] -name = "pacquet_cafs" +name = "pacquet-cafs" version = "0.0.1" dependencies = [ + "derive_more", "miette", "pretty_assertions", "ssri", "tempfile", - "thiserror", ] [[package]] -name = "pacquet_cli" +name = "pacquet-cli" version = "0.0.1" dependencies = [ - "async-recursion", + "assert_cmd", "clap", - "futures-util", + "command-extra", + "derive_more", + "dunce", + "home", "insta", - "junction", - "node-semver", - "pacquet_cafs", - "pacquet_diagnostics", - "pacquet_executor", - "pacquet_lockfile", - "pacquet_npmrc", - "pacquet_package_json", - "pacquet_registry", - "pacquet_tarball", + "miette", + "pacquet-cafs", + "pacquet-diagnostics", + "pacquet-executor", + "pacquet-fs", + "pacquet-lockfile", + "pacquet-npmrc", + "pacquet-package-manager", + "pacquet-package-manifest", + "pacquet-registry", + "pacquet-tarball", + "pacquet-testing-utils", "pipe-trait", "pretty_assertions", - "rayon", - "reflink-copy", "reqwest", - "serde", "serde_json", "tempfile", "tokio", - "walkdir", ] [[package]] -name = "pacquet_diagnostics" +name = "pacquet-diagnostics" version = "0.0.1" dependencies = [ "miette", - "thiserror", "tracing", "tracing-subscriber", 
] [[package]] -name = "pacquet_executor" +name = "pacquet-executor" version = "0.0.1" dependencies = [ - "pacquet_diagnostics", + "derive_more", + "miette", ] [[package]] -name = "pacquet_integrated_benchmark" +name = "pacquet-fs" +version = "0.0.1" +dependencies = [ + "junction", +] + +[[package]] +name = "pacquet-integrated-benchmark" version = "0.0.0" dependencies = [ "clap", @@ -1317,13 +1377,13 @@ dependencies = [ ] [[package]] -name = "pacquet_lockfile" +name = "pacquet-lockfile" version = "0.0.1" dependencies = [ "derive_more", "node-semver", - "pacquet_diagnostics", - "pacquet_package_json", + "pacquet-diagnostics", + "pacquet-package-manifest", "pipe-trait", "pretty_assertions", "serde", @@ -1333,15 +1393,15 @@ dependencies = [ ] [[package]] -name = "pacquet_micro_benchmark" +name = "pacquet-micro-benchmark" version = "0.0.0" dependencies = [ "clap", "criterion", "mockito", "node-semver", - "pacquet_registry", - "pacquet_tarball", + "pacquet-registry", + "pacquet-tarball", "pipe-trait", "project-root", "reqwest", @@ -1350,7 +1410,7 @@ dependencies = [ ] [[package]] -name = "pacquet_npmrc" +name = "pacquet-npmrc" version = "0.0.1" dependencies = [ "home", @@ -1362,11 +1422,40 @@ dependencies = [ ] [[package]] -name = "pacquet_package_json" +name = "pacquet-package-manager" +version = "0.0.1" +dependencies = [ + "async-recursion", + "derive_more", + "futures-util", + "insta", + "miette", + "node-semver", + "pacquet-fs", + "pacquet-lockfile", + "pacquet-npmrc", + "pacquet-package-manifest", + "pacquet-registry", + "pacquet-tarball", + "pacquet-testing-utils", + "pipe-trait", + "pretty_assertions", + "rayon", + "reflink-copy", + "reqwest", + "tempfile", + "tokio", + "tracing", + "walkdir", +] + +[[package]] +name = "pacquet-package-manifest" version = "0.0.1" dependencies = [ + "derive_more", "insta", - "pacquet_diagnostics", + "miette", "pipe-trait", "pretty_assertions", "serde", @@ -1376,11 +1465,13 @@ dependencies = [ ] [[package]] -name = 
"pacquet_registry" +name = "pacquet-registry" version = "0.0.1" dependencies = [ + "derive_more", + "miette", "node-semver", - "pacquet_diagnostics", + "pacquet-diagnostics", "pipe-trait", "pretty_assertions", "reqwest", @@ -1391,12 +1482,14 @@ dependencies = [ ] [[package]] -name = "pacquet_tarball" +name = "pacquet-tarball" version = "0.0.1" dependencies = [ "dashmap", - "pacquet_cafs", - "pacquet_diagnostics", + "derive_more", + "miette", + "pacquet-cafs", + "pacquet-diagnostics", "pipe-trait", "pretty_assertions", "reqwest", @@ -1404,9 +1497,21 @@ dependencies = [ "tar", "tempfile", "tokio", + "tracing", "zune-inflate", ] +[[package]] +name = "pacquet-testing-utils" +version = "0.0.0" +dependencies = [ + "assert_cmd", + "command-extra", + "junction", + "tempfile", + "walkdir", +] + [[package]] name = "parking_lot" version = "0.12.1" @@ -1494,6 +1599,34 @@ version = "0.2.17" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5b40af805b3121feab8a3c29f04d8ad262fa8e0561883e7653e024ae4479e6de" +[[package]] +name = "predicates" +version = "3.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6dfc28575c2e3f19cb3c73b93af36460ae898d426eba6fc15b9bd2a5220758a0" +dependencies = [ + "anstyle", + "difflib", + "itertools 0.11.0", + "predicates-core", +] + +[[package]] +name = "predicates-core" +version = "1.0.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b794032607612e7abeb4db69adb4e33590fa6cf1149e95fd7cb00e634b92f174" + +[[package]] +name = "predicates-tree" +version = "1.0.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "368ba315fb8c5052ab692e68a0eefec6ec57b23a36959c14496f0b0df2c0cecf" +dependencies = [ + "predicates-core", + "termtree", +] + [[package]] name = "pretty_assertions" version = "1.4.0" @@ -1700,15 +1833,6 @@ version = "0.1.23" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"d626bb9dae77e28219937af045c257c28bfd3f69333c512553507f5f9798cb76" -[[package]] -name = "rustc_version" -version = "0.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bfa0f585226d2e68097d4f95d113b15b83a82e819ab25717ec0590d9584ef366" -dependencies = [ - "semver", -] - [[package]] name = "rustix" version = "0.38.13" @@ -1781,12 +1905,6 @@ dependencies = [ "libc", ] -[[package]] -name = "semver" -version = "1.0.18" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b0293b4b29daaf487284529cc2f5675b8e57c61f70167ba415a463651fd6a918" - [[package]] name = "serde" version = "1.0.188" @@ -1804,7 +1922,7 @@ checksum = "4eca7ac642d82aa35b60049a6eccb4be6be75e599bd2e9adb5f875a737654af2" dependencies = [ "proc-macro2", "quote", - "syn 2.0.28", + "syn", ] [[package]] @@ -1986,7 +2104,7 @@ dependencies = [ "proc-macro2", "quote", "rustversion", - "syn 2.0.28", + "syn", ] [[package]] @@ -2017,17 +2135,6 @@ dependencies = [ "is-terminal", ] -[[package]] -name = "syn" -version = "1.0.109" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "72b64191b275b66ffe2469e8af2c1cfe3bafa67b529ead792a6d0160888b4237" -dependencies = [ - "proc-macro2", - "quote", - "unicode-ident", -] - [[package]] name = "syn" version = "2.0.28" @@ -2073,6 +2180,12 @@ dependencies = [ "winapi", ] +[[package]] +name = "termtree" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3369f5ac52d5eb6ab48c6b4ffdc8efbcad6b89c765749064ba298f2c68a16a76" + [[package]] name = "text-block-macros" version = "0.1.1" @@ -2107,7 +2220,7 @@ checksum = "49922ecae66cc8a249b77e68d1d0623c1b2c514f0060c27cdc68bd62a1219d35" dependencies = [ "proc-macro2", "quote", - "syn 2.0.28", + "syn", ] [[package]] @@ -2173,7 +2286,7 @@ checksum = "630bdcf245f78637c13ec01ffae6187cca34625e8c63150d424b59e55af2675e" dependencies = [ "proc-macro2", "quote", - "syn 2.0.28", + "syn", ] [[package]] @@ -2226,7 +2339,7 @@ 
checksum = "5f4f31f56159e98206da9efd823404b79b6ef3143b4a7ab76e67b1751b25a4ab" dependencies = [ "proc-macro2", "quote", - "syn 2.0.28", + "syn", ] [[package]] @@ -2307,12 +2420,24 @@ dependencies = [ "tinyvec", ] +[[package]] +name = "unicode-segmentation" +version = "1.10.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1dd624098567895118886609431a7c3b8f516e41d30e0643f03d94592a147e36" + [[package]] name = "unicode-width" version = "0.1.10" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c0edd1e5b14653f783770bce4a4dabb4a5108a5370a5f5d8cfe8710c361f6c8b" +[[package]] +name = "unicode-xid" +version = "0.2.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f962df74c8c05a667b5ee8bcf162993134c104e96440b663c8daa176dc772d8c" + [[package]] name = "unsafe-libyaml" version = "0.2.9" @@ -2360,6 +2485,15 @@ version = "1.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6a02e4885ed3bc0f2de90ea6dd45ebcbb66dacffe03547fadbb0eeae2770887d" +[[package]] +name = "wait-timeout" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9f200f5b12eb75f8c1ed65abd4b2db8a6e1b138a20de009dacee265a2498f3f6" +dependencies = [ + "libc", +] + [[package]] name = "walkdir" version = "2.4.0" @@ -2406,7 +2540,7 @@ dependencies = [ "once_cell", "proc-macro2", "quote", - "syn 2.0.28", + "syn", "wasm-bindgen-shared", ] @@ -2440,7 +2574,7 @@ checksum = "54681b18a46765f095758388f2d0cf16eb8d4169b639ab575a8f5693af210c7b" dependencies = [ "proc-macro2", "quote", - "syn 2.0.28", + "syn", "wasm-bindgen-backend", "wasm-bindgen-shared", ] diff --git a/Cargo.toml b/Cargo.toml index 4b1f9b766..92d2ea579 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -13,21 +13,26 @@ repository = "https://github.com/anonrig/pacquet" [workspace.dependencies] # Crates -pacquet_cli = { path = "crates/cli" } -pacquet_registry = { path = "crates/registry" } -pacquet_tarball = { 
path = "crates/tarball" } -pacquet_package_json = { path = "crates/package_json" } -pacquet_lockfile = { path = "crates/lockfile" } -pacquet_npmrc = { path = "crates/npmrc" } -pacquet_executor = { path = "crates/executor" } -pacquet_cafs = { path = "crates/cafs" } -pacquet_diagnostics = { path = "crates/diagnostics" } +pacquet-cli = { path = "crates/cli" } +pacquet-fs = { path = "crates/fs" } +pacquet-registry = { path = "crates/registry" } +pacquet-tarball = { path = "crates/tarball" } +pacquet-testing-utils = { path = "crates/testing-utils" } +pacquet-package-manifest = { path = "crates/package-manifest" } +pacquet-package-manager = { path = "crates/package-manager" } +pacquet-lockfile = { path = "crates/lockfile" } +pacquet-npmrc = { path = "crates/npmrc" } +pacquet-executor = { path = "crates/executor" } +pacquet-cafs = { path = "crates/cafs" } +pacquet-diagnostics = { path = "crates/diagnostics" } # Dependencies async-recursion = { version = "1.0.5" } clap = { version = "4", features = ["derive", "string"] } +command-extra = { version = "1.0.0" } dashmap = { version = "5.5.3" } -derive_more = { version = "0.99.17" } +derive_more = { version = "1.0.0-beta.3", features = ["full"] } +dunce = { version = "1.0.4" } home = { version = "0.5.5" } insta = { version = "1.32.0", features = ["yaml", "glob", "walkdir"] } itertools = { version = "0.11.0" } @@ -49,7 +54,6 @@ ssri = { version = "9.0.0" } strum = { version = "0.25.0", features = ["derive"] } tar = { version = "0.4.40" } text-block-macros = { version = "0.1.1" } -thiserror = { version = "1.0.48" } tracing = { version = "0.1.37" } tracing-subscriber = { version = "0.3.17", features = ["env-filter"] } tokio = { version = "1", features = ["rt", "rt-multi-thread", "macros"] } @@ -59,6 +63,7 @@ zune-inflate = { version = "0.2.54" } # Dev dependencies +assert_cmd = { version = "2.0.12" } criterion = { version = "0.5.1", features = ["async_tokio"] } pretty_assertions = { version = "1.4.0" } project-root = { version = 
"0.2.2" } diff --git a/crates/cafs/Cargo.toml b/crates/cafs/Cargo.toml index 66889dc70..02e0b9f5e 100644 --- a/crates/cafs/Cargo.toml +++ b/crates/cafs/Cargo.toml @@ -1,5 +1,5 @@ [package] -name = "pacquet_cafs" +name = "pacquet-cafs" version = "0.0.1" publish = false authors.workspace = true @@ -11,9 +11,9 @@ license.workspace = true repository.workspace = true [dependencies] -miette = { workspace = true } -ssri = { workspace = true } -thiserror = { workspace = true } +derive_more = { workspace = true } +miette = { workspace = true } +ssri = { workspace = true } [dev-dependencies] tempfile = { workspace = true } diff --git a/crates/cafs/src/lib.rs b/crates/cafs/src/lib.rs index 6e620c293..c55179e26 100644 --- a/crates/cafs/src/lib.rs +++ b/crates/cafs/src/lib.rs @@ -5,16 +5,15 @@ use std::{ path::{Path, PathBuf}, }; +use derive_more::{Display, Error, From}; use miette::Diagnostic; use ssri::{Algorithm, IntegrityOpts}; -use thiserror::Error; -#[derive(Error, Debug, Diagnostic)] +#[derive(Debug, Display, Error, From, Diagnostic)] #[non_exhaustive] pub enum CafsError { - #[error(transparent)] #[diagnostic(code(pacquet_cafs::io_error))] - Io(#[from] std::io::Error), + Io(std::io::Error), // TODO: remove derive(From), split this variant } enum FileType { diff --git a/crates/cli/Cargo.toml b/crates/cli/Cargo.toml index c9130efc3..d8932cd1b 100644 --- a/crates/cli/Cargo.toml +++ b/crates/cli/Cargo.toml @@ -1,5 +1,5 @@ [package] -name = "pacquet_cli" +name = "pacquet-cli" version = "0.0.1" publish = false authors.workspace = true @@ -15,30 +15,32 @@ name = "pacquet" path = "src/bin/main.rs" [dependencies] -pacquet_cafs = { workspace = true } -pacquet_executor = { workspace = true } -pacquet_lockfile = { workspace = true } -pacquet_npmrc = { workspace = true } -pacquet_package_json = { workspace = true } -pacquet_registry = { workspace = true } -pacquet_tarball = { workspace = true } -pacquet_diagnostics = { workspace = true } +pacquet-cafs = { workspace = true } 
+pacquet-executor = { workspace = true } +pacquet-fs = { workspace = true } +pacquet-lockfile = { workspace = true } +pacquet-npmrc = { workspace = true } +pacquet-package-manifest = { workspace = true } +pacquet-package-manager = { workspace = true } +pacquet-registry = { workspace = true } +pacquet-tarball = { workspace = true } +pacquet-diagnostics = { workspace = true } -async-recursion = { workspace = true } -clap = { workspace = true } -futures-util = { workspace = true } -rayon = { workspace = true } -reflink-copy = { workspace = true } -junction = { workspace = true } -reqwest = { workspace = true } -node-semver = { workspace = true } -pipe-trait = { workspace = true } -serde = { workspace = true } -serde_json = { workspace = true } -tokio = { workspace = true } +clap = { workspace = true } +derive_more = { workspace = true } +home = { workspace = true } +miette = { workspace = true } +reqwest = { workspace = true } +pipe-trait = { workspace = true } +tokio = { workspace = true } [dev-dependencies] +pacquet-testing-utils = { workspace = true } + +assert_cmd = { workspace = true } +dunce = { workspace = true } +command-extra = { workspace = true } insta = { workspace = true } pretty_assertions = { workspace = true } +serde_json = { workspace = true } tempfile = { workspace = true } -walkdir = { workspace = true } diff --git a/crates/cli/src/bin/main.rs b/crates/cli/src/bin/main.rs index 47413dc36..33ee2777d 100644 --- a/crates/cli/src/bin/main.rs +++ b/crates/cli/src/bin/main.rs @@ -1,6 +1,4 @@ -use pacquet_diagnostics::Result; - #[tokio::main(flavor = "multi_thread")] -pub async fn main() -> Result<()> { - pacquet_cli::run_cli().await +pub async fn main() -> miette::Result<()> { + pacquet_cli::main().await } diff --git a/crates/cli/src/cli_args.rs b/crates/cli/src/cli_args.rs new file mode 100644 index 000000000..1cc3b56a5 --- /dev/null +++ b/crates/cli/src/cli_args.rs @@ -0,0 +1,94 @@ +pub mod add; +pub mod install; +pub mod run; +pub mod store; + +use 
crate::State; +use add::AddArgs; +use clap::{Parser, Subcommand}; +use install::InstallArgs; +use miette::Context; +use pacquet_executor::execute_shell; +use pacquet_npmrc::Npmrc; +use pacquet_package_manifest::PackageManifest; +use run::RunArgs; +use std::{env, path::PathBuf}; +use store::StoreCommand; + +/// Experimental package manager for node.js written in rust. +#[derive(Debug, Parser)] +#[clap(name = "pacquet")] +#[clap(bin_name = "pacquet")] +#[clap(version = "0.2.1")] +#[clap(about = "Experimental package manager for node.js")] +pub struct CliArgs { + #[clap(subcommand)] + pub command: CliCommand, + + /// Set working directory. + #[clap(short = 'C', long, default_value = ".")] + pub dir: PathBuf, +} + +#[derive(Subcommand, Debug)] +pub enum CliCommand { + /// Initialize a package.json + Init, + /// Add a package + Add(AddArgs), + /// Install packages + Install(InstallArgs), + /// Runs a package's "test" script, if one was provided. + Test, + /// Runs a defined package script. + Run(RunArgs), + /// Runs an arbitrary command specified in the package's start property of its scripts object. + Start, + /// Managing the package store. 
+ #[clap(subcommand)] + Store(StoreCommand), +} + +impl CliArgs { + /// Execute the command + pub async fn run(self) -> miette::Result<()> { + let CliArgs { command, dir } = self; + let manifest_path = || dir.join("package.json"); + let npmrc = || Npmrc::current(env::current_dir, home::home_dir, Default::default).leak(); + let state = || State::init(manifest_path(), npmrc()).wrap_err("initialize the state"); + + match command { + CliCommand::Init => { + PackageManifest::init(&manifest_path()).wrap_err("initialize package.json")?; + } + CliCommand::Add(args) => args.run(state()?).await?, + CliCommand::Install(args) => args.run(state()?).await?, + CliCommand::Test => { + let manifest = PackageManifest::from_path(manifest_path()) + .wrap_err("getting the package.json in current directory")?; + if let Some(script) = manifest.script("test", false)? { + execute_shell(script) + .wrap_err(format!("executing command: \"{0}\"", script))?; + } + } + CliCommand::Run(args) => args.run(manifest_path())?, + CliCommand::Start => { + // Runs an arbitrary command specified in the package's start property of its scripts + // object. If no start property is specified on the scripts object, it will attempt to + // run node server.js as a default, failing if neither are present. + // The intended usage of the property is to specify a command that starts your program. + let manifest = PackageManifest::from_path(manifest_path()) + .wrap_err("getting the package.json in current directory")?; + let command = if let Some(script) = manifest.script("start", true)? 
{ + script + } else { + "node server.js" + }; + execute_shell(command).wrap_err(format!("executing command: \"{0}\"", command))?; + } + CliCommand::Store(command) => command.run(|| npmrc())?, + } + + Ok(()) + } +} diff --git a/crates/cli/src/cli_args/add.rs b/crates/cli/src/cli_args/add.rs new file mode 100644 index 000000000..df7f4beb8 --- /dev/null +++ b/crates/cli/src/cli_args/add.rs @@ -0,0 +1,205 @@ +use crate::State; +use clap::Args; +use miette::Context; +use pacquet_package_manager::Add; +use pacquet_package_manifest::DependencyGroup; +use std::path::PathBuf; + +#[derive(Debug, Args)] +pub struct AddDependencyOptions { + /// Install the specified packages as regular dependencies. + #[clap(short = 'P', long)] + save_prod: bool, + /// Install the specified packages as devDependencies. + #[clap(short = 'D', long)] + save_dev: bool, + /// Install the specified packages as optionalDependencies. + #[clap(short = 'O', long)] + save_optional: bool, + /// Using --save-peer will add one or more packages to peerDependencies and install them as dev dependencies + #[clap(long)] + save_peer: bool, +} + +impl AddDependencyOptions { + /// Whether to add entry to `"dependencies"`. + /// + /// **NOTE:** no `--save-*` flags implies save as prod. + #[inline(always)] + fn save_prod(&self) -> bool { + let &AddDependencyOptions { save_prod, save_dev, save_optional, save_peer } = self; + save_prod || (!save_dev && !save_optional && !save_peer) + } + + /// Whether to add entry to `"devDependencies"`. + /// + /// **NOTE:** `--save-peer` without any other `--save-*` flags implies save as dev. + #[inline(always)] + fn save_dev(&self) -> bool { + let &AddDependencyOptions { save_prod, save_dev, save_optional, save_peer } = self; + save_dev || (!save_prod && !save_optional && save_peer) + } + + /// Whether to add entry to `"optionalDependencies"`. + #[inline(always)] + fn save_optional(&self) -> bool { + self.save_optional + } + + /// Whether to add entry to `"peerDependencies"`. 
+ #[inline(always)] + fn save_peer(&self) -> bool { + self.save_peer + } + + /// Convert the `--save-*` flags to an iterator of [`DependencyGroup`] + /// which selects which target group to save to. + fn dependency_groups(&self) -> impl Iterator { + std::iter::empty() + .chain(self.save_prod().then_some(DependencyGroup::Prod)) + .chain(self.save_dev().then_some(DependencyGroup::Dev)) + .chain(self.save_optional().then_some(DependencyGroup::Optional)) + .chain(self.save_peer().then_some(DependencyGroup::Peer)) + } +} + +#[derive(Debug, Args)] +pub struct AddArgs { + /// Name of the package + pub package_name: String, // TODO: 1. support version range, 2. multiple arguments, 3. name this `packages` + /// --save-prod, --save-dev, --save-optional, --save-peer + #[clap(flatten)] + pub dependency_options: AddDependencyOptions, + /// Saved dependencies will be configured with an exact version rather than using + /// the default semver range operator. + #[clap(short = 'E', long = "save-exact")] + pub save_exact: bool, + /// The directory with links to the store (default is node_modules/.pacquet). + /// All direct and indirect dependencies of the project are linked into this directory + #[clap(long = "virtual-store-dir", default_value = "node_modules/.pacquet")] + pub virtual_store_dir: Option, // TODO: make use of this +} + +impl AddArgs { + /// Execute the subcommand. + pub async fn run(self, mut state: State) -> miette::Result<()> { + // TODO: if a package already exists in another dependency group, don't remove the existing entry. 
+ + let State { tarball_cache, http_client, config, manifest, lockfile } = &mut state; + + Add { + tarball_cache, + http_client, + config, + manifest, + lockfile: lockfile.as_ref(), + list_dependency_groups: || self.dependency_options.dependency_groups(), + package_name: &self.package_name, + save_exact: self.save_exact, + } + .run() + .await + .wrap_err("adding a new package") + } +} + +#[cfg(test)] +mod tests { + use super::*; + use pacquet_package_manifest::DependencyGroup; + use pretty_assertions::assert_eq; + + #[test] + fn dependency_options_to_dependency_groups() { + use DependencyGroup::{Dev, Optional, Peer, Prod}; + let create_list = |opts: AddDependencyOptions| opts.dependency_groups().collect::>(); + + // no flags -> prod + assert_eq!( + create_list(AddDependencyOptions { + save_prod: false, + save_dev: false, + save_optional: false, + save_peer: false + }), + [Prod] + ); + + // --save-prod -> prod + assert_eq!( + create_list(AddDependencyOptions { + save_prod: true, + save_dev: false, + save_optional: false, + save_peer: false + }), + [Prod] + ); + + // --save-dev -> dev + assert_eq!( + create_list(AddDependencyOptions { + save_prod: false, + save_dev: true, + save_optional: false, + save_peer: false + }), + [Dev] + ); + + // --save-optional -> optional + assert_eq!( + create_list(AddDependencyOptions { + save_prod: false, + save_dev: false, + save_optional: true, + save_peer: false + }), + [Optional] + ); + + // --save-peer -> dev + peer + assert_eq!( + create_list(AddDependencyOptions { + save_prod: false, + save_dev: false, + save_optional: false, + save_peer: true + }), + [Dev, Peer] + ); + + // --save-prod --save-peer -> prod + peer + assert_eq!( + create_list(AddDependencyOptions { + save_prod: true, + save_dev: false, + save_optional: false, + save_peer: true + }), + [Prod, Peer] + ); + + // --save-dev --save-peer -> dev + peer + assert_eq!( + create_list(AddDependencyOptions { + save_prod: false, + save_dev: true, + save_optional: false, + 
save_peer: true + }), + [Dev, Peer] + ); + + // --save-optional --save-peer -> optional + peer + assert_eq!( + create_list(AddDependencyOptions { + save_prod: false, + save_dev: false, + save_optional: true, + save_peer: true + }), + [Optional, Peer] + ); + } +} diff --git a/crates/cli/src/cli_args/install.rs b/crates/cli/src/cli_args/install.rs new file mode 100644 index 000000000..26c593de0 --- /dev/null +++ b/crates/cli/src/cli_args/install.rs @@ -0,0 +1,131 @@ +use crate::State; +use clap::Args; +use pacquet_package_manager::Install; +use pacquet_package_manifest::DependencyGroup; + +#[derive(Debug, Args)] +pub struct InstallDependencyOptions { + /// pacquet will not install any package listed in devDependencies and will remove those insofar + /// they were already installed, if the NODE_ENV environment variable is set to production. + /// Use this flag to instruct pacquet to ignore NODE_ENV and take its production status from this + /// flag instead. + #[arg(short = 'P', long)] + prod: bool, + /// Only devDependencies are installed and dependencies are removed insofar they were + /// already installed, regardless of the NODE_ENV. + #[arg(short = 'D', long)] + dev: bool, + /// optionalDependencies are not installed. + #[arg(long)] + no_optional: bool, +} + +impl InstallDependencyOptions { + /// Convert the dependency options to an iterator of [`DependencyGroup`] + /// which filters the types of dependencies to install. 
+ fn dependency_groups(&self) -> impl Iterator { + let &InstallDependencyOptions { prod, dev, no_optional } = self; + let has_both = prod == dev; + let has_prod = has_both || prod; + let has_dev = has_both || dev; + let has_optional = !no_optional; + std::iter::empty() + .chain(has_prod.then_some(DependencyGroup::Prod)) + .chain(has_dev.then_some(DependencyGroup::Dev)) + .chain(has_optional.then_some(DependencyGroup::Optional)) + } +} + +#[derive(Debug, Args)] +pub struct InstallArgs { + /// --prod, --dev, and --no-optional + #[clap(flatten)] + pub dependency_options: InstallDependencyOptions, + + /// Don't generate a lockfile and fail if the lockfile is outdated. + #[clap(long)] + pub frozen_lockfile: bool, +} + +impl InstallArgs { + pub async fn run(self, state: State) -> miette::Result<()> { + let State { tarball_cache, http_client, config, manifest, lockfile } = &state; + let InstallArgs { dependency_options, frozen_lockfile } = self; + + Install { + tarball_cache, + http_client, + config, + manifest, + lockfile: lockfile.as_ref(), + dependency_groups: dependency_options.dependency_groups(), + frozen_lockfile, + } + .run() + .await; + + Ok(()) + } +} + +#[cfg(test)] +mod tests { + use super::*; + use pacquet_package_manifest::DependencyGroup; + use pretty_assertions::assert_eq; + + #[test] + fn dependency_options_to_dependency_groups() { + use DependencyGroup::{Dev, Optional, Prod}; + let create_list = + |opts: InstallDependencyOptions| opts.dependency_groups().collect::>(); + + // no flags -> prod + dev + optional + assert_eq!( + create_list(InstallDependencyOptions { prod: false, dev: false, no_optional: false }), + [Prod, Dev, Optional], + ); + + // --prod -> prod + optional + assert_eq!( + create_list(InstallDependencyOptions { prod: true, dev: false, no_optional: false }), + [Prod, Optional], + ); + + // --dev -> dev + optional + assert_eq!( + create_list(InstallDependencyOptions { prod: false, dev: true, no_optional: false }), + [Dev, Optional], + ); + + 
// --no-optional -> prod + dev + assert_eq!( + create_list(InstallDependencyOptions { prod: false, dev: false, no_optional: true }), + [Prod, Dev], + ); + + // --prod --no-optional -> prod + assert_eq!( + create_list(InstallDependencyOptions { prod: true, dev: false, no_optional: true }), + [Prod], + ); + + // --dev --no-optional -> dev + assert_eq!( + create_list(InstallDependencyOptions { prod: false, dev: true, no_optional: true }), + [Dev], + ); + + // --prod --dev -> prod + dev + optional + assert_eq!( + create_list(InstallDependencyOptions { prod: true, dev: true, no_optional: false }), + [Prod, Dev, Optional], + ); + + // --prod --dev --no-optional -> prod + dev + assert_eq!( + create_list(InstallDependencyOptions { prod: true, dev: true, no_optional: true }), + [Prod, Dev], + ); + } +} diff --git a/crates/cli/src/cli_args/run.rs b/crates/cli/src/cli_args/run.rs new file mode 100644 index 000000000..0ab4cefa2 --- /dev/null +++ b/crates/cli/src/cli_args/run.rs @@ -0,0 +1,41 @@ +use clap::Args; +use miette::Context; +use pacquet_executor::execute_shell; +use pacquet_package_manifest::PackageManifest; +use std::path::PathBuf; + +#[derive(Debug, Args)] +pub struct RunArgs { + /// A pre-defined package script. + pub command: String, + + /// Any additional arguments passed after the script name + pub args: Vec, + + /// You can use the --if-present flag to avoid exiting with a non-zero exit code when the + /// script is undefined. This lets you run potentially undefined scripts without breaking the + /// execution chain. + #[clap(long)] + pub if_present: bool, +} + +impl RunArgs { + /// Execute the subcommand. + pub fn run(self, manifest_path: PathBuf) -> miette::Result<()> { + let RunArgs { command, args, if_present } = self; + + let manifest = PackageManifest::from_path(manifest_path) + .wrap_err("getting the package.json in current directory")?; + + if let Some(script) = manifest.script(&command, if_present)? 
{ + let mut command = script.to_string(); + // append an empty space between script and additional args + command.push(' '); + // then append the additional args + command.push_str(&args.join(" ")); + execute_shell(command.trim())?; + } + + Ok(()) + } +} diff --git a/crates/cli/src/cli_args/store.rs b/crates/cli/src/cli_args/store.rs new file mode 100644 index 000000000..4a908fe41 --- /dev/null +++ b/crates/cli/src/cli_args/store.rs @@ -0,0 +1,41 @@ +use clap::Subcommand; +use miette::Context; +use pacquet_npmrc::Npmrc; + +#[derive(Debug, Subcommand)] +pub enum StoreCommand { + /// Checks for modified packages in the store. + Store, + /// Functionally equivalent to pnpm add, except this adds new packages to the store directly + /// without modifying any projects or files outside of the store. + Add, + /// Removes unreferenced packages from the store. + /// Unreferenced packages are packages that are not used by any projects on the system. + /// Packages can become unreferenced after most installation operations, for instance when + /// dependencies are made redundant. + Prune, + /// Returns the path to the active store directory. + Path, +} + +impl StoreCommand { + /// Execute the subcommand. 
+ pub fn run<'a>(self, config: impl FnOnce() -> &'a Npmrc) -> miette::Result<()> { + match self { + StoreCommand::Store => { + panic!("Not implemented") + } + StoreCommand::Add => { + panic!("Not implemented") + } + StoreCommand::Prune => { + pacquet_cafs::prune_sync(&config().store_dir).wrap_err("pruning store")?; + } + StoreCommand::Path => { + println!("{}", config().store_dir.display()); + } + } + + Ok(()) + } +} diff --git a/crates/cli/src/commands/add.rs b/crates/cli/src/commands/add.rs deleted file mode 100644 index f72387c4e..000000000 --- a/crates/cli/src/commands/add.rs +++ /dev/null @@ -1,300 +0,0 @@ -use clap::Parser; -use std::collections::VecDeque; - -use crate::{ - package::{fetch_package_version_directly, install_package_from_registry}, - package_manager::{PackageManager, PackageManagerError}, -}; -use futures_util::future; -use pacquet_diagnostics::miette::WrapErr; -use pacquet_package_json::DependencyGroup; -use pacquet_registry::PackageVersion; - -#[derive(Parser, Debug)] -pub struct AddCommandArgs { - /// Name of the package - pub package: String, - /// Install the specified packages as regular dependencies. - #[arg(short = 'P', long = "save-prod", group = "dependency_group")] - save_prod: bool, - /// Install the specified packages as devDependencies. - #[arg(short = 'D', long = "save-dev", group = "dependency_group")] - save_dev: bool, - /// Install the specified packages as optionalDependencies. - #[arg(short = 'O', long = "save-optional", group = "dependency_group")] - save_optional: bool, - /// Using --save-peer will add one or more packages to peerDependencies and install them as dev dependencies - #[arg(long = "save-peer", group = "dependency_group")] - save_peer: bool, - /// Saved dependencies will be configured with an exact version rather than using - /// pacquet's default semver range operator. 
- #[arg(short = 'E', long = "save-exact")] - pub save_exact: bool, - /// The directory with links to the store (default is node_modules/.pacquet). - /// All direct and indirect dependencies of the project are linked into this directory - #[arg(long = "virtual-store-dir", default_value = "node_modules/.pacquet")] - pub virtual_store_dir: String, -} - -impl AddCommandArgs { - pub fn dependency_group(&self) -> DependencyGroup { - if self.save_dev { - DependencyGroup::Dev - } else if self.save_optional { - DependencyGroup::Optional - } else if self.save_peer { - DependencyGroup::Peer - } else { - DependencyGroup::Default - } - } -} - -impl PackageManager { - /// Here is a brief overview of what this package does. - /// 1. Get a dependency - /// 2. Save the dependency to node_modules/.pacquet/pkg@version/node_modules/pkg - /// 3. Create a symlink to node_modules/pkg - /// 4. Download all dependencies to node_modules/.pacquet - /// 5. Symlink all dependencies to node_modules/.pacquet/pkg@version/node_modules - /// 6. 
Update package.json - pub async fn add(&mut self, args: &AddCommandArgs) -> Result<(), PackageManagerError> { - let latest_version = fetch_package_version_directly( - &self.tarball_cache, - self.config, - &self.http_client, - &args.package, - "latest", - &self.config.modules_dir, - ) - .await?; - let package_node_modules_path = self - .config - .virtual_store_dir - .join(latest_version.to_virtual_store_name()) - .join("node_modules"); - - let mut queue: VecDeque>> = VecDeque::new(); - let config = &self.config; - let http_client = &self.http_client; - let path = &package_node_modules_path; - - let direct_dependency_handles = - latest_version.dependencies(self.config.auto_install_peers).map(|(name, version)| { - install_package_from_registry( - &self.tarball_cache, - config, - http_client, - name, - version, - path, - ) - }); - - queue.push_front(future::join_all(direct_dependency_handles).await); - - while let Some(dependencies) = queue.pop_front() { - for dependency in dependencies { - let dependency = - dependency.wrap_err("failed to install one of the dependencies.").unwrap(); - let node_modules_path = self - .config - .virtual_store_dir - .join(dependency.to_virtual_store_name()) - .join("node_modules"); - - let handles = dependency.dependencies(self.config.auto_install_peers).map( - |(name, version)| { - install_package_from_registry( - &self.tarball_cache, - config, - http_client, - name, - version, - &node_modules_path, - ) - }, - ); - - queue.push_back(future::join_all(handles).await); - } - } - - self.package_json.add_dependency( - &args.package, - &latest_version.serialize(args.save_exact), - args.dependency_group(), - )?; - // Using --save-peer will add one or more packages to peerDependencies and - // install them as dev dependencies - if args.dependency_group() == DependencyGroup::Peer { - self.package_json.add_dependency( - &args.package, - &latest_version.serialize(args.save_exact), - DependencyGroup::Dev, - )?; - } - self.package_json.save()?; - - 
Ok(()) - } -} - -#[cfg(test)] -mod tests { - use crate::fs::get_all_folders; - use std::{env, fs}; - - use crate::fs::get_filenames_in_folder; - use pacquet_npmrc::Npmrc; - use pacquet_package_json::{DependencyGroup, PackageJson}; - use pretty_assertions::assert_eq; - use tempfile::tempdir; - - use super::*; - - #[tokio::test] - pub async fn should_install_all_dependencies() { - let dir = tempdir().unwrap(); - let virtual_store_dir = dir.path().join("node_modules/.pacquet"); - let current_directory = env::current_dir().unwrap(); - env::set_current_dir(&dir).unwrap(); - let package_json = dir.path().join("package.json"); - let mut manager = PackageManager::new(&package_json, Npmrc::current().leak()).unwrap(); - - // It should create a package_json if not exist - assert!(package_json.exists()); - - let args = AddCommandArgs { - package: "is-even".to_string(), - save_prod: false, - save_dev: false, - save_peer: false, - save_optional: false, - save_exact: false, - virtual_store_dir: virtual_store_dir.to_string_lossy().to_string(), - }; - manager.add(&args).await.unwrap(); - - insta::assert_debug_snapshot!(get_all_folders(dir.path())); - - // Ensure that is-buffer does not have any dependencies - let is_buffer_path = virtual_store_dir.join("is-buffer@1.1.6/node_modules"); - assert_eq!(get_filenames_in_folder(&is_buffer_path), vec!["is-buffer"]); - - // Ensure that is-even have correct dependencies - let is_even_path = virtual_store_dir.join("is-even@1.0.0/node_modules"); - assert_eq!(get_filenames_in_folder(&is_even_path), vec!["is-even", "is-odd"]); - - // Ensure that is-number does not have any dependencies - let is_number_path = virtual_store_dir.join("is-number@3.0.0/node_modules"); - assert_eq!(get_filenames_in_folder(&is_number_path), vec!["is-number", "kind-of"]); - - env::set_current_dir(¤t_directory).unwrap(); - } - - #[tokio::test] - #[cfg(not(target_os = "windows"))] - pub async fn should_symlink_correctly() { - let dir = tempdir().unwrap(); - let 
virtual_store_dir = dir.path().join("node_modules/.pacquet"); - let current_directory = env::current_dir().unwrap(); - env::set_current_dir(&dir).unwrap(); - let package_json = dir.path().join("package.json"); - let mut manager = PackageManager::new(&package_json, Npmrc::current().leak()).unwrap(); - - let args = AddCommandArgs { - package: "is-odd".to_string(), - save_prod: false, - save_dev: false, - save_peer: false, - save_optional: false, - save_exact: false, - virtual_store_dir: virtual_store_dir.to_string_lossy().to_string(), - }; - manager.add(&args).await.unwrap(); - - insta::assert_debug_snapshot!(get_all_folders(dir.path())); - - // Make sure the symlinks are correct - assert_eq!( - fs::read_link(virtual_store_dir.join("is-odd@3.0.1/node_modules/is-number")).unwrap(), - fs::canonicalize(virtual_store_dir.join("is-number@6.0.0/node_modules/is-number")) - .unwrap(), - ); - env::set_current_dir(¤t_directory).unwrap(); - } - - #[tokio::test] - pub async fn should_add_to_package_json() { - let dir = tempdir().unwrap(); - let virtual_store_dir = dir.path().join("node_modules/.pacquet"); - let current_directory = env::current_dir().unwrap(); - env::set_current_dir(&dir).unwrap(); - let package_json = dir.path().join("package.json"); - let mut manager = PackageManager::new(&package_json, Npmrc::current().leak()).unwrap(); - - let args = AddCommandArgs { - package: "is-odd".to_string(), - save_prod: false, - save_dev: false, - save_peer: false, - save_optional: false, - save_exact: false, - virtual_store_dir: virtual_store_dir.to_string_lossy().to_string(), - }; - manager.add(&args).await.unwrap(); - let file = PackageJson::from_path(dir.path().join("package.json")).unwrap(); - assert!(file.dependencies([DependencyGroup::Default]).any(|(k, _)| k == "is-odd")); - env::set_current_dir(¤t_directory).unwrap(); - } - - #[tokio::test] - pub async fn should_add_dev_dependency() { - let dir = tempdir().unwrap(); - let virtual_store_dir = 
dir.path().join("node_modules/.pacquet"); - let current_directory = env::current_dir().unwrap(); - env::set_current_dir(&dir).unwrap(); - let package_json = dir.path().join("package.json"); - let mut manager = PackageManager::new(&package_json, Npmrc::current().leak()).unwrap(); - - let args = AddCommandArgs { - package: "is-odd".to_string(), - save_prod: false, - save_dev: true, - save_peer: false, - save_optional: false, - save_exact: false, - virtual_store_dir: virtual_store_dir.to_string_lossy().to_string(), - }; - manager.add(&args).await.unwrap(); - let file = PackageJson::from_path(dir.path().join("package.json")).unwrap(); - assert!(file.dependencies([DependencyGroup::Dev]).any(|(k, _)| k == "is-odd")); - env::set_current_dir(¤t_directory).unwrap(); - } - - #[tokio::test] - pub async fn should_add_peer_dependency() { - let dir = tempdir().unwrap(); - let virtual_store_dir = dir.path().join("node_modules/.pacquet"); - let current_directory = env::current_dir().unwrap(); - env::set_current_dir(&dir).unwrap(); - let package_json = dir.path().join("package.json"); - let mut manager = PackageManager::new(&package_json, Npmrc::current().leak()).unwrap(); - - let args = AddCommandArgs { - package: "is-odd".to_string(), - save_prod: false, - save_dev: false, - save_peer: true, - save_optional: false, - save_exact: false, - virtual_store_dir: virtual_store_dir.to_string_lossy().to_string(), - }; - manager.add(&args).await.unwrap(); - let file = PackageJson::from_path(dir.path().join("package.json")).unwrap(); - assert!(file.dependencies([DependencyGroup::Dev]).any(|(k, _)| k == "is-odd")); - assert!(file.dependencies([DependencyGroup::Peer]).any(|(k, _)| k == "is-odd")); - env::set_current_dir(¤t_directory).unwrap(); - } -} diff --git a/crates/cli/src/commands/install.rs b/crates/cli/src/commands/install.rs deleted file mode 100644 index 9c4c22e20..000000000 --- a/crates/cli/src/commands/install.rs +++ /dev/null @@ -1,328 +0,0 @@ -use 
crate::package::{install_package_from_registry, install_single_package_to_virtual_store}; -use crate::package_import::symlink_pkg; -use crate::package_manager::{PackageManager, PackageManagerError}; -use async_recursion::async_recursion; -use clap::Parser; -use futures_util::future; -use pacquet_diagnostics::tracing; -use pacquet_lockfile::{ - DependencyPath, Lockfile, PackageSnapshot, PkgName, PkgNameVerPeer, RootProjectSnapshot, -}; -use pacquet_package_json::DependencyGroup; -use pacquet_registry::PackageVersion; -use pipe_trait::Pipe; -use rayon::prelude::*; -use std::collections::HashMap; - -#[derive(Debug, Parser)] -pub struct CliDependencyOptions { - /// pacquet will not install any package listed in devDependencies and will remove those insofar - /// they were already installed, if the NODE_ENV environment variable is set to production. - /// Use this flag to instruct pacquet to ignore NODE_ENV and take its production status from this - /// flag instead. - #[arg(short = 'P', long = "prod")] - pub prod: bool, - /// Only devDependencies are installed and dependencies are removed insofar they were - /// already installed, regardless of the NODE_ENV. - #[arg(short = 'D', long = "dev")] - pub dev: bool, - /// optionalDependencies are not installed. - #[arg(long = "no-optional")] - pub no_optional: bool, -} - -impl CliDependencyOptions { - /// Convert the command arguments to an iterator of [`DependencyGroup`] - /// which filters the types of dependencies to install. 
- fn dependency_groups(&self) -> impl Iterator { - let &CliDependencyOptions { prod, dev, no_optional } = self; - let has_both = prod == dev; - let has_prod = has_both || prod; - let has_dev = has_both || dev; - let has_optional = !no_optional; - std::iter::empty() - .chain(has_prod.then_some(DependencyGroup::Default)) - .chain(has_dev.then_some(DependencyGroup::Dev)) - .chain(has_optional.then_some(DependencyGroup::Optional)) - } -} - -#[derive(Parser, Debug)] -pub struct InstallCommandArgs { - /// --prod, --dev, and --no-optional - #[clap(flatten)] - pub dependency_options: CliDependencyOptions, - - /// Don't generate a lockfile and fail if the lockfile is outdated. - #[clap(long)] - pub frozen_lockfile: bool, -} - -impl PackageManager { - /// Install dependencies of a dependency. - /// - /// This function is used by [`PackageManager::install`] without a lockfile. - #[async_recursion] - async fn install_dependencies_from_registry(&self, package: &PackageVersion) { - let node_modules_path = self - .config - .virtual_store_dir - .join(package.to_virtual_store_name()) - .join("node_modules"); - - tracing::info!(target: "pacquet::install", node_modules = ?node_modules_path, "Start subset"); - - package - .dependencies(self.config.auto_install_peers) - .map(|(name, version_range)| async { - let dependency = install_package_from_registry( - &self.tarball_cache, - self.config, - &self.http_client, - name, - version_range, - &node_modules_path, - ) - .await - .unwrap(); - self.install_dependencies_from_registry(&dependency).await; - }) - .pipe(future::join_all) - .await; - - tracing::info!(target: "pacquet::install", node_modules = ?node_modules_path, "Complete subset"); - } - - /// Generate filesystem layout for the virtual store at `node_modules/.pacquet`. 
- async fn create_virtual_store( - &self, - packages: &Option>, - ) { - let Some(packages) = packages else { - todo!("check project_snapshot, error if it's not empty, do nothing if empty"); - }; - packages - .iter() - .map(|(dependency_path, package_snapshot)| async move { - install_single_package_to_virtual_store( - &self.tarball_cache, - &self.http_client, - self.config, - dependency_path, - package_snapshot, - ) - .await - .unwrap(); - }) - .pipe(future::join_all) - .await; - } - - /// Create symlinks for the direct dependencies. - /// - /// If package `foo@x.y.z` is declared as a dependency in `package.json`, - /// symlink `foo -> .pacquet/foo@x.y.z/node_modules/foo` shall be created - /// in the `node_modules` directory. - fn link_direct_dependencies( - &self, - project_snapshot: &RootProjectSnapshot, - args: &InstallCommandArgs, - ) { - let InstallCommandArgs { dependency_options, .. } = args; - - let RootProjectSnapshot::Single(project_snapshot) = project_snapshot else { - panic!("Monorepo is not yet supported"); - }; - - project_snapshot - .dependencies_by_groups(dependency_options.dependency_groups()) - .collect::>() - .par_iter() - .for_each(|(name, spec)| { - // TODO: the code below is not optimal - let virtual_store_name = - PkgNameVerPeer::new(PkgName::clone(name), spec.version.clone()) - .to_virtual_store_name(); - - let name_str = name.to_string(); - symlink_pkg( - &self - .config - .virtual_store_dir - .join(virtual_store_name) - .join("node_modules") - .join(&name_str), - &self.config.modules_dir.join(&name_str), - ); - }); - } - - /// Jobs of the `install` command. 
- pub async fn install(&self, args: &InstallCommandArgs) -> Result<(), PackageManagerError> { - let InstallCommandArgs { dependency_options, frozen_lockfile } = args; - tracing::info!(target: "pacquet::install", "Start all"); - - match (self.config.lockfile, frozen_lockfile, &self.lockfile) { - (false, _, _) => { - self.package_json - .dependencies(dependency_options.dependency_groups()) - .map(|(name, version_range)| async move { - let dependency = install_package_from_registry( - &self.tarball_cache, - self.config, - &self.http_client, - name, - version_range, - &self.config.modules_dir, - ) - .await - .unwrap(); - self.install_dependencies_from_registry(&dependency).await; - }) - .pipe(future::join_all) - .await; - } - (true, false, Some(_)) | (true, false, None) | (true, true, None) => { - unimplemented!(); - } - (true, true, Some(lockfile)) => { - let Lockfile { lockfile_version, project_snapshot, packages, .. } = lockfile; - assert_eq!(lockfile_version.major, 6); // compatibility check already happens at serde, but this still helps preventing programmer mistakes. 
- - // TODO: check if the lockfile is out-of-date - - assert!( - self.config.prefer_frozen_lockfile, - "Non frozen lockfile is not yet supported", - ); - - self.create_virtual_store(packages).await; - self.link_direct_dependencies(project_snapshot, args); - } - } - - tracing::info!(target: "pacquet::install", "Complete all"); - Ok(()) - } -} - -#[cfg(test)] -mod tests { - use std::env; - use std::io::Result; - - use crate::commands::install::{CliDependencyOptions, InstallCommandArgs}; - use crate::fs::get_all_folders; - use crate::package_manager::PackageManager; - use pacquet_npmrc::Npmrc; - use pacquet_package_json::{DependencyGroup, PackageJson}; - use pretty_assertions::assert_eq; - use tempfile::tempdir; - - // Helper function to check if a path is a symlink or junction - fn is_symlink_or_junction(path: std::path::PathBuf) -> Result { - #[cfg(windows)] - return junction::exists(&path); - - #[cfg(not(windows))] - return Ok(path.is_symlink()); - } - - #[test] - fn install_args_to_dependency_groups() { - use DependencyGroup::{Default, Dev, Optional}; - let create_list = |opts: CliDependencyOptions| opts.dependency_groups().collect::>(); - - // no flags -> prod + dev + optional - assert_eq!( - create_list(CliDependencyOptions { prod: false, dev: false, no_optional: false }), - [Default, Dev, Optional], - ); - - // --prod -> prod + optional - assert_eq!( - create_list(CliDependencyOptions { prod: true, dev: false, no_optional: false }), - [Default, Optional], - ); - - // --dev -> dev + optional - assert_eq!( - create_list(CliDependencyOptions { prod: false, dev: true, no_optional: false }), - [Dev, Optional], - ); - - // --no-optional -> prod + dev - assert_eq!( - create_list(CliDependencyOptions { prod: false, dev: false, no_optional: true }), - [Default, Dev], - ); - - // --prod --no-optional -> prod - assert_eq!( - create_list(CliDependencyOptions { prod: true, dev: false, no_optional: true }), - [Default], - ); - - // --dev --no-optional -> dev - assert_eq!( - 
create_list(CliDependencyOptions { prod: false, dev: true, no_optional: true }), - [Dev], - ); - - // --prod --dev -> prod + dev + optional - assert_eq!( - create_list(CliDependencyOptions { prod: true, dev: true, no_optional: false }), - [Default, Dev, Optional], - ); - - // --prod --dev --no-optional -> prod + dev - assert_eq!( - create_list(CliDependencyOptions { prod: true, dev: true, no_optional: true }), - [Default, Dev], - ); - } - - #[tokio::test] - pub async fn should_install_dependencies() { - let dir = tempdir().unwrap(); - let current_directory = env::current_dir().unwrap(); - env::set_current_dir(dir.path()).unwrap(); - - let package_json_path = dir.path().join("package.json"); - let mut package_json = PackageJson::create_if_needed(package_json_path.clone()).unwrap(); - - package_json.add_dependency("is-odd", "3.0.1", DependencyGroup::Default).unwrap(); - package_json - .add_dependency("fast-decode-uri-component", "1.0.1", DependencyGroup::Dev) - .unwrap(); - - package_json.save().unwrap(); - - let package_manager = - PackageManager::new(&package_json_path, Npmrc::current().leak()).unwrap(); - let args = InstallCommandArgs { - dependency_options: CliDependencyOptions { - prod: false, - dev: false, - no_optional: false, - }, - frozen_lockfile: false, - }; - package_manager.install(&args).await.unwrap(); - - // Make sure the package is installed - assert!(is_symlink_or_junction(dir.path().join("node_modules/is-odd")).unwrap()); - assert!(dir.path().join("node_modules/.pacquet/is-odd@3.0.1").exists()); - // Make sure it installs direct dependencies - assert!(!dir.path().join("node_modules/is-number").exists()); - assert!(dir.path().join("node_modules/.pacquet/is-number@6.0.0").exists()); - // Make sure we install dev-dependencies as well - assert!(is_symlink_or_junction(dir.path().join("node_modules/fast-decode-uri-component")) - .unwrap()); - assert!(dir.path().join("node_modules/.pacquet/fast-decode-uri-component@1.0.1").is_dir()); - - 
insta::assert_debug_snapshot!(get_all_folders(dir.path())); - - env::set_current_dir(¤t_directory).unwrap(); - } -} diff --git a/crates/cli/src/commands/mod.rs b/crates/cli/src/commands/mod.rs deleted file mode 100644 index b0d2e8879..000000000 --- a/crates/cli/src/commands/mod.rs +++ /dev/null @@ -1,49 +0,0 @@ -pub mod add; -pub mod install; -pub mod run; -pub mod store; - -use std::{env, ffi::OsString, path::PathBuf}; - -use crate::commands::{ - add::AddCommandArgs, install::InstallCommandArgs, run::RunCommandArgs, store::StoreSubcommands, -}; -use clap::{Parser, Subcommand}; - -fn default_current_dir() -> OsString { - env::current_dir().expect("failed to get current directory").into_os_string() -} - -/// Experimental package manager for node.js written in rust. -#[derive(Parser, Debug)] -#[command(name = "pacquet")] -#[command(bin_name = "pacquet")] -#[command(version = "0.2.1")] -#[command(about = "Experimental package manager for node.js")] -pub struct Cli { - #[command(subcommand)] - pub subcommand: Subcommands, - - /// Run as if pacquet was started in instead of the current working directory. - #[arg(short = 'C', long = "dir", default_value = default_current_dir())] - pub current_dir: PathBuf, -} - -#[derive(Subcommand, Debug)] -pub enum Subcommands { - /// Initialize a package.json - Init, - /// Add a package - Add(AddCommandArgs), - /// Install packages - Install(InstallCommandArgs), - /// Runs a package's "test" script, if one was provided. - Test, - /// Runs a defined package script. - Run(RunCommandArgs), - /// Runs an arbitrary command specified in the package's start property of its scripts object. - Start, - /// Managing the package store. 
- #[clap(subcommand)] - Store(StoreSubcommands), -} diff --git a/crates/cli/src/commands/run.rs b/crates/cli/src/commands/run.rs deleted file mode 100644 index 8cb7d6f3b..000000000 --- a/crates/cli/src/commands/run.rs +++ /dev/null @@ -1,16 +0,0 @@ -use clap::Parser; - -#[derive(Parser, Debug)] -pub struct RunCommandArgs { - /// A pre-defined package script. - pub command: String, - - /// Any additional arguments passed after the script name - pub args: Vec, - - /// You can use the --if-present flag to avoid exiting with a non-zero exit code when the - /// script is undefined. This lets you run potentially undefined scripts without breaking the - /// execution chain. - #[arg(long = "if-present")] - pub if_present: bool, -} diff --git a/crates/cli/src/commands/store.rs b/crates/cli/src/commands/store.rs deleted file mode 100644 index 334339313..000000000 --- a/crates/cli/src/commands/store.rs +++ /dev/null @@ -1,17 +0,0 @@ -use clap::Subcommand; - -#[derive(Subcommand, Debug)] -pub enum StoreSubcommands { - /// Checks for modified packages in the store. - Store, - /// Functionally equivalent to pnpm add, except this adds new packages to the store directly - /// without modifying any projects or files outside of the store. - Add, - /// Removes unreferenced packages from the store. - /// Unreferenced packages are packages that are not used by any projects on the system. - /// Packages can become unreferenced after most installation operations, for instance when - /// dependencies are made redundant. - Prune, - /// Returns the path to the active store directory. 
- Path, -} diff --git a/crates/cli/src/lib.rs b/crates/cli/src/lib.rs index 924a620c7..679893657 100644 --- a/crates/cli/src/lib.rs +++ b/crates/cli/src/lib.rs @@ -1,137 +1,14 @@ -mod commands; -mod fs; -mod package; -mod package_import; -mod package_manager; +mod cli_args; +mod state; -use crate::package_manager::PackageManager; - -use crate::commands::store::StoreSubcommands; -use crate::commands::{Cli, Subcommands}; use clap::Parser; -use pacquet_diagnostics::{ - enable_tracing_by_env, - miette::{set_panic_hook, IntoDiagnostic, Result, WrapErr}, -}; -use pacquet_executor::execute_shell; -use pacquet_npmrc::Npmrc; -use pacquet_package_json::PackageJson; +use cli_args::CliArgs; +use miette::set_panic_hook; +use pacquet_diagnostics::enable_tracing_by_env; +use state::State; -pub async fn run_cli() -> Result<()> { +pub async fn main() -> miette::Result<()> { enable_tracing_by_env(); set_panic_hook(); - let cli = Cli::parse(); - let config = Npmrc::current().leak(); - run_commands(cli, config).await -} - -async fn run_commands(cli: Cli, config: &'static Npmrc) -> Result<()> { - let package_json_path = cli.current_dir.join("package.json"); - - match &cli.subcommand { - Subcommands::Init => { - // init command throws an error if package.json file exist. - PackageJson::init(&package_json_path).wrap_err("initialize package.json")?; - } - Subcommands::Add(args) => { - let mut package_manager = PackageManager::new(&package_json_path, config) - .wrap_err("initializing the package manager")?; - // TODO if a package already exists in another dependency group, we don't remove - // the existing entry. 
- package_manager.add(args).await.wrap_err("adding a new package")?; - } - Subcommands::Install(args) => { - let package_manager = PackageManager::new(&package_json_path, config) - .wrap_err("initializing the package manager")?; - package_manager - .install(args) - .await - .into_diagnostic() - .wrap_err("installing dependencies")?; - } - Subcommands::Test => { - let package_json = PackageJson::from_path(package_json_path) - .wrap_err("getting the package.json in current directory")?; - if let Some(script) = package_json.script("test", false)? { - execute_shell(script).wrap_err(format!("executing command: \"{0}\"", script))?; - } - } - Subcommands::Run(args) => { - let package_json = PackageJson::from_path(package_json_path) - .wrap_err("getting the package.json in current directory")?; - if let Some(script) = package_json.script(&args.command, args.if_present)? { - let mut command = script.to_string(); - // append an empty space between script and additional args - command.push(' '); - // then append the additional args - command.push_str(&args.args.join(" ")); - execute_shell(command.trim())?; - } - } - Subcommands::Start => { - // Runs an arbitrary command specified in the package's start property of its scripts - // object. If no start property is specified on the scripts object, it will attempt to - // run node server.js as a default, failing if neither are present. - // The intended usage of the property is to specify a command that starts your program. - let package_json = PackageJson::from_path(package_json_path) - .wrap_err("getting the package.json in current directory")?; - let command = if let Some(script) = package_json.script("start", true)? 
{ - script - } else { - "node server.js" - }; - execute_shell(command).wrap_err(format!("executing command: \"{0}\"", command))?; - } - Subcommands::Store(subcommand) => match subcommand { - StoreSubcommands::Store => { - panic!("Not implemented") - } - StoreSubcommands::Add => { - panic!("Not implemented") - } - StoreSubcommands::Prune => { - pacquet_cafs::prune_sync(&config.store_dir).wrap_err("pruning store")?; - } - StoreSubcommands::Path => { - println!("{}", config.store_dir.display()); - } - }, - } - - Ok(()) -} - -#[cfg(test)] -mod tests { - use std::{fs, io::Write}; - - use tempfile::tempdir; - - use super::*; - - #[tokio::test] - async fn init_command_should_create_package_json() { - let parent_folder = tempdir().unwrap(); - let cli = Cli::parse_from(["", "-C", parent_folder.path().to_str().unwrap(), "init"]); - run_commands(cli, Npmrc::current().leak()).await.unwrap(); - assert!(parent_folder.path().join("package.json").exists()); - } - - #[tokio::test] - async fn init_command_should_throw_on_existing_file() { - let parent_folder = tempdir().unwrap(); - let mut file = fs::File::create(parent_folder.path().join("package.json")).unwrap(); - file.write_all("{}".as_bytes()).unwrap(); - assert!(parent_folder.path().join("package.json").exists()); - let cli = Cli::parse_from(["", "-C", parent_folder.path().to_str().unwrap(), "init"]); - run_commands(cli, Npmrc::current().leak()).await.expect_err("should have thrown"); - } - - #[tokio::test] - async fn should_get_store_path() { - let parent_folder = tempdir().unwrap(); - let cli = - Cli::parse_from(["", "-C", parent_folder.path().to_str().unwrap(), "store", "path"]); - run_commands(cli, Npmrc::current().leak()).await.unwrap(); - } + CliArgs::parse().run().await } diff --git a/crates/cli/src/package.rs b/crates/cli/src/package.rs deleted file mode 100644 index bd75fa951..000000000 --- a/crates/cli/src/package.rs +++ /dev/null @@ -1,215 +0,0 @@ -use crate::{ - package_import::{create_virtdir_by_snapshot, 
ImportMethodImpl}, - package_manager::PackageManagerError, -}; -use pacquet_lockfile::{DependencyPath, LockfileResolution, PackageSnapshot, PkgNameVerPeer}; -use pacquet_npmrc::Npmrc; -use pacquet_registry::{Package, PackageVersion}; -use pacquet_tarball::{download_tarball_to_store, Cache}; -use pipe_trait::Pipe; -use reqwest::Client; -use std::{borrow::Cow, path::Path}; - -/// This function execute the following and returns the package -/// - retrieves the package from the registry -/// - extracts the tarball to global store directory (~/Library/../pacquet) -/// - links global store directory to virtual dir (node_modules/.pacquet/..) -/// -/// symlink_path will be appended by the name of the package. Therefore, -/// it should be resolved into the node_modules folder of a subdependency such as -/// `node_modules/.pacquet/fastify@1.0.0/node_modules`. -pub async fn install_package_from_registry( - tarball_cache: &Cache, - config: &'static Npmrc, - http_client: &Client, - name: &str, - version_range: &str, - symlink_path: &Path, -) -> Result { - let package = Package::fetch_from_registry(name, http_client, &config.registry).await?; - let package_version = package.pinned_version(version_range).unwrap(); - internal_fetch(tarball_cache, http_client, package_version, config, symlink_path).await?; - Ok(package_version.to_owned()) -} - -pub async fn fetch_package_version_directly( - tarball_cache: &Cache, - config: &'static Npmrc, - http_client: &Client, - name: &str, - version: &str, - symlink_path: &Path, -) -> Result { - let package_version = - PackageVersion::fetch_from_registry(name, version, http_client, &config.registry).await?; - internal_fetch(tarball_cache, http_client, &package_version, config, symlink_path).await?; - Ok(package_version.to_owned()) -} - -async fn internal_fetch( - tarball_cache: &Cache, - http_client: &Client, - package_version: &PackageVersion, - config: &'static Npmrc, - symlink_path: &Path, -) -> Result<(), PackageManagerError> { - let 
store_folder_name = package_version.to_virtual_store_name(); - - // TODO: skip when it already exists in store? - let cas_paths = download_tarball_to_store( - tarball_cache, - http_client, - &config.store_dir, - package_version.dist.integrity.as_ref().expect("has integrity field"), - package_version.dist.unpacked_size, - package_version.as_tarball_url(), - ) - .await?; - - let save_path = config - .virtual_store_dir - .join(store_folder_name) - .join("node_modules") - .join(&package_version.name); - - config.package_import_method.import( - &cas_paths, - &save_path, - &symlink_path.join(&package_version.name), - )?; - - Ok(()) -} - -pub async fn install_single_package_to_virtual_store( - tarball_cache: &Cache, - http_client: &Client, - config: &'static Npmrc, - dependency_path: &DependencyPath, - package_snapshot: &PackageSnapshot, -) -> Result<(), PackageManagerError> { - let PackageSnapshot { resolution, .. } = package_snapshot; - let DependencyPath { custom_registry, package_specifier } = dependency_path; - - let (tarball_url, integrity) = match resolution { - LockfileResolution::Tarball(tarball_resolution) => { - let integrity = tarball_resolution.integrity.as_deref().unwrap_or_else(|| { - // TODO: how to handle the absent of integrity field? 
- panic!("Current implementation requires integrity, but {dependency_path} doesn't have it"); - }); - (tarball_resolution.tarball.as_str().pipe(Cow::Borrowed), integrity) - } - LockfileResolution::Registry(registry_resolution) => { - let registry = custom_registry.as_ref().unwrap_or(&config.registry); - let registry = registry.strip_suffix('/').unwrap_or(registry); - let PkgNameVerPeer { name, suffix: ver_peer } = package_specifier; - let version = ver_peer.version(); - let bare_name = name.bare.as_str(); - let tarball_url = format!("{registry}/{name}/-/{bare_name}-{version}.tgz"); - let integrity = registry_resolution.integrity.as_str(); - (Cow::Owned(tarball_url), integrity) - } - LockfileResolution::Directory(_) | LockfileResolution::Git(_) => { - panic!("Only TarballResolution and RegistryResolution is supported at the moment, but {dependency_path} requires {resolution:?}"); - } - }; - - // TODO: skip when already exists in store? - let cas_paths = download_tarball_to_store( - tarball_cache, - http_client, - &config.store_dir, - integrity, - None, - &tarball_url, - ) - .await?; - - create_virtdir_by_snapshot( - dependency_path, - &config.virtual_store_dir, - &cas_paths, - config.package_import_method, - package_snapshot, - )?; - - Ok(()) -} - -#[cfg(test)] -mod tests { - use crate::package::install_package_from_registry; - use node_semver::Version; - use pacquet_npmrc::Npmrc; - use pipe_trait::Pipe; - use pretty_assertions::assert_eq; - use std::fs; - use std::path::Path; - use tempfile::tempdir; - - fn create_config(store_dir: &Path, modules_dir: &Path, virtual_store_dir: &Path) -> Npmrc { - Npmrc { - hoist: false, - hoist_pattern: vec![], - public_hoist_pattern: vec![], - shamefully_hoist: false, - store_dir: store_dir.to_path_buf(), - modules_dir: modules_dir.to_path_buf(), - node_linker: Default::default(), - symlink: false, - virtual_store_dir: virtual_store_dir.to_path_buf(), - package_import_method: Default::default(), - modules_cache_max_age: 0, - 
lockfile: false, - prefer_frozen_lockfile: false, - lockfile_include_tarball_url: false, - registry: "https://registry.npmjs.com/".to_string(), - auto_install_peers: false, - dedupe_peer_dependents: false, - strict_peer_dependencies: false, - resolve_peers_from_workspace_root: false, - } - } - - #[tokio::test] - pub async fn should_find_package_version_from_registry() { - let store_dir = tempdir().unwrap(); - let modules_dir = tempdir().unwrap(); - let virtual_store_dir = tempdir().unwrap(); - let config: &'static Npmrc = - create_config(store_dir.path(), modules_dir.path(), virtual_store_dir.path()) - .pipe(Box::new) - .pipe(Box::leak); - let http_client = reqwest::Client::new(); - let symlink_path = tempdir().unwrap(); - let package = install_package_from_registry( - &Default::default(), - config, - &http_client, - "fast-querystring", - "1.0.0", - symlink_path.path(), - ) - .await - .unwrap(); - - assert_eq!(package.name, "fast-querystring"); - assert_eq!( - package.version, - Version { major: 1, minor: 0, patch: 0, build: vec![], pre_release: vec![] } - ); - - let virtual_store_path = virtual_store_dir - .path() - .join(package.to_virtual_store_name()) - .join("node_modules") - .join(&package.name); - assert!(virtual_store_path.is_dir()); - - // Make sure the symlink is resolving to the correct path - assert_eq!( - fs::read_link(symlink_path.path().join(&package.name)).unwrap(), - virtual_store_path - ); - } -} diff --git a/crates/cli/src/package_import.rs b/crates/cli/src/package_import.rs deleted file mode 100644 index f3027a149..000000000 --- a/crates/cli/src/package_import.rs +++ /dev/null @@ -1,154 +0,0 @@ -use std::{ - collections::HashMap, - ffi::OsString, - fs, - io::ErrorKind, - path::{Path, PathBuf}, -}; - -use crate::package_manager::{AutoImportError, PackageManagerError}; -use pacquet_diagnostics::tracing; -use pacquet_lockfile::{ - DependencyPath, PackageSnapshot, PackageSnapshotDependency, PkgNameVerPeer, -}; -use 
pacquet_npmrc::PackageImportMethod; -use rayon::prelude::*; - -pub trait ImportMethodImpl { - fn import( - &self, - cas_files: &HashMap, - save_path: &Path, - symlink_to: &Path, - ) -> Result<(), PackageManagerError>; -} - -impl ImportMethodImpl for PackageImportMethod { - fn import( - &self, - cas_files: &HashMap, - save_path: &Path, - symlink_to: &Path, - ) -> Result<(), PackageManagerError> { - tracing::info!(target: "pacquet::import", ?save_path, ?symlink_to, "Import package"); - match self { - PackageImportMethod::Auto => { - if !save_path.exists() { - cas_files - .into_par_iter() - .try_for_each(|(cleaned_entry, store_path)| { - auto_import(store_path, &save_path.join(cleaned_entry)) - }) - .expect("expected no write errors"); - } - - if !symlink_to.is_symlink() { - if let Some(parent_dir) = symlink_to.parent() { - fs::create_dir_all(parent_dir)?; - } - crate::fs::symlink_dir(save_path, symlink_to)?; - } - } - _ => panic!("Not implemented yet"), - } - - Ok(()) - } -} - -/// This function does 2 things: -/// 1. Install the files from `cas_paths` -/// 2. Create the symlink layout -/// -/// **TODO:** may break this function into 2 later -pub fn create_virtdir_by_snapshot( - dependency_path: &DependencyPath, - virtual_store_dir: &Path, - cas_paths: &HashMap, - import_method: PackageImportMethod, - package_snapshot: &PackageSnapshot, -) -> Result<(), PackageManagerError> { - assert_eq!( - import_method, - PackageImportMethod::Auto, - "Only auto import method is supported, but {dependency_path} requires {import_method:?}", - ); - - // node_modules/.pacquet/pkg-name@x.y.z/node_modules - let virtual_node_modules_dir = virtual_store_dir - .join(dependency_path.package_specifier.to_virtual_store_name()) - .join("node_modules"); - fs::create_dir_all(&virtual_node_modules_dir).unwrap_or_else(|error| { - panic!("Failed to create directory at {virtual_node_modules_dir:?}: {error}") - }); // TODO: proper error propagation - - // 1. 
Install the files from `cas_paths` - let save_path = - virtual_node_modules_dir.join(dependency_path.package_specifier.name.to_string()); - if !save_path.exists() { - cas_paths.par_iter().try_for_each(|(cleaned_entry, store_path)| { - auto_import(store_path, &save_path.join(cleaned_entry)) - })?; - } - - // 2. Create the symlink layout - if let Some(dependencies) = &package_snapshot.dependencies { - dependencies.par_iter().for_each(|(name, spec)| { - let virtual_store_name = match spec { - PackageSnapshotDependency::PkgVerPeer(ver_peer) => { - let package_specifier = PkgNameVerPeer::new(name.clone(), ver_peer.clone()); // TODO: remove copying here - package_specifier.to_virtual_store_name() - } - PackageSnapshotDependency::DependencyPath(dependency_path) => { - dependency_path.package_specifier.to_virtual_store_name() - } - }; - let name_str = name.to_string(); - symlink_pkg( - &virtual_store_dir.join(virtual_store_name).join("node_modules").join(&name_str), - &virtual_node_modules_dir.join(&name_str), - ); - }); - } - - Ok(()) -} - -fn auto_import(source_file: &Path, target_link: &Path) -> Result<(), AutoImportError> { - if target_link.exists() { - return Ok(()); - } - - if let Some(parent_dir) = target_link.parent() { - fs::create_dir_all(parent_dir).map_err(|error| AutoImportError::CreateDir { - dirname: parent_dir.to_path_buf(), - error, - })?; - } - - reflink_copy::reflink_or_copy(source_file, target_link).map_err(|error| { - AutoImportError::CreateLink { - from: source_file.to_path_buf(), - to: target_link.to_path_buf(), - error, - } - })?; // TODO: add hardlink - - Ok(()) -} - -pub fn symlink_pkg(symlink_target: &Path, symlink_path: &Path) { - // NOTE: symlink target in pacquet is absolute yet in pnpm is relative - // TODO: change symlink target to relative - if let Some(parent) = symlink_path.parent() { - fs::create_dir_all(parent).expect("make sure node_modules exist"); // TODO: proper error propagation - } - if let Err(error) = 
crate::fs::symlink_dir(symlink_target, symlink_path) { - match error.kind() { - ErrorKind::AlreadyExists => {} - _ => panic!( - "Failed to create symlink at {symlink_path:?} to {symlink_target:?}: {error}" - ), // TODO: proper error propagation - } - } -} diff --git a/crates/cli/src/package_manager.rs b/crates/cli/src/package_manager.rs deleted file mode 100644 index d80b054c1..000000000 --- a/crates/cli/src/package_manager.rs +++ /dev/null @@ -1,118 +0,0 @@ -use std::{io, path::PathBuf}; - -use pacquet_diagnostics::{ - miette::{self, Diagnostic}, - thiserror::{self, Error}, -}; -use pacquet_lockfile::Lockfile; -use pacquet_npmrc::Npmrc; -use pacquet_package_json::PackageJson; -use pacquet_tarball::Cache; - -#[derive(Error, Debug, Diagnostic)] -pub enum AutoImportError { - #[error("cannot create directory at {dirname:?}: {error}")] - CreateDir { - dirname: PathBuf, - #[source] - error: io::Error, - }, - #[error("fail to create a link from {from:?} to {to:?}: {error}")] - CreateLink { - from: PathBuf, - to: PathBuf, - #[source] - error: io::Error, - }, -} - -#[derive(Error, Debug, Diagnostic)] -#[non_exhaustive] -pub enum PackageManagerError { - #[error(transparent)] - #[diagnostic(transparent)] - Tarball(#[from] pacquet_tarball::TarballError), - - #[error(transparent)] - #[diagnostic(transparent)] - PackageJson(#[from] pacquet_package_json::PackageJsonError), - - #[error(transparent)] - #[diagnostic(transparent)] - LoadLockfileError(#[from] pacquet_lockfile::LoadLockfileError), - - #[error(transparent)] - #[diagnostic(transparent)] - Registry(#[from] pacquet_registry::RegistryError), - - #[error(transparent)] - #[diagnostic(code(pacquet_package_manager::io_error))] - Io(#[from] io::Error), - - #[error(transparent)] - #[diagnostic(transparent)] - AutoImport(#[from] AutoImportError), -} - -pub struct PackageManager { - pub config: &'static Npmrc, - pub package_json: PackageJson, - pub lockfile: Option, - pub http_client: reqwest::Client, - pub(crate) tarball_cache: 
Cache, -} - -impl PackageManager { - pub fn new>( - package_json_path: P, - config: &'static Npmrc, - ) -> Result { - Ok(PackageManager { - config, - package_json: PackageJson::create_if_needed(package_json_path.into())?, - lockfile: call_load_lockfile(config.lockfile, Lockfile::load_from_current_dir)?, - http_client: reqwest::Client::new(), - tarball_cache: Cache::new(), - }) - } -} - -/// Private function to load lockfile from current directory should `config.lockfile` is `true`. -/// -/// This function was extracted to be tested independently. -fn call_load_lockfile( - config_lockfile: bool, - load_lockfile: LoadLockfile, -) -> Result, Error> -where - LoadLockfile: FnOnce() -> Result, Error>, -{ - config_lockfile.then(load_lockfile).transpose().map(Option::flatten) -} - -#[cfg(test)] -mod tests { - use super::*; - use pretty_assertions::assert_eq; - - #[test] - fn test_call_load_lockfile() { - macro_rules! case { - ($config_lockfile:expr, $load_lockfile:expr => $output:expr) => {{ - let config_lockfile = $config_lockfile; - let load_lockfile = $load_lockfile; - let output: Result, &str> = $output; - eprintln!( - "CASE: {config_lockfile:?}, {load_lockfile} => {output:?}", - load_lockfile = stringify!($load_lockfile), - ); - assert_eq!(call_load_lockfile(config_lockfile, load_lockfile), output); - }}; - } - - case!(false, || unreachable!() => Ok(None)); - case!(true, || Err("error") => Err("error")); - case!(true, || Ok(None) => Ok(None)); - case!(true, || Ok(Some("value")) => Ok(Some("value"))); - } -} diff --git a/crates/cli/src/state.rs b/crates/cli/src/state.rs new file mode 100644 index 000000000..6ed86c7d5 --- /dev/null +++ b/crates/cli/src/state.rs @@ -0,0 +1,90 @@ +use derive_more::{Display, Error}; +use miette::Diagnostic; +use pacquet_lockfile::{LoadLockfileError, Lockfile}; +use pacquet_npmrc::Npmrc; +use pacquet_package_manifest::{PackageManifest, PackageManifestError}; +use pacquet_tarball::Cache; +use pipe_trait::Pipe; +use reqwest::Client; +use 
std::path::PathBuf; + +/// Application state when running `pacquet run` or `pacquet install`. +pub struct State { + /// Shared cache that store downloaded tarballs. + pub tarball_cache: Cache, + /// HTTP client to make HTTP requests. + pub http_client: Client, + /// Configuration read from `.npmrc` + pub config: &'static Npmrc, + /// Data from the `package.json` file. + pub manifest: PackageManifest, + /// Data from the `pnpm-lock.yaml` file. + pub lockfile: Option, +} + +/// Error type of [`State::init`]. +#[derive(Debug, Display, Error, Diagnostic)] +#[non_exhaustive] +pub enum InitStateError { + #[diagnostic(transparent)] + LoadManifest(#[error(source)] PackageManifestError), + + #[diagnostic(transparent)] + LoadLockfile(#[error(source)] LoadLockfileError), +} + +impl State { + /// Initialize the application state. + pub fn init(manifest_path: PathBuf, config: &'static Npmrc) -> Result { + Ok(State { + config, + manifest: manifest_path + .pipe(PackageManifest::create_if_needed) + .map_err(InitStateError::LoadManifest)?, + lockfile: call_load_lockfile(config.lockfile, Lockfile::load_from_current_dir) + .map_err(InitStateError::LoadLockfile)?, + http_client: Client::new(), + tarball_cache: Cache::new(), + }) + } +} + +/// Private function to load lockfile from current directory should `config.lockfile` is `true`. +/// +/// This function was extracted to be tested independently. +fn call_load_lockfile( + config_lockfile: bool, + load_lockfile: LoadLockfile, +) -> Result, Error> +where + LoadLockfile: FnOnce() -> Result, Error>, +{ + config_lockfile.then(load_lockfile).transpose().map(Option::flatten) +} + +#[cfg(test)] +mod tests { + use super::*; + use pretty_assertions::assert_eq; + + #[test] + fn test_call_load_lockfile() { + macro_rules! 
case { + ($config_lockfile:expr, $load_lockfile:expr => $output:expr) => {{ + let config_lockfile = $config_lockfile; + let load_lockfile = $load_lockfile; + let output: Result, &str> = $output; + eprintln!( + "CASE: {config_lockfile:?}, {load_lockfile} => {output:?}", + load_lockfile = stringify!($load_lockfile), + ); + assert_eq!(call_load_lockfile(config_lockfile, load_lockfile), output); + }}; + } + + case!(false, || unreachable!() => Ok(None)); + case!(true, || Err("error") => Err("error")); + case!(true, || Ok(None) => Ok(None)); + case!(true, || Ok(Some("value")) => Ok(Some("value"))); + } +} diff --git a/crates/cli/tests/_utils.rs b/crates/cli/tests/_utils.rs new file mode 100644 index 000000000..60a7de59d --- /dev/null +++ b/crates/cli/tests/_utils.rs @@ -0,0 +1,15 @@ +use assert_cmd::prelude::*; +use command_extra::CommandExtra; +use pacquet_testing_utils::bin::pacquet_with_temp_cwd; +use std::ffi::OsStr; +use tempfile::TempDir; + +pub fn exec_pacquet_in_temp_cwd(args: Args) -> TempDir +where + Args: IntoIterator, + Args::Item: AsRef, +{ + let (command, current_dir) = pacquet_with_temp_cwd(); + command.with_args(args).assert().success(); + current_dir +} diff --git a/crates/cli/tests/add.rs b/crates/cli/tests/add.rs new file mode 100644 index 000000000..22e7f28bf --- /dev/null +++ b/crates/cli/tests/add.rs @@ -0,0 +1,88 @@ +pub mod _utils; +pub use _utils::*; + +use pacquet_package_manifest::{DependencyGroup, PackageManifest}; +use pacquet_testing_utils::fs::{get_all_folders, get_filenames_in_folder}; +use pretty_assertions::assert_eq; +use std::{env, fs}; + +#[test] +fn should_install_all_dependencies() { + let dir = exec_pacquet_in_temp_cwd(["add", "is-even"]); + + eprintln!("Directory list"); + insta::assert_debug_snapshot!(get_all_folders(dir.path())); + + let manifest_path = dir.path().join("package.json"); + + eprintln!("Ensure the manifest file ({manifest_path:?}) exists"); + assert!(manifest_path.exists()); + + let virtual_store_dir = 
dir.path().join("node_modules").join(".pacquet"); + + eprintln!("Ensure virtual store dir ({virtual_store_dir:?}) exists"); + assert!(virtual_store_dir.exists()); + + eprintln!("Ensure that is-buffer does not have any dependencies"); + let is_buffer_path = virtual_store_dir.join("is-buffer@1.1.6/node_modules"); + assert_eq!(get_filenames_in_folder(&is_buffer_path), vec!["is-buffer"]); + + eprintln!("Ensure that is-even have correct dependencies"); + let is_even_path = virtual_store_dir.join("is-even@1.0.0/node_modules"); + assert_eq!(get_filenames_in_folder(&is_even_path), vec!["is-even", "is-odd"]); + + eprintln!("Ensure that is-number does not have any dependencies"); + let is_number_path = virtual_store_dir.join("is-number@3.0.0/node_modules"); + assert_eq!(get_filenames_in_folder(&is_number_path), vec!["is-number", "kind-of"]); +} + +#[test] +#[cfg(unix)] +pub fn should_symlink_correctly() { + let dir = exec_pacquet_in_temp_cwd(["add", "is-odd"]); + + eprintln!("Directory list"); + insta::assert_debug_snapshot!(get_all_folders(dir.path())); + + let manifest_path = dir.path().join("package.json"); + + eprintln!("Ensure the manifest file ({manifest_path:?}) exists"); + assert!(manifest_path.exists()); + + let virtual_store_dir = dir.path().join("node_modules").join(".pacquet"); + + eprintln!("Ensure virtual store dir ({virtual_store_dir:?}) exists"); + assert!(virtual_store_dir.exists()); + + eprintln!("Make sure the symlinks are correct"); + assert_eq!( + fs::read_link(virtual_store_dir.join("is-odd@3.0.1/node_modules/is-number")).unwrap(), + fs::canonicalize(virtual_store_dir.join("is-number@6.0.0/node_modules/is-number")).unwrap(), + ); +} + +#[test] +fn should_add_to_package_json() { + let dir = exec_pacquet_in_temp_cwd(["add", "is-odd"]); + let file = PackageManifest::from_path(dir.path().join("package.json")).unwrap(); + eprintln!("Ensure is-odd is added to package.json#dependencies"); + assert!(file.dependencies([DependencyGroup::Prod]).any(|(k, _)| k == 
"is-odd")); +} + +#[test] +fn should_add_dev_dependency() { + let dir = exec_pacquet_in_temp_cwd(["add", "is-odd", "--save-dev"]); + let file = PackageManifest::from_path(dir.path().join("package.json")).unwrap(); + eprintln!("Ensure is-odd is added to package.json#devDependencies"); + assert!(file.dependencies([DependencyGroup::Dev]).any(|(k, _)| k == "is-odd")); +} + +#[test] +fn should_add_peer_dependency() { + let dir = exec_pacquet_in_temp_cwd(["add", "is-odd", "--save-peer"]); + let file = PackageManifest::from_path(dir.path().join("package.json")).unwrap(); + eprintln!("Ensure is-odd is added to package.json#devDependencies"); + assert!(file.dependencies([DependencyGroup::Dev]).any(|(k, _)| k == "is-odd")); + eprintln!("Ensure is-odd is added to package.json#peerDependencies"); + assert!(file.dependencies([DependencyGroup::Peer]).any(|(k, _)| k == "is-odd")); +} diff --git a/crates/cli/tests/init.rs b/crates/cli/tests/init.rs new file mode 100644 index 000000000..56d12b28c --- /dev/null +++ b/crates/cli/tests/init.rs @@ -0,0 +1,43 @@ +pub mod _utils; +pub use _utils::*; + +use command_extra::CommandExtra; +use pacquet_testing_utils::{bin::pacquet_with_temp_cwd, fs::get_filenames_in_folder}; +use pretty_assertions::assert_eq; +use std::{env, fs}; + +#[test] +fn should_create_package_json() { + let dir = exec_pacquet_in_temp_cwd(["init"]); + + let manifest_path = dir.path().join("package.json"); + dbg!(&manifest_path); + + eprintln!("Content of package.json"); + let package_json_content = fs::read_to_string(&manifest_path).expect("read from package.json"); + insta::assert_snapshot!(package_json_content); + + eprintln!("Created files"); + assert_eq!(get_filenames_in_folder(dir.path()), ["package.json"]); +} + +#[test] +fn should_throw_on_existing_file() { + let (command, dir) = pacquet_with_temp_cwd(); + + let manifest_path = dir.path().join("package.json"); + dbg!(&manifest_path); + + eprintln!("Creating package.json..."); + fs::write(&manifest_path, 
"{}").expect("write to package.json"); + + eprintln!("Executing pacquet init..."); + let output = command.with_arg("init").output().expect("execute pacquet init"); + dbg!(&output); + + eprintln!("Exit status code"); + assert!(!output.status.success()); + + eprintln!("Stderr"); + insta::assert_snapshot!(String::from_utf8_lossy(&output.stderr).trim_end()); +} diff --git a/crates/cli/tests/install.rs b/crates/cli/tests/install.rs new file mode 100644 index 000000000..21bf5af1f --- /dev/null +++ b/crates/cli/tests/install.rs @@ -0,0 +1,44 @@ +use assert_cmd::prelude::*; +use command_extra::CommandExtra; +use pacquet_testing_utils::{ + bin::pacquet_with_temp_cwd, + fs::{get_all_folders, is_symlink_or_junction}, +}; +use std::fs; + +#[test] +fn should_install_dependencies() { + let (command, dir) = pacquet_with_temp_cwd(); + + eprintln!("Creating package.json..."); + let manifest_path = dir.path().join("package.json"); + let package_json_content = serde_json::json!({ + "dependencies": { + "is-odd": "3.0.1", + }, + "devDependencies": { + "fast-decode-uri-component": "1.0.1", + }, + }); + fs::write(&manifest_path, package_json_content.to_string()).expect("write to package.json"); + + eprintln!("Executing command..."); + command.with_arg("install").assert().success(); + + eprintln!("Make sure the package is installed"); + assert!(is_symlink_or_junction(&dir.path().join("node_modules/is-odd")).unwrap()); + assert!(dir.path().join("node_modules/.pacquet/is-odd@3.0.1").exists()); + + eprintln!("Make sure it installs direct dependencies"); + assert!(!dir.path().join("node_modules/is-number").exists()); + assert!(dir.path().join("node_modules/.pacquet/is-number@6.0.0").exists()); + + eprintln!("Make sure we install dev-dependencies as well"); + assert!( + is_symlink_or_junction(&dir.path().join("node_modules/fast-decode-uri-component")).unwrap() + ); + assert!(dir.path().join("node_modules/.pacquet/fast-decode-uri-component@1.0.1").is_dir()); + + eprintln!("Directory list"); + 
insta::assert_debug_snapshot!(get_all_folders(dir.path())); +} diff --git a/crates/cli/src/commands/snapshots/pacquet_cli__commands__add__tests__should_install_all_dependencies.snap b/crates/cli/tests/snapshots/add__should_install_all_dependencies.snap similarity index 93% rename from crates/cli/src/commands/snapshots/pacquet_cli__commands__add__tests__should_install_all_dependencies.snap rename to crates/cli/tests/snapshots/add__should_install_all_dependencies.snap index 7ee81ce5b..0591e544f 100644 --- a/crates/cli/src/commands/snapshots/pacquet_cli__commands__add__tests__should_install_all_dependencies.snap +++ b/crates/cli/tests/snapshots/add__should_install_all_dependencies.snap @@ -1,6 +1,7 @@ --- -source: crates/cli/src/commands/add.rs -expression: get_all_folders(&dir.path().to_path_buf()) +source: crates/cli/tests/add.rs +assertion_line: 19 +expression: get_all_folders(dir.path()) --- [ "node_modules", diff --git a/crates/cli/src/commands/snapshots/pacquet_cli__commands__add__tests__should_symlink_correctly.snap b/crates/cli/tests/snapshots/add__should_symlink_correctly.snap similarity index 83% rename from crates/cli/src/commands/snapshots/pacquet_cli__commands__add__tests__should_symlink_correctly.snap rename to crates/cli/tests/snapshots/add__should_symlink_correctly.snap index a18831c72..04f3d30f4 100644 --- a/crates/cli/src/commands/snapshots/pacquet_cli__commands__add__tests__should_symlink_correctly.snap +++ b/crates/cli/tests/snapshots/add__should_symlink_correctly.snap @@ -1,6 +1,7 @@ --- -source: crates/cli/src/commands/add.rs -expression: get_all_folders(&dir.path().to_path_buf()) +source: crates/cli/tests/add.rs +assertion_line: 57 +expression: get_all_folders(dir.path()) --- [ "node_modules", diff --git a/crates/cli/tests/snapshots/init__should_create_package_json.snap b/crates/cli/tests/snapshots/init__should_create_package_json.snap new file mode 100644 index 000000000..619dab683 --- /dev/null +++ 
b/crates/cli/tests/snapshots/init__should_create_package_json.snap @@ -0,0 +1,17 @@ +--- +source: crates/cli/tests/init.rs +assertion_line: 31 +expression: package_json_content +--- +{ + "name": "", + "version": "1.0.0", + "description": "", + "main": "index.js", + "scripts": { + "test": "echo \"Error: no test specified\" && exit 1" + }, + "keywords": [], + "author": "", + "license": "ISC" +} diff --git a/crates/cli/tests/snapshots/init__should_throw_on_existing_file.snap b/crates/cli/tests/snapshots/init__should_throw_on_existing_file.snap new file mode 100644 index 000000000..e35b51a82 --- /dev/null +++ b/crates/cli/tests/snapshots/init__should_throw_on_existing_file.snap @@ -0,0 +1,10 @@ +--- +source: crates/cli/tests/init.rs +assertion_line: 43 +expression: "String::from_utf8_lossy(&output.stderr).trim_end()" +--- +Error: pacquet_package_manifest::already_exist_error + + × initialize package.json + ╰─▶ package.json file already exists + help: Your current working directory already has a package.json file. 
diff --git a/crates/cli/src/commands/snapshots/pacquet_cli__commands__install__tests__should_install_dependencies.snap b/crates/cli/tests/snapshots/install__should_install_dependencies.snap similarity index 88% rename from crates/cli/src/commands/snapshots/pacquet_cli__commands__install__tests__should_install_dependencies.snap rename to crates/cli/tests/snapshots/install__should_install_dependencies.snap index 8d0636664..a059e4940 100644 --- a/crates/cli/src/commands/snapshots/pacquet_cli__commands__install__tests__should_install_dependencies.snap +++ b/crates/cli/tests/snapshots/install__should_install_dependencies.snap @@ -1,6 +1,7 @@ --- -source: crates/cli/src/commands/install.rs -expression: get_all_folders(&dir.path().to_path_buf()) +source: crates/cli/tests/install.rs +assertion_line: 74 +expression: get_all_folders(dir.path()) --- [ "node_modules", diff --git a/crates/cli/tests/store.rs b/crates/cli/tests/store.rs new file mode 100644 index 000000000..fabd48022 --- /dev/null +++ b/crates/cli/tests/store.rs @@ -0,0 +1,41 @@ +use command_extra::CommandExtra; +use pacquet_testing_utils::bin::pacquet_with_temp_cwd; +use pipe_trait::Pipe; +use pretty_assertions::assert_eq; +use std::{ + fs, + path::{Path, PathBuf}, +}; + +/// Handle the slight difference between OSes. +/// +/// **TODO:** may be we should have handle them in the production code instead? 
+fn canonicalize(path: &Path) -> PathBuf { + if cfg!(windows) { + path.to_path_buf() + } else { + dunce::canonicalize(path).expect("canonicalize path") + } +} + +#[test] +fn store_path_should_return_store_dir_from_npmrc() { + let (command, dir) = pacquet_with_temp_cwd(); + + eprintln!("Creating .npmrc..."); + fs::write(dir.path().join(".npmrc"), "store-dir=foo/bar").expect("write to .npmrc"); + + eprintln!("Executing pacquet store path..."); + let output = command.with_args(["store", "path"]).output().expect("run pacquet store path"); + dbg!(&output); + + eprintln!("Exit status code"); + assert!(output.status.success()); + + eprintln!("Stdout"); + let normalize = |path: &str| path.replace('\\', "/"); + assert_eq!( + String::from_utf8_lossy(&output.stdout).trim_end().pipe(normalize), + dir.path().pipe(canonicalize).join("foo/bar").to_string_lossy().pipe_as_ref(normalize), + ); +} diff --git a/crates/diagnostics/Cargo.toml b/crates/diagnostics/Cargo.toml index 0053f7b13..35c7c9876 100644 --- a/crates/diagnostics/Cargo.toml +++ b/crates/diagnostics/Cargo.toml @@ -1,5 +1,5 @@ [package] -name = "pacquet_diagnostics" +name = "pacquet-diagnostics" version = "0.0.1" publish = false authors.workspace = true @@ -12,6 +12,5 @@ repository.workspace = true [dependencies] miette = { workspace = true } -thiserror = { workspace = true } tracing = { workspace = true } tracing-subscriber = { workspace = true } diff --git a/crates/diagnostics/src/lib.rs b/crates/diagnostics/src/lib.rs index 7bcacdc6a..0ca66e6ce 100644 --- a/crates/diagnostics/src/lib.rs +++ b/crates/diagnostics/src/lib.rs @@ -1,12 +1,6 @@ mod local_tracing; pub use miette; -pub use thiserror; pub use tracing; pub use local_tracing::enable_tracing_by_env; - -pub type Error = miette::Error; -pub type Severity = miette::Severity; -pub type Report = miette::Report; -pub type Result = miette::Result; diff --git a/crates/executor/Cargo.toml b/crates/executor/Cargo.toml index 242b8102e..c457268b0 100644 --- 
a/crates/executor/Cargo.toml +++ b/crates/executor/Cargo.toml @@ -1,5 +1,5 @@ [package] -name = "pacquet_executor" +name = "pacquet-executor" version = "0.0.1" publish = false authors.workspace = true @@ -11,4 +11,5 @@ license.workspace = true repository.workspace = true [dependencies] -pacquet_diagnostics = { workspace = true } +derive_more = { workspace = true } +miette = { workspace = true } diff --git a/crates/executor/src/lib.rs b/crates/executor/src/lib.rs index 63fb9241f..8eb379ddf 100644 --- a/crates/executor/src/lib.rs +++ b/crates/executor/src/lib.rs @@ -1,22 +1,24 @@ +use derive_more::{Display, Error}; +use miette::Diagnostic; use std::process::Command; -use pacquet_diagnostics::{ - miette::{self, Diagnostic, Result}, - thiserror::{self, Error}, -}; - -#[derive(Error, Debug, Diagnostic)] +#[derive(Debug, Display, Error, Diagnostic)] #[non_exhaustive] pub enum ExecutorError { - #[error(transparent)] - #[diagnostic(code(pacquet_executor::io_error))] - Io(#[from] std::io::Error), + #[display("Failed to spawn command: {_0}")] + #[diagnostic(code(pacquet_executor::spawn_command))] + SpawnCommand(#[error(source)] std::io::Error), + + #[display("Process exits with an error: {_0}")] + #[diagnostic(code(pacquet_executor::wait_process))] + WaitProcess(#[error(source)] std::io::Error), } pub fn execute_shell(command: &str) -> Result<(), ExecutorError> { - let mut cmd = Command::new("sh").arg("-c").arg(command).spawn()?; + let mut cmd = + Command::new("sh").arg("-c").arg(command).spawn().map_err(ExecutorError::SpawnCommand)?; - cmd.wait()?; + cmd.wait().map_err(ExecutorError::WaitProcess)?; Ok(()) } diff --git a/crates/fs/Cargo.toml b/crates/fs/Cargo.toml new file mode 100644 index 000000000..71b7095df --- /dev/null +++ b/crates/fs/Cargo.toml @@ -0,0 +1,14 @@ +[package] +name = "pacquet-fs" +description = "Filesystem utility functions used by pacquet" +version = "0.0.1" +publish = false +authors.workspace = true +edition.workspace = true +homepage.workspace = true 
+keywords.workspace = true +license.workspace = true +repository.workspace = true + +[target.'cfg(windows)'.dependencies] +junction = { workspace = true } diff --git a/crates/fs/src/lib.rs b/crates/fs/src/lib.rs new file mode 100644 index 000000000..08c68721e --- /dev/null +++ b/crates/fs/src/lib.rs @@ -0,0 +1,11 @@ +use std::{io, path::Path}; + +/// Create a symlink to a directory. +/// +/// The `link` path will be a symbolic link pointing to `original`. +pub fn symlink_dir(original: &Path, link: &Path) -> io::Result<()> { + #[cfg(unix)] + return std::os::unix::fs::symlink(original, link); + #[cfg(windows)] + return junction::create(original, link); // junctions instead of symlinks because symlinks may require elevated privileges. +} diff --git a/crates/lockfile/Cargo.toml b/crates/lockfile/Cargo.toml index 992e4b874..25bc77151 100644 --- a/crates/lockfile/Cargo.toml +++ b/crates/lockfile/Cargo.toml @@ -1,5 +1,5 @@ [package] -name = "pacquet_lockfile" +name = "pacquet-lockfile" version = "0.0.1" publish = false authors.workspace = true @@ -11,8 +11,8 @@ license.workspace = true repository.workspace = true [dependencies] -pacquet_diagnostics = { workspace = true } -pacquet_package_json = { workspace = true } +pacquet-diagnostics = { workspace = true } +pacquet-package-manifest = { workspace = true } derive_more = { workspace = true } node-semver = { workspace = true } diff --git a/crates/lockfile/src/comver.rs b/crates/lockfile/src/comver.rs index 291b314b9..b6b740006 100644 --- a/crates/lockfile/src/comver.rs +++ b/crates/lockfile/src/comver.rs @@ -6,7 +6,7 @@ use std::{num::ParseIntError, str::FromStr}; /// /// It contains only major and minor. #[derive(Debug, Display, Clone, Copy, PartialEq, Eq, Deserialize, Serialize)] -#[display(fmt = "{major}.{minor}")] +#[display("{major}.{minor}")] #[serde(try_from = "&'de str", into = "String")] pub struct ComVer { pub major: u16, @@ -23,11 +23,11 @@ impl ComVer { /// Error when parsing [`ComVer`] from a string. 
#[derive(Debug, Display, Error)] pub enum ParseComVerError { - #[display(fmt = "Dot is missing")] + #[display("Dot is missing")] MissingDot, - #[display(fmt = "Major is not a valid number: {_0}")] + #[display("Major is not a valid number: {_0}")] InvalidMajor(ParseIntError), - #[display(fmt = "Minor is not a valid number: {_0}")] + #[display("Minor is not a valid number: {_0}")] InvalidMinor(ParseIntError), } diff --git a/crates/lockfile/src/dependency_path.rs b/crates/lockfile/src/dependency_path.rs index 2e0f27eea..bc0e2f86f 100644 --- a/crates/lockfile/src/dependency_path.rs +++ b/crates/lockfile/src/dependency_path.rs @@ -17,7 +17,7 @@ use std::str::FromStr; /// * `registry.npmjs.com/ts-node@10.9.1(@types/node@18.7.19)(typescript@5.1.6)` /// * `registry.node-modules.io/ts-node@10.9.1(@types/node@18.7.19)(typescript@5.1.6)` #[derive(Debug, Display, Clone, PartialEq, Eq, Hash, Deserialize, Serialize)] -#[display(fmt = "{}/{package_specifier}", "custom_registry.as_deref().unwrap_or_default()")] +#[display("{}/{package_specifier}", custom_registry.as_deref().unwrap_or_default())] #[serde(try_from = "&'de str", into = "String")] pub struct DependencyPath { pub custom_registry: Option, @@ -27,9 +27,9 @@ pub struct DependencyPath { /// Error when parsing [`DependencyPath`] from a string. 
#[derive(Debug, Display, Error)] pub enum ParseDependencyPathError { - #[display(fmt = "Invalid syntax")] + #[display("Invalid syntax")] InvalidSyntax, - #[display(fmt = "Failed to parse specifier: {_0}")] + #[display("Failed to parse specifier: {_0}")] ParsePackageSpecifierFailure(ParsePkgNameVerPeerError), } diff --git a/crates/lockfile/src/lib.rs b/crates/lockfile/src/lib.rs index e962975f8..2c2683ec4 100644 --- a/crates/lockfile/src/lib.rs +++ b/crates/lockfile/src/lib.rs @@ -15,24 +15,22 @@ mod resolution; mod resolved_dependency; mod root_project_snapshot; -pub use comver::{ComVer, ParseComVerError}; -pub use dependency_path::DependencyPath; -pub use load_lockfile::LoadLockfileError; -pub use lockfile_version::LockfileVersion; -pub use multi_project_snapshot::MultiProjectSnapshot; -pub use package_snapshot::{LockfilePeerDependencyMetaValue, PackageSnapshot}; -pub use package_snapshot_dependency::PackageSnapshotDependency; -pub use pkg_name::{ParsePkgNameError, PkgName}; -pub use pkg_name_suffix::{ParsePkgNameSuffixError, PkgNameSuffix}; -pub use pkg_name_ver::{ParsePkgNameVerError, PkgNameVer}; -pub use pkg_name_ver_peer::{ParsePkgNameVerPeerError, PkgNameVerPeer}; -pub use pkg_ver_peer::{ParsePkgVerPeerError, PkgVerPeer}; -pub use project_snapshot::ProjectSnapshot; -pub use resolution::{ - DirectoryResolution, GitResolution, LockfileResolution, RegistryResolution, TarballResolution, -}; -pub use resolved_dependency::{ResolvedDependencyMap, ResolvedDependencySpec}; -pub use root_project_snapshot::RootProjectSnapshot; +pub use comver::*; +pub use dependency_path::*; +pub use load_lockfile::*; +pub use lockfile_version::*; +pub use multi_project_snapshot::*; +pub use package_snapshot::*; +pub use package_snapshot_dependency::*; +pub use pkg_name::*; +pub use pkg_name_suffix::*; +pub use pkg_name_ver::*; +pub use pkg_name_ver_peer::*; +pub use pkg_ver_peer::*; +pub use project_snapshot::*; +pub use resolution::*; +pub use resolved_dependency::*; +pub use 
root_project_snapshot::*; use serde::{Deserialize, Serialize}; use std::collections::HashMap; diff --git a/crates/lockfile/src/load_lockfile.rs b/crates/lockfile/src/load_lockfile.rs index 83b7f9b87..2f4a6bec0 100644 --- a/crates/lockfile/src/load_lockfile.rs +++ b/crates/lockfile/src/load_lockfile.rs @@ -11,15 +11,15 @@ use std::{ #[derive(Debug, Display, Error, Diagnostic)] #[non_exhaustive] pub enum LoadLockfileError { - #[display(fmt = "Failed to get current_dir: {_0}")] + #[display("Failed to get current_dir: {_0}")] #[diagnostic(code(pacquet_lockfile::current_dir))] CurrentDir(io::Error), - #[display(fmt = "Failed to read lockfile content: {_0}")] + #[display("Failed to read lockfile content: {_0}")] #[diagnostic(code(pacquet_lockfile::read_file))] ReadFile(io::Error), - #[display(fmt = "Failed to parse lockfile content as YAML: {_0}")] + #[display("Failed to parse lockfile content as YAML: {_0}")] #[diagnostic(code(pacquet_lockfile::parse_yaml))] ParseYaml(serde_yaml::Error), } diff --git a/crates/lockfile/src/lockfile_version.rs b/crates/lockfile/src/lockfile_version.rs index 4fb6ccc6b..bf0c3fbde 100644 --- a/crates/lockfile/src/lockfile_version.rs +++ b/crates/lockfile/src/lockfile_version.rs @@ -19,7 +19,7 @@ impl LockfileVersion { /// Error when [`ComVer`] fails compatibility check. #[derive(Debug, Display, Error)] pub enum LockfileVersionError { - #[display(fmt = "The lockfileVersion of {_0} is incompatible with {MAJOR}.x")] + #[display("The lockfileVersion of {_0} is incompatible with {MAJOR}.x")] IncompatibleMajor(#[error(not(source))] ComVer), } diff --git a/crates/lockfile/src/pkg_name.rs b/crates/lockfile/src/pkg_name.rs index d8c695427..f58d6799f 100644 --- a/crates/lockfile/src/pkg_name.rs +++ b/crates/lockfile/src/pkg_name.rs @@ -21,9 +21,9 @@ pub struct PkgName { /// Error when parsing [`PkgName`] from a string input. 
#[derive(Debug, Display, Error)] pub enum ParsePkgNameError { - #[display(fmt = "Missing bare name")] + #[display("Missing bare name")] MissingName, - #[display(fmt = "Name is empty")] + #[display("Name is empty")] EmptyName, } diff --git a/crates/lockfile/src/pkg_name_suffix.rs b/crates/lockfile/src/pkg_name_suffix.rs index 6f1e5e6a6..d5e905fa3 100644 --- a/crates/lockfile/src/pkg_name_suffix.rs +++ b/crates/lockfile/src/pkg_name_suffix.rs @@ -2,7 +2,7 @@ use crate::{ParsePkgNameError, PkgName}; use derive_more::{Display, Error}; use serde::{Deserialize, Serialize}; use split_first_char::SplitFirstChar; -use std::{fmt::Display, str::FromStr}; +use std::str::FromStr; /// Syntax: `{name}@{suffix}` /// @@ -10,8 +10,8 @@ use std::{fmt::Display, str::FromStr}; /// * `ts-node@10.9.1`, `@types/node@18.7.19`, `typescript@5.1.6` /// * `react-json-view@1.21.3(@types/react@17.0.49)(react-dom@17.0.2)(react@17.0.2)` #[derive(Debug, Display, Clone, PartialEq, Eq, Hash, Deserialize, Serialize)] -#[display(fmt = "{name}@{suffix}")] -#[display(bound = "Suffix: Display")] +#[display("{name}@{suffix}")] +#[display(bound(Suffix: Display))] #[serde(try_from = "&'de str", into = "String")] #[serde(bound( deserialize = "Suffix: FromStr, Suffix::Err: Display", @@ -31,17 +31,17 @@ impl PkgNameSuffix { /// Error when parsing [`PkgNameSuffix`] from a string. 
#[derive(Debug, Display, Error)] -#[display(bound = "ParseSuffixError: Display")] +#[display(bound(ParseSuffixError: Display))] pub enum ParsePkgNameSuffixError { - #[display(fmt = "Input is empty")] + #[display("Input is empty")] EmptyInput, - #[display(fmt = "Suffix is missing")] + #[display("Suffix is missing")] MissingSuffix, - #[display(fmt = "Name is empty")] + #[display("Name is empty")] EmptyName, - #[display(fmt = "Failed to parse suffix: {_0}")] + #[display("Failed to parse suffix: {_0}")] ParseSuffixFailure(#[error(source)] ParseSuffixError), - #[display(fmt = "Failed to parse name: {_0}")] + #[display("Failed to parse name: {_0}")] ParseNameFailure(#[error(source)] ParsePkgNameError), } diff --git a/crates/lockfile/src/pkg_ver_peer.rs b/crates/lockfile/src/pkg_ver_peer.rs index a5c3acd32..299725868 100644 --- a/crates/lockfile/src/pkg_ver_peer.rs +++ b/crates/lockfile/src/pkg_ver_peer.rs @@ -10,7 +10,7 @@ use std::str::FromStr; /// /// **NOTE:** The peer part isn't guaranteed to be correct. It is only assumed to be. #[derive(Debug, Display, Clone, PartialEq, Eq, Hash, Serialize, Deserialize)] -#[display(fmt = "{version}{peer}")] +#[display("{version}{peer}")] #[serde(try_from = "&'de str", into = "String")] pub struct PkgVerPeer { version: Version, @@ -38,9 +38,9 @@ impl PkgVerPeer { /// Error when parsing [`PkgVerPeer`] from a string. 
#[derive(Debug, Display, Error)] pub enum ParsePkgVerPeerError { - #[display(fmt = "Failed to parse the version part: {_0}")] + #[display("Failed to parse the version part: {_0}")] ParseVersionFailure(#[error(source)] SemverError), - #[display(fmt = "Mismatch parenthesis")] + #[display("Mismatch parenthesis")] MismatchParenthesis, } diff --git a/crates/lockfile/src/project_snapshot.rs b/crates/lockfile/src/project_snapshot.rs index ab17e0b1a..690b6ebe7 100644 --- a/crates/lockfile/src/project_snapshot.rs +++ b/crates/lockfile/src/project_snapshot.rs @@ -1,5 +1,5 @@ use crate::{PkgName, ResolvedDependencyMap, ResolvedDependencySpec}; -use pacquet_package_json::DependencyGroup; +use pacquet_package_manifest::DependencyGroup; use serde::{Deserialize, Serialize}; use std::collections::HashMap; @@ -25,7 +25,7 @@ impl ProjectSnapshot { /// Lookup dependency map according to group. pub fn get_map_by_group(&self, group: DependencyGroup) -> Option<&'_ ResolvedDependencyMap> { match group { - DependencyGroup::Default => self.dependencies.as_ref(), + DependencyGroup::Prod => self.dependencies.as_ref(), DependencyGroup::Optional => self.optional_dependencies.as_ref(), DependencyGroup::Dev => self.dev_dependencies.as_ref(), DependencyGroup::Peer => None, @@ -74,7 +74,7 @@ mod tests { #[test] fn dependencies_by_groups() { - use DependencyGroup::{Default, Dev, Optional, Peer}; + use DependencyGroup::{Dev, Optional, Peer, Prod}; macro_rules! 
case { ($input:expr => $output:expr) => {{ @@ -95,7 +95,7 @@ mod tests { } case!([] => []); - case!([Default] => [ + case!([Prod] => [ ("react", "^17.0.2", "17.0.2"), ("react-dom", "^17.0.2", "17.0.2(react@17.0.2)"), ]); @@ -107,16 +107,16 @@ mod tests { ("ts-node", "10.9.1", "10.9.1(@types/node@18.7.19)(typescript@5.1.6)"), ("typescript", "^5.1.6", "5.1.6"), ]); - case!([Default, Peer] => [ + case!([Prod, Peer] => [ ("react", "^17.0.2", "17.0.2"), ("react-dom", "^17.0.2", "17.0.2(react@17.0.2)"), ]); - case!([Default, Peer, Optional] => [ + case!([Prod, Peer, Optional] => [ ("@types/node", "^18.7.19", "18.7.19"), ("react", "^17.0.2", "17.0.2"), ("react-dom", "^17.0.2", "17.0.2(react@17.0.2)"), ]); - case!([Default, Peer, Optional, Dev] => [ + case!([Prod, Peer, Optional, Dev] => [ ("@types/node", "^18.7.19", "18.7.19"), ("react", "^17.0.2", "17.0.2"), ("react-dom", "^17.0.2", "17.0.2(react@17.0.2)"), diff --git a/crates/npmrc/Cargo.toml b/crates/npmrc/Cargo.toml index 9ea53c7fa..ec11d9ab4 100644 --- a/crates/npmrc/Cargo.toml +++ b/crates/npmrc/Cargo.toml @@ -1,5 +1,5 @@ [package] -name = "pacquet_npmrc" +name = "pacquet-npmrc" version = "0.0.1" publish = false authors.workspace = true diff --git a/crates/npmrc/src/lib.rs b/crates/npmrc/src/lib.rs index c4b797f99..d7e9323ea 100644 --- a/crates/npmrc/src/lib.rs +++ b/crates/npmrc/src/lib.rs @@ -2,7 +2,7 @@ mod custom_deserializer; use pipe_trait::Pipe; use serde::Deserialize; -use std::{env, fs, path::PathBuf}; +use std::{fs, path::PathBuf}; use crate::custom_deserializer::{ bool_true, default_hoist_pattern, default_modules_cache_max_age, default_modules_dir, @@ -160,21 +160,32 @@ impl Npmrc { /// Try loading `.npmrc` in the current directory. /// If fails, try in the home directory. /// If fails again, return the default. 
- pub fn current() -> Self { - let path = match env::current_dir() { - Ok(dir) => Some(dir.join(".npmrc")), - _ => home::home_dir().map(|dir| dir.join(".npmrc")), + pub fn current( + current_dir: CurrentDir, + home_dir: HomeDir, + default: Default, + ) -> Self + where + CurrentDir: FnOnce() -> Result, + HomeDir: FnOnce() -> Option, + Default: FnOnce() -> Npmrc, + { + // TODO: this code makes no sense. + // TODO: it should have merged the settings. + + let load = |dir: PathBuf| -> Option { + dir.join(".npmrc") + .pipe(fs::read_to_string) + .ok()? // TODO: should it throw error instead? + .pipe_as_ref(serde_ini::from_str) + .ok() // TODO: should it throw error instead? }; - if let Some(file) = path { - if let Ok(content) = fs::read_to_string(file) { - if let Ok(npmrc) = serde_ini::from_str(&content) { - return npmrc; - } - } - } - - Npmrc::default() + current_dir() + .ok() + .and_then(load) + .or_else(|| home_dir().and_then(load)) + .unwrap_or_else(default) } /// Persist the config data until the program terminates. 
@@ -191,7 +202,7 @@ impl Default for Npmrc { #[cfg(test)] mod tests { - use std::{env, io::Write, str::FromStr}; + use std::{env, str::FromStr}; use pretty_assertions::assert_eq; use tempfile::tempdir; @@ -250,12 +261,6 @@ mod tests { env::remove_var("XDG_DATA_HOME"); } - #[test] - pub fn should_return_npmrc() { - let value = Npmrc::current(); - assert!(value.symlink); - } - #[test] pub fn should_use_relative_virtual_store_dir() { let value: Npmrc = serde_ini::from_str("virtual-store-dir=node_modules/.pacquet").unwrap(); @@ -284,25 +289,48 @@ mod tests { #[test] pub fn test_current_folder_for_npmrc() { let tmp = tempdir().unwrap(); - let current_directory = env::current_dir().unwrap(); - let mut f = fs::File::create(tmp.path().join(".npmrc")).expect("Unable to create file"); - f.write_all(b"symlink=false").unwrap(); - env::set_current_dir(tmp.path()).unwrap(); - let config = Npmrc::current(); + fs::write(tmp.path().join(".npmrc"), "symlink=false").expect("write to .npmrc"); + let config = Npmrc::current( + || tmp.path().to_path_buf().pipe(Ok::<_, ()>), + || unreachable!("shouldn't reach home dir"), + || unreachable!("shouldn't reach default"), + ); assert!(!config.symlink); - env::set_current_dir(current_directory).unwrap(); } #[test] pub fn test_current_folder_for_invalid_npmrc() { let tmp = tempdir().unwrap(); - let current_directory = env::current_dir().unwrap(); - let mut f = fs::File::create(tmp.path().join(".npmrc")).expect("Unable to create file"); // write invalid utf-8 value to npmrc - f.write_all(b"Hello \xff World").unwrap(); - env::set_current_dir(tmp.path()).unwrap(); - let config = Npmrc::current(); - assert!(config.symlink); - env::set_current_dir(current_directory).unwrap(); + fs::write(tmp.path().join(".npmrc"), b"Hello \xff World").expect("write to .npmrc"); + let config = + Npmrc::current(|| tmp.path().to_path_buf().pipe(Ok::<_, ()>), || None, Npmrc::new); + assert!(config.symlink); // TODO: what the hell? why succeed? 
+ } + + #[test] + pub fn test_current_folder_fallback_to_home() { + let current_dir = tempdir().unwrap(); + let home_dir = tempdir().unwrap(); + dbg!(&current_dir, &home_dir); + fs::write(home_dir.path().join(".npmrc"), "symlink=false").expect("write to .npmrc"); + let config = Npmrc::current( + || current_dir.path().to_path_buf().pipe(Ok::<_, ()>), + || home_dir.path().to_path_buf().pipe(Some), + || unreachable!("shouldn't reach home dir"), + ); + assert!(!config.symlink); + } + + #[test] + pub fn test_current_folder_fallback_to_default() { + let current_dir = tempdir().unwrap(); + let home_dir = tempdir().unwrap(); + let config = Npmrc::current( + || current_dir.path().to_path_buf().pipe(Ok::<_, ()>), + || home_dir.path().to_path_buf().pipe(Some), + || serde_ini::from_str("symlink=false").unwrap(), + ); + assert!(!config.symlink); + } } diff --git a/crates/package-manager/Cargo.toml b/crates/package-manager/Cargo.toml new file mode 100644 index 000000000..455092e60 --- /dev/null +++ b/crates/package-manager/Cargo.toml @@ -0,0 +1,40 @@ +[package] +name = "pacquet-package-manager" +version = "0.0.1" +publish = false +authors.workspace = true +description.workspace = true +edition.workspace = true +homepage.workspace = true +keywords.workspace = true +license.workspace = true +repository.workspace = true + +[dependencies] +pacquet-fs = { workspace = true } +pacquet-lockfile = { workspace = true } +pacquet-npmrc = { workspace = true } +pacquet-package-manifest = { workspace = true } +pacquet-registry = { workspace = true } +pacquet-tarball = { workspace = true } + +async-recursion = { workspace = true } +derive_more = { workspace = true } +futures-util = { workspace = true } +node-semver = { workspace = true } +pipe-trait = { workspace = true } +rayon = { workspace = true } +reflink-copy = { workspace = true } +reqwest = { workspace = true } +tracing = { workspace = true } +miette = { workspace = true } + +[dev-dependencies] +pacquet-testing-utils = { workspace = true } + 
+node-semver = { workspace = true } +insta = { workspace = true } +pretty_assertions = { workspace = true } +tempfile = { workspace = true } +tokio = { workspace = true } +walkdir = { workspace = true } diff --git a/crates/package-manager/src/add.rs b/crates/package-manager/src/add.rs new file mode 100644 index 000000000..ac7e22ea0 --- /dev/null +++ b/crates/package-manager/src/add.rs @@ -0,0 +1,88 @@ +use crate::Install; +use derive_more::{Display, Error}; +use miette::Diagnostic; +use pacquet_lockfile::Lockfile; +use pacquet_npmrc::Npmrc; +use pacquet_package_manifest::PackageManifestError; +use pacquet_package_manifest::{DependencyGroup, PackageManifest}; +use pacquet_registry::{PackageTag, PackageVersion}; +use pacquet_tarball::Cache; +use reqwest::Client; + +/// This subroutine does everything `pacquet add` is supposed to do. +#[must_use] +pub struct Add<'a, ListDependencyGroups, DependencyGroupList> +where + ListDependencyGroups: Fn() -> DependencyGroupList, + DependencyGroupList: IntoIterator, +{ + pub tarball_cache: &'a Cache, + pub http_client: &'a Client, + pub config: &'static Npmrc, + pub manifest: &'a mut PackageManifest, + pub lockfile: Option<&'a Lockfile>, + pub list_dependency_groups: ListDependencyGroups, // must be a function because it is called multiple times + pub package_name: &'a str, // TODO: 1. support version range, 2. multiple arguments, 3. name this `packages` + pub save_exact: bool, // TODO: add `save-exact` to `.npmrc`, merge configs, and remove this +} + +/// Error type of [`Add`]. 
+#[derive(Debug, Display, Error, Diagnostic)] +pub enum AddError { + #[display("Failed to add package to manifest: {_0}")] + AddDependencyToManifest(#[error(source)] PackageManifestError), + #[display("Failed save the manifest file: {_0}")] + SaveManifest(#[error(source)] PackageManifestError), +} + +impl<'a, ListDependencyGroups, DependencyGroupList> + Add<'a, ListDependencyGroups, DependencyGroupList> +where + ListDependencyGroups: Fn() -> DependencyGroupList, + DependencyGroupList: IntoIterator, +{ + pub async fn run(self) -> Result<(), AddError> { + let Add { + tarball_cache, + http_client, + config, + manifest, + lockfile, + list_dependency_groups, + package_name, + save_exact, + } = self; + + let latest_version = PackageVersion::fetch_from_registry( + package_name, + PackageTag::Latest, // TODO: add support for specifying tags + http_client, + &config.registry, + ) + .await + .expect("resolve latest tag"); // TODO: properly propagate this error + + let version_range = latest_version.serialize(save_exact); + for dependency_group in list_dependency_groups() { + manifest + .add_dependency(package_name, &version_range, dependency_group) + .map_err(AddError::AddDependencyToManifest)?; + } + + Install { + tarball_cache, + http_client, + config, + manifest, + lockfile, + dependency_groups: list_dependency_groups(), + frozen_lockfile: false, + } + .run() + .await; + + manifest.save().map_err(AddError::SaveManifest)?; + + Ok(()) + } +} diff --git a/crates/package-manager/src/create_cas_files.rs b/crates/package-manager/src/create_cas_files.rs new file mode 100644 index 000000000..72fb2ba60 --- /dev/null +++ b/crates/package-manager/src/create_cas_files.rs @@ -0,0 +1,43 @@ +use crate::{link_file, LinkFileError}; +use derive_more::{Display, Error}; +use miette::Diagnostic; +use pacquet_npmrc::PackageImportMethod; +use rayon::prelude::*; +use std::{ + collections::HashMap, + ffi::OsString, + path::{Path, PathBuf}, +}; + +/// Error type for [`create_cas_files`]. 
+#[derive(Debug, Display, Error, Diagnostic)] +pub enum CreateCasFilesError { + #[diagnostic(transparent)] + LinkFile(#[error(source)] LinkFileError), +} + +/// If `dir_path` doesn't exist, create and populate it with files from `cas_paths`. +/// +/// If `dir_path` already exists, do nothing. +pub fn create_cas_files( + import_method: PackageImportMethod, + dir_path: &Path, + cas_paths: &HashMap, +) -> Result<(), CreateCasFilesError> { + assert_eq!( + import_method, + PackageImportMethod::Auto, + "Only PackageImportMethod::Auto is currently supported, but {dir_path:?} requires {import_method:?}", + ); + + if dir_path.exists() { + return Ok(()); + } + + cas_paths + .par_iter() + .try_for_each(|(cleaned_entry, store_path)| { + link_file(store_path, &dir_path.join(cleaned_entry)) + }) + .map_err(CreateCasFilesError::LinkFile) +} diff --git a/crates/package-manager/src/create_symlink_layout.rs b/crates/package-manager/src/create_symlink_layout.rs new file mode 100644 index 000000000..3abefacd2 --- /dev/null +++ b/crates/package-manager/src/create_symlink_layout.rs @@ -0,0 +1,31 @@ +use crate::symlink_package; +use pacquet_lockfile::{PackageSnapshotDependency, PkgName, PkgNameVerPeer}; +use rayon::prelude::*; +use std::{collections::HashMap, path::Path}; + +/// Create symlink layout of dependencies for a package in a virtual dir. +/// +/// **NOTE:** `virtual_node_modules_dir` is assumed to already exist. 
+pub fn create_symlink_layout( + dependencies: &HashMap, + virtual_root: &Path, + virtual_node_modules_dir: &Path, +) { + dependencies.par_iter().for_each(|(name, spec)| { + let virtual_store_name = match spec { + PackageSnapshotDependency::PkgVerPeer(ver_peer) => { + let package_specifier = PkgNameVerPeer::new(name.clone(), ver_peer.clone()); // TODO: remove copying here + package_specifier.to_virtual_store_name() + } + PackageSnapshotDependency::DependencyPath(dependency_path) => { + dependency_path.package_specifier.to_virtual_store_name() + } + }; + let name_str = name.to_string(); + symlink_package( + &virtual_root.join(virtual_store_name).join("node_modules").join(&name_str), + &virtual_node_modules_dir.join(&name_str), + ) + .expect("symlink pkg successful"); // TODO: properly propagate this error + }); +} diff --git a/crates/package-manager/src/create_virtual_dir_by_snapshot.rs b/crates/package-manager/src/create_virtual_dir_by_snapshot.rs new file mode 100644 index 000000000..d61077645 --- /dev/null +++ b/crates/package-manager/src/create_virtual_dir_by_snapshot.rs @@ -0,0 +1,73 @@ +use crate::{create_cas_files, create_symlink_layout, CreateCasFilesError}; +use derive_more::{Display, Error}; +use miette::Diagnostic; +use pacquet_lockfile::{DependencyPath, PackageSnapshot}; +use pacquet_npmrc::PackageImportMethod; +use std::{ + collections::HashMap, + ffi::OsString, + fs, io, + path::{Path, PathBuf}, +}; + +/// This subroutine installs the files from [`cas_paths`](Self::cas_paths) then creates the symlink layout. +#[must_use] +pub struct CreateVirtualDirBySnapshot<'a> { + pub virtual_store_dir: &'a Path, + pub cas_paths: &'a HashMap, + pub import_method: PackageImportMethod, + pub dependency_path: &'a DependencyPath, + pub package_snapshot: &'a PackageSnapshot, +} + +/// Error type of [`CreateVirtualDirBySnapshot`]. 
+#[derive(Debug, Display, Error, Diagnostic)] +pub enum CreateVirtualDirError { + #[display("Failed to recursively create node_modules directory at {dir:?}: {error}")] + #[diagnostic(code(pacquet_package_manager::create_node_modules_dir))] + CreateNodeModulesDir { + dir: PathBuf, + #[error(source)] + error: io::Error, + }, + + #[diagnostic(transparent)] + CreateCasFiles(#[error(source)] CreateCasFilesError), +} + +impl<'a> CreateVirtualDirBySnapshot<'a> { + /// Execute the subroutine. + pub fn run(self) -> Result<(), CreateVirtualDirError> { + let CreateVirtualDirBySnapshot { + virtual_store_dir, + cas_paths, + import_method, + dependency_path, + package_snapshot, + } = self; + + // node_modules/.pacquet/pkg-name@x.y.z/node_modules + let virtual_node_modules_dir = virtual_store_dir + .join(dependency_path.package_specifier.to_virtual_store_name()) + .join("node_modules"); + fs::create_dir_all(&virtual_node_modules_dir).map_err(|error| { + CreateVirtualDirError::CreateNodeModulesDir { + dir: virtual_node_modules_dir.to_path_buf(), + error, + } + })?; + + // 1. Install the files from `cas_paths` + let save_path = + virtual_node_modules_dir.join(dependency_path.package_specifier.name.to_string()); + create_cas_files(import_method, &save_path, cas_paths) + .map_err(CreateVirtualDirError::CreateCasFiles)?; + + // 2. 
Create the symlink layout + if let Some(dependencies) = &package_snapshot.dependencies { + create_symlink_layout(dependencies, virtual_store_dir, &virtual_node_modules_dir) + } + + Ok(()) + } +} diff --git a/crates/package-manager/src/create_virtual_store.rs b/crates/package-manager/src/create_virtual_store.rs new file mode 100644 index 000000000..f962c026d --- /dev/null +++ b/crates/package-manager/src/create_virtual_store.rs @@ -0,0 +1,48 @@ +use crate::InstallPackageBySnapshot; +use futures_util::future; +use pacquet_lockfile::{DependencyPath, PackageSnapshot, RootProjectSnapshot}; +use pacquet_npmrc::Npmrc; +use pacquet_tarball::Cache; +use pipe_trait::Pipe; +use reqwest::Client; +use std::collections::HashMap; + +/// This subroutine generates filesystem layout for the virtual store at `node_modules/.pacquet`. +#[must_use] +pub struct CreateVirtualStore<'a> { + pub tarball_cache: &'a Cache, + pub http_client: &'a Client, + pub config: &'static Npmrc, + pub packages: Option<&'a HashMap>, + pub project_snapshot: &'a RootProjectSnapshot, +} + +impl<'a> CreateVirtualStore<'a> { + /// Execute the subroutine. 
+ pub async fn run(self) { + let CreateVirtualStore { tarball_cache, http_client, config, packages, project_snapshot } = + self; + + let packages = packages.unwrap_or_else(|| { + dbg!(project_snapshot); + todo!("check project_snapshot, error if it's not empty, do nothing if empty"); + }); + + packages + .iter() + .map(|(dependency_path, package_snapshot)| async move { + InstallPackageBySnapshot { + tarball_cache, + http_client, + config, + dependency_path, + package_snapshot, + } + .run() + .await + .unwrap(); // TODO: properly propagate this error + }) + .pipe(future::join_all) + .await; + } +} diff --git a/crates/package-manager/src/install.rs b/crates/package-manager/src/install.rs new file mode 100644 index 000000000..2cb681be3 --- /dev/null +++ b/crates/package-manager/src/install.rs @@ -0,0 +1,143 @@ +use crate::{InstallFrozenLockfile, InstallWithoutLockfile}; +use pacquet_lockfile::Lockfile; +use pacquet_npmrc::Npmrc; +use pacquet_package_manifest::{DependencyGroup, PackageManifest}; +use pacquet_tarball::Cache; +use reqwest::Client; + +/// This subroutine does everything `pacquet install` is supposed to do. +#[must_use] +pub struct Install<'a, DependencyGroupList> +where + DependencyGroupList: IntoIterator, +{ + pub tarball_cache: &'a Cache, + pub http_client: &'a Client, + pub config: &'static Npmrc, + pub manifest: &'a PackageManifest, + pub lockfile: Option<&'a Lockfile>, + pub dependency_groups: DependencyGroupList, + pub frozen_lockfile: bool, +} + +impl<'a, DependencyGroupList> Install<'a, DependencyGroupList> +where + DependencyGroupList: IntoIterator, +{ + /// Execute the subroutine. 
+ pub async fn run(self) { + let Install { + tarball_cache, + http_client, + config, + manifest, + lockfile, + dependency_groups, + frozen_lockfile, + } = self; + + tracing::info!(target: "pacquet::install", "Start all"); + + match (config.lockfile, frozen_lockfile, lockfile) { + (false, _, _) => { + InstallWithoutLockfile { + tarball_cache, + http_client, + config, + manifest, + dependency_groups, + } + .run() + .await; + } + (true, false, Some(_)) | (true, false, None) | (true, true, None) => { + unimplemented!(); + } + (true, true, Some(lockfile)) => { + let Lockfile { lockfile_version, project_snapshot, packages, .. } = lockfile; + assert_eq!(lockfile_version.major, 6); // compatibility check already happens at serde, but this still helps preventing programmer mistakes. + + InstallFrozenLockfile { + tarball_cache, + http_client, + config, + project_snapshot, + packages: packages.as_ref(), + dependency_groups, + } + .run() + .await; + } + } + + tracing::info!(target: "pacquet::install", "Complete all"); + } +} + +#[cfg(test)] +mod tests { + use super::*; + use pacquet_npmrc::Npmrc; + use pacquet_package_manifest::{DependencyGroup, PackageManifest}; + use pacquet_testing_utils::fs::{get_all_folders, is_symlink_or_junction}; + use std::env; + use tempfile::tempdir; + + #[tokio::test] + async fn should_install_dependencies() { + let dir = tempdir().unwrap(); + let store_dir = dir.path().join("pacquet-store"); + let project_root = dir.path().join("project"); + let modules_dir = project_root.join("node_modules"); // TODO: we shouldn't have to define this + let virtual_store_dir = modules_dir.join(".pacquet"); // TODO: we shouldn't have to define this + + let manifest_path = dir.path().join("package.json"); + let mut manifest = PackageManifest::create_if_needed(manifest_path.clone()).unwrap(); + + manifest.add_dependency("is-odd", "3.0.1", DependencyGroup::Prod).unwrap(); + manifest + .add_dependency("fast-decode-uri-component", "1.0.1", DependencyGroup::Dev) + 
.unwrap(); + + manifest.save().unwrap(); + + let mut config = Npmrc::new(); + config.store_dir = store_dir.to_path_buf(); + config.modules_dir = modules_dir.to_path_buf(); + config.virtual_store_dir = virtual_store_dir.to_path_buf(); + let config = config.leak(); + + Install { + tarball_cache: &Default::default(), + http_client: &Default::default(), + config, + manifest: &manifest, + lockfile: None, + dependency_groups: [ + DependencyGroup::Prod, + DependencyGroup::Dev, + DependencyGroup::Optional, + ], + frozen_lockfile: false, + } + .run() + .await; + + // Make sure the package is installed + assert!(is_symlink_or_junction(&project_root.join("node_modules/is-odd")).unwrap()); + assert!(project_root.join("node_modules/.pacquet/is-odd@3.0.1").exists()); + // Make sure it installs direct dependencies + assert!(!project_root.join("node_modules/is-number").exists()); + assert!(project_root.join("node_modules/.pacquet/is-number@6.0.0").exists()); + // Make sure we install dev-dependencies as well + assert!(is_symlink_or_junction( + &project_root.join("node_modules/fast-decode-uri-component") + ) + .unwrap()); + assert!(project_root + .join("node_modules/.pacquet/fast-decode-uri-component@1.0.1") + .is_dir()); + + insta::assert_debug_snapshot!(get_all_folders(&project_root)); + } +} diff --git a/crates/package-manager/src/install_frozen_lockfile.rs b/crates/package-manager/src/install_frozen_lockfile.rs new file mode 100644 index 000000000..7a2a6774d --- /dev/null +++ b/crates/package-manager/src/install_frozen_lockfile.rs @@ -0,0 +1,56 @@ +use crate::{CreateVirtualStore, SymlinkDirectDependencies}; +use pacquet_lockfile::{DependencyPath, PackageSnapshot, RootProjectSnapshot}; +use pacquet_npmrc::Npmrc; +use pacquet_package_manifest::DependencyGroup; +use pacquet_tarball::Cache; +use reqwest::Client; +use std::collections::HashMap; + +/// This subroutine installs dependencies from a frozen lockfile. 
+/// +/// **Brief overview:** +/// * Iterate over each package in [`Self::packages`]. +/// * Fetch a tarball of each package. +/// * Extract each tarball into the store directory. +/// * Import (by reflink, hardlink, or copy) the files from the store dir to each `node_modules/.pacquet/{name}@{version}/node_modules/{name}/`. +/// * Create dependency symbolic links in each `node_modules/.pacquet/{name}@{version}/node_modules/`. +/// * Create a symbolic link at each `node_modules/{name}`. +#[must_use] +pub struct InstallFrozenLockfile<'a, DependencyGroupList> +where + DependencyGroupList: IntoIterator, +{ + pub tarball_cache: &'a Cache, + pub http_client: &'a Client, + pub config: &'static Npmrc, + pub project_snapshot: &'a RootProjectSnapshot, + pub packages: Option<&'a HashMap>, + pub dependency_groups: DependencyGroupList, +} + +impl<'a, DependencyGroupList> InstallFrozenLockfile<'a, DependencyGroupList> +where + DependencyGroupList: IntoIterator, +{ + /// Execute the subroutine. + pub async fn run(self) { + let InstallFrozenLockfile { + tarball_cache, + http_client, + config, + project_snapshot, + packages, + dependency_groups, + } = self; + + // TODO: check if the lockfile is out-of-date + + assert!(config.prefer_frozen_lockfile, "Non frozen lockfile is not yet supported"); + + CreateVirtualStore { tarball_cache, http_client, config, packages, project_snapshot } + .run() + .await; + + SymlinkDirectDependencies { config, project_snapshot, dependency_groups }.run(); + } +} diff --git a/crates/package-manager/src/install_package_by_snapshot.rs b/crates/package-manager/src/install_package_by_snapshot.rs new file mode 100644 index 000000000..677c2b9d5 --- /dev/null +++ b/crates/package-manager/src/install_package_by_snapshot.rs @@ -0,0 +1,90 @@ +use crate::{CreateVirtualDirBySnapshot, CreateVirtualDirError}; +use derive_more::{Display, Error}; +use miette::Diagnostic; +use pacquet_lockfile::{DependencyPath, LockfileResolution, PackageSnapshot, PkgNameVerPeer}; +use 
pacquet_npmrc::Npmrc; +use pacquet_tarball::{Cache, DownloadTarballToStore, TarballError}; +use pipe_trait::Pipe; +use reqwest::Client; +use std::borrow::Cow; + +/// This subroutine downloads a package tarball, extracts it, installs it to a virtual dir, +/// then creates the symlink layout for the package. +#[must_use] +pub struct InstallPackageBySnapshot<'a> { + pub tarball_cache: &'a Cache, + pub http_client: &'a Client, + pub config: &'static Npmrc, + pub dependency_path: &'a DependencyPath, + pub package_snapshot: &'a PackageSnapshot, +} + +/// Error type of [`InstallPackageBySnapshot`]. +#[derive(Debug, Display, Error, Diagnostic)] +pub enum InstallPackageBySnapshotError { + DownloadTarball(TarballError), + CreateVirtualDir(CreateVirtualDirError), +} + +impl<'a> InstallPackageBySnapshot<'a> { + /// Execute the subroutine. + pub async fn run(self) -> Result<(), InstallPackageBySnapshotError> { + let InstallPackageBySnapshot { + tarball_cache, + http_client, + config, + dependency_path, + package_snapshot, + } = self; + let PackageSnapshot { resolution, .. } = package_snapshot; + let DependencyPath { custom_registry, package_specifier } = dependency_path; + + let (tarball_url, integrity) = match resolution { + LockfileResolution::Tarball(tarball_resolution) => { + let integrity = tarball_resolution.integrity.as_deref().unwrap_or_else(|| { + // TODO: how to handle the absence of the integrity field?
+ panic!("Current implementation requires integrity, but {dependency_path} doesn't have it"); + }); + (tarball_resolution.tarball.as_str().pipe(Cow::Borrowed), integrity) + } + LockfileResolution::Registry(registry_resolution) => { + let registry = custom_registry.as_ref().unwrap_or(&config.registry); + let registry = registry.strip_suffix('/').unwrap_or(registry); + let PkgNameVerPeer { name, suffix: ver_peer } = package_specifier; + let version = ver_peer.version(); + let bare_name = name.bare.as_str(); + let tarball_url = format!("{registry}/{name}/-/{bare_name}-{version}.tgz"); + let integrity = registry_resolution.integrity.as_str(); + (Cow::Owned(tarball_url), integrity) + } + LockfileResolution::Directory(_) | LockfileResolution::Git(_) => { + panic!("Only TarballResolution and RegistryResolution is supported at the moment, but {dependency_path} requires {resolution:?}"); + } + }; + + // TODO: skip when already exists in store? + let cas_paths = DownloadTarballToStore { + tarball_cache, + http_client, + store_dir: &config.store_dir, + package_integrity: integrity, + package_unpacked_size: None, + package_url: &tarball_url, + } + .run() + .await + .map_err(InstallPackageBySnapshotError::DownloadTarball)?; + + CreateVirtualDirBySnapshot { + virtual_store_dir: &config.virtual_store_dir, + cas_paths: &cas_paths, + import_method: config.package_import_method, + dependency_path, + package_snapshot, + } + .run() + .map_err(InstallPackageBySnapshotError::CreateVirtualDir)?; + + Ok(()) + } +} diff --git a/crates/package-manager/src/install_package_from_registry.rs b/crates/package-manager/src/install_package_from_registry.rs new file mode 100644 index 000000000..a2a01e87f --- /dev/null +++ b/crates/package-manager/src/install_package_from_registry.rs @@ -0,0 +1,189 @@ +use crate::{create_cas_files, symlink_package, CreateCasFilesError, SymlinkPackageError}; +use derive_more::{Display, Error}; +use miette::Diagnostic; +use pacquet_npmrc::Npmrc; +use 
pacquet_registry::{Package, PackageTag, PackageVersion, RegistryError}; +use pacquet_tarball::{Cache, DownloadTarballToStore, TarballError}; +use reqwest::Client; +use std::{path::Path, str::FromStr}; + +/// This subroutine executes the following and returns the package +/// * Retrieves the package from the registry +/// * Extracts the tarball to global store directory (~/Library/../pacquet) +/// * Links global store directory to virtual dir (node_modules/.pacquet/..) +/// +/// `symlink_path` will be appended by the name of the package. Therefore, +/// it should be resolved into the node_modules folder of a subdependency such as +/// `node_modules/.pacquet/fastify@1.0.0/node_modules`. +#[must_use] +pub struct InstallPackageFromRegistry<'a> { + pub tarball_cache: &'a Cache, + pub http_client: &'a Client, + pub config: &'static Npmrc, + pub node_modules_dir: &'a Path, + pub name: &'a str, + pub version_range: &'a str, +} + +/// Error type of [`InstallPackageFromRegistry`]. +#[derive(Debug, Display, Error, Diagnostic)] +pub enum InstallPackageFromRegistryError { + FetchFromRegistry(#[error(source)] RegistryError), + DownloadTarballToStore(#[error(source)] TarballError), + CreateCasFiles(#[error(source)] CreateCasFilesError), + SymlinkPackage(#[error(source)] SymlinkPackageError), +} + +impl<'a> InstallPackageFromRegistry<'a> { + /// Execute the subroutine. + pub async fn run(self) -> Result + where + Tag: FromStr + Into, + { + let &InstallPackageFromRegistry { http_client, config, name, version_range, .. 
} = &self; + + Ok(if let Ok(tag) = version_range.parse::() { + let package_version = PackageVersion::fetch_from_registry( + name, + tag.into(), + http_client, + &config.registry, + ) + .await + .map_err(InstallPackageFromRegistryError::FetchFromRegistry)?; + self.install_package_version(&package_version).await?; + package_version + } else { + let package = Package::fetch_from_registry(name, http_client, &config.registry) + .await + .map_err(InstallPackageFromRegistryError::FetchFromRegistry)?; + let package_version = package.pinned_version(version_range).unwrap(); // TODO: propagate error for when no version satisfies range + self.install_package_version(package_version).await?; + package_version.clone() + }) + } + + async fn install_package_version( + self, + package_version: &PackageVersion, + ) -> Result<(), InstallPackageFromRegistryError> { + let InstallPackageFromRegistry { + tarball_cache, http_client, config, node_modules_dir, .. + } = self; + + let store_folder_name = package_version.to_virtual_store_name(); + + // TODO: skip when it already exists in store? 
+ let cas_paths = DownloadTarballToStore { + tarball_cache, + http_client, + store_dir: &config.store_dir, + package_integrity: package_version + .dist + .integrity + .as_ref() + .expect("has integrity field"), + package_unpacked_size: package_version.dist.unpacked_size, + package_url: package_version.as_tarball_url(), + } + .run() + .await + .map_err(InstallPackageFromRegistryError::DownloadTarballToStore)?; + + let save_path = config + .virtual_store_dir + .join(store_folder_name) + .join("node_modules") + .join(&package_version.name); + + let symlink_path = node_modules_dir.join(&package_version.name); + + tracing::info!(target: "pacquet::import", ?save_path, ?symlink_path, "Import package"); + + create_cas_files(config.package_import_method, &save_path, &cas_paths) + .map_err(InstallPackageFromRegistryError::CreateCasFiles)?; + + symlink_package(&save_path, &symlink_path) + .map_err(InstallPackageFromRegistryError::SymlinkPackage)?; + + Ok(()) + } +} + +#[cfg(test)] +mod tests { + use super::*; + use node_semver::Version; + use pacquet_npmrc::Npmrc; + use pipe_trait::Pipe; + use pretty_assertions::assert_eq; + use std::fs; + use std::path::Path; + use tempfile::tempdir; + + fn create_config(store_dir: &Path, modules_dir: &Path, virtual_store_dir: &Path) -> Npmrc { + Npmrc { + hoist: false, + hoist_pattern: vec![], + public_hoist_pattern: vec![], + shamefully_hoist: false, + store_dir: store_dir.to_path_buf(), + modules_dir: modules_dir.to_path_buf(), + node_linker: Default::default(), + symlink: false, + virtual_store_dir: virtual_store_dir.to_path_buf(), + package_import_method: Default::default(), + modules_cache_max_age: 0, + lockfile: false, + prefer_frozen_lockfile: false, + lockfile_include_tarball_url: false, + registry: "https://registry.npmjs.com/".to_string(), + auto_install_peers: false, + dedupe_peer_dependents: false, + strict_peer_dependencies: false, + resolve_peers_from_workspace_root: false, + } + } + + #[tokio::test] + pub async fn 
should_find_package_version_from_registry() { + let store_dir = tempdir().unwrap(); + let modules_dir = tempdir().unwrap(); + let virtual_store_dir = tempdir().unwrap(); + let config: &'static Npmrc = + create_config(store_dir.path(), modules_dir.path(), virtual_store_dir.path()) + .pipe(Box::new) + .pipe(Box::leak); + let http_client = reqwest::Client::new(); + let package = InstallPackageFromRegistry { + tarball_cache: &Default::default(), + config, + http_client: &http_client, + name: "fast-querystring", + version_range: "1.0.0", + node_modules_dir: modules_dir.path(), + } + .run::() + .await + .unwrap(); + + assert_eq!(package.name, "fast-querystring"); + assert_eq!( + package.version, + Version { major: 1, minor: 0, patch: 0, build: vec![], pre_release: vec![] } + ); + + let virtual_store_path = virtual_store_dir + .path() + .join(package.to_virtual_store_name()) + .join("node_modules") + .join(&package.name); + assert!(virtual_store_path.is_dir()); + + // Make sure the symlink is resolving to the correct path + assert_eq!( + fs::read_link(modules_dir.path().join(&package.name)).unwrap(), + virtual_store_path + ); + } +} diff --git a/crates/package-manager/src/install_without_lockfile.rs b/crates/package-manager/src/install_without_lockfile.rs new file mode 100644 index 000000000..0d0bb8b39 --- /dev/null +++ b/crates/package-manager/src/install_without_lockfile.rs @@ -0,0 +1,109 @@ +use crate::InstallPackageFromRegistry; +use async_recursion::async_recursion; +use futures_util::future; +use node_semver::Version; +use pacquet_npmrc::Npmrc; +use pacquet_package_manifest::{DependencyGroup, PackageManifest}; +use pacquet_registry::PackageVersion; +use pacquet_tarball::Cache; +use pipe_trait::Pipe; +use reqwest::Client; + +/// This subroutine install packages from a `package.json` without reading or writing a lockfile. +/// +/// **Brief overview for each package:** +/// * Fetch a tarball of the package. +/// * Extract the tarball into the store directory. 
+/// * Import (by reflink, hardlink, or copy) the files from the store dir to `node_modules/.pacquet/{name}@{version}/node_modules/{name}/`. +/// * Create dependency symbolic links in `node_modules/.pacquet/{name}@{version}/node_modules/`. +/// * Create a symbolic link at `node_modules/{name}`. +/// * Repeat the process for the dependencies of the package. +#[must_use] +pub struct InstallWithoutLockfile<'a, DependencyGroupList> { + pub tarball_cache: &'a Cache, + pub http_client: &'a Client, + pub config: &'static Npmrc, + pub manifest: &'a PackageManifest, + pub dependency_groups: DependencyGroupList, +} + +impl<'a, DependencyGroupList> InstallWithoutLockfile<'a, DependencyGroupList> { + /// Execute the subroutine. + pub async fn run(self) + where + DependencyGroupList: IntoIterator, + { + let InstallWithoutLockfile { + tarball_cache, + http_client, + config, + manifest, + dependency_groups, + } = self; + + let _: Vec<()> = manifest + .dependencies(dependency_groups.into_iter()) + .map(|(name, version_range)| async move { + let dependency = InstallPackageFromRegistry { + tarball_cache, + http_client, + config, + node_modules_dir: &config.modules_dir, + name, + version_range, + } + .run::() + .await + .unwrap(); + + InstallWithoutLockfile { + tarball_cache, + http_client, + config, + manifest, + dependency_groups: (), + } + .install_dependencies_from_registry(&dependency) + .await; + }) + .pipe(future::join_all) + .await; + } +} + +impl<'a> InstallWithoutLockfile<'a, ()> { + /// Install dependencies of a dependency. + #[async_recursion] + async fn install_dependencies_from_registry(&self, package: &PackageVersion) { + let InstallWithoutLockfile { tarball_cache, http_client, config, .. 
} = self; + + let node_modules_path = self + .config + .virtual_store_dir + .join(package.to_virtual_store_name()) + .join("node_modules"); + + tracing::info!(target: "pacquet::install", node_modules = ?node_modules_path, "Start subset"); + + package + .dependencies(self.config.auto_install_peers) + .map(|(name, version_range)| async { + let dependency = InstallPackageFromRegistry { + tarball_cache, + http_client, + config, + node_modules_dir: &node_modules_path, + name, + version_range, + } + .run::() + .await + .unwrap(); // TODO: proper error propagation + self.install_dependencies_from_registry(&dependency).await; + }) + .pipe(future::join_all) + .await; + + tracing::info!(target: "pacquet::install", node_modules = ?node_modules_path, "Complete subset"); + } +} diff --git a/crates/package-manager/src/lib.rs b/crates/package-manager/src/lib.rs new file mode 100644 index 000000000..654bd9cb3 --- /dev/null +++ b/crates/package-manager/src/lib.rs @@ -0,0 +1,27 @@ +mod add; +mod create_cas_files; +mod create_symlink_layout; +mod create_virtual_dir_by_snapshot; +mod create_virtual_store; +mod install; +mod install_frozen_lockfile; +mod install_package_by_snapshot; +mod install_package_from_registry; +mod install_without_lockfile; +mod link_file; +mod symlink_direct_dependencies; +mod symlink_package; + +pub use add::*; +pub use create_cas_files::*; +pub use create_symlink_layout::*; +pub use create_virtual_dir_by_snapshot::*; +pub use create_virtual_store::*; +pub use install::*; +pub use install_frozen_lockfile::*; +pub use install_package_by_snapshot::*; +pub use install_package_from_registry::*; +pub use install_without_lockfile::*; +pub use link_file::*; +pub use symlink_direct_dependencies::*; +pub use symlink_package::*; diff --git a/crates/package-manager/src/link_file.rs b/crates/package-manager/src/link_file.rs new file mode 100644 index 000000000..6805515c2 --- /dev/null +++ b/crates/package-manager/src/link_file.rs @@ -0,0 +1,51 @@ +use 
derive_more::{Display, Error}; +use miette::Diagnostic; +use std::{ + fs, io, + path::{Path, PathBuf}, +}; + +/// Error type for [`link_file`]. +#[derive(Debug, Display, Error, Diagnostic)] +pub enum LinkFileError { + #[display("cannot create directory at {dirname:?}: {error}")] + CreateDir { + dirname: PathBuf, + #[error(source)] + error: io::Error, + }, + #[display("fail to create a link from {from:?} to {to:?}: {error}")] + CreateLink { + from: PathBuf, + to: PathBuf, + #[error(source)] + error: io::Error, + }, +} + +/// Reflink or copy a single file. +/// +/// * If `target_link` already exists, do nothing. +/// * If parent dir of `target_link` doesn't exist, it will be created. +pub fn link_file(source_file: &Path, target_link: &Path) -> Result<(), LinkFileError> { + if target_link.exists() { + return Ok(()); + } + + if let Some(parent_dir) = target_link.parent() { + fs::create_dir_all(parent_dir).map_err(|error| LinkFileError::CreateDir { + dirname: parent_dir.to_path_buf(), + error, + })?; + } + + reflink_copy::reflink_or_copy(source_file, target_link).map_err(|error| { + LinkFileError::CreateLink { + from: source_file.to_path_buf(), + to: target_link.to_path_buf(), + error, + } + })?; // TODO: add hardlink + + Ok(()) +} diff --git a/crates/package-manager/src/snapshots/pacquet_package_manager__install__tests__should_install_dependencies.snap b/crates/package-manager/src/snapshots/pacquet_package_manager__install__tests__should_install_dependencies.snap new file mode 100644 index 000000000..e4e005181 --- /dev/null +++ b/crates/package-manager/src/snapshots/pacquet_package_manager__install__tests__should_install_dependencies.snap @@ -0,0 +1,21 @@ +--- +source: crates/package_manager/src/install.rs +assertion_line: 184 +expression: get_all_folders(&project_root) +--- +[ + "node_modules", + "node_modules/.pacquet", + "node_modules/.pacquet/fast-decode-uri-component@1.0.1", + "node_modules/.pacquet/fast-decode-uri-component@1.0.1/node_modules", + 
"node_modules/.pacquet/fast-decode-uri-component@1.0.1/node_modules/fast-decode-uri-component", + "node_modules/.pacquet/is-number@6.0.0", + "node_modules/.pacquet/is-number@6.0.0/node_modules", + "node_modules/.pacquet/is-number@6.0.0/node_modules/is-number", + "node_modules/.pacquet/is-odd@3.0.1", + "node_modules/.pacquet/is-odd@3.0.1/node_modules", + "node_modules/.pacquet/is-odd@3.0.1/node_modules/is-number", + "node_modules/.pacquet/is-odd@3.0.1/node_modules/is-odd", + "node_modules/fast-decode-uri-component", + "node_modules/is-odd", +] diff --git a/crates/package-manager/src/symlink_direct_dependencies.rs b/crates/package-manager/src/symlink_direct_dependencies.rs new file mode 100644 index 000000000..1d974605c --- /dev/null +++ b/crates/package-manager/src/symlink_direct_dependencies.rs @@ -0,0 +1,57 @@ +use crate::symlink_package; +use pacquet_lockfile::{PkgName, PkgNameVerPeer, RootProjectSnapshot}; +use pacquet_npmrc::Npmrc; +use pacquet_package_manifest::DependencyGroup; +use rayon::prelude::*; + +/// This subroutine creates symbolic links in the `node_modules` directory for +/// the direct dependencies. The targets of the link are the virtual directories. +/// +/// If package `foo@x.y.z` is declared as a dependency in `package.json`, +/// symlink `foo -> .pacquet/foo@x.y.z/node_modules/foo` shall be created +/// in the `node_modules` directory. +#[must_use] +pub struct SymlinkDirectDependencies<'a, DependencyGroupList> +where + DependencyGroupList: IntoIterator, +{ + pub config: &'static Npmrc, + pub project_snapshot: &'a RootProjectSnapshot, + pub dependency_groups: DependencyGroupList, +} + +impl<'a, DependencyGroupList> SymlinkDirectDependencies<'a, DependencyGroupList> +where + DependencyGroupList: IntoIterator, +{ + /// Execute the subroutine. 
+ pub fn run(self) { + let SymlinkDirectDependencies { config, project_snapshot, dependency_groups } = self; + + let RootProjectSnapshot::Single(project_snapshot) = project_snapshot else { + panic!("Monorepo is not yet supported"); // TODO: properly propagate this error + }; + + project_snapshot + .dependencies_by_groups(dependency_groups) + .collect::>() + .par_iter() + .for_each(|(name, spec)| { + // TODO: the code below is not optimal + let virtual_store_name = + PkgNameVerPeer::new(PkgName::clone(name), spec.version.clone()) + .to_virtual_store_name(); + + let name_str = name.to_string(); + symlink_package( + &config + .virtual_store_dir + .join(virtual_store_name) + .join("node_modules") + .join(&name_str), + &config.modules_dir.join(&name_str), + ) + .expect("symlink pkg"); // TODO: properly propagate this error + }); + } +} diff --git a/crates/package-manager/src/symlink_package.rs b/crates/package-manager/src/symlink_package.rs new file mode 100644 index 000000000..76834e082 --- /dev/null +++ b/crates/package-manager/src/symlink_package.rs @@ -0,0 +1,59 @@ +use derive_more::{Display, Error}; +use miette::Diagnostic; +use pacquet_fs::symlink_dir; +use std::{ + fs, + io::{self, ErrorKind}, + path::{Path, PathBuf}, +}; + +/// Error type for [`symlink_package`]. +#[derive(Debug, Display, Error, Diagnostic)] +pub enum SymlinkPackageError { + #[display("Failed to create directory at {dir:?}: {error}")] + CreateParentDir { + dir: PathBuf, + #[error(source)] + error: io::Error, + }, + + #[display("Failed to create symlink at {symlink_path:?} to {symlink_target:?}: {error}")] + SymlinkDir { + symlink_target: PathBuf, + symlink_path: PathBuf, + #[error(source)] + error: io::Error, + }, +} + +/// Create symlink for a package. +/// +/// * If ancestors of `symlink_path` don't exist, they will be created recursively. +/// * If `symlink_path` already exists, skip. +/// * If `symlink_path` doesn't exist, a symlink pointing to `symlink_target` will be created. 
+pub fn symlink_package( + symlink_target: &Path, + symlink_path: &Path, +) -> Result<(), SymlinkPackageError> { + // NOTE: symlink target in pacquet is absolute yet in pnpm is relative + // TODO: change symlink target to relative + if let Some(parent) = symlink_path.parent() { + fs::create_dir_all(parent).map_err(|error| SymlinkPackageError::CreateParentDir { + dir: parent.to_path_buf(), + error, + })?; + } + if let Err(error) = symlink_dir(symlink_target, symlink_path) { + match error.kind() { + ErrorKind::AlreadyExists => {} + _ => { + return Err(SymlinkPackageError::SymlinkDir { + symlink_target: symlink_target.to_path_buf(), + symlink_path: symlink_path.to_path_buf(), + error, + }) + } + } + } + Ok(()) +} diff --git a/crates/package_json/Cargo.toml b/crates/package-manifest/Cargo.toml similarity index 67% rename from crates/package_json/Cargo.toml rename to crates/package-manifest/Cargo.toml index 70186f9a4..3716911df 100644 --- a/crates/package_json/Cargo.toml +++ b/crates/package-manifest/Cargo.toml @@ -1,5 +1,5 @@ [package] -name = "pacquet_package_json" +name = "pacquet-package-manifest" version = "0.0.1" publish = false authors.workspace = true @@ -11,11 +11,11 @@ license.workspace = true repository.workspace = true [dependencies] -pacquet_diagnostics = { workspace = true } - -serde = { workspace = true } -serde_json = { workspace = true } -strum = { workspace = true } +derive_more = { workspace = true } +miette = { workspace = true } +serde = { workspace = true } +serde_json = { workspace = true } +strum = { workspace = true } [dev-dependencies] pipe-trait = { workspace = true } diff --git a/crates/package_json/src/lib.rs b/crates/package-manifest/src/lib.rs similarity index 62% rename from crates/package_json/src/lib.rs rename to crates/package-manifest/src/lib.rs index d72765e22..2016861ca 100644 --- a/crates/package_json/src/lib.rs +++ b/crates/package-manifest/src/lib.rs @@ -4,49 +4,48 @@ use std::{ path::{Path, PathBuf}, }; -use 
pacquet_diagnostics::{ - miette::{self, Diagnostic}, - thiserror::{self, Error}, -}; +use derive_more::{Display, Error, From}; +use miette::Diagnostic; use serde::{Deserialize, Serialize}; use serde_json::{json, Map, Value}; use strum::IntoStaticStr; -#[derive(Error, Debug, Diagnostic)] +#[derive(Debug, Display, Error, From, Diagnostic)] #[non_exhaustive] -pub enum PackageJsonError { - #[error(transparent)] - #[diagnostic(code(pacquet_package_json::serialization_error))] - Serialization(#[from] serde_json::Error), +pub enum PackageManifestError { + #[diagnostic(code(pacquet_package_manifest::serialization_error))] + Serialization(serde_json::Error), // TODO: remove derive(From), split this variant - #[error(transparent)] - #[diagnostic(code(pacquet_package_json::io_error))] - Io(#[from] std::io::Error), + #[diagnostic(code(pacquet_package_manifest::io_error))] + Io(std::io::Error), // TODO: remove derive(From), split this variant - #[error("package.json file already exists")] + #[display("package.json file already exists")] #[diagnostic( - code(pacquet_package_json::already_exist_error), + code(pacquet_package_manifest::already_exist_error), help("Your current working directory already has a package.json file.") )] AlreadyExist, - #[error("invalid attribute: {0}")] - #[diagnostic(code(pacquet_package_json::invalid_attribute))] - InvalidAttribute(String), + #[from(ignore)] // TODO: remove this after derive(From) has been removed + #[display("invalid attribute: {_0}")] + #[diagnostic(code(pacquet_package_manifest::invalid_attribute))] + InvalidAttribute(#[error(not(source))] String), - #[error("No package.json was found in {0}")] - #[diagnostic(code(pacquet_package_json::no_import_manifest_found))] - NoImporterManifestFound(String), + #[from(ignore)] // TODO: remove this after derive(From) has been removed + #[display("No package.json was found in {_0}")] + #[diagnostic(code(pacquet_package_manifest::no_import_manifest_found))] + 
NoImporterManifestFound(#[error(not(source))] String), - #[error("Missing script: \"{0}\"")] - #[diagnostic(code(pacquet_package_json::no_script_error))] - NoScript(String), + #[from(ignore)] // TODO: remove this after derive(From) has been removed + #[display("Missing script: {_0:?}")] + #[diagnostic(code(pacquet_package_manifest::no_script_error))] + NoScript(#[error(not(source))] String), } #[derive(Debug, Clone, Copy, PartialEq, IntoStaticStr)] pub enum DependencyGroup { #[strum(serialize = "dependencies")] - Default, + Prod, #[strum(serialize = "devDependencies")] Dev, #[strum(serialize = "optionalDependencies")] @@ -62,12 +61,13 @@ pub enum BundleDependencies { List(Vec), } -pub struct PackageJson { +/// Content of the `package.json` files and its path. +pub struct PackageManifest { path: PathBuf, - value: Value, + value: Value, // TODO: convert this into a proper struct + an array of keys order } -impl PackageJson { +impl PackageManifest { fn create_init_package_json(name: &str) -> Value { json!({ "name": name, @@ -83,52 +83,60 @@ impl PackageJson { }) } - fn write_to_file(path: &Path) -> Result<(Value, String), PackageJsonError> { + fn write_to_file(path: &Path) -> Result<(Value, String), PackageManifestError> { let name = path .parent() .and_then(|folder| folder.file_name()) .and_then(|file_name| file_name.to_str()) .unwrap_or(""); - let package_json = PackageJson::create_init_package_json(name); - let contents = serde_json::to_string_pretty(&package_json)?; + let manifest = PackageManifest::create_init_package_json(name); + let contents = serde_json::to_string_pretty(&manifest)?; fs::write(path, &contents)?; // TODO: forbid overwriting existing files - Ok((package_json, contents)) + Ok((manifest, contents)) } - fn read_from_file(path: &Path) -> Result { + fn read_from_file(path: &Path) -> Result { let contents = fs::read_to_string(path)?; - serde_json::from_str(&contents).map_err(PackageJsonError::from) + 
serde_json::from_str(&contents).map_err(PackageManifestError::from) } - pub fn init(path: &Path) -> Result<(), PackageJsonError> { + pub fn init(path: &Path) -> Result<(), PackageManifestError> { if path.exists() { - return Err(PackageJsonError::AlreadyExist); + return Err(PackageManifestError::AlreadyExist); } - let (_, contents) = PackageJson::write_to_file(path)?; + let (_, contents) = PackageManifest::write_to_file(path)?; println!("Wrote to {path}\n\n{contents}", path = path.display()); Ok(()) } - pub fn from_path(path: PathBuf) -> Result { + pub fn from_path(path: PathBuf) -> Result { if !path.exists() { - return Err(PackageJsonError::NoImporterManifestFound(path.display().to_string())); + return Err(PackageManifestError::NoImporterManifestFound(path.display().to_string())); } - let value = PackageJson::read_from_file(&path)?; - Ok(PackageJson { path, value }) + let value = PackageManifest::read_from_file(&path)?; + Ok(PackageManifest { path, value }) } - pub fn create_if_needed(path: PathBuf) -> Result { + pub fn create_if_needed(path: PathBuf) -> Result { let value = if path.exists() { - PackageJson::read_from_file(&path)? + PackageManifest::read_from_file(&path)? } else { - PackageJson::write_to_file(&path).map(|(value, _)| value)? + PackageManifest::write_to_file(&path).map(|(value, _)| value)? 
}; - Ok(PackageJson { path, value }) + Ok(PackageManifest { path, value }) + } + + pub fn path(&self) -> &'_ Path { + &self.path + } + + pub fn value(&self) -> &'_ Value { + &self.value } - pub fn save(&mut self) -> Result<(), PackageJsonError> { + pub fn save(&self) -> Result<(), PackageManifestError> { let mut file = fs::File::create(&self.path)?; let contents = serde_json::to_string_pretty(&self.value)?; file.write_all(contents.as_bytes())?; @@ -163,13 +171,13 @@ impl PackageJson { name: &str, version: &str, dependency_group: DependencyGroup, - ) -> Result<(), PackageJsonError> { + ) -> Result<(), PackageManifestError> { let dependency_type: &str = dependency_group.into(); if let Some(field) = self.value.get_mut(dependency_type) { if let Some(dependencies) = field.as_object_mut() { dependencies.insert(name.to_string(), Value::String(version.to_string())); } else { - return Err(PackageJsonError::InvalidAttribute( + return Err(PackageManifestError::InvalidAttribute( "dependencies attribute should be an object".to_string(), )); } @@ -184,8 +192,8 @@ impl PackageJson { pub fn script( &self, command: &str, - if_present: bool, - ) -> Result, PackageJsonError> { + if_present: bool, // TODO: split this function into 2, one with --if-present, one without + ) -> Result, PackageManifestError> { if let Some(script_str) = self .value .get("scripts") @@ -198,7 +206,7 @@ impl PackageJson { if if_present { Ok(None) } else { - Err(PackageJsonError::NoScript(command.to_string())) + Err(PackageManifestError::NoScript(command.to_string())) } } } @@ -217,39 +225,38 @@ mod tests { #[test] fn test_init_package_json_content() { - let package_json = PackageJson::create_init_package_json("test"); - assert_snapshot!(serde_json::to_string_pretty(&package_json).unwrap()); + let manifest = PackageManifest::create_init_package_json("test"); + assert_snapshot!(serde_json::to_string_pretty(&manifest).unwrap()); } #[test] fn init_should_throw_if_exists() { let tmp = 
NamedTempFile::new().unwrap(); write!(tmp.as_file(), "hello world").unwrap(); - PackageJson::init(tmp.path()).expect_err("package.json already exist"); + PackageManifest::init(tmp.path()).expect_err("package.json already exist"); } #[test] fn init_should_create_package_json_if_not_exist() { let dir = tempdir().unwrap(); let tmp = dir.path().join("package.json"); - PackageJson::init(&tmp).unwrap(); + PackageManifest::init(&tmp).unwrap(); assert!(tmp.exists()); assert!(tmp.is_file()); - assert_eq!(PackageJson::from_path(tmp.clone()).unwrap().path, tmp); + assert_eq!(PackageManifest::from_path(tmp.clone()).unwrap().path, tmp); } #[test] fn should_add_dependency() { let dir = tempdir().unwrap(); let tmp = dir.path().join("package.json"); - let mut package_json = PackageJson::create_if_needed(tmp.clone()).unwrap(); - package_json.add_dependency("fastify", "1.0.0", DependencyGroup::Default).unwrap(); + let mut manifest = PackageManifest::create_if_needed(tmp.clone()).unwrap(); + manifest.add_dependency("fastify", "1.0.0", DependencyGroup::Prod).unwrap(); - let dependencies: HashMap<_, _> = - package_json.dependencies([DependencyGroup::Default]).collect(); + let dependencies: HashMap<_, _> = manifest.dependencies([DependencyGroup::Prod]).collect(); assert!(dependencies.contains_key("fastify")); assert_eq!(dependencies.get("fastify").unwrap(), &"1.0.0"); - package_json.save().unwrap(); + manifest.save().unwrap(); assert!(read_to_string(tmp).unwrap().contains("fastify")); } @@ -257,8 +264,8 @@ mod tests { fn should_throw_on_missing_command() { let dir = tempdir().unwrap(); let tmp = dir.path().join("package.json"); - let package_json = PackageJson::create_if_needed(tmp).unwrap(); - package_json.script("dev", false).expect_err("dev command should not exist"); + let manifest = PackageManifest::create_if_needed(tmp).unwrap(); + manifest.script("dev", false).expect_err("dev command should not exist"); } #[test] @@ -272,10 +279,10 @@ mod tests { "#; let tmp = 
NamedTempFile::new().unwrap(); write!(tmp.as_file(), "{}", data).unwrap(); - let package_json = PackageJson::create_if_needed(tmp.path().to_path_buf()).unwrap(); - package_json.script("test", false).unwrap(); - package_json.script("invalid", false).expect_err("invalid command should not exist"); - package_json.script("invalid", true).unwrap(); + let manifest = PackageManifest::create_if_needed(tmp.path().to_path_buf()).unwrap(); + manifest.script("test", false).unwrap(); + manifest.script("invalid", false).expect_err("invalid command should not exist"); + manifest.script("invalid", true).unwrap(); } #[test] @@ -292,10 +299,10 @@ mod tests { "#; let tmp = NamedTempFile::new().unwrap(); write!(tmp.as_file(), "{}", data).unwrap(); - let package_json = PackageJson::create_if_needed(tmp.path().to_path_buf()).unwrap(); - let dependencies = |groups| package_json.dependencies(groups).collect::>(); + let manifest = PackageManifest::create_if_needed(tmp.path().to_path_buf()).unwrap(); + let dependencies = |groups| manifest.dependencies(groups).collect::>(); assert!(dependencies([DependencyGroup::Peer]).contains_key("fast-querystring")); - assert!(dependencies([DependencyGroup::Default]).contains_key("fastify")); + assert!(dependencies([DependencyGroup::Prod]).contains_key("fastify")); } #[test] @@ -314,8 +321,8 @@ mod tests { eprintln!("CASE: {data}"); let tmp = NamedTempFile::new().unwrap(); write!(tmp.as_file(), "{}", data).unwrap(); - let package_json = PackageJson::create_if_needed(tmp.path().to_path_buf()).unwrap(); - let bundle = package_json.bundle_dependencies().unwrap(); + let manifest = PackageManifest::create_if_needed(tmp.path().to_path_buf()).unwrap(); + let bundle = manifest.bundle_dependencies().unwrap(); assert_eq!(bundle, $output); }}; } diff --git a/crates/package_json/src/snapshots/pacquet_package_json__tests__init_package_json_content.snap b/crates/package-manifest/src/snapshots/pacquet_package_manifest__tests__init_package_json_content.snap similarity 
index 94% rename from crates/package_json/src/snapshots/pacquet_package_json__tests__init_package_json_content.snap rename to crates/package-manifest/src/snapshots/pacquet_package_manifest__tests__init_package_json_content.snap index 9bf9909e0..3d813d77d 100644 --- a/crates/package_json/src/snapshots/pacquet_package_json__tests__init_package_json_content.snap +++ b/crates/package-manifest/src/snapshots/pacquet_package_manifest__tests__init_package_json_content.snap @@ -1,5 +1,6 @@ --- source: crates/package_json/src/lib.rs +assertion_line: 228 expression: "serde_json::to_string_pretty(&package_json).unwrap()" --- { diff --git a/crates/registry/Cargo.toml b/crates/registry/Cargo.toml index f7d977b84..c230413d8 100644 --- a/crates/registry/Cargo.toml +++ b/crates/registry/Cargo.toml @@ -1,5 +1,5 @@ [package] -name = "pacquet_registry" +name = "pacquet-registry" version = "0.0.1" publish = false authors.workspace = true @@ -11,14 +11,16 @@ license.workspace = true repository.workspace = true [dependencies] -pacquet_diagnostics = { workspace = true } +pacquet-diagnostics = { workspace = true } +derive_more = { workspace = true } reqwest = { workspace = true } node-semver = { workspace = true } pipe-trait = { workspace = true } serde = { workspace = true } serde_json = { workspace = true } tokio = { workspace = true } +miette = { workspace = true } [dev-dependencies] pretty_assertions = { workspace = true } diff --git a/crates/registry/src/lib.rs b/crates/registry/src/lib.rs index 16bce87ab..d6e62def2 100644 --- a/crates/registry/src/lib.rs +++ b/crates/registry/src/lib.rs @@ -1,44 +1,45 @@ mod package; mod package_distribution; +mod package_tag; mod package_version; pub use package::Package; pub use package_distribution::PackageDistribution; +pub use package_tag::PackageTag; pub use package_version::PackageVersion; -use pacquet_diagnostics::{ - miette::{self, Diagnostic}, - thiserror::{self, Error}, -}; +use derive_more::{Display, Error, From}; +use miette::Diagnostic; 
-#[derive(Debug, Error)] -#[error("failed to request {url}: {error}")] +#[derive(Debug, Display, Error)] +#[display("Failed to request {url}: {error}")] pub struct NetworkError { pub url: String, - #[source] + #[error(source)] pub error: reqwest::Error, } -#[derive(Error, Debug, Diagnostic)] +#[derive(Debug, Display, Error, From, Diagnostic)] #[non_exhaustive] pub enum RegistryError { - #[error("missing latest tag on {0}")] + #[from(ignore)] // TODO: remove this after derive(From) has been removed + #[display("Missing latest tag on {_0}")] #[diagnostic(code(pacquet_registry::missing_latest_tag))] - MissingLatestTag(String), + MissingLatestTag(#[error(not(source))] String), - #[error("missing version {0} on package {1}")] + #[from(ignore)] // TODO: remove this after derive(From) has been removed + #[display("Missing version {_0} on package {_1}")] #[diagnostic(code(pacquet_registry::missing_version_release))] MissingVersionRelease(String, String), - #[error(transparent)] #[diagnostic(code(pacquet_registry::network_error))] - Network(#[from] NetworkError), + Network(NetworkError), // TODO: remove derive(Error), split this variant - #[error(transparent)] #[diagnostic(code(pacquet_registry::io_error))] - Io(#[from] std::io::Error), + Io(std::io::Error), // TODO: remove derive(Error), split this variant - #[error("serialization failed: {0}")] + #[from(ignore)] // TODO: remove this after derive(From) has been removed + #[display("Serialization failed: {_0}")] #[diagnostic(code(pacquet_registry::serialization_error))] - Serialization(String), + Serialization(#[error(not(source))] String), } diff --git a/crates/registry/src/package.rs b/crates/registry/src/package.rs index 79f0f03ad..8779f1c8a 100644 --- a/crates/registry/src/package.rs +++ b/crates/registry/src/package.rs @@ -46,7 +46,7 @@ impl Package { } pub fn pinned_version(&self, version_range: &str) -> Option<&PackageVersion> { - let range: node_semver::Range = version_range.parse().unwrap(); // TODO: this step 
should have happened in PackageJson + let range: node_semver::Range = version_range.parse().unwrap(); // TODO: this step should have happened in PackageManifest let mut satisfied_versions = self .versions .values() diff --git a/crates/registry/src/package_tag.rs b/crates/registry/src/package_tag.rs new file mode 100644 index 000000000..680a72f01 --- /dev/null +++ b/crates/registry/src/package_tag.rs @@ -0,0 +1,24 @@ +use derive_more::{Display, From, TryInto}; +use node_semver::{SemverError, Version}; +use std::str::FromStr; + +/// Version or tag that is attachable to a registry URL. +#[derive(Debug, Display, From, TryInto)] +pub enum PackageTag { + /// Literally `latest``. + #[display("latest")] + Latest, + /// Pinned version. + Version(Version), +} + +impl FromStr for PackageTag { + type Err = SemverError; + fn from_str(value: &str) -> Result { + if value == "latest" { + Ok(PackageTag::Latest) + } else { + value.parse::().map(PackageTag::Version) + } + } +} diff --git a/crates/registry/src/package_version.rs b/crates/registry/src/package_version.rs index 1580da92e..dcaca9217 100644 --- a/crates/registry/src/package_version.rs +++ b/crates/registry/src/package_version.rs @@ -1,9 +1,9 @@ -use std::{collections::HashMap, fmt::Display}; +use std::collections::HashMap; use pipe_trait::Pipe; use serde::{Deserialize, Serialize}; -use crate::{package_distribution::PackageDistribution, NetworkError, RegistryError}; +use crate::{package_distribution::PackageDistribution, NetworkError, PackageTag, RegistryError}; #[derive(Serialize, Deserialize, Debug, Clone, Eq)] #[serde(rename_all = "camelCase")] @@ -25,11 +25,11 @@ impl PartialEq for PackageVersion { impl PackageVersion { pub async fn fetch_from_registry( name: &str, - version: impl Display, // TODO: change to node_semver::Version to increase resistance against programmer errors + tag: PackageTag, http_client: &reqwest::Client, registry: &str, ) -> Result { - let url = || format!("{registry}{name}/{version}"); + let url = 
|| format!("{registry}{name}/{tag}"); let network_error = |error| NetworkError { error, url: url() }; http_client diff --git a/crates/tarball/Cargo.toml b/crates/tarball/Cargo.toml index b273503af..ae787be44 100644 --- a/crates/tarball/Cargo.toml +++ b/crates/tarball/Cargo.toml @@ -1,5 +1,5 @@ [package] -name = "pacquet_tarball" +name = "pacquet-tarball" version = "0.0.1" publish = false authors.workspace = true @@ -11,16 +11,19 @@ license.workspace = true repository.workspace = true [dependencies] -pacquet_cafs = { workspace = true } -pacquet_diagnostics = { workspace = true } +pacquet-cafs = { workspace = true } +pacquet-diagnostics = { workspace = true } dashmap = { workspace = true } +derive_more = { workspace = true } +miette = { workspace = true } pipe-trait = { workspace = true } reqwest = { workspace = true } ssri = { workspace = true } tar = { workspace = true } tokio = { workspace = true } zune-inflate = { workspace = true } +tracing = { workspace = true } [dev-dependencies] pretty_assertions = { workspace = true } diff --git a/crates/tarball/src/lib.rs b/crates/tarball/src/lib.rs index f33783f95..09222a56c 100644 --- a/crates/tarball/src/lib.rs +++ b/crates/tarball/src/lib.rs @@ -7,76 +7,74 @@ use std::{ }; use dashmap::DashMap; -use pacquet_diagnostics::{ - miette::{self, Diagnostic}, - thiserror::{self, Error}, - tracing::{self, instrument}, -}; +use derive_more::{Display, Error, From}; +use miette::Diagnostic; use pipe_trait::Pipe; use reqwest::Client; use ssri::{Integrity, IntegrityChecker}; use tar::Archive; use tokio::sync::{Notify, RwLock}; +use tracing::instrument; use zune_inflate::{errors::InflateDecodeErrors, DeflateDecoder, DeflateOptions}; -#[derive(Error, Debug, Diagnostic)] -#[error("Failed to fetch {url}: {error}")] +#[derive(Debug, Display, Error, Diagnostic)] +#[display("Failed to fetch {url}: {error}")] pub struct NetworkError { pub url: String, pub error: reqwest::Error, } -#[derive(Error, Debug, Diagnostic)] -#[error("Cannot parse 
{integrity:?} from {url} as an integrity: {error}")] +#[derive(Debug, Display, Error, Diagnostic)] +#[display("Cannot parse {integrity:?} from {url} as an integrity: {error}")] pub struct ParseIntegrityError { pub url: String, pub integrity: String, - #[source] + #[error(source)] pub error: ssri::Error, } -#[derive(Error, Debug, Diagnostic)] -#[error("Failed to verify the integrity of {url}: {error}")] +#[derive(Debug, Display, Error, Diagnostic)] +#[display("Failed to verify the integrity of {url}: {error}")] pub struct VerifyChecksumError { pub url: String, - #[source] + #[error(source)] pub error: ssri::Error, } -#[derive(Error, Debug, Diagnostic)] +#[derive(Debug, Display, Error, From, Diagnostic)] #[non_exhaustive] pub enum TarballError { - #[error(transparent)] - #[diagnostic(code(pacquet_tarball::request_error))] - Network(#[from] NetworkError), + #[diagnostic(code(pacquet_tarball::fetch_tarball))] + FetchTarball(NetworkError), - #[error(transparent)] + #[from(ignore)] #[diagnostic(code(pacquet_tarball::io_error))] - Io(#[from] std::io::Error), + ReadTarballEntries(std::io::Error), - #[error(transparent)] #[diagnostic(code(pacquet_tarball::parse_integrity_error))] - ParseIntegrity(#[from] ParseIntegrityError), + ParseIntegrity(ParseIntegrityError), - #[error(transparent)] #[diagnostic(code(pacquet_tarball::verify_checksum_error))] - Checksum(#[from] VerifyChecksumError), + Checksum(VerifyChecksumError), - #[error("integrity creation failed: {}", _0)] + #[from(ignore)] + #[display("Integrity creation failed: {_0}")] #[diagnostic(code(pacquet_tarball::integrity_error))] - Integrity(#[from] ssri::Error), + Integrity(ssri::Error), - #[error(transparent)] - #[diagnostic(code(pacquet_tarball::decompression_error))] - Decompression(#[from] InflateDecodeErrors), + #[from(ignore)] + #[display("Failed to decode gzip: {_0}")] + #[diagnostic(code(pacquet_tarball::decode_gzip))] + DecodeGzip(InflateDecodeErrors), - #[error(transparent)] + #[from(ignore)] + 
#[display("Failed to write cafs: {_0}")] #[diagnostic(transparent)] - Cafs(#[from] pacquet_cafs::CafsError), + WriteCafs(pacquet_cafs::CafsError), - #[error(transparent)] + #[from(ignore)] #[diagnostic(code(pacquet_tarball::task_join_error))] - TaskJoin(#[from] tokio::task::JoinError), + TaskJoin(tokio::task::JoinError), } /// Value of the cache. @@ -101,10 +99,9 @@ fn decompress_gzip(gz_data: &[u8], unpacked_size: Option) -> Result Result, - package_url: &str, -) -> Result>, TarballError> { - // QUESTION: I see no copying from existing store_dir, is there such mechanism? - // TODO: If it's not implemented yet, implement it - - if let Some(cache_lock) = cache.get(package_url) { - let notify = match &*cache_lock.write().await { - CacheValue::Available(cas_paths) => { +/// This subroutine downloads and extracts a tarball to the store directory. +/// +/// It returns a CAS map of files in the tarball. +#[must_use] +pub struct DownloadTarballToStore<'a> { + pub tarball_cache: &'a Cache, + pub http_client: &'a Client, + pub store_dir: &'static Path, + pub package_integrity: &'a str, + pub package_unpacked_size: Option, + pub package_url: &'a str, +} + +impl<'a> DownloadTarballToStore<'a> { + /// Execute the subroutine. + pub async fn run(self) -> Result>, TarballError> { + let &DownloadTarballToStore { tarball_cache, package_url, .. } = &self; + + // QUESTION: I see no copying from existing store_dir, is there such mechanism? 
+ // TODO: If it's not implemented yet, implement it + + if let Some(cache_lock) = tarball_cache.get(package_url) { + let notify = match &*cache_lock.write().await { + CacheValue::Available(cas_paths) => { + return Ok(Arc::clone(cas_paths)); + } + CacheValue::InProgress(notify) => Arc::clone(notify), + }; + + tracing::info!(target: "pacquet::download", ?package_url, "Wait for cache"); + notify.notified().await; + if let CacheValue::Available(cas_paths) = &*cache_lock.read().await { return Ok(Arc::clone(cas_paths)); } - CacheValue::InProgress(notify) => Arc::clone(notify), - }; - - tracing::info!(target: "pacquet::download", ?package_url, "Wait for cache"); - notify.notified().await; - if let CacheValue::Available(cas_paths) = &*cache_lock.read().await { - return Ok(Arc::clone(cas_paths)); - } - unreachable!("Failed to get or compute tarball data for {package_url:?}"); - } else { - let notify = Arc::new(Notify::new()); - let cache_lock = notify - .pipe_ref(Arc::clone) - .pipe(CacheValue::InProgress) - .pipe(RwLock::new) - .pipe(Arc::new); - if cache.insert(package_url.to_string(), Arc::clone(&cache_lock)).is_some() { - tracing::warn!(target: "pacquet::download", ?package_url, "Race condition detected when writing to cache"); + unreachable!("Failed to get or compute tarball data for {package_url:?}"); + } else { + let notify = Arc::new(Notify::new()); + let cache_lock = notify + .pipe_ref(Arc::clone) + .pipe(CacheValue::InProgress) + .pipe(RwLock::new) + .pipe(Arc::new); + if tarball_cache.insert(package_url.to_string(), Arc::clone(&cache_lock)).is_some() { + tracing::warn!(target: "pacquet::download", ?package_url, "Race condition detected when writing to cache"); + } + let cas_paths = self.without_cache().await?; + let mut cache_write = cache_lock.write().await; + *cache_write = CacheValue::Available(Arc::clone(&cas_paths)); + notify.notify_waiters(); + Ok(cas_paths) } - let cas_paths = download_tarball_to_store_uncached( - package_url, + } + + async fn 
without_cache(&self) -> Result>, TarballError> { + let &DownloadTarballToStore { http_client, store_dir, package_integrity, package_unpacked_size, - ) - .await?; - let mut cache_write = cache_lock.write().await; - *cache_write = CacheValue::Available(Arc::clone(&cas_paths)); - notify.notify_waiters(); - Ok(cas_paths) - } -} + package_url, + .. + } = self; -async fn download_tarball_to_store_uncached( - package_url: &str, - http_client: &Client, - store_dir: &'static Path, - package_integrity: &str, - package_unpacked_size: Option, -) -> Result>, TarballError> { - tracing::info!(target: "pacquet::download", ?package_url, "New cache"); - - let network_error = |error| NetworkError { url: package_url.to_string(), error }; - let response = http_client - .get(package_url) - .send() - .await - .map_err(network_error)? - .bytes() - .await - .map_err(network_error)?; - - tracing::info!(target: "pacquet::download", ?package_url, "Download completed"); - - let package_integrity: Integrity = - package_integrity.parse().map_err(|error| ParseIntegrityError { - url: package_url.to_string(), - integrity: package_integrity.to_string(), - error, - })?; - enum TaskError { - Checksum(ssri::Error), - Other(TarballError), - } - let cas_paths = tokio::task::spawn(async move { - verify_checksum(&response, package_integrity).map_err(TaskError::Checksum)?; - let data = decompress_gzip(&response, package_unpacked_size).map_err(TaskError::Other)?; - Archive::new(Cursor::new(data)) - .entries() - .map_err(TarballError::Io) - .map_err(TaskError::Other)? 
- .filter(|entry| !entry.as_ref().unwrap().header().entry_type().is_dir()) - .map(|entry| -> Result<(OsString, PathBuf), TarballError> { - let mut entry = entry.unwrap(); - - // Read the contents of the entry - let mut buffer = Vec::with_capacity(entry.size() as usize); - entry.read_to_end(&mut buffer).unwrap(); - - let entry_path = entry.path().unwrap(); - let cleaned_entry_path = - entry_path.components().skip(1).collect::().into_os_string(); - let integrity = pacquet_cafs::write_sync(store_dir, &buffer)?; - - Ok((cleaned_entry_path, store_dir.join(integrity))) - }) - .collect::, TarballError>>() - .map_err(TaskError::Other) - }) - .await - .expect("no join error") - .map_err(|error| match error { - TaskError::Checksum(error) => { - TarballError::Checksum(VerifyChecksumError { url: package_url.to_string(), error }) + tracing::info!(target: "pacquet::download", ?package_url, "New cache"); + + let network_error = |error| { + TarballError::FetchTarball(NetworkError { url: package_url.to_string(), error }) + }; + let response = http_client + .get(package_url) + .send() + .await + .map_err(network_error)? + .bytes() + .await + .map_err(network_error)?; + + tracing::info!(target: "pacquet::download", ?package_url, "Download completed"); + + let package_integrity: Integrity = + package_integrity.parse().map_err(|error| ParseIntegrityError { + url: package_url.to_string(), + integrity: package_integrity.to_string(), + error, + })?; + enum TaskError { + Checksum(ssri::Error), + Other(TarballError), } - TaskError::Other(error) => error, - })? - .pipe(Arc::new); + let cas_paths = tokio::task::spawn(async move { + verify_checksum(&response, package_integrity).map_err(TaskError::Checksum)?; + let data = + decompress_gzip(&response, package_unpacked_size).map_err(TaskError::Other)?; + Archive::new(Cursor::new(data)) + .entries() + .map_err(TarballError::ReadTarballEntries) + .map_err(TaskError::Other)? 
+ .filter(|entry| !entry.as_ref().unwrap().header().entry_type().is_dir()) + .map(|entry| -> Result<(OsString, PathBuf), TarballError> { + let mut entry = entry.unwrap(); + + // Read the contents of the entry + let mut buffer = Vec::with_capacity(entry.size() as usize); + entry.read_to_end(&mut buffer).unwrap(); + + let entry_path = entry.path().unwrap(); + let cleaned_entry_path = + entry_path.components().skip(1).collect::().into_os_string(); + let integrity = pacquet_cafs::write_sync(store_dir, &buffer) + .map_err(TarballError::WriteCafs)?; + + Ok((cleaned_entry_path, store_dir.join(integrity))) + }) + .collect::, TarballError>>() + .map_err(TaskError::Other) + }) + .await + .expect("no join error") + .map_err(|error| match error { + TaskError::Checksum(error) => { + TarballError::Checksum(VerifyChecksumError { url: package_url.to_string(), error }) + } + TaskError::Other(error) => error, + })? + .pipe(Arc::new); - tracing::info!(target: "pacquet::download", ?package_url, "Checksum verified"); + tracing::info!(target: "pacquet::download", ?package_url, "Checksum verified"); - Ok(cas_paths) + Ok(cas_paths) + } } #[cfg(test)] @@ -262,14 +269,15 @@ mod tests { #[cfg(not(target_os = "windows"))] async fn packages_under_orgs_should_work() { let (store_dir, store_path) = tempdir_with_leaked_path(); - let cas_files = download_tarball_to_store( - &Default::default(), - &Client::new(), - store_path, - "sha512-dj7vjIn1Ar8sVXj2yAXiMNCJDmS9MQ9XMlIecX2dIzzhjSHCyKo4DdXjXMs7wKW2kj6yvVRSpuQjOZ3YLrh56w==", - Some(16697), - "https://registry.npmjs.org/@fastify/error/-/error-3.3.0.tgz", - ) + let cas_files = DownloadTarballToStore { + tarball_cache: &Default::default(), + http_client: &Default::default(), + store_dir: store_path, + package_integrity: "sha512-dj7vjIn1Ar8sVXj2yAXiMNCJDmS9MQ9XMlIecX2dIzzhjSHCyKo4DdXjXMs7wKW2kj6yvVRSpuQjOZ3YLrh56w==", + package_unpacked_size: Some(16697), + package_url: "https://registry.npmjs.org/@fastify/error/-/error-3.3.0.tgz" + } + .run() .await 
.unwrap(); @@ -301,14 +309,15 @@ mod tests { #[tokio::test] async fn should_throw_error_on_checksum_mismatch() { let (store_dir, store_path) = tempdir_with_leaked_path(); - download_tarball_to_store( - &Default::default(), - &Client::new(), - store_path, - "sha512-aaaan1Ar8sVXj2yAXiMNCJDmS9MQ9XMlIecX2dIzzhjSHCyKo4DdXjXMs7wKW2kj6yvVRSpuQjOZ3YLrh56w==", - Some(16697), - "https://registry.npmjs.org/@fastify/error/-/error-3.3.0.tgz", - ) + DownloadTarballToStore { + tarball_cache: &Default::default(), + http_client: &Default::default(), + store_dir: store_path, + package_integrity: "sha512-aaaan1Ar8sVXj2yAXiMNCJDmS9MQ9XMlIecX2dIzzhjSHCyKo4DdXjXMs7wKW2kj6yvVRSpuQjOZ3YLrh56w==", + package_unpacked_size: Some(16697), + package_url: "https://registry.npmjs.org/@fastify/error/-/error-3.3.0.tgz", + } + .run() .await .expect_err("checksum mismatch"); diff --git a/crates/testing-utils/Cargo.toml b/crates/testing-utils/Cargo.toml new file mode 100644 index 000000000..c645c9045 --- /dev/null +++ b/crates/testing-utils/Cargo.toml @@ -0,0 +1,20 @@ +[package] +name = "pacquet-testing-utils" +version = "0.0.0" +description = "Common utilities to test pacquet code" +publish = false +authors.workspace = true +edition.workspace = true +homepage.workspace = true +keywords.workspace = true +license.workspace = true +repository.workspace = true + +[dependencies] +assert_cmd = { workspace = true } +command-extra = { workspace = true } +tempfile = { workspace = true } +walkdir = { workspace = true } + +[target.'cfg(windows)'.dependencies] +junction = { workspace = true } diff --git a/crates/testing-utils/src/bin.rs b/crates/testing-utils/src/bin.rs new file mode 100644 index 000000000..c51ceb86e --- /dev/null +++ b/crates/testing-utils/src/bin.rs @@ -0,0 +1,12 @@ +use assert_cmd::prelude::*; +use command_extra::CommandExtra; +use std::process::Command; +use tempfile::{tempdir, TempDir}; + +pub fn pacquet_with_temp_cwd() -> (Command, TempDir) { + let current_dir = tempdir().expect("create 
temporary working directory for pacquet"); + let command = Command::cargo_bin("pacquet") + .expect("find the pacquet binary") + .with_current_dir(current_dir.path()); + (command, current_dir) +} diff --git a/crates/cli/src/fs.rs b/crates/testing-utils/src/fs.rs similarity index 74% rename from crates/cli/src/fs.rs rename to crates/testing-utils/src/fs.rs index 601f99035..f6b029021 100644 --- a/crates/cli/src/fs.rs +++ b/crates/testing-utils/src/fs.rs @@ -1,17 +1,5 @@ use std::{io, path::Path}; -#[cfg(unix)] -pub fn symlink_dir(original: &Path, link: &Path) -> io::Result<()> { - std::os::unix::fs::symlink(original, link) -} - -#[cfg(windows)] -pub fn symlink_dir(original: &Path, link: &Path) -> io::Result<()> { - // In Windows, we use junctions instead of symlinks because symlinks may require elevated privileges. - junction::create(original, link) -} - -#[cfg(test)] pub fn get_filenames_in_folder(path: &Path) -> Vec { let mut files = std::fs::read_dir(path) .unwrap() @@ -22,7 +10,6 @@ pub fn get_filenames_in_folder(path: &Path) -> Vec { files } -#[cfg(test)] pub fn get_all_folders(root: &std::path::Path) -> Vec { let mut files = Vec::new(); for entry in walkdir::WalkDir::new(root) { @@ -47,3 +34,12 @@ pub fn get_all_folders(root: &std::path::Path) -> Vec { files.sort(); files } + +// Helper function to check if a path is a symlink or junction +pub fn is_symlink_or_junction(path: &Path) -> io::Result { + #[cfg(windows)] + return junction::exists(path); + + #[cfg(not(windows))] + return Ok(path.is_symlink()); +} diff --git a/crates/testing-utils/src/lib.rs b/crates/testing-utils/src/lib.rs new file mode 100644 index 000000000..36ebebf8e --- /dev/null +++ b/crates/testing-utils/src/lib.rs @@ -0,0 +1,2 @@ +pub mod bin; +pub mod fs; diff --git a/tasks/integrated-benchmark/Cargo.toml b/tasks/integrated-benchmark/Cargo.toml index 5e9c6cdbb..c595a6ff2 100644 --- a/tasks/integrated-benchmark/Cargo.toml +++ b/tasks/integrated-benchmark/Cargo.toml @@ -1,5 +1,5 @@ [package] 
-name = "pacquet_integrated_benchmark" +name = "pacquet-integrated-benchmark" version = "0.0.0" publish = false authors.workspace = true diff --git a/tasks/integrated-benchmark/src/fixtures.rs b/tasks/integrated-benchmark/src/fixtures.rs new file mode 100644 index 000000000..d998722fb --- /dev/null +++ b/tasks/integrated-benchmark/src/fixtures.rs @@ -0,0 +1,2 @@ +pub const PACKAGE_JSON: &str = include_str!("fixtures/package.json"); +pub const LOCKFILE: &str = include_str!("fixtures/pnpm-lock.yaml"); diff --git a/tasks/integrated-benchmark/src/fixtures/mod.rs b/tasks/integrated-benchmark/src/fixtures/mod.rs deleted file mode 100644 index 9b8b287d2..000000000 --- a/tasks/integrated-benchmark/src/fixtures/mod.rs +++ /dev/null @@ -1,2 +0,0 @@ -pub const PACKAGE_JSON: &str = include_str!("package.json"); -pub const LOCKFILE: &str = include_str!("pnpm-lock.yaml"); diff --git a/tasks/micro-benchmark/Cargo.toml b/tasks/micro-benchmark/Cargo.toml index 7a0f7ea0a..cb620dee8 100644 --- a/tasks/micro-benchmark/Cargo.toml +++ b/tasks/micro-benchmark/Cargo.toml @@ -1,5 +1,5 @@ [package] -name = "pacquet_micro_benchmark" +name = "pacquet-micro-benchmark" version = "0.0.0" publish = false authors.workspace = true @@ -15,8 +15,8 @@ name = "micro-benchmark" path = "src/main.rs" [dependencies] -pacquet_registry = { workspace = true } -pacquet_tarball = { workspace = true } +pacquet-registry = { workspace = true } +pacquet-tarball = { workspace = true } clap = { workspace = true } criterion = { workspace = true } diff --git a/tasks/micro-benchmark/src/main.rs b/tasks/micro-benchmark/src/main.rs index 5ad072f2e..96f1ae9a6 100644 --- a/tasks/micro-benchmark/src/main.rs +++ b/tasks/micro-benchmark/src/main.rs @@ -3,7 +3,7 @@ use std::{fs, path::Path}; use clap::Parser; use criterion::{Criterion, Throughput}; use mockito::ServerGuard; -use pacquet_tarball::download_tarball_to_store; +use pacquet_tarball::DownloadTarballToStore; use pipe_trait::Pipe; use project_root::get_project_root; use 
reqwest::Client; @@ -32,14 +32,14 @@ fn bench_tarball(c: &mut Criterion, server: &mut ServerGuard, fixtures_folder: & let http_client = Client::new(); let cas_map = - download_tarball_to_store( - &Default::default(), - &http_client, - dir.path(), - "sha512-dj7vjIn1Ar8sVXj2yAXiMNCJDmS9MQ9XMlIecX2dIzzhjSHCyKo4DdXjXMs7wKW2kj6yvVRSpuQjOZ3YLrh56w==", - Some(16697), - url, - ).await.unwrap(); + DownloadTarballToStore{ + tarball_cache: &Default::default(), + http_client: &http_client, + store_dir: dir.path(), + package_integrity: "sha512-dj7vjIn1Ar8sVXj2yAXiMNCJDmS9MQ9XMlIecX2dIzzhjSHCyKo4DdXjXMs7wKW2kj6yvVRSpuQjOZ3YLrh56w==", + package_unpacked_size: Some(16697), + package_url: url, + }.run().await.unwrap(); cas_map.len() }); });