From f0212d6d8873ff53a55e87d9269282ff0a627feb Mon Sep 17 00:00:00 2001 From: Nick Gerace Date: Mon, 21 Aug 2023 14:41:01 -0400 Subject: [PATCH 01/92] Add initial contents of "mostly everything is a node or an edge" This commits moves the engine-layer towards a design where Workspaces are snapshots of directed, acyclic graphs. In this design, ChangeSets point to a existing snapshot rather than overlaying rows on top of each other in the database. In addition, all graphs are immutable. This commit also adds scaffolding for the Gobbler, which will handle processing and performing updates for the new design. An initial RabbitMQ implementation and library has been added because of its durable properties, which NATS does not provide. For data access, this design uses a pull-through cache and a content store in the database. Signed-off-by: Nick Gerace Co-authored-by: Jacob Helwig --- .ci/docker-compose.test-integration.yml | 2 +- Cargo.lock | 140 +- Cargo.toml | 58 +- bin/gobbler/BUCK | 26 + bin/gobbler/Cargo.toml | 17 + bin/gobbler/Dockerfile | 38 + bin/gobbler/src/args.rs | 112 + bin/gobbler/src/main.rs | 56 + component/postgres/BUCK | 2 +- dev/Tiltfile | 3 +- dev/docker-compose.platform.yml | 10 +- lib/dal/BUCK | 1 + lib/dal/Cargo.toml | 1 + lib/dal/src/content.rs | 6 + lib/dal/src/content/hash.rs | 111 + lib/dal/src/content/pair.rs | 77 + lib/dal/src/content/store.rs | 194 ++ lib/dal/src/lib.rs | 17 +- .../migrations/U3000__workspace_snapshots.sql | 18 + .../src/migrations/U3001__content_pairs.sql | 19 + lib/dal/src/workspace.rs | 1 + lib/dal/src/workspace_snapshot.rs | 130 + lib/dal/src/workspace_snapshot/change_set.rs | 54 + lib/dal/src/workspace_snapshot/conflict.rs | 30 + lib/dal/src/workspace_snapshot/edge_weight.rs | 73 + lib/dal/src/workspace_snapshot/graph.rs | 2204 +++++++++++++++++ .../src/workspace_snapshot/lamport_clock.rs | 69 + lib/dal/src/workspace_snapshot/node_weight.rs | 163 ++ .../node_weight/content_node_weight.rs | 200 ++ 
.../node_weight/ordering_node_weight.rs | 156 ++ lib/dal/src/workspace_snapshot/update.rs | 21 + .../src/workspace_snapshot/vector_clock.rs | 115 + lib/gobbler-server/BUCK | 69 + lib/gobbler-server/Cargo.toml | 33 + lib/gobbler-server/src/config.rs | 197 ++ lib/gobbler-server/src/lib.rs | 25 + lib/gobbler-server/src/server.rs | 608 +++++ lib/gobbler-server/tests/integration.rs | 3 + .../tests/integration_test/connection.rs | 25 + .../tests/integration_test/mod.rs | 1 + lib/si-rabbitmq/BUCK | 21 + lib/si-rabbitmq/Cargo.toml | 20 + lib/si-rabbitmq/src/connection.rs | 40 + lib/si-rabbitmq/src/consumer.rs | 37 + lib/si-rabbitmq/src/error.rs | 32 + lib/si-rabbitmq/src/lib.rs | 35 + lib/si-rabbitmq/src/producer.rs | 58 + lib/si-test-macros/src/lib.rs | 10 + third-party/rust/BUCK | 185 +- third-party/rust/Cargo.lock | 82 +- third-party/rust/Cargo.toml | 51 +- third-party/rust/fixups/borsh/fixups.toml | 4 + .../rust/fixups/crossbeam-epoch/fixups.toml | 2 + .../rust/fixups/curve25519-dalek/fixups.toml | 4 + .../rust/fixups/mime_guess/fixups.toml | 2 +- .../system-configuration-sys/fixups.toml | 4 + 56 files changed, 5634 insertions(+), 38 deletions(-) create mode 100644 bin/gobbler/BUCK create mode 100644 bin/gobbler/Cargo.toml create mode 100644 bin/gobbler/Dockerfile create mode 100644 bin/gobbler/src/args.rs create mode 100644 bin/gobbler/src/main.rs create mode 100644 lib/dal/src/content.rs create mode 100644 lib/dal/src/content/hash.rs create mode 100644 lib/dal/src/content/pair.rs create mode 100644 lib/dal/src/content/store.rs create mode 100644 lib/dal/src/migrations/U3000__workspace_snapshots.sql create mode 100644 lib/dal/src/migrations/U3001__content_pairs.sql create mode 100644 lib/dal/src/workspace_snapshot.rs create mode 100644 lib/dal/src/workspace_snapshot/change_set.rs create mode 100644 lib/dal/src/workspace_snapshot/conflict.rs create mode 100644 lib/dal/src/workspace_snapshot/edge_weight.rs create mode 100644 lib/dal/src/workspace_snapshot/graph.rs 
create mode 100644 lib/dal/src/workspace_snapshot/lamport_clock.rs create mode 100644 lib/dal/src/workspace_snapshot/node_weight.rs create mode 100644 lib/dal/src/workspace_snapshot/node_weight/content_node_weight.rs create mode 100644 lib/dal/src/workspace_snapshot/node_weight/ordering_node_weight.rs create mode 100644 lib/dal/src/workspace_snapshot/update.rs create mode 100644 lib/dal/src/workspace_snapshot/vector_clock.rs create mode 100644 lib/gobbler-server/BUCK create mode 100644 lib/gobbler-server/Cargo.toml create mode 100644 lib/gobbler-server/src/config.rs create mode 100644 lib/gobbler-server/src/lib.rs create mode 100644 lib/gobbler-server/src/server.rs create mode 100644 lib/gobbler-server/tests/integration.rs create mode 100644 lib/gobbler-server/tests/integration_test/connection.rs create mode 100644 lib/gobbler-server/tests/integration_test/mod.rs create mode 100644 lib/si-rabbitmq/BUCK create mode 100644 lib/si-rabbitmq/Cargo.toml create mode 100644 lib/si-rabbitmq/src/connection.rs create mode 100644 lib/si-rabbitmq/src/consumer.rs create mode 100644 lib/si-rabbitmq/src/error.rs create mode 100644 lib/si-rabbitmq/src/lib.rs create mode 100644 lib/si-rabbitmq/src/producer.rs create mode 100644 third-party/rust/fixups/borsh/fixups.toml create mode 100644 third-party/rust/fixups/crossbeam-epoch/fixups.toml create mode 100644 third-party/rust/fixups/curve25519-dalek/fixups.toml create mode 100644 third-party/rust/fixups/system-configuration-sys/fixups.toml diff --git a/.ci/docker-compose.test-integration.yml b/.ci/docker-compose.test-integration.yml index 4c8ccec5e6..2adb66b52a 100644 --- a/.ci/docker-compose.test-integration.yml +++ b/.ci/docker-compose.test-integration.yml @@ -20,7 +20,7 @@ services: - "PGPASSWORD=bugbear" - "POSTGRES_USER=si" - "POSTGRES_DB=si" - - "POSTGRES_MULTIPLE_DBS=si_test,si_test_dal,si_test_sdf_server,si_auth,si_module_index" + - 
"POSTGRES_MULTIPLE_DBS=si_auth,si_module_index,si_test,si_test_dal,si_test_sdf_server,si_test_gobbler" nats: image: systeminit/nats:stable diff --git a/Cargo.lock b/Cargo.lock index 8f827058bb..a602c748a6 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -567,7 +567,7 @@ checksum = "0754613691538d51f329cce9af41d7b7ca150bc973056f1156611489475f54f7" dependencies = [ "borsh-derive-internal", "borsh-schema-derive-internal", - "proc-macro-crate", + "proc-macro-crate 0.1.5", "proc-macro2", "syn 1.0.109", ] @@ -1397,7 +1397,7 @@ dependencies = [ "anyhow", "html-escape", "nom", - "ordered-float", + "ordered-float 2.10.0", ] [[package]] @@ -2101,6 +2101,46 @@ dependencies = [ "regex", ] +[[package]] +name = "gobbler" +version = "0.1.0" +dependencies = [ + "clap", + "color-eyre", + "gobbler-server", + "telemetry-application", + "tokio", +] + +[[package]] +name = "gobbler-server" +version = "0.1.0" +dependencies = [ + "buck2-resources", + "dal", + "dal-test", + "derive_builder", + "futures", + "nats-subscriber", + "remain", + "serde", + "serde_json", + "si-crypto", + "si-data-nats", + "si-data-pg", + "si-rabbitmq", + "si-settings", + "si-std", + "si-test-macros", + "stream-cancel", + "telemetry", + "thiserror", + "tokio", + "tokio-stream", + "ulid", + "veritech-client", +] + [[package]] name = "group" version = "0.13.0" @@ -3084,6 +3124,27 @@ dependencies = [ "libc", ] +[[package]] +name = "num_enum" +version = "0.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7a015b430d3c108a207fd776d2e2196aaf8b1cf8cf93253e3a097ff3085076a1" +dependencies = [ + "num_enum_derive", +] + +[[package]] +name = "num_enum_derive" +version = "0.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "96667db765a921f7b295ffee8b60472b686a51d4f21c2ee4ffdb94c7013b65a6" +dependencies = [ + "proc-macro-crate 1.3.1", + "proc-macro2", + "quote", + "syn 2.0.29", +] + [[package]] name = "number_prefix" version = "0.4.0" @@ -3249,6 +3310,15 @@ 
dependencies = [ "num-traits", ] +[[package]] +name = "ordered-float" +version = "3.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2fc2dbde8f8a79f2102cc474ceb0ad68e3b80b85289ea62389b60e66777e4213" +dependencies = [ + "num-traits", +] + [[package]] name = "ordered-multimap" version = "0.4.3" @@ -3727,6 +3797,16 @@ dependencies = [ "toml 0.5.11", ] +[[package]] +name = "proc-macro-crate" +version = "1.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7f4c021e1093a56626774e81216a4ce732a735e5bad4868a03f3ed65ca0c3919" +dependencies = [ + "once_cell", + "toml_edit", +] + [[package]] name = "proc-macro-error" version = "1.0.4" @@ -3853,6 +3933,43 @@ dependencies = [ "proc-macro2", ] +[[package]] +name = "rabbitmq-stream-client" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "735d6cecec44dc27382b962309c05de47d40727ff9898a501ec8fadec76be9a8" +dependencies = [ + "async-trait", + "bytes 1.4.0", + "dashmap", + "futures", + "pin-project 1.1.3", + "rabbitmq-stream-protocol", + "rand 0.8.5", + "rustls-pemfile", + "thiserror", + "tokio", + "tokio-rustls 0.24.1", + "tokio-stream", + "tokio-util", + "tracing", + "url", +] + +[[package]] +name = "rabbitmq-stream-protocol" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ed68734bea9f111e2541f7d1cb8f2b109959839173589183e09e53771d62092f" +dependencies = [ + "byteorder", + "chrono", + "derive_more", + "num_enum", + "ordered-float 3.7.0", + "uuid", +] + [[package]] name = "radium" version = "0.7.0" @@ -4988,6 +5105,25 @@ dependencies = [ "tokio", ] +[[package]] +name = "si-rabbitmq" +version = "0.1.0" +dependencies = [ + "futures", + "futures-lite", + "pin-project-lite", + "rabbitmq-stream-client", + "remain", + "serde", + "serde_json", + "si-data-nats", + "si-test-macros", + "telemetry", + "thiserror", + "tokio", + "ulid", +] + [[package]] name = "si-settings" version = 
"0.1.0" diff --git a/Cargo.toml b/Cargo.toml index 5c16bf5a2b..c62d10458f 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -6,6 +6,7 @@ resolver = "2" members = [ "bin/council", "bin/cyclone", + "bin/gobbler", "bin/module-index", "bin/pinga", "bin/sdf", @@ -15,13 +16,14 @@ members = [ "lib/buck2-resources", "lib/bytes-lines-codec", "lib/config-file", + "lib/council-server", "lib/cyclone-client", "lib/cyclone-core", "lib/cyclone-server", - "lib/council-server", "lib/dal", "lib/dal-test", "lib/deadpool-cyclone", + "lib/gobbler-server", "lib/module-index-client", "lib/module-index-server", "lib/nats-subscriber", @@ -33,10 +35,11 @@ members = [ "lib/si-data-pg", "lib/si-hash", "lib/si-pkg", + "lib/si-posthog-rs", + "lib/si-rabbitmq", "lib/si-settings", "lib/si-std", "lib/si-test-macros", - "lib/si-posthog-rs", "lib/telemetry-application-rs", "lib/telemetry-rs", "lib/veritech-client", @@ -58,7 +61,11 @@ ciborium = "0.2.1" clap = { version = "4.2.7", features = ["derive", "color", "env", "wrap_help"] } color-eyre = "0.6.2" colored = "2.0.4" -comfy-table = { version = "7.0.1", features = ["crossterm", "tty", "custom_styling"] } +comfy-table = { version = "7.0.1", features = [ + "crossterm", + "tty", + "custom_styling", +] } config = { version = "0.13.3", default-features = false, features = ["toml"] } console = "0.15.7" convert_case = "0.6.0" @@ -76,8 +83,15 @@ futures = "0.3.28" futures-lite = "1.13.0" hex = "0.4.3" http = "0.2.9" -hyper = { version = "0.14.26", features = ["client", "http1", "runtime", "server"] } -hyperlocal = { version = "0.8.0", default-features = false, features = ["client"] } +hyper = { version = "0.14.26", features = [ + "client", + "http1", + "runtime", + "server", +] } +hyperlocal = { version = "0.8.0", default-features = false, features = [ + "client", +] } iftree = "1.0.4" indicatif = "0.17.5" indoc = "2.0.1" @@ -91,7 +105,10 @@ nkeys = "0.2.0" num_cpus = "1.15.0" once_cell = "1.17.1" open = "5.0.0" -opentelemetry = { version = "~0.18.0", features 
= ["rt-tokio", "trace"] } # pinned, pending new release of tracing-opentelemetry, 0.18 +opentelemetry = { version = "~0.18.0", features = [ + "rt-tokio", + "trace", +] } # pinned, pending new release of tracing-opentelemetry, 0.18 opentelemetry-otlp = "~0.11.0" # pinned, pending new release of tracing-opentelemetry, post 0.18 opentelemetry-semantic-conventions = "~0.10.0" # pinned, pending new release of tracing-opentelemetry, post 0.18 ouroboros = "0.15.6" @@ -104,14 +121,27 @@ postgres-types = { version = "0.2.5", features = ["derive"] } pretty_assertions_sorted = "1.2.1" proc-macro2 = "1.0.56" quote = "1.0.27" +rabbitmq-stream-client = "0.3.0" rand = "0.8.5" refinery = { version = "0.8.9", features = ["tokio-postgres"] } regex = "1.8.1" remain = "0.2.8" -reqwest = { version = "0.11.17", default-features = false, features = ["rustls-tls", "json", "multipart"] } -rust-s3 = { version = "0.33.0", default-features = false, features = ["tokio-rustls-tls"] } +reqwest = { version = "0.11.17", default-features = false, features = [ + "rustls-tls", + "json", + "multipart", +] } +rust-s3 = { version = "0.33.0", default-features = false, features = [ + "tokio-rustls-tls", +] } rustls = "0.21.6" # pinned, pending update from tokio-rustls for async-nats -sea-orm = { version = "0.11", features = ["sqlx-postgres", "runtime-tokio-rustls", "macros", "with-chrono", "debug-print"] } +sea-orm = { version = "0.11", features = [ + "sqlx-postgres", + "runtime-tokio-rustls", + "macros", + "with-chrono", + "debug-print", +] } self-replace = "1.3.5" serde = { version = "1.0.160", features = ["derive", "rc"] } serde-aux = "4.2.0" @@ -125,10 +155,16 @@ strum = { version = "0.24.1", features = ["derive"] } syn = { version = "2.0.15", features = ["full", "extra-traits"] } tar = "0.4.38" tempfile = "3.5.0" -test-log = { version = "0.2.11", default-features = false, features = ["trace"] } +test-log = { version = "0.2.11", default-features = false, features = [ + "trace", +] } thiserror = 
"1.0.40" tokio = { version = "1.28.0", features = ["full"] } -tokio-postgres = { version = "0.7.8", features = ["runtime", "with-chrono-0_4", "with-serde_json-1"] } +tokio-postgres = { version = "0.7.8", features = [ + "runtime", + "with-chrono-0_4", + "with-serde_json-1", +] } tokio-serde = { version = "0.8.0", features = ["json"] } tokio-stream = "0.1.14" tokio-test = "0.4.2" diff --git a/bin/gobbler/BUCK b/bin/gobbler/BUCK new file mode 100644 index 0000000000..ec6f0ca090 --- /dev/null +++ b/bin/gobbler/BUCK @@ -0,0 +1,26 @@ +load( + "@prelude-si//:macros.bzl", + "docker_image", + "rust_binary", +) + +rust_binary( + name = "gobbler", + deps = [ + "//lib/gobbler-server:gobbler-server", + "//lib/telemetry-application-rs:telemetry-application", + "//third-party/rust:clap", + "//third-party/rust:color-eyre", + "//third-party/rust:tokio", + ], + srcs = glob(["src/**/*.rs"]), + resources = { + "dev.encryption.key": "//lib/cyclone-server:dev.encryption.key", + }, +) + +docker_image( + name = "image", + image_name = "gobbler", + build_deps = ["//bin/gobbler:gobbler"] +) diff --git a/bin/gobbler/Cargo.toml b/bin/gobbler/Cargo.toml new file mode 100644 index 0000000000..ec3fc26da1 --- /dev/null +++ b/bin/gobbler/Cargo.toml @@ -0,0 +1,17 @@ +[package] +name = "gobbler" +version = "0.1.0" +edition = "2021" +rust-version = "1.64" +publish = false + +[[bin]] +name = "gobbler" +path = "src/main.rs" + +[dependencies] +clap = { workspace = true } +color-eyre = { workspace = true } +gobbler-server = { path = "../../lib/gobbler-server" } +telemetry-application = { path = "../../lib/telemetry-application-rs" } +tokio = { workspace = true } diff --git a/bin/gobbler/Dockerfile b/bin/gobbler/Dockerfile new file mode 100644 index 0000000000..7db829a9dd --- /dev/null +++ b/bin/gobbler/Dockerfile @@ -0,0 +1,38 @@ +# hadolint ignore=DL3007 +FROM nixos/nix:latest AS builder +ARG BIN=gobbler + +COPY . 
/workdir +WORKDIR /workdir + +RUN set -eux; \ + nix \ + --extra-experimental-features "nix-command flakes impure-derivations ca-derivations" \ + --option filter-syscalls false \ + build \ + ".#$BIN"; + +RUN mkdir -p /tmp/nix-store-closure /tmp/local-bin +# hadolint ignore=SC2046 +RUN cp -R $(nix-store --query --requisites result/) /tmp/nix-store-closure +# hadolint ignore=SC2046 +RUN ln -snf $(nix-store --query result/)/bin/* /tmp/local-bin/ + +FROM alpine:3 AS final +ARG BIN=gobbler + +# hadolint ignore=DL3018 +RUN set -eux; \ + apk add --no-cache runuser; \ + adduser -D app; \ + for dir in /run /etc /usr/local/etc /home/app/.config; do \ + mkdir -pv "$dir/$BIN"; \ + done; + +WORKDIR /run/$BIN +COPY --from=builder /tmp/nix-store-closure /nix/store +COPY --from=builder /tmp/local-bin/* /usr/local/bin/ + +ENTRYPOINT [ \ + "/sbin/runuser", "-u", "app", "--", "/usr/local/bin/gobbler" \ +] diff --git a/bin/gobbler/src/args.rs b/bin/gobbler/src/args.rs new file mode 100644 index 0000000000..28ee5b2d5f --- /dev/null +++ b/bin/gobbler/src/args.rs @@ -0,0 +1,112 @@ +use clap::{ArgAction, Parser}; +use gobbler_server::{Config, ConfigError, ConfigFile, StandardConfigFile}; + +const NAME: &str = "gobbler"; + +/// Parse, validate, and return the CLI arguments as a typed struct. +pub(crate) fn parse() -> Args { + Args::parse() +} + +#[derive(Parser, Debug)] +#[command(name = NAME, max_term_width = 100)] +pub(crate) struct Args { + /// Sets the verbosity mode. + /// + /// Multiple -v options increase verbosity. The maximum is 4. 
+ #[arg(short = 'v', long = "verbose", action = ArgAction::Count)] + pub(crate) verbose: u8, + + /// PostgreSQL connection pool dbname [example: myapp] + #[arg(long)] + pub(crate) pg_dbname: Option, + + /// PostgreSQL connection pool hostname [example: prod.db.example.com] + #[arg(long)] + pub(crate) pg_hostname: Option, + + /// PostgreSQL connection pool max size [example: 8] + #[arg(long)] + pub(crate) pg_pool_max_size: Option, + + /// PostgreSQL connection pool port [example: 5432] + #[arg(long)] + pub(crate) pg_port: Option, + + /// PostgreSQL connection pool user [example: dbuser] + #[arg(long)] + pub(crate) pg_user: Option, + + /// NATS connection URL [example: demo.nats.io] + #[arg(long)] + pub(crate) nats_url: Option, + + /// Disable OpenTelemetry on startup + #[arg(long)] + pub(crate) disable_opentelemetry: bool, + + /// Cyclone encryption key file location [default: /run/gobbler/cyclone_encryption.key] + #[arg(long)] + pub(crate) cyclone_encryption_key_path: Option, + + /// The number of concurrent jobs that can be processed [default: 10] + #[arg(long)] + pub(crate) concurrency: Option, + + /// Instance ID [example: 01GWEAANW5BVFK5KDRVS6DEY0F"] + /// + /// And instance ID is used when tracking the execution of jobs in a way that can be traced + /// back to an instance of a Pinga service. 
+ #[arg(long)] + pub(crate) instance_id: Option, +} + +impl TryFrom for Config { + type Error = ConfigError; + + fn try_from(args: Args) -> Result { + ConfigFile::layered_load(NAME, |config_map| { + if let Some(dbname) = args.pg_dbname { + config_map.set("pg.dbname", dbname); + } + if let Some(hostname) = args.pg_hostname { + config_map.set("pg.hostname", hostname); + } + if let Some(pool_max_size) = args.pg_pool_max_size { + config_map.set("pg.pool_max_size", i64::from(pool_max_size)); + } + if let Some(port) = args.pg_port { + config_map.set("pg.port", i64::from(port)); + } + if let Some(user) = args.pg_user { + config_map.set("pg.user", user); + } + if let Some(url) = args.nats_url { + config_map.set("nats.url", url); + } + if let Some(cyclone_encyption_key_path) = args.cyclone_encryption_key_path { + config_map.set("cyclone_encryption_key_path", cyclone_encyption_key_path); + } + if let Some(concurrency) = args.concurrency { + config_map.set("concurrency_limit", i64::from(concurrency)); + } + if let Some(instance_id) = args.instance_id { + config_map.set("instance_id", instance_id); + } + + config_map.set("pg.application_name", NAME); + })? 
+ .try_into() + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn verify_command() { + use clap::CommandFactory; + Args::command().debug_assert() + } +} diff --git a/bin/gobbler/src/main.rs b/bin/gobbler/src/main.rs new file mode 100644 index 0000000000..121cb07a37 --- /dev/null +++ b/bin/gobbler/src/main.rs @@ -0,0 +1,56 @@ +use color_eyre::Result; +use gobbler_server::{Config, Server}; +use telemetry_application::{ + prelude::*, start_tracing_level_signal_handler_task, ApplicationTelemetryClient, + TelemetryClient, TelemetryConfig, +}; + +mod args; + +const RT_DEFAULT_THREAD_STACK_SIZE: usize = 2 * 1024 * 1024 * 3; + +fn main() -> Result<()> { + let thread_builder = ::std::thread::Builder::new().stack_size(RT_DEFAULT_THREAD_STACK_SIZE); + let thread_handler = thread_builder.spawn(|| { + tokio::runtime::Builder::new_multi_thread() + .thread_stack_size(RT_DEFAULT_THREAD_STACK_SIZE) + .thread_name("bin/gobbler-tokio::runtime") + .enable_all() + .build()? + .block_on(async_main()) + })?; + thread_handler.join().unwrap() +} + +async fn async_main() -> Result<()> { + color_eyre::install()?; + let config = TelemetryConfig::builder() + .service_name("gobbler") + .service_namespace("si") + .log_env_var_prefix("SI") + .app_modules(vec!["gobbler", "gobbler_server"]) + .build()?; + let telemetry = telemetry_application::init(config)?; + let args = args::parse(); + + run(args, telemetry).await +} + +async fn run(args: args::Args, mut telemetry: ApplicationTelemetryClient) -> Result<()> { + if args.verbose > 0 { + telemetry.set_verbosity(args.verbose.into()).await?; + } + debug!(arguments =?args, "parsed cli arguments"); + + if args.disable_opentelemetry { + telemetry.disable_opentelemetry().await?; + } + + let config = Config::try_from(args)?; + + start_tracing_level_signal_handler_task(&telemetry)?; + + Server::from_config(config).await?.run().await?; + + Ok(()) +} diff --git a/component/postgres/BUCK b/component/postgres/BUCK index 17faacbcdd..37108fd0be 
100644 --- a/component/postgres/BUCK +++ b/component/postgres/BUCK @@ -28,7 +28,7 @@ docker_image( "--env", "POSTGRES_DB=si", "--env", - "POSTGRES_MULTIPLE_DBS=si_test,si_test_dal,si_test_sdf_server,si_auth", + "POSTGRES_MULTIPLE_DBS=si_auth,si_test,si_test_dal,si_test_sdf_server,si_test_gobbler", "--publish", "5432:5432", ], diff --git a/dev/Tiltfile b/dev/Tiltfile index f371d2f0e3..c32e6b4962 100644 --- a/dev/Tiltfile +++ b/dev/Tiltfile @@ -8,6 +8,7 @@ groups = { "nats", "otelcol", "postgres", + "rabbitmq", ], "backend": [ "council", @@ -65,7 +66,7 @@ allow_k8s_contexts(k8s_context()) # Use Docker Compose to provide the platform services docker_compose("./docker-compose.platform.yml") -compose_services = ["jaeger", "nats", "otelcol", "postgres"] +compose_services = ["jaeger", "nats", "otelcol", "postgres", "rabbitmq"] for service in compose_services: dc_resource(service, labels = ["platform"]) diff --git a/dev/docker-compose.platform.yml b/dev/docker-compose.platform.yml index 63515b3349..3f861e5a85 100644 --- a/dev/docker-compose.platform.yml +++ b/dev/docker-compose.platform.yml @@ -9,7 +9,7 @@ services: - "PGPASSWORD=bugbear" - "POSTGRES_USER=si" - "POSTGRES_DB=si" - - "POSTGRES_MULTIPLE_DBS=si_test,si_test_dal,si_test_sdf_server,si_auth,si_module_index" + - "POSTGRES_MULTIPLE_DBS=si_auth,si_module_index,si_test,si_test_dal,si_test_sdf_server,si_test_gobbler" ports: - "5432:5432" @@ -35,3 +35,11 @@ services: - "55679:55679" depends_on: - jaeger + + rabbitmq: + # FIXME(nick): use our own rabbitmq once on "main". 
+ # image: systeminit/rabbitmq:stable + image: rabbitmq:3.12-management-alpine + ports: + - "5672:5672" + - "15672:15672" diff --git a/lib/dal/BUCK b/lib/dal/BUCK index 9f60292d7b..c3f701af08 100644 --- a/lib/dal/BUCK +++ b/lib/dal/BUCK @@ -30,6 +30,7 @@ rust_library( "//third-party/rust:futures", "//third-party/rust:hex", "//third-party/rust:iftree", + "//third-party/rust:itertools", "//third-party/rust:jwt-simple", "//third-party/rust:lazy_static", "//third-party/rust:once_cell", diff --git a/lib/dal/Cargo.toml b/lib/dal/Cargo.toml index 9f27c914a1..df858e42ba 100644 --- a/lib/dal/Cargo.toml +++ b/lib/dal/Cargo.toml @@ -21,6 +21,7 @@ dyn-clone = { workspace = true } futures = { workspace = true } hex = { workspace = true } iftree = { workspace = true } +itertools = { workspace = true } jwt-simple = { workspace = true } lazy_static = { workspace = true } nats-subscriber = { path = "../../lib/nats-subscriber" } diff --git a/lib/dal/src/content.rs b/lib/dal/src/content.rs new file mode 100644 index 0000000000..2330f4a9e6 --- /dev/null +++ b/lib/dal/src/content.rs @@ -0,0 +1,6 @@ +//! This module contains all domain logic related to working with content hashes and the storage +//! of them and their corresponding values. 
+ +pub mod hash; +pub mod pair; +pub mod store; diff --git a/lib/dal/src/content/hash.rs b/lib/dal/src/content/hash.rs new file mode 100644 index 0000000000..f31f1d37a1 --- /dev/null +++ b/lib/dal/src/content/hash.rs @@ -0,0 +1,111 @@ +use std::{fmt, str::FromStr}; + +use serde::{ + de::{self, Visitor}, + Deserialize, Serialize, +}; +use serde_json::Value; +use thiserror::Error; + +#[derive(Clone, Copy, Eq, Hash, PartialEq)] +pub struct ContentHash(blake3::Hash); + +impl ContentHash { + #[must_use] + pub fn new(input: &[u8]) -> Self { + Self(blake3::hash(input)) + } + + pub fn hasher() -> ContentHasher { + ContentHasher::new() + } +} + +impl From<&Value> for ContentHash { + fn from(value: &Value) -> Self { + let input = value.to_string(); + Self::new(input.as_bytes()) + } +} + +impl Default for ContentHash { + fn default() -> Self { + Self::new("".as_bytes()) + } +} + +impl fmt::Debug for ContentHash { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!(f, "ContentHash({})", self.0) + } +} + +impl fmt::Display for ContentHash { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + self.0.fmt(f) + } +} + +impl Serialize for ContentHash { + fn serialize(&self, serializer: S) -> Result + where + S: serde::Serializer, + { + serializer.serialize_str(&self.to_string()) + } +} + +struct ContentHashVisitor; + +impl<'de> Visitor<'de> for ContentHashVisitor { + type Value = ContentHash; + + fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("a blake3 hash string") + } + + fn visit_str(self, v: &str) -> Result + where + E: de::Error, + { + ContentHash::from_str(v).map_err(|e| E::custom(e.to_string())) + } +} + +impl<'de> Deserialize<'de> for ContentHash { + fn deserialize(deserializer: D) -> Result + where + D: serde::Deserializer<'de>, + { + deserializer.deserialize_str(ContentHashVisitor) + } +} + +#[derive(Debug, Error)] +#[error("failed to parse hash hex string")] +pub struct ContentHashParseError(#[from] 
blake3::HexError); + +impl FromStr for ContentHash { + type Err = ContentHashParseError; + + fn from_str(s: &str) -> Result { + Ok(Self(blake3::Hash::from_str(s)?)) + } +} + +#[derive(Debug, Default)] +pub struct ContentHasher(blake3::Hasher); + +impl ContentHasher { + pub fn new() -> Self { + ContentHasher(blake3::Hasher::new()) + } + + pub fn update(&mut self, input: &[u8]) { + self.0.update(input); + } + + pub fn finalize(&self) -> ContentHash { + ContentHash(self.0.finalize()) + } +} diff --git a/lib/dal/src/content/pair.rs b/lib/dal/src/content/pair.rs new file mode 100644 index 0000000000..5b33008b89 --- /dev/null +++ b/lib/dal/src/content/pair.rs @@ -0,0 +1,77 @@ +use serde::{Deserialize, Serialize}; +use serde_json::Value; +use si_data_pg::PgError; +use thiserror::Error; + +use crate::content::hash::ContentHash; +use crate::{DalContext, StandardModelError, Timestamp, TransactionsError}; + +#[remain::sorted] +#[derive(Error, Debug)] +pub enum ContentPairError { + #[error("si_data_pg error: {0}")] + Pg(#[from] PgError), + #[error("serde json error: {0}")] + SerdeJson(#[from] serde_json::Error), + #[error("standard model error: {0}")] + StandardModel(#[from] StandardModelError), + #[error("transactions error: {0}")] + Transactions(#[from] TransactionsError), +} + +pub type ContentPairResult = Result; + +#[derive(Debug, Serialize, Deserialize)] +pub struct ContentPair { + #[serde(flatten)] + timestamp: Timestamp, + key: String, + value: Value, +} + +impl ContentPair { + pub async fn find_or_create( + ctx: &DalContext, + key: ContentHash, + value: Value, + ) -> ContentPairResult<(Self, bool)> { + let (pair, created): (Self, bool) = match Self::find(ctx, &key).await? { + Some(found) => (found, false), + None => { + let row = ctx + .txns() + .await? 
+ .pg() + .query_one( + "SELECT content_pair_create_v1($1) AS object", + &[&key.to_string(), &value], + ) + .await?; + let json: Value = row.try_get("object")?; + (serde_json::from_value(json)?, true) + } + }; + Ok((pair, created)) + } + + pub async fn find(ctx: &DalContext, key: &ContentHash) -> ContentPairResult> { + let maybe_row = ctx + .txns() + .await? + .pg() + .query_opt( + "SELECT * FROM content_pairs WHERE key = $1 AS object", + &[&key.to_string()], + ) + .await?; + let result = match maybe_row { + Some(found_row) => { + let json: Value = found_row.try_get("object")?; + let object: Self = serde_json::from_value(json)?; + Some(object) + } + None => None, + }; + Ok(result) + } +} diff --git a/lib/dal/src/content/store.rs b/lib/dal/src/content/store.rs new file mode 100644 index 0000000000..b48f7c41f8 --- /dev/null +++ b/lib/dal/src/content/store.rs @@ -0,0 +1,194 @@ +use serde::de::DeserializeOwned; +use serde::{Deserialize, Serialize}; +use serde_json::Value; +use std::collections::HashMap; +use thiserror::Error; + +use crate::content::hash::ContentHash; + +#[remain::sorted] +#[derive(Error, Debug)] +pub enum StoreError { + #[error("serde json error: {0}")] + SerdeJson(#[from] serde_json::Error), +} + +pub type StoreResult = Result; + +#[derive(Debug, Clone, Serialize, Deserialize)] +struct StoreItem { + value: Value, + processed: bool, +} + +#[derive(Default, Debug, Serialize, Deserialize)] +pub struct Store(HashMap); + +impl Store { + pub fn new() -> Self { + Self::default() + } + + pub fn is_empty(&self) -> bool { + self.0.is_empty() + } + + pub fn len(&self) -> usize { + self.0.len() + } + + // NOTE(nick): use local, pull through or return None. 
+ pub fn get(&self, key: &ContentHash) -> StoreResult> + where + T: DeserializeOwned, + { + let maybe_item: Option = self.0.get(key).cloned(); + let value = match maybe_item { + Some(found_item) => Some(serde_json::from_value(found_item.value)?), + None => { + // TODO(nick): either populate from database ("pull-through caching") or return None. + None + } + }; + Ok(value) + } + + // NOTE(nick): existing entries must remain immutable. + pub fn add(&mut self, value: T) -> StoreResult<(ContentHash, bool)> + where + T: Serialize + ToOwned, + { + let value = serde_json::to_value(value)?; + let hash = ContentHash::from(&value); + let already_in_store = self.0.contains_key(&hash); + if !already_in_store { + // NOTE(nick): we DO NOT check that it is in the database because it does not matter. + // We wait until write time to talk to the database. + self.0.insert( + hash, + StoreItem { + value, + processed: false, + }, + ); + } + Ok((hash, already_in_store)) + } + + // TODO(nick): actually do stuff with the database. + pub fn write(&mut self) -> StoreResult<()> { + for item in self.0.values_mut() { + if !item.processed { + // TODO(nick): perform find or create in the database. Either way, we need to + // set "processed" to true for the next time we perform a batch write. + item.processed = true; + } + } + Ok(()) + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn add() { + let mut store = Store::new(); + + // Add an item. + let sirens_value = "SIRENS".to_string(); + let (sirens_hash, already_in_store) = store.add(&sirens_value).expect("could not add item"); + assert!(!already_in_store); + + // Grab the value from the store and perform the assertion. + let found_sirens_value: String = store + .get(&sirens_hash) + .expect("could not get item") + .expect("no item found"); + assert_eq!( + sirens_value, // expected + found_sirens_value // actual + ); + assert_eq!( + 1, // expected + store.len() // actual + ); + + // Add another item. 
+        let meltdown_value = "MELTDOWN".to_string();
+        let (meltdown_hash, _) = store.add(&meltdown_value).expect("could not add item");
+        assert!(!already_in_store);
+
+        // Check both entries to ensure that nothing has drifted.
+        let found_meltdown_value: String = store
+            .get(&meltdown_hash)
+            .expect("could not get item")
+            .expect("no item found");
+        assert_eq!(
+            meltdown_value,       // expected
+            found_meltdown_value  // actual
+        );
+        let found_sirens_value: String = store
+            .get(&sirens_hash)
+            .expect("could not get item")
+            .expect("no item found");
+        assert_eq!(
+            sirens_value,       // expected
+            found_sirens_value  // actual
+        );
+        assert_eq!(
+            2,           // expected
+            store.len()  // actual
+        );
+
+        // Try to add one of the items again and check if it already exists.
+        let (second_meltdown_hash, already_in_store) =
+            store.add(&meltdown_value).expect("could not add item");
+        assert!(already_in_store);
+        assert_eq!(
+            meltdown_hash,        // expected
+            second_meltdown_hash, // actual
+        )
+    }
+
+    #[test]
+    fn write() {
+        let mut store = Store::new();
+
+        // Populate the store and then write.
+        for value in ["PARASAIL", "TELEKINESIS"] {
+            let (_, already_in_store) = store.add(value).expect("could not add item");
+            assert!(!already_in_store);
+        }
+
+        // Since purely "adding" does not involve the database, none of our entries know whether
+        // they were processed.
+        for item in store.0.values() {
+            assert!(!item.processed);
+        }
+
+        // FIXME(nick): once write actually talks to the database, this will need to move to an
+        // integration test. Check that all items have been processed.
+        store.write().expect("could not write");
+        for item in store.0.values() {
+            assert!(item.processed);
+        }
+
+        // Add another item.
+        let (utopia_hash, already_in_store) = store.add("UTOPIA").expect("could not add item");
+        assert!(!already_in_store);
+
+        // Check that only the new item has not been processed and that all other items have been
+        // processed.
+ for (hash, item) in &store.0 { + assert_eq!(hash != &utopia_hash, item.processed); + } + + // Write again and assert all items have been processed. + store.write().expect("could not write"); + for item in store.0.values() { + assert!(item.processed); + } + } +} diff --git a/lib/dal/src/lib.rs b/lib/dal/src/lib.rs index d94204c877..13dcad2a55 100644 --- a/lib/dal/src/lib.rs +++ b/lib/dal/src/lib.rs @@ -4,13 +4,20 @@ use std::path::PathBuf; use std::sync::Arc; use std::time::Duration; +use content::hash::ContentHash; use rand::Rng; use serde_with::{DeserializeFromStr, SerializeDisplay}; use si_crypto::SymmetricCryptoService; +use si_data_nats::{NatsClient, NatsError}; +use si_data_pg::{PgError, PgPool, PgPoolError}; use strum::{Display, EnumString, EnumVariantNames}; +use telemetry::prelude::*; use thiserror::Error; use tokio::time; use tokio::time::Instant; +use veritech_client::{Client, CycloneEncryptionKey}; + +use crate::builtins::SelectedTestBuiltinSchemas; pub use action::{Action, ActionError, ActionId}; pub use action_prototype::{ @@ -96,14 +103,11 @@ pub use secret::{ DecryptedSecret, EncryptedSecret, Secret, SecretAlgorithm, SecretError, SecretId, SecretPk, SecretResult, SecretVersion, }; -use si_data_nats::{NatsClient, NatsError}; -use si_data_pg::{PgError, PgPool, PgPoolError}; pub use socket::{Socket, SocketArity, SocketId}; pub use standard_model::{StandardModel, StandardModelError, StandardModelResult}; pub use status::{ StatusUpdate, StatusUpdateError, StatusUpdateResult, StatusUpdater, StatusUpdaterError, }; -use telemetry::prelude::*; pub use tenancy::{Tenancy, TenancyError}; pub use timestamp::{Timestamp, TimestampError}; pub use user::{User, UserClaim, UserError, UserPk, UserResult}; @@ -114,13 +118,12 @@ pub use validation::prototype::{ pub use validation::resolver::{ ValidationResolver, ValidationResolverError, ValidationResolverId, ValidationStatus, }; -use veritech_client::CycloneEncryptionKey; pub use visibility::{Visibility, 
VisibilityError}; pub use workspace::{Workspace, WorkspaceError, WorkspacePk, WorkspaceResult, WorkspaceSignup}; +pub use workspace_snapshot::graph::WorkspaceSnapshotGraph; +pub use workspace_snapshot::WorkspaceSnapshot; pub use ws_event::{WsEvent, WsEventError, WsEventResult, WsPayload}; -use crate::builtins::SelectedTestBuiltinSchemas; - pub mod action; pub mod action_prototype; pub mod actor_view; @@ -131,6 +134,7 @@ pub mod change_set; pub mod change_status; pub mod code_view; pub mod component; +pub mod content; pub mod context; pub mod diagram; pub mod edge; @@ -170,6 +174,7 @@ pub mod user; pub mod validation; pub mod visibility; pub mod workspace; +pub mod workspace_snapshot; pub mod ws_event; #[remain::sorted] diff --git a/lib/dal/src/migrations/U3000__workspace_snapshots.sql b/lib/dal/src/migrations/U3000__workspace_snapshots.sql new file mode 100644 index 0000000000..f6b3df958b --- /dev/null +++ b/lib/dal/src/migrations/U3000__workspace_snapshots.sql @@ -0,0 +1,18 @@ +CREATE TABLE workspace_snapshots +( + id ident NOT NULL DEFAULT ident_create_v1(), + created_at timestamp with time zone NOT NULL DEFAULT CLOCK_TIMESTAMP(), + updated_at timestamp with time zone NOT NULL DEFAULT CLOCK_TIMESTAMP(), + snapshot jsonb NOT NULL +); + +CREATE UNIQUE INDEX unique_workspace_snapshots ON workspace_snapshots (id); + +CREATE OR REPLACE FUNCTION workspace_snapshot_create_v1( + this_snapshot jsonb +) RETURNS jsonb AS +$$ + INSERT INTO workspace_snapshots (snapshot) + VALUES (this_snapshot) + RETURNING row_to_json(workspace_snapshots) AS object; +$$ LANGUAGE SQL VOLATILE; diff --git a/lib/dal/src/migrations/U3001__content_pairs.sql b/lib/dal/src/migrations/U3001__content_pairs.sql new file mode 100644 index 0000000000..71a0259459 --- /dev/null +++ b/lib/dal/src/migrations/U3001__content_pairs.sql @@ -0,0 +1,19 @@ +CREATE TABLE content_pairs +( + created_at timestamp with time zone NOT NULL DEFAULT CLOCK_TIMESTAMP(), + updated_at timestamp with time zone NOT NULL DEFAULT 
CLOCK_TIMESTAMP(), + key text NOT NULL, + value jsonb NOT NULL +); + +CREATE UNIQUE INDEX unique_content_pairs ON content_pairs (key, value); + +CREATE OR REPLACE FUNCTION content_pair_create_v1( + this_key text, + this_value jsonb +) RETURNS jsonb AS +$$ + INSERT INTO content_pairs (key, value) + VALUES (this_key, this_value) + RETURNING row_to_json(content_pairs) AS object; +$$ LANGUAGE SQL VOLATILE; diff --git a/lib/dal/src/workspace.rs b/lib/dal/src/workspace.rs index 6087669550..173fc0158b 100644 --- a/lib/dal/src/workspace.rs +++ b/lib/dal/src/workspace.rs @@ -40,6 +40,7 @@ pub enum WorkspaceError { pub type WorkspaceResult = Result; pk!(WorkspacePk); +pk!(WorkspaceId); #[derive(Deserialize, Serialize, Debug, Clone, PartialEq, Eq)] pub struct WorkspaceSignup { diff --git a/lib/dal/src/workspace_snapshot.rs b/lib/dal/src/workspace_snapshot.rs new file mode 100644 index 0000000000..79abe042c5 --- /dev/null +++ b/lib/dal/src/workspace_snapshot.rs @@ -0,0 +1,130 @@ +//! Mostly everything is a node or an edge! 
+ +// #![warn( +// missing_debug_implementations, +// missing_docs, +// unreachable_pub, +// bad_style, +// dead_code, +// improper_ctypes, +// non_shorthand_field_patterns, +// no_mangle_generic_items, +// overflowing_literals, +// path_statements, +// patterns_in_fns_without_body, +// private_in_public, +// unconditional_recursion, +// unused, +// unused_allocation, +// unused_comparisons, +// unused_parens, +// while_true, +// clippy::missing_panics_doc +// )] + +pub mod change_set; +pub mod conflict; +pub mod edge_weight; +pub mod graph; +pub mod lamport_clock; +pub mod node_weight; +pub mod update; +pub mod vector_clock; + +use petgraph::prelude::*; +use serde::{Deserialize, Serialize}; +use serde_json::Value; +use si_data_pg::PgError; +use thiserror::Error; +use ulid::Ulid; + +use crate::{ + workspace_snapshot::{graph::WorkspaceSnapshotGraphError, node_weight::NodeWeightError}, + DalContext, StandardModelError, Timestamp, TransactionsError, WorkspaceSnapshotGraph, +}; +use change_set::{ChangeSet, ChangeSetError, ChangeSetId}; + +#[remain::sorted] +#[derive(Error, Debug)] +pub enum WorkspaceSnapshotError { + #[error("monotonic error: {0}")] + Monotonic(#[from] ulid::MonotonicError), + #[error("NodeWeight error: {0}")] + NodeWeight(#[from] NodeWeightError), + #[error("si_data_pg error: {0}")] + Pg(#[from] PgError), + #[error("poison error: {0}")] + Poison(String), + #[error("serde json error: {0}")] + SerdeJson(#[from] serde_json::Error), + #[error("standard model error: {0}")] + StandardModel(#[from] StandardModelError), + #[error("transactions error: {0}")] + Transactions(#[from] TransactionsError), + #[error("WorkspaceSnapshotGraph error: {0}")] + WorkspaceSnapshotGraph(#[from] WorkspaceSnapshotGraphError), + #[error("workspace snapshot graph missing")] + WorkspaceSnapshotGraphMissing, +} + +pub type WorkspaceSnapshotResult = Result; + +pub type WorkspaceSnapshotId = Ulid; + +#[derive(Debug, Serialize, Deserialize)] +pub struct WorkspaceSnapshot { + id: 
WorkspaceSnapshotId, + #[serde(flatten)] + timestamp: Timestamp, + snapshot: Value, + #[serde(skip_serializing)] + working_copy: Option, +} + +impl WorkspaceSnapshot { + pub async fn new(ctx: &DalContext, change_set: &ChangeSet) -> WorkspaceSnapshotResult { + let snapshot = WorkspaceSnapshotGraph::new(change_set)?; + let serialized_snapshot = serde_json::to_value(&snapshot)?; + + let row = ctx + .txns() + .await? + .pg() + .query_one( + "SELECT workspace_snapshot_create_v1($1) AS object", + &[&serialized_snapshot], + ) + .await?; + let json: Value = row.try_get("object")?; + let object: WorkspaceSnapshot = serde_json::from_value(json)?; + Ok(object) + } + + pub async fn write(mut self, ctx: &DalContext) -> WorkspaceSnapshotResult { + let working_copy = self.working_copy()?; + working_copy.cleanup(); + + let serialized_snapshot = serde_json::to_value(working_copy.clone())?; + let row = ctx + .txns() + .await? + .pg() + .query_one( + "SELECT workspace_snapshot_create_v1($1) AS object", + &[&serialized_snapshot], + ) + .await?; + let json: Value = row.try_get("object")?; + let object: WorkspaceSnapshot = serde_json::from_value(json)?; + Ok(object) + } + + fn working_copy(&mut self) -> WorkspaceSnapshotResult<&mut WorkspaceSnapshotGraph> { + if self.working_copy.is_none() { + self.working_copy = Some(serde_json::from_value(self.snapshot.clone())?); + } + self.working_copy + .as_mut() + .ok_or(WorkspaceSnapshotError::WorkspaceSnapshotGraphMissing) + } +} diff --git a/lib/dal/src/workspace_snapshot/change_set.rs b/lib/dal/src/workspace_snapshot/change_set.rs new file mode 100644 index 0000000000..9ea9b13123 --- /dev/null +++ b/lib/dal/src/workspace_snapshot/change_set.rs @@ -0,0 +1,54 @@ +use std::sync::{Arc, Mutex}; + +use serde::{Deserialize, Serialize}; +use thiserror::Error; +use ulid::{Generator, Ulid}; + +#[derive(Debug, Error)] +pub enum ChangeSetError { + #[error("Mutex error: {0}")] + Mutex(String), + #[error("Ulid Monotonic Error: {0}")] + Monotonic(#[from] 
ulid::MonotonicError), +} + +pub type ChangeSetResult = Result; + +// FIXME(nick): remove this in favor of the real one. +pub type ChangeSetId = Ulid; + +// FIXME(nick): remove this in favor of the real one. +#[derive(Clone, Serialize, Deserialize)] +pub struct ChangeSet { + pub id: ChangeSetId, + #[serde(skip)] + pub generator: Arc>, +} + +impl ChangeSet { + pub fn new() -> ChangeSetResult { + let mut generator = Generator::new(); + let id = generator.generate()?; + + Ok(Self { + id, + generator: Arc::new(Mutex::new(generator)), + }) + } + + pub fn generate_ulid(&self) -> ChangeSetResult { + self.generator + .lock() + .map_err(|e| ChangeSetError::Mutex(e.to_string()))? + .generate() + .map_err(Into::into) + } +} + +impl std::fmt::Debug for ChangeSet { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + f.debug_struct("ChangeSet") + .field("id", &self.id.to_string()) + .finish() + } +} diff --git a/lib/dal/src/workspace_snapshot/conflict.rs b/lib/dal/src/workspace_snapshot/conflict.rs new file mode 100644 index 0000000000..b4c97059b8 --- /dev/null +++ b/lib/dal/src/workspace_snapshot/conflict.rs @@ -0,0 +1,30 @@ +use petgraph::stable_graph::NodeIndex; + +/// Describe the type of conflict between the given locations in a +/// workspace graph. +#[remain::sorted] +#[derive(Debug, Copy, Clone, PartialEq, Eq)] +pub enum Conflict { + ChildOrder { + ours: NodeIndex, + theirs: NodeIndex, + }, + ModifyRemovedItem(NodeIndex), + NodeContent { + to_rebase: NodeIndex, + onto: NodeIndex, + }, + RemoveModifiedItem { + container: NodeIndex, + removed_item: NodeIndex, + }, +} + +/// The [`NodeIndex`] of the location in the graph where a conflict occurs. +#[derive(Debug, Copy, Clone)] +pub struct ConflictLocation { + /// The location of the conflict in the "base" graph of the merge. + pub onto: NodeIndex, + /// The location of the conflict in the graph that is attempting to be merged into "base". 
+ pub to_rebase: NodeIndex, +} diff --git a/lib/dal/src/workspace_snapshot/edge_weight.rs b/lib/dal/src/workspace_snapshot/edge_weight.rs new file mode 100644 index 0000000000..c3e17c0b04 --- /dev/null +++ b/lib/dal/src/workspace_snapshot/edge_weight.rs @@ -0,0 +1,73 @@ +//! Edges + +use serde::{Deserialize, Serialize}; +use thiserror::Error; + +use crate::workspace_snapshot::{ + change_set::ChangeSet, + vector_clock::{VectorClock, VectorClockError}, +}; + +#[derive(Debug, Error)] +pub enum EdgeWeightError { + #[error("Vector Clock error: {0}")] + VectorClock(#[from] VectorClockError), +} + +pub type EdgeWeightResult = Result; + +#[derive(Default, Debug, Serialize, Deserialize, Clone, Copy, PartialEq, Eq, Hash)] +pub enum EdgeWeightKind { + /// Used to record the order that the elements of a container should be presented in. + Ordering, + /// Workspaces "use" functions, modules, schemas. Schemas "use" schema variants. + /// Schema variants "use" props. Props "use" functions, and other props. Modules + /// "use" functions, schemas, and eventually(?) components. 
+ #[default] + Uses, +} + +#[derive(Debug, Serialize, Deserialize, Clone, PartialEq, Eq)] +pub struct EdgeWeight { + kind: EdgeWeightKind, + vector_clock_first_seen: VectorClock, + vector_clock_write: VectorClock, +} + +impl EdgeWeight { + pub fn increment_vector_clocks(&mut self, change_set: &ChangeSet) -> EdgeWeightResult<()> { + self.vector_clock_write.inc(change_set)?; + + Ok(()) + } + + pub fn kind(&self) -> EdgeWeightKind { + self.kind + } + + pub fn new(change_set: &ChangeSet, kind: EdgeWeightKind) -> EdgeWeightResult { + Ok(Self { + kind, + vector_clock_first_seen: VectorClock::new(change_set)?, + vector_clock_write: VectorClock::new(change_set)?, + }) + } + + pub fn new_with_incremented_vector_clocks( + &self, + change_set: &ChangeSet, + ) -> EdgeWeightResult { + let mut new_weight = self.clone(); + new_weight.increment_vector_clocks(change_set)?; + + Ok(new_weight) + } + + pub fn vector_clock_first_seen(&self) -> &VectorClock { + &self.vector_clock_first_seen + } + + pub fn vector_clock_write(&self) -> &VectorClock { + &self.vector_clock_write + } +} diff --git a/lib/dal/src/workspace_snapshot/graph.rs b/lib/dal/src/workspace_snapshot/graph.rs new file mode 100644 index 0000000000..8571c41b99 --- /dev/null +++ b/lib/dal/src/workspace_snapshot/graph.rs @@ -0,0 +1,2204 @@ +use petgraph::visit::NodeCount; +use petgraph::{algo, prelude::*, visit::DfsEvent}; +use serde::{Deserialize, Serialize}; +use std::collections::{HashMap, HashSet}; +use telemetry::prelude::*; +use thiserror::Error; +use ulid::Ulid; + +use crate::{ + workspace_snapshot::{ + change_set::{ChangeSet, ChangeSetError}, + conflict::Conflict, + edge_weight::{EdgeWeight, EdgeWeightError, EdgeWeightKind}, + node_weight::{ContentAddress, NodeWeight, NodeWeightError}, + update::Update, + }, + ContentHash, +}; + +#[allow(clippy::large_enum_variant)] +#[remain::sorted] +#[derive(Debug, Error)] +pub enum WorkspaceSnapshotGraphError { + #[error("Cannot compare ordering of container elements between 
ordered, and un-ordered container: {0:?}, {1:?}")] + CannotCompareOrderedAndUnorderedContainers(NodeIndex, NodeIndex), + #[error("ChangeSet error: {0}")] + ChangeSet(#[from] ChangeSetError), + #[error("Action would create a graph cycle")] + CreateGraphCycle, + #[error("EdgeWeight error: {0}")] + EdgeWeight(#[from] EdgeWeightError), + #[error("EdgeWeight not found")] + EdgeWeightNotFound, + #[error("Problem during graph traversal: {0:?}")] + GraphTraversal(petgraph::visit::DfsEvent), + #[error("Incompatible node types")] + IncompatibleNodeTypes, + #[error("NodeWeight error: {0}")] + NodeWeight(#[from] NodeWeightError), + #[error("node weight not found")] + NodeWeightNotFound, + #[error("Node with ID {0} not found")] + NodeWithIdNotFound(Ulid), + #[error("NodeIndex has too many Ordering children: {0:?}")] + TooManyOrderingForNode(NodeIndex), + #[error("Workspace Snapshot has conflicts and must be rebased")] + WorkspaceNeedsRebase, + #[error("Workspace Snapshot has conflicts")] + WorkspacesConflict, +} + +pub type WorkspaceSnapshotGraphResult = Result; + +#[derive(Default, Deserialize, Serialize, Clone)] +pub struct WorkspaceSnapshotGraph { + graph: StableDiGraph, + root_index: NodeIndex, +} + +impl std::fmt::Debug for WorkspaceSnapshotGraph { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + f.debug_struct("WorkspaceSnapshotGraph") + .field("root_index", &self.root_index) + .field("graph", &self.graph) + .finish() + } +} + +impl WorkspaceSnapshotGraph { + pub fn new(change_set: &ChangeSet) -> WorkspaceSnapshotGraphResult { + let mut graph: StableDiGraph = StableDiGraph::with_capacity(1, 0); + let root_index = graph.add_node(NodeWeight::new_content( + change_set, + change_set.generate_ulid()?, + ContentAddress::Root, + )?); + + Ok(Self { root_index, graph }) + } + + pub fn node_count(&self) -> usize { + self.graph.node_count() + } + + pub fn add_edge( + &mut self, + change_set: &ChangeSet, + from_node_index: NodeIndex, + mut edge_weight: 
EdgeWeight, + to_node_index: NodeIndex, + ) -> WorkspaceSnapshotGraphResult { + // Temporarily add the edge to the existing tree to see if it would create a cycle. + let temp_edge = self + .graph + .update_edge(from_node_index, to_node_index, edge_weight.clone()); + let would_create_a_cycle = !self.is_acyclic_directed(); + self.graph.remove_edge(temp_edge); + if would_create_a_cycle { + return Err(WorkspaceSnapshotGraphError::CreateGraphCycle); + } + + // Ensure the vector clocks of the edge are up-to-date. + edge_weight.increment_vector_clocks(change_set)?; + + // Because outgoing edges are part of a node's identity, we create a new "from" node + // as we are effectively writing to that node (we'll need to update the merkle tree + // hash), and everything in the graph should be treated as copy-on-write. + let new_from_node_index = self.copy_node_index(change_set, from_node_index)?; + + // Add the new edge to the new version of the "from" node. + let new_edge_index = + self.graph + .update_edge(new_from_node_index, to_node_index, edge_weight); + self.update_merkle_tree_hash(new_from_node_index)?; + + // Update the rest of the graph to reflect the new node/edge. + self.replace_references(change_set, from_node_index, new_from_node_index)?; + + Ok(new_edge_index) + } + + fn add_node(&mut self, node: NodeWeight) -> WorkspaceSnapshotGraphResult { + let new_node_index = self.graph.add_node(node); + self.update_merkle_tree_hash(new_node_index)?; + + Ok(new_node_index) + } + + pub fn cleanup(&mut self) { + self.graph.retain_nodes(|frozen_graph, current_node| { + // We cannot use "has_path_to_root" because we need to use the Frozen>. + algo::has_path_connecting(&*frozen_graph, self.root_index, current_node, None) + }); + } + + fn copy_node_index( + &mut self, + change_set: &ChangeSet, + node_index_to_copy: NodeIndex, + ) -> WorkspaceSnapshotGraphResult { + let new_node_index = self.graph.add_node( + self.get_node_weight(node_index_to_copy)? 
+ .new_with_incremented_vector_clock(change_set)?, + ); + + Ok(new_node_index) + } + + fn detect_conflicts_and_updates( + &self, + to_rebase_change_set: &ChangeSet, + onto: &WorkspaceSnapshotGraph, + onto_change_set: &ChangeSet, + ) -> WorkspaceSnapshotGraphResult<(Vec, Vec)> { + let mut conflicts: Vec = Vec::new(); + let mut updates: Vec = Vec::new(); + if let Err(traversal_error) = + petgraph::visit::depth_first_search(&onto.graph, Some(onto.root_index), |event| { + self.detect_conflicts_and_updates_process_dfs_event( + to_rebase_change_set, + onto, + onto_change_set, + event, + &mut conflicts, + &mut updates, + ) + }) + { + return Err(WorkspaceSnapshotGraphError::GraphTraversal(traversal_error)); + }; + + Ok((conflicts, updates)) + } + + fn detect_conflicts_and_updates_process_dfs_event( + &self, + to_rebase_change_set: &ChangeSet, + onto: &WorkspaceSnapshotGraph, + onto_change_set: &ChangeSet, + event: DfsEvent, + conflicts: &mut Vec, + updates: &mut Vec, + ) -> Result, petgraph::visit::DfsEvent> { + match event { + DfsEvent::Discover(onto_node_index, _) => { + let onto_node_weight = onto.get_node_weight(onto_node_index).map_err(|err| { + error!( + "Unable to get NodeWeight for onto NodeIndex {:?}: {}", + onto_node_index, err, + ); + event + })?; + let mut to_rebase_node_indexes = Vec::new(); + if let NodeWeight::Content(onto_content_weight) = onto_node_weight { + if onto_content_weight.content_address() == ContentAddress::Root { + // There can only be one (valid/current) `ContentAddress::Root` at any + // given moment, and the `lineage_id` isn't really relevant as it's not + // globally stable (even though it is locally stable). This matters as we + // may be dealing with a `WorkspaceSnapshotGraph` that is coming to us + // externally from a module that we're attempting to import. The external + // `WorkspaceSnapshotGraph` will be `self`, and the "local" one will be + // `onto`. 
+ to_rebase_node_indexes.push(self.root_index); + } else { + to_rebase_node_indexes.extend( + self.get_node_index_by_lineage(onto_node_weight.lineage_id()) + .map_err(|err| { + error!( + "Unable to find NodeIndex(es) for lineage_id {}: {}", + onto_node_weight.lineage_id(), + err, + ); + event + })?, + ); + } + } + + // We'll lazily populate these, since we don't know if we'll need it at all, and + // we definitely don't want to be re-fetching this information inside the loop + // below, as it will be identical every time. + let mut onto_edges = None; + let mut onto_ordering_node_index = None; + let mut onto_order_set: Option> = None; + + // If everything with the same `lineage_id` is identical, then we can prune the + // graph traversal, and avoid unnecessary lookups/comparisons. + let mut any_content_with_lineage_has_changed = false; + + for to_rebase_node_index in to_rebase_node_indexes { + let to_rebase_node_weight = + self.get_node_weight(to_rebase_node_index).map_err(|err| { + error!( + "Unable to get to_rebase NodeWeight for NodeIndex {:?}: {}", + to_rebase_node_index, err, + ); + event + })?; + + if onto_node_weight.merkle_tree_hash() + == to_rebase_node_weight.merkle_tree_hash() + { + // If the merkle tree hashes are the same, then the entire sub-graph is + // identical, and we don't need to check any further. + continue; + } + any_content_with_lineage_has_changed = true; + + // Check if there's a difference in the node itself (and whether it is a + // conflict if there is a difference). + if onto_node_weight.content_hash() != to_rebase_node_weight.content_hash() { + if to_rebase_node_weight + .vector_clock_write() + .is_newer_than(onto_node_weight.vector_clock_write()) + { + // The existing node (`to_rebase`) has changes, but has already seen + // all of the changes in `onto`. There is no conflict, and there is + // nothing to update. 
+ } else if onto_node_weight + .vector_clock_write() + .is_newer_than(to_rebase_node_weight.vector_clock_write()) + { + // `onto` has changes, but has already seen all of the changes in + // `to_rebase`. There is no conflict, and we should update to use the + // `onto` node. + updates.push(Update::ReplaceSubgraph { + new: onto_node_index, + old: to_rebase_node_index, + }); + } else { + // There are changes on both sides that have not been seen by the other + // side; this is a conflict. There may also be other conflicts in the + // outgoing relationships, the downstream nodes, or both. + conflicts.push(Conflict::NodeContent { + to_rebase: to_rebase_node_index, + onto: onto_node_index, + }); + } + } + + if onto_ordering_node_index.is_none() { + let onto_ordering_node_indexes = + ordering_node_indexes_for_node_index(onto, onto_node_index); + if onto_ordering_node_indexes.len() > 1 { + error!( + "Too many ordering nodes found for onto NodeIndex {:?}", + onto_node_index + ); + return Err(event); + } + onto_ordering_node_index = onto_ordering_node_indexes.get(0).copied(); + } + let to_rebase_ordering_node_indexes = + ordering_node_indexes_for_node_index(self, to_rebase_node_index); + if to_rebase_ordering_node_indexes.len() > 1 { + error!( + "Too many ordering nodes found for to_rebase NodeIndex {:?}", + to_rebase_node_index + ); + return Err(event); + } + let to_rebase_ordering_node_index = + to_rebase_ordering_node_indexes.get(0).copied(); + + match (to_rebase_ordering_node_index, onto_ordering_node_index) { + (None, None) => { + // Neither is ordered. The potential conflict could be because one + // or more elements changed, because elements were added/removed, + // or a combination of these. + // + // We need to check for all of these using the outgoing edges from + // the containers, since we can't rely on an ordering child to + // contain all the information to determine ordering/addition/removal. 
+ // + // Eventually, this will only happen on the root node itself, since + // Objects, Maps, and Arrays should all have an ordering, for at + // least display purposes. + warn!( + "Found what appears to be two unordered containers: onto {:?}, to_rebase {:?}", + onto_node_index, to_rebase_node_index, + ); + println!( + "Comparing unordered containers: {:?}, {:?}", + onto_node_index, to_rebase_node_index + ); + + let onto_edges = onto_edges.get_or_insert_with(|| { + onto.graph.edges_directed(onto_node_index, Outgoing) + }); + let to_rebase_edges = + self.graph.edges_directed(to_rebase_node_index, Outgoing); + + let (container_conflicts, container_updates) = self + .find_unordered_container_membership_conflicts_and_updates( + to_rebase_change_set, + to_rebase_node_index, + onto, + onto_change_set, + onto_node_index, + ) + .map_err(|err| { + error!("Unable to find unordered container membership conflicts and updates for onto container NodeIndex {:?} and to_rebase container NodeIndex {:?}: {}", onto_node_index, to_rebase_node_index, err); + event + })?; + + updates.extend(container_updates); + conflicts.extend(container_conflicts); + } + (None, Some(_)) | (Some(_), None) => { + // We're trying to compare an ordered container with an unordered one, + // which isn't something that logically makes sense, so we've likely + // started comparing incompatible things. + warn!( + "Attempting to compare an ordered, and an unordered container: onto {:?}, to_rebase {:?}", + onto_node_index, to_rebase_node_index, + ); + return Err(event); + } + (Some(to_rebase_ordering_node_index), Some(onto_ordering_node_index)) => { + println!( + "Comparing ordered containers: {:?}, {:?}", + onto_node_index, to_rebase_node_index + ); + if onto_order_set.is_none() { + if let NodeWeight::Ordering(onto_order_weight) = onto + .get_node_weight(onto_ordering_node_index) + .map_err(|_| event)? 
+ { + onto_order_set = + Some(onto_order_weight.order().iter().copied().collect()); + }; + } + let (container_conflicts, container_updates) = self + .find_ordered_container_membership_conflicts_and_updates( + to_rebase_change_set, + to_rebase_node_index, + to_rebase_ordering_node_index, + onto, + onto_change_set, + onto_node_index, + onto_ordering_node_index, + ) + .map_err(|_| event)?; + + updates.extend(container_updates); + conflicts.extend(container_conflicts); + + return Ok(petgraph::visit::Control::Continue); + } + } + } + + if any_content_with_lineage_has_changed { + // There was at least one thing with a merkle tree hash difference, so we need + // to examine further down the tree to see where the difference(s) are, and + // where there are conflicts, if there are any. + return Ok(petgraph::visit::Control::Continue); + } else { + // Everything to be rebased is identical, so there's no need to examine the + // rest of the tree looking for differences & conflicts that won't be there. + return Ok(petgraph::visit::Control::Prune); + } + } + DfsEvent::TreeEdge(_, _) + | DfsEvent::BackEdge(_, _) + | DfsEvent::CrossForwardEdge(_, _) + | DfsEvent::Finish(_, _) => { + // These events are all ignored, since we handle looking at edges as we encounter + // the node(s) the edges are coming from (Outgoing edges). + return Ok(petgraph::visit::Control::Continue); + } + } + } + + fn dot(&self) { + // NOTE(nick): copy the output and execute this on macOS. It will create a file in the + // process and open a new tab in your browser. 
+ // ``` + // pbpaste | dot -Tsvg -o foo.svg && open foo.svg + // ``` + let current_root_weight = self.get_node_weight(self.root_index).unwrap(); + println!( + "Root Node Weight: {current_root_weight:?}\n{:?}", + petgraph::dot::Dot::with_config(&self.graph, &[petgraph::dot::Config::EdgeNoLabel]) + ); + } + + pub fn update_content( + &mut self, + change_set: &ChangeSet, + id: Ulid, + new_content_hash: ContentHash, + ) -> WorkspaceSnapshotGraphResult<()> { + let original_node_index = self.get_node_index_by_id(id)?; + let new_node_index = self.copy_node_index(change_set, original_node_index)?; + let node_weight = self.get_node_weight_mut(new_node_index)?; + node_weight.new_content_hash(new_content_hash)?; + + self.replace_references(change_set, original_node_index, new_node_index) + } + + fn find_ordered_container_membership_conflicts_and_updates( + &self, + to_rebase_change_set: &ChangeSet, + to_rebase_container_index: NodeIndex, + to_rebase_ordering_index: NodeIndex, + onto: &WorkspaceSnapshotGraph, + onto_change_set: &ChangeSet, + onto_container_index: NodeIndex, + onto_ordering_index: NodeIndex, + ) -> WorkspaceSnapshotGraphResult<(Vec, Vec)> { + let mut updates = Vec::new(); + let mut conflicts = Vec::new(); + + let onto_ordering = match onto.get_node_weight(onto_ordering_index)? { + NodeWeight::Ordering(ordering) => ordering, + _ => return Err(WorkspaceSnapshotGraphError::IncompatibleNodeTypes), + }; + let to_rebase_ordering = match self.get_node_weight(to_rebase_ordering_index)? { + NodeWeight::Ordering(ordering) => ordering, + _ => return Err(WorkspaceSnapshotGraphError::IncompatibleNodeTypes), + }; + + if onto_ordering.order() == to_rebase_ordering.order() { + // Both contain the same items, in the same order. No conflicts, and nothing + // to update. 
+ return Ok((conflicts, updates)); + } else if onto_ordering + .vector_clock_write() + .is_newer_than(to_rebase_ordering.vector_clock_write()) + { + let onto_ordering_set: HashSet = onto_ordering.order().iter().copied().collect(); + let to_rebase_ordering_set: HashSet = + to_rebase_ordering.order().iter().copied().collect(); + let new_items: HashSet = onto_ordering_set + .difference(&to_rebase_ordering_set) + .copied() + .collect(); + let removed_items: HashSet = to_rebase_ordering_set + .difference(&onto_ordering_set) + .copied() + .collect(); + + // Find which `other` container items have the new ordering IDs so we can add edges + // from the `to_rebase` container to them (and create them in `to_rebase` if they don't + // already exist). + for onto_container_item_index in onto + .graph + .neighbors_directed(onto_container_index, Outgoing) + { + let onto_container_item_weight = onto.get_node_weight(onto_container_item_index)?; + if new_items.contains(&onto_container_item_weight.id()) { + for edge in onto + .graph + .edges_connecting(onto_container_index, onto_container_item_index) + { + updates.push(Update::NewEdge { + source: to_rebase_container_index, + destination: onto_container_item_index, + edge_weight: edge.weight().clone(), + }); + } + } + } + + // Remove the edges from the `to_rebase` container to the items removed in `onto`. We + // don't need to worry about removing the items themselves as they will be garbage + // collected when we drop all items that are not reachable from `to_rebase.root_index` + // if they are no longer referenced by anything. 
+ for to_rebase_container_item_index in self + .graph + .neighbors_directed(to_rebase_container_index, Outgoing) + { + let to_rebase_container_item_weight = + self.get_node_weight(to_rebase_container_item_index)?; + if removed_items.contains(&to_rebase_container_item_weight.id()) { + for edge in self + .graph + .edges_connecting(to_rebase_container_index, to_rebase_container_item_index) + { + updates.push(Update::RemoveEdge(edge.id())); + } + } + } + + // Use the ordering from `other` in `to_rebase`. + updates.push(Update::ReplaceSubgraph { + new: onto_ordering_index, + old: to_rebase_ordering_index, + }); + } else if to_rebase_ordering + .vector_clock_write() + .is_newer_than(onto_ordering.vector_clock_write()) + { + // We already have everything in `onto` as part of `to_rebase`. Nothing needs + // updating, and there are no conflicts. + } else { + // Both `onto` and `to_rebase` have changes that the other has not incorporated. We + // need to find out what the changes are to see what needs to be updated, and what + // conflicts. 
+ let onto_ordering_set: HashSet = onto_ordering.order().iter().copied().collect(); + let to_rebase_ordering_set: HashSet = + to_rebase_ordering.order().iter().copied().collect(); + let only_onto_items: HashSet = onto_ordering_set + .difference(&to_rebase_ordering_set) + .copied() + .collect(); + let only_to_rebase_items: HashSet = to_rebase_ordering_set + .difference(&onto_ordering_set) + .copied() + .collect(); + + let mut only_to_rebase_item_indexes = HashMap::new(); + for to_rebase_edgeref in self + .graph + .edges_directed(to_rebase_container_index, Outgoing) + { + let dest_node_weight = self.get_node_weight(to_rebase_edgeref.target())?; + if only_to_rebase_items.contains(&dest_node_weight.id()) { + only_to_rebase_item_indexes + .insert(dest_node_weight.id(), to_rebase_edgeref.target()); + } + } + + for only_to_rebase_item in only_to_rebase_items { + let only_to_rebase_item_index = *only_to_rebase_item_indexes + .get(&only_to_rebase_item) + .ok_or(WorkspaceSnapshotGraphError::NodeWithIdNotFound( + only_to_rebase_item, + ))?; + for to_rebase_edgeref in self + .graph + .edges_connecting(to_rebase_container_index, only_to_rebase_item_index) + { + if to_rebase_edgeref + .weight() + .vector_clock_first_seen() + .entry_for(onto_change_set) + .is_none() + { + // `only_to_rebase_item` is new: Edge in `to_rebase` does not have a "First Seen" for `onto`. + } else if self + .get_node_weight(only_to_rebase_item_index)? + .vector_clock_write() + .entry_for(to_rebase_change_set) + .is_some() + { + // Entry was deleted in `onto`. If we have also modified the entry, then + // there's a conflict. + conflicts.push(Conflict::ModifyRemovedItem(only_to_rebase_item_index)); + } else { + // Entry was deleted in `onto`, and has not been modified in `to_rebase`: + // Remove the edge. 
+ updates.push(Update::RemoveEdge(to_rebase_edgeref.id())); + } + } + } + + let mut only_onto_item_indexes = HashMap::new(); + for onto_edgeref in onto.graph.edges_directed(onto_container_index, Outgoing) { + let dest_node_weight = onto.get_node_weight(onto_edgeref.target())?; + if only_onto_items.contains(&dest_node_weight.id()) { + only_onto_item_indexes.insert(dest_node_weight.id(), onto_edgeref.target()); + } + } + + let onto_root_seen_as_of = self + .get_node_weight(self.root_index)? + .vector_clock_recently_seen() + .entry_for(onto_change_set); + for only_onto_item in only_onto_items { + let only_onto_item_index = *only_onto_item_indexes.get(&only_onto_item).ok_or( + WorkspaceSnapshotGraphError::NodeWithIdNotFound(only_onto_item), + )?; + for onto_edgeref in onto + .graph + .edges_connecting(onto_container_index, only_onto_item_index) + { + // `only_onto_item` is new: + // - "First seen" of edge for `onto` > "Seen As Of" on root for `onto` in + // `to_rebase`. + if let Some(onto_first_seen) = onto_edgeref + .weight() + .vector_clock_first_seen() + .entry_for(onto_change_set) + { + if let Some(root_seen_as_of) = onto_root_seen_as_of { + if onto_first_seen > root_seen_as_of { + // The edge for the item was created more recently than the last + // state we knew of from `onto`, which means that the item is + // "new". We can't have removed something that we didn't know + // existed in the first place. 
+ updates.push(Update::NewEdge { + source: to_rebase_container_index, + destination: onto_edgeref.target(), + edge_weight: onto_edgeref.weight().clone(), + }); + } + } + } else if let Some(onto_item_node_weight) = + onto.get_node_weight(only_onto_item_index).ok() + { + if let Some(root_seen_as_of) = onto_root_seen_as_of { + if onto_item_node_weight + .vector_clock_write() + .has_entries_newer_than(root_seen_as_of) + { + // The item removed in `to_rebase` has been modified in `onto` + // since we last knew the state of `onto`: This is a conflict, as + // we don't know if the removal is still intended given the new + // state of the item. + conflicts.push(Conflict::RemoveModifiedItem { + container: to_rebase_container_index, + removed_item: only_onto_item_index, + }); + } + } + } + } + } + } + + Ok((conflicts, updates)) + } + + fn find_unordered_container_membership_conflicts_and_updates( + &self, + to_rebase_change_set: &ChangeSet, + to_rebase_container_index: NodeIndex, + onto: &WorkspaceSnapshotGraph, + onto_change_set: &ChangeSet, + onto_container_index: NodeIndex, + ) -> WorkspaceSnapshotGraphResult<(Vec, Vec)> { + #[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)] + struct UniqueEdgeInfo { + pub kind: EdgeWeightKind, + pub target_lineage: Ulid, + } + + #[derive(Debug, Copy, Clone)] + struct EdgeInfo { + pub target_node_index: NodeIndex, + pub edge_index: EdgeIndex, + } + + let mut updates = Vec::new(); + let mut conflicts = Vec::new(); + + let mut to_rebase_edges = HashMap::::new(); + for edgeref in self + .graph + .edges_directed(to_rebase_container_index, Outgoing) + { + let target_node_weight = self.get_node_weight(edgeref.target())?; + to_rebase_edges.insert( + UniqueEdgeInfo { + kind: edgeref.weight().kind(), + target_lineage: target_node_weight.lineage_id(), + }, + EdgeInfo { + target_node_index: edgeref.target(), + edge_index: edgeref.id(), + }, + ); + } + + let mut onto_edges = HashMap::::new(); + for edgeref in 
onto.graph.edges_directed(onto_container_index, Outgoing) { + let target_node_weight = onto.get_node_weight(edgeref.target())?; + onto_edges.insert( + UniqueEdgeInfo { + kind: edgeref.weight().kind(), + target_lineage: target_node_weight.lineage_id(), + }, + EdgeInfo { + target_node_index: edgeref.target(), + edge_index: edgeref.id(), + }, + ); + } + + let only_to_rebase_edges = { + let mut unique_edges = to_rebase_edges.clone(); + for key in onto_edges.keys() { + unique_edges.remove(key); + } + unique_edges + }; + let only_onto_edges = { + let mut unique_edges = onto_edges.clone(); + for key in to_rebase_edges.keys() { + unique_edges.remove(key); + } + unique_edges + }; + + let root_seen_as_of_onto = self + .get_node_weight(self.root_index)? + .vector_clock_recently_seen() + .entry_for(onto_change_set); + for only_to_rebase_edge_info in only_to_rebase_edges.values() { + let to_rebase_edge_weight = self + .graph + .edge_weight(only_to_rebase_edge_info.edge_index) + .ok_or(WorkspaceSnapshotGraphError::EdgeWeightNotFound)?; + let to_rebase_item_weight = + self.get_node_weight(only_to_rebase_edge_info.target_node_index)?; + + // If `onto` has never seen this edge, then it's new, and there are no conflicts, and + // no updates. 
+ if to_rebase_edge_weight + .vector_clock_first_seen() + .entry_for(onto_change_set) + .is_some() + { + if to_rebase_item_weight + .vector_clock_write() + .entry_for(to_rebase_change_set) + > root_seen_as_of_onto + { + // Edge has been modified in `onto` (`onto` item write vector clock > "seen as + // of" for `onto` entry in `to_rebase` root): Conflict (ModifyRemovedItem) + conflicts.push(Conflict::ModifyRemovedItem( + only_to_rebase_edge_info.target_node_index, + )) + } else { + // Item not modified & removed by `onto`: No conflict; Update::RemoveEdge + updates.push(Update::RemoveEdge(only_to_rebase_edge_info.edge_index)); + } + } + } + + // - Items unique to `onto`: + for only_onto_edge_info in only_onto_edges.values() { + let onto_edge_weight = onto + .graph + .edge_weight(only_onto_edge_info.edge_index) + .ok_or(WorkspaceSnapshotGraphError::EdgeWeightNotFound)?; + let onto_item_weight = onto.get_node_weight(only_onto_edge_info.target_node_index)?; + + if let Some(onto_first_seen) = onto_edge_weight + .vector_clock_first_seen() + .entry_for(onto_change_set) + { + if let Some(root_seen_as_of) = root_seen_as_of_onto { + if onto_first_seen > root_seen_as_of { + // Edge first seen by `onto` > "seen as of" on `to_rebase` graph for `onto`'s entry on + // root node: Item is new. 
+ updates.push(Update::NewEdge { + source: to_rebase_container_index, + destination: only_onto_edge_info.target_node_index, + edge_weight: onto_edge_weight.clone(), + }); + } + } + } else if let Some(root_seen_as_of) = root_seen_as_of_onto { + if onto_item_weight + .vector_clock_write() + .has_entries_newer_than(root_seen_as_of) + { + // Item write vector clock has entries > "seen as of" on `to_rebase` graph for + // `onto`'s entry on root node: Conflict (RemoveModifiedItem) + conflicts.push(Conflict::RemoveModifiedItem { + container: to_rebase_container_index, + removed_item: only_onto_edge_info.target_node_index, + }); + } + } + // Item removed by `to_rebase`: No conflict & no update necessary. + } + + // - Sets same: No conflicts/updates + Ok((conflicts, updates)) + } + + fn get_node_index_by_id(&self, id: Ulid) -> WorkspaceSnapshotGraphResult { + for node_index in self.graph.node_indices() { + // It's possible that there are multiple nodes in the petgraph that have the + // same ID as the one we're interested in, as we may not yet have cleaned up + // nodes/edges representing "old" versions when we're making changes. There + // should only be one in the sub-graph starting at `self.root_index`, + // however, and this represents the current state of the workspace after all + // changes have been made. + if self.has_path_to_root(node_index) { + let node_weight = self.get_node_weight(node_index)?; + if node_weight.id() == id { + return Ok(node_index); + } + } + } + + Err(WorkspaceSnapshotGraphError::NodeWithIdNotFound(id)) + } + + fn get_node_index_by_lineage( + &self, + lineage_id: Ulid, + ) -> WorkspaceSnapshotGraphResult> { + let mut results = Vec::new(); + for node_index in self.graph.node_indices() { + if let NodeWeight::Content(node_weight) = self.get_node_weight(node_index)? 
{ + if node_weight.lineage_id() == lineage_id { + results.push(node_index); + } + } + } + + Ok(results) + } + + fn get_node_weight(&self, node_index: NodeIndex) -> WorkspaceSnapshotGraphResult<&NodeWeight> { + self.graph + .node_weight(node_index) + .ok_or(WorkspaceSnapshotGraphError::NodeWeightNotFound) + } + + fn get_node_weight_mut( + &mut self, + node_index: NodeIndex, + ) -> WorkspaceSnapshotGraphResult<&mut NodeWeight> { + self.graph + .node_weight_mut(node_index) + .ok_or(WorkspaceSnapshotGraphError::NodeWeightNotFound) + } + + fn has_path_to_root(&self, node: NodeIndex) -> bool { + algo::has_path_connecting(&self.graph, self.root_index, node, None) + } + + fn import_subgraph( + &mut self, + other: &WorkspaceSnapshotGraph, + root_index: NodeIndex, + ) -> WorkspaceSnapshotGraphResult { + let mut new_node_indexes = HashMap::new(); + let mut dfs = petgraph::visit::DfsPostOrder::new(&other.graph, root_index); + while let Some(node_index_to_copy) = dfs.next(&other.graph) { + let node_weight_copy = other.get_node_weight(node_index_to_copy)?.clone(); + let new_node_index = self.add_node(node_weight_copy)?; + new_node_indexes.insert(node_index_to_copy, new_node_index); + + for edge in other.graph.edges_directed(node_index_to_copy, Outgoing) { + self.graph.update_edge( + new_node_index, + new_node_indexes + .get(&edge.target()) + .copied() + .ok_or(WorkspaceSnapshotGraphError::NodeWeightNotFound)?, + edge.weight().clone(), + ); + } + } + + new_node_indexes + .get(&root_index) + .copied() + .ok_or(WorkspaceSnapshotGraphError::NodeWeightNotFound) + } + + fn is_acyclic_directed(&self) -> bool { + // Using this because "is_cyclic_directed" is recursive. 
+ algo::toposort(&self.graph, None).is_ok() + } + + fn is_on_path_between(&self, start: NodeIndex, end: NodeIndex, node: NodeIndex) -> bool { + algo::has_path_connecting(&self.graph, start, node, None) + && algo::has_path_connecting(&self.graph, node, end, None) + } + + /// [`StableGraph`] guarantees the stability of [`NodeIndex`] across removals, however there + /// are **NO** guarantees around the stability of [`EdgeIndex`] across removals. If + /// [`Self::cleanup()`] has been called, then any [`EdgeIndex`] should be considered invalid. + fn remove_edge( + &mut self, + change_set: &ChangeSet, + source_node_index: NodeIndex, + target_node_index: NodeIndex, + edge_kind: EdgeWeightKind, + ) -> WorkspaceSnapshotGraphResult<()> { + let mut edges_to_remove = Vec::new(); + let new_source_node_index = dbg!(self.copy_node_index(change_set, source_node_index)?); + self.replace_references(change_set, dbg!(source_node_index), new_source_node_index)?; + + for edgeref in self + .graph + .edges_connecting(new_source_node_index, target_node_index) + { + dbg!(&edgeref); + if edgeref.weight().kind() == edge_kind { + edges_to_remove.push(edgeref.id()); + } + } + for edge_to_remove in edges_to_remove { + self.graph.remove_edge(edge_to_remove); + } + + self.update_merkle_tree_hash(new_source_node_index)?; + + Ok(()) + } + + fn replace_references( + &mut self, + change_set: &ChangeSet, + original_node_index: NodeIndex, + new_node_index: NodeIndex, + ) -> WorkspaceSnapshotGraphResult<()> { + let mut old_to_new_node_indices: HashMap = HashMap::new(); + old_to_new_node_indices.insert(original_node_index, new_node_index); + + let mut dfspo = DfsPostOrder::new(&self.graph, self.root_index); + while let Some(old_node_index) = dfspo.next(&self.graph) { + // All nodes that exist between the root and the `original_node_index` are affected by the replace, and only + // those nodes are affected, because the replacement affects their merkel tree hashes. 
+ if self.is_on_path_between(self.root_index, original_node_index, old_node_index) { + // Copy the node if we have not seen it or grab it if we have. Only the first node in DFS post order + // traversal should already exist since it was created before we entered `replace_references`, and + // is the reason we're updating things in the first place. + let new_node_index = match old_to_new_node_indices.get(&old_node_index) { + Some(found_new_node_index) => *found_new_node_index, + None => { + let new_node_index = self.copy_node_index(change_set, old_node_index)?; + old_to_new_node_indices.insert(old_node_index, new_node_index); + new_node_index + } + }; + + // Find all outgoing edges. From those outgoing edges and find their destinations. + // If they do not have destinations, then there is no work to do (i.e. stale edge + // reference, which should only happen if an edge was removed after we got the + // edge ref, but before we asked about the edge's endpoints). + let mut edges_to_create: Vec<(EdgeWeight, NodeIndex)> = Vec::new(); + for edge_reference in self.graph.edges_directed(old_node_index, Outgoing) { + let edge_weight = edge_reference.weight(); + if let Some((_, destination_node_index)) = + self.graph.edge_endpoints(edge_reference.id()) + { + edges_to_create.push(( + edge_weight.new_with_incremented_vector_clocks(change_set)?, + destination_node_index, + )); + } + } + + // Make copies of these edges where the source is the new node index and the + // destination is one of the following... + // - If an entry exists in `old_to_new_node_indicies` for the destination node index, + // use the value of the entry (the destination was affected by the replacement, + // and needs to use the new node index to reflect this). + // - There is no entry in `old_to_new_node_indicies`; use the same destination node + // index as the old edge (the destination was *NOT* affected by the replacemnt, + // and does not have any new information to reflect). 
+ for (edge_weight, destination_node_index) in edges_to_create { + // Need to directly add the edge, without going through `self.add_edge` to avoid + // infinite recursion, and because we're the place doing all the book keeping + // that we'd be interested in happening from `self.add_edge`. + self.graph.update_edge( + new_node_index, + *old_to_new_node_indices + .get(&destination_node_index) + .unwrap_or(&destination_node_index), + edge_weight, + ); + } + + self.update_merkle_tree_hash(new_node_index)?; + + // Use the new version of the old root node as our root node. + if let Some(new_root_node_index) = old_to_new_node_indices.get(&self.root_index) { + self.root_index = *new_root_node_index; + } + } + } + + Ok(()) + } + + fn update_merkle_tree_hash( + &mut self, + node_index_to_update: NodeIndex, + ) -> WorkspaceSnapshotGraphResult<()> { + let mut hasher = ContentHash::hasher(); + hasher.update( + self.get_node_weight(node_index_to_update)? + .content_hash() + .to_string() + .as_bytes(), + ); + + // Need to make sure the neighbors are added to the hash in a stable order to ensure the + // merkle tree hash is identical for identical trees. + let mut ordered_neighbors = Vec::new(); + for neighbor_node in self + .graph + .neighbors_directed(node_index_to_update, Outgoing) + { + ordered_neighbors.push(neighbor_node); + } + ordered_neighbors.sort(); + + for neighbor_node in ordered_neighbors { + hasher.update( + self.graph + .node_weight(neighbor_node) + .ok_or(WorkspaceSnapshotGraphError::NodeWeightNotFound)? 
+ .merkle_tree_hash() + .to_string() + .as_bytes(), + ); + } + + let new_node_weight = self + .graph + .node_weight_mut(node_index_to_update) + .ok_or(WorkspaceSnapshotGraphError::NodeWeightNotFound)?; + new_node_weight.set_merkle_tree_hash(hasher.finalize()); + + Ok(()) + } +} + +fn ordering_node_indexes_for_node_index( + snapshot: &WorkspaceSnapshotGraph, + node_index: NodeIndex, +) -> Vec { + snapshot + .graph + .edges_directed(node_index, Outgoing) + .filter_map(|edge_reference| { + if edge_reference.weight().kind() == EdgeWeightKind::Ordering { + if let Some((_, destination_node_index)) = + snapshot.graph.edge_endpoints(edge_reference.id()) + { + if matches!( + snapshot.get_node_weight(destination_node_index), + Ok(NodeWeight::Ordering(_)) + ) { + return Some(destination_node_index); + } + } + } + + None + }) + .collect() +} + +#[cfg(test)] +mod test { + use super::*; + use crate::workspace_snapshot::node_weight::NodeWeight::Content; + use crate::{ComponentId, ContentHash, FuncId, PropId, SchemaId, SchemaVariantId}; + use serde_json::to_string; + + #[test] + fn new() { + let change_set = ChangeSet::new().expect("Unable to create ChangeSet"); + let change_set = &change_set; + let graph = WorkspaceSnapshotGraph::new(change_set) + .expect("Unable to create WorkspaceSnapshotGraph"); + assert!(graph.is_acyclic_directed()); + } + + #[test] + fn add_nodes_and_edges() { + let change_set = ChangeSet::new().expect("Unable to create ChangeSet"); + let change_set = &change_set; + let mut graph = WorkspaceSnapshotGraph::new(change_set) + .expect("Unable to create WorkspaceSnapshotGraph"); + + let schema_id = change_set.generate_ulid().expect("Cannot generate Ulid"); + let schema_index = graph + .add_node( + NodeWeight::new_content( + change_set, + schema_id, + ContentAddress::Schema(ContentHash::new( + SchemaId::generate().to_string().as_bytes(), + )), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add schema"); + let schema_variant_id = 
change_set.generate_ulid().expect("Cannot generate Ulid"); + let schema_variant_index = graph + .add_node( + NodeWeight::new_content( + change_set, + schema_variant_id, + ContentAddress::SchemaVariant(ContentHash::new( + SchemaVariantId::generate().to_string().as_bytes(), + )), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add schema variant"); + let component_id = change_set.generate_ulid().expect("Cannot generate Ulid"); + let component_index = graph + .add_node( + NodeWeight::new_content( + change_set, + component_id, + ContentAddress::Component(ContentHash::new( + ComponentId::generate().to_string().as_bytes(), + )), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add component"); + + graph + .add_edge( + change_set, + graph.root_index, + EdgeWeight::new(change_set, EdgeWeightKind::Uses) + .expect("Unable to create EdgeWeight"), + component_index, + ) + .expect("Unable to add root -> component edge"); + graph + .add_edge( + change_set, + graph.root_index, + EdgeWeight::new(change_set, EdgeWeightKind::Uses) + .expect("Unable to create EdgeWeight"), + schema_index, + ) + .expect("Unable to add root -> schema edge"); + graph + .add_edge( + change_set, + graph + .get_node_index_by_id(schema_id) + .expect("Cannot get NodeIndex"), + EdgeWeight::new(change_set, EdgeWeightKind::Uses) + .expect("Unable to create EdgeWeight"), + schema_variant_index, + ) + .expect("Unable to add schema -> schema variant edge"); + graph + .add_edge( + change_set, + graph + .get_node_index_by_id(component_id) + .expect("Cannot get NodeIndex"), + EdgeWeight::new(change_set, EdgeWeightKind::Uses) + .expect("Unable to create EdgeWeight"), + graph + .get_node_index_by_id(schema_variant_id) + .expect("Cannot get NodeIndex"), + ) + .expect("Unable to add component -> schema variant edge"); + + let func_id = change_set.generate_ulid().expect("Cannot generate Ulid"); + let func_index = graph + .add_node( + NodeWeight::new_content( + change_set, + 
func_id, + ContentAddress::Func(ContentHash::new( + FuncId::generate().to_string().as_bytes(), + )), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add func"); + let prop_id = change_set.generate_ulid().expect("Cannot generate Ulid"); + let prop_index = graph + .add_node( + NodeWeight::new_content( + change_set, + prop_id, + ContentAddress::Prop(ContentHash::new( + PropId::generate().to_string().as_bytes(), + )), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add prop"); + + graph + .add_edge( + change_set, + graph.root_index, + EdgeWeight::new(change_set, EdgeWeightKind::Uses) + .expect("Unable to create EdgeWeight"), + func_index, + ) + .expect("Unable to add root -> func edge"); + graph + .add_edge( + change_set, + graph + .get_node_index_by_id(schema_variant_id) + .expect("Cannot get NodeIndex"), + EdgeWeight::new(change_set, EdgeWeightKind::Uses) + .expect("Unable to create EdgeWeight"), + prop_index, + ) + .expect("Unable to add schema variant -> prop edge"); + graph + .add_edge( + change_set, + graph + .get_node_index_by_id(prop_id) + .expect("Cannot get NodeIndex"), + EdgeWeight::new(change_set, EdgeWeightKind::Uses) + .expect("Unable to create EdgeWeight"), + graph + .get_node_index_by_id(func_id) + .expect("Cannot get NodeIndex"), + ) + .expect("Unable to add prop -> func edge"); + + assert!(graph.is_acyclic_directed()); + } + + #[test] + fn cyclic_failure() { + let change_set = ChangeSet::new().expect("Unable to create ChangeSet"); + let change_set = &change_set; + let mut graph = WorkspaceSnapshotGraph::new(change_set) + .expect("Unable to create WorkspaceSnapshotGraph"); + + let schema_id = change_set.generate_ulid().expect("Cannot generate Ulid"); + let initial_schema_node_index = graph + .add_node( + NodeWeight::new_content( + change_set, + schema_id, + ContentAddress::Schema(ContentHash::new( + SchemaId::generate().to_string().as_bytes(), + )), + ) + .expect("Unable to create NodeWeight"), + ) + 
.expect("Unable to add schema"); + let schema_variant_id = change_set.generate_ulid().expect("Cannot generate Ulid"); + let initial_schema_variant_node_index = graph + .add_node( + NodeWeight::new_content( + change_set, + schema_variant_id, + ContentAddress::SchemaVariant(ContentHash::new( + SchemaVariantId::generate().to_string().as_bytes(), + )), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add schema variant"); + let component_id = change_set.generate_ulid().expect("Cannot generate Ulid"); + let initial_component_node_index = graph + .add_node( + NodeWeight::new_content( + change_set, + component_id, + ContentAddress::Component(ContentHash::new( + ComponentId::generate().to_string().as_bytes(), + )), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add component"); + + graph + .add_edge( + change_set, + graph.root_index, + EdgeWeight::new(change_set, EdgeWeightKind::Uses) + .expect("Unable to create EdgeWeight"), + initial_component_node_index, + ) + .expect("Unable to add root -> component edge"); + graph + .add_edge( + change_set, + graph.root_index, + EdgeWeight::new(change_set, EdgeWeightKind::Uses) + .expect("Unable to create EdgeWeight"), + initial_schema_node_index, + ) + .expect("Unable to add root -> schema edge"); + graph + .add_edge( + change_set, + graph + .get_node_index_by_id(schema_id) + .expect("Cannot find NodeIndex"), + EdgeWeight::new(change_set, EdgeWeightKind::Uses) + .expect("Unable to create EdgeWeight"), + initial_schema_variant_node_index, + ) + .expect("Unable to add schema -> schema variant edge"); + graph + .add_edge( + change_set, + graph + .get_node_index_by_id(component_id) + .expect("Cannot find NodeIndex"), + EdgeWeight::new(change_set, EdgeWeightKind::Uses) + .expect("Unable to create EdgeWeight"), + graph + .get_node_index_by_id(schema_variant_id) + .expect("Cannot find NodeIndex"), + ) + .expect("Unable to add component -> schema variant edge"); + + let pre_cycle_root_index = 
graph.root_index; + + // This should cause a cycle. + graph + .add_edge( + change_set, + graph + .get_node_index_by_id(schema_variant_id) + .expect("Cannot find NodeIndex"), + EdgeWeight::new(change_set, EdgeWeightKind::Uses) + .expect("Unable to create EdgeWeight"), + graph + .get_node_index_by_id(component_id) + .expect("Cannot find NodeIndex"), + ) + .expect_err("Created a cycle"); + + assert_eq!(pre_cycle_root_index, graph.root_index,); + } + + #[test] + fn update_content() { + let change_set = ChangeSet::new().expect("Unable to create ChangeSet"); + let change_set = &change_set; + let mut graph = WorkspaceSnapshotGraph::new(change_set) + .expect("Unable to create WorkspaceSnapshotGraph"); + + let schema_id = change_set.generate_ulid().expect("Cannot generate Ulid"); + let schema_index = graph + .add_node( + NodeWeight::new_content( + change_set, + schema_id, + ContentAddress::Schema(ContentHash::new("Constellation".as_bytes())), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add schema"); + let schema_variant_id = change_set.generate_ulid().expect("Cannot generate Ulid"); + let schema_variant_index = graph + .add_node( + NodeWeight::new_content( + change_set, + schema_variant_id, + ContentAddress::SchemaVariant(ContentHash::new( + "Freestar Collective".as_bytes(), + )), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add schema variant"); + let component_id = change_set.generate_ulid().expect("Cannot generate Ulid"); + let component_index = graph + .add_node( + NodeWeight::new_content( + change_set, + component_id, + ContentAddress::Component(ContentHash::new("Crimson Fleet".as_bytes())), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add component"); + + graph + .add_edge( + change_set, + graph.root_index, + EdgeWeight::new(change_set, EdgeWeightKind::Uses) + .expect("Unable to create EdgeWeight"), + component_index, + ) + .expect("Unable to add root -> component edge"); + graph + 
.add_edge( + change_set, + graph.root_index, + EdgeWeight::new(change_set, EdgeWeightKind::Uses) + .expect("Unable to create EdgeWeight"), + schema_index, + ) + .expect("Unable to add root -> schema edge"); + graph + .add_edge( + change_set, + graph + .get_node_index_by_id(schema_id) + .expect("Cannot get NodeIndex"), + EdgeWeight::new(change_set, EdgeWeightKind::Uses) + .expect("Unable to create EdgeWeight"), + schema_variant_index, + ) + .expect("Unable to add schema -> schema variant edge"); + graph + .add_edge( + change_set, + graph + .get_node_index_by_id(component_id) + .expect("Cannot get NodeIndex"), + EdgeWeight::new(change_set, EdgeWeightKind::Uses) + .expect("Unable to create EdgeWeight"), + graph + .get_node_index_by_id(schema_variant_id) + .expect("Cannot get NodeIndex"), + ) + .expect("Unable to add component -> schema variant edge"); + + graph.dot(); + + // Ensure that the root node merkle tree hash looks as we expect before the update. + let pre_update_root_node_merkle_tree_hash: ContentHash = + serde_json::from_value(serde_json::json![ + "66e2b07b8a9a5f94a0ea18618a57b3264c850ea6cfeb81f5c9a42c4397f2f49d" + ]) + .expect("could not deserialize"); + assert_eq!( + pre_update_root_node_merkle_tree_hash, // expected + graph + .get_node_weight(graph.root_index) + .expect("could not get node weight") + .merkle_tree_hash(), // actual + ); + + let updated_content_hash = ContentHash::new("new_content".as_bytes()); + graph + .update_content(change_set, component_id.into(), updated_content_hash) + .expect("Unable to update Component content hash"); + + graph.dot(); + + let post_update_root_node_merkle_tree_hash: ContentHash = + serde_json::from_value(serde_json::json![ + "0b9b79be9c1b4107bd32dc9fb7accde544dc10171e37847e53c4d16a9efd2da1" + ]) + .expect("could not deserialize"); + assert_eq!( + post_update_root_node_merkle_tree_hash, // expected + graph + .get_node_weight(graph.root_index) + .expect("could not get node weight") + .merkle_tree_hash(), // actual + 
); + assert_eq!( + updated_content_hash, // expected + graph + .get_node_weight( + graph + .get_node_index_by_id(component_id) + .expect("could not get node index by id") + ) + .expect("could not get node weight") + .content_hash(), // actual + ); + + graph.cleanup(); + + graph.dot(); + + // Ensure that there are not more nodes than the ones that should be in use. + assert_eq!(4, graph.node_count()); + + // The hashes must not change upon cleanup. + assert_eq!( + post_update_root_node_merkle_tree_hash, // expected + graph + .get_node_weight(graph.root_index) + .expect("could not get node weight") + .merkle_tree_hash(), // actual + ); + assert_eq!( + updated_content_hash, // expected + graph + .get_node_weight( + graph + .get_node_index_by_id(component_id) + .expect("could not get node index by id") + ) + .expect("could not get node weight") + .content_hash(), // actual + ); + } + + #[test] + fn detect_conflicts_and_updates_simple_no_conflicts_no_updates_in_base() { + let initial_change_set = ChangeSet::new().expect("Unable to create ChangeSet"); + let initial_change_set = &initial_change_set; + let mut initial_graph = WorkspaceSnapshotGraph::new(initial_change_set) + .expect("Unable to create WorkspaceSnapshotGraph"); + + let schema_id = initial_change_set + .generate_ulid() + .expect("Cannot generate Ulid"); + let schema_index = initial_graph + .add_node( + NodeWeight::new_content( + initial_change_set, + schema_id, + ContentAddress::Schema(ContentHash::new("Schema A".as_bytes())), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add Schema A"); + let schema_variant_id = initial_change_set + .generate_ulid() + .expect("Cannot generate Ulid"); + let schema_variant_index = initial_graph + .add_node( + NodeWeight::new_content( + initial_change_set, + schema_variant_id, + ContentAddress::SchemaVariant(ContentHash::new("Schema Variant A".as_bytes())), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add Schema Variant A"); + 
+ initial_graph + .add_edge( + initial_change_set, + initial_graph.root_index, + EdgeWeight::new(initial_change_set, EdgeWeightKind::Uses) + .expect("Unable to create EdgeWeight"), + schema_index, + ) + .expect("Unable to add root -> schema edge"); + initial_graph + .add_edge( + initial_change_set, + initial_graph + .get_node_index_by_id(schema_id) + .expect("Cannot get NodeIndex"), + EdgeWeight::new(initial_change_set, EdgeWeightKind::Uses) + .expect("Unable to create EdgeWeight"), + schema_variant_index, + ) + .expect("Unable to add schema -> schema variant edge"); + + initial_graph.dot(); + + let new_change_set = ChangeSet::new().expect("Unable to create ChangeSet"); + let new_change_set = &new_change_set; + let mut new_graph = initial_graph.clone(); + + let component_id = new_change_set + .generate_ulid() + .expect("Cannot generate Ulid"); + let component_index = new_graph + .add_node( + NodeWeight::new_content( + new_change_set, + component_id, + ContentAddress::Schema(ContentHash::new("Component A".as_bytes())), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add Component A"); + new_graph + .add_edge( + new_change_set, + new_graph.root_index, + EdgeWeight::new(new_change_set, EdgeWeightKind::Uses) + .expect("Unable to create EdgeWeight"), + component_index, + ) + .expect("Unable to add root -> component edge"); + new_graph + .add_edge( + new_change_set, + new_graph + .get_node_index_by_id(component_id) + .expect("Cannot get NodeIndex"), + EdgeWeight::new(new_change_set, EdgeWeightKind::Uses) + .expect("Unable to create EdgeWeight"), + new_graph + .get_node_index_by_id(schema_variant_id) + .expect("Cannot get NodeIndex"), + ) + .expect("Unable to add component -> schema variant edge"); + + new_graph.dot(); + + let (conflicts, updates) = new_graph + .detect_conflicts_and_updates(new_change_set, &initial_graph, initial_change_set) + .expect("Unable to detect conflicts and updates"); + + assert_eq!(Vec::::new(), conflicts); + 
assert_eq!(Vec::::new(), updates); + } + + #[test] + fn detect_conflicts_and_updates_simple_no_conflicts_with_purely_new_content_in_base() { + let initial_change_set = ChangeSet::new().expect("Unable to create ChangeSet"); + let base_change_set = &initial_change_set; + let mut base_graph = WorkspaceSnapshotGraph::new(base_change_set) + .expect("Unable to create WorkspaceSnapshotGraph"); + + let schema_id = base_change_set + .generate_ulid() + .expect("Cannot generate Ulid"); + let schema_index = base_graph + .add_node( + NodeWeight::new_content( + base_change_set, + schema_id, + ContentAddress::Schema(ContentHash::new("Schema A".as_bytes())), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add Schema A"); + let schema_variant_id = base_change_set + .generate_ulid() + .expect("Cannot generate Ulid"); + let schema_variant_index = base_graph + .add_node( + NodeWeight::new_content( + base_change_set, + schema_variant_id, + ContentAddress::SchemaVariant(ContentHash::new("Schema Variant A".as_bytes())), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add Schema Variant A"); + + base_graph + .add_edge( + base_change_set, + base_graph.root_index, + EdgeWeight::new(base_change_set, EdgeWeightKind::Uses) + .expect("Unable to create EdgeWeight"), + schema_index, + ) + .expect("Unable to add root -> schema edge"); + base_graph + .add_edge( + base_change_set, + base_graph + .get_node_index_by_id(schema_id) + .expect("Cannot get NodeIndex"), + EdgeWeight::new(base_change_set, EdgeWeightKind::Uses) + .expect("Unable to create EdgeWeight"), + schema_variant_index, + ) + .expect("Unable to add schema -> schema variant edge"); + + println!("Initial base graph (Root {:?}):", base_graph.root_index); + base_graph.dot(); + + let new_change_set = ChangeSet::new().expect("Unable to create ChangeSet"); + let new_change_set = &new_change_set; + let mut new_graph = base_graph.clone(); + + let new_onto_component_id = base_change_set + 
.generate_ulid() + .expect("Cannot generate Ulid"); + let new_onto_component_index = base_graph + .add_node( + NodeWeight::new_content( + base_change_set, + new_onto_component_id, + ContentAddress::Component(ContentHash::new("Component B".as_bytes())), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add Component B"); + let new_onto_root_component_edge_index = base_graph + .add_edge( + base_change_set, + base_graph.root_index, + EdgeWeight::new(base_change_set, EdgeWeightKind::Uses) + .expect("Unable to create EdgeWeight"), + new_onto_component_index, + ) + .expect("Unable to add root -> component edge"); + base_graph + .add_edge( + base_change_set, + base_graph + .get_node_index_by_id(new_onto_component_id) + .expect("Unable to get NodeIndex"), + EdgeWeight::new(base_change_set, EdgeWeightKind::Uses) + .expect("Unable to create EdgeWeight"), + base_graph + .get_node_index_by_id(schema_variant_id) + .expect("Unable to get NodeIndex"), + ) + .expect("Unable to add component -> schema variant edge"); + + println!("Updated base graph (Root: {:?}):", base_graph.root_index); + base_graph.dot(); + + let (conflicts, updates) = new_graph + .detect_conflicts_and_updates(new_change_set, &base_graph, base_change_set) + .expect("Unable to detect conflicts and updates"); + + assert_eq!(Vec::::new(), conflicts); + + let new_onto_component_index = base_graph + .get_node_index_by_id(new_onto_component_id) + .expect("Unable to get NodeIndex"); + match updates.as_slice() { + [Update::NewEdge { + source, + destination, + edge_weight, + }] => { + assert_eq!(new_graph.root_index, *source); + assert_eq!(new_onto_component_index, *destination); + assert_eq!(EdgeWeightKind::Uses, edge_weight.kind()); + } + other => panic!("Unexpected updates: {:?}", other), + } + } + + #[test] + fn detect_conflicts_and_updates_simple_no_conflicts_with_updates_on_both_sides() { + let initial_change_set = ChangeSet::new().expect("Unable to create ChangeSet"); + let base_change_set = 
&initial_change_set; + let mut base_graph = WorkspaceSnapshotGraph::new(base_change_set) + .expect("Unable to create WorkspaceSnapshotGraph"); + + let schema_id = base_change_set + .generate_ulid() + .expect("Cannot generate Ulid"); + let schema_index = base_graph + .add_node( + NodeWeight::new_content( + base_change_set, + schema_id, + ContentAddress::Schema(ContentHash::new("Schema A".as_bytes())), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add Schema A"); + let schema_variant_id = base_change_set + .generate_ulid() + .expect("Cannot generate Ulid"); + let schema_variant_index = base_graph + .add_node( + NodeWeight::new_content( + base_change_set, + schema_variant_id, + ContentAddress::SchemaVariant(ContentHash::new("Schema Variant A".as_bytes())), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add Schema Variant A"); + + base_graph + .add_edge( + base_change_set, + base_graph.root_index, + EdgeWeight::new(base_change_set, EdgeWeightKind::Uses) + .expect("Unable to create EdgeWeight"), + schema_index, + ) + .expect("Unable to add root -> schema edge"); + base_graph + .add_edge( + base_change_set, + base_graph + .get_node_index_by_id(schema_id) + .expect("Cannot get NodeIndex"), + EdgeWeight::new(base_change_set, EdgeWeightKind::Uses) + .expect("Unable to create EdgeWeight"), + schema_variant_index, + ) + .expect("Unable to add schema -> schema variant edge"); + + println!("Initial base graph (Root {:?}):", base_graph.root_index); + base_graph.dot(); + + let new_change_set = ChangeSet::new().expect("Unable to create ChangeSet"); + let new_change_set = &new_change_set; + let mut new_graph = base_graph.clone(); + + let component_id = new_change_set + .generate_ulid() + .expect("Cannot generate Ulid"); + let component_index = new_graph + .add_node( + NodeWeight::new_content( + new_change_set, + component_id, + ContentAddress::Component(ContentHash::new("Component A".as_bytes())), + ) + .expect("Unable to create 
NodeWeight"), + ) + .expect("Unable to add Component A"); + new_graph + .add_edge( + new_change_set, + new_graph.root_index, + EdgeWeight::new(new_change_set, EdgeWeightKind::Uses) + .expect("Unable to create EdgeWeight"), + component_index, + ) + .expect("Unable to add root -> component edge"); + new_graph + .add_edge( + new_change_set, + new_graph + .get_node_index_by_id(component_id) + .expect("Cannot get NodeIndex"), + EdgeWeight::new(new_change_set, EdgeWeightKind::Uses) + .expect("Unable to create EdgeWeight"), + new_graph + .get_node_index_by_id(schema_variant_id) + .expect("Cannot get NodeIndex"), + ) + .expect("Unable to add component -> schema variant edge"); + + println!("new graph (Root {:?}):", new_graph.root_index); + new_graph.dot(); + + let new_onto_component_id = base_change_set + .generate_ulid() + .expect("Cannot generate Ulid"); + let new_onto_component_index = base_graph + .add_node( + NodeWeight::new_content( + base_change_set, + new_onto_component_id, + ContentAddress::Component(ContentHash::new("Component B".as_bytes())), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add Component B"); + base_graph + .add_edge( + base_change_set, + base_graph.root_index, + EdgeWeight::new(base_change_set, EdgeWeightKind::Uses) + .expect("Unable to create EdgeWeight"), + new_onto_component_index, + ) + .expect("Unable to add root -> component edge"); + base_graph + .add_edge( + base_change_set, + base_graph + .get_node_index_by_id(new_onto_component_id) + .expect("Unable to get NodeIndex"), + EdgeWeight::new(base_change_set, EdgeWeightKind::Uses) + .expect("Unable to create EdgeWeight"), + base_graph + .get_node_index_by_id(schema_variant_id) + .expect("Unable to get NodeIndex"), + ) + .expect("Unable to add component -> schema variant edge"); + + println!("Updated base graph (Root: {:?}):", base_graph.root_index); + base_graph.dot(); + + let (conflicts, updates) = new_graph + .detect_conflicts_and_updates(new_change_set, &base_graph, 
base_change_set) + .expect("Unable to detect conflicts and updates"); + + assert_eq!(Vec::::new(), conflicts); + + let new_onto_component_index = base_graph + .get_node_index_by_id(new_onto_component_id) + .expect("Unable to get NodeIndex"); + match updates.as_slice() { + [Update::NewEdge { + source, + destination, + edge_weight, + }] => { + assert_eq!(new_graph.root_index, *source); + assert_eq!(new_onto_component_index, *destination); + assert_eq!(EdgeWeightKind::Uses, edge_weight.kind()); + } + other => panic!("Unexpected updates: {:?}", other), + } + } + + #[test] + fn detect_conflicts_and_updates_simple_with_content_conflict() { + let initial_change_set = ChangeSet::new().expect("Unable to create ChangeSet"); + let base_change_set = &initial_change_set; + let mut base_graph = WorkspaceSnapshotGraph::new(base_change_set) + .expect("Unable to create WorkspaceSnapshotGraph"); + + let schema_id = base_change_set + .generate_ulid() + .expect("Cannot generate Ulid"); + let schema_index = base_graph + .add_node( + NodeWeight::new_content( + base_change_set, + schema_id, + ContentAddress::Schema(ContentHash::new("Schema A".as_bytes())), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add Schema A"); + let schema_variant_id = base_change_set + .generate_ulid() + .expect("Cannot generate Ulid"); + let schema_variant_index = base_graph + .add_node( + NodeWeight::new_content( + base_change_set, + schema_variant_id, + ContentAddress::SchemaVariant(ContentHash::new("Schema Variant A".as_bytes())), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add Schema Variant A"); + + base_graph + .add_edge( + base_change_set, + base_graph.root_index, + EdgeWeight::new(base_change_set, EdgeWeightKind::Uses) + .expect("Unable to create EdgeWeight"), + schema_index, + ) + .expect("Unable to add root -> schema edge"); + base_graph + .add_edge( + base_change_set, + base_graph + .get_node_index_by_id(schema_id) + .expect("Cannot get NodeIndex"), 
+ EdgeWeight::new(base_change_set, EdgeWeightKind::Uses) + .expect("Unable to create EdgeWeight"), + schema_variant_index, + ) + .expect("Unable to add schema -> schema variant edge"); + + let component_id = base_change_set + .generate_ulid() + .expect("Cannot generate Ulid"); + let component_index = base_graph + .add_node( + NodeWeight::new_content( + base_change_set, + component_id, + ContentAddress::Component(ContentHash::new("Component A".as_bytes())), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add Component A"); + base_graph + .add_edge( + base_change_set, + base_graph.root_index, + EdgeWeight::new(base_change_set, EdgeWeightKind::Uses) + .expect("Unable to create EdgeWeight"), + component_index, + ) + .expect("Unable to add root -> component edge"); + base_graph + .add_edge( + base_change_set, + base_graph + .get_node_index_by_id(component_id) + .expect("Cannot get NodeIndex"), + EdgeWeight::new(base_change_set, EdgeWeightKind::Uses) + .expect("Unable to create EdgeWeight"), + base_graph + .get_node_index_by_id(schema_variant_id) + .expect("Cannot get NodeIndex"), + ) + .expect("Unable to add component -> schema variant edge"); + + base_graph.cleanup(); + println!("Initial base graph (Root {:?}):", base_graph.root_index); + base_graph.dot(); + + let new_change_set = ChangeSet::new().expect("Unable to create ChangeSet"); + let new_change_set = &new_change_set; + let mut new_graph = base_graph.clone(); + + new_graph + .update_content( + new_change_set, + component_id, + ContentHash::new("Updated Component A".as_bytes()), + ) + .expect("Unable to update Component A"); + + new_graph.cleanup(); + println!("new graph (Root {:?}):", new_graph.root_index); + new_graph.dot(); + + base_graph + .update_content( + base_change_set, + component_id, + ContentHash::new("Base Updated Component A".as_bytes()), + ) + .expect("Unable to update Component A"); + + base_graph.cleanup(); + println!("Updated base graph (Root: {:?}):", 
base_graph.root_index); + base_graph.dot(); + + let (conflicts, updates) = new_graph + .detect_conflicts_and_updates(new_change_set, &base_graph, base_change_set) + .expect("Unable to detect conflicts and updates"); + + assert_eq!( + vec![Conflict::NodeContent { + onto: base_graph + .get_node_index_by_id(component_id) + .expect("Unable to get component NodeIndex"), + to_rebase: new_graph + .get_node_index_by_id(component_id) + .expect("Unable to get component NodeIndex"), + }], + conflicts + ); + assert_eq!(Vec::::new(), updates); + } + + #[test] + fn detect_conflicts_and_updates_simple_with_modify_removed_item_conflict() { + let initial_change_set = ChangeSet::new().expect("Unable to create ChangeSet"); + let base_change_set = &initial_change_set; + let mut base_graph = WorkspaceSnapshotGraph::new(base_change_set) + .expect("Unable to create WorkspaceSnapshotGraph"); + + let schema_id = base_change_set + .generate_ulid() + .expect("Cannot generate Ulid"); + let schema_index = base_graph + .add_node( + NodeWeight::new_content( + base_change_set, + schema_id, + ContentAddress::Schema(ContentHash::new("Schema A".as_bytes())), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add Schema A"); + let schema_variant_id = base_change_set + .generate_ulid() + .expect("Cannot generate Ulid"); + let schema_variant_index = base_graph + .add_node( + NodeWeight::new_content( + base_change_set, + schema_variant_id, + ContentAddress::SchemaVariant(ContentHash::new("Schema Variant A".as_bytes())), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add Schema Variant A"); + + base_graph + .add_edge( + base_change_set, + base_graph.root_index, + EdgeWeight::new(base_change_set, EdgeWeightKind::Uses) + .expect("Unable to create EdgeWeight"), + schema_index, + ) + .expect("Unable to add root -> schema edge"); + base_graph + .add_edge( + base_change_set, + base_graph + .get_node_index_by_id(schema_id) + .expect("Cannot get NodeIndex"), + 
EdgeWeight::new(base_change_set, EdgeWeightKind::Uses) + .expect("Unable to create EdgeWeight"), + schema_variant_index, + ) + .expect("Unable to add schema -> schema variant edge"); + + let component_id = base_change_set + .generate_ulid() + .expect("Cannot generate Ulid"); + let component_index = base_graph + .add_node( + NodeWeight::new_content( + base_change_set, + component_id, + ContentAddress::Component(ContentHash::new("Component A".as_bytes())), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add Component A"); + base_graph + .add_edge( + base_change_set, + base_graph.root_index, + EdgeWeight::new(base_change_set, EdgeWeightKind::Uses) + .expect("Unable to create EdgeWeight"), + component_index, + ) + .expect("Unable to add root -> component edge"); + base_graph + .add_edge( + base_change_set, + base_graph + .get_node_index_by_id(component_id) + .expect("Cannot get NodeIndex"), + EdgeWeight::new(base_change_set, EdgeWeightKind::Uses) + .expect("Unable to create EdgeWeight"), + base_graph + .get_node_index_by_id(schema_variant_id) + .expect("Cannot get NodeIndex"), + ) + .expect("Unable to add component -> schema variant edge"); + + base_graph.cleanup(); + println!("Initial base graph (Root {:?}):", base_graph.root_index); + base_graph.dot(); + + let new_change_set = ChangeSet::new().expect("Unable to create ChangeSet"); + let new_change_set = &new_change_set; + let mut new_graph = base_graph.clone(); + + base_graph + .remove_edge( + base_change_set, + base_graph.root_index, + base_graph + .get_node_index_by_id(component_id) + .expect("Unable to get NodeIndex"), + EdgeWeightKind::Uses, + ) + .expect("Unable to remove Component A"); + + base_graph.cleanup(); + println!("Updated base graph (Root: {:?}):", base_graph.root_index); + base_graph.dot(); + + new_graph + .update_content( + new_change_set, + component_id, + ContentHash::new("Updated Component A".as_bytes()), + ) + .expect("Unable to update Component A"); + + 
new_graph.cleanup(); + println!("new graph (Root {:?}):", new_graph.root_index); + new_graph.dot(); + + let (conflicts, updates) = new_graph + .detect_conflicts_and_updates(new_change_set, &base_graph, base_change_set) + .expect("Unable to detect conflicts and updates"); + + assert_eq!( + vec![Conflict::ModifyRemovedItem( + new_graph + .get_node_index_by_id(component_id) + .expect("Unable to get NodeIndex") + )], + conflicts + ); + assert_eq!(Vec::<Update>::new(), updates); + } +} diff --git a/lib/dal/src/workspace_snapshot/lamport_clock.rs b/lib/dal/src/workspace_snapshot/lamport_clock.rs new file mode 100644 index 0000000000..32ae4887c2 --- /dev/null +++ b/lib/dal/src/workspace_snapshot/lamport_clock.rs @@ -0,0 +1,69 @@ +//! Lamport Clocks + +use chrono::{DateTime, Utc}; +use serde::{Deserialize, Serialize}; +use thiserror::Error; +use ulid::Ulid; + +use crate::workspace_snapshot::{ChangeSet, ChangeSetError}; + +#[derive(Debug, Error)] +pub enum LamportClockError { + #[error("Change Set error: {0}")] + ChangeSet(#[from] ChangeSetError), +} + +pub type LamportClockResult<T> = Result<T, LamportClockError>; + +#[derive(Clone, Copy, Deserialize, Serialize)] +pub struct LamportClock { + #[serde(with = "chrono::serde::ts_nanoseconds")] + pub counter: DateTime<Utc>, +} + +impl LamportClock { + pub fn new() -> LamportClockResult<LamportClock> { + let counter = Utc::now(); + Ok(LamportClock { counter }) + } + + pub fn new_with_value(new_value: DateTime<Utc>) -> Self { + LamportClock { counter: new_value } + } + + pub fn inc(&mut self) -> LamportClockResult<()> { + self.counter = Utc::now(); + + Ok(()) + } + + pub fn inc_to(&mut self, new_value: DateTime<Utc>) { + self.counter = new_value; + } + + pub fn merge(&mut self, other: &LamportClock) { + if self.counter < other.counter { + self.counter = other.counter; + } + } +} + +impl std::fmt::Debug for LamportClock { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(f, "LamportClock({})", &self.counter.to_string()) + } +} + +impl Eq for LamportClock {} +
+impl PartialEq for LamportClock { + fn eq(&self, other: &Self) -> bool { + self.counter == other.counter + } +} + +impl PartialOrd for LamportClock { + fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> { + self.counter.partial_cmp(&other.counter) + } +} diff --git a/lib/dal/src/workspace_snapshot/node_weight.rs b/lib/dal/src/workspace_snapshot/node_weight.rs new file mode 100644 index 0000000000..0a876e2631 --- /dev/null +++ b/lib/dal/src/workspace_snapshot/node_weight.rs @@ -0,0 +1,163 @@ +use chrono::{DateTime, Utc}; +use serde::{Deserialize, Serialize}; +use thiserror::Error; +use ulid::Ulid; + +use crate::workspace_snapshot::{ + change_set::{ChangeSet, ChangeSetError}, + vector_clock::{VectorClock, VectorClockError}, +}; +use crate::ContentHash; + +pub use crate::workspace_snapshot::node_weight::content_node_weight::ContentAddress; +pub use content_node_weight::ContentNodeWeight; +pub use ordering_node_weight::OrderingNodeWeight; + +pub mod content_node_weight; +pub mod ordering_node_weight; + +#[derive(Debug, Error)] +pub enum NodeWeightError { + #[error("Cannot set content hash directly on node weight kind")] + CannotSetContentHashOnKind, + #[error("Cannot update root node's content hash")] + CannotUpdateRootNodeContentHash, + #[error("ChangeSet error: {0}")] + ChangeSet(#[from] ChangeSetError), + #[error("Incompatible node weights")] + IncompatibleNodeWeightVariants, + #[error("Vector Clock error: {0}")] + VectorClock(#[from] VectorClockError), +} + +pub type NodeWeightResult<T> = Result<T, NodeWeightError>; + +#[derive(Debug, Serialize, Deserialize, Clone)] +pub enum NodeWeight { + Content(ContentNodeWeight), + Ordering(OrderingNodeWeight), +} + +impl NodeWeight { + pub fn content_hash(&self) -> ContentHash { + match self { + NodeWeight::Content(content_weight) => content_weight.content_hash(), + NodeWeight::Ordering(ordering_weight) => ordering_weight.content_hash(), + } + } + + pub fn id(&self) -> Ulid { + match self { + NodeWeight::Content(content_weight) => content_weight.id(), 
+ NodeWeight::Ordering(ordering_weight) => ordering_weight.id(), + } + } + + pub fn lineage_id(&self) -> Ulid { + match self { + NodeWeight::Content(content_weight) => content_weight.lineage_id(), + NodeWeight::Ordering(ordering_weight) => ordering_weight.lineage_id(), + } + } + + pub fn merge_clocks( + &mut self, + change_set: &ChangeSet, + other: &NodeWeight, + ) -> NodeWeightResult<()> { + match (self, other) { + ( + NodeWeight::Content(self_content_weight), + NodeWeight::Content(other_content_weight), + ) => self_content_weight.merge_clocks(change_set, other_content_weight), + ( + NodeWeight::Ordering(self_ordering_weight), + NodeWeight::Ordering(other_ordering_weight), + ) => self_ordering_weight.merge_clocks(change_set, other_ordering_weight), + _ => Err(NodeWeightError::IncompatibleNodeWeightVariants), + } + } + + pub fn merkle_tree_hash(&self) -> ContentHash { + match self { + NodeWeight::Content(content_weight) => content_weight.merkle_tree_hash(), + NodeWeight::Ordering(ordering_weight) => ordering_weight.merkle_tree_hash(), + } + } + + pub fn new_content_hash(&mut self, content_hash: ContentHash) -> NodeWeightResult<()> { + match self { + NodeWeight::Content(content_weight) => content_weight.new_content_hash(content_hash), + NodeWeight::Ordering(_) => Err(NodeWeightError::CannotSetContentHashOnKind), + } + } + + pub fn new_content( + change_set: &ChangeSet, + content_id: Ulid, + kind: ContentAddress, + ) -> NodeWeightResult { + Ok(NodeWeight::Content(ContentNodeWeight::new( + change_set, content_id, kind, + )?)) + } + + pub fn new_with_incremented_vector_clock( + &self, + change_set: &ChangeSet, + ) -> NodeWeightResult { + let new_weight = match self { + NodeWeight::Content(content_weight) => { + NodeWeight::Content(content_weight.new_with_incremented_vector_clock(change_set)?) + } + NodeWeight::Ordering(ordering_weight) => { + NodeWeight::Ordering(ordering_weight.new_with_incremented_vector_clock(change_set)?) 
+ } + }; + + Ok(new_weight) + } + + pub fn set_merkle_tree_hash(&mut self, new_hash: ContentHash) { + match self { + NodeWeight::Content(content_weight) => content_weight.set_merkle_tree_hash(new_hash), + NodeWeight::Ordering(ordering_weight) => ordering_weight.set_merkle_tree_hash(new_hash), + } + } + + pub fn set_vector_clock_recently_seen_to( + &mut self, + change_set: &ChangeSet, + new_val: DateTime, + ) { + match self { + NodeWeight::Content(content_weight) => { + content_weight.set_vector_clock_recently_seen_to(change_set, new_val) + } + NodeWeight::Ordering(ordering_weight) => { + ordering_weight.set_vector_clock_recently_seen_to(change_set, new_val) + } + } + } + + pub fn vector_clock_first_seen(&self) -> &VectorClock { + match self { + NodeWeight::Content(content_weight) => content_weight.vector_clock_first_seen(), + NodeWeight::Ordering(ordering_weight) => ordering_weight.vector_clock_first_seen(), + } + } + + pub fn vector_clock_recently_seen(&self) -> &VectorClock { + match self { + NodeWeight::Content(content_weight) => content_weight.vector_clock_recently_seen(), + NodeWeight::Ordering(ordering_weight) => ordering_weight.vector_clock_recently_seen(), + } + } + + pub fn vector_clock_write(&self) -> &VectorClock { + match self { + NodeWeight::Content(content_weight) => content_weight.vector_clock_write(), + NodeWeight::Ordering(ordering_weight) => ordering_weight.vector_clock_write(), + } + } +} diff --git a/lib/dal/src/workspace_snapshot/node_weight/content_node_weight.rs b/lib/dal/src/workspace_snapshot/node_weight/content_node_weight.rs new file mode 100644 index 0000000000..9d1d5d0cb7 --- /dev/null +++ b/lib/dal/src/workspace_snapshot/node_weight/content_node_weight.rs @@ -0,0 +1,200 @@ +use chrono::{DateTime, Utc}; +use serde::{Deserialize, Serialize}; +use ulid::Ulid; + +use crate::workspace_snapshot::{ + change_set::ChangeSet, + node_weight::{NodeWeightError, NodeWeightResult}, + vector_clock::VectorClock, +}; +use crate::ContentHash; + +pub type 
LineageId = Ulid; + +#[remain::sorted] +#[derive(Debug, Serialize, Deserialize, Clone, Copy, PartialEq, Eq)] +/// The type of the object, and the content-addressable-storage address (content hash) +/// of the object itself. +pub enum ContentAddress { + Component(ContentHash), + Func(ContentHash), + FuncArg(ContentHash), + Prop(ContentHash), + Root, + Schema(ContentHash), + SchemaVariant(ContentHash), +} + +impl ContentAddress { + fn content_hash(&self) -> ContentHash { + match self { + ContentAddress::Component(id) => Some(*id), + ContentAddress::Func(id) => Some(*id), + ContentAddress::FuncArg(id) => Some(*id), + ContentAddress::Prop(id) => Some(*id), + ContentAddress::Root => None, + ContentAddress::Schema(id) => Some(*id), + ContentAddress::SchemaVariant(id) => Some(*id), + } + .unwrap_or_default() + } +} + +#[derive(Serialize, Deserialize, Clone)] +pub struct ContentNodeWeight { + /// The stable local ID of the object in question. Mainly used by external things like + /// the UI to be able to say "do X to _this_ thing" since the `NodeIndex` is an + /// internal implementation detail, and the content ID wrapped by the + /// [`NodeWeightKind`] changes whenever something about the node itself changes (for + /// example, the name, or type of a [`Prop`].) + id: Ulid, + /// Globally stable ID for tracking the "lineage" of a thing to determine whether it + /// should be trying to receive updates. + lineage_id: LineageId, + /// What type of thing is this node representing, and what is the content hash used to + /// retrieve the data for this specific node. + content_address: ContentAddress, + /// [Merkle tree](https://en.wikipedia.org/wiki/Merkle_tree) hash for the graph + /// starting with this node as the root. Mainly useful in quickly determining "has + /// something changed anywhere in this (sub)graph". + merkle_tree_hash: ContentHash, + /// The first time a [`ChangeSet`] has "seen" this content. 
This is useful for determining + /// whether the absence of this content on one side or the other of a rebase/merge is because + /// the content is new, or because one side deleted it. + vector_clock_first_seen: VectorClock, + vector_clock_recently_seen: VectorClock, + vector_clock_write: VectorClock, +} + +impl ContentNodeWeight { + pub fn new( + change_set: &ChangeSet, + id: Ulid, + content_address: ContentAddress, + ) -> NodeWeightResult { + Ok(Self { + id, + lineage_id: change_set.generate_ulid()?, + content_address, + merkle_tree_hash: ContentHash::default(), + vector_clock_first_seen: VectorClock::new(change_set)?, + vector_clock_recently_seen: VectorClock::new(change_set)?, + vector_clock_write: VectorClock::new(change_set)?, + }) + } + + pub fn content_address(&self) -> ContentAddress { + self.content_address + } + + pub fn content_hash(&self) -> ContentHash { + self.content_address.content_hash() + } + + pub fn id(&self) -> Ulid { + self.id + } + + pub fn increment_vector_clock(&mut self, change_set: &ChangeSet) -> NodeWeightResult<()> { + self.vector_clock_write.inc(change_set)?; + self.vector_clock_recently_seen.inc(change_set)?; + + Ok(()) + } + + pub fn lineage_id(&self) -> Ulid { + self.lineage_id + } + + pub fn mark_seen(&mut self, change_set: &ChangeSet) -> NodeWeightResult<()> { + self.vector_clock_recently_seen + .inc(change_set) + .map_err(Into::into) + } + + pub fn merge_clocks( + &mut self, + change_set: &ChangeSet, + other: &ContentNodeWeight, + ) -> NodeWeightResult<()> { + self.vector_clock_write + .merge(change_set, &other.vector_clock_write)?; + self.vector_clock_first_seen + .merge(change_set, &other.vector_clock_first_seen)?; + self.vector_clock_recently_seen + .merge(change_set, &other.vector_clock_recently_seen)?; + + Ok(()) + } + + pub fn merkle_tree_hash(&self) -> ContentHash { + self.merkle_tree_hash + } + + pub fn new_content_hash(&mut self, content_hash: ContentHash) -> NodeWeightResult<()> { + let new_kind = match 
&self.content_address { + ContentAddress::Component(_) => ContentAddress::Component(content_hash), + ContentAddress::Func(_) => ContentAddress::Func(content_hash), + ContentAddress::FuncArg(_) => ContentAddress::FuncArg(content_hash), + ContentAddress::Prop(_) => ContentAddress::Prop(content_hash), + ContentAddress::Root => return Err(NodeWeightError::CannotUpdateRootNodeContentHash), + ContentAddress::Schema(_) => ContentAddress::Schema(content_hash), + ContentAddress::SchemaVariant(_) => ContentAddress::SchemaVariant(content_hash), + }; + + self.content_address = new_kind; + + Ok(()) + } + + pub fn new_with_incremented_vector_clock( + &self, + change_set: &ChangeSet, + ) -> NodeWeightResult { + let mut new_node_weight = self.clone(); + new_node_weight.increment_vector_clock(change_set)?; + + Ok(new_node_weight) + } + + pub fn set_merkle_tree_hash(&mut self, new_hash: ContentHash) { + self.merkle_tree_hash = new_hash; + } + + pub fn set_vector_clock_recently_seen_to( + &mut self, + change_set: &ChangeSet, + new_val: DateTime, + ) { + self.vector_clock_recently_seen.inc_to(change_set, new_val); + } + + pub fn vector_clock_first_seen(&self) -> &VectorClock { + &self.vector_clock_first_seen + } + + pub fn vector_clock_recently_seen(&self) -> &VectorClock { + &self.vector_clock_recently_seen + } + + pub fn vector_clock_write(&self) -> &VectorClock { + &self.vector_clock_write + } +} + +impl std::fmt::Debug for ContentNodeWeight { + fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { + f.debug_struct("NodeWeight") + .field("id", &self.id.to_string()) + .field("lineage_id", &self.lineage_id.to_string()) + .field("content_address", &self.content_address) + .field("merkle_tree_hash", &self.merkle_tree_hash) + .field("vector_clock_first_seen", &self.vector_clock_first_seen) + .field( + "vector_clock_recently_seen", + &self.vector_clock_recently_seen, + ) + .field("vector_clock_write", &self.vector_clock_write) + .finish() + } +} diff --git 
a/lib/dal/src/workspace_snapshot/node_weight/ordering_node_weight.rs b/lib/dal/src/workspace_snapshot/node_weight/ordering_node_weight.rs new file mode 100644 index 0000000000..3e05ab99c8 --- /dev/null +++ b/lib/dal/src/workspace_snapshot/node_weight/ordering_node_weight.rs @@ -0,0 +1,156 @@ +use chrono::{DateTime, Utc}; +use serde::{Deserialize, Serialize}; +use ulid::Ulid; + +use crate::{ + workspace_snapshot::{ + change_set::ChangeSet, node_weight::NodeWeightResult, vector_clock::VectorClock, + }, + ContentHash, +}; + +#[derive(Clone, Serialize, Deserialize, Default)] +pub struct OrderingNodeWeight { + id: Ulid, + lineage_id: Ulid, + /// The `id` of the items, in the order that they should appear in the container. + order: Vec, + content_hash: ContentHash, + merkle_tree_hash: ContentHash, + vector_clock_first_seen: VectorClock, + vector_clock_recently_seen: VectorClock, + vector_clock_write: VectorClock, +} + +impl OrderingNodeWeight { + pub fn content_hash(&self) -> ContentHash { + self.content_hash + } + + pub fn id(&self) -> Ulid { + self.id + } + + pub fn increment_seen_vector_clock(&mut self, change_set: &ChangeSet) -> NodeWeightResult<()> { + self.vector_clock_first_seen.inc(change_set)?; + + Ok(()) + } + + pub fn increment_vector_clock(&mut self, change_set: &ChangeSet) -> NodeWeightResult<()> { + self.vector_clock_write.inc(change_set).map_err(Into::into) + } + + pub fn lineage_id(&self) -> Ulid { + self.lineage_id + } + + pub fn merge_clocks( + &mut self, + change_set: &ChangeSet, + other: &OrderingNodeWeight, + ) -> NodeWeightResult<()> { + self.vector_clock_write + .merge(change_set, other.vector_clock_write())?; + self.vector_clock_first_seen + .merge(change_set, other.vector_clock_first_seen())?; + + Ok(()) + } + + pub fn merkle_tree_hash(&self) -> ContentHash { + self.merkle_tree_hash + } + + pub fn new(change_set: &ChangeSet) -> NodeWeightResult { + Ok(Self { + id: change_set.generate_ulid()?, + lineage_id: change_set.generate_ulid()?, + 
vector_clock_write: VectorClock::new(change_set)?, + vector_clock_first_seen: VectorClock::new(change_set)?, + ..Default::default() + }) + } + + pub fn new_with_incremented_vector_clock( + &self, + change_set: &ChangeSet, + ) -> NodeWeightResult { + let mut new_ordering_weight = self.clone(); + new_ordering_weight.increment_vector_clock(change_set)?; + + Ok(new_ordering_weight) + } + + pub fn order(&self) -> &Vec { + &self.order + } + + pub fn set_merkle_tree_hash(&mut self, new_hash: ContentHash) { + self.merkle_tree_hash = new_hash; + } + + pub fn set_order<'a>( + &mut self, + change_set: &ChangeSet, + order: impl AsRef<&'a [Ulid]>, + ) -> NodeWeightResult<()> { + self.order = Vec::from(*order.as_ref()); + self.update_content_hash(); + self.increment_seen_vector_clock(change_set)?; + + Ok(()) + } + + pub fn set_vector_clock_recently_seen_to( + &mut self, + change_set: &ChangeSet, + new_val: DateTime, + ) { + self.vector_clock_recently_seen.inc_to(change_set, new_val); + } + + fn update_content_hash(&mut self) { + let mut content_hasher = ContentHash::hasher(); + let concat_elements = self + .order + .iter() + .map(|e| e.to_string()) + .collect::>() + .join(" "); + let content_bytes = concat_elements.as_bytes(); + content_hasher.update(content_bytes); + + self.content_hash = content_hasher.finalize(); + } + + pub fn vector_clock_first_seen(&self) -> &VectorClock { + &self.vector_clock_first_seen + } + + pub fn vector_clock_recently_seen(&self) -> &VectorClock { + &self.vector_clock_recently_seen + } + + pub fn vector_clock_write(&self) -> &VectorClock { + &self.vector_clock_write + } +} + +impl std::fmt::Debug for OrderingNodeWeight { + fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { + f.debug_struct("NodeWeight") + .field("id", &self.id.to_string()) + .field("lineage_id", &self.lineage_id.to_string()) + .field("order", &self.order) + .field("content_hash", &self.content_hash) + .field("merkle_tree_hash", &self.merkle_tree_hash) + 
.field("vector_clock_first_seen", &self.vector_clock_first_seen) + .field( + "vector_clock_recently_seen", + &self.vector_clock_recently_seen, + ) + .field("vector_clock_write", &self.vector_clock_write) + .finish() + } +} diff --git a/lib/dal/src/workspace_snapshot/update.rs b/lib/dal/src/workspace_snapshot/update.rs new file mode 100644 index 0000000000..1ba2a9cbab --- /dev/null +++ b/lib/dal/src/workspace_snapshot/update.rs @@ -0,0 +1,21 @@ +use petgraph::prelude::*; + +use super::edge_weight::EdgeWeight; + +#[remain::sorted] +#[derive(Debug, Clone, PartialEq, Eq)] +pub enum Update { + NewEdge { + source: NodeIndex, + destination: NodeIndex, + edge_weight: EdgeWeight, + }, + NewSubgraph { + source: NodeIndex, + }, + RemoveEdge(EdgeIndex), + ReplaceSubgraph { + new: NodeIndex, + old: NodeIndex, + }, +} diff --git a/lib/dal/src/workspace_snapshot/vector_clock.rs b/lib/dal/src/workspace_snapshot/vector_clock.rs new file mode 100644 index 0000000000..09a5c2a058 --- /dev/null +++ b/lib/dal/src/workspace_snapshot/vector_clock.rs @@ -0,0 +1,115 @@ +//! Vector Clocks + +use std::collections::HashMap; + +use chrono::{DateTime, Utc}; +use serde::{Deserialize, Serialize}; +use thiserror::Error; +use ulid::Ulid; + +use crate::workspace_snapshot::{ + lamport_clock::{LamportClock, LamportClockError}, + {ChangeSet, ChangeSetId}, +}; + +#[derive(Debug, Error)] +pub enum VectorClockError { + #[error("Lamport Clock Error: {0}")] + LamportClock(#[from] LamportClockError), +} + +pub type VectorClockResult = Result; + +#[derive(Default, Serialize, Deserialize, PartialEq, Eq, Clone)] +pub struct VectorClock { + entries: HashMap, +} + +impl VectorClock { + /// Create a new [`VectorClock`] with an entry for [`ChangeSet`]. 
+ pub fn new(change_set: &ChangeSet) -> VectorClockResult { + let lamport_clock = LamportClock::new()?; + let mut entries = HashMap::new(); + entries.insert(change_set.id, lamport_clock); + + Ok(VectorClock { entries }) + } + + pub fn entry_for(&self, change_set: &ChangeSet) -> Option { + self.entries.get(&change_set.id).copied() + } + + pub fn has_entries_newer_than(&self, clock_stamp: LamportClock) -> bool { + self.entries.values().any(|v| *v > clock_stamp) + } + + pub fn inc_to(&mut self, change_set: &ChangeSet, new_clock_value: DateTime) { + if let Some(lamport_clock) = self.entries.get_mut(&change_set.id) { + lamport_clock.inc_to(new_clock_value); + } else { + self.entries + .insert(change_set.id, LamportClock::new_with_value(new_clock_value)); + } + } + + /// Increment the entry for [`ChangeSet`], adding one if there wasn't one already. + pub fn inc(&mut self, change_set: &ChangeSet) -> VectorClockResult<()> { + if let Some(lamport_clock) = self.entries.get_mut(&change_set.id) { + lamport_clock.inc()?; + } else { + self.entries.insert(change_set.id, LamportClock::new()?); + } + + Ok(()) + } + + /// Add all entries in `other` to `self`, taking the most recent value if the entry already + /// exists in `self`, then increment the entry for [`ChangeSet`] (adding one if it is not + /// already there). + pub fn merge(&mut self, change_set: &ChangeSet, other: &VectorClock) -> VectorClockResult<()> { + for (other_change_set_id, other_lamport_clock) in other.entries.iter() { + if let Some(lamport_clock) = self.entries.get_mut(other_change_set_id) { + lamport_clock.merge(other_lamport_clock); + } else { + self.entries + .insert(*other_change_set_id, *other_lamport_clock); + } + } + self.inc(change_set)?; + + Ok(()) + } + + /// Return a new [`VectorClock`] with the entry for [`ChangeSet`] incremented. 
+ pub fn fork(&self, change_set: &ChangeSet) -> VectorClockResult { + let mut forked = self.clone(); + forked.inc(change_set)?; + + Ok(forked) + } + + /// Returns true if all entries in `other` are present in `self`, and `<=` the entry in + /// `self`, meaning that `self` has already seen/incorporated all of the information + /// in `other`. + pub fn is_newer_than(&self, other: &VectorClock) -> bool { + for (other_change_set_id, other_lamport_clock) in &other.entries { + if let Some(my_clock) = self.entries.get(other_change_set_id) { + if other_lamport_clock > my_clock { + return false; + } + } else { + return false; + } + } + + true + } +} + +impl std::fmt::Debug for VectorClock { + fn fmt(&self, fmt: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + fmt.debug_map() + .entries(self.entries.iter().map(|(k, v)| (k.to_string(), v))) + .finish() + } +} diff --git a/lib/gobbler-server/BUCK b/lib/gobbler-server/BUCK new file mode 100644 index 0000000000..21e4b8bef1 --- /dev/null +++ b/lib/gobbler-server/BUCK @@ -0,0 +1,69 @@ +load("@prelude-si//:macros.bzl", "rust_library") + +rust_library( + name = "gobbler-server", + deps = [ + "//lib/buck2-resources:buck2-resources", + "//lib/dal:dal", + "//lib/nats-subscriber:nats-subscriber", + "//lib/si-crypto:si-crypto", + "//lib/si-data-nats:si-data-nats", + "//lib/si-data-pg:si-data-pg", + "//lib/si-rabbitmq:si-rabbitmq", + "//lib/si-settings:si-settings", + "//lib/si-std:si-std", + "//lib/si-test-macros:si-test-macros", + "//lib/telemetry-rs:telemetry", + "//lib/veritech-client:veritech-client", + "//third-party/rust:derive_builder", + "//third-party/rust:futures", + "//third-party/rust:remain", + "//third-party/rust:serde", + "//third-party/rust:serde_json", + "//third-party/rust:stream-cancel", + "//third-party/rust:thiserror", + "//third-party/rust:tokio", + "//third-party/rust:tokio-stream", + "//third-party/rust:ulid", + ], + srcs = glob([ + "src/**/*.rs", + ]), +) + +rust_test( + name = "test-integration", + deps = [ 
+ "//lib/dal:dal", + "//lib/dal-test:dal-test", + "//lib/si-pkg:si-pkg", + "//lib/si-rabbitmq:si-rabbitmq", + "//lib/si-test-macros:si-test-macros", + "//lib/veritech-client:veritech-client", + "//third-party/rust:base64", + "//third-party/rust:itertools", + "//third-party/rust:pretty_assertions_sorted", + "//third-party/rust:serde_json", + "//third-party/rust:sodiumoxide", + "//third-party/rust:strum", + "//third-party/rust:tempfile", + "//third-party/rust:tokio", + "//third-party/rust:ulid", + ], + crate_root = "tests/integration.rs", + srcs = glob(["tests/**/*.rs"]), + env = { + "CARGO_PKG_NAME": "integration", + }, + resources = { + "cyclone": "//bin/cyclone:cyclone", + "dev.decryption.key": "//lib/cyclone-server:dev.decryption.key", + "dev.encryption.key": "//lib/cyclone-server:dev.encryption.key", + "dev.jwt_signing_private_key.pem": "//config/keys:dev.jwt_signing_private_key.pem", + "dev.jwt_signing_public_key.pem": "//config/keys:dev.jwt_signing_public_key.pem", + "lang-js": "//bin/lang-js:lang-js", + "pkgs_path": "//pkgs:pkgs", + "prod.jwt_signing_public_key.pem": "//config/keys:prod.jwt_signing_public_key.pem", + }, +) + diff --git a/lib/gobbler-server/Cargo.toml b/lib/gobbler-server/Cargo.toml new file mode 100644 index 0000000000..253052fe4e --- /dev/null +++ b/lib/gobbler-server/Cargo.toml @@ -0,0 +1,33 @@ +[package] +name = "gobbler-server" +version = "0.1.0" +edition = "2021" +rust-version = "1.64" +publish = false + +[dependencies] +buck2-resources = { path = "../../lib/buck2-resources" } +dal = { path = "../../lib/dal" } +derive_builder = { workspace = true } +futures = { workspace = true } +nats-subscriber = { path = "../../lib/nats-subscriber" } +remain = { workspace = true } +serde = { workspace = true } +serde_json = { workspace = true } +si-crypto = { path = "../../lib/si-crypto" } +si-data-nats = { path = "../../lib/si-data-nats" } +si-data-pg = { path = "../../lib/si-data-pg" } +si-rabbitmq = { path = "../../lib/si-rabbitmq" } +si-settings = 
{ path = "../../lib/si-settings" } +si-std = { path = "../../lib/si-std" } +si-test-macros = { path = "../../lib/si-test-macros" } +stream-cancel = { workspace = true } +telemetry = { path = "../../lib/telemetry-rs" } +thiserror = { workspace = true } +tokio = { workspace = true } +tokio-stream = { workspace = true } +ulid = { workspace = true } +veritech-client = { path = "../../lib/veritech-client" } + +[dev-dependencies] +dal-test = { path = "../../lib/dal-test" } diff --git a/lib/gobbler-server/src/config.rs b/lib/gobbler-server/src/config.rs new file mode 100644 index 0000000000..3673f79b8a --- /dev/null +++ b/lib/gobbler-server/src/config.rs @@ -0,0 +1,197 @@ +use std::{env, path::Path}; + +use buck2_resources::Buck2Resources; +use derive_builder::Builder; +use serde::{Deserialize, Serialize}; +use si_data_nats::NatsConfig; +use si_data_pg::PgPoolConfig; +use si_std::CanonicalFile; +use si_std::CanonicalFileError; +use telemetry::prelude::*; +use thiserror::Error; +use ulid::Ulid; + +pub use si_settings::{StandardConfig, StandardConfigFile}; + +const DEFAULT_CONCURRENCY_LIMIT: usize = 5; + +#[remain::sorted] +#[derive(Debug, Error)] +pub enum ConfigError { + #[error("config builder")] + Builder(#[from] ConfigBuilderError), + #[error(transparent)] + CanonicalFile(#[from] CanonicalFileError), + #[error("error configuring for development")] + Development(#[source] Box), + #[error(transparent)] + Settings(#[from] si_settings::SettingsError), +} + +impl ConfigError { + fn development(err: impl std::error::Error + 'static + Sync + Send) -> Self { + Self::Development(Box::new(err)) + } +} + +type Result = std::result::Result; + +#[derive(Debug, Builder)] +pub struct Config { + #[builder(default = "PgPoolConfig::default()")] + pg_pool: PgPoolConfig, + + #[builder(default = "NatsConfig::default()")] + nats: NatsConfig, + + cyclone_encryption_key_path: CanonicalFile, + + #[builder(default = "default_concurrency_limit()")] + concurrency: usize, + + #[builder(default = 
"random_instance_id()")] + instance_id: String, +} + +impl StandardConfig for Config { + type Builder = ConfigBuilder; +} + +impl Config { + /// Gets a reference to the config's pg pool. + #[must_use] + pub fn pg_pool(&self) -> &PgPoolConfig { + &self.pg_pool + } + + /// Gets a reference to the config's nats. + #[must_use] + pub fn nats(&self) -> &NatsConfig { + &self.nats + } + + /// Gets a reference to the config's subject prefix. + pub fn subject_prefix(&self) -> Option<&str> { + self.nats.subject_prefix.as_deref() + } + + /// Gets a reference to the config's cyclone public key path. + #[must_use] + pub fn cyclone_encryption_key_path(&self) -> &Path { + self.cyclone_encryption_key_path.as_path() + } + + /// Gets the config's concurrency limit. + pub fn concurrency(&self) -> usize { + self.concurrency + } + + /// Gets the config's instance ID. + pub fn instance_id(&self) -> &str { + self.instance_id.as_ref() + } +} + +#[derive(Clone, Debug, Deserialize, Serialize)] +pub struct ConfigFile { + #[serde(default)] + pg: PgPoolConfig, + #[serde(default)] + nats: NatsConfig, + #[serde(default = "default_cyclone_encryption_key_path")] + cyclone_encryption_key_path: String, + #[serde(default = "default_concurrency_limit")] + concurrency_limit: usize, + #[serde(default = "random_instance_id")] + instance_id: String, +} + +impl Default for ConfigFile { + fn default() -> Self { + Self { + pg: Default::default(), + nats: Default::default(), + cyclone_encryption_key_path: default_cyclone_encryption_key_path(), + concurrency_limit: default_concurrency_limit(), + instance_id: random_instance_id(), + } + } +} + +impl StandardConfigFile for ConfigFile { + type Error = ConfigError; +} + +impl TryFrom for Config { + type Error = ConfigError; + + fn try_from(mut value: ConfigFile) -> Result { + detect_and_configure_development(&mut value)?; + + let mut config = Config::builder(); + config.pg_pool(value.pg); + config.nats(value.nats); + 
config.cyclone_encryption_key_path(value.cyclone_encryption_key_path.try_into()?); + config.concurrency(value.concurrency_limit); + config.instance_id(value.instance_id); + config.build().map_err(Into::into) + } +} + +fn random_instance_id() -> String { + Ulid::new().to_string() +} + +fn default_cyclone_encryption_key_path() -> String { + "/run/gobbler/cyclone_encryption.key".to_string() +} + +fn default_concurrency_limit() -> usize { + DEFAULT_CONCURRENCY_LIMIT +} + +#[allow(clippy::disallowed_methods)] // Used to determine if running in development +pub fn detect_and_configure_development(config: &mut ConfigFile) -> Result<()> { + if env::var("BUCK_RUN_BUILD_ID").is_ok() || env::var("BUCK_BUILD_ID").is_ok() { + buck2_development(config) + } else if let Ok(dir) = env::var("CARGO_MANIFEST_DIR") { + cargo_development(dir, config) + } else { + Ok(()) + } +} + +fn buck2_development(config: &mut ConfigFile) -> Result<()> { + let resources = Buck2Resources::read().map_err(ConfigError::development)?; + + let cyclone_encryption_key_path = resources + .get_ends_with("dev.encryption.key") + .map_err(ConfigError::development)? 
+ .to_string_lossy() + .to_string(); + + warn!( + cyclone_encryption_key_path = cyclone_encryption_key_path.as_str(), + "detected development run", + ); + + config.cyclone_encryption_key_path = cyclone_encryption_key_path; + + Ok(()) +} + +fn cargo_development(dir: String, config: &mut ConfigFile) -> Result<()> { + let cyclone_encryption_key_path = Path::new(&dir) + .join("../../lib/cyclone-server/src/dev.encryption.key") + .to_string_lossy() + .to_string(); + + warn!( + cyclone_encryption_key_path = cyclone_encryption_key_path.as_str(), + "detected development run", + ); + + config.cyclone_encryption_key_path = cyclone_encryption_key_path; + + Ok(()) +} diff --git a/lib/gobbler-server/src/lib.rs b/lib/gobbler-server/src/lib.rs new file mode 100644 index 0000000000..e284820890 --- /dev/null +++ b/lib/gobbler-server/src/lib.rs @@ -0,0 +1,25 @@ +mod config; +pub mod server; + +pub use crate::{ + config::{ + detect_and_configure_development, Config, ConfigBuilder, ConfigError, ConfigFile, + StandardConfig, StandardConfigFile, + }, + server::{Server, ServerError}, +}; + +const NATS_JOBS_DEFAULT_SUBJECT: &str = "gobbler-jobs"; +const NATS_JOBS_DEFAULT_QUEUE: &str = "gobbler"; + +pub fn nats_jobs_subject(prefix: Option<&str>) -> String { + nats_subject(prefix, NATS_JOBS_DEFAULT_SUBJECT) +} + +pub fn nats_subject(prefix: Option<&str>, suffix: impl AsRef) -> String { + let suffix = suffix.as_ref(); + match prefix { + Some(prefix) => format!("{prefix}.{suffix}"), + None => suffix.to_string(), + } +} diff --git a/lib/gobbler-server/src/server.rs b/lib/gobbler-server/src/server.rs new file mode 100644 index 0000000000..dbaa2d1106 --- /dev/null +++ b/lib/gobbler-server/src/server.rs @@ -0,0 +1,608 @@ +use std::{io, path::Path, sync::Arc}; + +use dal::{ + job::{ + consumer::{JobConsumer, JobConsumerError, JobInfo}, + definition::{FixesJob, RefreshJob}, + producer::BlockingJobError, + }, + DalContext, DalContextBuilder, DependentValuesUpdate, InitializationError, JobFailure, + 
JobFailureError, JobQueueProcessor, NatsProcessor, ServicesContext, TransactionsError, +}; +use futures::{FutureExt, Stream, StreamExt}; +use nats_subscriber::{Request, SubscriberError}; +use si_crypto::SymmetricCryptoService; +use si_data_nats::{NatsClient, NatsConfig, NatsError}; +use si_data_pg::{PgPool, PgPoolConfig, PgPoolError}; +use stream_cancel::StreamExt as StreamCancelStreamExt; +use telemetry::prelude::*; +use thiserror::Error; +use tokio::{ + signal::unix, + sync::{ + mpsc::{self, UnboundedReceiver, UnboundedSender}, + oneshot, watch, + }, + task, +}; +use tokio_stream::wrappers::UnboundedReceiverStream; +use veritech_client::{Client as VeritechClient, CycloneEncryptionKey, CycloneEncryptionKeyError}; + +use crate::{nats_jobs_subject, Config, NATS_JOBS_DEFAULT_QUEUE}; + +#[remain::sorted] +#[derive(Debug, Error)] +pub enum ServerError { + #[error("error when loading encryption key: {0}")] + CycloneEncryptionKey(#[from] CycloneEncryptionKeyError), + #[error(transparent)] + Initialization(#[from] InitializationError), + #[error(transparent)] + JobConsumer(#[from] JobConsumerError), + #[error(transparent)] + JobFailure(#[from] Box), + #[error(transparent)] + Nats(#[from] NatsError), + #[error(transparent)] + PgPool(#[from] Box), + #[error(transparent)] + SerdeJson(#[from] serde_json::Error), + #[error("failed to setup signal handler")] + Signal(#[source] io::Error), + #[error(transparent)] + Subscriber(#[from] SubscriberError), + #[error(transparent)] + Transactions(#[from] Box), + #[error("unknown job kind {0}")] + UnknownJobKind(String), +} + +impl From for ServerError { + fn from(e: PgPoolError) -> Self { + Self::PgPool(Box::new(e)) + } +} + +impl From for ServerError { + fn from(e: JobFailureError) -> Self { + Self::JobFailure(Box::new(e)) + } +} + +impl From for ServerError { + fn from(e: TransactionsError) -> Self { + Self::Transactions(Box::new(e)) + } +} + +type Result = std::result::Result; + +pub struct Server { + concurrency_limit: usize, + 
encryption_key: Arc, + nats: NatsClient, + pg_pool: PgPool, + veritech: VeritechClient, + job_processor: Box, + /// An internal shutdown watch receiver handle which can be provided to internal tasks which + /// want to be notified when a shutdown event is in progress. + shutdown_watch_rx: watch::Receiver<()>, + /// An external shutdown sender handle which can be handed out to external callers who wish to + /// trigger a server shutdown at will. + external_shutdown_tx: mpsc::Sender, + /// An internal graceful shutdown receiever handle which the server's main thread uses to stop + /// accepting work when a shutdown event is in progress. + graceful_shutdown_rx: oneshot::Receiver<()>, + metadata: Arc, +} + +impl Server { + #[instrument(name = "gobbler.init.from_config", skip_all)] + pub async fn from_config(config: Config) -> Result { + dal::init()?; + + let encryption_key = + Self::load_encryption_key(config.cyclone_encryption_key_path()).await?; + let nats = Self::connect_to_nats(config.nats()).await?; + let pg_pool = Self::create_pg_pool(config.pg_pool()).await?; + let veritech = Self::create_veritech_client(nats.clone()); + let job_processor = Self::create_job_processor(nats.clone()); + + Self::from_services( + config.instance_id().to_string(), + config.concurrency(), + encryption_key, + nats, + pg_pool, + veritech, + job_processor, + ) + } + + #[allow(clippy::too_many_arguments)] + #[instrument(name = "gobbler.init.from_services", skip_all)] + pub fn from_services( + instance_id: impl Into, + concurrency_limit: usize, + encryption_key: Arc, + nats: NatsClient, + pg_pool: PgPool, + veritech: VeritechClient, + job_processor: Box, + ) -> Result { + // An mpsc channel which can be used to externally shut down the server. + let (external_shutdown_tx, external_shutdown_rx) = mpsc::channel(4); + // A watch channel used to notify internal parts of the server that a shutdown event is in + // progress. 
The value passed along is irrelevant--we only care that the event was + // triggered and react accordingly. + let (shutdown_watch_tx, shutdown_watch_rx) = watch::channel(()); + + dal::init()?; + + let metadata = ServerMetadata { + job_instance: instance_id.into(), + job_invoked_provider: "si", + }; + + let graceful_shutdown_rx = + prepare_graceful_shutdown(external_shutdown_rx, shutdown_watch_tx)?; + + Ok(Server { + concurrency_limit, + pg_pool, + nats, + veritech, + encryption_key, + job_processor, + shutdown_watch_rx, + external_shutdown_tx, + graceful_shutdown_rx, + metadata: Arc::new(metadata), + }) + } + + pub async fn run(self) -> Result<()> { + let (tx, rx) = mpsc::unbounded_channel(); + + // Span a task to receive and process jobs from the unbounded channel + drop(task::spawn(process_job_requests_task( + rx, + self.concurrency_limit, + ))); + + // Run "the main loop" which pulls message from a subscription off NATS and forwards each + // request to an unbounded channel + receive_job_requests_task( + tx, + self.metadata, + self.pg_pool, + self.nats, + self.veritech, + self.job_processor, + self.encryption_key, + self.shutdown_watch_rx, + ) + .await; + + let _ = self.graceful_shutdown_rx.await; + info!("received and processed graceful shutdown, terminating server instance"); + + Ok(()) + } + + /// Gets a [`ShutdownHandle`](GobblerShutdownHandle) that can externally or on demand trigger the server's shutdown + /// process. 
+ pub fn shutdown_handle(&self) -> GobblerShutdownHandle { + GobblerShutdownHandle { + shutdown_tx: self.external_shutdown_tx.clone(), + } + } + + #[instrument(name = "gobbler.init.load_encryption_key", skip_all)] + async fn load_encryption_key(path: impl AsRef) -> Result> { + Ok(Arc::new(CycloneEncryptionKey::load(path).await?)) + } + + #[instrument(name = "gobbler.init.connect_to_nats", skip_all)] + async fn connect_to_nats(nats_config: &NatsConfig) -> Result { + let client = NatsClient::new(nats_config).await?; + debug!("successfully connected nats client"); + Ok(client) + } + + #[instrument(name = "gobbler.init.create_pg_pool", skip_all)] + async fn create_pg_pool(pg_pool_config: &PgPoolConfig) -> Result { + let pool = PgPool::new(pg_pool_config).await?; + debug!("successfully started pg pool (note that not all connections may be healthy)"); + Ok(pool) + } + + #[instrument(name = "gobbler.init.create_veritech_client", skip_all)] + fn create_veritech_client(nats: NatsClient) -> VeritechClient { + VeritechClient::new(nats) + } + + #[instrument(name = "gobbler.init.create_job_processor", skip_all)] + fn create_job_processor(nats: NatsClient) -> Box { + Box::new(NatsProcessor::new(nats)) as Box + } +} + +#[derive(Clone, Debug)] +pub struct ServerMetadata { + job_instance: String, + job_invoked_provider: &'static str, +} + +pub struct GobblerShutdownHandle { + shutdown_tx: mpsc::Sender, +} + +impl GobblerShutdownHandle { + pub async fn shutdown(self) { + if let Err(err) = self.shutdown_tx.send(ShutdownSource::Handle).await { + warn!(error = ?err, "shutdown tx returned error, receiver is likely already closed"); + } + } +} + +#[remain::sorted] +#[derive(Debug, Eq, PartialEq)] +pub enum ShutdownSource { + Handle, +} + +impl Default for ShutdownSource { + fn default() -> Self { + Self::Handle + } +} + +pub struct JobItem { + metadata: Arc, + messaging_destination: Arc, + ctx_builder: DalContextBuilder, + request: Result>, +} + +pub struct Subscriber; + +impl Subscriber 
{ + pub async fn jobs( + metadata: Arc, + pg_pool: PgPool, + nats: NatsClient, + veritech: veritech_client::Client, + job_processor: Box, + encryption_key: Arc, + ) -> Result> { + let subject = nats_jobs_subject(nats.metadata().subject_prefix()); + debug!( + messaging.destination = &subject.as_str(), + "subscribing for job requests" + ); + + let services_context = ServicesContext::new( + pg_pool, + nats.clone(), + job_processor, + veritech.clone(), + encryption_key, + None, + None, + (), + ); + + // Make non blocking context here, and update it for each job + // Since the any blocking job should block on its child jobs + let ctx_builder = DalContext::builder(services_context, false); + + let messaging_destination = Arc::new(subject.clone()); + + Ok(nats_subscriber::Subscriber::create(subject) + .queue_name(NATS_JOBS_DEFAULT_QUEUE) + .start(&nats) + .await? + .map(move |request| JobItem { + metadata: metadata.clone(), + messaging_destination: messaging_destination.clone(), + ctx_builder: ctx_builder.clone(), + request: request.map_err(Into::into), + })) + } +} + +#[allow(clippy::too_many_arguments)] +async fn receive_job_requests_task( + tx: UnboundedSender, + metadata: Arc, + pg_pool: PgPool, + nats: NatsClient, + veritech: veritech_client::Client, + job_processor: Box, + encryption_key: Arc, + shutdown_watch_rx: watch::Receiver<()>, +) { + if let Err(err) = receive_job_requests( + tx, + metadata, + pg_pool, + nats, + veritech, + job_processor, + encryption_key, + shutdown_watch_rx, + ) + .await + { + warn!(error = ?err, "processing job requests failed"); + } +} + +#[allow(clippy::too_many_arguments)] +async fn receive_job_requests( + tx: UnboundedSender, + metadata: Arc, + pg_pool: PgPool, + nats: NatsClient, + veritech: veritech_client::Client, + job_processor: Box, + encryption_key: Arc, + mut shutdown_watch_rx: watch::Receiver<()>, +) -> Result<()> { + let mut requests = Subscriber::jobs( + metadata, + pg_pool, + nats, + veritech, + job_processor, + 
encryption_key, + ) + .await? + .take_until_if(Box::pin(shutdown_watch_rx.changed().map(|_| true))); + + // Forward each request off the stream to a consuming task via an *unbounded* channel so we + // buffer requests until we run out of memory. Have fun! + while let Some(job) = requests.next().await { + if let Err(_job) = tx.send(job) { + error!("process_job_requests rx has already closed"); + } + } + + Ok(()) +} + +async fn process_job_requests_task(rx: UnboundedReceiver, concurrency_limit: usize) { + UnboundedReceiverStream::new(rx) + .for_each_concurrent(concurrency_limit, |job| async move { + // Got the next message from the subscriber + trace!("pulled request into an available concurrent task"); + + match job.request { + Ok(request) => { + // Spawn a task and process the request + let join_handle = task::spawn(execute_job_task( + job.metadata, + job.messaging_destination, + job.ctx_builder, + request, + )); + if let Err(err) = join_handle.await { + // NOTE(fnichol): This likely happens when there is contention or + // an error in the Tokio runtime so we will be loud and log an + // error under the assumptions that 1) this event rarely + // happens and 2) the task code did not contribute to trigger + // the `JoinError`. 
+ error!( + error = ?err, + "execute-job-task failed to execute to completion" + ); + }; + } + Err(err) => { + warn!(error = ?err, "next job request had an error, job will not be executed"); + } + } + }) + .await; +} + +#[instrument( +name = "execute_job_task", +skip_all, +fields( +job.id = request.payload.id, +job.instance = metadata.job_instance, +job.invoked_name = request.payload.kind, +job.invoked_args = Empty, +job.invoked_provider = metadata.job_invoked_provider, +job.trigger = "pubsub", +messaging.destination = Empty, +messaging.destination_kind = "topic", +messaging.operation = "process", +otel.kind = % FormattedSpanKind(SpanKind::Consumer), +otel.name = Empty, +otel.status_code = Empty, +otel.status_message = Empty, +) +)] +async fn execute_job_task( + metadata: Arc, + messaging_destination: Arc, + ctx_builder: DalContextBuilder, + request: Request, +) { + let span = Span::current(); + let id = request.payload.id.clone(); + + let arg_str = serde_json::to_string(&request.payload.arg) + .unwrap_or_else(|_| "arg failed to serialize".to_string()); + + span.record("job.invoked_arg", arg_str); + span.record("messaging.destination", messaging_destination.as_str()); + span.record( + "otel.name", + format!("{} process", &messaging_destination).as_str(), + ); + + let maybe_reply_channel = request.reply_mailbox.clone(); + let reply_message = match execute_job( + &metadata, + messaging_destination, + ctx_builder.clone(), + request, + ) + .await + { + Ok(_) => { + span.record_ok(); + Ok(()) + } + Err(err) => { + error!( + error = ?err, + job.invocation_id = %id, + job.instance = &metadata.job_instance, + "job execution failed" + ); + let new_err = Err(BlockingJobError::JobExecution(err.to_string())); + span.record_err(err); + + new_err + } + }; + + if let Some(reply_channel) = maybe_reply_channel { + if let Ok(message) = serde_json::to_vec(&reply_message) { + if let Err(err) = ctx_builder + .nats_conn() + .publish(reply_channel, message) + .await + { + error!(error = 
?err, "Unable to notify spawning job of blocking job completion"); + }; + } + } +} + +async fn execute_job( + _metadata: &Arc, + _messaging_destination: Arc, + mut ctx_builder: DalContextBuilder, + request: Request, +) -> Result<()> { + let (job_info, _) = request.into_parts(); + if job_info.blocking { + ctx_builder.set_blocking(); + } + + let current_span = tracing::Span::current(); + if !current_span.is_disabled() { + tracing::Span::current().record("job_info.id", &job_info.id); + tracing::Span::current().record("job_info.kind", &job_info.kind); + let arg_str = serde_json::to_string(&job_info.arg)?; + tracing::Span::current().record("job_info.arg", arg_str); + tracing::Span::current().record( + "job_info.access_builder", + serde_json::to_string(&job_info.access_builder)?, + ); + tracing::Span::current().record( + "job_info.visibility", + serde_json::to_string(&job_info.visibility)?, + ); + tracing::Span::current().record("job_info.blocking", job_info.blocking); + } + + let job = + match job_info.kind.as_str() { + stringify!(DependentValuesUpdate) => { + Box::new(DependentValuesUpdate::try_from(job_info.clone())?) + as Box + } + stringify!(FixesJob) => Box::new(FixesJob::try_from(job_info.clone())?) + as Box, + stringify!(RefreshJob) => Box::new(RefreshJob::try_from(job_info.clone())?) + as Box, + kind => return Err(ServerError::UnknownJobKind(kind.to_owned())), + }; + + info!("Processing job"); + + if let Err(err) = job.run_job(ctx_builder.clone()).await { + // The missing part is this, should we execute subsequent jobs if the one they depend on fail or not? 
+ record_job_failure(ctx_builder, job, err).await?; + } + + info!("Finished processing job"); + + Ok(()) +} + +async fn record_job_failure( + ctx_builder: DalContextBuilder, + job: Box, + err: JobConsumerError, +) -> Result<()> { + warn!(error = ?err, "job execution failed, recording a job failure to the database"); + + let access_builder = job.access_builder(); + let visibility = job.visibility(); + let ctx = ctx_builder.build(access_builder.build(visibility)).await?; + + JobFailure::new(&ctx, job.type_name(), err.to_string()).await?; + + ctx.commit().await?; + + Err(err.into()) +} + +fn prepare_graceful_shutdown( + mut external_shutdown_rx: mpsc::Receiver, + shutdown_watch_tx: watch::Sender<()>, +) -> Result> { + // A oneshot channel signaling the start of a graceful shutdown. Receivers can use this to + // perform an clean/graceful shutdown work that needs to happen to preserve server integrity. + let (graceful_shutdown_tx, graceful_shutdown_rx) = oneshot::channel::<()>(); + // A stream of `SIGTERM` signals, emitted as the process receives them. + let mut sigterm_stream = + unix::signal(unix::SignalKind::terminate()).map_err(ServerError::Signal)?; + + tokio::spawn(async move { + fn send_graceful_shutdown( + graceful_shutdown_tx: oneshot::Sender<()>, + shutdown_watch_tx: watch::Sender<()>, + ) { + // Send shutdown to all long running subscriptions, so they can cleanly terminate + if shutdown_watch_tx.send(()).is_err() { + error!("all watch shutdown receivers have already been dropped"); + } + // Send graceful shutdown to main server thread which stops it from accepting requests. + // We'll do this step last so as to let all subscriptions have a chance to shutdown. + if graceful_shutdown_tx.send(()).is_err() { + error!("the server graceful shutdown receiver has already dropped"); + } + } + + info!("spawned graceful shutdown handler"); + + tokio::select! 
{ + _ = sigterm_stream.recv() => { + info!("received SIGTERM signal, performing graceful shutdown"); + send_graceful_shutdown(graceful_shutdown_tx, shutdown_watch_tx); + } + source = external_shutdown_rx.recv() => { + info!( + "received external shutdown, performing graceful shutdown; source={:?}", + source, + ); + send_graceful_shutdown(graceful_shutdown_tx, shutdown_watch_tx); + } + else => { + // All other arms are closed, nothing left to do but return + trace!("returning from graceful shutdown with all select arms closed"); + } + }; + }); + + Ok(graceful_shutdown_rx) +} diff --git a/lib/gobbler-server/tests/integration.rs b/lib/gobbler-server/tests/integration.rs new file mode 100644 index 0000000000..9fad204f79 --- /dev/null +++ b/lib/gobbler-server/tests/integration.rs @@ -0,0 +1,3 @@ +const TEST_PG_DBNAME: &str = "si_test_gobbler"; + +mod integration_test; diff --git a/lib/gobbler-server/tests/integration_test/connection.rs b/lib/gobbler-server/tests/integration_test/connection.rs new file mode 100644 index 0000000000..60e14bc9b3 --- /dev/null +++ b/lib/gobbler-server/tests/integration_test/connection.rs @@ -0,0 +1,25 @@ +use dal::workspace_snapshot::change_set::ChangeSet; +use dal::{DalContext, WorkspaceSnapshot}; +use si_rabbitmq::Connection; +use si_test_macros::gobbler_test as test; + +/// Recommended to run with the following environment variable: +/// ```shell +/// SI_TEST_BUILTIN_SCHEMAS=none +/// ``` +#[test] +async fn create_snapshot(ctx: &DalContext) { + let change_set = ChangeSet::new().expect("could not create change set"); + let _snapshot = WorkspaceSnapshot::new(ctx, &change_set) + .await + .expect("could not create snapshot"); +} + +/// Recommended to run with the following environment variable: +/// ```shell +/// SI_TEST_BUILTIN_SCHEMAS=none +/// ``` +#[test] +async fn connect_to_queue(_ctx: &DalContext) { + let _ = Connection::new().await.expect("could not connect"); +} diff --git a/lib/gobbler-server/tests/integration_test/mod.rs 
b/lib/gobbler-server/tests/integration_test/mod.rs new file mode 100644 index 0000000000..6fa1f6955a --- /dev/null +++ b/lib/gobbler-server/tests/integration_test/mod.rs @@ -0,0 +1 @@ +mod connection; diff --git a/lib/si-rabbitmq/BUCK b/lib/si-rabbitmq/BUCK new file mode 100644 index 0000000000..18b9005d0e --- /dev/null +++ b/lib/si-rabbitmq/BUCK @@ -0,0 +1,21 @@ +load("@prelude-si//:macros.bzl", "rust_library") + +rust_library( + name = "si-rabbitmq", + deps = [ + "//lib/si-data-nats:si-data-nats", + "//lib/si-test-macros:si-test-macros", + "//lib/telemetry-rs:telemetry", + "//third-party/rust:futures", + "//third-party/rust:futures-lite", + "//third-party/rust:pin-project-lite", + "//third-party/rust:rabbitmq-stream-client", + "//third-party/rust:remain", + "//third-party/rust:serde", + "//third-party/rust:serde_json", + "//third-party/rust:thiserror", + "//third-party/rust:tokio", + "//third-party/rust:ulid", + ], + srcs = glob(["src/**/*.rs"]), +) diff --git a/lib/si-rabbitmq/Cargo.toml b/lib/si-rabbitmq/Cargo.toml new file mode 100644 index 0000000000..b6deedf7e9 --- /dev/null +++ b/lib/si-rabbitmq/Cargo.toml @@ -0,0 +1,20 @@ +[package] +name = "si-rabbitmq" +version = "0.1.0" +edition = "2021" +publish = false + +[dependencies] +futures = { workspace = true } +futures-lite = { workspace = true } +pin-project-lite = { workspace = true } +rabbitmq-stream-client = { workspace = true } +remain = { workspace = true } +serde = { workspace = true } +serde_json = { workspace = true } +si-data-nats = { path = "../../lib/si-data-nats" } +si-test-macros = { path = "../../lib/si-test-macros" } +telemetry = { path = "../../lib/telemetry-rs" } +thiserror = { workspace = true } +tokio = { workspace = true } +ulid = { workspace = true } diff --git a/lib/si-rabbitmq/src/connection.rs b/lib/si-rabbitmq/src/connection.rs new file mode 100644 index 0000000000..7ee344a374 --- /dev/null +++ b/lib/si-rabbitmq/src/connection.rs @@ -0,0 +1,40 @@ +use 
rabbitmq_stream_client::types::ByteCapacity; +use rabbitmq_stream_client::Environment; + +use crate::error::RabbitResult; + +/// A connection to a RabbitMQ node. +#[allow(missing_debug_implementations)] +pub struct Connection { + environment: Environment, +} + +impl Connection { + /// Creates a new [`Connection`], which contains a connection to a RabbitMQ node. + pub async fn new() -> RabbitResult<Self> { + let environment = Environment::builder() + .host("localhost") + .port(5672) + .build() + .await?; + Ok(Self { environment }) + } + + /// Returns the inner data structure handling the connection. + pub fn inner(&self) -> &Environment { + &self.environment + } + + pub async fn create_stream(&self, stream: impl AsRef<str>) -> RabbitResult<()> { + Ok(self + .environment + .stream_creator() + .max_length(ByteCapacity::KB(400)) + .create(stream.as_ref()) + .await?) + } + + pub async fn delete_stream(&self, stream: impl AsRef<str>) -> RabbitResult<()> { + Ok(self.environment.delete_stream(stream.as_ref()).await?) + } +} diff --git a/lib/si-rabbitmq/src/consumer.rs b/lib/si-rabbitmq/src/consumer.rs new file mode 100644 index 0000000000..fc7a4e450c --- /dev/null +++ b/lib/si-rabbitmq/src/consumer.rs @@ -0,0 +1,37 @@ +use crate::connection::Connection; +use futures::StreamExt; +use rabbitmq_stream_client::error::ConsumerDeliveryError; +use rabbitmq_stream_client::types::Delivery; +use rabbitmq_stream_client::Consumer as UpstreamConsumer; +use tokio::task; + +use crate::RabbitResult; + +/// An interface for consuming RabbitMQ stream messages. +#[allow(missing_debug_implementations)] +pub struct Consumer(UpstreamConsumer); + +impl Consumer { + /// Creates a new [`Consumer`] for consuming RabbitMQ stream messages. + pub async fn new(connection: &Connection, stream: &str) -> RabbitResult<Self> { + let consumer = connection.inner().consumer().build(stream).await?; + Ok(Self(consumer)) + } + + /// Starts a consumer task that watches the stream.
+ pub async fn start( + mut self, + processing_func: fn(delivery: Result<Delivery, ConsumerDeliveryError>), + ) -> RabbitResult<()> { + let handle = self.0.handle(); + task::spawn(async move { + while let Some(delivery) = self.0.next().await { + processing_func(delivery) + } + }); + + // TODO(nick): handle when close happens more precisely. + handle.close().await?; + Ok(()) + } +} diff --git a/lib/si-rabbitmq/src/error.rs b/lib/si-rabbitmq/src/error.rs new file mode 100644 index 0000000000..fc2a6035de --- /dev/null +++ b/lib/si-rabbitmq/src/error.rs @@ -0,0 +1,32 @@ +use rabbitmq_stream_client::error::{ + ClientError, ConsumerCloseError, ConsumerCreateError, ProducerCloseError, ProducerCreateError, + ProducerPublishError, StreamCreateError, StreamDeleteError, +}; +use thiserror::Error; + +#[allow(missing_docs)] +#[remain::sorted] +#[derive(Error, Debug)] +pub enum RabbitError { + #[error("client error: {0}")] + Client(#[from] ClientError), + #[error("consumer close error: {0}")] + ConsumerClose(#[from] ConsumerCloseError), + #[error("consumer create error: {0}")] + ConsumerCreate(#[from] ConsumerCreateError), + #[error("producer close error: {0}")] + ProducerClose(#[from] ProducerCloseError), + #[error("cannot send because the producer is closed")] + ProducerClosed, + #[error("producer create error: {0}")] + ProducerCreate(#[from] ProducerCreateError), + #[error("producer publish error: {0}")] + ProducerPublish(#[from] ProducerPublishError), + #[error("stream create error: {0}")] + StreamCreate(#[from] StreamCreateError), + #[error("stream delete error: {0}")] + StreamDelete(#[from] StreamDeleteError), +} + +#[allow(missing_docs)] +pub type RabbitResult<T> = Result<T, RabbitError>; diff --git a/lib/si-rabbitmq/src/lib.rs b/lib/si-rabbitmq/src/lib.rs new file mode 100644 index 0000000000..1aa1126e49 --- /dev/null +++ b/lib/si-rabbitmq/src/lib.rs @@ -0,0 +1,35 @@ +//! This library provides the ability to [connect](Connection) to [RabbitMQ](https://rabbitmq.com) +//!
nodes, [produce](Producer) stream messages, and [consume](Consumer) stream messages. + +#![warn( + missing_debug_implementations, + missing_docs, + unreachable_pub, + bad_style, + dead_code, + improper_ctypes, + non_shorthand_field_patterns, + no_mangle_generic_items, + overflowing_literals, + path_statements, + patterns_in_fns_without_body, + private_in_public, + unconditional_recursion, + unused, + unused_allocation, + unused_comparisons, + unused_parens, + while_true, + clippy::missing_panics_doc +)] + +mod connection; +mod consumer; +mod error; +mod producer; + +pub use connection::Connection; +pub use consumer::Consumer; +pub use error::RabbitError; +pub use error::RabbitResult; +pub use producer::Producer; diff --git a/lib/si-rabbitmq/src/producer.rs b/lib/si-rabbitmq/src/producer.rs new file mode 100644 index 0000000000..f6646eaef6 --- /dev/null +++ b/lib/si-rabbitmq/src/producer.rs @@ -0,0 +1,58 @@ +use rabbitmq_stream_client::types::Message; +use rabbitmq_stream_client::{NoDedup, Producer as UpstreamProducer}; + +use crate::connection::Connection; +use crate::{RabbitError, RabbitResult}; + +/// An interface for producing and sending RabbitMQ stream messages. +#[allow(missing_debug_implementations)] +pub struct Producer { + producer: UpstreamProducer<NoDedup>, + closed: bool, +} + +impl Producer { + /// Creates a new [`Producer`] for producing and sending RabbitMQ stream messages. + pub async fn new(connection: &Connection, stream: &str) -> RabbitResult<Self> { + let producer = connection.inner().producer().build(stream).await?; + Ok(Self { + producer, + closed: false, + }) + } + + /// Sends a single message to a stream. + pub async fn send_single(&self, message: impl Into<Vec<u8>>) -> RabbitResult<()> { + if self.closed { + return Err(RabbitError::ProducerClosed); + } + self.producer + .send_with_confirm(Message::builder().body(message).build()) + .await?; + Ok(()) + } + + /// Sends a batch of messages to a stream.
+ pub async fn send_batch(&self, messages: impl Into<Vec<Vec<u8>>>) -> RabbitResult<()> { + if self.closed { + return Err(RabbitError::ProducerClosed); + } + self.producer + .batch_send_with_confirm( + messages + .into() + .iter() + .map(|m| Message::builder().body(m.clone()).build()) + .collect(), + ) + .await?; + Ok(()) + } + + /// Closes the producer connection and renders the producer unusable. + pub async fn close(mut self) -> RabbitResult<()> { + self.producer.close().await?; + self.closed = true; + Ok(()) + } +} diff --git a/lib/si-test-macros/src/lib.rs b/lib/si-test-macros/src/lib.rs index 94cd504e68..4fbd3ed262 100644 --- a/lib/si-test-macros/src/lib.rs +++ b/lib/si-test-macros/src/lib.rs @@ -390,3 +390,13 @@ pub fn sdf_test(attr: TokenStream, input: TokenStream) -> TokenStream { let item = parse_macro_input!(input as ItemFn); sdf_test::expand(item, args).into() } + +/// A procedural macro which helps to streamline, setup, and manage gobbler-related tests. +/// +/// Currently, this macro is equivalent to [`dal_test`](dal_test()).
+#[proc_macro_attribute] +pub fn gobbler_test(attr: TokenStream, input: TokenStream) -> TokenStream { + let args = parse_macro_input!(attr as Args); + let item = parse_macro_input!(input as ItemFn); + dal_test::expand(item, args).into() +} diff --git a/third-party/rust/BUCK b/third-party/rust/BUCK index 10efb45e77..c7a72c3f3e 100644 --- a/third-party/rust/BUCK +++ b/third-party/rust/BUCK @@ -1508,6 +1508,17 @@ cargo.rust_library( crate = "borsh", crate_root = "borsh-0.10.3.crate/src/lib.rs", edition = "2018", + env = { + "CARGO_MANIFEST_DIR": "borsh-0.10.3.crate", + "CARGO_PKG_AUTHORS": "Near Inc ", + "CARGO_PKG_DESCRIPTION": "Binary Object Representation Serializer for Hashing\n", + "CARGO_PKG_NAME": "borsh", + "CARGO_PKG_REPOSITORY": "https://github.com/near/borsh-rs", + "CARGO_PKG_VERSION": "0.10.3", + "CARGO_PKG_VERSION_MAJOR": "0", + "CARGO_PKG_VERSION_MINOR": "10", + "CARGO_PKG_VERSION_PATCH": "3", + }, features = ["std"], visibility = [], deps = [ @@ -2920,6 +2931,17 @@ cargo.rust_library( crate = "curve25519_dalek", crate_root = "curve25519-dalek-3.2.0.crate/src/lib.rs", edition = "2015", + env = { + "CARGO_MANIFEST_DIR": "curve25519-dalek-3.2.0.crate", + "CARGO_PKG_AUTHORS": "Isis Lovecruft :Henry de Valence ", + "CARGO_PKG_DESCRIPTION": "A pure-Rust implementation of group operations on ristretto255 and Curve25519", + "CARGO_PKG_NAME": "curve25519-dalek", + "CARGO_PKG_REPOSITORY": "https://github.com/dalek-cryptography/curve25519-dalek", + "CARGO_PKG_VERSION": "3.2.0", + "CARGO_PKG_VERSION_MAJOR": "3", + "CARGO_PKG_VERSION_MINOR": "2", + "CARGO_PKG_VERSION_PATCH": "0", + }, features = ["u64_backend"], visibility = [], deps = [ @@ -7064,9 +7086,7 @@ cargo.rust_binary( crate_root = "mime_guess-2.0.4.crate/build.rs", edition = "2015", visibility = [], - deps = [ - ":unicase-2.7.0", - ], + deps = [":unicase-2.7.0"], ) buildscript_run( @@ -7883,6 +7903,56 @@ cargo.rust_library( visibility = [], ) +http_archive( + name = "num_enum-0.6.1.crate", + sha256 = 
"7a015b430d3c108a207fd776d2e2196aaf8b1cf8cf93253e3a097ff3085076a1", + strip_prefix = "num_enum-0.6.1", + urls = ["https://crates.io/api/v1/crates/num_enum/0.6.1/download"], + visibility = [], +) + +cargo.rust_library( + name = "num_enum-0.6.1", + srcs = [":num_enum-0.6.1.crate"], + crate = "num_enum", + crate_root = "num_enum-0.6.1.crate/src/lib.rs", + edition = "2021", + features = [ + "default", + "std", + ], + visibility = [], + deps = [":num_enum_derive-0.6.1"], +) + +http_archive( + name = "num_enum_derive-0.6.1.crate", + sha256 = "96667db765a921f7b295ffee8b60472b686a51d4f21c2ee4ffdb94c7013b65a6", + strip_prefix = "num_enum_derive-0.6.1", + urls = ["https://crates.io/api/v1/crates/num_enum_derive/0.6.1/download"], + visibility = [], +) + +cargo.rust_library( + name = "num_enum_derive-0.6.1", + srcs = [":num_enum_derive-0.6.1.crate"], + crate = "num_enum_derive", + crate_root = "num_enum_derive-0.6.1.crate/src/lib.rs", + edition = "2021", + features = [ + "proc-macro-crate", + "std", + ], + proc_macro = True, + visibility = [], + deps = [ + ":proc-macro-crate-1.3.1", + ":proc-macro2-1.0.66", + ":quote-1.0.33", + ":syn-2.0.29", + ], +) + http_archive( name = "number_prefix-0.4.0.crate", sha256 = "830b246a0e5f20af87141b25c173cd1b609bd7779a4617d6ec582abaf90870f3", @@ -8356,6 +8426,28 @@ cargo.rust_library( deps = [":num-traits-0.2.16"], ) +http_archive( + name = "ordered-float-3.7.0.crate", + sha256 = "2fc2dbde8f8a79f2102cc474ceb0ad68e3b80b85289ea62389b60e66777e4213", + strip_prefix = "ordered-float-3.7.0", + urls = ["https://crates.io/api/v1/crates/ordered-float/3.7.0/download"], + visibility = [], +) + +cargo.rust_library( + name = "ordered-float-3.7.0", + srcs = [":ordered-float-3.7.0.crate"], + crate = "ordered_float", + crate_root = "ordered-float-3.7.0.crate/src/lib.rs", + edition = "2021", + features = [ + "default", + "std", + ], + visibility = [], + deps = [":num-traits-0.2.16"], +) + http_archive( name = "ordered-multimap-0.4.3.crate", sha256 = 
"ccd746e37177e1711c20dd619a1620f34f5c8b569c53590a72dedd5344d8924a", @@ -9520,6 +9612,27 @@ cargo.rust_library( deps = [":toml-0.5.11"], ) +http_archive( + name = "proc-macro-crate-1.3.1.crate", + sha256 = "7f4c021e1093a56626774e81216a4ce732a735e5bad4868a03f3ed65ca0c3919", + strip_prefix = "proc-macro-crate-1.3.1", + urls = ["https://crates.io/api/v1/crates/proc-macro-crate/1.3.1/download"], + visibility = [], +) + +cargo.rust_library( + name = "proc-macro-crate-1.3.1", + srcs = [":proc-macro-crate-1.3.1.crate"], + crate = "proc_macro_crate", + crate_root = "proc-macro-crate-1.3.1.crate/src/lib.rs", + edition = "2021", + visibility = [], + deps = [ + ":once_cell-1.18.0", + ":toml_edit-0.19.14", + ], +) + http_archive( name = "proc-macro-error-1.0.4.crate", sha256 = "da25490ff9892aab3fcf7c36f08cfb902dd3e71ca0f9f9517bea02a73a5ce38c", @@ -9855,6 +9968,71 @@ cargo.rust_library( deps = [":proc-macro2-1.0.66"], ) +alias( + name = "rabbitmq-stream-client", + actual = ":rabbitmq-stream-client-0.3.0", + visibility = ["PUBLIC"], +) + +http_archive( + name = "rabbitmq-stream-client-0.3.0.crate", + sha256 = "735d6cecec44dc27382b962309c05de47d40727ff9898a501ec8fadec76be9a8", + strip_prefix = "rabbitmq-stream-client-0.3.0", + urls = ["https://crates.io/api/v1/crates/rabbitmq-stream-client/0.3.0/download"], + visibility = [], +) + +cargo.rust_library( + name = "rabbitmq-stream-client-0.3.0", + srcs = [":rabbitmq-stream-client-0.3.0.crate"], + crate = "rabbitmq_stream_client", + crate_root = "rabbitmq-stream-client-0.3.0.crate/src/lib.rs", + edition = "2018", + visibility = [], + deps = [ + ":async-trait-0.1.73", + ":bytes-1.4.0", + ":dashmap-5.5.1", + ":futures-0.3.28", + ":pin-project-1.1.3", + ":rabbitmq-stream-protocol-0.3.0", + ":rand-0.8.5", + ":rustls-pemfile-1.0.3", + ":thiserror-1.0.47", + ":tokio-1.32.0", + ":tokio-rustls-0.24.1", + ":tokio-stream-0.1.14", + ":tokio-util-0.7.8", + ":tracing-0.1.37", + ":url-2.4.0", + ], +) + +http_archive( + name = 
"rabbitmq-stream-protocol-0.3.0.crate", + sha256 = "ed68734bea9f111e2541f7d1cb8f2b109959839173589183e09e53771d62092f", + strip_prefix = "rabbitmq-stream-protocol-0.3.0", + urls = ["https://crates.io/api/v1/crates/rabbitmq-stream-protocol/0.3.0/download"], + visibility = [], +) + +cargo.rust_library( + name = "rabbitmq-stream-protocol-0.3.0", + srcs = [":rabbitmq-stream-protocol-0.3.0.crate"], + crate = "rabbitmq_stream_protocol", + crate_root = "rabbitmq-stream-protocol-0.3.0.crate/src/lib.rs", + edition = "2018", + visibility = [], + deps = [ + ":byteorder-1.4.3", + ":chrono-0.4.26", + ":derive_more-0.99.17", + ":num_enum-0.6.1", + ":ordered-float-3.7.0", + ":uuid-1.4.1", + ], +) + http_archive( name = "radium-0.7.0.crate", sha256 = "dc33ff2d4973d518d823d61aa239014831e521c75da58e3df4840d3f47749d09", @@ -14074,6 +14252,7 @@ cargo.rust_binary( ":pretty_assertions_sorted-1.2.3", ":proc-macro2-1.0.66", ":quote-1.0.33", + ":rabbitmq-stream-client-0.3.0", ":rand-0.8.5", ":refinery-0.8.10", ":regex-1.9.3", diff --git a/third-party/rust/Cargo.lock b/third-party/rust/Cargo.lock index ea90f304df..a40b91beb5 100644 --- a/third-party/rust/Cargo.lock +++ b/third-party/rust/Cargo.lock @@ -551,7 +551,7 @@ checksum = "0754613691538d51f329cce9af41d7b7ca150bc973056f1156611489475f54f7" dependencies = [ "borsh-derive-internal", "borsh-schema-derive-internal", - "proc-macro-crate", + "proc-macro-crate 0.1.5", "proc-macro2", "syn 1.0.109", ] @@ -1138,7 +1138,7 @@ dependencies = [ "anyhow", "html-escape", "nom", - "ordered-float", + "ordered-float 2.10.0", ] [[package]] @@ -2711,6 +2711,27 @@ dependencies = [ "libc", ] +[[package]] +name = "num_enum" +version = "0.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7a015b430d3c108a207fd776d2e2196aaf8b1cf8cf93253e3a097ff3085076a1" +dependencies = [ + "num_enum_derive", +] + +[[package]] +name = "num_enum_derive" +version = "0.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum 
= "96667db765a921f7b295ffee8b60472b686a51d4f21c2ee4ffdb94c7013b65a6" +dependencies = [ + "proc-macro-crate 1.3.1", + "proc-macro2", + "quote", + "syn 2.0.29", +] + [[package]] name = "number_prefix" version = "0.4.0" @@ -2859,6 +2880,15 @@ dependencies = [ "num-traits", ] +[[package]] +name = "ordered-float" +version = "3.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2fc2dbde8f8a79f2102cc474ceb0ad68e3b80b85289ea62389b60e66777e4213" +dependencies = [ + "num-traits", +] + [[package]] name = "ordered-multimap" version = "0.4.3" @@ -3300,6 +3330,16 @@ dependencies = [ "toml 0.5.11", ] +[[package]] +name = "proc-macro-crate" +version = "1.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7f4c021e1093a56626774e81216a4ce732a735e5bad4868a03f3ed65ca0c3919" +dependencies = [ + "once_cell", + "toml_edit", +] + [[package]] name = "proc-macro-error" version = "1.0.4" @@ -3426,6 +3466,43 @@ dependencies = [ "proc-macro2", ] +[[package]] +name = "rabbitmq-stream-client" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "735d6cecec44dc27382b962309c05de47d40727ff9898a501ec8fadec76be9a8" +dependencies = [ + "async-trait", + "bytes 1.4.0", + "dashmap", + "futures", + "pin-project 1.1.3", + "rabbitmq-stream-protocol", + "rand 0.8.5", + "rustls-pemfile", + "thiserror", + "tokio", + "tokio-rustls 0.24.1", + "tokio-stream", + "tokio-util", + "tracing", + "url", +] + +[[package]] +name = "rabbitmq-stream-protocol" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ed68734bea9f111e2541f7d1cb8f2b109959839173589183e09e53771d62092f" +dependencies = [ + "byteorder", + "chrono", + "derive_more", + "num_enum", + "ordered-float 3.7.0", + "uuid", +] + [[package]] name = "radium" version = "0.7.0" @@ -4846,6 +4923,7 @@ dependencies = [ "pretty_assertions_sorted", "proc-macro2", "quote", + "rabbitmq-stream-client", "rand 0.8.5", 
"refinery", "regex", diff --git a/third-party/rust/Cargo.toml b/third-party/rust/Cargo.toml index bb256fbb1e..8e09e8cdac 100644 --- a/third-party/rust/Cargo.toml +++ b/third-party/rust/Cargo.toml @@ -32,7 +32,11 @@ ciborium = "0.2.1" clap = { version = "4.2.7", features = ["derive", "color", "env", "wrap_help"] } color-eyre = "0.6.2" colored = "2.0.4" -comfy-table = { version = "7.0.1", features = ["crossterm", "tty", "custom_styling"] } +comfy-table = { version = "7.0.1", features = [ + "crossterm", + "tty", + "custom_styling", +] } config = { version = "0.13.3", default-features = false, features = ["toml"] } console = "0.15.7" convert_case = "0.6.0" @@ -50,8 +54,15 @@ futures = "0.3.28" futures-lite = "1.13.0" hex = "0.4.3" http = "0.2.9" -hyper = { version = "0.14.26", features = ["client", "http1", "runtime", "server"] } -hyperlocal = { version = "0.8.0", default-features = false, features = ["client"] } +hyper = { version = "0.14.26", features = [ + "client", + "http1", + "runtime", + "server", +] } +hyperlocal = { version = "0.8.0", default-features = false, features = [ + "client", +] } iftree = "1.0.4" indicatif = "0.17.5" indoc = "2.0.1" @@ -65,7 +76,10 @@ nkeys = "0.2.0" num_cpus = "1.15.0" once_cell = "1.17.1" open = "5.0.0" -opentelemetry = { version = "~0.18.0", features = ["rt-tokio", "trace"] } # pinned, pending new release of tracing-opentelemetry, 0.18 +opentelemetry = { version = "~0.18.0", features = [ + "rt-tokio", + "trace", +] } # pinned, pending new release of tracing-opentelemetry, 0.18 opentelemetry-otlp = "~0.11.0" # pinned, pending new release of tracing-opentelemetry, post 0.18 opentelemetry-semantic-conventions = "~0.10.0" # pinned, pending new release of tracing-opentelemetry, post 0.18 ouroboros = "0.15.6" @@ -78,14 +92,27 @@ postgres-types = { version = "0.2.5", features = ["derive"] } pretty_assertions_sorted = "1.2.1" proc-macro2 = "1.0.56" quote = "1.0.27" +rabbitmq-stream-client = "0.3.0" rand = "0.8.5" refinery = { version = 
"0.8.9", features = ["tokio-postgres"] } regex = "1.8.1" remain = "0.2.8" -reqwest = { version = "0.11.17", default-features = false, features = ["rustls-tls", "json", "multipart"] } -rust-s3 = { version = "0.33.0", default-features = false, features = ["tokio-rustls-tls"] } +reqwest = { version = "0.11.17", default-features = false, features = [ + "rustls-tls", + "json", + "multipart", +] } +rust-s3 = { version = "0.33.0", default-features = false, features = [ + "tokio-rustls-tls", +] } rustls = "0.21.6" # pinned, pending update from tokio-rustls for async-nats -sea-orm = { version = "0.11", features = ["sqlx-postgres", "runtime-tokio-rustls", "macros", "with-chrono", "debug-print"] } +sea-orm = { version = "0.11", features = [ + "sqlx-postgres", + "runtime-tokio-rustls", + "macros", + "with-chrono", + "debug-print", +] } self-replace = "1.3.5" serde = { version = "1.0.160", features = ["derive", "rc"] } serde-aux = "4.2.0" @@ -99,10 +126,16 @@ strum = { version = "0.24.1", features = ["derive"] } syn = { version = "2.0.15", features = ["full", "extra-traits"] } tar = "0.4.38" tempfile = "3.5.0" -test-log = { version = "0.2.11", default-features = false, features = ["trace"] } +test-log = { version = "0.2.11", default-features = false, features = [ + "trace", +] } thiserror = "1.0.40" tokio = { version = "1.28.0", features = ["full"] } -tokio-postgres = { version = "0.7.8", features = ["runtime", "with-chrono-0_4", "with-serde_json-1"] } +tokio-postgres = { version = "0.7.8", features = [ + "runtime", + "with-chrono-0_4", + "with-serde_json-1", +] } tokio-serde = { version = "0.8.0", features = ["json"] } tokio-stream = "0.1.14" tokio-test = "0.4.2" diff --git a/third-party/rust/fixups/borsh/fixups.toml b/third-party/rust/fixups/borsh/fixups.toml new file mode 100644 index 0000000000..46dfe3a3ac --- /dev/null +++ b/third-party/rust/fixups/borsh/fixups.toml @@ -0,0 +1,4 @@ +cargo_env = true + +[[buildscript]] +[buildscript.rustc_flags] diff --git 
a/third-party/rust/fixups/crossbeam-epoch/fixups.toml b/third-party/rust/fixups/crossbeam-epoch/fixups.toml new file mode 100644 index 0000000000..5e026f75e0 --- /dev/null +++ b/third-party/rust/fixups/crossbeam-epoch/fixups.toml @@ -0,0 +1,2 @@ +[[buildscript]] +[buildscript.rustc_flags] diff --git a/third-party/rust/fixups/curve25519-dalek/fixups.toml b/third-party/rust/fixups/curve25519-dalek/fixups.toml new file mode 100644 index 0000000000..46dfe3a3ac --- /dev/null +++ b/third-party/rust/fixups/curve25519-dalek/fixups.toml @@ -0,0 +1,4 @@ +cargo_env = true + +[[buildscript]] +[buildscript.rustc_flags] diff --git a/third-party/rust/fixups/mime_guess/fixups.toml b/third-party/rust/fixups/mime_guess/fixups.toml index 2bb09919c5..5d7f25f8c8 100644 --- a/third-party/rust/fixups/mime_guess/fixups.toml +++ b/third-party/rust/fixups/mime_guess/fixups.toml @@ -1,4 +1,4 @@ -extra_deps = [":unicase-2.6.0"] +extra_deps = [":unicase-2.7.0"] [[buildscript]] [buildscript.gen_srcs] diff --git a/third-party/rust/fixups/system-configuration-sys/fixups.toml b/third-party/rust/fixups/system-configuration-sys/fixups.toml new file mode 100644 index 0000000000..46dfe3a3ac --- /dev/null +++ b/third-party/rust/fixups/system-configuration-sys/fixups.toml @@ -0,0 +1,4 @@ +cargo_env = true + +[[buildscript]] +[buildscript.rustc_flags] From ead3fd88ce47a77722c5e94e8b8a398a8b0155b7 Mon Sep 17 00:00:00 2001 From: Nick Gerace Date: Mon, 21 Aug 2023 18:03:11 -0400 Subject: [PATCH 02/92] Add first complex test for graph work and prune existing tests Add first complex test for graph work, which includes multiple conflicts and updates. It can be expanded upon to add even more scenarios in the future, such as ordering conflicts (once ready). This commit also prunes duplicate tests and cleans up panics in other existing tests. Some have been renamed as a result of these changes.
Signed-off-by: Nick Gerace Co-authored-by: Jacob Helwig --- lib/dal/BUCK | 1 + lib/dal/src/content/hash.rs | 6 + lib/dal/src/workspace_snapshot/conflict.rs | 3 +- lib/dal/src/workspace_snapshot/graph.rs | 411 ++++++++++++++++++--- lib/dal/src/workspace_snapshot/update.rs | 2 + 5 files changed, 373 insertions(+), 50 deletions(-) diff --git a/lib/dal/BUCK b/lib/dal/BUCK index c3f701af08..21a449f516 100644 --- a/lib/dal/BUCK +++ b/lib/dal/BUCK @@ -37,6 +37,7 @@ rust_library( "//third-party/rust:paste", "//third-party/rust:petgraph", "//third-party/rust:postgres-types", + "//third-party/rust:pretty_assertions_sorted", "//third-party/rust:rand", "//third-party/rust:refinery", "//third-party/rust:regex", diff --git a/lib/dal/src/content/hash.rs b/lib/dal/src/content/hash.rs index f31f1d37a1..6f58d17fbd 100644 --- a/lib/dal/src/content/hash.rs +++ b/lib/dal/src/content/hash.rs @@ -28,6 +28,12 @@ impl From<&Value> for ContentHash { } } +impl From<&str> for ContentHash { + fn from(input: &str) -> Self { + Self::new(input.as_bytes()) + } +} + impl Default for ContentHash { fn default() -> Self { Self::new("".as_bytes()) diff --git a/lib/dal/src/workspace_snapshot/conflict.rs b/lib/dal/src/workspace_snapshot/conflict.rs index b4c97059b8..e6e894cc2f 100644 --- a/lib/dal/src/workspace_snapshot/conflict.rs +++ b/lib/dal/src/workspace_snapshot/conflict.rs @@ -5,14 +5,15 @@ use petgraph::stable_graph::NodeIndex; #[remain::sorted] #[derive(Debug, Copy, Clone, PartialEq, Eq)] pub enum Conflict { + // TODO(nick,jacob): this variant will not be possible until ordering is in place. 
ChildOrder { ours: NodeIndex, theirs: NodeIndex, }, ModifyRemovedItem(NodeIndex), NodeContent { - to_rebase: NodeIndex, onto: NodeIndex, + to_rebase: NodeIndex, }, RemoveModifiedItem { container: NodeIndex, diff --git a/lib/dal/src/workspace_snapshot/graph.rs b/lib/dal/src/workspace_snapshot/graph.rs index 8571c41b99..5939ca9880 100644 --- a/lib/dal/src/workspace_snapshot/graph.rs +++ b/lib/dal/src/workspace_snapshot/graph.rs @@ -105,7 +105,7 @@ impl WorkspaceSnapshotGraph { // Because outgoing edges are part of a node's identity, we create a new "from" node // as we are effectively writing to that node (we'll need to update the merkle tree // hash), and everything in the graph should be treated as copy-on-write. - let new_from_node_index = self.copy_node_index(change_set, from_node_index)?; + let new_from_node_index = self.copy_node_index(from_node_index)?; // Add the new edge to the new version of the "from" node. let new_edge_index = @@ -135,14 +135,11 @@ impl WorkspaceSnapshotGraph { fn copy_node_index( &mut self, - change_set: &ChangeSet, node_index_to_copy: NodeIndex, ) -> WorkspaceSnapshotGraphResult { - let new_node_index = self.graph.add_node( - self.get_node_weight(node_index_to_copy)? - .new_with_incremented_vector_clock(change_set)?, - ); - + let new_node_index = self + .graph + .add_node(self.get_node_weight(node_index_to_copy)?.clone()); Ok(new_node_index) } @@ -202,17 +199,22 @@ impl WorkspaceSnapshotGraph { // `onto`. to_rebase_node_indexes.push(self.root_index); } else { - to_rebase_node_indexes.extend( - self.get_node_index_by_lineage(onto_node_weight.lineage_id()) - .map_err(|err| { - error!( - "Unable to find NodeIndex(es) for lineage_id {}: {}", - onto_node_weight.lineage_id(), - err, - ); - event - })?, - ); + // Only retain node indexes... or indices... if they are part of the current + // graph. There may still be garbage from previous updates to the graph + // laying around. 
+ let mut potential_to_rebase_node_indexes = self + .get_node_index_by_lineage(onto_node_weight.lineage_id()) + .map_err(|err| { + error!( + "Unable to find NodeIndex(es) for lineage_id {}: {}", + onto_node_weight.lineage_id(), + err, + ); + event + })?; + potential_to_rebase_node_indexes + .retain(|node_index| self.has_path_to_root(*node_index)); + to_rebase_node_indexes.extend(potential_to_rebase_node_indexes); } } @@ -432,7 +434,7 @@ impl WorkspaceSnapshotGraph { new_content_hash: ContentHash, ) -> WorkspaceSnapshotGraphResult<()> { let original_node_index = self.get_node_index_by_id(id)?; - let new_node_index = self.copy_node_index(change_set, original_node_index)?; + let new_node_index = self.copy_node_index(original_node_index)?; let node_weight = self.get_node_weight_mut(new_node_index)?; node_weight.new_content_hash(new_content_hash)?; @@ -918,7 +920,7 @@ impl WorkspaceSnapshotGraph { edge_kind: EdgeWeightKind, ) -> WorkspaceSnapshotGraphResult<()> { let mut edges_to_remove = Vec::new(); - let new_source_node_index = dbg!(self.copy_node_index(change_set, source_node_index)?); + let new_source_node_index = dbg!(self.copy_node_index(source_node_index)?); self.replace_references(change_set, dbg!(source_node_index), new_source_node_index)?; for edgeref in self @@ -959,7 +961,7 @@ impl WorkspaceSnapshotGraph { let new_node_index = match old_to_new_node_indices.get(&old_node_index) { Some(found_new_node_index) => *found_new_node_index, None => { - let new_node_index = self.copy_node_index(change_set, old_node_index)?; + let new_node_index = self.copy_node_index(old_node_index)?; old_to_new_node_indices.insert(old_node_index, new_node_index); new_node_index } @@ -975,10 +977,7 @@ impl WorkspaceSnapshotGraph { if let Some((_, destination_node_index)) = self.graph.edge_endpoints(edge_reference.id()) { - edges_to_create.push(( - edge_weight.new_with_incremented_vector_clocks(change_set)?, - destination_node_index, - )); + edges_to_create.push((edge_weight.clone(), 
destination_node_index)); } } @@ -1088,9 +1087,14 @@ fn ordering_node_indexes_for_node_index( #[cfg(test)] mod test { use super::*; - use crate::workspace_snapshot::node_weight::NodeWeight::Content; use crate::{ComponentId, ContentHash, FuncId, PropId, SchemaId, SchemaVariantId}; - use serde_json::to_string; + use pretty_assertions_sorted::assert_eq; + + #[derive(Debug, PartialEq)] + struct ConflictsAndUpdates { + conflicts: Vec, + updates: Vec, + } #[test] fn new() { @@ -1378,7 +1382,7 @@ mod test { NodeWeight::new_content( change_set, schema_id, - ContentAddress::Schema(ContentHash::new("Constellation".as_bytes())), + ContentAddress::Schema(ContentHash::from("Constellation")), ) .expect("Unable to create NodeWeight"), ) @@ -1402,7 +1406,7 @@ mod test { NodeWeight::new_content( change_set, component_id, - ContentAddress::Component(ContentHash::new("Crimson Fleet".as_bytes())), + ContentAddress::Component(ContentHash::from("Crimson Fleet")), ) .expect("Unable to create NodeWeight"), ) @@ -1467,7 +1471,7 @@ mod test { .merkle_tree_hash(), // actual ); - let updated_content_hash = ContentHash::new("new_content".as_bytes()); + let updated_content_hash = ContentHash::from("new_content"); graph .update_content(change_set, component_id.into(), updated_content_hash) .expect("Unable to update Component content hash"); @@ -1541,7 +1545,7 @@ mod test { NodeWeight::new_content( initial_change_set, schema_id, - ContentAddress::Schema(ContentHash::new("Schema A".as_bytes())), + ContentAddress::Schema(ContentHash::from("Schema A")), ) .expect("Unable to create NodeWeight"), ) @@ -1554,7 +1558,7 @@ mod test { NodeWeight::new_content( initial_change_set, schema_variant_id, - ContentAddress::SchemaVariant(ContentHash::new("Schema Variant A".as_bytes())), + ContentAddress::SchemaVariant(ContentHash::from("Schema Variant A")), ) .expect("Unable to create NodeWeight"), ) @@ -1595,7 +1599,7 @@ mod test { NodeWeight::new_content( new_change_set, component_id, - 
ContentAddress::Schema(ContentHash::new("Component A".as_bytes())), + ContentAddress::Schema(ContentHash::from("Component A")), ) .expect("Unable to create NodeWeight"), ) @@ -1648,7 +1652,7 @@ mod test { NodeWeight::new_content( base_change_set, schema_id, - ContentAddress::Schema(ContentHash::new("Schema A".as_bytes())), + ContentAddress::Schema(ContentHash::from("Schema A")), ) .expect("Unable to create NodeWeight"), ) @@ -1661,7 +1665,7 @@ mod test { NodeWeight::new_content( base_change_set, schema_variant_id, - ContentAddress::SchemaVariant(ContentHash::new("Schema Variant A".as_bytes())), + ContentAddress::SchemaVariant(ContentHash::from("Schema Variant A")), ) .expect("Unable to create NodeWeight"), ) @@ -1703,7 +1707,7 @@ mod test { NodeWeight::new_content( base_change_set, new_onto_component_id, - ContentAddress::Component(ContentHash::new("Component B".as_bytes())), + ContentAddress::Component(ContentHash::from("Component B")), ) .expect("Unable to create NodeWeight"), ) @@ -1772,7 +1776,7 @@ mod test { NodeWeight::new_content( base_change_set, schema_id, - ContentAddress::Schema(ContentHash::new("Schema A".as_bytes())), + ContentAddress::Schema(ContentHash::from("Schema A")), ) .expect("Unable to create NodeWeight"), ) @@ -1785,7 +1789,7 @@ mod test { NodeWeight::new_content( base_change_set, schema_variant_id, - ContentAddress::SchemaVariant(ContentHash::new("Schema Variant A".as_bytes())), + ContentAddress::SchemaVariant(ContentHash::from("Schema Variant A")), ) .expect("Unable to create NodeWeight"), ) @@ -1827,7 +1831,7 @@ mod test { NodeWeight::new_content( new_change_set, component_id, - ContentAddress::Component(ContentHash::new("Component A".as_bytes())), + ContentAddress::Component(ContentHash::from("Component A")), ) .expect("Unable to create NodeWeight"), ) @@ -1866,7 +1870,7 @@ mod test { NodeWeight::new_content( base_change_set, new_onto_component_id, - ContentAddress::Component(ContentHash::new("Component B".as_bytes())), + 
ContentAddress::Component(ContentHash::from("Component B")), ) .expect("Unable to create NodeWeight"), ) @@ -1935,7 +1939,7 @@ mod test { NodeWeight::new_content( base_change_set, schema_id, - ContentAddress::Schema(ContentHash::new("Schema A".as_bytes())), + ContentAddress::Schema(ContentHash::from("Schema A")), ) .expect("Unable to create NodeWeight"), ) @@ -1948,7 +1952,7 @@ mod test { NodeWeight::new_content( base_change_set, schema_variant_id, - ContentAddress::SchemaVariant(ContentHash::new("Schema Variant A".as_bytes())), + ContentAddress::SchemaVariant(ContentHash::from("Schema Variant A")), ) .expect("Unable to create NodeWeight"), ) @@ -1983,7 +1987,7 @@ mod test { NodeWeight::new_content( base_change_set, component_id, - ContentAddress::Component(ContentHash::new("Component A".as_bytes())), + ContentAddress::Component(ContentHash::from("Component A")), ) .expect("Unable to create NodeWeight"), ) @@ -2023,7 +2027,7 @@ mod test { .update_content( new_change_set, component_id, - ContentHash::new("Updated Component A".as_bytes()), + ContentHash::from("Updated Component A"), ) .expect("Unable to update Component A"); @@ -2035,7 +2039,7 @@ mod test { .update_content( base_change_set, component_id, - ContentHash::new("Base Updated Component A".as_bytes()), + ContentHash::from("Base Updated Component A"), ) .expect("Unable to update Component A"); @@ -2076,7 +2080,7 @@ mod test { NodeWeight::new_content( base_change_set, schema_id, - ContentAddress::Schema(ContentHash::new("Schema A".as_bytes())), + ContentAddress::Schema(ContentHash::from("Schema A")), ) .expect("Unable to create NodeWeight"), ) @@ -2089,7 +2093,7 @@ mod test { NodeWeight::new_content( base_change_set, schema_variant_id, - ContentAddress::SchemaVariant(ContentHash::new("Schema Variant A".as_bytes())), + ContentAddress::SchemaVariant(ContentHash::from("Schema Variant A")), ) .expect("Unable to create NodeWeight"), ) @@ -2124,7 +2128,7 @@ mod test { NodeWeight::new_content( base_change_set, 
component_id, - ContentAddress::Component(ContentHash::new("Component A".as_bytes())), + ContentAddress::Component(ContentHash::from("Component A")), ) .expect("Unable to create NodeWeight"), ) @@ -2179,7 +2183,7 @@ mod test { .update_content( new_change_set, component_id, - ContentHash::new("Updated Component A".as_bytes()), + ContentHash::from("Updated Component A"), ) .expect("Unable to update Component A"); @@ -2201,4 +2205,313 @@ mod test { ); assert_eq!(Vec::::new(), updates); } + + #[test] + fn detect_conflicts_and_updates_complex() { + let initial_change_set = ChangeSet::new().expect("Unable to create ChangeSet"); + let base_change_set = &initial_change_set; + let mut base_graph = WorkspaceSnapshotGraph::new(base_change_set) + .expect("Unable to create WorkspaceSnapshotGraph"); + + // Docker Image Schema + let docker_image_schema_id = base_change_set + .generate_ulid() + .expect("Cannot generate Ulid"); + let docker_image_schema_index = base_graph + .add_node( + NodeWeight::new_content( + base_change_set, + docker_image_schema_id, + ContentAddress::Schema(ContentHash::from("first")), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add Schema A"); + base_graph + .add_edge( + base_change_set, + base_graph.root_index, + EdgeWeight::new(base_change_set, EdgeWeightKind::Uses) + .expect("Unable to create EdgeWeight"), + docker_image_schema_index, + ) + .expect("Unable to add root -> schema edge"); + + // Docker Image Schema Variant + let docker_image_schema_variant_id = base_change_set + .generate_ulid() + .expect("Cannot generate Ulid"); + let docker_image_schema_variant_index = base_graph + .add_node( + NodeWeight::new_content( + base_change_set, + docker_image_schema_variant_id, + ContentAddress::SchemaVariant(ContentHash::from("first")), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add Schema Variant A"); + base_graph + .add_edge( + base_change_set, + base_graph + .get_node_index_by_id(docker_image_schema_id) + 
.expect("Cannot get NodeIndex"), + EdgeWeight::new(base_change_set, EdgeWeightKind::Uses) + .expect("Unable to create EdgeWeight"), + docker_image_schema_variant_index, + ) + .expect("Unable to add schema -> schema variant edge"); + + // Nginx Docker Image Component + let nginx_docker_image_component_id = base_change_set + .generate_ulid() + .expect("Cannot generate Ulid"); + let nginx_docker_image_component_index = base_graph + .add_node( + NodeWeight::new_content( + base_change_set, + nginx_docker_image_component_id, + ContentAddress::Component(ContentHash::from("first")), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add Component A"); + base_graph + .add_edge( + base_change_set, + base_graph.root_index, + EdgeWeight::new(base_change_set, EdgeWeightKind::Uses) + .expect("Unable to create EdgeWeight"), + nginx_docker_image_component_index, + ) + .expect("Unable to add root -> component edge"); + base_graph + .add_edge( + base_change_set, + base_graph + .get_node_index_by_id(nginx_docker_image_component_id) + .expect("Cannot get NodeIndex"), + EdgeWeight::new(base_change_set, EdgeWeightKind::Uses) + .expect("Unable to create EdgeWeight"), + base_graph + .get_node_index_by_id(docker_image_schema_variant_id) + .expect("Cannot get NodeIndex"), + ) + .expect("Unable to add component -> schema variant edge"); + + // Alpine Component + let alpine_component_id = base_change_set + .generate_ulid() + .expect("Cannot generate Ulid"); + let alpine_component_index = base_graph + .add_node( + NodeWeight::new_content( + base_change_set, + alpine_component_id, + ContentAddress::Component(ContentHash::from("first")), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add Component A"); + base_graph + .add_edge( + base_change_set, + base_graph.root_index, + EdgeWeight::new(base_change_set, EdgeWeightKind::Uses) + .expect("Unable to create EdgeWeight"), + alpine_component_index, + ) + .expect("Unable to add root -> component edge"); + 
base_graph + .add_edge( + base_change_set, + base_graph + .get_node_index_by_id(alpine_component_id) + .expect("Cannot get NodeIndex"), + EdgeWeight::new(base_change_set, EdgeWeightKind::Uses) + .expect("Unable to create EdgeWeight"), + base_graph + .get_node_index_by_id(docker_image_schema_variant_id) + .expect("Cannot get NodeIndex"), + ) + .expect("Unable to add component -> schema variant edge"); + + // Butane Schema + let butane_schema_id = base_change_set + .generate_ulid() + .expect("Cannot generate Ulid"); + let butane_schema_index = base_graph + .add_node( + NodeWeight::new_content( + base_change_set, + butane_schema_id, + ContentAddress::Schema(ContentHash::from("first")), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add Schema A"); + base_graph + .add_edge( + base_change_set, + base_graph.root_index, + EdgeWeight::new(base_change_set, EdgeWeightKind::Uses) + .expect("Unable to create EdgeWeight"), + butane_schema_index, + ) + .expect("Unable to add root -> schema edge"); + + // Butane Schema Variant + let butane_schema_variant_id = base_change_set + .generate_ulid() + .expect("Cannot generate Ulid"); + let butane_schema_variant_index = base_graph + .add_node( + NodeWeight::new_content( + base_change_set, + butane_schema_variant_id, + ContentAddress::SchemaVariant(ContentHash::from("first")), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add Schema Variant A"); + base_graph + .add_edge( + base_change_set, + base_graph + .get_node_index_by_id(butane_schema_id) + .expect("Cannot get NodeIndex"), + EdgeWeight::new(base_change_set, EdgeWeightKind::Uses) + .expect("Unable to create EdgeWeight"), + butane_schema_variant_index, + ) + .expect("Unable to add schema -> schema variant edge"); + + // Nginx Butane Component + let nginx_butane_component_id = base_change_set + .generate_ulid() + .expect("Cannot generate Ulid"); + let nginx_butane_node_index = base_graph + .add_node( + NodeWeight::new_content( + 
base_change_set, + nginx_butane_component_id, + ContentAddress::Component(ContentHash::from("first")), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add Schema Variant A"); + base_graph + .add_edge( + base_change_set, + base_graph.root_index, + EdgeWeight::new(base_change_set, EdgeWeightKind::Uses) + .expect("Unable to create EdgeWeight"), + nginx_butane_node_index, + ) + .expect("Unable to add root -> component edge"); + base_graph + .add_edge( + base_change_set, + base_graph + .get_node_index_by_id(nginx_butane_component_id) + .expect("Cannot get NodeIndex"), + EdgeWeight::new(base_change_set, EdgeWeightKind::Uses) + .expect("Unable to create EdgeWeight"), + base_graph + .get_node_index_by_id(butane_schema_variant_id) + .expect("Cannot get NodeIndex"), + ) + .expect("Unable to add component -> schema variant edge"); + + base_graph.cleanup(); + println!("Initial base graph (Root {:?}):", base_graph.root_index); + base_graph.dot(); + + // Create a new change set to cause some problems! + let new_change_set = ChangeSet::new().expect("Unable to create ChangeSet"); + let new_change_set = &new_change_set; + let mut new_graph = base_graph.clone(); + + // Create a modify removed item conflict. + base_graph + .remove_edge( + base_change_set, + base_graph.root_index, + base_graph + .get_node_index_by_id(nginx_butane_component_id) + .expect("Unable to get NodeIndex"), + EdgeWeightKind::Uses, + ) + .expect("Unable to update the component"); + new_graph + .update_content( + new_change_set, + nginx_butane_component_id, + ContentHash::from("second"), + ) + .expect("Unable to update the component"); + + // Create a node content conflict. 
+        base_graph
+            .update_content(
+                base_change_set,
+                docker_image_schema_variant_id,
+                ContentHash::from("oopsie"),
+            )
+            .expect("Unable to update the component");
+        new_graph
+            .update_content(
+                new_change_set,
+                docker_image_schema_variant_id,
+                ContentHash::from("poopsie"),
+            )
+            .expect("Unable to update the component");
+
+        // Create a pure update.
+        new_graph
+            .update_content(
+                new_change_set,
+                docker_image_schema_id,
+                ContentHash::from("bg3"),
+            )
+            .expect("Unable to update the schema");
+
+        let (conflicts, updates) = new_graph
+            .detect_conflicts_and_updates(new_change_set, &base_graph, base_change_set)
+            .expect("Unable to detect conflicts and updates");
+
+        println!("base graph current root: {:?}", base_graph.root_index);
+        base_graph.dot();
+        println!("new graph current root: {:?}", new_graph.root_index);
+        new_graph.dot();
+
+        let expected_conflicts = vec![
+            Conflict::ModifyRemovedItem(
+                new_graph
+                    .get_node_index_by_id(nginx_butane_component_id)
+                    .expect("Unable to get component NodeIndex"),
+            ),
+            Conflict::NodeContent {
+                onto: base_graph
+                    .get_node_index_by_id(docker_image_schema_variant_id)
+                    .expect("Unable to get component NodeIndex"),
+                to_rebase: new_graph
+                    .get_node_index_by_id(docker_image_schema_variant_id)
+                    .expect("Unable to get component NodeIndex"),
+            },
+        ];
+        // assert_eq!(expected_conflicts, conflicts);
+
+        let expected_updates = Vec::<Update>::new();
+        // assert_eq!(Vec::<Update>::new(), updates);
+
+        assert_eq!(
+            ConflictsAndUpdates {
+                conflicts: expected_conflicts,
+                updates: expected_updates,
+            },
+            ConflictsAndUpdates { conflicts, updates },
+        );
+    }
 }
diff --git a/lib/dal/src/workspace_snapshot/update.rs b/lib/dal/src/workspace_snapshot/update.rs
index 1ba2a9cbab..0087942833 100644
--- a/lib/dal/src/workspace_snapshot/update.rs
+++ b/lib/dal/src/workspace_snapshot/update.rs
@@ -15,7 +15,9 @@ pub enum Update {
     },
     RemoveEdge(EdgeIndex),
     ReplaceSubgraph {
+        // "onto"
         new: NodeIndex,
+        // "to_rebase"
         old: NodeIndex,
     },
 }

From
7c8f9481d8ee0b856402e76968c84df444ca3323 Mon Sep 17 00:00:00 2001 From: Jacob Helwig Date: Tue, 22 Aug 2023 08:40:38 -0700 Subject: [PATCH 03/92] Ensure vector clocks are updated when updating node content Previously, we were relying on `copy_node_index` to also increment the vector clocks, but we updated `copy_node_index` to be more of a "pure" copy, which means that it was no longer incrementing the vector clocks. In `update_content` we explicitly want the vector clocks updated, since we are doing a write, so we now do this directly. --- lib/dal/src/workspace_snapshot/graph.rs | 1 + lib/dal/src/workspace_snapshot/node_weight.rs | 11 +++++++++++ 2 files changed, 12 insertions(+) diff --git a/lib/dal/src/workspace_snapshot/graph.rs b/lib/dal/src/workspace_snapshot/graph.rs index 5939ca9880..017142a9f3 100644 --- a/lib/dal/src/workspace_snapshot/graph.rs +++ b/lib/dal/src/workspace_snapshot/graph.rs @@ -436,6 +436,7 @@ impl WorkspaceSnapshotGraph { let original_node_index = self.get_node_index_by_id(id)?; let new_node_index = self.copy_node_index(original_node_index)?; let node_weight = self.get_node_weight_mut(new_node_index)?; + node_weight.increment_vector_clock(change_set)?; node_weight.new_content_hash(new_content_hash)?; self.replace_references(change_set, original_node_index, new_node_index) diff --git a/lib/dal/src/workspace_snapshot/node_weight.rs b/lib/dal/src/workspace_snapshot/node_weight.rs index 0a876e2631..08c8416ce7 100644 --- a/lib/dal/src/workspace_snapshot/node_weight.rs +++ b/lib/dal/src/workspace_snapshot/node_weight.rs @@ -53,6 +53,17 @@ impl NodeWeight { } } + pub fn increment_vector_clock(&mut self, change_set: &ChangeSet) -> NodeWeightResult<()> { + match self { + NodeWeight::Content(content_weight) => { + content_weight.increment_vector_clock(change_set) + } + NodeWeight::Ordering(ordering_weight) => { + ordering_weight.increment_vector_clock(change_set) + } + } + } + pub fn lineage_id(&self) -> Ulid { match self { 
NodeWeight::Content(content_weight) => content_weight.lineage_id(), From 5a431bee88859d545b4eb5ce72c011ddb4ce1284 Mon Sep 17 00:00:00 2001 From: Nick Gerace Date: Tue, 22 Aug 2023 11:56:59 -0400 Subject: [PATCH 04/92] Fix test logic to perform a replace subgraph update The last "update_content" call was intended to cause a "ReplaceSubgraph" update to be found. It was written incorrectly and has been fixed. The expected results have been modified to reflect this change. Signed-off-by: Nick Gerace Co-authored-by: Jacob Helwig --- lib/dal/src/workspace_snapshot/graph.rs | 16 ++++++++++------ 1 file changed, 10 insertions(+), 6 deletions(-) diff --git a/lib/dal/src/workspace_snapshot/graph.rs b/lib/dal/src/workspace_snapshot/graph.rs index 017142a9f3..ae24fb1f8f 100644 --- a/lib/dal/src/workspace_snapshot/graph.rs +++ b/lib/dal/src/workspace_snapshot/graph.rs @@ -2470,9 +2470,9 @@ mod test { .expect("Unable to update the component"); // Create a pure update. - new_graph + base_graph .update_content( - new_change_set, + base_change_set, docker_image_schema_id, ContentHash::from("bg3"), ) @@ -2502,10 +2502,14 @@ mod test { .expect("Unable to get component NodeIndex"), }, ]; - // assert_eq!(expected_conflicts, conflicts); - - let expected_updates = Vec::::new(); - // assert_eq!(Vec::::new(), updates); + let expected_updates = vec![Update::ReplaceSubgraph { + new: base_graph + .get_node_index_by_id(docker_image_schema_id) + .expect("Unable to get NodeIndex"), + old: new_graph + .get_node_index_by_id(docker_image_schema_id) + .expect("Unable to get NodeIndex"), + }]; assert_eq!( ConflictsAndUpdates { From 30a8732a88f24de6ee0524db8d42be4a77781dfe Mon Sep 17 00:00:00 2001 From: Jacob Helwig Date: Tue, 22 Aug 2023 15:22:47 -0700 Subject: [PATCH 05/92] Refine helper method for finding the `Ordering` node for a container, if it has one We're going to need to find the `Ordering` node more often than just in the conflict/update detection logic, and the current method of finding a 
`Vec` of possible nodes is a bit awkward to be using in multiple
places.

This makes it so we have a method returning a
`Result<Option<NodeIndex>>`, which is what we were really wanting in the
existing places anyway.
---
 lib/dal/src/workspace_snapshot/graph.rs | 45 +++++++++++++------------
 1 file changed, 24 insertions(+), 21 deletions(-)

diff --git a/lib/dal/src/workspace_snapshot/graph.rs b/lib/dal/src/workspace_snapshot/graph.rs
index ae24fb1f8f..1967495822 100644
--- a/lib/dal/src/workspace_snapshot/graph.rs
+++ b/lib/dal/src/workspace_snapshot/graph.rs
@@ -281,28 +281,13 @@ impl WorkspaceSnapshotGraph {
             }
 
             if onto_ordering_node_index.is_none() {
-                let onto_ordering_node_indexes =
-                    ordering_node_indexes_for_node_index(onto, onto_node_index);
-                if onto_ordering_node_indexes.len() > 1 {
-                    error!(
-                        "Too many ordering nodes found for onto NodeIndex {:?}",
-                        onto_node_index
-                    );
-                    return Err(event);
-                }
-                onto_ordering_node_index = onto_ordering_node_indexes.get(0).copied();
-            }
-            let to_rebase_ordering_node_indexes =
-                ordering_node_indexes_for_node_index(self, to_rebase_node_index);
-            if to_rebase_ordering_node_indexes.len() > 1 {
-                error!(
-                    "Too many ordering nodes found for to_rebase NodeIndex {:?}",
-                    to_rebase_node_index
-                );
-                return Err(event);
+                onto_ordering_node_index = onto
+                    .ordering_node_index_for_container(onto_node_index)
+                    .map_err(|_| event)?;
             }
-            let to_rebase_ordering_node_index =
-                to_rebase_ordering_node_indexes.get(0).copied();
+            let to_rebase_ordering_node_index = self
+                .ordering_node_index_for_container(to_rebase_node_index)
+                .map_err(|_| event)?;
 
             match (to_rebase_ordering_node_index, onto_ordering_node_index) {
                 (None, None) => {
@@ -910,6 +895,24 @@ impl WorkspaceSnapshotGraph {
             && algo::has_path_connecting(&self.graph, node, end, None)
     }
 
+    pub fn ordering_node_index_for_container(
+        &self,
+        container_node_index: NodeIndex,
+    ) -> WorkspaceSnapshotGraphResult<Option<NodeIndex>> {
+        let onto_ordering_node_indexes =
+            ordering_node_indexes_for_node_index(self,
container_node_index); + if onto_ordering_node_indexes.len() > 1 { + error!( + "Too many ordering nodes found for container NodeIndex {:?}", + container_node_index + ); + return Err(WorkspaceSnapshotGraphError::TooManyOrderingForNode( + container_node_index, + )); + } + Ok(onto_ordering_node_indexes.get(0).copied()) + } + /// [`StableGraph`] guarantees the stability of [`NodeIndex`] across removals, however there /// are **NO** guarantees around the stability of [`EdgeIndex`] across removals. If /// [`Self::cleanup()`] has been called, then any [`EdgeIndex`] should be considered invalid. From 7c4f9327ce13e5e7d2dc01c52032485a78992207 Mon Sep 17 00:00:00 2001 From: Jacob Helwig Date: Wed, 23 Aug 2023 15:45:38 -0700 Subject: [PATCH 06/92] Initial implementation of ordered child nodes in workspace graph While the concept of an ordering node has existed in the workspace graph, and the conflict/update detection logic, there was nothing to create (and maintain) ordering nodes. It is now possible to create nodes that have ordered children (not all children of a node must be in the ordering), and to re-order the children after it has been created. This does not yet test removing children from the ordering, when the edge to them is removed, nor does it yet test any of the conflict/update detection logic around ordering conflicts & updates. 
--- lib/dal/src/workspace_snapshot/graph.rs | 704 +++++++++++++++++- lib/dal/src/workspace_snapshot/node_weight.rs | 23 +- .../node_weight/ordering_node_weight.rs | 8 +- 3 files changed, 692 insertions(+), 43 deletions(-) diff --git a/lib/dal/src/workspace_snapshot/graph.rs b/lib/dal/src/workspace_snapshot/graph.rs index 1967495822..643a95ea50 100644 --- a/lib/dal/src/workspace_snapshot/graph.rs +++ b/lib/dal/src/workspace_snapshot/graph.rs @@ -1,4 +1,3 @@ -use petgraph::visit::NodeCount; use petgraph::{algo, prelude::*, visit::DfsEvent}; use serde::{Deserialize, Serialize}; use std::collections::{HashMap, HashSet}; @@ -17,6 +16,8 @@ use crate::{ ContentHash, }; +use super::node_weight::OrderingNodeWeight; + #[allow(clippy::large_enum_variant)] #[remain::sorted] #[derive(Debug, Error)] @@ -43,6 +44,8 @@ pub enum WorkspaceSnapshotGraphError { NodeWithIdNotFound(Ulid), #[error("NodeIndex has too many Ordering children: {0:?}")] TooManyOrderingForNode(NodeIndex), + #[error("Unable to add node to the graph")] + UnableToAddNode, #[error("Workspace Snapshot has conflicts and must be rebased")] WorkspaceNeedsRebase, #[error("Workspace Snapshot has conflicts")] @@ -83,6 +86,92 @@ impl WorkspaceSnapshotGraph { } pub fn add_edge( + &mut self, + change_set: &ChangeSet, + from_node_index: NodeIndex, + edge_weight: EdgeWeight, + to_node_index: NodeIndex, + ) -> WorkspaceSnapshotGraphResult { + let new_edge_index = + self.add_unordered_edge(change_set, from_node_index, edge_weight, to_node_index)?; + + let (new_from_node_index, _) = self + .graph + .edge_endpoints(new_edge_index) + .ok_or(WorkspaceSnapshotGraphError::EdgeWeightNotFound)?; + + // Find the ordering node of the "container" if there is one, and add the thing pointed to + // by the `to_node_index` to the ordering. + if let Some(container_ordering_node_index) = + self.ordering_node_index_for_container(new_from_node_index)? 
+        {
+            if let NodeWeight::Ordering(previous_container_ordering_node_weight) = self
+                .graph
+                .node_weight(container_ordering_node_index)
+                .ok_or_else(|| WorkspaceSnapshotGraphError::NodeWeightNotFound)?
+            {
+                let element_node_weight = self
+                    .graph
+                    .node_weight(to_node_index)
+                    .ok_or_else(|| WorkspaceSnapshotGraphError::NodeWeightNotFound)?;
+                let mut new_container_ordering_node_weight =
+                    previous_container_ordering_node_weight.clone();
+                let mut new_order =
+                    Vec::with_capacity(previous_container_ordering_node_weight.order().len() + 1);
+                new_order.extend(previous_container_ordering_node_weight.order());
+                new_order.push(element_node_weight.id());
+                new_container_ordering_node_weight.set_order(change_set, new_order)?;
+
+                let new_container_ordering_node_index =
+                    self.add_node(NodeWeight::Ordering(new_container_ordering_node_weight))?;
+                self.replace_references(
+                    container_ordering_node_index,
+                    new_container_ordering_node_index,
+                )?;
+            }
+        }
+
+        Ok(new_edge_index)
+    }
+
+    fn add_node(&mut self, node: NodeWeight) -> WorkspaceSnapshotGraphResult<NodeIndex> {
+        let new_node_index = self.graph.add_node(node);
+        self.update_merkle_tree_hash(new_node_index)?;
+
+        Ok(new_node_index)
+    }
+
+    fn add_ordered_node(
+        &mut self,
+        change_set: &ChangeSet,
+        node: NodeWeight,
+    ) -> WorkspaceSnapshotGraphResult<NodeIndex> {
+        let node_weight_id = node.id();
+        let new_node_index = self.add_node(node)?;
+        let ordering_node_index =
+            self.add_node(NodeWeight::Ordering(OrderingNodeWeight::new(change_set)?))?;
+        self.add_edge(
+            change_set,
+            new_node_index,
+            EdgeWeight::new(change_set, EdgeWeightKind::Ordering)?,
+            ordering_node_index,
+        )?;
+
+        // We can't use `self.get_node_index_by_id` yet, since the node isn't connected to the rest
+        // of the graph yet, and `get_node_index_by_id` checks to make sure there's a path from the
+        // root to the node before returning it.
There should only be one node with an edge + // pointing to the ordering node we just created, however, and that should be the "new + // version" of the node we're adding. + for neighbor_index in self.graph.neighbors_directed(ordering_node_index, Incoming) { + if self.get_node_weight(neighbor_index)?.id() == node_weight_id { + return Ok(neighbor_index); + } + } + + Err(WorkspaceSnapshotGraphError::UnableToAddNode) + } + + pub fn add_unordered_edge( &mut self, change_set: &ChangeSet, from_node_index: NodeIndex, @@ -114,18 +203,11 @@ impl WorkspaceSnapshotGraph { self.update_merkle_tree_hash(new_from_node_index)?; // Update the rest of the graph to reflect the new node/edge. - self.replace_references(change_set, from_node_index, new_from_node_index)?; + self.replace_references(from_node_index, new_from_node_index)?; Ok(new_edge_index) } - fn add_node(&mut self, node: NodeWeight) -> WorkspaceSnapshotGraphResult { - let new_node_index = self.graph.add_node(node); - self.update_merkle_tree_hash(new_node_index)?; - - Ok(new_node_index) - } - pub fn cleanup(&mut self) { self.graph.retain_nodes(|frozen_graph, current_node| { // We cannot use "has_path_to_root" because we need to use the Frozen>. 
@@ -412,21 +494,6 @@ impl WorkspaceSnapshotGraph {
         );
     }
 
-    pub fn update_content(
-        &mut self,
-        change_set: &ChangeSet,
-        id: Ulid,
-        new_content_hash: ContentHash,
-    ) -> WorkspaceSnapshotGraphResult<()> {
-        let original_node_index = self.get_node_index_by_id(id)?;
-        let new_node_index = self.copy_node_index(original_node_index)?;
-        let node_weight = self.get_node_weight_mut(new_node_index)?;
-        node_weight.increment_vector_clock(change_set)?;
-        node_weight.new_content_hash(new_content_hash)?;
-
-        self.replace_references(change_set, original_node_index, new_node_index)
-    }
-
     fn find_ordered_container_membership_conflicts_and_updates(
         &self,
         to_rebase_change_set: &ChangeSet,
@@ -895,6 +962,36 @@ impl WorkspaceSnapshotGraph {
             && algo::has_path_connecting(&self.graph, node, end, None)
     }
 
+    pub fn ordered_children_for_node(
+        &self,
+        container_node_index: NodeIndex,
+    ) -> WorkspaceSnapshotGraphResult<Vec<NodeIndex>> {
+        let mut ordered_child_indexes = Vec::new();
+        if let Some(container_ordering_index) =
+            self.ordering_node_index_for_container(container_node_index)?
+        {
+            if let NodeWeight::Ordering(ordering_weight) =
+                self.get_node_weight(container_ordering_index)?
+            {
+                let mut node_index_by_id = HashMap::new();
+                for neighbor_index in self
+                    .graph
+                    .neighbors_directed(container_node_index, Outgoing)
+                {
+                    let neighbor_weight = self.get_node_weight(neighbor_index)?;
+                    node_index_by_id.insert(neighbor_weight.id(), neighbor_index);
+                }
+                for ordered_id in ordering_weight.order() {
+                    ordered_child_indexes.push(*node_index_by_id.get(ordered_id).ok_or_else(
+                        || WorkspaceSnapshotGraphError::NodeWithIdNotFound(*ordered_id),
+                    )?);
+                }
+            }
+        }
+
+        Ok(ordered_child_indexes)
+    }
+
     pub fn ordering_node_index_for_container(
         &self,
         container_node_index: NodeIndex,
@@ -915,7 +1012,8 @@ impl WorkspaceSnapshotGraph {
 
     /// [`StableGraph`] guarantees the stability of [`NodeIndex`] across removals, however there
     /// are **NO** guarantees around the stability of [`EdgeIndex`] across removals. If
If - /// [`Self::cleanup()`] has been called, then any [`EdgeIndex`] should be considered invalid. + /// [`Self::cleanup()`] has been called, then any [`EdgeIndex`] found before + /// [`Self::cleanup()`] has run should be considered invalid. fn remove_edge( &mut self, change_set: &ChangeSet, @@ -924,14 +1022,13 @@ impl WorkspaceSnapshotGraph { edge_kind: EdgeWeightKind, ) -> WorkspaceSnapshotGraphResult<()> { let mut edges_to_remove = Vec::new(); - let new_source_node_index = dbg!(self.copy_node_index(source_node_index)?); - self.replace_references(change_set, dbg!(source_node_index), new_source_node_index)?; + let new_source_node_index = self.copy_node_index(source_node_index)?; + self.replace_references(source_node_index, new_source_node_index)?; for edgeref in self .graph .edges_connecting(new_source_node_index, target_node_index) { - dbg!(&edgeref); if edgeref.weight().kind() == edge_kind { edges_to_remove.push(edgeref.id()); } @@ -939,15 +1036,46 @@ impl WorkspaceSnapshotGraph { for edge_to_remove in edges_to_remove { self.graph.remove_edge(edge_to_remove); } - self.update_merkle_tree_hash(new_source_node_index)?; + if let Some(previous_container_ordering_node_index) = + self.ordering_node_index_for_container(new_source_node_index)? + { + let old_target_node_weight = self + .graph + .node_weight(target_node_index) + .ok_or(WorkspaceSnapshotGraphError::NodeWeightNotFound)?; + if let NodeWeight::Ordering(previous_container_ordering_node_weight) = self + .graph + .node_weight(previous_container_ordering_node_index) + .ok_or(WorkspaceSnapshotGraphError::NodeWeightNotFound)? + { + let mut new_container_ordering_node_weight = + previous_container_ordering_node_weight.clone(); + let old_target_id = old_target_node_weight.id(); + let mut new_order = new_container_ordering_node_weight.order().clone(); + new_order.retain(|id| *id != old_target_id); + + // We only want to update the ordering of the container if we removed an edge to + // one of the ordered relationships. 
+ if &new_order != previous_container_ordering_node_weight.order() { + new_container_ordering_node_weight.set_order(change_set, new_order)?; + + let new_container_ordering_node_index = + self.add_node(NodeWeight::Ordering(new_container_ordering_node_weight))?; + self.replace_references( + previous_container_ordering_node_index, + new_container_ordering_node_index, + )?; + } + } + } + Ok(()) } fn replace_references( &mut self, - change_set: &ChangeSet, original_node_index: NodeIndex, new_node_index: NodeIndex, ) -> WorkspaceSnapshotGraphResult<()> { @@ -1018,6 +1146,37 @@ impl WorkspaceSnapshotGraph { Ok(()) } + pub fn update_content( + &mut self, + change_set: &ChangeSet, + id: Ulid, + new_content_hash: ContentHash, + ) -> WorkspaceSnapshotGraphResult<()> { + let original_node_index = self.get_node_index_by_id(id)?; + let new_node_index = self.copy_node_index(original_node_index)?; + let node_weight = self.get_node_weight_mut(new_node_index)?; + node_weight.increment_vector_clock(change_set)?; + node_weight.new_content_hash(new_content_hash)?; + + self.replace_references(original_node_index, new_node_index) + } + + pub fn update_order( + &mut self, + change_set: &ChangeSet, + container_id: Ulid, + new_order: Vec, + ) -> WorkspaceSnapshotGraphResult<()> { + let original_node_index = self + .ordering_node_index_for_container(self.get_node_index_by_id(container_id)?)? 
+ .ok_or(WorkspaceSnapshotGraphError::NodeWeightNotFound)?; + let new_node_index = self.copy_node_index(original_node_index)?; + let node_weight = self.get_node_weight_mut(new_node_index)?; + node_weight.set_order(change_set, new_order)?; + + self.replace_references(original_node_index, new_node_index) + } + fn update_merkle_tree_hash( &mut self, node_index_to_update: NodeIndex, @@ -2522,4 +2681,489 @@ mod test { ConflictsAndUpdates { conflicts, updates }, ); } + + #[test] + fn add_ordered_node() { + let change_set = ChangeSet::new().expect("Unable to create ChangeSet"); + let change_set = &change_set; + let mut graph = WorkspaceSnapshotGraph::new(change_set) + .expect("Unable to create WorkspaceSnapshotGraph"); + + let schema_id = change_set.generate_ulid().expect("Cannot generate Ulid"); + let schema_index = graph + .add_node( + NodeWeight::new_content( + change_set, + schema_id, + ContentAddress::Schema(ContentHash::new( + SchemaId::generate().to_string().as_bytes(), + )), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add schema"); + let schema_variant_id = change_set.generate_ulid().expect("Cannot generate Ulid"); + let schema_variant_index = graph + .add_node( + NodeWeight::new_content( + change_set, + schema_variant_id, + ContentAddress::SchemaVariant(ContentHash::new( + SchemaVariantId::generate().to_string().as_bytes(), + )), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add schema variant"); + + graph + .add_edge( + change_set, + graph.root_index, + EdgeWeight::new(change_set, EdgeWeightKind::Uses) + .expect("Unable to create EdgeWeight"), + schema_index, + ) + .expect("Unable to add root -> schema edge"); + graph + .add_edge( + change_set, + graph + .get_node_index_by_id(schema_id) + .expect("Cannot get NodeIndex"), + EdgeWeight::new(change_set, EdgeWeightKind::Uses) + .expect("Unable to create EdgeWeight"), + schema_variant_index, + ) + .expect("Unable to add schema -> schema variant edge"); + + let 
func_id = change_set.generate_ulid().expect("Cannot generate Ulid"); + let func_index = graph + .add_node( + NodeWeight::new_content( + change_set, + func_id, + ContentAddress::Func(ContentHash::new( + FuncId::generate().to_string().as_bytes(), + )), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add func"); + graph + .add_edge( + change_set, + graph.root_index, + EdgeWeight::new(change_set, EdgeWeightKind::Uses) + .expect("Unable to create EdgeWeight"), + func_index, + ) + .expect("Unable to add root -> func edge"); + + let prop_id = change_set.generate_ulid().expect("Cannot generate Ulid"); + let prop_index = graph + .add_ordered_node( + change_set, + NodeWeight::new_content( + change_set, + prop_id, + ContentAddress::Prop(ContentHash::new( + PropId::generate().to_string().as_bytes(), + )), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add prop"); + graph + .add_edge( + change_set, + graph + .get_node_index_by_id(schema_variant_id) + .expect("Cannot get NodeIndex"), + EdgeWeight::new(change_set, EdgeWeightKind::Uses) + .expect("Unable to create EdgeWeight"), + prop_index, + ) + .expect("Unable to add schema variant -> prop edge"); + graph + .add_unordered_edge( + change_set, + graph + .get_node_index_by_id(prop_id) + .expect("Cannot get NodeIndex"), + EdgeWeight::new(change_set, EdgeWeightKind::Uses) + .expect("Unable to create EdgeWeight"), + graph + .get_node_index_by_id(func_id) + .expect("Cannot get NodeIndex"), + ) + .expect("Unable to add prop -> func edge"); + graph.cleanup(); + graph.dot(); + + let ordered_prop_1_id = change_set.generate_ulid().expect("Unable to generate Ulid"); + let ordered_prop_1_index = graph + .add_ordered_node( + change_set, + NodeWeight::new_content( + change_set, + ordered_prop_1_id, + ContentAddress::Prop(ContentHash::new( + ordered_prop_1_id.to_string().as_bytes(), + )), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add ordered prop"); + graph + .add_edge( 
+ change_set, + graph + .get_node_index_by_id(prop_id) + .expect("Unable to get NodeWeight for prop"), + EdgeWeight::new(change_set, EdgeWeightKind::Uses) + .expect("Unable to create uses edge weight"), + ordered_prop_1_index, + ) + .expect("Unable to add prop -> ordered_prop_1 edge"); + + let ordered_prop_2_id = change_set.generate_ulid().expect("Unable to generate Ulid"); + let ordered_prop_2_index = graph + .add_ordered_node( + change_set, + NodeWeight::new_content( + change_set, + ordered_prop_2_id, + ContentAddress::Prop(ContentHash::new( + ordered_prop_2_id.to_string().as_bytes(), + )), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add ordered prop"); + graph + .add_edge( + change_set, + graph + .get_node_index_by_id(prop_id) + .expect("Unable to get NodeWeight for prop"), + EdgeWeight::new(change_set, EdgeWeightKind::Uses) + .expect("Unable to create uses edge weight"), + ordered_prop_2_index, + ) + .expect("Unable to add prop -> ordered_prop_2 edge"); + + let ordered_prop_3_id = change_set.generate_ulid().expect("Unable to generate Ulid"); + let ordered_prop_3_index = graph + .add_ordered_node( + change_set, + NodeWeight::new_content( + change_set, + ordered_prop_3_id, + ContentAddress::Prop(ContentHash::new( + ordered_prop_3_id.to_string().as_bytes(), + )), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add ordered prop"); + graph + .add_edge( + change_set, + graph + .get_node_index_by_id(prop_id) + .expect("Unable to get NodeWeight for prop"), + EdgeWeight::new(change_set, EdgeWeightKind::Uses) + .expect("Unable to create uses edge weight"), + ordered_prop_3_index, + ) + .expect("Unable to add prop -> ordered_prop_3 edge"); + graph.cleanup(); + graph.dot(); + + assert_eq!( + vec![ + ordered_prop_1_index, + ordered_prop_2_index, + ordered_prop_3_index + ], + graph + .ordered_children_for_node( + graph + .get_node_index_by_id(prop_id) + .expect("Unable to get prop NodeIndex") + ) + .expect("Unable to find 
ordered cchildren for node") + ); + } + + #[test] + fn reorder_ordered_node() { + let change_set = ChangeSet::new().expect("Unable to create ChangeSet"); + let change_set = &change_set; + let mut graph = WorkspaceSnapshotGraph::new(change_set) + .expect("Unable to create WorkspaceSnapshotGraph"); + + let schema_id = change_set.generate_ulid().expect("Cannot generate Ulid"); + let schema_index = graph + .add_node( + NodeWeight::new_content( + change_set, + schema_id, + ContentAddress::Schema(ContentHash::new( + SchemaId::generate().to_string().as_bytes(), + )), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add schema"); + let schema_variant_id = change_set.generate_ulid().expect("Cannot generate Ulid"); + let schema_variant_index = graph + .add_node( + NodeWeight::new_content( + change_set, + schema_variant_id, + ContentAddress::SchemaVariant(ContentHash::new( + SchemaVariantId::generate().to_string().as_bytes(), + )), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add schema variant"); + + graph + .add_edge( + change_set, + graph.root_index, + EdgeWeight::new(change_set, EdgeWeightKind::Uses) + .expect("Unable to create EdgeWeight"), + schema_index, + ) + .expect("Unable to add root -> schema edge"); + graph + .add_edge( + change_set, + graph + .get_node_index_by_id(schema_id) + .expect("Cannot get NodeIndex"), + EdgeWeight::new(change_set, EdgeWeightKind::Uses) + .expect("Unable to create EdgeWeight"), + schema_variant_index, + ) + .expect("Unable to add schema -> schema variant edge"); + + let func_id = change_set.generate_ulid().expect("Cannot generate Ulid"); + let func_index = graph + .add_node( + NodeWeight::new_content( + change_set, + func_id, + ContentAddress::Func(ContentHash::new( + FuncId::generate().to_string().as_bytes(), + )), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add func"); + graph + .add_edge( + change_set, + graph.root_index, + EdgeWeight::new(change_set, 
EdgeWeightKind::Uses) + .expect("Unable to create EdgeWeight"), + func_index, + ) + .expect("Unable to add root -> func edge"); + + let prop_id = change_set.generate_ulid().expect("Cannot generate Ulid"); + let prop_index = graph + .add_ordered_node( + change_set, + NodeWeight::new_content( + change_set, + prop_id, + ContentAddress::Prop(ContentHash::new( + PropId::generate().to_string().as_bytes(), + )), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add prop"); + graph + .add_edge( + change_set, + graph + .get_node_index_by_id(schema_variant_id) + .expect("Cannot get NodeIndex"), + EdgeWeight::new(change_set, EdgeWeightKind::Uses) + .expect("Unable to create EdgeWeight"), + prop_index, + ) + .expect("Unable to add schema variant -> prop edge"); + graph + .add_unordered_edge( + change_set, + graph + .get_node_index_by_id(prop_id) + .expect("Cannot get NodeIndex"), + EdgeWeight::new(change_set, EdgeWeightKind::Uses) + .expect("Unable to create EdgeWeight"), + graph + .get_node_index_by_id(func_id) + .expect("Cannot get NodeIndex"), + ) + .expect("Unable to add prop -> func edge"); + graph.cleanup(); + graph.dot(); + + let ordered_prop_1_id = change_set.generate_ulid().expect("Unable to generate Ulid"); + let ordered_prop_1_index = graph + .add_ordered_node( + change_set, + NodeWeight::new_content( + change_set, + ordered_prop_1_id, + ContentAddress::Prop(ContentHash::new( + ordered_prop_1_id.to_string().as_bytes(), + )), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add ordered prop"); + graph + .add_edge( + change_set, + graph + .get_node_index_by_id(prop_id) + .expect("Unable to get NodeWeight for prop"), + EdgeWeight::new(change_set, EdgeWeightKind::Uses) + .expect("Unable to create uses edge weight"), + ordered_prop_1_index, + ) + .expect("Unable to add prop -> ordered_prop_1 edge"); + + let ordered_prop_2_id = change_set.generate_ulid().expect("Unable to generate Ulid"); + let ordered_prop_2_index = graph + 
.add_ordered_node( + change_set, + NodeWeight::new_content( + change_set, + ordered_prop_2_id, + ContentAddress::Prop(ContentHash::new( + ordered_prop_2_id.to_string().as_bytes(), + )), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add ordered prop"); + graph + .add_edge( + change_set, + graph + .get_node_index_by_id(prop_id) + .expect("Unable to get NodeWeight for prop"), + EdgeWeight::new(change_set, EdgeWeightKind::Uses) + .expect("Unable to create uses edge weight"), + ordered_prop_2_index, + ) + .expect("Unable to add prop -> ordered_prop_2 edge"); + + let ordered_prop_3_id = change_set.generate_ulid().expect("Unable to generate Ulid"); + let ordered_prop_3_index = graph + .add_ordered_node( + change_set, + NodeWeight::new_content( + change_set, + ordered_prop_3_id, + ContentAddress::Prop(ContentHash::new( + ordered_prop_3_id.to_string().as_bytes(), + )), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add ordered prop"); + graph + .add_edge( + change_set, + graph + .get_node_index_by_id(prop_id) + .expect("Unable to get NodeWeight for prop"), + EdgeWeight::new(change_set, EdgeWeightKind::Uses) + .expect("Unable to create uses edge weight"), + ordered_prop_3_index, + ) + .expect("Unable to add prop -> ordered_prop_3 edge"); + + let ordered_prop_4_id = change_set.generate_ulid().expect("Unable to generate Ulid"); + let ordered_prop_4_index = graph + .add_ordered_node( + change_set, + NodeWeight::new_content( + change_set, + ordered_prop_4_id, + ContentAddress::Prop(ContentHash::new( + ordered_prop_4_id.to_string().as_bytes(), + )), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add ordered prop"); + graph + .add_edge( + change_set, + graph + .get_node_index_by_id(prop_id) + .expect("Unable to get NodeWeight for prop"), + EdgeWeight::new(change_set, EdgeWeightKind::Uses) + .expect("Unable to create uses edge weight"), + ordered_prop_4_index, + ) + .expect("Unable to add prop -> 
ordered_prop_4 edge"); + + graph.cleanup(); + graph.dot(); + + assert_eq!( + vec![ + ordered_prop_1_index, + ordered_prop_2_index, + ordered_prop_3_index, + ordered_prop_4_index, + ], + graph + .ordered_children_for_node( + graph + .get_node_index_by_id(prop_id) + .expect("Unable to get prop NodeIndex") + ) + .expect("Unable to find ordered children for node") + ); + + let new_order = vec![ + ordered_prop_2_id, + ordered_prop_1_id, + ordered_prop_4_id, + ordered_prop_3_id, + ]; + + graph + .update_order(change_set, prop_id, new_order) + .expect("Unable to update order of prop's children"); + + assert_eq!( + vec![ + ordered_prop_2_index, + ordered_prop_1_index, + ordered_prop_4_index, + ordered_prop_3_index, + ], + graph + .ordered_children_for_node( + graph + .get_node_index_by_id(prop_id) + .expect("Unable to get prop NodeIndex") + ) + .expect("Unable to find ordered children for node") + ); + } } diff --git a/lib/dal/src/workspace_snapshot/node_weight.rs b/lib/dal/src/workspace_snapshot/node_weight.rs index 08c8416ce7..547fba14d7 100644 --- a/lib/dal/src/workspace_snapshot/node_weight.rs +++ b/lib/dal/src/workspace_snapshot/node_weight.rs @@ -20,6 +20,8 @@ pub mod ordering_node_weight; pub enum NodeWeightError { #[error("Cannot set content hash directly on node weight kind")] CannotSetContentHashOnKind, + #[error("Cannot set content order directly on node weight kind")] + CannotSetOrderOnKind, #[error("Cannot update root node's content hash")] CannotUpdateRootNodeContentHash, #[error("ChangeSet error: {0}")] @@ -96,13 +98,6 @@ impl NodeWeight { } } - pub fn new_content_hash(&mut self, content_hash: ContentHash) -> NodeWeightResult<()> { - match self { - NodeWeight::Content(content_weight) => content_weight.new_content_hash(content_hash), - NodeWeight::Ordering(_) => Err(NodeWeightError::CannotSetContentHashOnKind), - } - } - pub fn new_content( change_set: &ChangeSet, content_id: Ulid, @@ -113,6 +108,13 @@ impl NodeWeight { )?)) } + pub fn new_content_hash(&mut 
self, content_hash: ContentHash) -> NodeWeightResult<()> { + match self { + NodeWeight::Content(content_weight) => content_weight.new_content_hash(content_hash), + NodeWeight::Ordering(_) => Err(NodeWeightError::CannotSetContentHashOnKind), + } + } + pub fn new_with_incremented_vector_clock( &self, change_set: &ChangeSet, @@ -136,6 +138,13 @@ impl NodeWeight { } } + pub fn set_order(&mut self, change_set: &ChangeSet, order: Vec) -> NodeWeightResult<()> { + match self { + NodeWeight::Content(_) => Err(NodeWeightError::CannotSetOrderOnKind), + NodeWeight::Ordering(ordering_weight) => ordering_weight.set_order(change_set, order), + } + } + pub fn set_vector_clock_recently_seen_to( &mut self, change_set: &ChangeSet, diff --git a/lib/dal/src/workspace_snapshot/node_weight/ordering_node_weight.rs b/lib/dal/src/workspace_snapshot/node_weight/ordering_node_weight.rs index 3e05ab99c8..4e3ddd6108 100644 --- a/lib/dal/src/workspace_snapshot/node_weight/ordering_node_weight.rs +++ b/lib/dal/src/workspace_snapshot/node_weight/ordering_node_weight.rs @@ -90,12 +90,8 @@ impl OrderingNodeWeight { self.merkle_tree_hash = new_hash; } - pub fn set_order<'a>( - &mut self, - change_set: &ChangeSet, - order: impl AsRef<&'a [Ulid]>, - ) -> NodeWeightResult<()> { - self.order = Vec::from(*order.as_ref()); + pub fn set_order(&mut self, change_set: &ChangeSet, order: Vec) -> NodeWeightResult<()> { + self.order = order; self.update_content_hash(); self.increment_seen_vector_clock(change_set)?; From 8322d92d10e49edc72be198b4278b4157a4f8966 Mon Sep 17 00:00:00 2001 From: Nick Gerace Date: Tue, 22 Aug 2023 15:12:16 -0400 Subject: [PATCH 07/92] Add working RabbitMQ connection for the Gobbler This commit ensure RabbitMQ is running with the correct ports and enabled plugin (i.e. the stream plugin). When running tests with this commit, you must use "buck2 build component/rabbitmq" and "docker tag" to ensure the image name and tag are correct. 
This commit also fixes the lang-js dependent target bug that the dal integration tests also experienced when the pnpm logic was changed recently. You can test the connection with the following command: ``` SI_TEST_BUILTIN_SCHEMAS=none buck2 run \ lib/gobbler-server:test-integration -- \ integration_test::connection::connect_to_queue ``` Signed-off-by: Nick Gerace Co-authored-by: Jacob Helwig --- dev/docker-compose.platform.yml | 6 ++---- lib/gobbler-server/BUCK | 2 +- lib/si-rabbitmq/src/connection.rs | 4 +++- 3 files changed, 6 insertions(+), 6 deletions(-) diff --git a/dev/docker-compose.platform.yml b/dev/docker-compose.platform.yml index 3f861e5a85..ef7b285762 100644 --- a/dev/docker-compose.platform.yml +++ b/dev/docker-compose.platform.yml @@ -37,9 +37,7 @@ services: - jaeger rabbitmq: - # FIXME(nick): use our own rabbitmq once on "main". - # image: systeminit/rabbitmq:stable - image: rabbitmq:3.12-management-alpine + image: systeminit/rabbitmq:stable ports: - - "5672:5672" + - "5552:5552" - "15672:15672" diff --git a/lib/gobbler-server/BUCK b/lib/gobbler-server/BUCK index 21e4b8bef1..7b6c9e8758 100644 --- a/lib/gobbler-server/BUCK +++ b/lib/gobbler-server/BUCK @@ -61,7 +61,7 @@ rust_test( "dev.encryption.key": "//lib/cyclone-server:dev.encryption.key", "dev.jwt_signing_private_key.pem": "//config/keys:dev.jwt_signing_private_key.pem", "dev.jwt_signing_public_key.pem": "//config/keys:dev.jwt_signing_public_key.pem", - "lang-js": "//bin/lang-js:lang-js", + "lang-js": "//bin/lang-js:bin", "pkgs_path": "//pkgs:pkgs", "prod.jwt_signing_public_key.pem": "//config/keys:prod.jwt_signing_public_key.pem", }, diff --git a/lib/si-rabbitmq/src/connection.rs b/lib/si-rabbitmq/src/connection.rs index 7ee344a374..3da6de4efb 100644 --- a/lib/si-rabbitmq/src/connection.rs +++ b/lib/si-rabbitmq/src/connection.rs @@ -14,7 +14,9 @@ impl Connection { pub async fn new() -> RabbitResult { let environment = Environment::builder() .host("localhost") - .port(5672) + .username("guest") 
+ .password("guest") + .port(5552) .build() .await?; Ok(Self { environment }) From d029f5b69762dcf79b788ef623376bc658f09648 Mon Sep 17 00:00:00 2001 From: Nick Gerace Date: Tue, 22 Aug 2023 18:40:05 -0400 Subject: [PATCH 08/92] Test producer logic for the gobbler This commit adds an integration test for ensuring producer logic works for the gobbler. Signed-off-by: Nick Gerace --- .../tests/integration_test/connection.rs | 4 +-- .../tests/integration_test/mod.rs | 1 + .../integration_test/produce_and_consume.rs | 30 +++++++++++++++++++ lib/si-rabbitmq/src/connection.rs | 6 ++-- lib/si-rabbitmq/src/consumer.rs | 4 +-- lib/si-rabbitmq/src/lib.rs | 4 +-- lib/si-rabbitmq/src/producer.rs | 28 ++++++++++------- 7 files changed, 58 insertions(+), 19 deletions(-) create mode 100644 lib/gobbler-server/tests/integration_test/produce_and_consume.rs diff --git a/lib/gobbler-server/tests/integration_test/connection.rs b/lib/gobbler-server/tests/integration_test/connection.rs index 60e14bc9b3..cb632b9de4 100644 --- a/lib/gobbler-server/tests/integration_test/connection.rs +++ b/lib/gobbler-server/tests/integration_test/connection.rs @@ -1,6 +1,6 @@ use dal::workspace_snapshot::change_set::ChangeSet; use dal::{DalContext, WorkspaceSnapshot}; -use si_rabbitmq::Connection; +use si_rabbitmq::StreamManager; use si_test_macros::gobbler_test as test; /// Recommended to run with the following environment variable: @@ -21,5 +21,5 @@ async fn create_snapshot(ctx: &DalContext) { /// ``` #[test] async fn connect_to_queue(_ctx: &DalContext) { - let _ = Connection::new().await.expect("could not connect"); + let _ = StreamManager::new().await.expect("could not connect"); } diff --git a/lib/gobbler-server/tests/integration_test/mod.rs b/lib/gobbler-server/tests/integration_test/mod.rs index 6fa1f6955a..7b2cba6f32 100644 --- a/lib/gobbler-server/tests/integration_test/mod.rs +++ b/lib/gobbler-server/tests/integration_test/mod.rs @@ -1 +1,2 @@ mod connection; +mod produce_and_consume; diff --git 
a/lib/gobbler-server/tests/integration_test/produce_and_consume.rs b/lib/gobbler-server/tests/integration_test/produce_and_consume.rs new file mode 100644 index 0000000000..a419881b7d --- /dev/null +++ b/lib/gobbler-server/tests/integration_test/produce_and_consume.rs @@ -0,0 +1,30 @@ +use dal::DalContext; +use dal_test::random_identifier_string; +use si_rabbitmq::{Producer, StreamManager}; +use si_test_macros::gobbler_test as test; + +/// Recommended to run with the following environment variable: +/// ```shell +/// SI_TEST_BUILTIN_SCHEMAS=none +/// ``` +#[test] +async fn produce(_ctx: &DalContext) { + let stream = &random_identifier_string(); + let manager = StreamManager::new().await.expect("could not connect"); + manager + .create_stream(stream) + .await + .expect("could not create stream"); + + let mut producer = Producer::new(&manager, "producer", stream) + .await + .expect("could not create producer"); + producer + .send_single("foo") + .await + .expect("could not singe message"); + producer + .send_batch(vec!["bar".as_bytes(), "baz".as_bytes()]) + .await + .expect("could not send message batch"); +} diff --git a/lib/si-rabbitmq/src/connection.rs b/lib/si-rabbitmq/src/connection.rs index 3da6de4efb..f7124b9591 100644 --- a/lib/si-rabbitmq/src/connection.rs +++ b/lib/si-rabbitmq/src/connection.rs @@ -5,12 +5,12 @@ use crate::error::RabbitResult; /// A connection to a RabbitMQ node. #[allow(missing_debug_implementations)] -pub struct Connection { +pub struct StreamManager { environment: Environment, } -impl Connection { - /// Creates a new [`Connection`], which contains a connection to a RabbitMQ node. +impl StreamManager { + /// Creates a new [`StreamManager`], which contains a connection to a RabbitMQ node. 
pub async fn new() -> RabbitResult { let environment = Environment::builder() .host("localhost") diff --git a/lib/si-rabbitmq/src/consumer.rs b/lib/si-rabbitmq/src/consumer.rs index fc7a4e450c..9af92f1c5c 100644 --- a/lib/si-rabbitmq/src/consumer.rs +++ b/lib/si-rabbitmq/src/consumer.rs @@ -1,4 +1,4 @@ -use crate::connection::Connection; +use crate::connection::StreamManager; use futures::StreamExt; use rabbitmq_stream_client::error::ConsumerDeliveryError; use rabbitmq_stream_client::types::Delivery; @@ -13,7 +13,7 @@ pub struct Consumer(UpstreamConsumer); impl Consumer { /// Creates a new [`Consumer`] for consuming RabbitMQ stream messages. - pub async fn new(connection: &Connection, stream: &str) -> RabbitResult { + pub async fn new(connection: &StreamManager, stream: &str) -> RabbitResult { let consumer = connection.inner().consumer().build(stream).await?; Ok(Self(consumer)) } diff --git a/lib/si-rabbitmq/src/lib.rs b/lib/si-rabbitmq/src/lib.rs index 1aa1126e49..f58171eb7b 100644 --- a/lib/si-rabbitmq/src/lib.rs +++ b/lib/si-rabbitmq/src/lib.rs @@ -1,4 +1,4 @@ -//! This library provides the ability to [connect](Connection) to [RabbitMQ](https://rabbitmq.com) +//! This library provides the ability to [connect](StreamManager) to [RabbitMQ](https://rabbitmq.com) //! nodes, [produce](Producer) stream messages, and [consume](Consumer) stream messages. 
#![warn( @@ -28,7 +28,7 @@ mod consumer; mod error; mod producer; -pub use connection::Connection; +pub use connection::StreamManager; pub use consumer::Consumer; pub use error::RabbitError; pub use error::RabbitResult; diff --git a/lib/si-rabbitmq/src/producer.rs b/lib/si-rabbitmq/src/producer.rs index f6646eaef6..050f58e2fa 100644 --- a/lib/si-rabbitmq/src/producer.rs +++ b/lib/si-rabbitmq/src/producer.rs @@ -1,20 +1,29 @@ use rabbitmq_stream_client::types::Message; -use rabbitmq_stream_client::{NoDedup, Producer as UpstreamProducer}; +use rabbitmq_stream_client::{Dedup, NoDedup, Producer as UpstreamProducer}; -use crate::connection::Connection; +use crate::connection::StreamManager; use crate::{RabbitError, RabbitResult}; /// An interface for producing and sending RabbitMQ stream messages. #[allow(missing_debug_implementations)] pub struct Producer { - producer: UpstreamProducer, + producer: UpstreamProducer, closed: bool, } impl Producer { /// Creates a new [`Producer`] for producing and sending RabbitMQ stream messages. - pub async fn new(connection: &Connection, stream: &str) -> RabbitResult { - let producer = connection.inner().producer().build(stream).await?; + pub async fn new( + connection: &StreamManager, + name: impl AsRef, + stream: impl AsRef, + ) -> RabbitResult { + let producer = connection + .inner() + .producer() + .name(name.as_ref()) + .build(stream.as_ref()) + .await?; Ok(Self { producer, closed: false, @@ -22,7 +31,7 @@ impl Producer { } /// Sends a single message to a stream. - pub async fn send_single(&self, message: impl Into>) -> RabbitResult<()> { + pub async fn send_single(&mut self, message: impl Into>) -> RabbitResult<()> { if self.closed { return Err(RabbitError::ProducerClosed); } @@ -33,16 +42,15 @@ impl Producer { } /// Sends a batch of messages to a stream. 
- pub async fn send_batch(&self, messages: impl Into>>) -> RabbitResult<()> { + pub async fn send_batch(&mut self, messages: Vec>>) -> RabbitResult<()> { if self.closed { return Err(RabbitError::ProducerClosed); } self.producer .batch_send_with_confirm( messages - .into() - .iter() - .map(|m| Message::builder().body(m.clone()).build()) + .into_iter() + .map(|m| Message::builder().body(m.into()).build()) .collect(), ) .await?; From 83d8e9639918ff2d8c9c0a03ef9b110f3605bc37 Mon Sep 17 00:00:00 2001 From: Nick Gerace Date: Wed, 23 Aug 2023 13:52:38 -0400 Subject: [PATCH 09/92] Ensure we can produce messages to RabbitMQ - Ensure we can produce messages to RabbitMQ by setting the advertised host to localhost (otherwise, the initial environment connection smoke test will work, but the stream client connection will fail) - Re-word manager, client and connection naming under the "environment" umbrella, which reflects how producer and consumer are named in the RabbitMQ wrapper crate Signed-off-by: Nick Gerace Co-authored-by: Zachary Hamm --- dev/docker-compose.platform.yml | 3 +++ .../tests/integration_test/connection.rs | 4 ++-- .../integration_test/produce_and_consume.rs | 8 +++---- lib/si-rabbitmq/src/consumer.rs | 6 ++--- .../src/{connection.rs => environment.rs} | 22 +++++++++---------- lib/si-rabbitmq/src/lib.rs | 6 ++--- lib/si-rabbitmq/src/producer.rs | 6 ++--- 7 files changed, 29 insertions(+), 26 deletions(-) rename lib/si-rabbitmq/src/{connection.rs => environment.rs} (63%) diff --git a/dev/docker-compose.platform.yml b/dev/docker-compose.platform.yml index ef7b285762..f44e54ca43 100644 --- a/dev/docker-compose.platform.yml +++ b/dev/docker-compose.platform.yml @@ -38,6 +38,9 @@ services: rabbitmq: image: systeminit/rabbitmq:stable + environment: + # NOTE(nick,zack): the formatting here is very particular, likely due to whitespaces. 
+ RABBITMQ_SERVER_ADDITIONAL_ERL_ARGS: "-rabbitmq_stream advertised_host localhost" ports: - "5552:5552" - "15672:15672" diff --git a/lib/gobbler-server/tests/integration_test/connection.rs b/lib/gobbler-server/tests/integration_test/connection.rs index cb632b9de4..1931b7ce6f 100644 --- a/lib/gobbler-server/tests/integration_test/connection.rs +++ b/lib/gobbler-server/tests/integration_test/connection.rs @@ -1,6 +1,6 @@ use dal::workspace_snapshot::change_set::ChangeSet; use dal::{DalContext, WorkspaceSnapshot}; -use si_rabbitmq::StreamManager; +use si_rabbitmq::Environment; use si_test_macros::gobbler_test as test; /// Recommended to run with the following environment variable: @@ -21,5 +21,5 @@ async fn create_snapshot(ctx: &DalContext) { /// ``` #[test] async fn connect_to_queue(_ctx: &DalContext) { - let _ = StreamManager::new().await.expect("could not connect"); + let _ = Environment::new().await.expect("could not connect"); } diff --git a/lib/gobbler-server/tests/integration_test/produce_and_consume.rs b/lib/gobbler-server/tests/integration_test/produce_and_consume.rs index a419881b7d..4a02eaee7b 100644 --- a/lib/gobbler-server/tests/integration_test/produce_and_consume.rs +++ b/lib/gobbler-server/tests/integration_test/produce_and_consume.rs @@ -1,6 +1,6 @@ use dal::DalContext; use dal_test::random_identifier_string; -use si_rabbitmq::{Producer, StreamManager}; +use si_rabbitmq::{Environment, Producer}; use si_test_macros::gobbler_test as test; /// Recommended to run with the following environment variable: @@ -10,13 +10,13 @@ use si_test_macros::gobbler_test as test; #[test] async fn produce(_ctx: &DalContext) { let stream = &random_identifier_string(); - let manager = StreamManager::new().await.expect("could not connect"); - manager + let environment = Environment::new().await.expect("could not connect"); + environment .create_stream(stream) .await .expect("could not create stream"); - let mut producer = Producer::new(&manager, "producer", stream) + let 
mut producer = Producer::new(&environment, "producer", stream) .await .expect("could not create producer"); producer diff --git a/lib/si-rabbitmq/src/consumer.rs b/lib/si-rabbitmq/src/consumer.rs index 9af92f1c5c..29bed7f156 100644 --- a/lib/si-rabbitmq/src/consumer.rs +++ b/lib/si-rabbitmq/src/consumer.rs @@ -1,4 +1,4 @@ -use crate::connection::StreamManager; +use crate::environment::Environment; use futures::StreamExt; use rabbitmq_stream_client::error::ConsumerDeliveryError; use rabbitmq_stream_client::types::Delivery; @@ -13,8 +13,8 @@ pub struct Consumer(UpstreamConsumer); impl Consumer { /// Creates a new [`Consumer`] for consuming RabbitMQ stream messages. - pub async fn new(connection: &StreamManager, stream: &str) -> RabbitResult { - let consumer = connection.inner().consumer().build(stream).await?; + pub async fn new(environment: &Environment, stream: &str) -> RabbitResult { + let consumer = environment.inner().consumer().build(stream).await?; Ok(Self(consumer)) } diff --git a/lib/si-rabbitmq/src/connection.rs b/lib/si-rabbitmq/src/environment.rs similarity index 63% rename from lib/si-rabbitmq/src/connection.rs rename to lib/si-rabbitmq/src/environment.rs index f7124b9591..606d57cf2d 100644 --- a/lib/si-rabbitmq/src/connection.rs +++ b/lib/si-rabbitmq/src/environment.rs @@ -1,35 +1,35 @@ use rabbitmq_stream_client::types::ByteCapacity; -use rabbitmq_stream_client::Environment; +use rabbitmq_stream_client::Environment as UpstreamEnvironment; use crate::error::RabbitResult; /// A connection to a RabbitMQ node. #[allow(missing_debug_implementations)] -pub struct StreamManager { - environment: Environment, +pub struct Environment { + inner: UpstreamEnvironment, } -impl StreamManager { - /// Creates a new [`StreamManager`], which contains a connection to a RabbitMQ node. +impl Environment { + /// Creates a new [`Environment`], which contains a connection to a RabbitMQ node. 
pub async fn new() -> RabbitResult { - let environment = Environment::builder() + let inner = UpstreamEnvironment::builder() .host("localhost") .username("guest") .password("guest") .port(5552) .build() .await?; - Ok(Self { environment }) + Ok(Self { inner }) } /// Returns the inner data structure handling the connection. - pub fn inner(&self) -> &Environment { - &self.environment + pub fn inner(&self) -> &UpstreamEnvironment { + &self.inner } pub async fn create_stream(&self, stream: impl AsRef) -> RabbitResult<()> { Ok(self - .environment + .inner .stream_creator() .max_length(ByteCapacity::KB(400)) .create(stream.as_ref()) @@ -37,6 +37,6 @@ impl StreamManager { } pub async fn delete_stream(&self, stream: impl AsRef) -> RabbitResult<()> { - Ok(self.environment.delete_stream(stream.as_ref()).await?) + Ok(self.inner.delete_stream(stream.as_ref()).await?) } } diff --git a/lib/si-rabbitmq/src/lib.rs b/lib/si-rabbitmq/src/lib.rs index f58171eb7b..137001ff94 100644 --- a/lib/si-rabbitmq/src/lib.rs +++ b/lib/si-rabbitmq/src/lib.rs @@ -1,4 +1,4 @@ -//! This library provides the ability to [connect](StreamManager) to [RabbitMQ](https://rabbitmq.com) +//! This library provides the ability to [connect](Environment) to [RabbitMQ](https://rabbitmq.com) //! nodes, [produce](Producer) stream messages, and [consume](Consumer) stream messages. 
#![warn( @@ -23,13 +23,13 @@ clippy::missing_panics_doc )] -mod connection; mod consumer; +mod environment; mod error; mod producer; -pub use connection::StreamManager; pub use consumer::Consumer; +pub use environment::Environment; pub use error::RabbitError; pub use error::RabbitResult; pub use producer::Producer; diff --git a/lib/si-rabbitmq/src/producer.rs b/lib/si-rabbitmq/src/producer.rs index 050f58e2fa..0aece12230 100644 --- a/lib/si-rabbitmq/src/producer.rs +++ b/lib/si-rabbitmq/src/producer.rs @@ -1,7 +1,7 @@ use rabbitmq_stream_client::types::Message; use rabbitmq_stream_client::{Dedup, NoDedup, Producer as UpstreamProducer}; -use crate::connection::StreamManager; +use crate::environment::Environment; use crate::{RabbitError, RabbitResult}; /// An interface for producing and sending RabbitMQ stream messages. @@ -14,11 +14,11 @@ pub struct Producer { impl Producer { /// Creates a new [`Producer`] for producing and sending RabbitMQ stream messages. pub async fn new( - connection: &StreamManager, + environment: &Environment, name: impl AsRef, stream: impl AsRef, ) -> RabbitResult { - let producer = connection + let producer = environment .inner() .producer() .name(name.as_ref()) From 88c1c06f239d1768384c4b4872274c9696842d80 Mon Sep 17 00:00:00 2001 From: Nick Gerace Date: Wed, 23 Aug 2023 18:58:26 -0400 Subject: [PATCH 10/92] Refactor consumer for use in the gobbler The consumer domain has been refactored to support processing deliveries externally rather than within methods. While the producer and environment domains have been hardened, the consumer domain is brittle. The gobbler will consume messages on the queue, but it will have to notify the original producer of those messages when it is done. That work does not exist in this commit, but the consuming part does. 
Signed-off-by: Nick Gerace --- Cargo.lock | 1 + lib/gobbler-server/Cargo.toml | 1 + lib/gobbler-server/tests/integration.rs | 6 + .../tests/integration_test/connection.rs | 12 +- .../tests/integration_test/mod.rs | 2 +- .../integration_test/produce_and_consume.rs | 30 ----- .../tests/integration_test/queue.rs | 107 ++++++++++++++++++ lib/si-rabbitmq/src/consumer.rs | 59 +++++++--- lib/si-rabbitmq/src/environment.rs | 14 ++- lib/si-rabbitmq/src/error.rs | 5 +- lib/si-rabbitmq/src/producer.rs | 16 +-- 11 files changed, 184 insertions(+), 69 deletions(-) delete mode 100644 lib/gobbler-server/tests/integration_test/produce_and_consume.rs create mode 100644 lib/gobbler-server/tests/integration_test/queue.rs diff --git a/Cargo.lock b/Cargo.lock index a602c748a6..8f850db7f6 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2122,6 +2122,7 @@ dependencies = [ "derive_builder", "futures", "nats-subscriber", + "pretty_assertions_sorted", "remain", "serde", "serde_json", diff --git a/lib/gobbler-server/Cargo.toml b/lib/gobbler-server/Cargo.toml index 253052fe4e..dc236b9cb4 100644 --- a/lib/gobbler-server/Cargo.toml +++ b/lib/gobbler-server/Cargo.toml @@ -31,3 +31,4 @@ veritech-client = { path = "../../lib/veritech-client" } [dev-dependencies] dal-test = { path = "../../lib/dal-test" } +pretty_assertions_sorted = { workspace = true } diff --git a/lib/gobbler-server/tests/integration.rs b/lib/gobbler-server/tests/integration.rs index 9fad204f79..389866fa81 100644 --- a/lib/gobbler-server/tests/integration.rs +++ b/lib/gobbler-server/tests/integration.rs @@ -1,3 +1,9 @@ +//! All tests should be ran with the following environment variable: +//! +//! ```shell +//! SI_TEST_BUILTIN_SCHEMAS=none +//! 
``` + const TEST_PG_DBNAME: &str = "si_test_gobbler"; mod integration_test; diff --git a/lib/gobbler-server/tests/integration_test/connection.rs b/lib/gobbler-server/tests/integration_test/connection.rs index 1931b7ce6f..f6c2e4f290 100644 --- a/lib/gobbler-server/tests/integration_test/connection.rs +++ b/lib/gobbler-server/tests/integration_test/connection.rs @@ -3,23 +3,15 @@ use dal::{DalContext, WorkspaceSnapshot}; use si_rabbitmq::Environment; use si_test_macros::gobbler_test as test; -/// Recommended to run with the following environment variable: -/// ```shell -/// SI_TEST_BUILTIN_SCHEMAS=none -/// ``` #[test] -async fn create_snapshot(ctx: &DalContext) { +async fn connect_to_database(ctx: &DalContext) { let change_set = ChangeSet::new().expect("could not create change set"); let _snapshot = WorkspaceSnapshot::new(ctx, &change_set) .await .expect("could not create snapshot"); } -/// Recommended to run with the following environment variable: -/// ```shell -/// SI_TEST_BUILTIN_SCHEMAS=none -/// ``` #[test] async fn connect_to_queue(_ctx: &DalContext) { - let _ = Environment::new().await.expect("could not connect"); + let _environment = Environment::new().await.expect("could not connect"); } diff --git a/lib/gobbler-server/tests/integration_test/mod.rs b/lib/gobbler-server/tests/integration_test/mod.rs index 7b2cba6f32..d8748899ab 100644 --- a/lib/gobbler-server/tests/integration_test/mod.rs +++ b/lib/gobbler-server/tests/integration_test/mod.rs @@ -1,2 +1,2 @@ mod connection; -mod produce_and_consume; +mod queue; diff --git a/lib/gobbler-server/tests/integration_test/produce_and_consume.rs b/lib/gobbler-server/tests/integration_test/produce_and_consume.rs deleted file mode 100644 index 4a02eaee7b..0000000000 --- a/lib/gobbler-server/tests/integration_test/produce_and_consume.rs +++ /dev/null @@ -1,30 +0,0 @@ -use dal::DalContext; -use dal_test::random_identifier_string; -use si_rabbitmq::{Environment, Producer}; -use si_test_macros::gobbler_test as test; - 
-/// Recommended to run with the following environment variable: -/// ```shell -/// SI_TEST_BUILTIN_SCHEMAS=none -/// ``` -#[test] -async fn produce(_ctx: &DalContext) { - let stream = &random_identifier_string(); - let environment = Environment::new().await.expect("could not connect"); - environment - .create_stream(stream) - .await - .expect("could not create stream"); - - let mut producer = Producer::new(&environment, "producer", stream) - .await - .expect("could not create producer"); - producer - .send_single("foo") - .await - .expect("could not singe message"); - producer - .send_batch(vec!["bar".as_bytes(), "baz".as_bytes()]) - .await - .expect("could not send message batch"); -} diff --git a/lib/gobbler-server/tests/integration_test/queue.rs b/lib/gobbler-server/tests/integration_test/queue.rs new file mode 100644 index 0000000000..2a4dadafef --- /dev/null +++ b/lib/gobbler-server/tests/integration_test/queue.rs @@ -0,0 +1,107 @@ +use dal::DalContext; +use pretty_assertions_sorted::assert_eq; +use si_rabbitmq::{Consumer, Environment, Producer}; +use si_test_macros::gobbler_test as test; + +#[test] +async fn produce(_ctx: &DalContext) { + let stream = "test-produce"; + let environment = Environment::new().await.expect("could not connect"); + + // FIXME(nick): add stream setup to test macro. 
+ environment + .delete_stream(stream) + .await + .expect("could not delete stream"); + environment + .create_stream(stream) + .await + .expect("could not create stream"); + + let mut producer = Producer::new(&environment, "producer", stream) + .await + .expect("could not create producer"); + producer + .send_single("foo") + .await + .expect("could not singe message"); + producer + .send_batch(vec!["bar".as_bytes(), "baz".as_bytes()]) + .await + .expect("could not send message batch"); + producer.close().await.expect("could not close producer"); +} + +#[test] +async fn consume(_ctx: &DalContext) { + let stream = "test-consume"; + let environment = Environment::new().await.expect("could not connect"); + + // FIXME(nick): add stream setup to test macro. + environment + .delete_stream(stream) + .await + .expect("could not delete stream"); + environment + .create_stream(stream) + .await + .expect("could not create stream"); + + let mut producer = Producer::new(&environment, "producer", stream) + .await + .expect("could not create producer"); + producer + .send_single("foo") + .await + .expect("could not singe message"); + producer + .send_batch(vec!["bar".as_bytes(), "baz".as_bytes()]) + .await + .expect("could not send message batch"); + producer.close().await.expect("could not close producer"); + + let mut consumer = Consumer::new(&environment, stream) + .await + .expect("could not create consumer"); + let handle = consumer.handle(); + + // Grab the three deliveries that we expect. 
+ let delivery = consumer + .next() + .await + .expect("could not consume next delivery") + .expect("no delivery to consume") + .expect("consumer delivery error"); + let data = consumer + .process_delivery(&delivery) + .expect("could not process delivery") + .expect("no data in message"); + assert_eq!("foo", &data); + let delivery = consumer + .next() + .await + .expect("could not consume next delivery") + .expect("no delivery to consume") + .expect("consumer delivery error"); + let data = consumer + .process_delivery(&delivery) + .expect("could not process delivery") + .expect("no data in message"); + assert_eq!("bar", &data); + let delivery = consumer + .next() + .await + .expect("could not consume next delivery") + .expect("no delivery to consume") + .expect("consumer delivery error"); + let data = consumer + .process_delivery(&delivery) + .expect("could not process delivery") + .expect("no data in message"); + assert_eq!("baz", &data); + + handle + .close() + .await + .expect("could not close the consumer associated to this hangler"); +} diff --git a/lib/si-rabbitmq/src/consumer.rs b/lib/si-rabbitmq/src/consumer.rs index 29bed7f156..18c8ade0d7 100644 --- a/lib/si-rabbitmq/src/consumer.rs +++ b/lib/si-rabbitmq/src/consumer.rs @@ -1,37 +1,60 @@ use crate::environment::Environment; use futures::StreamExt; use rabbitmq_stream_client::error::ConsumerDeliveryError; -use rabbitmq_stream_client::types::Delivery; -use rabbitmq_stream_client::Consumer as UpstreamConsumer; +use rabbitmq_stream_client::types::{Delivery, Message, OffsetSpecification}; +use rabbitmq_stream_client::{Consumer as UpstreamConsumer, ConsumerHandle}; +use telemetry::prelude::*; use tokio::task; use crate::RabbitResult; /// An interface for consuming RabbitMQ stream messages. #[allow(missing_debug_implementations)] -pub struct Consumer(UpstreamConsumer); +pub struct Consumer { + inner: UpstreamConsumer, +} impl Consumer { /// Creates a new [`Consumer`] for consuming RabbitMQ stream messages. 
pub async fn new(environment: &Environment, stream: &str) -> RabbitResult { - let consumer = environment.inner().consumer().build(stream).await?; - Ok(Self(consumer)) + let inner = environment + .inner() + .consumer() + .offset(OffsetSpecification::First) + .build(stream) + .await?; + Ok(Self { inner }) + } + + pub async fn next(&mut self) -> RabbitResult>> { + Ok(self.inner.next().await) + } + + pub fn handle(&self) -> ConsumerHandle { + self.inner.handle() } - /// Starts a consumer task that watches the stream. - pub async fn start( - mut self, - processing_func: fn(delivery: Result), - ) -> RabbitResult<()> { - let handle = self.0.handle(); - task::spawn(async move { - while let Some(delivery) = self.0.next().await { - processing_func(delivery) + pub fn process_delivery(&self, delivery: &Delivery) -> RabbitResult> { + let maybe_data = delivery + .message() + .data() + .map(|data| String::from_utf8(data.to_vec())); + Ok(match maybe_data { + Some(data) => Some(data?), + None => None, + }) + } +} + +impl Drop for Consumer { + fn drop(&mut self) { + let handle = self.handle(); + + // Close the consumer associated to the handle provided. + task::spawn(async { + if let Err(e) = handle.close().await { + warn!("error when closing consumer on drop: {e}"); } }); - - // TODO(nick): handle when close happens more precisely. 
- handle.close().await?; - Ok(()) } } diff --git a/lib/si-rabbitmq/src/environment.rs b/lib/si-rabbitmq/src/environment.rs index 606d57cf2d..3e87055598 100644 --- a/lib/si-rabbitmq/src/environment.rs +++ b/lib/si-rabbitmq/src/environment.rs @@ -1,4 +1,5 @@ -use rabbitmq_stream_client::types::ByteCapacity; +use rabbitmq_stream_client::error::StreamDeleteError; +use rabbitmq_stream_client::types::{ByteCapacity, ResponseCode}; use rabbitmq_stream_client::Environment as UpstreamEnvironment; use crate::error::RabbitResult; @@ -37,6 +38,15 @@ impl Environment { } pub async fn delete_stream(&self, stream: impl AsRef) -> RabbitResult<()> { - Ok(self.inner.delete_stream(stream.as_ref()).await?) + match self.inner.delete_stream(stream.as_ref()).await { + Ok(()) => Ok(()), + Err(e) => match e { + StreamDeleteError::Delete { + status: ResponseCode::StreamDoesNotExist, + stream: _, + } => Ok(()), + e => Err(e.into()), + }, + } } } diff --git a/lib/si-rabbitmq/src/error.rs b/lib/si-rabbitmq/src/error.rs index fc2a6035de..3f660b9321 100644 --- a/lib/si-rabbitmq/src/error.rs +++ b/lib/si-rabbitmq/src/error.rs @@ -2,6 +2,7 @@ use rabbitmq_stream_client::error::{ ClientError, ConsumerCloseError, ConsumerCreateError, ProducerCloseError, ProducerCreateError, ProducerPublishError, StreamCreateError, StreamDeleteError, }; +use std::string::FromUtf8Error; use thiserror::Error; #[allow(missing_docs)] @@ -14,9 +15,11 @@ pub enum RabbitError { ConsumerClose(#[from] ConsumerCloseError), #[error("consumer create error: {0}")] ConsumerCreate(#[from] ConsumerCreateError), + #[error("from utf-8 error: {0}")] + FromUtf8(#[from] FromUtf8Error), #[error("producer close error: {0}")] ProducerClose(#[from] ProducerCloseError), - #[error("cannot send because the producer is closed")] + #[error("can no longer use producer because it has been closed")] ProducerClosed, #[error("producer create error: {0}")] ProducerCreate(#[from] ProducerCreateError), diff --git a/lib/si-rabbitmq/src/producer.rs 
b/lib/si-rabbitmq/src/producer.rs index 0aece12230..014d48ffc0 100644 --- a/lib/si-rabbitmq/src/producer.rs +++ b/lib/si-rabbitmq/src/producer.rs @@ -1,5 +1,7 @@ use rabbitmq_stream_client::types::Message; use rabbitmq_stream_client::{Dedup, NoDedup, Producer as UpstreamProducer}; +use telemetry::prelude::warn; +use tokio::task; use crate::environment::Environment; use crate::{RabbitError, RabbitResult}; @@ -7,7 +9,7 @@ use crate::{RabbitError, RabbitResult}; /// An interface for producing and sending RabbitMQ stream messages. #[allow(missing_debug_implementations)] pub struct Producer { - producer: UpstreamProducer, + inner: UpstreamProducer, closed: bool, } @@ -18,14 +20,14 @@ impl Producer { name: impl AsRef, stream: impl AsRef, ) -> RabbitResult { - let producer = environment + let inner = environment .inner() .producer() .name(name.as_ref()) .build(stream.as_ref()) .await?; Ok(Self { - producer, + inner, closed: false, }) } @@ -35,7 +37,7 @@ impl Producer { if self.closed { return Err(RabbitError::ProducerClosed); } - self.producer + self.inner .send_with_confirm(Message::builder().body(message).build()) .await?; Ok(()) @@ -46,7 +48,7 @@ impl Producer { if self.closed { return Err(RabbitError::ProducerClosed); } - self.producer + self.inner .batch_send_with_confirm( messages .into_iter() @@ -57,9 +59,9 @@ impl Producer { Ok(()) } - /// Closes the producer connection and renders the producer unusable. + // Closes the producer connection and renders the producer unusable. 
pub async fn close(mut self) -> RabbitResult<()> { - self.producer.close().await?; + self.inner.close().await?; self.closed = true; Ok(()) } From 6bad95d1403bdae3100ec9e6238f6ee7919e2121 Mon Sep 17 00:00:00 2001 From: Jacob Helwig Date: Thu, 24 Aug 2023 09:05:29 -0700 Subject: [PATCH 11/92] Test removing ordered children from ordered containers --- lib/dal/src/workspace_snapshot/graph.rs | 289 ++++++++++++++++++++++++ 1 file changed, 289 insertions(+) diff --git a/lib/dal/src/workspace_snapshot/graph.rs b/lib/dal/src/workspace_snapshot/graph.rs index 643a95ea50..b0a55525c8 100644 --- a/lib/dal/src/workspace_snapshot/graph.rs +++ b/lib/dal/src/workspace_snapshot/graph.rs @@ -3166,4 +3166,293 @@ mod test { .expect("Unable to find ordered children for node") ); } + + #[test] + fn remove_ordered_node() { + let change_set = ChangeSet::new().expect("Unable to create ChangeSet"); + let change_set = &change_set; + let mut graph = WorkspaceSnapshotGraph::new(change_set) + .expect("Unable to create WorkspaceSnapshotGraph"); + + let schema_id = change_set.generate_ulid().expect("Cannot generate Ulid"); + let schema_index = graph + .add_node( + NodeWeight::new_content( + change_set, + schema_id, + ContentAddress::Schema(ContentHash::new( + SchemaId::generate().to_string().as_bytes(), + )), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add schema"); + let schema_variant_id = change_set.generate_ulid().expect("Cannot generate Ulid"); + let schema_variant_index = graph + .add_node( + NodeWeight::new_content( + change_set, + schema_variant_id, + ContentAddress::SchemaVariant(ContentHash::new( + SchemaVariantId::generate().to_string().as_bytes(), + )), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add schema variant"); + + graph + .add_edge( + change_set, + graph.root_index, + EdgeWeight::new(change_set, EdgeWeightKind::Uses) + .expect("Unable to create EdgeWeight"), + schema_index, + ) + .expect("Unable to add root -> schema 
edge"); + graph + .add_edge( + change_set, + graph + .get_node_index_by_id(schema_id) + .expect("Cannot get NodeIndex"), + EdgeWeight::new(change_set, EdgeWeightKind::Uses) + .expect("Unable to create EdgeWeight"), + schema_variant_index, + ) + .expect("Unable to add schema -> schema variant edge"); + + let func_id = change_set.generate_ulid().expect("Cannot generate Ulid"); + let func_index = graph + .add_node( + NodeWeight::new_content( + change_set, + func_id, + ContentAddress::Func(ContentHash::new( + FuncId::generate().to_string().as_bytes(), + )), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add func"); + graph + .add_edge( + change_set, + graph.root_index, + EdgeWeight::new(change_set, EdgeWeightKind::Uses) + .expect("Unable to create EdgeWeight"), + func_index, + ) + .expect("Unable to add root -> func edge"); + + let prop_id = change_set.generate_ulid().expect("Cannot generate Ulid"); + let prop_index = graph + .add_ordered_node( + change_set, + NodeWeight::new_content( + change_set, + prop_id, + ContentAddress::Prop(ContentHash::new( + PropId::generate().to_string().as_bytes(), + )), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add prop"); + graph + .add_edge( + change_set, + graph + .get_node_index_by_id(schema_variant_id) + .expect("Cannot get NodeIndex"), + EdgeWeight::new(change_set, EdgeWeightKind::Uses) + .expect("Unable to create EdgeWeight"), + prop_index, + ) + .expect("Unable to add schema variant -> prop edge"); + graph + .add_unordered_edge( + change_set, + graph + .get_node_index_by_id(prop_id) + .expect("Cannot get NodeIndex"), + EdgeWeight::new(change_set, EdgeWeightKind::Uses) + .expect("Unable to create EdgeWeight"), + graph + .get_node_index_by_id(func_id) + .expect("Cannot get NodeIndex"), + ) + .expect("Unable to add prop -> func edge"); + graph.cleanup(); + graph.dot(); + + let ordered_prop_1_id = change_set.generate_ulid().expect("Unable to generate Ulid"); + let 
ordered_prop_1_index = graph + .add_ordered_node( + change_set, + NodeWeight::new_content( + change_set, + ordered_prop_1_id, + ContentAddress::Prop(ContentHash::new( + ordered_prop_1_id.to_string().as_bytes(), + )), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add ordered prop"); + graph + .add_edge( + change_set, + graph + .get_node_index_by_id(prop_id) + .expect("Unable to get NodeWeight for prop"), + EdgeWeight::new(change_set, EdgeWeightKind::Uses) + .expect("Unable to create uses edge weight"), + ordered_prop_1_index, + ) + .expect("Unable to add prop -> ordered_prop_1 edge"); + + let ordered_prop_2_id = change_set.generate_ulid().expect("Unable to generate Ulid"); + let ordered_prop_2_index = graph + .add_ordered_node( + change_set, + NodeWeight::new_content( + change_set, + ordered_prop_2_id, + ContentAddress::Prop(ContentHash::new( + ordered_prop_2_id.to_string().as_bytes(), + )), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add ordered prop"); + graph + .add_edge( + change_set, + graph + .get_node_index_by_id(prop_id) + .expect("Unable to get NodeWeight for prop"), + EdgeWeight::new(change_set, EdgeWeightKind::Uses) + .expect("Unable to create uses edge weight"), + ordered_prop_2_index, + ) + .expect("Unable to add prop -> ordered_prop_2 edge"); + + let ordered_prop_3_id = change_set.generate_ulid().expect("Unable to generate Ulid"); + let ordered_prop_3_index = graph + .add_ordered_node( + change_set, + NodeWeight::new_content( + change_set, + ordered_prop_3_id, + ContentAddress::Prop(ContentHash::new( + ordered_prop_3_id.to_string().as_bytes(), + )), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add ordered prop"); + graph + .add_edge( + change_set, + graph + .get_node_index_by_id(prop_id) + .expect("Unable to get NodeWeight for prop"), + EdgeWeight::new(change_set, EdgeWeightKind::Uses) + .expect("Unable to create uses edge weight"), + ordered_prop_3_index, + ) + 
.expect("Unable to add prop -> ordered_prop_3 edge"); + + let ordered_prop_4_id = change_set.generate_ulid().expect("Unable to generate Ulid"); + let ordered_prop_4_index = graph + .add_ordered_node( + change_set, + NodeWeight::new_content( + change_set, + ordered_prop_4_id, + ContentAddress::Prop(ContentHash::new( + ordered_prop_4_id.to_string().as_bytes(), + )), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add ordered prop"); + graph + .add_edge( + change_set, + graph + .get_node_index_by_id(prop_id) + .expect("Unable to get NodeWeight for prop"), + EdgeWeight::new(change_set, EdgeWeightKind::Uses) + .expect("Unable to create uses edge weight"), + ordered_prop_4_index, + ) + .expect("Unable to add prop -> ordered_prop_4 edge"); + + graph.cleanup(); + graph.dot(); + + assert_eq!( + vec![ + ordered_prop_1_index, + ordered_prop_2_index, + ordered_prop_3_index, + ordered_prop_4_index, + ], + graph + .ordered_children_for_node( + graph + .get_node_index_by_id(prop_id) + .expect("Unable to get prop NodeIndex") + ) + .expect("Unable to find ordered children for node") + ); + + graph + .remove_edge( + change_set, + graph + .get_node_index_by_id(prop_id) + .expect("Unable to get NodeIndex for prop"), + ordered_prop_2_index, + EdgeWeightKind::Uses, + ) + .expect("Unable to update order of prop's children"); + + assert_eq!( + vec![ + ordered_prop_1_index, + ordered_prop_3_index, + ordered_prop_4_index, + ], + graph + .ordered_children_for_node( + graph + .get_node_index_by_id(prop_id) + .expect("Unable to get prop NodeIndex") + ) + .expect("Unable to find ordered children for node") + ); + if let NodeWeight::Ordering(ordering_weight) = graph + .get_node_weight( + graph + .ordering_node_index_for_container( + graph + .get_node_index_by_id(prop_id) + .expect("Unable to find ordering node for prop"), + ) + .expect("Error getting ordering NodeIndex for prop") + .expect("Unable to find ordering NodeIndex"), + ) + .expect("Unable to get ordering 
NodeWeight for ordering node") + { + assert_eq!( + &vec![ordered_prop_1_id, ordered_prop_3_id, ordered_prop_4_id], + ordering_weight.order() + ); + } else { + panic!("Unable to destructure ordering node weight"); + } + } } From 39d19e96af46c513589242ad3231f821ec64874e Mon Sep 17 00:00:00 2001 From: Jacob Helwig Date: Thu, 24 Aug 2023 13:18:26 -0700 Subject: [PATCH 12/92] Test basic conflict & update detection with ordered containers In addition to adding new tests for basic conflict & update detection, this fixes a bug where the "seen" vector clock was being updated when changing the order of an `OrderingNodeWeight`, instead of updating the "write" vector clock. This also removes an unnecessary additional update of the "write" vector clock of the `EdgeWeight` when adding an edge. By assuming that the vector clock is already up to date, testing becomes easier & more stable. --- lib/dal/src/workspace_snapshot/graph.rs | 511 +++++++++++++++++- .../node_weight/ordering_node_weight.rs | 2 +- 2 files changed, 504 insertions(+), 9 deletions(-) diff --git a/lib/dal/src/workspace_snapshot/graph.rs b/lib/dal/src/workspace_snapshot/graph.rs index b0a55525c8..c4caa43f2e 100644 --- a/lib/dal/src/workspace_snapshot/graph.rs +++ b/lib/dal/src/workspace_snapshot/graph.rs @@ -93,7 +93,7 @@ impl WorkspaceSnapshotGraph { to_node_index: NodeIndex, ) -> WorkspaceSnapshotGraphResult { let new_edge_index = - self.add_unordered_edge(change_set, from_node_index, edge_weight, to_node_index)?; + self.add_unordered_edge(from_node_index, edge_weight, to_node_index)?; let (new_from_node_index, _) = self .graph @@ -173,7 +173,6 @@ impl WorkspaceSnapshotGraph { pub fn add_unordered_edge( &mut self, - change_set: &ChangeSet, from_node_index: NodeIndex, mut edge_weight: EdgeWeight, to_node_index: NodeIndex, @@ -188,9 +187,6 @@ impl WorkspaceSnapshotGraph { return Err(WorkspaceSnapshotGraphError::CreateGraphCycle); } - // Ensure the vector clocks of the edge are up-to-date. 
- edge_weight.increment_vector_clocks(change_set)?; - // Because outgoing edges are part of a node's identity, we create a new "from" node // as we are effectively writing to that node (we'll need to update the merkle tree // hash), and everything in the graph should be treated as copy-on-write. @@ -2787,7 +2783,6 @@ mod test { .expect("Unable to add schema variant -> prop edge"); graph .add_unordered_edge( - change_set, graph .get_node_index_by_id(prop_id) .expect("Cannot get NodeIndex"), @@ -3002,7 +2997,6 @@ mod test { .expect("Unable to add schema variant -> prop edge"); graph .add_unordered_edge( - change_set, graph .get_node_index_by_id(prop_id) .expect("Cannot get NodeIndex"), @@ -3272,7 +3266,6 @@ mod test { .expect("Unable to add schema variant -> prop edge"); graph .add_unordered_edge( - change_set, graph .get_node_index_by_id(prop_id) .expect("Cannot get NodeIndex"), @@ -3455,4 +3448,506 @@ mod test { panic!("Unable to destructure ordering node weight"); } } + + #[test] + fn detect_conflicts_and_updates_simple_ordering_no_conflicts_no_updates_in_base() { + let initial_change_set = ChangeSet::new().expect("Unable to create ChangeSet"); + let initial_change_set = &initial_change_set; + let mut initial_graph = WorkspaceSnapshotGraph::new(initial_change_set) + .expect("Unable to create WorkspaceSnapshotGraph"); + + let schema_id = initial_change_set + .generate_ulid() + .expect("Cannot generate Ulid"); + let schema_index = initial_graph + .add_node( + NodeWeight::new_content( + initial_change_set, + schema_id, + ContentAddress::Schema(ContentHash::from("Schema A")), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add Schema A"); + let schema_variant_id = initial_change_set + .generate_ulid() + .expect("Cannot generate Ulid"); + let schema_variant_index = initial_graph + .add_node( + NodeWeight::new_content( + initial_change_set, + schema_variant_id, + ContentAddress::SchemaVariant(ContentHash::from("Schema Variant A")), + ) + 
.expect("Unable to create NodeWeight"), + ) + .expect("Unable to add Schema Variant A"); + + initial_graph + .add_edge( + initial_change_set, + initial_graph.root_index, + EdgeWeight::new(initial_change_set, EdgeWeightKind::Uses) + .expect("Unable to create EdgeWeight"), + schema_index, + ) + .expect("Unable to add root -> schema edge"); + initial_graph + .add_edge( + initial_change_set, + initial_graph + .get_node_index_by_id(schema_id) + .expect("Cannot get NodeIndex"), + EdgeWeight::new(initial_change_set, EdgeWeightKind::Uses) + .expect("Unable to create EdgeWeight"), + schema_variant_index, + ) + .expect("Unable to add schema -> schema variant edge"); + + let container_prop_id = initial_change_set + .generate_ulid() + .expect("Unable to generate Ulid"); + let container_prop_index = initial_graph + .add_ordered_node( + initial_change_set, + NodeWeight::new_content( + initial_change_set, + container_prop_id, + ContentAddress::Prop(ContentHash::new( + container_prop_id.to_string().as_bytes(), + )), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add container prop"); + initial_graph + .add_edge( + initial_change_set, + initial_graph + .get_node_index_by_id(schema_variant_id) + .expect("Unable to get NodeIndex"), + EdgeWeight::new(initial_change_set, EdgeWeightKind::Uses) + .expect("Unable to create EdgeWeight"), + container_prop_index, + ) + .expect("Unable to add schema variant -> container prop edge"); + + let ordered_prop_1_id = initial_change_set + .generate_ulid() + .expect("Unable to generate Ulid"); + let ordered_prop_1_index = initial_graph + .add_node( + NodeWeight::new_content( + initial_change_set, + ordered_prop_1_id, + ContentAddress::Prop(ContentHash::new( + ordered_prop_1_id.to_string().as_bytes(), + )), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add ordered prop 1"); + initial_graph + .add_edge( + initial_change_set, + initial_graph + .get_node_index_by_id(container_prop_id) + .expect("Unable to 
get NodeIndex"), + EdgeWeight::new(initial_change_set, EdgeWeightKind::Uses) + .expect("Unable to create EdgeWeight"), + ordered_prop_1_index, + ) + .expect("Unable to add container prop -> ordered prop 1 edge"); + + let ordered_prop_2_id = initial_change_set + .generate_ulid() + .expect("Unable to generate Ulid"); + let ordered_prop_2_index = initial_graph + .add_node( + NodeWeight::new_content( + initial_change_set, + ordered_prop_2_id, + ContentAddress::Prop(ContentHash::new( + ordered_prop_2_id.to_string().as_bytes(), + )), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add ordered prop 2"); + initial_graph + .add_edge( + initial_change_set, + initial_graph + .get_node_index_by_id(container_prop_id) + .expect("Unable to get NodeIndex"), + EdgeWeight::new(initial_change_set, EdgeWeightKind::Uses) + .expect("Unable to create EdgeWeight"), + ordered_prop_2_index, + ) + .expect("Unable to add container prop -> ordered prop 2 edge"); + + let ordered_prop_3_id = initial_change_set + .generate_ulid() + .expect("Unable to generate Ulid"); + let ordered_prop_3_index = initial_graph + .add_node( + NodeWeight::new_content( + initial_change_set, + ordered_prop_3_id, + ContentAddress::Prop(ContentHash::new( + ordered_prop_3_id.to_string().as_bytes(), + )), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add ordered prop 3"); + initial_graph + .add_edge( + initial_change_set, + initial_graph + .get_node_index_by_id(container_prop_id) + .expect("Unable to get NodeIndex"), + EdgeWeight::new(initial_change_set, EdgeWeightKind::Uses) + .expect("Unable to create EdgeWeight"), + ordered_prop_3_index, + ) + .expect("Unable to add container prop -> ordered prop 3 edge"); + + let ordered_prop_4_id = initial_change_set + .generate_ulid() + .expect("Unable to generate Ulid"); + let ordered_prop_4_index = initial_graph + .add_node( + NodeWeight::new_content( + initial_change_set, + ordered_prop_4_id, + ContentAddress::Prop(ContentHash::new( 
+ ordered_prop_4_id.to_string().as_bytes(), + )), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add ordered prop 4"); + initial_graph + .add_edge( + initial_change_set, + initial_graph + .get_node_index_by_id(container_prop_id) + .expect("Unable to get NodeIndex"), + EdgeWeight::new(initial_change_set, EdgeWeightKind::Uses) + .expect("Unable to create EdgeWeight"), + ordered_prop_4_index, + ) + .expect("Unable to add container prop -> ordered prop 4 edge"); + + initial_graph.cleanup(); + initial_graph.dot(); + + let new_change_set = ChangeSet::new().expect("Unable to create ChangeSet"); + let new_change_set = &new_change_set; + let mut new_graph = initial_graph.clone(); + + let ordered_prop_5_id = new_change_set + .generate_ulid() + .expect("Unable to generate Ulid"); + let ordered_prop_5_index = new_graph + .add_node( + NodeWeight::new_content( + new_change_set, + ordered_prop_5_id, + ContentAddress::Prop(ContentHash::new( + ordered_prop_5_id.to_string().as_bytes(), + )), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add ordered prop 5"); + new_graph + .add_edge( + new_change_set, + new_graph + .get_node_index_by_id(container_prop_id) + .expect("Unable to get NodeIndex"), + EdgeWeight::new(new_change_set, EdgeWeightKind::Uses) + .expect("Unable to create EdgeWeight"), + ordered_prop_5_index, + ) + .expect("Unable to add container prop -> ordered prop 5 edge"); + + new_graph.cleanup(); + new_graph.dot(); + + let (conflicts, updates) = new_graph + .detect_conflicts_and_updates(new_change_set, &initial_graph, initial_change_set) + .expect("Unable to detect conflicts and updates"); + + assert_eq!(Vec::::new(), conflicts); + assert_eq!(Vec::::new(), updates); + } + + #[test] + fn detect_conflicts_and_updates_simple_ordering_no_conflicts_with_updates_in_base() { + let initial_change_set = ChangeSet::new().expect("Unable to create ChangeSet"); + let initial_change_set = &initial_change_set; + let mut initial_graph = 
WorkspaceSnapshotGraph::new(initial_change_set) + .expect("Unable to create WorkspaceSnapshotGraph"); + + let schema_id = initial_change_set + .generate_ulid() + .expect("Cannot generate Ulid"); + let schema_index = initial_graph + .add_node( + NodeWeight::new_content( + initial_change_set, + schema_id, + ContentAddress::Schema(ContentHash::from("Schema A")), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add Schema A"); + let schema_variant_id = initial_change_set + .generate_ulid() + .expect("Cannot generate Ulid"); + let schema_variant_index = initial_graph + .add_node( + NodeWeight::new_content( + initial_change_set, + schema_variant_id, + ContentAddress::SchemaVariant(ContentHash::from("Schema Variant A")), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add Schema Variant A"); + + initial_graph + .add_edge( + initial_change_set, + initial_graph.root_index, + EdgeWeight::new(initial_change_set, EdgeWeightKind::Uses) + .expect("Unable to create EdgeWeight"), + schema_index, + ) + .expect("Unable to add root -> schema edge"); + initial_graph + .add_edge( + initial_change_set, + initial_graph + .get_node_index_by_id(schema_id) + .expect("Cannot get NodeIndex"), + EdgeWeight::new(initial_change_set, EdgeWeightKind::Uses) + .expect("Unable to create EdgeWeight"), + schema_variant_index, + ) + .expect("Unable to add schema -> schema variant edge"); + + let container_prop_id = initial_change_set + .generate_ulid() + .expect("Unable to generate Ulid"); + let container_prop_index = initial_graph + .add_ordered_node( + initial_change_set, + NodeWeight::new_content( + initial_change_set, + container_prop_id, + ContentAddress::Prop(ContentHash::new( + container_prop_id.to_string().as_bytes(), + )), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add container prop"); + initial_graph + .add_edge( + initial_change_set, + initial_graph + .get_node_index_by_id(schema_variant_id) + .expect("Unable to get 
NodeIndex"), + EdgeWeight::new(initial_change_set, EdgeWeightKind::Uses) + .expect("Unable to create EdgeWeight"), + container_prop_index, + ) + .expect("Unable to add schema variant -> container prop edge"); + + let ordered_prop_1_id = initial_change_set + .generate_ulid() + .expect("Unable to generate Ulid"); + let ordered_prop_1_index = initial_graph + .add_node( + NodeWeight::new_content( + initial_change_set, + ordered_prop_1_id, + ContentAddress::Prop(ContentHash::new( + ordered_prop_1_id.to_string().as_bytes(), + )), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add ordered prop 1"); + initial_graph + .add_edge( + initial_change_set, + initial_graph + .get_node_index_by_id(container_prop_id) + .expect("Unable to get NodeIndex"), + EdgeWeight::new(initial_change_set, EdgeWeightKind::Uses) + .expect("Unable to create EdgeWeight"), + ordered_prop_1_index, + ) + .expect("Unable to add container prop -> ordered prop 1 edge"); + + let ordered_prop_2_id = initial_change_set + .generate_ulid() + .expect("Unable to generate Ulid"); + let ordered_prop_2_index = initial_graph + .add_node( + NodeWeight::new_content( + initial_change_set, + ordered_prop_2_id, + ContentAddress::Prop(ContentHash::new( + ordered_prop_2_id.to_string().as_bytes(), + )), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add ordered prop 2"); + initial_graph + .add_edge( + initial_change_set, + initial_graph + .get_node_index_by_id(container_prop_id) + .expect("Unable to get NodeIndex"), + EdgeWeight::new(initial_change_set, EdgeWeightKind::Uses) + .expect("Unable to create EdgeWeight"), + ordered_prop_2_index, + ) + .expect("Unable to add container prop -> ordered prop 2 edge"); + + let ordered_prop_3_id = initial_change_set + .generate_ulid() + .expect("Unable to generate Ulid"); + let ordered_prop_3_index = initial_graph + .add_node( + NodeWeight::new_content( + initial_change_set, + ordered_prop_3_id, + ContentAddress::Prop(ContentHash::new( + 
ordered_prop_3_id.to_string().as_bytes(), + )), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add ordered prop 3"); + initial_graph + .add_edge( + initial_change_set, + initial_graph + .get_node_index_by_id(container_prop_id) + .expect("Unable to get NodeIndex"), + EdgeWeight::new(initial_change_set, EdgeWeightKind::Uses) + .expect("Unable to create EdgeWeight"), + ordered_prop_3_index, + ) + .expect("Unable to add container prop -> ordered prop 3 edge"); + + let ordered_prop_4_id = initial_change_set + .generate_ulid() + .expect("Unable to generate Ulid"); + let ordered_prop_4_index = initial_graph + .add_node( + NodeWeight::new_content( + initial_change_set, + ordered_prop_4_id, + ContentAddress::Prop(ContentHash::new( + ordered_prop_4_id.to_string().as_bytes(), + )), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add ordered prop 4"); + initial_graph + .add_edge( + initial_change_set, + initial_graph + .get_node_index_by_id(container_prop_id) + .expect("Unable to get NodeIndex"), + EdgeWeight::new(initial_change_set, EdgeWeightKind::Uses) + .expect("Unable to create EdgeWeight"), + ordered_prop_4_index, + ) + .expect("Unable to add container prop -> ordered prop 4 edge"); + + initial_graph.dot(); + + let new_change_set = ChangeSet::new().expect("Unable to create ChangeSet"); + let new_change_set = &new_change_set; + let new_graph = initial_graph.clone(); + + let ordered_prop_5_id = initial_change_set + .generate_ulid() + .expect("Unable to generate Ulid"); + let ordered_prop_5_index = initial_graph + .add_node( + NodeWeight::new_content( + initial_change_set, + ordered_prop_5_id, + ContentAddress::Prop(ContentHash::new( + ordered_prop_5_id.to_string().as_bytes(), + )), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add ordered prop 5"); + let new_edge_weight = EdgeWeight::new(initial_change_set, EdgeWeightKind::Uses) + .expect("Unable to create EdgeWeight"); + initial_graph + .add_edge( + 
initial_change_set, + initial_graph + .get_node_index_by_id(container_prop_id) + .expect("Unable to get NodeIndex"), + new_edge_weight.clone(), + ordered_prop_5_index, + ) + .expect("Unable to add container prop -> ordered prop 5 edge"); + + new_graph.dot(); + + let (conflicts, updates) = new_graph + .detect_conflicts_and_updates(new_change_set, &initial_graph, initial_change_set) + .expect("Unable to detect conflicts and updates"); + + assert_eq!(Vec::::new(), conflicts); + assert_eq!( + vec![ + Update::NewEdge { + source: new_graph + .get_node_index_by_id(container_prop_id) + .expect("Unable to get NodeIndex"), + destination: initial_graph + .get_node_index_by_id(ordered_prop_5_id) + .expect("Unable to get NodeIndex"), + edge_weight: new_edge_weight + }, + Update::ReplaceSubgraph { + new: initial_graph + .ordering_node_index_for_container( + initial_graph + .get_node_index_by_id(container_prop_id) + .expect("Unable to get container NodeIndex") + ) + .expect("Unable to get new ordering NodeIndex") + .expect("Ordering NodeIndex not found"), + old: new_graph + .ordering_node_index_for_container( + new_graph + .get_node_index_by_id(container_prop_id) + .expect("Unable to get container NodeIndex") + ) + .expect("Unable to get old ordering NodeIndex") + .expect("Ordering NodeIndex not found"), + } + ], + updates + ); + } } diff --git a/lib/dal/src/workspace_snapshot/node_weight/ordering_node_weight.rs b/lib/dal/src/workspace_snapshot/node_weight/ordering_node_weight.rs index 4e3ddd6108..1ae9ab4a4e 100644 --- a/lib/dal/src/workspace_snapshot/node_weight/ordering_node_weight.rs +++ b/lib/dal/src/workspace_snapshot/node_weight/ordering_node_weight.rs @@ -93,7 +93,7 @@ impl OrderingNodeWeight { pub fn set_order(&mut self, change_set: &ChangeSet, order: Vec) -> NodeWeightResult<()> { self.order = order; self.update_content_hash(); - self.increment_seen_vector_clock(change_set)?; + self.increment_vector_clock(change_set)?; Ok(()) } From 
ff5f50080cc29ca75c518ff20d54e28a70b4661b Mon Sep 17 00:00:00 2001 From: Jacob Helwig Date: Fri, 25 Aug 2023 10:55:53 -0700 Subject: [PATCH 13/92] Detect conflicting ordering updates for containers with Ordering nodes If the items the two "versions" of the containers have in common aren't in the same order, and there have been changes to the ordering in both "versions", then that's a conflict that needs to be resolved. --- lib/dal/src/workspace_snapshot/conflict.rs | 4 +- lib/dal/src/workspace_snapshot/graph.rs | 318 +++++++++++++++++++-- 2 files changed, 303 insertions(+), 19 deletions(-) diff --git a/lib/dal/src/workspace_snapshot/conflict.rs b/lib/dal/src/workspace_snapshot/conflict.rs index e6e894cc2f..06e95cdc1e 100644 --- a/lib/dal/src/workspace_snapshot/conflict.rs +++ b/lib/dal/src/workspace_snapshot/conflict.rs @@ -7,8 +7,8 @@ use petgraph::stable_graph::NodeIndex; pub enum Conflict { // TODO(nick,jacob): this variant will not be possible until ordering is in place. ChildOrder { - ours: NodeIndex, - theirs: NodeIndex, + onto: NodeIndex, + to_rebase: NodeIndex, }, ModifyRemovedItem(NodeIndex), NodeContent { diff --git a/lib/dal/src/workspace_snapshot/graph.rs b/lib/dal/src/workspace_snapshot/graph.rs index c4caa43f2e..0b1d23ff20 100644 --- a/lib/dal/src/workspace_snapshot/graph.rs +++ b/lib/dal/src/workspace_snapshot/graph.rs @@ -299,9 +299,7 @@ impl WorkspaceSnapshotGraph { // We'll lazily populate these, since we don't know if we'll need it at all, and // we definitely don't want to be re-fetching this information inside the loop // below, as it will be identical every time. - let mut onto_edges = None; let mut onto_ordering_node_index = None; - let mut onto_order_set: Option> = None; // If everything with the same `lineage_id` is identical, then we can prune the // graph traversal, and avoid unnecessary lookups/comparisons. 
@@ -389,12 +387,6 @@ impl WorkspaceSnapshotGraph { onto_node_index, to_rebase_node_index ); - let onto_edges = onto_edges.get_or_insert_with(|| { - onto.graph.edges_directed(onto_node_index, Outgoing) - }); - let to_rebase_edges = - self.graph.edges_directed(to_rebase_node_index, Outgoing); - let (container_conflicts, container_updates) = self .find_unordered_container_membership_conflicts_and_updates( to_rebase_change_set, @@ -426,15 +418,6 @@ impl WorkspaceSnapshotGraph { "Comparing ordered containers: {:?}, {:?}", onto_node_index, to_rebase_node_index ); - if onto_order_set.is_none() { - if let NodeWeight::Ordering(onto_order_weight) = onto - .get_node_weight(onto_ordering_node_index) - .map_err(|_| event)? - { - onto_order_set = - Some(onto_order_weight.order().iter().copied().collect()); - }; - } let (container_conflicts, container_updates) = self .find_ordered_container_membership_conflicts_and_updates( to_rebase_change_set, @@ -592,6 +575,31 @@ impl WorkspaceSnapshotGraph { let onto_ordering_set: HashSet = onto_ordering.order().iter().copied().collect(); let to_rebase_ordering_set: HashSet = to_rebase_ordering.order().iter().copied().collect(); + + // Make sure that both `onto` and `to_rebase` have the same relative ordering for the + // nodes they have in common. If they don't, then that means that the order changed on + // at least one of them. 
+ let common_items: HashSet = onto_ordering_set + .intersection(&to_rebase_ordering_set) + .copied() + .collect(); + let common_onto_items = { + let mut items = onto_ordering.order().clone(); + items.retain(|i| common_items.contains(i)); + items + }; + let common_to_rebase_items = { + let mut items = to_rebase_ordering.order().clone(); + items.retain(|i| common_items.contains(i)); + items + }; + if common_onto_items != common_to_rebase_items { + conflicts.push(Conflict::ChildOrder { + onto: onto_ordering_index, + to_rebase: to_rebase_ordering_index, + }); + } + let only_onto_items: HashSet = onto_ordering_set .difference(&to_rebase_ordering_set) .copied() @@ -3950,4 +3958,280 @@ mod test { updates ); } + + #[test] + fn detect_conflicts_and_updates_simple_ordering_with_conflicting_ordering_updates() { + let initial_change_set = ChangeSet::new().expect("Unable to create ChangeSet"); + let initial_change_set = &initial_change_set; + let mut initial_graph = WorkspaceSnapshotGraph::new(initial_change_set) + .expect("Unable to create WorkspaceSnapshotGraph"); + + let schema_id = initial_change_set + .generate_ulid() + .expect("Cannot generate Ulid"); + let schema_index = initial_graph + .add_node( + NodeWeight::new_content( + initial_change_set, + schema_id, + ContentAddress::Schema(ContentHash::from("Schema A")), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add Schema A"); + let schema_variant_id = initial_change_set + .generate_ulid() + .expect("Cannot generate Ulid"); + let schema_variant_index = initial_graph + .add_node( + NodeWeight::new_content( + initial_change_set, + schema_variant_id, + ContentAddress::SchemaVariant(ContentHash::from("Schema Variant A")), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add Schema Variant A"); + + initial_graph + .add_edge( + initial_change_set, + initial_graph.root_index, + EdgeWeight::new(initial_change_set, EdgeWeightKind::Uses) + .expect("Unable to create EdgeWeight"), + 
schema_index, + ) + .expect("Unable to add root -> schema edge"); + initial_graph + .add_edge( + initial_change_set, + initial_graph + .get_node_index_by_id(schema_id) + .expect("Cannot get NodeIndex"), + EdgeWeight::new(initial_change_set, EdgeWeightKind::Uses) + .expect("Unable to create EdgeWeight"), + schema_variant_index, + ) + .expect("Unable to add schema -> schema variant edge"); + + let container_prop_id = initial_change_set + .generate_ulid() + .expect("Unable to generate Ulid"); + let container_prop_index = initial_graph + .add_ordered_node( + initial_change_set, + NodeWeight::new_content( + initial_change_set, + container_prop_id, + ContentAddress::Prop(ContentHash::new( + container_prop_id.to_string().as_bytes(), + )), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add container prop"); + initial_graph + .add_edge( + initial_change_set, + initial_graph + .get_node_index_by_id(schema_variant_id) + .expect("Unable to get NodeIndex"), + EdgeWeight::new(initial_change_set, EdgeWeightKind::Uses) + .expect("Unable to create EdgeWeight"), + container_prop_index, + ) + .expect("Unable to add schema variant -> container prop edge"); + + let ordered_prop_1_id = initial_change_set + .generate_ulid() + .expect("Unable to generate Ulid"); + let ordered_prop_1_index = initial_graph + .add_node( + NodeWeight::new_content( + initial_change_set, + ordered_prop_1_id, + ContentAddress::Prop(ContentHash::new( + ordered_prop_1_id.to_string().as_bytes(), + )), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add ordered prop 1"); + initial_graph + .add_edge( + initial_change_set, + initial_graph + .get_node_index_by_id(container_prop_id) + .expect("Unable to get NodeIndex"), + EdgeWeight::new(initial_change_set, EdgeWeightKind::Uses) + .expect("Unable to create EdgeWeight"), + ordered_prop_1_index, + ) + .expect("Unable to add container prop -> ordered prop 1 edge"); + + let ordered_prop_2_id = initial_change_set + 
.generate_ulid() + .expect("Unable to generate Ulid"); + let ordered_prop_2_index = initial_graph + .add_node( + NodeWeight::new_content( + initial_change_set, + ordered_prop_2_id, + ContentAddress::Prop(ContentHash::new( + ordered_prop_2_id.to_string().as_bytes(), + )), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add ordered prop 2"); + initial_graph + .add_edge( + initial_change_set, + initial_graph + .get_node_index_by_id(container_prop_id) + .expect("Unable to get NodeIndex"), + EdgeWeight::new(initial_change_set, EdgeWeightKind::Uses) + .expect("Unable to create EdgeWeight"), + ordered_prop_2_index, + ) + .expect("Unable to add container prop -> ordered prop 2 edge"); + + let ordered_prop_3_id = initial_change_set + .generate_ulid() + .expect("Unable to generate Ulid"); + let ordered_prop_3_index = initial_graph + .add_node( + NodeWeight::new_content( + initial_change_set, + ordered_prop_3_id, + ContentAddress::Prop(ContentHash::new( + ordered_prop_3_id.to_string().as_bytes(), + )), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add ordered prop 3"); + initial_graph + .add_edge( + initial_change_set, + initial_graph + .get_node_index_by_id(container_prop_id) + .expect("Unable to get NodeIndex"), + EdgeWeight::new(initial_change_set, EdgeWeightKind::Uses) + .expect("Unable to create EdgeWeight"), + ordered_prop_3_index, + ) + .expect("Unable to add container prop -> ordered prop 3 edge"); + + let ordered_prop_4_id = initial_change_set + .generate_ulid() + .expect("Unable to generate Ulid"); + let ordered_prop_4_index = initial_graph + .add_node( + NodeWeight::new_content( + initial_change_set, + ordered_prop_4_id, + ContentAddress::Prop(ContentHash::new( + ordered_prop_4_id.to_string().as_bytes(), + )), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add ordered prop 4"); + initial_graph + .add_edge( + initial_change_set, + initial_graph + .get_node_index_by_id(container_prop_id) + 
.expect("Unable to get NodeIndex"), + EdgeWeight::new(initial_change_set, EdgeWeightKind::Uses) + .expect("Unable to create EdgeWeight"), + ordered_prop_4_index, + ) + .expect("Unable to add container prop -> ordered prop 4 edge"); + + initial_graph.dot(); + + let new_change_set = ChangeSet::new().expect("Unable to create ChangeSet"); + let new_change_set = &new_change_set; + let mut new_graph = initial_graph.clone(); + + let new_order = vec![ + ordered_prop_2_id, + ordered_prop_1_id, + ordered_prop_4_id, + ordered_prop_3_id, + ]; + new_graph + .update_order(new_change_set, container_prop_id, new_order) + .expect("Unable to update order of container prop's children"); + + let ordered_prop_5_id = initial_change_set + .generate_ulid() + .expect("Unable to generate Ulid"); + let ordered_prop_5_index = initial_graph + .add_node( + NodeWeight::new_content( + initial_change_set, + ordered_prop_5_id, + ContentAddress::Prop(ContentHash::new( + ordered_prop_5_id.to_string().as_bytes(), + )), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add ordered prop 5"); + let new_edge_weight = EdgeWeight::new(initial_change_set, EdgeWeightKind::Uses) + .expect("Unable to create EdgeWeight"); + initial_graph + .add_edge( + initial_change_set, + initial_graph + .get_node_index_by_id(container_prop_id) + .expect("Unable to get NodeIndex"), + new_edge_weight.clone(), + ordered_prop_5_index, + ) + .expect("Unable to add container prop -> ordered prop 5 edge"); + + new_graph.dot(); + + let (conflicts, updates) = new_graph + .detect_conflicts_and_updates(new_change_set, &initial_graph, initial_change_set) + .expect("Unable to detect conflicts and updates"); + + assert_eq!( + vec![Conflict::ChildOrder { + onto: initial_graph + .ordering_node_index_for_container( + initial_graph + .get_node_index_by_id(container_prop_id) + .expect("Unable to get container NodeIndex") + ) + .expect("Unable to get ordering NodeIndex") + .expect("Ordering NodeIndex not found"), + 
to_rebase: new_graph + .ordering_node_index_for_container( + new_graph + .get_node_index_by_id(container_prop_id) + .expect("Unable to get container NodeIndex") + ) + .expect("Unable to get ordering NodeIndex") + .expect("Ordering NodeIndex not found"), + }], + conflicts + ); + assert_eq!( + vec![Update::NewEdge { + source: new_graph + .get_node_index_by_id(container_prop_id) + .expect("Unable to get new_graph container NodeIndex"), + destination: initial_graph + .get_node_index_by_id(ordered_prop_5_id) + .expect("Unable to get ordered prop 5 NodeIndex"), + edge_weight: new_edge_weight, + }], + updates + ); + } } From 863525a8e07305cf65c72e45220c96f9d87ed486 Mon Sep 17 00:00:00 2001 From: Jacob Helwig Date: Fri, 25 Aug 2023 12:46:44 -0700 Subject: [PATCH 14/92] Detect when a workspace snapshot has added an ordered child & rebasing onto one that removed a sibling of the new child We consider it to not be a conflict when we have a change set (workspace snapshot) that is attempting to add an item to an ordered container, and the change set that we're attempting to rebase onto has removed an element that the change set being rebased knows about, but has not modified in any way, nor has the change set being rebased reordered any of the pre-existing elements of the container. --- lib/dal/src/workspace_snapshot/edge_weight.rs | 7 + lib/dal/src/workspace_snapshot/graph.rs | 280 ++++++++++++++++++ lib/dal/src/workspace_snapshot/node_weight.rs | 9 + .../node_weight/content_node_weight.rs | 8 +- .../node_weight/ordering_node_weight.rs | 8 + 5 files changed, 309 insertions(+), 3 deletions(-) diff --git a/lib/dal/src/workspace_snapshot/edge_weight.rs b/lib/dal/src/workspace_snapshot/edge_weight.rs index c3e17c0b04..b36bcc1419 100644 --- a/lib/dal/src/workspace_snapshot/edge_weight.rs +++ b/lib/dal/src/workspace_snapshot/edge_weight.rs @@ -1,5 +1,6 @@ //! 
Edges +use chrono::{DateTime, Utc}; use serde::{Deserialize, Serialize}; use thiserror::Error; @@ -45,6 +46,12 @@ impl EdgeWeight { self.kind } + pub fn mark_seen_at(&mut self, change_set: &ChangeSet, seen_at: DateTime) { + if self.vector_clock_first_seen.entry_for(change_set).is_none() { + self.vector_clock_first_seen.inc_to(change_set, seen_at); + } + } + pub fn new(change_set: &ChangeSet, kind: EdgeWeightKind) -> EdgeWeightResult { Ok(Self { kind, diff --git a/lib/dal/src/workspace_snapshot/graph.rs b/lib/dal/src/workspace_snapshot/graph.rs index 0b1d23ff20..e606cc6722 100644 --- a/lib/dal/src/workspace_snapshot/graph.rs +++ b/lib/dal/src/workspace_snapshot/graph.rs @@ -1,3 +1,4 @@ +use chrono::Utc; use petgraph::{algo, prelude::*, visit::DfsEvent}; use serde::{Deserialize, Serialize}; use std::collections::{HashMap, HashSet}; @@ -966,6 +967,18 @@ impl WorkspaceSnapshotGraph { && algo::has_path_connecting(&self.graph, node, end, None) } + pub fn mark_graph_seen(&mut self, change_set: &ChangeSet) -> WorkspaceSnapshotGraphResult<()> { + let seen_at = Utc::now(); + for edge in self.graph.edge_weights_mut() { + edge.mark_seen_at(change_set, seen_at.clone()); + } + for node in self.graph.node_weights_mut() { + node.mark_seen_at(change_set, seen_at.clone()); + } + + Ok(()) + } + pub fn ordered_children_for_node( &self, container_node_index: NodeIndex, @@ -4234,4 +4247,271 @@ mod test { updates ); } + + #[test] + fn detect_conflicts_and_updates_simple_ordering_with_no_conflicts_add_in_onto_remove_in_to_rebase( + ) { + let initial_change_set = ChangeSet::new().expect("Unable to create ChangeSet"); + let initial_change_set = &initial_change_set; + let mut initial_graph = WorkspaceSnapshotGraph::new(initial_change_set) + .expect("Unable to create WorkspaceSnapshotGraph"); + + let schema_id = initial_change_set + .generate_ulid() + .expect("Cannot generate Ulid"); + let schema_index = initial_graph + .add_node( + NodeWeight::new_content( + initial_change_set, + schema_id, 
+ ContentAddress::Schema(ContentHash::from("Schema A")), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add Schema A"); + let schema_variant_id = initial_change_set + .generate_ulid() + .expect("Cannot generate Ulid"); + let schema_variant_index = initial_graph + .add_node( + NodeWeight::new_content( + initial_change_set, + schema_variant_id, + ContentAddress::SchemaVariant(ContentHash::from("Schema Variant A")), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add Schema Variant A"); + + initial_graph + .add_edge( + initial_change_set, + initial_graph.root_index, + EdgeWeight::new(initial_change_set, EdgeWeightKind::Uses) + .expect("Unable to create EdgeWeight"), + schema_index, + ) + .expect("Unable to add root -> schema edge"); + initial_graph + .add_edge( + initial_change_set, + initial_graph + .get_node_index_by_id(schema_id) + .expect("Cannot get NodeIndex"), + EdgeWeight::new(initial_change_set, EdgeWeightKind::Uses) + .expect("Unable to create EdgeWeight"), + schema_variant_index, + ) + .expect("Unable to add schema -> schema variant edge"); + + let container_prop_id = initial_change_set + .generate_ulid() + .expect("Unable to generate Ulid"); + let container_prop_index = initial_graph + .add_ordered_node( + initial_change_set, + NodeWeight::new_content( + initial_change_set, + container_prop_id, + ContentAddress::Prop(ContentHash::new( + container_prop_id.to_string().as_bytes(), + )), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add container prop"); + initial_graph + .add_edge( + initial_change_set, + initial_graph + .get_node_index_by_id(schema_variant_id) + .expect("Unable to get NodeIndex"), + EdgeWeight::new(initial_change_set, EdgeWeightKind::Uses) + .expect("Unable to create EdgeWeight"), + container_prop_index, + ) + .expect("Unable to add schema variant -> container prop edge"); + + let ordered_prop_1_id = initial_change_set + .generate_ulid() + .expect("Unable to generate 
Ulid"); + let ordered_prop_1_index = initial_graph + .add_node( + NodeWeight::new_content( + initial_change_set, + ordered_prop_1_id, + ContentAddress::Prop(ContentHash::new( + ordered_prop_1_id.to_string().as_bytes(), + )), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add ordered prop 1"); + initial_graph + .add_edge( + initial_change_set, + initial_graph + .get_node_index_by_id(container_prop_id) + .expect("Unable to get NodeIndex"), + EdgeWeight::new(initial_change_set, EdgeWeightKind::Uses) + .expect("Unable to create EdgeWeight"), + ordered_prop_1_index, + ) + .expect("Unable to add container prop -> ordered prop 1 edge"); + + let ordered_prop_2_id = initial_change_set + .generate_ulid() + .expect("Unable to generate Ulid"); + let ordered_prop_2_index = initial_graph + .add_node( + NodeWeight::new_content( + initial_change_set, + ordered_prop_2_id, + ContentAddress::Prop(ContentHash::new( + ordered_prop_2_id.to_string().as_bytes(), + )), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add ordered prop 2"); + initial_graph + .add_edge( + initial_change_set, + initial_graph + .get_node_index_by_id(container_prop_id) + .expect("Unable to get NodeIndex"), + EdgeWeight::new(initial_change_set, EdgeWeightKind::Uses) + .expect("Unable to create EdgeWeight"), + ordered_prop_2_index, + ) + .expect("Unable to add container prop -> ordered prop 2 edge"); + + let ordered_prop_3_id = initial_change_set + .generate_ulid() + .expect("Unable to generate Ulid"); + let ordered_prop_3_index = initial_graph + .add_node( + NodeWeight::new_content( + initial_change_set, + ordered_prop_3_id, + ContentAddress::Prop(ContentHash::new( + ordered_prop_3_id.to_string().as_bytes(), + )), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add ordered prop 3"); + initial_graph + .add_edge( + initial_change_set, + initial_graph + .get_node_index_by_id(container_prop_id) + .expect("Unable to get NodeIndex"), + 
EdgeWeight::new(initial_change_set, EdgeWeightKind::Uses) + .expect("Unable to create EdgeWeight"), + ordered_prop_3_index, + ) + .expect("Unable to add container prop -> ordered prop 3 edge"); + + let ordered_prop_4_id = initial_change_set + .generate_ulid() + .expect("Unable to generate Ulid"); + let ordered_prop_4_index = initial_graph + .add_node( + NodeWeight::new_content( + initial_change_set, + ordered_prop_4_id, + ContentAddress::Prop(ContentHash::new( + ordered_prop_4_id.to_string().as_bytes(), + )), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add ordered prop 4"); + initial_graph + .add_edge( + initial_change_set, + initial_graph + .get_node_index_by_id(container_prop_id) + .expect("Unable to get NodeIndex"), + EdgeWeight::new(initial_change_set, EdgeWeightKind::Uses) + .expect("Unable to create EdgeWeight"), + ordered_prop_4_index, + ) + .expect("Unable to add container prop -> ordered prop 4 edge"); + + initial_graph.cleanup(); + initial_graph + .mark_graph_seen(initial_change_set) + .expect("Unable to update recently seen information"); + // initial_graph.dot(); + + let new_change_set = ChangeSet::new().expect("Unable to create ChangeSet"); + let new_change_set = &new_change_set; + let mut new_graph = initial_graph.clone(); + + new_graph + .remove_edge( + new_change_set, + new_graph + .get_node_index_by_id(container_prop_id) + .expect("Unable to get container NodeIndex"), + ordered_prop_2_index, + EdgeWeightKind::Uses, + ) + .expect("Unable to remove container prop -> prop 2 edge"); + + let ordered_prop_5_id = initial_change_set + .generate_ulid() + .expect("Unable to generate Ulid"); + let ordered_prop_5_index = initial_graph + .add_node( + NodeWeight::new_content( + initial_change_set, + ordered_prop_5_id, + ContentAddress::Prop(ContentHash::new( + ordered_prop_5_id.to_string().as_bytes(), + )), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add ordered prop 5"); + + let new_edge_weight = 
EdgeWeight::new(initial_change_set, EdgeWeightKind::Uses) + .expect("Unable to create EdgeWeight"); + initial_graph + .add_edge( + initial_change_set, + initial_graph + .get_node_index_by_id(container_prop_id) + .expect("Unable to get NodeIndex"), + new_edge_weight.clone(), + ordered_prop_5_index, + ) + .expect("Unable to add container prop -> ordered prop 5 edge"); + + initial_graph.cleanup(); + initial_graph.dot(); + + new_graph.cleanup(); + new_graph.dot(); + + let (conflicts, updates) = new_graph + .detect_conflicts_and_updates(new_change_set, &initial_graph, initial_change_set) + .expect("Unable to detect conflicts and updates"); + + assert_eq!(Vec::::new(), conflicts); + assert_eq!( + vec![Update::NewEdge { + source: new_graph + .get_node_index_by_id(container_prop_id) + .expect("Unable to get new_graph container NodeIndex"), + destination: initial_graph + .get_node_index_by_id(ordered_prop_5_id) + .expect("Unable to get ordered prop 5 NodeIndex"), + edge_weight: new_edge_weight, + }], + updates + ); + } } diff --git a/lib/dal/src/workspace_snapshot/node_weight.rs b/lib/dal/src/workspace_snapshot/node_weight.rs index 547fba14d7..f1d25fa396 100644 --- a/lib/dal/src/workspace_snapshot/node_weight.rs +++ b/lib/dal/src/workspace_snapshot/node_weight.rs @@ -73,6 +73,15 @@ impl NodeWeight { } } + pub fn mark_seen_at(&mut self, change_set: &ChangeSet, seen_at: DateTime) { + match self { + NodeWeight::Content(content_weight) => content_weight.mark_seen_at(change_set, seen_at), + NodeWeight::Ordering(ordering_weight) => { + ordering_weight.mark_seen_at(change_set, seen_at) + } + } + } + pub fn merge_clocks( &mut self, change_set: &ChangeSet, diff --git a/lib/dal/src/workspace_snapshot/node_weight/content_node_weight.rs b/lib/dal/src/workspace_snapshot/node_weight/content_node_weight.rs index 9d1d5d0cb7..72b29c1bd2 100644 --- a/lib/dal/src/workspace_snapshot/node_weight/content_node_weight.rs +++ b/lib/dal/src/workspace_snapshot/node_weight/content_node_weight.rs @@ 
-106,10 +106,12 @@ impl ContentNodeWeight { self.lineage_id } - pub fn mark_seen(&mut self, change_set: &ChangeSet) -> NodeWeightResult<()> { + pub fn mark_seen_at(&mut self, change_set: &ChangeSet, seen_at: DateTime) { self.vector_clock_recently_seen - .inc(change_set) - .map_err(Into::into) + .inc_to(change_set, seen_at.clone()); + if self.vector_clock_first_seen.entry_for(change_set).is_none() { + self.vector_clock_first_seen.inc_to(change_set, seen_at); + } } pub fn merge_clocks( diff --git a/lib/dal/src/workspace_snapshot/node_weight/ordering_node_weight.rs b/lib/dal/src/workspace_snapshot/node_weight/ordering_node_weight.rs index 1ae9ab4a4e..ecfb3d6f77 100644 --- a/lib/dal/src/workspace_snapshot/node_weight/ordering_node_weight.rs +++ b/lib/dal/src/workspace_snapshot/node_weight/ordering_node_weight.rs @@ -45,6 +45,14 @@ impl OrderingNodeWeight { self.lineage_id } + pub fn mark_seen_at(&mut self, change_set: &ChangeSet, seen_at: DateTime) { + self.vector_clock_recently_seen + .inc_to(change_set, seen_at.clone()); + if self.vector_clock_first_seen.entry_for(change_set).is_none() { + self.vector_clock_first_seen.inc_to(change_set, seen_at); + } + } + pub fn merge_clocks( &mut self, change_set: &ChangeSet, From 0be06c9010ebc6abe046ea287673d1508de39302 Mon Sep 17 00:00:00 2001 From: Nick Gerace Date: Mon, 28 Aug 2023 12:44:14 -0400 Subject: [PATCH 15/92] Fix gobbler-server compilation by removing Subscription Subscription (from NATS) will not be used as we will be using RabbitMQ instead for gobbler-server. Since there was an upstream change that breaks compilation, it is not worth saving this specific Subscription usage. 
Signed-off-by: Nick Gerace --- lib/gobbler-server/src/server.rs | 66 -------------------------------- 1 file changed, 66 deletions(-) diff --git a/lib/gobbler-server/src/server.rs b/lib/gobbler-server/src/server.rs index dbaa2d1106..564871db64 100644 --- a/lib/gobbler-server/src/server.rs +++ b/lib/gobbler-server/src/server.rs @@ -265,53 +265,6 @@ pub struct JobItem { request: Result>, } -pub struct Subscriber; - -impl Subscriber { - pub async fn jobs( - metadata: Arc, - pg_pool: PgPool, - nats: NatsClient, - veritech: veritech_client::Client, - job_processor: Box, - encryption_key: Arc, - ) -> Result> { - let subject = nats_jobs_subject(nats.metadata().subject_prefix()); - debug!( - messaging.destination = &subject.as_str(), - "subscribing for job requests" - ); - - let services_context = ServicesContext::new( - pg_pool, - nats.clone(), - job_processor, - veritech.clone(), - encryption_key, - None, - None, - (), - ); - - // Make non blocking context here, and update it for each job - // Since the any blocking job should block on its child jobs - let ctx_builder = DalContext::builder(services_context, false); - - let messaging_destination = Arc::new(subject.clone()); - - Ok(nats_subscriber::Subscriber::create(subject) - .queue_name(NATS_JOBS_DEFAULT_QUEUE) - .start(&nats) - .await? - .map(move |request| JobItem { - metadata: metadata.clone(), - messaging_destination: messaging_destination.clone(), - ctx_builder: ctx_builder.clone(), - request: request.map_err(Into::into), - })) - } -} - #[allow(clippy::too_many_arguments)] async fn receive_job_requests_task( tx: UnboundedSender, @@ -350,25 +303,6 @@ async fn receive_job_requests( encryption_key: Arc, mut shutdown_watch_rx: watch::Receiver<()>, ) -> Result<()> { - let mut requests = Subscriber::jobs( - metadata, - pg_pool, - nats, - veritech, - job_processor, - encryption_key, - ) - .await? 
- .take_until_if(Box::pin(shutdown_watch_rx.changed().map(|_| true))); - - // Forward each request off the stream to a consuming task via an *unbounded* channel so we - // buffer requests until we run out of memory. Have fun! - while let Some(job) = requests.next().await { - if let Err(_job) = tx.send(job) { - error!("process_job_requests rx has already closed"); - } - } - Ok(()) } From d6074f756757eeae4eefb9bbdf0da6565bceb84f Mon Sep 17 00:00:00 2001 From: Nick Gerace Date: Thu, 24 Aug 2023 17:27:07 -0400 Subject: [PATCH 16/92] Add gobbler client/server architecture This commit adds the gobbler client/server architecture. Primarily, it operates on three crates: - si-rabbitmq (modified): the crate for interacting with RabbitMQ that SI services will use or are using - gobbler-server (modified): the crate for running an outer management consumer loop as well as inner core consumer loops - gobbler-client (new): the crate for interfacing with the gobbler-server and testing the communication between the two The addition of gobbler-client is at the center of this commit. Essentially, it is not only being used as the interface to gobbler-server, but it is also being used as the entrypoint for testing the gobbler-server lifecycle. The outer and inner consumer loops provide the ability to manage gobbler singletons as well as run the gobbler singletons. The outer loop consumes from the "gobbler-management" stream and its lifecycle is owned by the gobbler-server. The inner loops are created on a per-change-set basis and are only spun up and down by requests from gobbler-clients. The inner loops provide the core work for the "gobbler". However, in this commit, all the inner loops do is directly return the message they were sent. This is done for testing purposes to ensure that all the scaffolding is there and sound. As far as the inner RabbitMQ communications go, this commit exposes "OffsetSpecification" for consumers. It is unclear if this will be a temporary change or not.
We should know the exact offset of type u64 in the future, but for now, we are using what works for testing purposes. For context, these changes were driven by tests in the gobbler-client library. As a bonus, this commit also adds si-rabbitmq testin to protect against regressions. It's minimal for now and may remain that way. Signed-off-by: Nick Gerace --- Cargo.lock | 20 +- Cargo.toml | 47 +- lib/gobbler-client/BUCK | 18 + lib/gobbler-client/Cargo.toml | 17 + lib/gobbler-client/src/client.rs | 198 ++++++++ lib/gobbler-client/src/lib.rs | 125 +++++ lib/gobbler-server/Cargo.toml | 1 + lib/gobbler-server/src/config.rs | 49 +- lib/gobbler-server/src/lib.rs | 73 ++- lib/gobbler-server/src/server.rs | 430 ++++++++---------- .../tests/integration_test/mod.rs | 1 - .../tests/integration_test/queue.rs | 107 ----- lib/si-rabbitmq/BUCK | 5 - lib/si-rabbitmq/Cargo.toml | 5 - lib/si-rabbitmq/src/consumer.rs | 52 ++- lib/si-rabbitmq/src/delivery.rs | 37 ++ lib/si-rabbitmq/src/environment.rs | 32 +- lib/si-rabbitmq/src/error.rs | 9 +- lib/si-rabbitmq/src/lib.rs | 62 +++ lib/si-rabbitmq/src/producer.rs | 57 ++- 20 files changed, 845 insertions(+), 500 deletions(-) create mode 100644 lib/gobbler-client/BUCK create mode 100644 lib/gobbler-client/Cargo.toml create mode 100644 lib/gobbler-client/src/client.rs create mode 100644 lib/gobbler-client/src/lib.rs delete mode 100644 lib/gobbler-server/tests/integration_test/queue.rs create mode 100644 lib/si-rabbitmq/src/delivery.rs diff --git a/Cargo.lock b/Cargo.lock index 8f850db7f6..e0c49d557f 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2112,6 +2112,21 @@ dependencies = [ "tokio", ] +[[package]] +name = "gobbler-client" +version = "0.1.0" +dependencies = [ + "gobbler-server", + "remain", + "serde", + "serde_json", + "si-rabbitmq", + "telemetry", + "thiserror", + "tokio", + "ulid", +] + [[package]] name = "gobbler-server" version = "0.1.0" @@ -5111,18 +5126,13 @@ name = "si-rabbitmq" version = "0.1.0" dependencies = [ "futures", - 
"futures-lite", - "pin-project-lite", "rabbitmq-stream-client", "remain", "serde", "serde_json", - "si-data-nats", - "si-test-macros", "telemetry", "thiserror", "tokio", - "ulid", ] [[package]] diff --git a/Cargo.toml b/Cargo.toml index c62d10458f..5bbb2d9569 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -23,6 +23,7 @@ members = [ "lib/dal", "lib/dal-test", "lib/deadpool-cyclone", + "lib/gobbler-client", "lib/gobbler-server", "lib/module-index-client", "lib/module-index-server", @@ -62,9 +63,9 @@ clap = { version = "4.2.7", features = ["derive", "color", "env", "wrap_help"] } color-eyre = "0.6.2" colored = "2.0.4" comfy-table = { version = "7.0.1", features = [ - "crossterm", - "tty", - "custom_styling", + "crossterm", + "tty", + "custom_styling", ] } config = { version = "0.13.3", default-features = false, features = ["toml"] } console = "0.15.7" @@ -84,13 +85,13 @@ futures-lite = "1.13.0" hex = "0.4.3" http = "0.2.9" hyper = { version = "0.14.26", features = [ - "client", - "http1", - "runtime", - "server", + "client", + "http1", + "runtime", + "server", ] } hyperlocal = { version = "0.8.0", default-features = false, features = [ - "client", + "client", ] } iftree = "1.0.4" indicatif = "0.17.5" @@ -106,8 +107,8 @@ num_cpus = "1.15.0" once_cell = "1.17.1" open = "5.0.0" opentelemetry = { version = "~0.18.0", features = [ - "rt-tokio", - "trace", + "rt-tokio", + "trace", ] } # pinned, pending new release of tracing-opentelemetry, 0.18 opentelemetry-otlp = "~0.11.0" # pinned, pending new release of tracing-opentelemetry, post 0.18 opentelemetry-semantic-conventions = "~0.10.0" # pinned, pending new release of tracing-opentelemetry, post 0.18 @@ -127,20 +128,20 @@ refinery = { version = "0.8.9", features = ["tokio-postgres"] } regex = "1.8.1" remain = "0.2.8" reqwest = { version = "0.11.17", default-features = false, features = [ - "rustls-tls", - "json", - "multipart", + "rustls-tls", + "json", + "multipart", ] } rust-s3 = { version = "0.33.0", default-features = 
false, features = [ - "tokio-rustls-tls", + "tokio-rustls-tls", ] } rustls = "0.21.6" # pinned, pending update from tokio-rustls for async-nats sea-orm = { version = "0.11", features = [ - "sqlx-postgres", - "runtime-tokio-rustls", - "macros", - "with-chrono", - "debug-print", + "sqlx-postgres", + "runtime-tokio-rustls", + "macros", + "with-chrono", + "debug-print", ] } self-replace = "1.3.5" serde = { version = "1.0.160", features = ["derive", "rc"] } @@ -156,14 +157,14 @@ syn = { version = "2.0.15", features = ["full", "extra-traits"] } tar = "0.4.38" tempfile = "3.5.0" test-log = { version = "0.2.11", default-features = false, features = [ - "trace", + "trace", ] } thiserror = "1.0.40" tokio = { version = "1.28.0", features = ["full"] } tokio-postgres = { version = "0.7.8", features = [ - "runtime", - "with-chrono-0_4", - "with-serde_json-1", + "runtime", + "with-chrono-0_4", + "with-serde_json-1", ] } tokio-serde = { version = "0.8.0", features = ["json"] } tokio-stream = "0.1.14" diff --git a/lib/gobbler-client/BUCK b/lib/gobbler-client/BUCK new file mode 100644 index 0000000000..5a739aa551 --- /dev/null +++ b/lib/gobbler-client/BUCK @@ -0,0 +1,18 @@ +load("@prelude-si//:macros.bzl", "rust_library") + +rust_library( + name = "gobbler-client", + deps = [ + "//lib/gobbler-server:gobbler-server", + "//lib/si-rabbitmq:si-rabbitmq", + "//lib/telemetry-rs:telemetry", + "//third-party/rust:remain", + "//third-party/rust:serde", + "//third-party/rust:serde_json", + "//third-party/rust:thiserror", + "//third-party/rust:ulid", + ], + srcs = glob([ + "src/**/*.rs", + ]), +) diff --git a/lib/gobbler-client/Cargo.toml b/lib/gobbler-client/Cargo.toml new file mode 100644 index 0000000000..2ca468f6b8 --- /dev/null +++ b/lib/gobbler-client/Cargo.toml @@ -0,0 +1,17 @@ +[package] +name = "gobbler-client" +version = "0.1.0" +edition = "2021" +rust-version = "1.64" +publish = false + +[dependencies] +gobbler-server = { path = "../../lib/gobbler-server" } +remain = { workspace = 
true } +serde = { workspace = true } +serde_json = { workspace = true } +si-rabbitmq = { path = "../../lib/si-rabbitmq" } +telemetry = { path = "../../lib/telemetry-rs" } +thiserror = { workspace = true } +tokio = { workspace = true } +ulid = { workspace = true } diff --git a/lib/gobbler-client/src/client.rs b/lib/gobbler-client/src/client.rs new file mode 100644 index 0000000000..4df441d79d --- /dev/null +++ b/lib/gobbler-client/src/client.rs @@ -0,0 +1,198 @@ +//! This module provides [`Client`], which is used for communicating with a running +//! gobbler [`Server`](gobbler_server::Server). + +use gobbler_server::{ManagementMessage, ManagementMessageAction, GOBBLER_MANAGEMENT_STREAM}; +use serde::Serialize; +use si_rabbitmq::{Consumer, ConsumerOffsetSpecification, Environment, Producer}; +use std::collections::HashMap; +use telemetry::prelude::{debug, error}; +use ulid::Ulid; + +use crate::{ClientError, ClientResult}; + +const GOBBLER_REPLY_STREAM_PREFIX: &str = "gobbler-reply"; + +/// A client for communicating with a running gobbler [`Server`](gobbler_server::Server). +#[allow(missing_debug_implementations)] +pub struct Client { + management_stream: Stream, + streams: HashMap, +} + +#[allow(missing_debug_implementations)] +struct Stream { + producer: Producer, + reply_stream: String, + reply_consumer: Consumer, +} + +impl Client { + /// Creates a new [`Client`] to communicate with a running gobbler + /// [`Server`](gobbler_server::Server). + pub async fn new() -> ClientResult { + let environment = Environment::new().await?; + + // First, create the reply stream. We do not check if it already exists since the reply + // stream name is ULID-based. It's unlikely that there will be a collision. 
+ let unique_identifier = Ulid::new().to_string(); + let management_reply_stream = format!("gobbler-management-reply-{unique_identifier}"); + environment.create_stream(&management_reply_stream).await?; + let management_reply_consumer = Consumer::new( + &environment, + &management_reply_stream, + ConsumerOffsetSpecification::Next, + ) + .await?; + + // Name the producer using the reply stream, but produce to the primary gobbler stream. This + // may... will... uh... potentially?... be useful for tracing. + let management_producer = + Producer::new(&environment, unique_identifier, GOBBLER_MANAGEMENT_STREAM).await?; + + Ok(Self { + management_stream: Stream { + producer: management_producer, + reply_stream: management_reply_stream, + reply_consumer: management_reply_consumer, + }, + streams: HashMap::new(), + }) + } + + /// Send a message to a gobbler stream for a change set and block for a reply. + pub async fn send_with_reply( + &mut self, + message: T, + change_set_id: Ulid, + ) -> ClientResult> { + let stream = self + .streams + .get_mut(&change_set_id) + .ok_or(ClientError::GobblerStreamForChangeSetNotFound)?; + stream + .producer + .send_single(message, Some(stream.reply_stream.clone())) + .await?; + if let Some(delivery) = stream.reply_consumer.next().await? { + if let Some(contents) = delivery.message_contents { + return Ok(Some(serde_json::from_value(contents)?)); + } + } + Ok(None) + } + + /// Send a message to the management stream to open a gobbler loop and block for a reply. + pub async fn send_management_open( + &mut self, + change_set_id: Ulid, + ) -> ClientResult> { + self.management_stream + .producer + .send_single( + ManagementMessage { + change_set_id, + action: ManagementMessageAction::Open, + }, + Some(self.management_stream.reply_stream.clone()), + ) + .await?; + if let Some(delivery) = self.management_stream.reply_consumer.next().await? 
{ + if let Some(contents) = delivery.message_contents { + let change_set_stream: String = serde_json::from_value(contents)?; + + let environment = Environment::new().await?; + let reply_stream = format!("{GOBBLER_REPLY_STREAM_PREFIX}-{change_set_id}"); + environment.create_stream(&reply_stream).await?; + + // FIXME(nick): name the producer properly. + let producer = Producer::new(&environment, "producer", &change_set_stream).await?; + let reply_consumer = Consumer::new( + &environment, + &reply_stream, + ConsumerOffsetSpecification::First, + ) + .await?; + + self.streams.insert( + change_set_id, + Stream { + producer, + reply_stream, + reply_consumer, + }, + ); + return Ok(Some(change_set_stream)); + } + } + Ok(None) + } + + /// Send a message to the management stream to close a gobbler loop and do not wait for a reply. + pub async fn send_management_close(&mut self, change_set_id: Ulid) -> ClientResult<()> { + self.management_stream + .producer + .send_single( + ManagementMessage { + change_set_id, + action: ManagementMessageAction::Close, + }, + Some(self.management_stream.reply_stream.clone()), + ) + .await?; + + match self.streams.remove(&change_set_id) { + Some(stream) => { + if let Err(e) = stream.producer.close().await { + error!("{e}"); + } + let handle = stream.reply_consumer.handle(); + if let Err(e) = handle.close().await { + error!("{e}"); + } + let environment = Environment::new().await?; + environment.delete_stream(stream.reply_stream).await?; + } + None => { + debug!("producer and reply consumer not found for change set id: {change_set_id}") + } + } + Ok(()) + } + + /// This method performs an infallible close of all producers and consumers created by the + /// client. + pub async fn close(mut self) { + // First, close all producers and consumers for the streams. 
+ for (_, stream) in self.streams.drain() { + if let Err(e) = stream.producer.close().await { + error!("{e}"); + } + let handle = stream.reply_consumer.handle(); + if let Err(e) = handle.close().await { + error!("{e}"); + } + } + + // Then, close the management producer and consumer. + if let Err(e) = self.management_stream.producer.close().await { + error!("{e}"); + } + let handle = self.management_stream.reply_consumer.handle(); + if let Err(e) = handle.close().await { + error!("{e}"); + } + + // Finally, delete the reply stream. + match Environment::new().await { + Ok(environment) => { + if let Err(e) = environment + .delete_stream(self.management_stream.reply_stream) + .await + { + error!("{e}"); + } + } + Err(e) => error!("{e}"), + } + } +} diff --git a/lib/gobbler-client/src/lib.rs b/lib/gobbler-client/src/lib.rs new file mode 100644 index 0000000000..bdcd625045 --- /dev/null +++ b/lib/gobbler-client/src/lib.rs @@ -0,0 +1,125 @@ +//! This crate provides the gobbler [`Client`], which is used for communicating with a running +//! gobbler [`Server`](gobbler_server::Server). 
+ +#![warn( + missing_debug_implementations, + missing_docs, + unreachable_pub, + bad_style, + dead_code, + improper_ctypes, + non_shorthand_field_patterns, + no_mangle_generic_items, + overflowing_literals, + path_statements, + patterns_in_fns_without_body, + private_in_public, + unconditional_recursion, + unused, + unused_allocation, + unused_comparisons, + unused_parens, + while_true, + clippy::missing_panics_doc +)] + +mod client; + +pub use client::Client; + +use si_rabbitmq::RabbitError; +use telemetry::prelude::error; +use thiserror::Error; + +#[allow(missing_docs)] +#[remain::sorted] +#[derive(Debug, Error)] +pub enum ClientError { + #[error("gobbler stream for change set not found")] + GobblerStreamForChangeSetNotFound, + #[error("si rabbitmq error: {0}")] + Rabbit(#[from] RabbitError), + #[error("serde json error: {0}")] + SerdeJson(#[from] serde_json::Error), +} + +#[allow(missing_docs)] +pub type ClientResult = Result; + +#[cfg(test)] +mod tests { + use super::*; + use gobbler_server::{ConfigBuilder, Server}; + use tokio::test; + use ulid::Ulid; + + async fn test_setup() -> Client { + // FIXME(nick): make this not brittle... make strong! 
+ let config = ConfigBuilder::default() + .cyclone_encryption_key_path( + "../../lib/cyclone-server/src/dev.encryption.key" + .try_into() + .expect("could not convert"), + ) + .build() + .expect("could not build config"); + let server = Server::from_config(config) + .await + .expect("could not build server"); + tokio::spawn(server.run()); + + Client::new().await.expect("could not build client") + } + + #[test] + async fn connect() { + let client = test_setup().await; + client.close().await; + } + + #[test] + async fn send_management() { + let mut client = test_setup().await; + + let change_set_id = Ulid::new(); + let _new_stream_to_produce_to = client + .send_management_open(change_set_id) + .await + .expect("could not create new gobbler loop for change set") + .expect("no message returned"); + + client + .send_management_close(change_set_id) + .await + .expect("could not close the gobbler loop for change set"); + + client.close().await; + } + + #[test] + async fn send_management_and_round_trip() { + let mut client = test_setup().await; + + let change_set_id = Ulid::new(); + let _new_stream_to_produce_to = client + .send_management_open(change_set_id) + .await + .expect("could not create new gobbler loop for change set") + .expect("no message returned"); + + let contents = "MUSTANG GTD"; + let message = client + .send_with_reply(contents, change_set_id) + .await + .expect("could not send message") + .expect("no message returned"); + assert_eq!(contents, &message); + + client + .send_management_close(change_set_id) + .await + .expect("could not close the gobbler loop for change set"); + + client.close().await; + } +} diff --git a/lib/gobbler-server/Cargo.toml b/lib/gobbler-server/Cargo.toml index dc236b9cb4..e095c4a75b 100644 --- a/lib/gobbler-server/Cargo.toml +++ b/lib/gobbler-server/Cargo.toml @@ -5,6 +5,7 @@ edition = "2021" rust-version = "1.64" publish = false +# TODO(nick): validate all these dependencies. 
[dependencies] buck2-resources = { path = "../../lib/buck2-resources" } dal = { path = "../../lib/dal" } diff --git a/lib/gobbler-server/src/config.rs b/lib/gobbler-server/src/config.rs index 3673f79b8a..71f998211f 100644 --- a/lib/gobbler-server/src/config.rs +++ b/lib/gobbler-server/src/config.rs @@ -9,12 +9,11 @@ use si_std::CanonicalFile; use si_std::CanonicalFileError; use telemetry::prelude::*; use thiserror::Error; -use ulid::Ulid; -pub use si_settings::{StandardConfig, StandardConfigFile}; - -const DEFAULT_CONCURRENCY_LIMIT: usize = 5; +use crate::StandardConfig; +use crate::StandardConfigFile; +#[allow(missing_docs)] #[remain::sorted] #[derive(Debug, Error)] pub enum ConfigError { @@ -36,6 +35,7 @@ impl ConfigError { type Result = std::result::Result; +/// The set of configuration options for building a [`Server`]. #[derive(Debug, Builder)] pub struct Config { #[builder(default = "PgPoolConfig::default()")] @@ -46,11 +46,8 @@ pub struct Config { cyclone_encryption_key_path: CanonicalFile, - #[builder(default = "default_concurrency_limit()")] - concurrency: usize, - - #[builder(default = "random_instance_id()")] - instance_id: String, + #[builder(default = "false")] + recreate_management_stream: bool, } impl StandardConfig for Config { @@ -81,17 +78,13 @@ impl Config { self.cyclone_encryption_key_path.as_path() } - /// Gets the config's concurrency limit. - pub fn concurrency(&self) -> usize { - self.concurrency - } - - /// Gets the config's instance ID. - pub fn instance_id(&self) -> &str { - self.instance_id.as_ref() + /// Gets the toggle on if the RabbitMQ Stream will be re-created + pub fn recreate_management_stream(&self) -> bool { + self.recreate_management_stream } } +/// The configuration file for creating a [`Server`]. 
#[derive(Clone, Debug, Deserialize, Serialize)] pub struct ConfigFile { #[serde(default)] @@ -100,10 +93,8 @@ pub struct ConfigFile { nats: NatsConfig, #[serde(default = "default_cyclone_encryption_key_path")] cyclone_encryption_key_path: String, - #[serde(default = "default_concurrency_limit")] - concurrency_limit: usize, - #[serde(default = "random_instance_id")] - instance_id: String, + #[serde(default = "default_recreate_management_stream")] + recreate_management_stream: bool, } impl Default for ConfigFile { @@ -112,8 +103,7 @@ impl Default for ConfigFile { pg: Default::default(), nats: Default::default(), cyclone_encryption_key_path: default_cyclone_encryption_key_path(), - concurrency_limit: default_concurrency_limit(), - instance_id: random_instance_id(), + recreate_management_stream: false, } } } @@ -132,26 +122,21 @@ impl TryFrom for Config { config.pg_pool(value.pg); config.nats(value.nats); config.cyclone_encryption_key_path(value.cyclone_encryption_key_path.try_into()?); - config.concurrency(value.concurrency_limit); - config.instance_id(value.instance_id); + config.recreate_management_stream(value.recreate_management_stream); config.build().map_err(Into::into) } } -fn random_instance_id() -> String { - Ulid::new().to_string() -} - fn default_cyclone_encryption_key_path() -> String { "/run/gobbler/cyclone_encryption.key".to_string() } -fn default_concurrency_limit() -> usize { - DEFAULT_CONCURRENCY_LIMIT +fn default_recreate_management_stream() -> bool { + false } #[allow(clippy::disallowed_methods)] // Used to determine if running in development -pub fn detect_and_configure_development(config: &mut ConfigFile) -> Result<()> { +fn detect_and_configure_development(config: &mut ConfigFile) -> Result<()> { if env::var("BUCK_RUN_BUILD_ID").is_ok() || env::var("BUCK_BUILD_ID").is_ok() { buck2_development(config) } else if let Ok(dir) = env::var("CARGO_MANIFEST_DIR") { diff --git a/lib/gobbler-server/src/lib.rs b/lib/gobbler-server/src/lib.rs index 
e284820890..8cea39cd5b 100644 --- a/lib/gobbler-server/src/lib.rs +++ b/lib/gobbler-server/src/lib.rs @@ -1,25 +1,62 @@ +//! This crate provides the gobbler [`Server`]. + +#![warn( + missing_debug_implementations, + missing_docs, + unreachable_pub, + bad_style, + dead_code, + improper_ctypes, + non_shorthand_field_patterns, + no_mangle_generic_items, + overflowing_literals, + path_statements, + patterns_in_fns_without_body, + private_in_public, + unconditional_recursion, + unused, + unused_allocation, + unused_comparisons, + unused_parens, + while_true, + clippy::missing_panics_doc +)] + mod config; -pub mod server; +mod server; + +pub use config::Config; +pub use config::ConfigBuilder; +pub use config::ConfigError; +pub use config::ConfigFile; +pub use server::Server; +pub use si_settings::StandardConfig; +pub use si_settings::StandardConfigFile; + +use serde::{Deserialize, Serialize}; +use ulid::Ulid; -pub use crate::{ - config::{ - detect_and_configure_development, Config, ConfigBuilder, ConfigError, ConfigFile, - StandardConfig, StandardConfigFile, - }, - server::{Server, ServerError}, -}; +/// Stream to manage gobbler consumer loops. +pub const GOBBLER_MANAGEMENT_STREAM: &str = "gobbler-management"; -const NATS_JOBS_DEFAULT_SUBJECT: &str = "gobbler-jobs"; -const NATS_JOBS_DEFAULT_QUEUE: &str = "gobbler"; +/// Stream prefix for gobbler consumer loops. +pub const GOBBLER_STREAM_PREFIX: &str = "gobbler"; -pub fn nats_jobs_subject(prefix: Option<&str>) -> String { - nats_subject(prefix, NATS_JOBS_DEFAULT_SUBJECT) +/// The action for the gobbler management loop. +#[derive(Debug, Serialize, Deserialize)] +pub enum ManagementMessageAction { + /// Close the inner gobbler loop for a change set. If it has already been closed, this is a + /// no-op. + Close, + /// Open the inner gobbler loop for a change set. If one already exists, it is a no-op. 
+ Open, } -pub fn nats_subject(prefix: Option<&str>, suffix: impl AsRef) -> String { - let suffix = suffix.as_ref(); - match prefix { - Some(prefix) => format!("{prefix}.{suffix}"), - None => suffix.to_string(), - } +/// The message that the gobbler management consumer expects in the server. +#[derive(Debug, Serialize, Deserialize)] +pub struct ManagementMessage { + /// The ID of the change set wishing to be operated on. + pub change_set_id: Ulid, + /// The action to instruct the management loop to perform. + pub action: ManagementMessageAction, } diff --git a/lib/gobbler-server/src/server.rs b/lib/gobbler-server/src/server.rs index 564871db64..59a88ef4bf 100644 --- a/lib/gobbler-server/src/server.rs +++ b/lib/gobbler-server/src/server.rs @@ -1,35 +1,35 @@ +use std::collections::HashMap; use std::{io, path::Path, sync::Arc}; use dal::{ - job::{ - consumer::{JobConsumer, JobConsumerError, JobInfo}, - definition::{FixesJob, RefreshJob}, - producer::BlockingJobError, - }, - DalContext, DalContextBuilder, DependentValuesUpdate, InitializationError, JobFailure, - JobFailureError, JobQueueProcessor, NatsProcessor, ServicesContext, TransactionsError, + job::consumer::JobConsumerError, DalContext, InitializationError, JobFailureError, + JobQueueProcessor, NatsProcessor, ServicesContext, TransactionsError, }; use futures::{FutureExt, Stream, StreamExt}; -use nats_subscriber::{Request, SubscriberError}; -use si_crypto::SymmetricCryptoService; +use nats_subscriber::SubscriberError; use si_data_nats::{NatsClient, NatsConfig, NatsError}; use si_data_pg::{PgPool, PgPoolConfig, PgPoolError}; -use stream_cancel::StreamExt as StreamCancelStreamExt; +use si_rabbitmq::{ + Consumer, ConsumerHandle, ConsumerOffsetSpecification, Environment, Producer, RabbitError, +}; use telemetry::prelude::*; use thiserror::Error; use tokio::{ signal::unix, sync::{ - mpsc::{self, UnboundedReceiver, UnboundedSender}, + mpsc::{self}, oneshot, watch, }, - task, }; use 
tokio_stream::wrappers::UnboundedReceiverStream; +use ulid::Ulid; use veritech_client::{Client as VeritechClient, CycloneEncryptionKey, CycloneEncryptionKeyError}; -use crate::{nats_jobs_subject, Config, NATS_JOBS_DEFAULT_QUEUE}; +use crate::GOBBLER_STREAM_PREFIX; +use crate::{Config, GOBBLER_MANAGEMENT_STREAM}; +use crate::{ManagementMessage, ManagementMessageAction}; +#[allow(missing_docs)] #[remain::sorted] #[derive(Debug, Error)] pub enum ServerError { @@ -41,10 +41,16 @@ pub enum ServerError { JobConsumer(#[from] JobConsumerError), #[error(transparent)] JobFailure(#[from] Box), + #[error("missing management message contents")] + MissingManagementMessageContents, + #[error("missing management message \"reply_to\" field")] + MissingManagementMessageReplyTo, #[error(transparent)] Nats(#[from] NatsError), #[error(transparent)] PgPool(#[from] Box), + #[error("rabbit error {0}")] + Rabbit(#[from] RabbitError), #[error(transparent)] SerdeJson(#[from] serde_json::Error), #[error("failed to setup signal handler")] @@ -53,8 +59,6 @@ pub enum ServerError { Subscriber(#[from] SubscriberError), #[error(transparent)] Transactions(#[from] Box), - #[error("unknown job kind {0}")] - UnknownJobKind(String), } impl From for ServerError { @@ -75,10 +79,11 @@ impl From for ServerError { } } -type Result = std::result::Result; +type ServerResult = Result; +/// The [`Server`] for managing gobbler tasks. +#[allow(missing_debug_implementations)] pub struct Server { - concurrency_limit: usize, encryption_key: Arc, nats: NatsClient, pg_pool: PgPool, @@ -90,15 +95,18 @@ pub struct Server { /// An external shutdown sender handle which can be handed out to external callers who wish to /// trigger a server shutdown at will. 
external_shutdown_tx: mpsc::Sender, - /// An internal graceful shutdown receiever handle which the server's main thread uses to stop + /// An internal graceful shutdown receiver handle which the server's main thread uses to stop /// accepting work when a shutdown event is in progress. graceful_shutdown_rx: oneshot::Receiver<()>, - metadata: Arc, + /// If enabled, re-create the RabbitMQ Stream. If disabled, create the Stream if it does not + /// exist. + recreate_management_stream: bool, } impl Server { + /// Build a [`Server`] from a given [`Config`]. #[instrument(name = "gobbler.init.from_config", skip_all)] - pub async fn from_config(config: Config) -> Result { + pub async fn from_config(config: Config) -> ServerResult { dal::init()?; let encryption_key = @@ -109,27 +117,25 @@ impl Server { let job_processor = Self::create_job_processor(nats.clone()); Self::from_services( - config.instance_id().to_string(), - config.concurrency(), encryption_key, nats, pg_pool, veritech, job_processor, + config.recreate_management_stream(), ) } - #[allow(clippy::too_many_arguments)] + /// Build a [`Server`] from information provided via companion services. #[instrument(name = "gobbler.init.from_services", skip_all)] pub fn from_services( - instance_id: impl Into, - concurrency_limit: usize, encryption_key: Arc, nats: NatsClient, pg_pool: PgPool, veritech: VeritechClient, job_processor: Box, - ) -> Result { + recreate_management_stream: bool, + ) -> ServerResult { // An mpsc channel which can be used to externally shut down the server. 
let (external_shutdown_tx, external_shutdown_rx) = mpsc::channel(4); // A watch channel used to notify internal parts of the server that a shutdown event is in @@ -139,16 +145,11 @@ impl Server { dal::init()?; - let metadata = ServerMetadata { - job_instance: instance_id.into(), - job_invoked_provider: "si", - }; - let graceful_shutdown_rx = prepare_graceful_shutdown(external_shutdown_rx, shutdown_watch_tx)?; Ok(Server { - concurrency_limit, + recreate_management_stream, pg_pool, nats, veritech, @@ -157,24 +158,14 @@ impl Server { shutdown_watch_rx, external_shutdown_tx, graceful_shutdown_rx, - metadata: Arc::new(metadata), }) } - pub async fn run(self) -> Result<()> { - let (tx, rx) = mpsc::unbounded_channel(); - - // Span a task to receive and process jobs from the unbounded channel - drop(task::spawn(process_job_requests_task( - rx, - self.concurrency_limit, - ))); - - // Run "the main loop" which pulls message from a subscription off NATS and forwards each - // request to an unbounded channel - receive_job_requests_task( - tx, - self.metadata, + /// The primary function for running the server. This should be called when deciding to run + /// the server as a task, in a standalone binary, etc. + pub async fn run(self) -> ServerResult<()> { + consume_stream_task( + self.recreate_management_stream, self.pg_pool, self.nats, self.veritech, @@ -190,28 +181,30 @@ impl Server { Ok(()) } - /// Gets a [`ShutdownHandle`](GobblerShutdownHandle) that can externally or on demand trigger the server's shutdown + /// Gets a [`ShutdownHandle`](ServerShutdownHandle) that can externally or on demand trigger the server's shutdown /// process. 
- pub fn shutdown_handle(&self) -> GobblerShutdownHandle { - GobblerShutdownHandle { + pub fn shutdown_handle(&self) -> ServerShutdownHandle { + ServerShutdownHandle { shutdown_tx: self.external_shutdown_tx.clone(), } } #[instrument(name = "gobbler.init.load_encryption_key", skip_all)] - async fn load_encryption_key(path: impl AsRef) -> Result> { + async fn load_encryption_key( + path: impl AsRef, + ) -> ServerResult> { Ok(Arc::new(CycloneEncryptionKey::load(path).await?)) } #[instrument(name = "gobbler.init.connect_to_nats", skip_all)] - async fn connect_to_nats(nats_config: &NatsConfig) -> Result { + async fn connect_to_nats(nats_config: &NatsConfig) -> ServerResult { let client = NatsClient::new(nats_config).await?; debug!("successfully connected nats client"); Ok(client) } #[instrument(name = "gobbler.init.create_pg_pool", skip_all)] - async fn create_pg_pool(pg_pool_config: &PgPoolConfig) -> Result { + async fn create_pg_pool(pg_pool_config: &PgPoolConfig) -> ServerResult { let pool = PgPool::new(pg_pool_config).await?; debug!("successfully started pg pool (note that not all connections may be healthy)"); Ok(pool) @@ -228,17 +221,13 @@ impl Server { } } -#[derive(Clone, Debug)] -pub struct ServerMetadata { - job_instance: String, - job_invoked_provider: &'static str, -} - -pub struct GobblerShutdownHandle { +#[allow(missing_docs, missing_debug_implementations)] +pub struct ServerShutdownHandle { shutdown_tx: mpsc::Sender, } -impl GobblerShutdownHandle { +impl ServerShutdownHandle { + /// Perform server shutdown with the handle. 
pub async fn shutdown(self) { if let Err(err) = self.shutdown_tx.send(ShutdownSource::Handle).await { warn!(error = ?err, "shutdown tx returned error, receiver is likely already closed"); @@ -248,7 +237,7 @@ impl GobblerShutdownHandle { #[remain::sorted] #[derive(Debug, Eq, PartialEq)] -pub enum ShutdownSource { +enum ShutdownSource { Handle, } @@ -258,17 +247,9 @@ impl Default for ShutdownSource { } } -pub struct JobItem { - metadata: Arc, - messaging_destination: Arc, - ctx_builder: DalContextBuilder, - request: Result>, -} - #[allow(clippy::too_many_arguments)] -async fn receive_job_requests_task( - tx: UnboundedSender, - metadata: Arc, +async fn consume_stream_task( + recreate_management_stream: bool, pg_pool: PgPool, nats: NatsClient, veritech: veritech_client::Client, @@ -276,9 +257,8 @@ async fn receive_job_requests_task( encryption_key: Arc, shutdown_watch_rx: watch::Receiver<()>, ) { - if let Err(err) = receive_job_requests( - tx, - metadata, + if let Err(err) = consume_stream( + recreate_management_stream, pg_pool, nats, veritech, @@ -288,212 +268,158 @@ async fn receive_job_requests_task( ) .await { - warn!(error = ?err, "processing job requests failed"); + info!(error = ?err, "consuming stream failed"); } } #[allow(clippy::too_many_arguments)] -async fn receive_job_requests( - tx: UnboundedSender, - metadata: Arc, +async fn consume_stream( + recreate_management_stream: bool, pg_pool: PgPool, nats: NatsClient, veritech: veritech_client::Client, job_processor: Box, encryption_key: Arc, mut shutdown_watch_rx: watch::Receiver<()>, -) -> Result<()> { - Ok(()) -} +) -> ServerResult<()> { + let services_context = ServicesContext::new( + pg_pool, + nats.clone(), + job_processor, + veritech.clone(), + encryption_key, + None, + None, + (), + ); + let _ctx_builder = DalContext::builder(services_context, false); + + // Meta: we can only have one gobbler instance right now due to https://github.com/rabbitmq/rabbitmq-stream-rust-client/issues/130 + // + // 1) 
subscribe to "next" for changeset close/create events --> stream for ChangeSetClose or ChangeSetOpen + // --> "gobbler-management" + // 2) query db for all named, open changesets + // 3) start a subscription for each result for step 2 + // --> "gobbler-" + // 1:N --> "gobbler--reply--" + // (e.g. "gobbler--reply-sdf-") + // note: requester deletes stream upon reply + // + // NOTE: QUERY DB FOR OFFSET NUMBER OR GO TO FIRST SPECIFICATION + + // Prepare the environment and management stream. + let environment = Environment::new().await?; + if recreate_management_stream { + environment.delete_stream(GOBBLER_MANAGEMENT_STREAM).await?; + } + environment.create_stream(GOBBLER_MANAGEMENT_STREAM).await?; -async fn process_job_requests_task(rx: UnboundedReceiver, concurrency_limit: usize) { - UnboundedReceiverStream::new(rx) - .for_each_concurrent(concurrency_limit, |job| async move { - // Got the next message from the subscriber - trace!("pulled request into an available concurrent task"); - - match job.request { - Ok(request) => { - // Spawn a task and process the request - let join_handle = task::spawn(execute_job_task( - job.metadata, - job.messaging_destination, - job.ctx_builder, - request, - )); - if let Err(err) = join_handle.await { - // NOTE(fnichol): This likely happens when there is contention or - // an error in the Tokio runtime so we will be loud and log an - // error under the assumptions that 1) this event rarely - // happens and 2) the task code did not contribute to trigger - // the `JoinError`. - error!( - error = ?err, - "execute-job-task failed to execute to completion" - ); - }; + let mut management_consumer = Consumer::new( + &environment, + GOBBLER_MANAGEMENT_STREAM, + ConsumerOffsetSpecification::Next, + ) + .await?; + let management_handle = management_consumer.handle(); + let mut gobbler_handles: HashMap = HashMap::new(); + + while let Some(management_delivery) = management_consumer.next().await? 
{ + let contents = management_delivery + .message_contents + .ok_or(ServerError::MissingManagementMessageContents)?; + let reply_to = management_delivery + .reply_to + .ok_or(ServerError::MissingManagementMessageReplyTo)?; + let mm: ManagementMessage = serde_json::from_value(contents)?; + + match mm.action { + ManagementMessageAction::Close => match gobbler_handles.remove(&mm.change_set_id) { + Some((stream, handle)) => { + if let Err(e) = handle.close().await { + error!("{e}"); + } + if let Err(e) = environment.delete_stream(stream).await { + error!("{e}"); + } } - Err(err) => { - warn!(error = ?err, "next job request had an error, job will not be executed"); + None => debug!( + "did not find handle for change set id: {}", + mm.change_set_id + ), + }, + ManagementMessageAction::Open => { + let new_stream = format!("{GOBBLER_STREAM_PREFIX}-{}", mm.change_set_id); + let stream_already_exists = environment.create_stream(&new_stream).await?; + + // Only create the new stream if it does not already exist. 
+ if !stream_already_exists { + let consumer = + Consumer::new(&environment, &new_stream, ConsumerOffsetSpecification::Next) + .await?; + let handle = consumer.handle(); + gobbler_handles.insert(mm.change_set_id, (new_stream.clone(), handle)); + + tokio::spawn(gobbler_loop_infallible_wrapper(consumer)); } - } - }) - .await; -} - -#[instrument( -name = "execute_job_task", -skip_all, -fields( -job.id = request.payload.id, -job.instance = metadata.job_instance, -job.invoked_name = request.payload.kind, -job.invoked_args = Empty, -job.invoked_provider = metadata.job_invoked_provider, -job.trigger = "pubsub", -messaging.destination = Empty, -messaging.destination_kind = "topic", -messaging.operation = "process", -otel.kind = % FormattedSpanKind(SpanKind::Consumer), -otel.name = Empty, -otel.status_code = Empty, -otel.status_message = Empty, -) -)] -async fn execute_job_task( - metadata: Arc, - messaging_destination: Arc, - ctx_builder: DalContextBuilder, - request: Request, -) { - let span = Span::current(); - let id = request.payload.id.clone(); - - let arg_str = serde_json::to_string(&request.payload.arg) - .unwrap_or_else(|_| "arg failed to serialize".to_string()); - span.record("job.invoked_arg", arg_str); - span.record("messaging.destination", messaging_destination.as_str()); - span.record( - "otel.name", - format!("{} process", &messaging_destination).as_str(), - ); - - let maybe_reply_channel = request.reply_mailbox.clone(); - let reply_message = match execute_job( - &metadata, - messaging_destination, - ctx_builder.clone(), - request, - ) - .await - { - Ok(_) => { - span.record_ok(); - Ok(()) + // Return the requested stream and then close the producer. 
+ let mut producer = Producer::for_reply(&environment, &new_stream, reply_to).await?; + producer.send_single(new_stream, None).await?; + producer.close().await?; + } } - Err(err) => { - error!( - error = ?err, - job.invocation_id = %id, - job.instance = &metadata.job_instance, - "job execution failed" - ); - let new_err = Err(BlockingJobError::JobExecution(err.to_string())); - span.record_err(err); - - new_err + } + + for (_, (stream, handle)) in gobbler_handles.drain() { + if let Err(e) = handle.close().await { + error!("{e}"); } - }; - - if let Some(reply_channel) = maybe_reply_channel { - if let Ok(message) = serde_json::to_vec(&reply_message) { - if let Err(err) = ctx_builder - .nats_conn() - .publish(reply_channel, message) - .await - { - error!(error = ?err, "Unable to notify spawning job of blocking job completion"); - }; + if let Err(e) = environment.delete_stream(stream).await { + error!("{e}") } } -} - -async fn execute_job( - _metadata: &Arc, - _messaging_destination: Arc, - mut ctx_builder: DalContextBuilder, - request: Request, -) -> Result<()> { - let (job_info, _) = request.into_parts(); - if job_info.blocking { - ctx_builder.set_blocking(); + if let Err(e) = management_handle.close().await { + error!("{e}"); } + Ok(()) +} - let current_span = tracing::Span::current(); - if !current_span.is_disabled() { - tracing::Span::current().record("job_info.id", &job_info.id); - tracing::Span::current().record("job_info.kind", &job_info.kind); - let arg_str = serde_json::to_string(&job_info.arg)?; - tracing::Span::current().record("job_info.arg", arg_str); - tracing::Span::current().record( - "job_info.access_builder", - serde_json::to_string(&job_info.access_builder)?, - ); - tracing::Span::current().record( - "job_info.visibility", - serde_json::to_string(&job_info.visibility)?, - ); - tracing::Span::current().record("job_info.blocking", job_info.blocking); +async fn gobbler_loop_infallible_wrapper(consumer: Consumer) { + if let Err(e) = 
gobbler_loop(consumer).await { + dbg!(e); } +} - let job = - match job_info.kind.as_str() { - stringify!(DependentValuesUpdate) => { - Box::new(DependentValuesUpdate::try_from(job_info.clone())?) - as Box - } - stringify!(FixesJob) => Box::new(FixesJob::try_from(job_info.clone())?) - as Box, - stringify!(RefreshJob) => Box::new(RefreshJob::try_from(job_info.clone())?) - as Box, - kind => return Err(ServerError::UnknownJobKind(kind.to_owned())), - }; - - info!("Processing job"); - - if let Err(err) = job.run_job(ctx_builder.clone()).await { - // The missing part is this, should we execute subsequent jobs if the one they depend on fail or not? - record_job_failure(ctx_builder, job, err).await?; +async fn gobbler_loop(mut consumer: Consumer) -> ServerResult<()> { + // Create an environment for reply streams. + let environment = Environment::new().await?; + while let Some(delivery) = consumer.next().await? { + if let Some(reply_to) = delivery.reply_to { + let mut producer = + Producer::for_reply(&environment, consumer.stream(), reply_to).await?; + + // ----------------------------------------- + // TODO(nick): this is where the fun begins. + // 1) succeed everywhere + // 2) store offset with changeset + // 3) update requester stream w/out waiting for reply + // ----------------------------------------- + + // TODO(nick): for now, just send back the message. Unwrapping is fine because we know + // that it must have content. 
+ producer + .send_single(delivery.message_contents.unwrap(), None) + .await?; + producer.close().await?; + } } - - info!("Finished processing job"); - Ok(()) } -async fn record_job_failure( - ctx_builder: DalContextBuilder, - job: Box, - err: JobConsumerError, -) -> Result<()> { - warn!(error = ?err, "job execution failed, recording a job failure to the database"); - - let access_builder = job.access_builder(); - let visibility = job.visibility(); - let ctx = ctx_builder.build(access_builder.build(visibility)).await?; - - JobFailure::new(&ctx, job.type_name(), err.to_string()).await?; - - ctx.commit().await?; - - Err(err.into()) -} - fn prepare_graceful_shutdown( mut external_shutdown_rx: mpsc::Receiver, shutdown_watch_tx: watch::Sender<()>, -) -> Result> { +) -> ServerResult> { // A oneshot channel signaling the start of a graceful shutdown. Receivers can use this to // perform an clean/graceful shutdown work that needs to happen to preserve server integrity. let (graceful_shutdown_tx, graceful_shutdown_rx) = oneshot::channel::<()>(); diff --git a/lib/gobbler-server/tests/integration_test/mod.rs b/lib/gobbler-server/tests/integration_test/mod.rs index d8748899ab..6fa1f6955a 100644 --- a/lib/gobbler-server/tests/integration_test/mod.rs +++ b/lib/gobbler-server/tests/integration_test/mod.rs @@ -1,2 +1 @@ mod connection; -mod queue; diff --git a/lib/gobbler-server/tests/integration_test/queue.rs b/lib/gobbler-server/tests/integration_test/queue.rs deleted file mode 100644 index 2a4dadafef..0000000000 --- a/lib/gobbler-server/tests/integration_test/queue.rs +++ /dev/null @@ -1,107 +0,0 @@ -use dal::DalContext; -use pretty_assertions_sorted::assert_eq; -use si_rabbitmq::{Consumer, Environment, Producer}; -use si_test_macros::gobbler_test as test; - -#[test] -async fn produce(_ctx: &DalContext) { - let stream = "test-produce"; - let environment = Environment::new().await.expect("could not connect"); - - // FIXME(nick): add stream setup to test macro. 
- environment - .delete_stream(stream) - .await - .expect("could not delete stream"); - environment - .create_stream(stream) - .await - .expect("could not create stream"); - - let mut producer = Producer::new(&environment, "producer", stream) - .await - .expect("could not create producer"); - producer - .send_single("foo") - .await - .expect("could not singe message"); - producer - .send_batch(vec!["bar".as_bytes(), "baz".as_bytes()]) - .await - .expect("could not send message batch"); - producer.close().await.expect("could not close producer"); -} - -#[test] -async fn consume(_ctx: &DalContext) { - let stream = "test-consume"; - let environment = Environment::new().await.expect("could not connect"); - - // FIXME(nick): add stream setup to test macro. - environment - .delete_stream(stream) - .await - .expect("could not delete stream"); - environment - .create_stream(stream) - .await - .expect("could not create stream"); - - let mut producer = Producer::new(&environment, "producer", stream) - .await - .expect("could not create producer"); - producer - .send_single("foo") - .await - .expect("could not singe message"); - producer - .send_batch(vec!["bar".as_bytes(), "baz".as_bytes()]) - .await - .expect("could not send message batch"); - producer.close().await.expect("could not close producer"); - - let mut consumer = Consumer::new(&environment, stream) - .await - .expect("could not create consumer"); - let handle = consumer.handle(); - - // Grab the three deliveries that we expect. 
- let delivery = consumer - .next() - .await - .expect("could not consume next delivery") - .expect("no delivery to consume") - .expect("consumer delivery error"); - let data = consumer - .process_delivery(&delivery) - .expect("could not process delivery") - .expect("no data in message"); - assert_eq!("foo", &data); - let delivery = consumer - .next() - .await - .expect("could not consume next delivery") - .expect("no delivery to consume") - .expect("consumer delivery error"); - let data = consumer - .process_delivery(&delivery) - .expect("could not process delivery") - .expect("no data in message"); - assert_eq!("bar", &data); - let delivery = consumer - .next() - .await - .expect("could not consume next delivery") - .expect("no delivery to consume") - .expect("consumer delivery error"); - let data = consumer - .process_delivery(&delivery) - .expect("could not process delivery") - .expect("no data in message"); - assert_eq!("baz", &data); - - handle - .close() - .await - .expect("could not close the consumer associated to this hangler"); -} diff --git a/lib/si-rabbitmq/BUCK b/lib/si-rabbitmq/BUCK index 18b9005d0e..c4d4bb980a 100644 --- a/lib/si-rabbitmq/BUCK +++ b/lib/si-rabbitmq/BUCK @@ -3,19 +3,14 @@ load("@prelude-si//:macros.bzl", "rust_library") rust_library( name = "si-rabbitmq", deps = [ - "//lib/si-data-nats:si-data-nats", - "//lib/si-test-macros:si-test-macros", "//lib/telemetry-rs:telemetry", "//third-party/rust:futures", - "//third-party/rust:futures-lite", - "//third-party/rust:pin-project-lite", "//third-party/rust:rabbitmq-stream-client", "//third-party/rust:remain", "//third-party/rust:serde", "//third-party/rust:serde_json", "//third-party/rust:thiserror", "//third-party/rust:tokio", - "//third-party/rust:ulid", ], srcs = glob(["src/**/*.rs"]), ) diff --git a/lib/si-rabbitmq/Cargo.toml b/lib/si-rabbitmq/Cargo.toml index b6deedf7e9..297ca0091b 100644 --- a/lib/si-rabbitmq/Cargo.toml +++ b/lib/si-rabbitmq/Cargo.toml @@ -6,15 +6,10 @@ publish = false 
[dependencies] futures = { workspace = true } -futures-lite = { workspace = true } -pin-project-lite = { workspace = true } rabbitmq-stream-client = { workspace = true } remain = { workspace = true } serde = { workspace = true } serde_json = { workspace = true } -si-data-nats = { path = "../../lib/si-data-nats" } -si-test-macros = { path = "../../lib/si-test-macros" } telemetry = { path = "../../lib/telemetry-rs" } thiserror = { workspace = true } tokio = { workspace = true } -ulid = { workspace = true } diff --git a/lib/si-rabbitmq/src/consumer.rs b/lib/si-rabbitmq/src/consumer.rs index 18c8ade0d7..56d92df29a 100644 --- a/lib/si-rabbitmq/src/consumer.rs +++ b/lib/si-rabbitmq/src/consumer.rs @@ -1,48 +1,62 @@ -use crate::environment::Environment; use futures::StreamExt; -use rabbitmq_stream_client::error::ConsumerDeliveryError; -use rabbitmq_stream_client::types::{Delivery, Message, OffsetSpecification}; -use rabbitmq_stream_client::{Consumer as UpstreamConsumer, ConsumerHandle}; +use rabbitmq_stream_client::types::OffsetSpecification; +use rabbitmq_stream_client::{ + Consumer as UpstreamConsumer, ConsumerHandle as UpstreamConsumerHandle, +}; use telemetry::prelude::*; use tokio::task; +use crate::Delivery; +use crate::Environment; use crate::RabbitResult; +/// A type alias to the upstream [`ConsumerHandle`](rabbitmq_stream_client::ConsumerHandle). +pub type ConsumerHandle = UpstreamConsumerHandle; + +/// A type alias to the upstream [`OffsetSpecification`](OffsetSpecification). +pub type ConsumerOffsetSpecification = OffsetSpecification; + /// An interface for consuming RabbitMQ stream messages. #[allow(missing_debug_implementations)] pub struct Consumer { + stream: String, inner: UpstreamConsumer, } impl Consumer { /// Creates a new [`Consumer`] for consuming RabbitMQ stream messages. 
- pub async fn new(environment: &Environment, stream: &str) -> RabbitResult { + pub async fn new( + environment: &Environment, + stream: impl Into, + offset_specification: ConsumerOffsetSpecification, + ) -> RabbitResult { + let stream = stream.into(); let inner = environment .inner() .consumer() - .offset(OffsetSpecification::First) - .build(stream) + .offset(offset_specification) + .build(&stream) .await?; - Ok(Self { inner }) + Ok(Self { stream, inner }) } - pub async fn next(&mut self) -> RabbitResult>> { - Ok(self.inner.next().await) + /// A wrapper around the upstream stream polling implementation. + pub async fn next(&mut self) -> RabbitResult> { + if let Some(unprocessed_delivery) = self.inner.next().await { + let delivery = unprocessed_delivery?; + return Ok(Some(Delivery::try_from(delivery)?)); + } + Ok(None) } + /// Provides a [`ConsumerHandle`]. pub fn handle(&self) -> ConsumerHandle { self.inner.handle() } - pub fn process_delivery(&self, delivery: &Delivery) -> RabbitResult> { - let maybe_data = delivery - .message() - .data() - .map(|data| String::from_utf8(data.to_vec())); - Ok(match maybe_data { - Some(data) => Some(data?), - None => None, - }) + /// Returns the stream name for the [`Consumer`](Consumer). + pub fn stream(&self) -> &String { + &self.stream } } diff --git a/lib/si-rabbitmq/src/delivery.rs b/lib/si-rabbitmq/src/delivery.rs new file mode 100644 index 0000000000..8287cb9d08 --- /dev/null +++ b/lib/si-rabbitmq/src/delivery.rs @@ -0,0 +1,37 @@ +use rabbitmq_stream_client::types::Delivery as UpstreamDelivery; +use serde_json::Value; + +use crate::RabbitError; + +/// This type is a deconstruction of the upstream +/// [`Delivery`](rabbitmq_stream_client::types::Delivery) type. +#[derive(Debug)] +pub struct Delivery { + /// The contents of the message. + pub message_contents: Option, + /// The contents of the "reply_to" field from the message properties. 
+ pub reply_to: Option, +} + +impl TryFrom for Delivery { + type Error = RabbitError; + + fn try_from(value: UpstreamDelivery) -> Result { + let message = value.message(); + + let contents: Option = match message.data() { + Some(data) => serde_json::from_slice(data)?, + None => None, + }; + + let reply_to = match message.properties() { + Some(properties) => properties.reply_to.clone(), + None => None, + }; + + Ok(Self { + message_contents: contents, + reply_to, + }) + } +} diff --git a/lib/si-rabbitmq/src/environment.rs b/lib/si-rabbitmq/src/environment.rs index 3e87055598..8c1828c461 100644 --- a/lib/si-rabbitmq/src/environment.rs +++ b/lib/si-rabbitmq/src/environment.rs @@ -1,9 +1,11 @@ -use rabbitmq_stream_client::error::StreamDeleteError; +use rabbitmq_stream_client::error::{StreamCreateError, StreamDeleteError}; use rabbitmq_stream_client::types::{ByteCapacity, ResponseCode}; use rabbitmq_stream_client::Environment as UpstreamEnvironment; use crate::error::RabbitResult; +const STREAM_LENGTH_CAPACTIY_IN_MEGABYTES: u64 = 10; + /// A connection to a RabbitMQ node. #[allow(missing_debug_implementations)] pub struct Environment { @@ -28,23 +30,37 @@ impl Environment { &self.inner } - pub async fn create_stream(&self, stream: impl AsRef) -> RabbitResult<()> { - Ok(self + /// Attempts to create the stream and returns a boolean indicates if the stream was actually + /// created (i.e. "false" if it already exists). + pub async fn create_stream(&self, stream: impl AsRef) -> RabbitResult { + match self .inner .stream_creator() - .max_length(ByteCapacity::KB(400)) + .max_length(ByteCapacity::MB(STREAM_LENGTH_CAPACTIY_IN_MEGABYTES)) .create(stream.as_ref()) - .await?) 
+ .await + { + Ok(()) => Ok(false), + Err(e) => match e { + StreamCreateError::Create { + status: ResponseCode::StreamAlreadyExists, + stream: _, + } => Ok(true), + e => Err(e.into()), + }, + } } - pub async fn delete_stream(&self, stream: impl AsRef) -> RabbitResult<()> { + /// Attempts to delete the stream and returns a boolean indicates if the stream was actually + /// deleted (i.e. "false" if it does not currently exist). + pub async fn delete_stream(&self, stream: impl AsRef) -> RabbitResult { match self.inner.delete_stream(stream.as_ref()).await { - Ok(()) => Ok(()), + Ok(()) => Ok(true), Err(e) => match e { StreamDeleteError::Delete { status: ResponseCode::StreamDoesNotExist, stream: _, - } => Ok(()), + } => Ok(false), e => Err(e.into()), }, } diff --git a/lib/si-rabbitmq/src/error.rs b/lib/si-rabbitmq/src/error.rs index 3f660b9321..434a701154 100644 --- a/lib/si-rabbitmq/src/error.rs +++ b/lib/si-rabbitmq/src/error.rs @@ -1,6 +1,7 @@ use rabbitmq_stream_client::error::{ - ClientError, ConsumerCloseError, ConsumerCreateError, ProducerCloseError, ProducerCreateError, - ProducerPublishError, StreamCreateError, StreamDeleteError, + ClientError, ConsumerCloseError, ConsumerCreateError, ConsumerDeliveryError, + ProducerCloseError, ProducerCreateError, ProducerPublishError, StreamCreateError, + StreamDeleteError, }; use std::string::FromUtf8Error; use thiserror::Error; @@ -15,6 +16,8 @@ pub enum RabbitError { ConsumerClose(#[from] ConsumerCloseError), #[error("consumer create error: {0}")] ConsumerCreate(#[from] ConsumerCreateError), + #[error("consumer delivery error: {0}")] + ConsumerDelivery(#[from] ConsumerDeliveryError), #[error("from utf-8 error: {0}")] FromUtf8(#[from] FromUtf8Error), #[error("producer close error: {0}")] @@ -25,6 +28,8 @@ pub enum RabbitError { ProducerCreate(#[from] ProducerCreateError), #[error("producer publish error: {0}")] ProducerPublish(#[from] ProducerPublishError), + #[error("serde_json error: {0}")] + SerdeJson(#[from] 
serde_json::Error), #[error("stream create error: {0}")] StreamCreate(#[from] StreamCreateError), #[error("stream delete error: {0}")] diff --git a/lib/si-rabbitmq/src/lib.rs b/lib/si-rabbitmq/src/lib.rs index 137001ff94..709352fd84 100644 --- a/lib/si-rabbitmq/src/lib.rs +++ b/lib/si-rabbitmq/src/lib.rs @@ -24,12 +24,74 @@ )] mod consumer; +mod delivery; mod environment; mod error; mod producer; pub use consumer::Consumer; +pub use consumer::ConsumerHandle; +pub use consumer::ConsumerOffsetSpecification; +pub use delivery::Delivery; pub use environment::Environment; pub use error::RabbitError; pub use error::RabbitResult; pub use producer::Producer; + +#[cfg(test)] +mod tests { + use super::*; + use rabbitmq_stream_client::types::OffsetSpecification; + use tokio::test; + + #[test] + async fn round_trip() { + let environment = Environment::new() + .await + .expect("could not create environment"); + + let stream = "test-stream"; + + environment + .delete_stream(stream) + .await + .expect("could not delete stream"); + environment + .create_stream(stream) + .await + .expect("could not create stream"); + + let mut producer = Producer::new(&environment, "producer", stream) + .await + .expect("could not create producer"); + + let mut consumer = Consumer::new(&environment, stream, OffsetSpecification::Next) + .await + .expect("could not create consumer"); + + let message = "starfield"; + producer + .send_single(message, None) + .await + .expect("could not send message"); + + let delivery = consumer + .next() + .await + .expect("could not consume") + .expect("empty delivery"); + let found_contents: String = + serde_json::from_value(delivery.message_contents.expect("message contents empty")) + .expect("could not deserialize"); + + assert_eq!(message, &found_contents); + + producer.close().await.expect("could not close producer"); + let handle = consumer.handle(); + handle.close().await.expect("could not close consumer"); + environment + .delete_stream(stream) + .await + 
.expect("could not delete stream"); + } +} diff --git a/lib/si-rabbitmq/src/producer.rs b/lib/si-rabbitmq/src/producer.rs index 014d48ffc0..44baf38874 100644 --- a/lib/si-rabbitmq/src/producer.rs +++ b/lib/si-rabbitmq/src/producer.rs @@ -1,7 +1,6 @@ use rabbitmq_stream_client::types::Message; -use rabbitmq_stream_client::{Dedup, NoDedup, Producer as UpstreamProducer}; -use telemetry::prelude::warn; -use tokio::task; +use rabbitmq_stream_client::{Dedup, Producer as UpstreamProducer}; +use serde::Serialize; use crate::environment::Environment; use crate::{RabbitError, RabbitResult}; @@ -32,34 +31,46 @@ impl Producer { }) } - /// Sends a single message to a stream. - pub async fn send_single(&mut self, message: impl Into>) -> RabbitResult<()> { - if self.closed { - return Err(RabbitError::ProducerClosed); - } - self.inner - .send_with_confirm(Message::builder().body(message).build()) - .await?; - Ok(()) + /// Creates a new [`Producer`] for replying to the sender from an inbound stream. + pub async fn for_reply( + environment: &Environment, + inbound_stream: impl AsRef, + reply_to_stream: impl AsRef, + ) -> RabbitResult { + let inbound_stream = inbound_stream.as_ref(); + let reply_to_stream = reply_to_stream.as_ref(); + Self::new( + &environment, + format!("{inbound_stream}-reply-{reply_to_stream}"), + reply_to_stream, + ) + .await } - /// Sends a batch of messages to a stream. - pub async fn send_batch(&mut self, messages: Vec>>) -> RabbitResult<()> { + /// Sends a single message to a stream. 
+ pub async fn send_single( + &mut self, + input: T, + reply_to: Option, + ) -> RabbitResult<()> { if self.closed { return Err(RabbitError::ProducerClosed); } - self.inner - .batch_send_with_confirm( - messages - .into_iter() - .map(|m| Message::builder().body(m.into()).build()) - .collect(), - ) - .await?; + let value = serde_json::to_value(input)?; + let mut message_builder = Message::builder().body(serde_json::to_vec(&value)?); + if let Some(reply_to) = reply_to { + message_builder = message_builder + .properties() + .reply_to(reply_to) + .message_builder(); + } + let message = message_builder.build(); + + self.inner.send_with_confirm(message).await?; Ok(()) } - // Closes the producer connection and renders the producer unusable. + /// Closes the producer connection and renders the producer unusable. pub async fn close(mut self) -> RabbitResult<()> { self.inner.close().await?; self.closed = true; From 86b95b69689eb9961f9c93477665f9fa061bdc2d Mon Sep 17 00:00:00 2001 From: Nick Gerace Date: Wed, 6 Sep 2023 12:30:08 -0400 Subject: [PATCH 17/92] Fix "buck2 build //lib/..." for gobbler Signed-off-by: Nick Gerace --- lib/gobbler-client/BUCK | 1 + 1 file changed, 1 insertion(+) diff --git a/lib/gobbler-client/BUCK b/lib/gobbler-client/BUCK index 5a739aa551..ac1969b699 100644 --- a/lib/gobbler-client/BUCK +++ b/lib/gobbler-client/BUCK @@ -10,6 +10,7 @@ rust_library( "//third-party/rust:serde", "//third-party/rust:serde_json", "//third-party/rust:thiserror", + "//third-party/rust:tokio", "//third-party/rust:ulid", ], srcs = glob([ From 7297def7d65e332608b8db28b2d6967b72aafcbb Mon Sep 17 00:00:00 2001 From: Nick Gerace Date: Wed, 6 Sep 2023 18:59:30 -0400 Subject: [PATCH 18/92] Rename gobbler to rebaser Rename gobbler to rebaser to avoid any unintentional connotations as well as to provide clarity to the service's purpose. 
Signed-off-by: Nick Gerace --- .ci/docker-compose.test-integration.yml | 2 +- Cargo.lock | 1807 ++++++++++------- Cargo.toml | 6 +- bin/{gobbler => rebaser}/BUCK | 8 +- bin/{gobbler => rebaser}/Cargo.toml | 6 +- bin/{gobbler => rebaser}/Dockerfile | 6 +- bin/{gobbler => rebaser}/src/args.rs | 6 +- bin/{gobbler => rebaser}/src/main.rs | 8 +- component/postgres/BUCK | 2 +- dev/docker-compose.platform.yml | 2 +- lib/{gobbler-client => rebaser-client}/BUCK | 4 +- .../Cargo.toml | 4 +- .../src/client.rs | 28 +- .../src/lib.rs | 18 +- lib/{gobbler-server => rebaser-server}/BUCK | 2 +- .../Cargo.toml | 2 +- .../src/config.rs | 2 +- .../src/lib.rs | 18 +- .../src/server.rs | 52 +- .../tests/integration.rs | 2 +- .../tests/integration_test/connection.rs | 2 +- .../tests/integration_test/mod.rs | 0 lib/si-test-macros/src/lib.rs | 4 +- 23 files changed, 1128 insertions(+), 863 deletions(-) rename bin/{gobbler => rebaser}/BUCK (76%) rename bin/{gobbler => rebaser}/Cargo.toml (76%) rename bin/{gobbler => rebaser}/Dockerfile (90%) rename bin/{gobbler => rebaser}/src/args.rs (94%) rename bin/{gobbler => rebaser}/src/main.rs (89%) rename lib/{gobbler-client => rebaser-client}/BUCK (85%) rename lib/{gobbler-client => rebaser-client}/Cargo.toml (82%) rename lib/{gobbler-client => rebaser-client}/src/client.rs (87%) rename lib/{gobbler-client => rebaser-client}/src/lib.rs (85%) rename lib/{gobbler-server => rebaser-server}/BUCK (98%) rename lib/{gobbler-server => rebaser-server}/Cargo.toml (97%) rename lib/{gobbler-server => rebaser-server}/src/config.rs (98%) rename lib/{gobbler-server => rebaser-server}/src/lib.rs (71%) rename lib/{gobbler-server => rebaser-server}/src/server.rs (91%) rename lib/{gobbler-server => rebaser-server}/tests/integration.rs (75%) rename lib/{gobbler-server => rebaser-server}/tests/integration_test/connection.rs (92%) rename lib/{gobbler-server => rebaser-server}/tests/integration_test/mod.rs (100%) diff --git a/.ci/docker-compose.test-integration.yml 
b/.ci/docker-compose.test-integration.yml index 2adb66b52a..4197b04ede 100644 --- a/.ci/docker-compose.test-integration.yml +++ b/.ci/docker-compose.test-integration.yml @@ -20,7 +20,7 @@ services: - "PGPASSWORD=bugbear" - "POSTGRES_USER=si" - "POSTGRES_DB=si" - - "POSTGRES_MULTIPLE_DBS=si_auth,si_module_index,si_test,si_test_dal,si_test_sdf_server,si_test_gobbler" + - "POSTGRES_MULTIPLE_DBS=si_auth,si_module_index,si_test,si_test_dal,si_test_sdf_server,si_test_rebaser" nats: image: systeminit/nats:stable diff --git a/Cargo.lock b/Cargo.lock index e0c49d557f..5119864ccc 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -25,31 +25,32 @@ checksum = "f26201604c87b1e01bd3d98f8d5d9a8fcbb815e8cedb41ffccbeb4bf593a35fe" [[package]] name = "ahash" -version = "0.7.6" +version = "0.7.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fcb51a0695d8f838b1ee009b3fbf66bda078cd64590202a864a8f3e8c4315c47" +checksum = "5a824f2aa7e75a0c98c5a504fceb80649e9c35265d44525b5f94de4771a395cd" dependencies = [ - "getrandom 0.2.10", + "getrandom 0.2.11", "once_cell", "version_check", ] [[package]] name = "ahash" -version = "0.8.3" +version = "0.8.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2c99f64d1e06488f620f932677e24bc6e2897582980441ae90a671415bd7ec2f" +checksum = "91429305e9f0a25f6205c5b8e0d2db09e0708a7a6df0f42212bb56c32c8ac97a" dependencies = [ "cfg-if", "once_cell", "version_check", + "zerocopy", ] [[package]] name = "aho-corasick" -version = "1.0.4" +version = "1.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6748e8def348ed4d14996fa801f4122cd763fff530258cdc03f64b25f89d3a5a" +checksum = "b2969dcb958b36655471fc61f7e416fa76033bdd4bfed0678d8fee1e2d07a1f0" dependencies = [ "memchr", ] @@ -83,51 +84,50 @@ dependencies = [ [[package]] name = "anstream" -version = "0.3.2" +version = "0.6.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"0ca84f3628370c59db74ee214b3263d58f9aadd9b4fe7e711fd87dc452b7f163" +checksum = "d664a92ecae85fd0a7392615844904654d1d5f5514837f471ddef4a057aba1b6" dependencies = [ "anstyle", "anstyle-parse", "anstyle-query", "anstyle-wincon", "colorchoice", - "is-terminal", "utf8parse", ] [[package]] name = "anstyle" -version = "1.0.2" +version = "1.0.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "15c4c2c83f81532e5845a733998b6971faca23490340a418e9b72a3ec9de12ea" +checksum = "7079075b41f533b8c61d2a4d073c4676e1f8b249ff94a393b0595db304e0dd87" [[package]] name = "anstyle-parse" -version = "0.2.1" +version = "0.2.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "938874ff5980b03a87c5524b3ae5b59cf99b1d6bc836848df7bc5ada9643c333" +checksum = "c75ac65da39e5fe5ab759307499ddad880d724eed2f6ce5b5e8a26f4f387928c" dependencies = [ "utf8parse", ] [[package]] name = "anstyle-query" -version = "1.0.0" +version = "1.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5ca11d4be1bab0c8bc8734a9aa7bf4ee8316d462a08c6ac5052f888fef5b494b" +checksum = "e28923312444cdd728e4738b3f9c9cac739500909bb3d3c94b43551b16517648" dependencies = [ - "windows-sys 0.48.0", + "windows-sys 0.52.0", ] [[package]] name = "anstyle-wincon" -version = "1.0.2" +version = "3.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c677ab05e09154296dd37acecd46420c17b9713e8366facafa8fc0885167cf4c" +checksum = "1cd54b81ec8d6180e24654d0b371ad22fc3dd083b6ff8ba325b72e00c87660a7" dependencies = [ "anstyle", - "windows-sys 0.48.0", + "windows-sys 0.52.0", ] [[package]] @@ -154,18 +154,18 @@ version = "0.31.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8257238e2a3629ee5618502a75d1b91f8017c24638c75349fc8d2d80cf1f7c4c" dependencies = [ - "base64 0.21.2", - "bytes 1.4.0", + "base64 0.21.5", + "bytes 1.5.0", "futures", "http", "itoa", "memchr", - "nkeys 0.3.1", + "nkeys 0.3.2", "nuid", 
"once_cell", "rand 0.8.5", "regex", - "ring", + "ring 0.16.20", "rustls-native-certs", "rustls-pemfile", "rustls-webpki", @@ -174,7 +174,7 @@ dependencies = [ "serde_nanos", "serde_repr", "thiserror", - "time 0.3.27", + "time", "tokio", "tokio-retry", "tokio-rustls 0.24.1", @@ -184,13 +184,13 @@ dependencies = [ [[package]] name = "async-recursion" -version = "1.0.4" +version = "1.0.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0e97ce7de6cf12de5d7226c73f5ba9811622f4db3a5b91b55c53e987e5f91cba" +checksum = "5fd55a5ba1179988837d24ab4c7cc8ed6efdeff578ede0416b4225a5fca35bd0" dependencies = [ "proc-macro2", "quote", - "syn 2.0.29", + "syn 2.0.40", ] [[package]] @@ -212,18 +212,18 @@ checksum = "16e62a023e7c117e27523144c5d2459f4397fcc3cab0085af8e2224f643a0193" dependencies = [ "proc-macro2", "quote", - "syn 2.0.29", + "syn 2.0.40", ] [[package]] name = "async-trait" -version = "0.1.73" +version = "0.1.74" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bc00ceb34980c03614e35a3a4e218276a0a824e911d07651cd0d858a51e8c0f0" +checksum = "a66537f1bb974b254c98ed142ff995236e81b9d0fe4db0575f46612cb15eb0f9" dependencies = [ "proc-macro2", "quote", - "syn 2.0.29", + "syn 2.0.40", ] [[package]] @@ -232,7 +232,7 @@ version = "0.6.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4057f2c32adbb2fc158e22fb38433c8e9bbf76b75a4732c7c0cbaf695fb65568" dependencies = [ - "bytes 1.4.0", + "bytes 1.5.0", "futures-sink", "futures-util", "memchr", @@ -262,7 +262,7 @@ checksum = "1fcf00bc6d5abb29b5f97e3c61a90b6d3caa12f3faf897d4a3e3607c050a35a7" dependencies = [ "http", "log", - "rustls 0.20.8", + "rustls 0.20.9", "serde", "serde_json", "url", @@ -305,15 +305,15 @@ dependencies = [ "rust-ini", "serde", "thiserror", - "time 0.3.27", + "time", "url", ] [[package]] name = "aws-region" -version = "0.25.3" +version = "0.25.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"056557a61427d0e5ba29dd931031c8ffed4ee7a550e7cd55692a9d8deb0a9dba" +checksum = "42fed2b9fca70f2908268d057a607f2a906f47edbf856ea8587de9038d264e22" dependencies = [ "thiserror", ] @@ -327,9 +327,9 @@ dependencies = [ "async-trait", "axum-core", "axum-macros", - "base64 0.21.2", + "base64 0.21.5", "bitflags 1.3.2", - "bytes 1.4.0", + "bytes 1.5.0", "futures-util", "http", "http-body", @@ -349,7 +349,7 @@ dependencies = [ "sha1", "sync_wrapper", "tokio", - "tokio-tungstenite 0.20.0", + "tokio-tungstenite 0.20.1", "tower", "tower-layer", "tower-service", @@ -362,7 +362,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "759fa577a247914fd3f7f76d62972792636412fbfd634cd452f6a385a74d2d2c" dependencies = [ "async-trait", - "bytes 1.4.0", + "bytes 1.5.0", "futures-util", "http", "http-body", @@ -381,7 +381,7 @@ dependencies = [ "heck 0.4.1", "proc-macro2", "quote", - "syn 2.0.29", + "syn 2.0.40", ] [[package]] @@ -426,9 +426,9 @@ checksum = "9e1b586273c5702936fe7b7d6896644d8be71e6314cfe09d3167c95f712589e8" [[package]] name = "base64" -version = "0.21.2" +version = "0.21.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "604178f6c5c21f02dc555784810edfb88d34ac2c73b2eae109655649ee73ce3d" +checksum = "35636a1494ede3b646cc98f74f8e62c773a38a659ebc777a2cf26b9b74171df9" [[package]] name = "base64ct" @@ -461,9 +461,9 @@ checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a" [[package]] name = "bitflags" -version = "2.4.0" +version = "2.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b4682ae6287fcf752ecaabbfcc7b6f9b72aa33933dc23a554d853aea8eea8635" +checksum = "327762f6e5a765692301e5bb513e0d9fef63be86bbc14528052b1cd3e6f03e07" [[package]] name = "bitvec" @@ -479,16 +479,15 @@ dependencies = [ [[package]] name = "blake3" -version = "1.4.1" +version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"199c42ab6972d92c9f8995f086273d25c42fc0f7b2a1fcefba465c1352d25ba5" +checksum = "0231f06152bf547e9c2b5194f247cd97aacf6dcd8b15d8e5ec0663f64580da87" dependencies = [ "arrayref", "arrayvec", "cc", "cfg-if", "constant_time_eq", - "digest 0.10.7", ] [[package]] @@ -515,9 +514,9 @@ version = "0.15.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f03db470b3c0213c47e978da93200259a1eb4dae2e5512cba9955e2b540a6fc6" dependencies = [ - "base64 0.21.2", + "base64 0.21.5", "bollard-stubs", - "bytes 1.4.0", + "bytes 1.5.0", "futures-core", "futures-util", "hex", @@ -546,59 +545,38 @@ checksum = "b58071e8fd9ec1e930efd28e3a90c1251015872a2ce49f81f36421b86466932e" dependencies = [ "serde", "serde_repr", - "serde_with 3.3.0", + "serde_with 3.4.0", ] [[package]] name = "borsh" -version = "0.10.3" +version = "1.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4114279215a005bc675e386011e594e1d9b800918cea18fcadadcce864a2046b" +checksum = "9897ef0f1bd2362169de6d7e436ea2237dc1085d7d1e4db75f4be34d86f309d1" dependencies = [ "borsh-derive", - "hashbrown 0.13.2", + "cfg_aliases", ] [[package]] name = "borsh-derive" -version = "0.10.3" +version = "1.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0754613691538d51f329cce9af41d7b7ca150bc973056f1156611489475f54f7" -dependencies = [ - "borsh-derive-internal", - "borsh-schema-derive-internal", - "proc-macro-crate 0.1.5", - "proc-macro2", - "syn 1.0.109", -] - -[[package]] -name = "borsh-derive-internal" -version = "0.10.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "afb438156919598d2c7bad7e1c0adf3d26ed3840dbc010db1a882a65583ca2fb" -dependencies = [ - "proc-macro2", - "quote", - "syn 1.0.109", -] - -[[package]] -name = "borsh-schema-derive-internal" -version = "0.10.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "634205cc43f74a1b9046ef87c4540ebda95696ec0f315024860cad7c5b0f5ccd" +checksum 
= "478b41ff04256c5c8330f3dfdaaae2a5cc976a8e75088bafa4625b0d0208de8c" dependencies = [ + "once_cell", + "proc-macro-crate 2.0.0", "proc-macro2", "quote", - "syn 1.0.109", + "syn 2.0.40", + "syn_derive", ] [[package]] name = "bstr" -version = "1.6.0" +version = "1.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6798148dccfbff0fae41c7574d2fa8f1ef3492fba0face179de5d8d447d67b05" +checksum = "542f33a8835a0884b006a0c3df3dadd99c0c3f296ed26c2fdc8028e01ad6230c" dependencies = [ "memchr", "serde", @@ -614,9 +592,9 @@ dependencies = [ [[package]] name = "bumpalo" -version = "3.13.0" +version = "3.14.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a3e2c3daef883ecc1b5d58c15adae93470a91d425f3532ba1695849656af3fc1" +checksum = "7f30e7476521f6f8af1a1c4c0b8cc94f0bee37d91763d0ca2665f299b6cd8aec" [[package]] name = "bytecheck" @@ -642,9 +620,9 @@ dependencies = [ [[package]] name = "byteorder" -version = "1.4.3" +version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "14c189c53d098945499cdfa7ecc63567cf3886b3332b312a5b4585d8d3a6a610" +checksum = "1fd0f2584146f6f2ef48085050886acf353beff7305ebd1ae69500e27c67f64b" [[package]] name = "bytes" @@ -654,9 +632,9 @@ checksum = "0e4cec68f03f32e44924783795810fa50a7035d8c8ebe78580ad7e6c703fba38" [[package]] name = "bytes" -version = "1.4.0" +version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "89b2fd2a0dcf38d7971e2194b6b6eebab45ae01067456a7fd93d5547a61b70be" +checksum = "a2bd12c1caf447e69cd4528f47f94d203fd2582878ecb9e9465484c4148a8223" dependencies = [ "serde", ] @@ -665,7 +643,7 @@ dependencies = [ name = "bytes-lines-codec" version = "0.1.0" dependencies = [ - "bytes 1.4.0", + "bytes 1.5.0", "futures", "serde", "tokio", @@ -689,20 +667,25 @@ version = "1.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" +[[package]] +name = "cfg_aliases" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fd16c4719339c4530435d38e511904438d07cce7950afa3718a84ac36c10e89e" + [[package]] name = "chrono" -version = "0.4.26" +version = "0.4.31" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ec837a71355b28f6556dbd569b37b3f363091c0bd4b2e735674521b4c5fd9bc5" +checksum = "7f2c685bad3eb3d45a01354cedb7d5faa66194d1d58ba6e267a8de788f79db38" dependencies = [ "android-tzdata", "iana-time-zone", "js-sys", "num-traits", "serde", - "time 0.1.45", "wasm-bindgen", - "winapi", + "windows-targets 0.48.5", ] [[package]] @@ -734,20 +717,19 @@ dependencies = [ [[package]] name = "clap" -version = "4.3.24" +version = "4.4.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fb690e81c7840c0d7aade59f242ea3b41b9bc27bcd5997890e7702ae4b32e487" +checksum = "bfaff671f6b22ca62406885ece523383b9b64022e341e53e009a62ebc47a45f2" dependencies = [ "clap_builder", "clap_derive", - "once_cell", ] [[package]] name = "clap_builder" -version = "4.3.24" +version = "4.4.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5ed2e96bc16d8d740f6f48d663eddf4b8a0983e79210fd55479b7bcd0a69860e" +checksum = "a216b506622bb1d316cd51328dce24e07bdff4a6128a47c7e7fad11878d5adbb" dependencies = [ "anstream", "anstyle", @@ -758,27 +740,27 @@ dependencies = [ [[package]] name = "clap_derive" -version = "4.3.12" +version = "4.4.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "54a9bb5758fc5dfe728d1019941681eccaf0cf8a4189b692a0ee2f2ecf90a050" +checksum = "cf9804afaaf59a91e75b022a30fb7229a7901f60c755489cc61c9b423b836442" dependencies = [ "heck 0.4.1", "proc-macro2", "quote", - "syn 2.0.29", + "syn 2.0.40", ] [[package]] name = "clap_lex" -version = "0.5.0" +version = "0.6.0" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "2da6da31387c7e4ef160ffab6d5e7f00c42626fe39aea70a7b0f1773f7dd6c1b" +checksum = "702fc72eb24e5a1e48ce58027a675bc24edd52096d5397d4aea7c6dd9eca0bd1" [[package]] name = "coarsetime" -version = "0.1.23" +version = "0.1.33" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a90d114103adbc625300f346d4d09dfb4ab1c4a8df6868435dd903392ecf4354" +checksum = "71367d3385c716342014ad17e3d19f7788ae514885a1f4c24f500260fb365e1a" dependencies = [ "libc", "once_cell", @@ -803,9 +785,9 @@ dependencies = [ [[package]] name = "color-spantrace" -version = "0.2.0" +version = "0.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1ba75b3d9449ecdccb27ecbc479fdc0b87fa2dd43d2f8298f9bf0e59aacc8dce" +checksum = "cd6be1b2a7e382e2b98b43b2adcca6bb0e465af0bdd38123873ae61eb17a72c2" dependencies = [ "once_cell", "owo-colors", @@ -821,33 +803,32 @@ checksum = "acbf1af155f9b9ef647e42cdc158db4b64a1b61f743629225fde6f3e0be2a7c7" [[package]] name = "colored" -version = "2.0.4" +version = "2.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2674ec482fbc38012cf31e6c42ba0177b431a0cb6f15fe40efa5aab1bda516f6" +checksum = "cbf2150cce219b664a8a70df7a1f933836724b503f8a413af9365b4dcc4d90b8" dependencies = [ - "is-terminal", "lazy_static", "windows-sys 0.48.0", ] [[package]] name = "comfy-table" -version = "7.0.1" +version = "7.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9ab77dbd8adecaf3f0db40581631b995f312a8a5ae3aa9993188bb8f23d83a5b" +checksum = "7c64043d6c7b7a4c58e39e7efccfdea7b93d885a795d0c054a69dbbf4dd52686" dependencies = [ "console", - "crossterm 0.26.1", - "strum", - "strum_macros", + "crossterm 0.27.0", + "strum 0.25.0", + "strum_macros 0.25.3", "unicode-width", ] [[package]] name = "config" -version = "0.13.3" +version = "0.13.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"d379af7f68bfc21714c6c7dea883544201741d2ce8274bb12fa54f89507f52a7" +checksum = "23738e11972c7643e4ec947840fc463b6a571afcd3e735bdfce7d03c7a784aca" dependencies = [ "async-trait", "lazy_static", @@ -872,7 +853,7 @@ dependencies = [ "serde_yaml", "thiserror", "tokio", - "toml 0.7.6", + "toml 0.7.8", "tracing", ] @@ -972,9 +953,9 @@ dependencies = [ [[package]] name = "core-foundation" -version = "0.9.3" +version = "0.9.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "194a7a9e6de53fa55116934067c844d9d749312f75c6f6d0980e8c252f8c2146" +checksum = "91e195e091a93c46f7102ec7818a2aa394e1e1771c3ab4825963fa03e45afb8f" dependencies = [ "core-foundation-sys", "libc", @@ -982,9 +963,9 @@ dependencies = [ [[package]] name = "core-foundation-sys" -version = "0.8.4" +version = "0.8.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e496a50fda8aacccc86d7529e2c1e0892dbd0f898a6b5645b5561b89c3210efa" +checksum = "06ea2b9bc92be3c2baa9334a323ebca2d6f074ff852cd1d7b11064035cd3868f" [[package]] name = "council" @@ -1016,9 +997,9 @@ dependencies = [ [[package]] name = "cpufeatures" -version = "0.2.9" +version = "0.2.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a17b76ff3a4162b0b27f354a0c87015ddad39d35f9c0c36607a3bdd175dde1f1" +checksum = "ce420fe07aecd3e67c5f910618fe65e94158f6dcc0adf44e00d69ce2bdfe0fd0" dependencies = [ "libc", ] @@ -1042,6 +1023,30 @@ dependencies = [ "crossbeam-utils", ] +[[package]] +name = "crossbeam-deque" +version = "0.8.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ce6fd6f855243022dcecf8702fef0c297d4338e226845fe067f6341ad9fa0cef" +dependencies = [ + "cfg-if", + "crossbeam-epoch", + "crossbeam-utils", +] + +[[package]] +name = "crossbeam-epoch" +version = "0.9.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ae211234986c545741a7dc064309f67ee1e5ad243d0e48335adc0484d960bcc7" +dependencies = [ + "autocfg", + 
"cfg-if", + "crossbeam-utils", + "memoffset 0.9.0", + "scopeguard", +] + [[package]] name = "crossbeam-queue" version = "0.3.8" @@ -1079,17 +1084,14 @@ dependencies = [ [[package]] name = "crossterm" -version = "0.26.1" +version = "0.27.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a84cda67535339806297f1b331d6dd6320470d2a0fe65381e79ee9e156dd3d13" +checksum = "f476fe445d41c9e991fd07515a6f463074b782242ccf4a5b7b1d1012e70824df" dependencies = [ - "bitflags 1.3.2", + "bitflags 2.4.1", "crossterm_winapi", "libc", - "mio", "parking_lot 0.12.1", - "signal-hook", - "signal-hook-mio", "winapi", ] @@ -1104,9 +1106,9 @@ dependencies = [ [[package]] name = "crypto-bigint" -version = "0.5.2" +version = "0.5.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cf4c2f4e1afd912bc40bfd6fed5d9dc1f288e0ba01bfcc835cc5bc3eb13efe15" +checksum = "0dc92fb57ca44df6db8059111ab3af99a63d5d0f8375d9972e319a379c6bab76" dependencies = [ "generic-array", "rand_core 0.6.4", @@ -1143,6 +1145,33 @@ dependencies = [ "zeroize", ] +[[package]] +name = "curve25519-dalek" +version = "4.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e89b8c6a2e4b1f45971ad09761aafb85514a84744b67a95e32c3cc1352d1f65c" +dependencies = [ + "cfg-if", + "cpufeatures", + "curve25519-dalek-derive", + "digest 0.10.7", + "fiat-crypto", + "platforms", + "rustc_version", + "subtle", +] + +[[package]] +name = "curve25519-dalek-derive" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f46882e17999c6cc590af592290432be3bce0428cb0d5f8b6715e4dc7b383eb3" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.40", +] + [[package]] name = "cyclone" version = "0.1.0" @@ -1159,7 +1188,7 @@ name = "cyclone-client" version = "0.1.0" dependencies = [ "async-trait", - "base64 0.21.2", + "base64 0.21.5", "buck2-resources", "cyclone-core", "cyclone-server", @@ -1186,8 +1215,8 @@ dependencies = [ name = 
"cyclone-core" version = "0.1.0" dependencies = [ - "base64 0.21.2", - "nix 0.26.2", + "base64 0.21.5", + "nix 0.26.4", "remain", "serde", "serde_json", @@ -1206,7 +1235,7 @@ version = "0.1.0" dependencies = [ "async-trait", "axum", - "base64 0.21.2", + "base64 0.21.5", "bytes-lines-codec", "chrono", "cyclone-core", @@ -1236,7 +1265,7 @@ version = "0.1.0" dependencies = [ "async-recursion", "async-trait", - "base64 0.21.2", + "base64 0.21.5", "blake3", "buck2-resources", "chrono", @@ -1250,7 +1279,7 @@ dependencies = [ "futures", "hex", "iftree", - "itertools", + "itertools 0.10.5", "jwt-simple", "lazy_static", "nats-subscriber", @@ -1267,14 +1296,14 @@ dependencies = [ "serde", "serde-aux", "serde_json", - "serde_with 3.3.0", + "serde_with 3.4.0", "si-crypto", "si-data-nats", "si-data-pg", "si-hash", "si-pkg", "sodiumoxide", - "strum", + "strum 0.24.1", "telemetry", "tempfile", "thiserror", @@ -1363,7 +1392,7 @@ dependencies = [ "proc-macro2", "quote", "strsim", - "syn 2.0.29", + "syn 2.0.40", ] [[package]] @@ -1385,7 +1414,7 @@ checksum = "836a9bbc7ad63342d6d6e7b815ccab164bc77a2d95d84bc3117a8c0d5c98e2d5" dependencies = [ "darling_core 0.20.3", "quote", - "syn 2.0.29", + "syn 2.0.40", ] [[package]] @@ -1397,27 +1426,27 @@ dependencies = [ "anyhow", "html-escape", "nom", - "ordered-float 2.10.0", + "ordered-float 2.10.1", ] [[package]] name = "dashmap" -version = "5.5.1" +version = "5.5.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "edd72493923899c6f10c641bdbdeddc7183d6396641d99c1a0d1597f37f92e28" +checksum = "978747c1d849a7d2ee5e8adc0159961c48fb7e5db2f06af6723b80123bb53856" dependencies = [ "cfg-if", - "hashbrown 0.14.0", + "hashbrown 0.14.3", "lock_api", "once_cell", - "parking_lot_core 0.9.8", + "parking_lot_core 0.9.9", ] [[package]] name = "data-encoding" -version = "2.4.0" +version = "2.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"c2e66c9d817f1720209181c316d28635c050fa304f9c79e47a520882661b7308" +checksum = "7e962a19be5cfc3f3bf6dd8f61eb50107f356ad6270fbb3ed41476571db78be5" [[package]] name = "deadpool" @@ -1444,7 +1473,7 @@ dependencies = [ "deadpool", "derive_builder", "futures", - "nix 0.26.2", + "nix 0.26.4", "rand 0.8.5", "remain", "serde", @@ -1473,9 +1502,9 @@ dependencies = [ [[package]] name = "deadpool-runtime" -version = "0.1.2" +version = "0.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eaa37046cc0f6c3cc6090fbdbf73ef0b8ef4cfcc37f6befc0020f63e8cf121e1" +checksum = "63dfa964fe2a66f3fde91fc70b267fe193d822c7e603e2a675a49a7f46ad3f49" dependencies = [ "tokio", ] @@ -1513,10 +1542,11 @@ dependencies = [ [[package]] name = "deranged" -version = "0.3.8" +version = "0.3.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f2696e8a945f658fd14dc3b87242e6b80cd0f36ff04ea560fa39082368847946" +checksum = "8eb30d70a07a3b04884d2677f06bec33509dc67ca60d92949e5535352d3191dc" dependencies = [ + "powerfmt", "serde", ] @@ -1646,7 +1676,7 @@ dependencies = [ "asynchronous-codec", "base64 0.13.1", "byteorder", - "bytes 1.4.0", + "bytes 1.5.0", "chrono", "containers-api 0.9.0", "docker-api-stubs", @@ -1681,22 +1711,22 @@ checksum = "1aaf95b3e5c8f23aa320147307562d361db0ae0d51242340f558153b4eb2439b" [[package]] name = "dyn-clone" -version = "1.0.13" +version = "1.0.16" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bbfc4744c1b8f2a09adc0e55242f60b1af195d88596bd8700be74418c056c555" +checksum = "545b22097d44f8a9581187cdf93de7a71e4722bf51200cfaba810865b49a495d" [[package]] name = "ecdsa" -version = "0.16.8" +version = "0.16.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a4b1e0c257a9e9f25f90ff76d7a68360ed497ee519c8e428d1825ef0000799d4" +checksum = "ee27f32b5c5292967d2d4a9d7f1e0b0aed2c15daded5a60300e4abb9d8020bca" dependencies = [ "der 0.7.8", "digest 0.10.7", "elliptic-curve", 
"rfc6979", - "signature 2.1.0", - "spki 0.7.2", + "signature 2.2.0", + "spki 0.7.3", ] [[package]] @@ -1708,6 +1738,15 @@ dependencies = [ "signature 1.6.4", ] +[[package]] +name = "ed25519" +version = "2.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "115531babc129696a58c64a4fef0a8bf9e9698629fb97e9e40767d235cfbcd53" +dependencies = [ + "signature 2.2.0", +] + [[package]] name = "ed25519-compact" version = "2.0.4" @@ -1715,7 +1754,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6a3d382e8464107391c8706b4c14b087808ecb909f6c15c34114bc42e53a9e4c" dependencies = [ "ct-codecs", - "getrandom 0.2.10", + "getrandom 0.2.11", ] [[package]] @@ -1724,17 +1763,30 @@ version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c762bae6dcaf24c4c84667b8579785430908723d5c889f469d76a41d59cc7a9d" dependencies = [ - "curve25519-dalek", - "ed25519", + "curve25519-dalek 3.2.0", + "ed25519 1.5.3", "sha2 0.9.9", "zeroize", ] +[[package]] +name = "ed25519-dalek" +version = "2.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1f628eaec48bfd21b865dc2950cfa014450c01d2fa2b69a86c2fd5844ec523c0" +dependencies = [ + "curve25519-dalek 4.1.1", + "ed25519 2.2.3", + "sha2 0.10.8", + "signature 2.2.0", + "subtle", +] + [[package]] name = "educe" -version = "0.4.22" +version = "0.4.23" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "079044df30bb07de7d846d41a184c4b00e66ebdac93ee459253474f3a47e50ae" +checksum = "0f0042ff8246a363dbe77d2ceedb073339e85a804b9a47636c6e016a9a32c05f" dependencies = [ "enum-ordinalize", "proc-macro2", @@ -1750,9 +1802,9 @@ checksum = "a26ae43d7bcc3b814de94796a5e736d4029efb0ee900c12e2d54c993ad1a1e07" [[package]] name = "elliptic-curve" -version = "0.13.5" +version = "0.13.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "968405c8fdc9b3bf4df0a6638858cc0b52462836ab6b1c87377785dd09cf1c0b" 
+checksum = "b5e6043086bf7973472e0c7dff2142ea0b680d30e18d9cc40f267efbf222bd47" dependencies = [ "base16ct", "crypto-bigint", @@ -1786,15 +1838,15 @@ dependencies = [ [[package]] name = "enum-ordinalize" -version = "3.1.13" +version = "3.1.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e4f76552f53cefc9a7f64987c3701b99d982f7690606fd67de1d09712fbf52f1" +checksum = "1bf1fa3f06bbff1ea5b1a9c7b14aa992a39657db60a2759457328d7e058f49ee" dependencies = [ "num-bigint", "num-traits", "proc-macro2", "quote", - "syn 2.0.29", + "syn 2.0.40", ] [[package]] @@ -1805,23 +1857,12 @@ checksum = "5443807d6dff69373d433ab9ef5378ad8df50ca6298caf15de6e52e24aaf54d5" [[package]] name = "errno" -version = "0.3.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6b30f669a7961ef1631673d2766cc92f52d64f7ef354d4fe0ddfd30ed52f0f4f" -dependencies = [ - "errno-dragonfly", - "libc", - "windows-sys 0.48.0", -] - -[[package]] -name = "errno-dragonfly" -version = "0.1.2" +version = "0.3.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "aa68f1b12764fab894d2755d2518754e71b4fd80ecfb822714a1206c2aab39bf" +checksum = "a258e46cdc063eb8519c00b9fc845fc47bcfca4130e2f08e88665ceda8474245" dependencies = [ - "cc", "libc", + "windows-sys 0.52.0", ] [[package]] @@ -1832,9 +1873,9 @@ checksum = "0206175f82b8d6bf6652ff7d71a1e27fd2e4efde587fd368662814d6ec1d9ce0" [[package]] name = "eyre" -version = "0.6.8" +version = "0.6.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4c2b6b5a29c02cdc822728b7d7b8ae1bab3e3b05d44522770ddd49722eeac7eb" +checksum = "8bbb8258be8305fb0237d7b295f47bb24ff1b136a535f473baf40e70468515aa" dependencies = [ "indenter", "once_cell", @@ -1857,9 +1898,9 @@ dependencies = [ [[package]] name = "fastrand" -version = "2.0.0" +version = "2.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"6999dc1837253364c2ebb0704ba97994bd874e8f195d665c50b7548f6ea92764" +checksum = "25cbce373ec4653f1a01a31e8a5e5ec0c622dc27ff9c4e6606eefef5cbbed4a5" [[package]] name = "ff" @@ -1871,18 +1912,30 @@ dependencies = [ "subtle", ] +[[package]] +name = "fiat-crypto" +version = "0.2.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "27573eac26f4dd11e2b1916c3fe1baa56407c83c71a773a8ba17ec0bca03b6b7" + [[package]] name = "filetime" -version = "0.2.22" +version = "0.2.23" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d4029edd3e734da6fe05b6cd7bd2960760a616bd2ddd0d59a0124746d6272af0" +checksum = "1ee447700ac8aa0b2f2bd7bc4462ad686ba06baa6727ac149a2d6277f0d240fd" dependencies = [ "cfg-if", "libc", - "redox_syscall 0.3.5", - "windows-sys 0.48.0", + "redox_syscall 0.4.1", + "windows-sys 0.52.0", ] +[[package]] +name = "finl_unicode" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8fcfdc7a0362c9f4444381a9e697c79d435fe65b52a37466fc2c1184cee9edc6" + [[package]] name = "fixedbitset" version = "0.4.2" @@ -1891,9 +1944,9 @@ checksum = "0ce7134b9999ecaf8bcd65542e436736ef32ddca1b3e06094cb6ec5755203b80" [[package]] name = "flate2" -version = "1.0.27" +version = "1.0.28" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c6c98ee8095e9d1dcbf2fcc6d95acccb90d1c81db1e44725c6a984b1dbdfb010" +checksum = "46303f565772937ffe1d394a4fac6f411c6013172fadde9dcdb1e147a086940e" dependencies = [ "crc32fast", "miniz_oxide", @@ -1907,9 +1960,9 @@ checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1" [[package]] name = "form_urlencoded" -version = "1.2.0" +version = "1.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a62bc1cf6f830c2ec14a513a9fb124d0a213a629668a4186f329db21fe045652" +checksum = "e13624c2627564efccf4934284bdd98cbaa14e79b0b5a141218e507b3a823456" dependencies = [ "percent-encoding", ] @@ -1922,9 +1975,9 @@ 
checksum = "e6d5a32815ae3f33302d95fdcb2ce17862f8c65363dcfd29360480ba1001fc9c" [[package]] name = "futures" -version = "0.3.28" +version = "0.3.29" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "23342abe12aba583913b2e62f22225ff9c950774065e4bfb61a19cd9770fec40" +checksum = "da0290714b38af9b4a7b094b8a37086d1b4e61f2df9122c3cad2577669145335" dependencies = [ "futures-channel", "futures-core", @@ -1953,9 +2006,9 @@ checksum = "eb1d22c66e66d9d72e1758f0bd7d4fd0bee04cad842ee34587d68c07e45d088c" [[package]] name = "futures-executor" -version = "0.3.28" +version = "0.3.29" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ccecee823288125bd88b4d7f565c9e58e41858e47ab72e8ea2d64e93624386e0" +checksum = "0f4fb8693db0cf099eadcca0efe2a5a22e4550f98ed16aba6c48700da29597bc" dependencies = [ "futures-core", "futures-task", @@ -2002,7 +2055,7 @@ checksum = "53b153fd91e4b0147f4aced87be237c98248656bb01050b96bf3ee89220a8ddb" dependencies = [ "proc-macro2", "quote", - "syn 2.0.29", + "syn 2.0.40", ] [[package]] @@ -2073,9 +2126,9 @@ dependencies = [ [[package]] name = "getrandom" -version = "0.2.10" +version = "0.2.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "be4136b2a15dd319360be1c07d9933517ccf0be8f16bf62a3bee4f0d618df427" +checksum = "fe9006bed769170c11f845cf00c7c1e9092aeb3f268e007c3e760ac68008070f" dependencies = [ "cfg-if", "libc", @@ -2084,77 +2137,21 @@ dependencies = [ [[package]] name = "gimli" -version = "0.28.0" +version = "0.28.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6fb8d784f27acf97159b40fc4db5ecd8aa23b9ad5ef69cdd136d3bc80665f0c0" +checksum = "4271d37baee1b8c7e4b708028c57d816cf9d2434acb33a549475f78c181f6253" [[package]] name = "globset" -version = "0.4.13" +version = "0.4.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "759c97c1e17c55525b57192c06a267cda0ac5210b222d6b82189a2338fa1c13d" +checksum = 
"57da3b9b5b85bd66f31093f8c408b90a74431672542466497dcbdfdc02034be1" dependencies = [ "aho-corasick", "bstr", - "fnv", "log", - "regex", -] - -[[package]] -name = "gobbler" -version = "0.1.0" -dependencies = [ - "clap", - "color-eyre", - "gobbler-server", - "telemetry-application", - "tokio", -] - -[[package]] -name = "gobbler-client" -version = "0.1.0" -dependencies = [ - "gobbler-server", - "remain", - "serde", - "serde_json", - "si-rabbitmq", - "telemetry", - "thiserror", - "tokio", - "ulid", -] - -[[package]] -name = "gobbler-server" -version = "0.1.0" -dependencies = [ - "buck2-resources", - "dal", - "dal-test", - "derive_builder", - "futures", - "nats-subscriber", - "pretty_assertions_sorted", - "remain", - "serde", - "serde_json", - "si-crypto", - "si-data-nats", - "si-data-pg", - "si-rabbitmq", - "si-settings", - "si-std", - "si-test-macros", - "stream-cancel", - "telemetry", - "thiserror", - "tokio", - "tokio-stream", - "ulid", - "veritech-client", + "regex-automata 0.4.3", + "regex-syntax 0.8.2", ] [[package]] @@ -2170,17 +2167,17 @@ dependencies = [ [[package]] name = "h2" -version = "0.3.21" +version = "0.3.22" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "91fc23aa11be92976ef4729127f1a74adf36d8436f7816b185d18df956790833" +checksum = "4d6250322ef6e60f93f9a2162799302cd6f68f79f6e5d85c8c16f14d1d958178" dependencies = [ - "bytes 1.4.0", + "bytes 1.5.0", "fnv", "futures-core", "futures-sink", "futures-util", "http", - "indexmap 1.9.3", + "indexmap 2.1.0", "slab", "tokio", "tokio-util", @@ -2199,35 +2196,26 @@ version = "0.12.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8a9ee70c43aaf417c914396645a0fa852624801b24ebb7ae78fe8272889ac888" dependencies = [ - "ahash 0.7.6", + "ahash 0.7.7", ] [[package]] name = "hashbrown" -version = "0.13.2" +version = "0.14.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"43a3c133739dddd0d2990f9a4bdf8eb4b21ef50e4851ca85ab661199821d510e" +checksum = "290f1a1d9242c78d09ce40a5e87e7554ee637af1351968159f4952f028f75604" dependencies = [ - "ahash 0.8.3", -] - -[[package]] -name = "hashbrown" -version = "0.14.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2c6201b9ff9fd90a5a3bac2e56a830d0caa509576f0e503818ee82c181b3437a" -dependencies = [ - "ahash 0.8.3", + "ahash 0.8.6", "allocator-api2", ] [[package]] name = "hashlink" -version = "0.8.3" +version = "0.8.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "312f66718a2d7789ffef4f4b7b213138ed9f1eb3aa1d0d82fc99f88fb3ffd26f" +checksum = "e8094feaf31ff591f651a2664fb9cfd92bba7a60ce3197265e9482ebe753c8f7" dependencies = [ - "hashbrown 0.14.0", + "hashbrown 0.14.3", ] [[package]] @@ -2250,9 +2238,9 @@ dependencies = [ [[package]] name = "hermit-abi" -version = "0.3.2" +version = "0.3.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "443144c8cdadd93ebf52ddb4056d257f5b52c04d3c804e657d19eb73fc33668b" +checksum = "d77f7ec81a6d05a3abb01ab6eb7590f6083d08449fe5a1c8b1e620283546ccb7" [[package]] name = "hex" @@ -2302,6 +2290,15 @@ dependencies = [ "digest 0.10.7", ] +[[package]] +name = "home" +version = "0.5.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5444c27eef6923071f7ebcc33e3444508466a76f7a2b93da00ed6e19f30c1ddb" +dependencies = [ + "windows-sys 0.48.0", +] + [[package]] name = "html-escape" version = "0.2.13" @@ -2313,22 +2310,22 @@ dependencies = [ [[package]] name = "http" -version = "0.2.9" +version = "0.2.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bd6effc99afb63425aff9b05836f029929e345a6148a14b7ecd5ab67af944482" +checksum = "8947b1a6fad4393052c7ba1f4cd97bed3e953a95c79c92ad9b051a04611d9fbb" dependencies = [ - "bytes 1.4.0", + "bytes 1.5.0", "fnv", "itoa", ] [[package]] name = "http-body" -version = "0.4.5" +version = "0.4.6" 
source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d5f38f16d184e36f2408a55281cd658ecbd3ca05cce6d6510a176eca393e26d1" +checksum = "7ceab25649e9960c0311ea418d17bee82c0dcec1bd053b5f9a66e265a693bed2" dependencies = [ - "bytes 1.4.0", + "bytes 1.5.0", "http", "pin-project-lite", ] @@ -2357,7 +2354,7 @@ version = "0.14.27" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ffb1cfd654a8219eaef89881fdb3bb3b1cdc5fa75ded05d6933b2b382e395468" dependencies = [ - "bytes 1.4.0", + "bytes 1.5.0", "futures-channel", "futures-core", "futures-util", @@ -2368,7 +2365,7 @@ dependencies = [ "httpdate", "itoa", "pin-project-lite", - "socket2 0.4.9", + "socket2 0.4.10", "tokio", "tower-service", "tracing", @@ -2377,14 +2374,14 @@ dependencies = [ [[package]] name = "hyper-rustls" -version = "0.24.1" +version = "0.24.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8d78e1e73ec14cf7375674f74d7dde185c8206fd9dea6fb6295e8a98098aaa97" +checksum = "ec3efd23720e2049821a693cbc7e65ea87c72f1c58ff2f9522ff332b1491e590" dependencies = [ "futures-util", "http", "hyper", - "rustls 0.21.6", + "rustls 0.21.10", "tokio", "tokio-rustls 0.24.1", ] @@ -2415,16 +2412,16 @@ dependencies = [ [[package]] name = "iana-time-zone" -version = "0.1.57" +version = "0.1.58" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2fad5b825842d2b38bd206f3e81d6957625fd7f0a361e345c30e01a0ae2dd613" +checksum = "8326b86b6cff230b97d0d312a6c40a60726df3332e721f72a1b035f451663b20" dependencies = [ "android_system_properties", "core-foundation-sys", "iana-time-zone-haiku", "js-sys", "wasm-bindgen", - "windows", + "windows-core", ] [[package]] @@ -2444,9 +2441,9 @@ checksum = "b9e0384b61958566e926dc50660321d12159025e767c18e043daf26b70104c39" [[package]] name = "idna" -version = "0.4.0" +version = "0.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"7d20d6b07bfbc108882d88ed8e37d39636dcc260e15e30c45e6ba089610b917c" +checksum = "634d9b1461af396cad843f47fdba5597a4f9e6ddd4bfb6ff5d85028c25cb12f6" dependencies = [ "unicode-bidi", "unicode-normalization", @@ -2463,23 +2460,22 @@ dependencies = [ "quote", "serde", "syn 1.0.109", - "toml 0.7.6", + "toml 0.7.8", "unicode-xid", ] [[package]] name = "ignore" -version = "0.4.20" +version = "0.4.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dbe7873dab538a9a44ad79ede1faf5f30d49f9a5c883ddbab48bce81b64b7492" +checksum = "747ad1b4ae841a78e8aba0d63adbfbeaea26b517b63705d47856b73015d27060" dependencies = [ + "crossbeam-deque", "globset", - "lazy_static", "log", "memchr", - "regex", + "regex-automata 0.4.3", "same-file", - "thread_local", "walkdir", "winapi-util", ] @@ -2503,20 +2499,20 @@ dependencies = [ [[package]] name = "indexmap" -version = "2.0.0" +version = "2.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d5477fe2230a79769d8dc68e0eabf5437907c0457a5614a9e8dddb67f65eb65d" +checksum = "d530e1a18b1cb4c484e6e34556a0d948706958449fca0cab753d649f2bce3d1f" dependencies = [ "equivalent", - "hashbrown 0.14.0", + "hashbrown 0.14.3", "serde", ] [[package]] name = "indicatif" -version = "0.17.6" +version = "0.17.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0b297dc40733f23a0e52728a58fa9489a5b7638a324932de16b41adc3ef80730" +checksum = "fb28741c9db9a713d93deb3bb9515c20788cef5815265bee4980e87bde7e0f25" dependencies = [ "console", "instant", @@ -2527,9 +2523,9 @@ dependencies = [ [[package]] name = "indoc" -version = "2.0.3" +version = "2.0.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2c785eefb63ebd0e33416dfcb8d6da0bf27ce752843a45632a67bf10d4d4b5c4" +checksum = "1e186cfbae8084e513daff4240b4797e342f988cecda4fb6c939150f96315fd8" [[package]] name = "inquire" @@ -2556,22 +2552,11 @@ dependencies = [ "cfg-if", ] -[[package]] -name = "io-lifetimes" 
-version = "1.0.11" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eae7b9aee968036d54dce06cebaefd919e4472e753296daccd6d344e3e2df0c2" -dependencies = [ - "hermit-abi", - "libc", - "windows-sys 0.48.0", -] - [[package]] name = "ipnet" -version = "2.8.0" +version = "2.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "28b29a3cd74f0f4598934efe3aeba42bae0eb4680554128851ebbecb02af14e6" +checksum = "8f518f335dce6725a761382244631d86cf0ccb2863413590b31338feb467f9c3" [[package]] name = "is-docker" @@ -2582,17 +2567,6 @@ dependencies = [ "once_cell", ] -[[package]] -name = "is-terminal" -version = "0.4.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cb0889898416213fab133e1d33a0e5858a48177452750691bde3666d0fdbaf8b" -dependencies = [ - "hermit-abi", - "rustix 0.38.8", - "windows-sys 0.48.0", -] - [[package]] name = "is-wsl" version = "0.4.0" @@ -2612,26 +2586,35 @@ dependencies = [ "either", ] +[[package]] +name = "itertools" +version = "0.12.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "25db6b064527c5d482d0423354fcd07a89a2dfe07b67892e62411946db7f07b0" +dependencies = [ + "either", +] + [[package]] name = "itoa" -version = "1.0.9" +version = "1.0.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "af150ab688ff2122fcef229be89cb50dd66af9e01a4ff320cc137eecc9bacc38" +checksum = "b1a46d1a171d865aa5f83f92695765caa047a9b4cbae2cbf37dbd613a793fd4c" [[package]] name = "js-sys" -version = "0.3.64" +version = "0.3.66" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c5f195fe497f702db0f318b07fdd68edb16955aed830df8363d837542f8f935a" +checksum = "cee9c64da59eae3b50095c18d3e74f8b73c0b86d2792824ff01bbce68ba229ca" dependencies = [ "wasm-bindgen", ] [[package]] name = "jwt-simple" -version = "0.11.6" +version = "0.11.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"733741e7bcd1532b56c9ba6c698c069f274f3782ad956f0d2c7f31650cedaa1b" +checksum = "357892bb32159d763abdea50733fadcb9a8e1c319a9aa77592db8555d05af83e" dependencies = [ "anyhow", "binstring", @@ -2655,16 +2638,16 @@ dependencies = [ [[package]] name = "k256" -version = "0.13.1" +version = "0.13.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cadb76004ed8e97623117f3df85b17aaa6626ab0b0831e6573f104df16cd1bcc" +checksum = "3f01b677d82ef7a676aa37e099defd83a28e15687112cafdd112d60236b6115b" dependencies = [ "cfg-if", "ecdsa", "elliptic-curve", "once_cell", - "sha2 0.10.7", - "signature 2.1.0", + "sha2 0.10.8", + "signature 2.2.0", ] [[package]] @@ -2687,15 +2670,26 @@ dependencies = [ [[package]] name = "libc" -version = "0.2.147" +version = "0.2.151" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b4668fb0ea861c1df094127ac5f1da3409a82116a4ba74fca2e58ef927159bb3" +checksum = "302d7ab3130588088d277783b1e2d2e10c9e9e4a16dd9050e6ec93fb3e7048f4" [[package]] name = "libm" -version = "0.2.7" +version = "0.2.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f7012b1bbb0719e1097c47611d3898568c546d597c2e74d66f6087edd5233ff4" +checksum = "4ec2a862134d2a7d32d7983ddcdd1c4923530833c9f2ea1a44fc5fa473989058" + +[[package]] +name = "libredox" +version = "0.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "85c833ca1e66078851dba29046874e38f08b2c883700aa29a03ddd3b23814ee8" +dependencies = [ + "bitflags 2.4.1", + "libc", + "redox_syscall 0.4.1", +] [[package]] name = "libsodium-sys" @@ -2717,21 +2711,15 @@ checksum = "0717cef1bc8b636c6e1c1bbdefc09e6322da8a9321966e8928ef80d20f7f770f" [[package]] name = "linux-raw-sys" -version = "0.3.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ef53942eb7bf7ff43a617b3e2c1c4a5ecf5944a7c1bc12d7ee39bbb15e5c1519" - -[[package]] -name = "linux-raw-sys" -version = "0.4.5" +version = "0.4.12" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "57bcfdad1b858c2db7c38303a6d2ad4dfaf5eb53dfeb0910128b2c26d6158503" +checksum = "c4cd1a83af159aa67994778be9070f0ae1bd732942279cabb14f86f986a21456" [[package]] name = "lock_api" -version = "0.4.10" +version = "0.4.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c1cc9717a20b1bb222f333e6a92fd32f7d8a18ddc5a3191a11af45dcbf4dcd16" +checksum = "3c168f8615b12bc01f9c17e2eb0cc07dcae1940121185446edc3744920e8ef45" dependencies = [ "autocfg", "scopeguard", @@ -2754,9 +2742,9 @@ dependencies = [ [[package]] name = "matchit" -version = "0.7.2" +version = "0.7.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ed1202b2a6f884ae56f04cff409ab315c5ce26b5e58d7412e484f01fd52f52ef" +checksum = "0e7465ac9959cc2b1404e8e2367b43684a6d13790fe23056cc8c6c5a6b7bcb94" [[package]] name = "maybe-async" @@ -2771,10 +2759,11 @@ dependencies = [ [[package]] name = "md-5" -version = "0.10.5" +version = "0.10.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6365506850d44bff6e2fbcb5176cf63650e48bd45ef2fe2665ae1570e0f4b9ca" +checksum = "d89e7ee0cfbedfc4da3340218492196241d89eefb6dab27de5df917a6d2e78cf" dependencies = [ + "cfg-if", "digest 0.10.7", ] @@ -2786,9 +2775,9 @@ checksum = "490cc448043f947bae3cbee9c203358d62dbee0db12107a74be5c30ccfd09771" [[package]] name = "memchr" -version = "2.5.0" +version = "2.6.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2dffe52ecf27772e601905b7522cb4ef790d2cc203488bbd0e2fe85fcb74566d" +checksum = "f665ee40bc4a3c5590afb1e9677db74a508659dfd71e126420da8274909a0167" [[package]] name = "memmap2" @@ -2818,6 +2807,15 @@ dependencies = [ "autocfg", ] +[[package]] +name = "memoffset" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5a634b1c61a95585bd15607c6ab0c4e5b226e695ff2800ba0cdccddf208c406c" +dependencies = [ + "autocfg", +] + 
[[package]] name = "mime" version = "0.3.17" @@ -2851,9 +2849,9 @@ dependencies = [ [[package]] name = "mio" -version = "0.8.8" +version = "0.8.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "927a765cd3fc26206e66b296465fa9d3e5ab003e651c1b3c060e7956d96b19d2" +checksum = "8f3d0b296e374a4e6f3c7b0a1f5a51d748a0d34c85e7dc48fc3fa9a87657fe09" dependencies = [ "libc", "log", @@ -2895,7 +2893,7 @@ version = "0.1.0" dependencies = [ "auth-api-client", "axum", - "base64 0.21.2", + "base64 0.21.5", "buck2-resources", "chrono", "derive_builder", @@ -2932,7 +2930,7 @@ version = "2.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "01acbdc23469fd8fe07ab135923371d5f5a422fbf9c522158677c8eb15bc51c2" dependencies = [ - "bytes 1.4.0", + "bytes 1.5.0", "encoding_rs", "futures-util", "http", @@ -2998,16 +2996,15 @@ dependencies = [ [[package]] name = "nix" -version = "0.26.2" +version = "0.26.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bfdda3d196821d6af13126e40375cdf7da646a96114af134d5f417a9a1dc8e1a" +checksum = "598beaf3cc6fdd9a5dfb1630c2800c7acd31df7aaf0f565796fba2b53ca1af1b" dependencies = [ "bitflags 1.3.2", "cfg-if", "libc", "memoffset 0.7.1", "pin-utils", - "static_assertions", ] [[package]] @@ -3018,27 +3015,27 @@ checksum = "0e66a7cd1358277b2a6f77078e70aea7315ff2f20db969cc61153103ec162594" dependencies = [ "byteorder", "data-encoding", - "ed25519-dalek", - "getrandom 0.2.10", + "ed25519-dalek 1.0.1", + "getrandom 0.2.11", "log", "rand 0.8.5", - "signatory", + "signatory 0.23.2", ] [[package]] name = "nkeys" -version = "0.3.1" +version = "0.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3e9261eb915c785ea65708bc45ef43507ea46914e1a73f1412d1a38aba967c8e" +checksum = "aad178aad32087b19042ee36dfd450b73f5f934fbfb058b59b198684dfec4c47" dependencies = [ "byteorder", "data-encoding", - "ed25519", - "ed25519-dalek", - "getrandom 0.2.10", + "ed25519 
2.2.3", + "ed25519-dalek 2.1.0", + "getrandom 0.2.11", "log", "rand 0.8.5", - "signatory", + "signatory 0.27.1", ] [[package]] @@ -3122,9 +3119,9 @@ dependencies = [ [[package]] name = "num-traits" -version = "0.2.16" +version = "0.2.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f30b0abd723be7e2ffca1272140fac1a2f084c77ec3e123c192b66af1ee9e6c2" +checksum = "39e3200413f237f41ab11ad6d161bc7239c84dcb631773ccd7de3dfe4b5c267c" dependencies = [ "autocfg", "libm", @@ -3158,7 +3155,7 @@ dependencies = [ "proc-macro-crate 1.3.1", "proc-macro2", "quote", - "syn 2.0.29", + "syn 2.0.40", ] [[package]] @@ -3169,9 +3166,9 @@ checksum = "830b246a0e5f20af87141b25c173cd1b609bd7779a4617d6ec582abaf90870f3" [[package]] name = "object" -version = "0.32.0" +version = "0.32.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "77ac5bbd07aea88c60a577a1ce218075ffd59208b2d7ca97adf9bfc5aeb21ebe" +checksum = "9cf5f9dd3933bd50a9e1f149ec995f39ae2c496d31fd772c1fd45ebc27e902b0" dependencies = [ "memchr", ] @@ -3184,7 +3181,7 @@ dependencies = [ "remain", "serde", "si-hash", - "strum", + "strum 0.24.1", "tar", "tempfile", "thiserror", @@ -3195,9 +3192,9 @@ dependencies = [ [[package]] name = "once_cell" -version = "1.18.0" +version = "1.19.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dd8b5dd2ae5ed71462c540258bedcb51965123ad7e7ccf4b9a8cafaa4a63576d" +checksum = "3fdb12b2476b595f9358c5161aa467c2438859caa136dec86c26fdd2efe17b92" [[package]] name = "opaque-debug" @@ -3207,9 +3204,9 @@ checksum = "624a8340c38c1b80fd549087862da4ba43e08858af025b236e509b6649fc13d5" [[package]] name = "open" -version = "5.0.0" +version = "5.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cfabf1927dce4d6fdf563d63328a0a506101ced3ec780ca2135747336c98cef8" +checksum = "90878fb664448b54c4e592455ad02831e23a3f7e157374a8b95654731aac7349" dependencies = [ "is-wsl", "libc", @@ -3319,18 +3316,18 @@ 
checksum = "04744f49eae99ab78e0d5c0b603ab218f515ea8cfe5a456d7629ad883a3b6e7d" [[package]] name = "ordered-float" -version = "2.10.0" +version = "2.10.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7940cf2ca942593318d07fcf2596cdca60a85c9e7fab408a5e21a4f9dcd40d87" +checksum = "68f19d67e5a2795c94e73e0bb1cc1a7edeb2e28efd39e2e1c9b7a40c1108b11c" dependencies = [ "num-traits", ] [[package]] name = "ordered-float" -version = "3.7.0" +version = "3.9.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2fc2dbde8f8a79f2102cc474ceb0ad68e3b80b85289ea62389b60e66777e4213" +checksum = "f1e1c390732d15f1d48471625cd92d154e66db2c56645e29a9cd26f4699f72dc" dependencies = [ "num-traits", ] @@ -3389,7 +3386,7 @@ dependencies = [ "ecdsa", "elliptic-curve", "primeorder", - "sha2 0.10.7", + "sha2 0.10.8", ] [[package]] @@ -3401,14 +3398,14 @@ dependencies = [ "ecdsa", "elliptic-curve", "primeorder", - "sha2 0.10.7", + "sha2 0.10.8", ] [[package]] name = "parking" -version = "2.1.0" +version = "2.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "14f2252c834a40ed9bb5422029649578e63aa341ac401f74e719dd1afda8394e" +checksum = "bb813b8af86854136c6922af0598d719255ecb2179515e6e7730d468f05c9cae" [[package]] name = "parking_lot" @@ -3428,7 +3425,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3742b2c103b9f06bc9fff0a37ff4912935851bee6d36f3c02bcc755bcfec228f" dependencies = [ "lock_api", - "parking_lot_core 0.9.8", + "parking_lot_core 0.9.9", ] [[package]] @@ -3447,13 +3444,13 @@ dependencies = [ [[package]] name = "parking_lot_core" -version = "0.9.8" +version = "0.9.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "93f00c865fe7cabf650081affecd3871070f26767e7b2070a3ffae14c654b447" +checksum = "4c42a9226546d68acdd9c0a280d17ce19bfe27a46bf68784e4066115788d008e" dependencies = [ "cfg-if", "libc", - "redox_syscall 0.3.5", + "redox_syscall 0.4.1", 
"smallvec", "windows-targets 0.48.5", ] @@ -3499,9 +3496,9 @@ dependencies = [ [[package]] name = "percent-encoding" -version = "2.3.0" +version = "2.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9b2a4787296e9989611394c33f193f676704af1686e70b8f8033ab5ba9a35a94" +checksum = "e3148f5046208a5d56bcfc03053e3ca6334e51da8dfb19b6cdc8b306fae3283e" [[package]] name = "petgraph" @@ -3510,7 +3507,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e1d3afd2628e69da2be385eb6f2fd57c8ac7977ceeff6dc166ff1657b0e386a9" dependencies = [ "fixedbitset", - "indexmap 2.0.0", + "indexmap 2.1.0", "serde", "serde_derive", ] @@ -3530,7 +3527,7 @@ version = "0.11.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "90fcb95eef784c2ac79119d1dd819e162b5da872ce6f3c3abe1e8ca1c082f72b" dependencies = [ - "siphasher", + "siphasher 0.3.11", ] [[package]] @@ -3570,14 +3567,14 @@ checksum = "4359fd9c9171ec6e8c62926d6faaf553a8dc3f64e1507e76da7911b4f6a04405" dependencies = [ "proc-macro2", "quote", - "syn 2.0.29", + "syn 2.0.40", ] [[package]] name = "pin-project-lite" -version = "0.2.12" +version = "0.2.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "12cc1b0bf1727a77a54b6654e7b5f1af8604923edc8b81885f8ec92f9e3f0a05" +checksum = "8afb450f006bf6385ca15ef45d71d2288452bc3683ce2e2cacc0d18e4be60b58" [[package]] name = "pin-utils" @@ -3663,7 +3660,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f950b2377845cebe5cf8b5165cb3cc1a5e0fa5cfa3e1f7f55707d8fd82e0a7b7" dependencies = [ "der 0.7.8", - "spki 0.7.2", + "spki 0.7.3", ] [[package]] @@ -3672,6 +3669,12 @@ version = "0.3.27" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "26072860ba924cbfa98ea39c8c19b4dd6a4a25423dbdf219c1eca91aa0cf6964" +[[package]] +name = "platforms" +version = "3.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"14e6ab3f592e6fb464fc9712d8d6e6912de6473954635fd76a589d832cffcbb0" + [[package]] name = "podman-api" version = "0.10.0" @@ -3680,7 +3683,7 @@ checksum = "4d0ade207138f12695cb4be3b590283f1cf764c5c4909f39966c4b4b0dba7c1e" dependencies = [ "base64 0.13.1", "byteorder", - "bytes 1.4.0", + "bytes 1.5.0", "chrono", "containers-api 0.8.0", "flate2", @@ -3710,9 +3713,9 @@ dependencies = [ [[package]] name = "portable-atomic" -version = "1.4.2" +version = "1.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f32154ba0af3a075eefa1eda8bb414ee928f62303a54ea85b8d6638ff1a6ee9e" +checksum = "7170ef9988bc169ba16dd36a7fa041e5c4cbeb6a35b76d4c03daded371eae7c0" [[package]] name = "postgres-derive" @@ -3723,7 +3726,7 @@ dependencies = [ "heck 0.4.1", "proc-macro2", "quote", - "syn 2.0.29", + "syn 2.0.40", ] [[package]] @@ -3732,15 +3735,15 @@ version = "0.6.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "49b6c5ef183cd3ab4ba005f1ca64c21e8bd97ce4699cfea9e8d9a2c4958ca520" dependencies = [ - "base64 0.21.2", + "base64 0.21.5", "byteorder", - "bytes 1.4.0", + "bytes 1.5.0", "fallible-iterator", "hmac", "md-5", "memchr", "rand 0.8.5", - "sha2 0.10.7", + "sha2 0.10.8", "stringprep", ] @@ -3750,7 +3753,7 @@ version = "0.2.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8d2234cdee9408b523530a9b6d2d6b373d1db34f6a8e51dc03ded1828d7fb67c" dependencies = [ - "bytes 1.4.0", + "bytes 1.5.0", "chrono", "fallible-iterator", "postgres-derive", @@ -3759,6 +3762,12 @@ dependencies = [ "serde_json", ] +[[package]] +name = "powerfmt" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "439ee305def115ba05938db6eb1644ff94165c5ab5e9420d1c1bcedbba909391" + [[package]] name = "ppv-lite86" version = "0.2.17" @@ -3797,30 +3806,30 @@ dependencies = [ [[package]] name = "primeorder" -version = "0.13.2" +version = "0.13.6" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "3c2fcef82c0ec6eefcc179b978446c399b3cdf73c392c35604e399eee6df1ee3" +checksum = "353e1ca18966c16d9deb1c69278edbc5f194139612772bd9537af60ac231e1e6" dependencies = [ "elliptic-curve", ] [[package]] name = "proc-macro-crate" -version = "0.1.5" +version = "1.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1d6ea3c4595b96363c13943497db34af4460fb474a95c43f4446ad341b8c9785" +checksum = "7f4c021e1093a56626774e81216a4ce732a735e5bad4868a03f3ed65ca0c3919" dependencies = [ - "toml 0.5.11", + "once_cell", + "toml_edit 0.19.15", ] [[package]] name = "proc-macro-crate" -version = "1.3.1" +version = "2.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7f4c021e1093a56626774e81216a4ce732a735e5bad4868a03f3ed65ca0c3919" +checksum = "7e8366a6159044a37876a2b9817124296703c586a5c92e2c53751fa06d8d43e8" dependencies = [ - "once_cell", - "toml_edit", + "toml_edit 0.20.7", ] [[package]] @@ -3849,9 +3858,9 @@ dependencies = [ [[package]] name = "proc-macro2" -version = "1.0.66" +version = "1.0.70" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "18fb31db3f9bddb2ea821cde30a9f70117e3f119938b5ee630b7403aa6e2ead9" +checksum = "39278fbbf5fb4f646ce651690877f89d1c5811a3d4acb27700c1cb3cdb78fd3b" dependencies = [ "unicode-ident", ] @@ -3862,7 +3871,7 @@ version = "0.11.9" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0b82eaa1d779e9a4bc1c3217db8ffbeabaae1dca241bf70183242128d48681cd" dependencies = [ - "bytes 1.4.0", + "bytes 1.5.0", "prost-derive", ] @@ -3872,9 +3881,9 @@ version = "0.11.9" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "119533552c9a7ffacc21e099c24a0ac8bb19c2a2a3f363de84cd9b844feab270" dependencies = [ - "bytes 1.4.0", + "bytes 1.5.0", "heck 0.4.1", - "itertools", + "itertools 0.10.5", "lazy_static", "log", "multimap", @@ -3895,7 +3904,7 @@ source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "e5d2d8d10f3c6ded6da8b05b5fb3b8a5082514344d56c9f871412d29b4e075b4" dependencies = [ "anyhow", - "itertools", + "itertools 0.10.5", "proc-macro2", "quote", "syn 1.0.109", @@ -3956,7 +3965,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "735d6cecec44dc27382b962309c05de47d40727ff9898a501ec8fadec76be9a8" dependencies = [ "async-trait", - "bytes 1.4.0", + "bytes 1.5.0", "dashmap", "futures", "pin-project 1.1.3", @@ -3982,7 +3991,7 @@ dependencies = [ "chrono", "derive_more", "num_enum", - "ordered-float 3.7.0", + "ordered-float 3.9.2", "uuid", ] @@ -4051,7 +4060,7 @@ version = "0.6.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c" dependencies = [ - "getrandom 0.2.10", + "getrandom 0.2.11", ] [[package]] @@ -4063,6 +4072,62 @@ dependencies = [ "rand_core 0.5.1", ] +[[package]] +name = "rebaser" +version = "0.1.0" +dependencies = [ + "clap", + "color-eyre", + "rebaser-server", + "telemetry-application", + "tokio", +] + +[[package]] +name = "rebaser-client" +version = "0.1.0" +dependencies = [ + "rebaser-server", + "remain", + "serde", + "serde_json", + "si-rabbitmq", + "telemetry", + "thiserror", + "tokio", + "ulid", +] + +[[package]] +name = "rebaser-server" +version = "0.1.0" +dependencies = [ + "buck2-resources", + "dal", + "dal-test", + "derive_builder", + "futures", + "nats-subscriber", + "pretty_assertions_sorted", + "remain", + "serde", + "serde_json", + "si-crypto", + "si-data-nats", + "si-data-pg", + "si-rabbitmq", + "si-settings", + "si-std", + "si-test-macros", + "stream-cancel", + "telemetry", + "thiserror", + "tokio", + "tokio-stream", + "ulid", + "veritech-client", +] + [[package]] name = "redox_syscall" version = "0.2.16" @@ -4074,29 +4139,29 @@ dependencies = [ [[package]] name = "redox_syscall" -version = "0.3.5" +version = "0.4.1" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "567664f262709473930a4bf9e51bf2ebf3348f2e748ccc50dea20646858f8f29" +checksum = "4722d768eff46b75989dd134e5c353f0d6296e5aaa3132e776cbdb56be7731aa" dependencies = [ "bitflags 1.3.2", ] [[package]] name = "redox_users" -version = "0.4.3" +version = "0.4.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b033d837a7cf162d7993aded9304e30a83213c648b6e389db233191f891e5c2b" +checksum = "a18479200779601e498ada4e8c1e1f50e3ee19deb0259c25825a98b5603b2cb4" dependencies = [ - "getrandom 0.2.10", - "redox_syscall 0.2.16", + "getrandom 0.2.11", + "libredox", "thiserror", ] [[package]] name = "refinery" -version = "0.8.10" +version = "0.8.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cdb0436d0dd7bd8d4fce1e828751fa79742b08e35f27cfea7546f8a322b5ef24" +checksum = "529664dbccc0a296947615c997a857912d72d1c44be1fafb7bae54ecfa7a8c24" dependencies = [ "refinery-core", "refinery-macros", @@ -4104,9 +4169,9 @@ dependencies = [ [[package]] name = "refinery-core" -version = "0.8.10" +version = "0.8.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "19206547cd047e8f4dfa6b20c30d3ecaf24be05841b6aa0aa926a47a3d0662bb" +checksum = "e895cb870cf06e92318cbbeb701f274d022d5ca87a16fa8244e291cd035ef954" dependencies = [ "async-trait", "cfg-if", @@ -4114,39 +4179,39 @@ dependencies = [ "log", "regex", "serde", - "siphasher", + "siphasher 1.0.0", "thiserror", - "time 0.3.27", + "time", "tokio", "tokio-postgres", - "toml 0.7.6", + "toml 0.7.8", "url", "walkdir", ] [[package]] name = "refinery-macros" -version = "0.8.10" +version = "0.8.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d94d4b9241859ba19eaa5c04c86e782eb3aa0aae2c5868e0cfa90c856e58a174" +checksum = "123e8b80f8010c3ae38330c81e76938fc7adf6cdbfbaad20295bb8c22718b4f1" dependencies = [ "proc-macro2", "quote", "refinery-core", "regex", - "syn 2.0.29", + "syn 
2.0.40", ] [[package]] name = "regex" -version = "1.9.3" +version = "1.10.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "81bc1d4caf89fac26a70747fe603c130093b53c773888797a6329091246d651a" +checksum = "380b951a9c5e80ddfd6136919eef32310721aa4aacd4889a8d39124b026ab343" dependencies = [ "aho-corasick", "memchr", - "regex-automata 0.3.6", - "regex-syntax 0.7.4", + "regex-automata 0.4.3", + "regex-syntax 0.8.2", ] [[package]] @@ -4160,13 +4225,13 @@ dependencies = [ [[package]] name = "regex-automata" -version = "0.3.6" +version = "0.4.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fed1ceff11a1dddaee50c9dc8e4938bd106e9d89ae372f192311e7da498e3b69" +checksum = "5f804c7828047e88b2d32e2d7fe5a105da8ee3264f01902f796c8e067dc2483f" dependencies = [ "aho-corasick", "memchr", - "regex-syntax 0.7.4", + "regex-syntax 0.8.2", ] [[package]] @@ -4177,9 +4242,9 @@ checksum = "f162c6dd7b008981e4d40210aca20b4bd0f9b60ca9271061b07f78537722f2e1" [[package]] name = "regex-syntax" -version = "0.7.4" +version = "0.8.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e5ea92a5b6195c6ef2a0295ea818b312502c6fc94dde986c5553242e18fd4ce2" +checksum = "c08c74e62047bb2de4ff487b251e4a92e24f48745648451635cec7d591162d9f" [[package]] name = "remain" @@ -4189,26 +4254,26 @@ checksum = "bce3a7139d2ee67d07538ee5dba997364fbc243e7e7143e96eb830c74bfaa082" dependencies = [ "proc-macro2", "quote", - "syn 2.0.29", + "syn 2.0.40", ] [[package]] name = "rend" -version = "0.4.0" +version = "0.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "581008d2099240d37fb08d77ad713bcaec2c4d89d50b5b21a8bb1996bbab68ab" +checksum = "a2571463863a6bd50c32f94402933f03457a3fbaf697a707c5be741e459f08fd" dependencies = [ "bytecheck", ] [[package]] name = "reqwest" -version = "0.11.20" +version = "0.11.22" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"3e9ad3fe7488d7e34558a2033d45a0c90b72d97b4f80705666fea71472e2e6a1" +checksum = "046cd98826c46c2ac8ddecae268eb5c2e58628688a5fc7a2643704a73faba95b" dependencies = [ - "base64 0.21.2", - "bytes 1.4.0", + "base64 0.21.5", + "bytes 1.5.0", "encoding_rs", "futures-core", "futures-util", @@ -4225,11 +4290,12 @@ dependencies = [ "once_cell", "percent-encoding", "pin-project-lite", - "rustls 0.21.6", + "rustls 0.21.10", "rustls-pemfile", "serde", "serde_json", "serde_urlencoded", + "system-configuration", "tokio", "tokio-rustls 0.24.1", "tokio-util", @@ -4239,7 +4305,7 @@ dependencies = [ "wasm-bindgen-futures", "wasm-streams", "web-sys", - "webpki-roots 0.25.2", + "webpki-roots 0.25.3", "winreg", ] @@ -4261,17 +4327,31 @@ dependencies = [ [[package]] name = "ring" -version = "0.16.20" +version = "0.16.20" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3053cf52e236a3ed746dfc745aa9cacf1b791d846bdaf412f60a8d7d6e17c8fc" +dependencies = [ + "cc", + "libc", + "once_cell", + "spin 0.5.2", + "untrusted 0.7.1", + "web-sys", + "winapi", +] + +[[package]] +name = "ring" +version = "0.17.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3053cf52e236a3ed746dfc745aa9cacf1b791d846bdaf412f60a8d7d6e17c8fc" +checksum = "688c63d65483050968b2a8937f7995f443e27041a0f7700aa59b0822aedebb74" dependencies = [ "cc", + "getrandom 0.2.11", "libc", - "once_cell", - "spin 0.5.2", - "untrusted", - "web-sys", - "winapi", + "spin 0.9.8", + "untrusted 0.9.0", + "windows-sys 0.48.0", ] [[package]] @@ -4343,7 +4423,7 @@ dependencies = [ "aws-creds", "aws-region", "base64 0.13.1", - "bytes 1.4.0", + "bytes 1.5.0", "cfg-if", "futures", "hex", @@ -4357,9 +4437,9 @@ dependencies = [ "reqwest", "serde", "serde_derive", - "sha2 0.10.7", + "sha2 0.10.8", "thiserror", - "time 0.3.27", + "time", "tokio", "tokio-stream", "url", @@ -4367,13 +4447,13 @@ dependencies = [ [[package]] name = "rust_decimal" -version = "1.32.0" +version = "1.33.1" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "a4c4216490d5a413bc6d10fa4742bd7d4955941d062c0ef873141d6b0e7b30fd" +checksum = "06676aec5ccb8fc1da723cc8c0f9a46549f21ebb8753d3915c6c41db1e7f1dc4" dependencies = [ "arrayvec", "borsh", - "bytes 1.4.0", + "bytes 1.5.0", "num-traits", "rand 0.8.5", "rkyv", @@ -4398,51 +4478,37 @@ dependencies = [ [[package]] name = "rustix" -version = "0.37.23" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4d69718bf81c6127a49dc64e44a742e8bb9213c0ff8869a22c308f84c1d4ab06" -dependencies = [ - "bitflags 1.3.2", - "errno", - "io-lifetimes", - "libc", - "linux-raw-sys 0.3.8", - "windows-sys 0.48.0", -] - -[[package]] -name = "rustix" -version = "0.38.8" +version = "0.38.28" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "19ed4fa021d81c8392ce04db050a3da9a60299050b7ae1cf482d862b54a7218f" +checksum = "72e572a5e8ca657d7366229cdde4bd14c4eb5499a9573d4d366fe1b599daa316" dependencies = [ - "bitflags 2.4.0", + "bitflags 2.4.1", "errno", "libc", - "linux-raw-sys 0.4.5", - "windows-sys 0.48.0", + "linux-raw-sys", + "windows-sys 0.52.0", ] [[package]] name = "rustls" -version = "0.20.8" +version = "0.20.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fff78fc74d175294f4e83b28343315ffcfb114b156f0185e9741cb5570f50e2f" +checksum = "1b80e3dec595989ea8510028f30c408a4630db12c9cbb8de34203b89d6577e99" dependencies = [ "log", - "ring", + "ring 0.16.20", "sct", "webpki", ] [[package]] name = "rustls" -version = "0.21.6" +version = "0.21.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1d1feddffcfcc0b33f5c6ce9a29e341e4cd59c3f78e7ee45f4a40c038b1d6cbb" +checksum = "f9d5a6813c0759e4609cd494e8e725babae6a2ca7b62a5536a13daaec6fcb7ba" dependencies = [ "log", - "ring", + "ring 0.17.7", "rustls-webpki", "sct", ] @@ -4461,21 +4527,21 @@ dependencies = [ [[package]] name = "rustls-pemfile" -version = "1.0.3" +version = "1.0.4" 
source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2d3987094b1d07b653b7dfdc3f70ce9a1da9c51ac18c1b06b662e4f9a0e9f4b2" +checksum = "1c74cae0a4cf6ccbbf5f359f08efdf8ee7e1dc532573bf0db71968cb56b1448c" dependencies = [ - "base64 0.21.2", + "base64 0.21.5", ] [[package]] name = "rustls-webpki" -version = "0.101.4" +version = "0.101.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7d93931baf2d282fff8d3a532bbfd7653f734643161b87e3e01e59a04439bf0d" +checksum = "8b6275d1ee7a1cd780b64aca7726599a1dbc893b1e64144529e55c3c2f745765" dependencies = [ - "ring", - "untrusted", + "ring 0.17.7", + "untrusted 0.9.0", ] [[package]] @@ -4486,9 +4552,9 @@ checksum = "7ffc183a10b4478d04cbbbfc96d0873219d962dd5accaff2ffbd4ceb7df837f4" [[package]] name = "ryu" -version = "1.0.15" +version = "1.0.16" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1ad4cc8da4ef723ed60bced201181d83791ad433213d8c24efffda1eec85d741" +checksum = "f98d2aa92eebf49b69786be48e4477826b256916e84a57ff2a4f21923b48eb4c" [[package]] name = "same-file" @@ -4516,12 +4582,12 @@ checksum = "94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49" [[package]] name = "sct" -version = "0.7.0" +version = "0.7.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d53dcdb7c9f8158937a7981b48accfd39a43af418591a5d008c7b22b5e1b7ca4" +checksum = "da046153aa2352493d6cb7da4b6e5c0c057d8a1d0a9aa8560baffdd945acd414" dependencies = [ - "ring", - "untrusted", + "ring 0.17.7", + "untrusted 0.9.0", ] [[package]] @@ -4541,7 +4607,7 @@ version = "0.1.0" dependencies = [ "async-trait", "axum", - "base64 0.21.2", + "base64 0.21.5", "buck2-resources", "chrono", "convert_case 0.6.0", @@ -4553,7 +4619,7 @@ dependencies = [ "hyper", "module-index-client", "names", - "nix 0.26.2", + "nix 0.26.4", "once_cell", "pathdiff", "pretty_assertions_sorted", @@ -4563,7 +4629,7 @@ dependencies = [ "serde", "serde_json", "serde_url_params", - 
"serde_with 3.3.0", + "serde_with 3.4.0", "si-crypto", "si-data-nats", "si-data-pg", @@ -4572,7 +4638,7 @@ dependencies = [ "si-settings", "si-std", "sodiumoxide", - "strum", + "strum 0.24.1", "telemetry", "thiserror", "tokio", @@ -4583,7 +4649,7 @@ dependencies = [ "url", "veritech-client", "y-sync", - "yrs", + "yrs 0.16.10", ] [[package]] @@ -4608,7 +4674,7 @@ dependencies = [ "serde_json", "sqlx", "thiserror", - "time 0.3.27", + "time", "tracing", "url", "uuid", @@ -4638,7 +4704,7 @@ dependencies = [ "rust_decimal", "sea-query-derive", "serde_json", - "time 0.3.27", + "time", "uuid", ] @@ -4654,7 +4720,7 @@ dependencies = [ "sea-query", "serde_json", "sqlx", - "time 0.3.27", + "time", "uuid", ] @@ -4748,24 +4814,24 @@ dependencies = [ [[package]] name = "semver" -version = "1.0.18" +version = "1.0.20" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b0293b4b29daaf487284529cc2f5675b8e57c61f70167ba415a463651fd6a918" +checksum = "836fa6a3e1e547f9a2c4040802ec865b5d85f4014efe00555d7090a3dcaa1090" [[package]] name = "serde" -version = "1.0.186" +version = "1.0.193" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9f5db24220c009de9bd45e69fb2938f4b6d2df856aa9304ce377b3180f83b7c1" +checksum = "25dd9975e68d0cb5aa1120c288333fc98731bd1dd12f561e468ea4728c042b89" dependencies = [ "serde_derive", ] [[package]] name = "serde-aux" -version = "4.2.0" +version = "4.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c3dfe1b7eb6f9dcf011bd6fad169cdeaae75eda0d61b1a99a3f015b41b0cae39" +checksum = "184eba62ebddb71658697c8b08822edee89970bf318c5362189f0de27f85b498" dependencies = [ "chrono", "serde", @@ -4774,22 +4840,22 @@ dependencies = [ [[package]] name = "serde_derive" -version = "1.0.186" +version = "1.0.193" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5ad697f7e0b65af4983a4ce8f56ed5b357e8d3c36651bf6a7e13639c17b8e670" +checksum = 
"43576ca501357b9b071ac53cdc7da8ef0cbd9493d8df094cd821777ea6e894d3" dependencies = [ "proc-macro2", "quote", - "syn 2.0.29", + "syn 2.0.40", ] [[package]] name = "serde_json" -version = "1.0.105" +version = "1.0.108" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "693151e1ac27563d6dbcec9dee9fbd5da8539b20fa14ad3752b2e6d363ace360" +checksum = "3d1c7e3eac408d115102c4c24ad393e0821bb3a5df4d506a80f85f7a742a526b" dependencies = [ - "indexmap 2.0.0", + "indexmap 2.1.0", "itoa", "ryu", "serde", @@ -4816,20 +4882,20 @@ dependencies = [ [[package]] name = "serde_repr" -version = "0.1.16" +version = "0.1.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8725e1dfadb3a50f7e5ce0b1a540466f6ed3fe7a0fca2ac2b8b831d31316bd00" +checksum = "3081f5ffbb02284dda55132aa26daecedd7372a42417bbbab6f14ab7d6bb9145" dependencies = [ "proc-macro2", "quote", - "syn 2.0.29", + "syn 2.0.40", ] [[package]] name = "serde_spanned" -version = "0.6.3" +version = "0.6.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "96426c9936fd7a0124915f9185ea1d20aa9445cc9821142f0a73bc9207a2e186" +checksum = "12022b835073e5b11e90a14f86838ceb1c8fb0325b72416845c487ac0fa95e80" dependencies = [ "serde", ] @@ -4869,24 +4935,24 @@ dependencies = [ "serde", "serde_json", "serde_with_macros 2.3.3", - "time 0.3.27", + "time", ] [[package]] name = "serde_with" -version = "3.3.0" +version = "3.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1ca3b16a3d82c4088f343b7480a93550b3eabe1a358569c2dfe38bbcead07237" +checksum = "64cd236ccc1b7a29e7e2739f27c0b2dd199804abc4290e32f59f3b68d6405c23" dependencies = [ - "base64 0.21.2", + "base64 0.21.5", "chrono", "hex", "indexmap 1.9.3", - "indexmap 2.0.0", + "indexmap 2.1.0", "serde", "serde_json", - "serde_with_macros 3.3.0", - "time 0.3.27", + "serde_with_macros 3.4.0", + "time", ] [[package]] @@ -4898,28 +4964,28 @@ dependencies = [ "darling 0.20.3", "proc-macro2", 
"quote", - "syn 2.0.29", + "syn 2.0.40", ] [[package]] name = "serde_with_macros" -version = "3.3.0" +version = "3.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2e6be15c453eb305019bfa438b1593c731f36a289a7853f7707ee29e870b3b3c" +checksum = "93634eb5f75a2323b16de4748022ac4297f9e76b6dced2be287a099f41b5e788" dependencies = [ "darling 0.20.3", "proc-macro2", "quote", - "syn 2.0.29", + "syn 2.0.40", ] [[package]] name = "serde_yaml" -version = "0.9.25" +version = "0.9.27" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1a49e178e4452f45cb61d0cd8cebc1b0fafd3e41929e996cef79aa3aca91f574" +checksum = "3cc7a1570e38322cfe4154732e5110f887ea57e22b76f4bfd32b5bdd3368666c" dependencies = [ - "indexmap 2.0.0", + "indexmap 2.1.0", "itoa", "ryu", "serde", @@ -4928,9 +4994,9 @@ dependencies = [ [[package]] name = "sha1" -version = "0.10.5" +version = "0.10.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f04293dc80c3993519f2d7f6f511707ee7094fe0c6d3406feb330cdb3540eba3" +checksum = "e3bf829a2d51ab4a5ddf1352d8470c140cadc8301b2ae1789db023f01cedd6ba" dependencies = [ "cfg-if", "cpufeatures", @@ -4952,9 +5018,9 @@ dependencies = [ [[package]] name = "sha2" -version = "0.10.7" +version = "0.10.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "479fb9d862239e610720565ca91403019f2f00410f1864c5aa7479b950a76ed8" +checksum = "793db75ad2bcafc3ffa7c68b215fee268f537982cd901d132f89c6343f3a3dc8" dependencies = [ "cfg-if", "cpufeatures", @@ -4963,9 +5029,9 @@ dependencies = [ [[package]] name = "sharded-slab" -version = "0.1.4" +version = "0.1.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "900fba806f70c630b0a382d0d825e17a0f19fcd059a2ade1ff237bcddf446b31" +checksum = "f40ca3c46823713e0d4209592e8d6e826aa57e928f09752619fc696c499637f6" dependencies = [ "lazy_static", ] @@ -4980,7 +5046,7 @@ dependencies = [ "serde_json", "si-cli", "si-posthog", - 
"strum", + "strum 0.24.1", "telemetry-application", "tokio", ] @@ -4991,7 +5057,7 @@ version = "0.1.0" dependencies = [ "async-trait", "axum", - "base64 0.21.2", + "base64 0.21.5", "color-eyre", "colored", "comfy-table", @@ -5018,14 +5084,14 @@ dependencies = [ "tempfile", "thiserror", "tokio", - "toml 0.7.6", + "toml 0.7.8", ] [[package]] name = "si-crypto" version = "0.1.0" dependencies = [ - "base64 0.21.2", + "base64 0.21.5", "ciborium", "remain", "serde", @@ -5060,7 +5126,7 @@ dependencies = [ name = "si-data-pg" version = "0.1.0" dependencies = [ - "bytes 1.4.0", + "bytes 1.5.0", "deadpool", "deadpool-postgres", "futures", @@ -5089,7 +5155,7 @@ dependencies = [ name = "si-pkg" version = "0.1.0" dependencies = [ - "base64 0.21.2", + "base64 0.21.5", "chrono", "derive_builder", "object-tree", @@ -5098,7 +5164,7 @@ dependencies = [ "serde", "serde_json", "si-hash", - "strum", + "strum 0.24.1", "tempfile", "thiserror", "tokio", @@ -5115,7 +5181,7 @@ dependencies = [ "reqwest", "serde", "serde_json", - "strum", + "strum 0.24.1", "telemetry", "thiserror", "tokio", @@ -5152,7 +5218,7 @@ version = "0.1.0" dependencies = [ "remain", "serde", - "serde_with 3.3.0", + "serde_with 3.4.0", "thiserror", ] @@ -5162,7 +5228,7 @@ version = "0.1.0" dependencies = [ "proc-macro2", "quote", - "syn 2.0.29", + "syn 2.0.40", ] [[package]] @@ -5207,6 +5273,18 @@ dependencies = [ "zeroize", ] +[[package]] +name = "signatory" +version = "0.27.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c1e303f8205714074f6068773f0e29527e0453937fe837c9717d066635b65f31" +dependencies = [ + "pkcs8 0.10.2", + "rand_core 0.6.4", + "signature 2.2.0", + "zeroize", +] + [[package]] name = "signature" version = "1.6.4" @@ -5219,9 +5297,9 @@ dependencies = [ [[package]] name = "signature" -version = "2.1.0" +version = "2.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5e1788eed21689f9cf370582dfc467ef36ed9c707f073528ddafa8d83e3b8500" 
+checksum = "77549399552de45a898a580c1b41d445bf730df867cc44e6c0233bbc4b8329de" dependencies = [ "digest 0.10.7", "rand_core 0.6.4", @@ -5239,6 +5317,12 @@ version = "0.3.11" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "38b58827f4464d87d377d175e90bf58eb00fd8716ff0a62f80356b5e61555d0d" +[[package]] +name = "siphasher" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "54ac45299ccbd390721be55b412d41931911f654fa99e2cb8bfb57184b2061fe" + [[package]] name = "slab" version = "0.4.9" @@ -5257,17 +5341,26 @@ dependencies = [ "smallvec", ] +[[package]] +name = "smallstr" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "63b1aefdf380735ff8ded0b15f31aab05daf1f70216c01c02a12926badd1df9d" +dependencies = [ + "smallvec", +] + [[package]] name = "smallvec" -version = "1.11.0" +version = "1.11.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "62bb4feee49fdd9f707ef802e22365a35de4b7b299de4763d44bfea899442ff9" +checksum = "4dccd0940a2dcdf68d092b8cbab7dc0ad8fa938bf95787e1b916b0e3d0e8e970" [[package]] name = "socket2" -version = "0.4.9" +version = "0.4.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "64a4a911eed85daf18834cfaa86a79b7d266ff93ff5ba14005426219480ed662" +checksum = "9f7916fc008ca5542385b89a3d3ce689953c143e9304a9bf8beec1de48994c0d" dependencies = [ "libc", "winapi", @@ -5275,9 +5368,9 @@ dependencies = [ [[package]] name = "socket2" -version = "0.5.3" +version = "0.5.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2538b18701741680e0322a2302176d3253a35388e2e62f172f64f4f16605f877" +checksum = "7b5fac59a5cb5dd637972e5fca70daf0523c9067fcdc4842f053dae04a18f8e9" dependencies = [ "libc", "windows-sys 0.48.0", @@ -5289,7 +5382,7 @@ version = "0.2.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"e26be3acb6c2d9a7aac28482586a7856436af4cfe7100031d219de2d2ecb0028" dependencies = [ - "ed25519", + "ed25519 1.5.3", "libc", "libsodium-sys", "serde", @@ -5328,9 +5421,9 @@ dependencies = [ [[package]] name = "spki" -version = "0.7.2" +version = "0.7.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9d1e996ef02c474957d681f1b05213dfb0abab947b446a62d37770b23500184a" +checksum = "d91ed6c858b01f942cd56b37a94b3e0a1798290327d1236e4d9cf4eaca44d29d" dependencies = [ "base64ct", "der 0.7.8", @@ -5338,11 +5431,11 @@ dependencies = [ [[package]] name = "sqlformat" -version = "0.2.1" +version = "0.2.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0c12bc9199d1db8234678b7051747c07f517cdcf019262d1847b94ec8b1aee3e" +checksum = "ce81b7bd7c4493975347ef60d8c7e8b742d4694f4c49f93e0a12ea263938176c" dependencies = [ - "itertools", + "itertools 0.12.0", "nom", "unicode_categories", ] @@ -5363,13 +5456,13 @@ version = "0.6.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fa8241483a83a3f33aa5fff7e7d9def398ff9990b2752b6c6112b83c6d246029" dependencies = [ - "ahash 0.7.6", + "ahash 0.7.7", "atoi", "base64 0.13.1", "bigdecimal", "bitflags 1.3.2", "byteorder", - "bytes 1.4.0", + "bytes 1.5.0", "chrono", "crossbeam-queue", "dirs", @@ -5396,18 +5489,18 @@ dependencies = [ "percent-encoding", "rand 0.8.5", "rust_decimal", - "rustls 0.20.8", + "rustls 0.20.9", "rustls-pemfile", "serde", "serde_json", "sha1", - "sha2 0.10.7", + "sha2 0.10.8", "smallvec", "sqlformat", "sqlx-rt", "stringprep", "thiserror", - "time 0.3.27", + "time", "tokio-stream", "url", "uuid", @@ -5451,12 +5544,6 @@ version = "1.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a8f112729512f8e442d81f95a8a7ddf2b7c6b8a1a6f509a95864142b30cab2d3" -[[package]] -name = "static_assertions" -version = "1.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"a2eb9349b6444b326872e140eb1cf5e7c522154d69e7a0ffb0fb81c06b37543f" - [[package]] name = "stream-cancel" version = "0.8.1" @@ -5470,10 +5557,11 @@ dependencies = [ [[package]] name = "stringprep" -version = "0.1.3" +version = "0.1.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "db3737bde7edce97102e0e2b15365bf7a20bfdb5f60f4f9e8d7004258a51a8da" +checksum = "bb41d74e231a107a1b4ee36bd1214b11285b77768d2e3824aedafa988fd36ee6" dependencies = [ + "finl_unicode", "unicode-bidi", "unicode-normalization", ] @@ -5490,9 +5578,15 @@ version = "0.24.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "063e6045c0e62079840579a7e47a355ae92f60eb74daaf156fb1e84ba164e63f" dependencies = [ - "strum_macros", + "strum_macros 0.24.3", ] +[[package]] +name = "strum" +version = "0.25.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "290d54ea6f91c969195bdbcd7442c8c2a2ba87da8bf60a7ee86a235d4bc1e125" + [[package]] name = "strum_macros" version = "0.24.3" @@ -5506,6 +5600,19 @@ dependencies = [ "syn 1.0.109", ] +[[package]] +name = "strum_macros" +version = "0.25.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "23dc1fa9ac9c169a78ba62f0b841814b7abae11bdd047b9c58f893439e309ea0" +dependencies = [ + "heck 0.4.1", + "proc-macro2", + "quote", + "rustversion", + "syn 2.0.40", +] + [[package]] name = "subtle" version = "2.5.0" @@ -5525,21 +5632,54 @@ dependencies = [ [[package]] name = "syn" -version = "2.0.29" +version = "2.0.40" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c324c494eba9d92503e6f1ef2e6df781e78f6a7705a0202d9801b198807d518a" +checksum = "13fa70a4ee923979ffb522cacce59d34421ebdea5625e1073c4326ef9d2dd42e" dependencies = [ "proc-macro2", "quote", "unicode-ident", ] +[[package]] +name = "syn_derive" +version = "0.1.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"1329189c02ff984e9736652b1631330da25eaa6bc639089ed4915d25446cbe7b" +dependencies = [ + "proc-macro-error", + "proc-macro2", + "quote", + "syn 2.0.40", +] + [[package]] name = "sync_wrapper" version = "0.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2047c6ded9c721764247e62cd3b03c09ffc529b2ba5b10ec482ae507a4a70160" +[[package]] +name = "system-configuration" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ba3a3adc5c275d719af8cb4272ea1c4a6d668a777f37e115f6d11ddbc1c8e0e7" +dependencies = [ + "bitflags 1.3.2", + "core-foundation", + "system-configuration-sys", +] + +[[package]] +name = "system-configuration-sys" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a75fb188eb626b924683e3b95e3a48e63551fcfb51949de2f06a9d91dbee93c9" +dependencies = [ + "core-foundation-sys", + "libc", +] + [[package]] name = "tap" version = "1.0.1" @@ -5595,56 +5735,66 @@ dependencies = [ [[package]] name = "tempfile" -version = "3.8.0" +version = "3.8.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cb94d2f3cc536af71caac6b6fcebf65860b347e7ce0cc9ebe8f70d3e521054ef" +checksum = "7ef1adac450ad7f4b3c28589471ade84f25f731a7a0fe30d71dfa9f60fd808e5" dependencies = [ "cfg-if", - "fastrand 2.0.0", - "redox_syscall 0.3.5", - "rustix 0.38.8", + "fastrand 2.0.1", + "redox_syscall 0.4.1", + "rustix", "windows-sys 0.48.0", ] [[package]] name = "terminal_size" -version = "0.2.6" +version = "0.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8e6bf6f19e9f8ed8d4048dc22981458ebcf406d67e94cd422e5ecd73d63b3237" +checksum = "21bebf2b7c9e0a515f6e0f8c51dc0f8e4696391e6f1ff30379559f8365fb0df7" dependencies = [ - "rustix 0.37.23", + "rustix", "windows-sys 0.48.0", ] [[package]] name = "test-log" -version = "0.2.12" +version = "0.2.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"6159ab4116165c99fc88cce31f99fa2c9dbe08d3691cb38da02fc3b45f357d2b" +dependencies = [ + "test-log-macros", + "tracing-subscriber", +] + +[[package]] +name = "test-log-macros" +version = "0.2.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d9601d162c1d77e62c1ea0bc8116cd1caf143ce3af947536c3c9052a1677fe0c" +checksum = "7ba277e77219e9eea169e8508942db1bf5d8a41ff2db9b20aab5a5aadc9fa25d" dependencies = [ "proc-macro2", "quote", - "syn 1.0.109", + "syn 2.0.40", ] [[package]] name = "thiserror" -version = "1.0.47" +version = "1.0.50" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "97a802ec30afc17eee47b2855fc72e0c4cd62be9b4efe6591edde0ec5bd68d8f" +checksum = "f9a7210f5c9a7156bb50aa36aed4c95afb51df0df00713949448cf9e97d382d2" dependencies = [ "thiserror-impl", ] [[package]] name = "thiserror-impl" -version = "1.0.47" +version = "1.0.50" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6bb623b56e39ab7dcd4b1b98bb6c8f8d907ed255b18de254088016b27a8ee19b" +checksum = "266b2e40bc00e5a6c09c3584011e08b06f123c00362c92b975ba9843aaaa14b8" dependencies = [ "proc-macro2", "quote", - "syn 2.0.29", + "syn 2.0.40", ] [[package]] @@ -5659,23 +5809,13 @@ dependencies = [ [[package]] name = "time" -version = "0.1.45" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1b797afad3f312d1c66a56d11d0316f916356d11bd158fbc6ca6389ff6bf805a" -dependencies = [ - "libc", - "wasi 0.10.0+wasi-snapshot-preview1", - "winapi", -] - -[[package]] -name = "time" -version = "0.3.27" +version = "0.3.30" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0bb39ee79a6d8de55f48f2293a830e040392f1c5f16e336bdd1788cd0aadce07" +checksum = "c4a34ab300f2dee6e562c10a046fc05e358b29f9bf92277f30c3c8d82275f6f5" dependencies = [ "deranged", "itoa", + "powerfmt", "serde", "time-core", "time-macros", @@ -5683,15 +5823,15 @@ dependencies = [ [[package]] name = "time-core" -version = "0.1.1" 
+version = "0.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7300fbefb4dadc1af235a9cef3737cea692a9d97e1b9cbcd4ebdae6f8868e6fb" +checksum = "ef927ca75afb808a4d64dd374f00a2adf8d0fcff8e7b184af886c3c87ec4a3f3" [[package]] name = "time-macros" -version = "0.2.13" +version = "0.2.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "733d258752e9303d392b94b75230d07b0b9c489350c69b851fc6c065fde3e8f9" +checksum = "4ad70d68dba9e1f8aceda7aa6711965dfec1cac869f311a51bd08b3a2ccbce20" dependencies = [ "time-core", ] @@ -5713,19 +5853,19 @@ checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20" [[package]] name = "tokio" -version = "1.32.0" +version = "1.35.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "17ed6077ed6cd6c74735e21f37eb16dc3935f96878b1fe961074089cc80893f9" +checksum = "841d45b238a16291a4e1584e61820b8ae57d696cc5015c459c229ccc6990cc1c" dependencies = [ "backtrace", - "bytes 1.4.0", + "bytes 1.5.0", "libc", "mio", "num_cpus", "parking_lot 0.12.1", "pin-project-lite", "signal-hook-registry", - "socket2 0.5.3", + "socket2 0.5.5", "tokio-macros", "windows-sys 0.48.0", ] @@ -5742,24 +5882,24 @@ dependencies = [ [[package]] name = "tokio-macros" -version = "2.1.0" +version = "2.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "630bdcf245f78637c13ec01ffae6187cca34625e8c63150d424b59e55af2675e" +checksum = "5b8a1e28f2deaa14e508979454cb3a223b10b938b45af148bc0986de36f1923b" dependencies = [ "proc-macro2", "quote", - "syn 2.0.29", + "syn 2.0.40", ] [[package]] name = "tokio-postgres" -version = "0.7.9" +version = "0.7.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "000387915083ea6406ee44b50ca74813aba799fe682a7689e382bf9e13b74ce9" +checksum = "d340244b32d920260ae7448cb72b6e238bddc3d4f7603394e7dd46ed8e48f5b8" dependencies = [ "async-trait", "byteorder", - "bytes 1.4.0", + "bytes 1.5.0", 
"fallible-iterator", "futures-channel", "futures-util", @@ -5771,7 +5911,7 @@ dependencies = [ "postgres-protocol", "postgres-types", "rand 0.8.5", - "socket2 0.5.3", + "socket2 0.5.5", "tokio", "tokio-util", "whoami", @@ -5794,7 +5934,7 @@ version = "0.23.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c43ee83903113e03984cb9e5cebe6c04a5116269e900e3ddba8f068a62adda59" dependencies = [ - "rustls 0.20.8", + "rustls 0.20.9", "tokio", "webpki", ] @@ -5805,7 +5945,7 @@ version = "0.24.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c28327cf380ac148141087fbfb9de9d7bd4e84ab5d2c28fbc911d753de8a7081" dependencies = [ - "rustls 0.21.6", + "rustls 0.21.10", "tokio", ] @@ -5815,7 +5955,7 @@ version = "0.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "911a61637386b789af998ee23f50aa30d5fd7edcec8d6d3dedae5e5815205466" dependencies = [ - "bytes 1.4.0", + "bytes 1.5.0", "educe", "futures-core", "futures-sink", @@ -5842,7 +5982,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e89b3cbabd3ae862100094ae433e1def582cf86451b4e9bf83aa7ac1d8a7d719" dependencies = [ "async-stream", - "bytes 1.4.0", + "bytes 1.5.0", "futures-core", "tokio", "tokio-stream", @@ -5862,23 +6002,23 @@ dependencies = [ [[package]] name = "tokio-tungstenite" -version = "0.20.0" +version = "0.20.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2b2dbec703c26b00d74844519606ef15d09a7d6857860f84ad223dec002ddea2" +checksum = "212d5dcb2a1ce06d81107c3d0ffa3121fe974b73f068c8282cb1c32328113b6c" dependencies = [ "futures-util", "log", "tokio", - "tungstenite 0.20.0", + "tungstenite 0.20.1", ] [[package]] name = "tokio-util" -version = "0.7.8" +version = "0.7.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "806fe8c2c87eccc8b3267cbae29ed3ab2d0bd37fca70ab622e46aaa9375ddb7d" +checksum = 
"5419f34732d9eb6ee4c3578b7989078579b7f039cbbb9ca2c4da015749371e15" dependencies = [ - "bytes 1.4.0", + "bytes 1.5.0", "futures-core", "futures-sink", "pin-project-lite", @@ -5892,7 +6032,7 @@ version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "52a15c15b1bc91f90902347eff163b5b682643aff0c8e972912cca79bd9208dd" dependencies = [ - "bytes 1.4.0", + "bytes 1.5.0", "futures", "libc", "tokio", @@ -5910,38 +6050,49 @@ dependencies = [ [[package]] name = "toml" -version = "0.7.6" +version = "0.7.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c17e963a819c331dcacd7ab957d80bc2b9a9c1e71c804826d2f283dd65306542" +checksum = "dd79e69d3b627db300ff956027cc6c3798cef26d22526befdfcd12feeb6d2257" dependencies = [ "serde", "serde_spanned", "toml_datetime", - "toml_edit", + "toml_edit 0.19.15", ] [[package]] name = "toml_datetime" -version = "0.6.3" +version = "0.6.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7cda73e2f1397b1262d6dfdcef8aafae14d1de7748d66822d3bfeeb6d03e5e4b" +checksum = "3550f4e9685620ac18a50ed434eb3aec30db8ba93b0287467bca5826ea25baf1" dependencies = [ "serde", ] [[package]] name = "toml_edit" -version = "0.19.14" +version = "0.19.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f8123f27e969974a3dfba720fdb560be359f57b44302d280ba72e76a74480e8a" +checksum = "1b5bb770da30e5cbfde35a2d7b9b8a2c4b8ef89548a7a6aeab5c9a576e3e7421" dependencies = [ - "indexmap 2.0.0", + "indexmap 2.1.0", "serde", "serde_spanned", "toml_datetime", "winnow", ] +[[package]] +name = "toml_edit" +version = "0.20.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "70f427fce4d84c72b5b732388bf4a9f4531b53f74e2887e3ecb2481f68f66d81" +dependencies = [ + "indexmap 2.1.0", + "toml_datetime", + "winnow", +] + [[package]] name = "tonic" version = "0.8.3" @@ -5952,7 +6103,7 @@ dependencies = [ "async-trait", "axum", "base64 0.13.1", - "bytes 
1.4.0", + "bytes 1.5.0", "futures-core", "futures-util", "h2", @@ -6009,12 +6160,12 @@ dependencies = [ [[package]] name = "tower-http" -version = "0.4.3" +version = "0.4.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "55ae70283aba8d2a8b411c695c437fe25b8b5e44e23e780662002fc72fb47a82" +checksum = "61c5bb1d698276a2443e5ecfabc1008bf15a36c12e6a7176e7bf089ea9131140" dependencies = [ - "bitflags 2.4.0", - "bytes 1.4.0", + "bitflags 2.4.1", + "bytes 1.5.0", "futures-core", "futures-util", "http", @@ -6040,11 +6191,10 @@ checksum = "b6bc1c9ce2b5135ac7f93c72918fc37feb872bdc6a5533a8b85eb4b86bfdae52" [[package]] name = "tracing" -version = "0.1.37" +version = "0.1.40" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8ce8c33a8d48bd45d624a6e523445fd21ec13d3653cd51f681abf67418f54eb8" +checksum = "c3523ab5a71916ccf420eebdf5521fcef02141234bbc0b8a49f2fdc4544364ef" dependencies = [ - "cfg-if", "log", "pin-project-lite", "tracing-attributes", @@ -6053,20 +6203,20 @@ dependencies = [ [[package]] name = "tracing-attributes" -version = "0.1.26" +version = "0.1.27" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5f4f31f56159e98206da9efd823404b79b6ef3143b4a7ab76e67b1751b25a4ab" +checksum = "34704c8d6ebcbc939824180af020566b01a7c01f80641264eba0999f6c2b6be7" dependencies = [ "proc-macro2", "quote", - "syn 2.0.29", + "syn 2.0.40", ] [[package]] name = "tracing-core" -version = "0.1.31" +version = "0.1.32" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0955b8137a1df6f1a2e9a37d8a6656291ff0297c1a97c24e0d8425fe2312f79a" +checksum = "c06d3da6113f116aaee68e4d601191614c9053067f9ab7f6edbcb161237daa54" dependencies = [ "once_cell", "valuable", @@ -6094,12 +6244,23 @@ dependencies = [ [[package]] name = "tracing-log" -version = "0.1.3" +version = "0.1.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"78ddad33d2d10b1ed7eb9d1f518a5674713876e97e5bb9b7345a7984fbb4f922" +checksum = "f751112709b4e791d8ce53e32c4ed2d353565a795ce84da2285393f41557bdf2" +dependencies = [ + "log", + "once_cell", + "tracing-core", +] + +[[package]] +name = "tracing-log" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ee855f1f400bd0e5c02d150ae5de3840039a3f54b025156404e34c23c03f47c3" dependencies = [ - "lazy_static", "log", + "once_cell", "tracing-core", ] @@ -6113,15 +6274,15 @@ dependencies = [ "opentelemetry", "tracing", "tracing-core", - "tracing-log", + "tracing-log 0.1.4", "tracing-subscriber", ] [[package]] name = "tracing-subscriber" -version = "0.3.17" +version = "0.3.18" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "30a651bc37f915e81f087d86e62a18eec5f79550c7faff886f7090b4ea757c77" +checksum = "ad0f048c97dbd9faa9b7df56362b8ebcaa52adb06b498c050d2f4e32f90a7a8b" dependencies = [ "matchers", "nu-ansi-term", @@ -6132,14 +6293,14 @@ dependencies = [ "thread_local", "tracing", "tracing-core", - "tracing-log", + "tracing-log 0.2.0", ] [[package]] name = "try-lock" -version = "0.2.4" +version = "0.2.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3528ecfd12c466c6f163363caf2d02a71161dd5e1cc6ae7b34207ea2d42d81ed" +checksum = "e421abadd41a4225275504ea4d6566923418b7f05506fbc9c0fe86ba7396114b" [[package]] name = "tungstenite" @@ -6149,7 +6310,7 @@ checksum = "30ee6ab729cd4cf0fd55218530c4522ed30b7b6081752839b68fcec8d0960788" dependencies = [ "base64 0.13.1", "byteorder", - "bytes 1.4.0", + "bytes 1.5.0", "http", "httparse", "log", @@ -6162,12 +6323,12 @@ dependencies = [ [[package]] name = "tungstenite" -version = "0.20.0" +version = "0.20.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e862a1c4128df0112ab625f55cd5c934bcb4312ba80b39ae4b4835a3fd58e649" +checksum = "9e3dac10fd62eaf6617d3a904ae222845979aec67c615d1c842b4002c7666fb9" dependencies = [ 
"byteorder", - "bytes 1.4.0", + "bytes 1.5.0", "data-encoding", "http", "httparse", @@ -6181,15 +6342,15 @@ dependencies = [ [[package]] name = "typenum" -version = "1.16.0" +version = "1.17.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "497961ef93d974e23eb6f433eb5fe1b7930b659f06d12dec6fc44a8f554c0bba" +checksum = "42ff0bf0c66b8238c6f3b578df37d0b7848e55df8577b3f74f92a69acceeb825" [[package]] name = "ulid" -version = "1.0.0" +version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "13a3aaa69b04e5b66cc27309710a569ea23593612387d67daaf102e73aa974fd" +checksum = "7e37c4b6cbcc59a8dcd09a6429fbc7890286bcbb79215cea7b38a3c4c0921d93" dependencies = [ "rand 0.8.5", "serde", @@ -6206,15 +6367,15 @@ dependencies = [ [[package]] name = "unicode-bidi" -version = "0.3.13" +version = "0.3.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "92888ba5573ff080736b3648696b70cafad7d250551175acbaa4e0385b3e1460" +checksum = "6f2528f27a9eb2b21e69c95319b30bd0efd85d09c379741b0f78ea1d86be2416" [[package]] name = "unicode-ident" -version = "1.0.11" +version = "1.0.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "301abaae475aa91687eb82514b328ab47a211a533026cb25fc3e519b86adfc3c" +checksum = "3354b9ac3fae1ff6755cb6db53683adb661634f67557942dea4facebec0fee4b" [[package]] name = "unicode-normalization" @@ -6233,9 +6394,9 @@ checksum = "1dd624098567895118886609431a7c3b8f516e41d30e0643f03d94592a147e36" [[package]] name = "unicode-width" -version = "0.1.10" +version = "0.1.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c0edd1e5b14653f783770bce4a4dabb4a5108a5370a5f5d8cfe8710c361f6c8b" +checksum = "e51733f11c9c4f72aa0c160008246859e340b00807569a0da0e7a1079b27ba85" [[package]] name = "unicode-xid" @@ -6261,11 +6422,17 @@ version = "0.7.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"a156c684c91ea7d62626509bce3cb4e1d9ed5c4d978f7b4352658f96a4c26b4a" +[[package]] +name = "untrusted" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8ecb6da28b8a351d773b68d5825ac39017e680750f980f3a1a85cd8dd28a47c1" + [[package]] name = "url" -version = "2.4.0" +version = "2.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "50bff7831e19200a85b17131d085c25d7811bc4e186efdaf54bbd132994a88cb" +checksum = "31e6302e3bb753d46e83516cae55ae196fc0c309407cf11ab35cc51a4c2a4633" dependencies = [ "form_urlencoded", "idna", @@ -6281,9 +6448,9 @@ checksum = "09cc8ee72d2a9becf2f2febe0205bbed8fc6615b7cb429ad062dc7b7ddd036a9" [[package]] name = "utf8-width" -version = "0.1.6" +version = "0.1.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5190c9442dcdaf0ddd50f37420417d219ae5261bbf5db120d0f9bab996c9cba1" +checksum = "86bd8d4e895da8537e5315b8254664e6b769c4ff3db18321b297a1e7004392e3" [[package]] name = "utf8parse" @@ -6293,11 +6460,11 @@ checksum = "711b9620af191e0cdc7468a8d14e709c3dcdb115b36f838e601583af800a370a" [[package]] name = "uuid" -version = "1.4.1" +version = "1.6.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "79daa5ed5740825c40b389c5e50312b9c86df53fccd33f281df655642b43869d" +checksum = "5e395fcf16a7a3d8127ec99782007af141946b4795001f876d54fb0d55978560" dependencies = [ - "getrandom 0.2.10", + "getrandom 0.2.11", "serde", ] @@ -6322,7 +6489,7 @@ dependencies = [ name = "veritech-client" version = "0.1.0" dependencies = [ - "base64 0.21.2", + "base64 0.21.5", "cyclone-core", "futures", "indoc", @@ -6404,15 +6571,15 @@ dependencies = [ [[package]] name = "waker-fn" -version = "1.1.0" +version = "1.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9d5b2c62b4012a3e1eca5a7e077d13b3bf498c4073e33ccd58626607748ceeca" +checksum = "f3c4517f54858c779bbcbf228f4fca63d121bf85fbecb2dc578cdf4a39395690" [[package]] 
name = "walkdir" -version = "2.3.3" +version = "2.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "36df944cda56c7d8d8b7496af378e6b16de9284591917d307c9b4d313c44e698" +checksum = "d71d857dc86794ca4c280d616f7da00d2dbfd8cd788846559a6813e6aa4b54ee" dependencies = [ "same-file", "winapi-util", @@ -6433,12 +6600,6 @@ version = "0.9.0+wasi-snapshot-preview1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "cccddf32554fecc6acb585f82a32a72e28b48f8c4c1883ddfeeeaa96f7d8e519" -[[package]] -name = "wasi" -version = "0.10.0+wasi-snapshot-preview1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1a143597ca7c7793eff794def352d41792a93c481eb1042423ff7ff72ba2c31f" - [[package]] name = "wasi" version = "0.11.0+wasi-snapshot-preview1" @@ -6447,9 +6608,9 @@ checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423" [[package]] name = "wasm-bindgen" -version = "0.2.87" +version = "0.2.89" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7706a72ab36d8cb1f80ffbf0e071533974a60d0a308d01a5d0375bf60499a342" +checksum = "0ed0d4f68a3015cc185aff4db9506a015f4b96f95303897bfa23f846db54064e" dependencies = [ "cfg-if", "wasm-bindgen-macro", @@ -6457,24 +6618,24 @@ dependencies = [ [[package]] name = "wasm-bindgen-backend" -version = "0.2.87" +version = "0.2.89" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5ef2b6d3c510e9625e5fe6f509ab07d66a760f0885d858736483c32ed7809abd" +checksum = "1b56f625e64f3a1084ded111c4d5f477df9f8c92df113852fa5a374dbda78826" dependencies = [ "bumpalo", "log", "once_cell", "proc-macro2", "quote", - "syn 2.0.29", + "syn 2.0.40", "wasm-bindgen-shared", ] [[package]] name = "wasm-bindgen-futures" -version = "0.4.37" +version = "0.4.39" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c02dbc21516f9f1f04f187958890d7e6026df8d16540b7ad9492bc34a67cea03" +checksum = 
"ac36a15a220124ac510204aec1c3e5db8a22ab06fd6706d881dc6149f8ed9a12" dependencies = [ "cfg-if", "js-sys", @@ -6484,9 +6645,9 @@ dependencies = [ [[package]] name = "wasm-bindgen-macro" -version = "0.2.87" +version = "0.2.89" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dee495e55982a3bd48105a7b947fd2a9b4a8ae3010041b9e0faab3f9cd028f1d" +checksum = "0162dbf37223cd2afce98f3d0785506dcb8d266223983e4b5b525859e6e182b2" dependencies = [ "quote", "wasm-bindgen-macro-support", @@ -6494,22 +6655,22 @@ dependencies = [ [[package]] name = "wasm-bindgen-macro-support" -version = "0.2.87" +version = "0.2.89" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "54681b18a46765f095758388f2d0cf16eb8d4169b639ab575a8f5693af210c7b" +checksum = "f0eb82fcb7930ae6219a7ecfd55b217f5f0893484b7a13022ebb2b2bf20b5283" dependencies = [ "proc-macro2", "quote", - "syn 2.0.29", + "syn 2.0.40", "wasm-bindgen-backend", "wasm-bindgen-shared", ] [[package]] name = "wasm-bindgen-shared" -version = "0.2.87" +version = "0.2.89" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ca6ad05a4870b2bf5fe995117d3728437bd27d7cd5f06f13c17443ef369775a1" +checksum = "7ab9b36309365056cd639da3134bf87fa8f3d86008abf99e612384a6eecd459f" [[package]] name = "wasm-streams" @@ -6526,9 +6687,9 @@ dependencies = [ [[package]] name = "web-sys" -version = "0.3.64" +version = "0.3.66" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9b85cbef8c220a6abc02aefd892dfc0fc23afb1c6a426316ec33253a3877249b" +checksum = "50c24a44ec86bb68fbecd1b3efed7e85ea5621b39b35ef2766b66cd984f8010f" dependencies = [ "js-sys", "wasm-bindgen", @@ -6536,12 +6697,12 @@ dependencies = [ [[package]] name = "webpki" -version = "0.22.0" +version = "0.22.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f095d78192e208183081cc07bc5515ef55216397af48b873e5edcd72637fa1bd" +checksum = 
"ed63aea5ce73d0ff405984102c42de94fc55a6b75765d621c65262469b3c9b53" dependencies = [ - "ring", - "untrusted", + "ring 0.17.7", + "untrusted 0.9.0", ] [[package]] @@ -6555,19 +6716,20 @@ dependencies = [ [[package]] name = "webpki-roots" -version = "0.25.2" +version = "0.25.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "14247bb57be4f377dfb94c72830b8ce8fc6beac03cf4bf7b9732eadd414123fc" +checksum = "1778a42e8b3b90bff8d0f5032bf22250792889a5cdc752aa0020c84abe3aaf10" [[package]] name = "which" -version = "4.4.0" +version = "4.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2441c784c52b289a054b7201fc93253e288f094e2f4be9058343127c4226a269" +checksum = "87ba24419a2078cd2b0f2ede2691b6c66d8e47836da3b6db8265ebad47afbfc7" dependencies = [ "either", - "libc", + "home", "once_cell", + "rustix", ] [[package]] @@ -6598,9 +6760,9 @@ checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6" [[package]] name = "winapi-util" -version = "0.1.5" +version = "0.1.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "70ec6ce85bb158151cae5e5c87f95a8e97d2c0c4b001223f33a334e3ce5de178" +checksum = "f29e6f9198ba0d26b4c9f07dbe6f9ed633e1f3d5b8b414090084349e46a52596" dependencies = [ "winapi", ] @@ -6612,10 +6774,10 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" [[package]] -name = "windows" -version = "0.48.0" +name = "windows-core" +version = "0.51.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e686886bc078bc1b0b600cac0147aadb815089b6e4da64016cbd754b6342700f" +checksum = "f1f8cf84f35d2db49a46868f947758c7a1138116f7fac3bc844f43ade1292e64" dependencies = [ "windows-targets 0.48.5", ] @@ -6638,6 +6800,15 @@ dependencies = [ "windows-targets 0.48.5", ] +[[package]] +name = "windows-sys" +version = "0.52.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "282be5f36a8ce781fad8c8ae18fa3f9beff57ec1b52cb3de0789201425d9a33d" +dependencies = [ + "windows-targets 0.52.0", +] + [[package]] name = "windows-targets" version = "0.42.2" @@ -6668,6 +6839,21 @@ dependencies = [ "windows_x86_64_msvc 0.48.5", ] +[[package]] +name = "windows-targets" +version = "0.52.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8a18201040b24831fbb9e4eb208f8892e1f50a37feb53cc7ff887feb8f50e7cd" +dependencies = [ + "windows_aarch64_gnullvm 0.52.0", + "windows_aarch64_msvc 0.52.0", + "windows_i686_gnu 0.52.0", + "windows_i686_msvc 0.52.0", + "windows_x86_64_gnu 0.52.0", + "windows_x86_64_gnullvm 0.52.0", + "windows_x86_64_msvc 0.52.0", +] + [[package]] name = "windows_aarch64_gnullvm" version = "0.42.2" @@ -6680,6 +6866,12 @@ version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2b38e32f0abccf9987a4e3079dfb67dcd799fb61361e53e2882c3cbaf0d905d8" +[[package]] +name = "windows_aarch64_gnullvm" +version = "0.52.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cb7764e35d4db8a7921e09562a0304bf2f93e0a51bfccee0bd0bb0b666b015ea" + [[package]] name = "windows_aarch64_msvc" version = "0.42.2" @@ -6692,6 +6884,12 @@ version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "dc35310971f3b2dbbf3f0690a219f40e2d9afcf64f9ab7cc1be722937c26b4bc" +[[package]] +name = "windows_aarch64_msvc" +version = "0.52.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bbaa0368d4f1d2aaefc55b6fcfee13f41544ddf36801e793edbbfd7d7df075ef" + [[package]] name = "windows_i686_gnu" version = "0.42.2" @@ -6704,6 +6902,12 @@ version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a75915e7def60c94dcef72200b9a8e58e5091744960da64ec734a6c6e9b3743e" +[[package]] +name = "windows_i686_gnu" +version = "0.52.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "a28637cb1fa3560a16915793afb20081aba2c92ee8af57b4d5f28e4b3e7df313" + [[package]] name = "windows_i686_msvc" version = "0.42.2" @@ -6716,6 +6920,12 @@ version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8f55c233f70c4b27f66c523580f78f1004e8b5a8b659e05a4eb49d4166cca406" +[[package]] +name = "windows_i686_msvc" +version = "0.52.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ffe5e8e31046ce6230cc7215707b816e339ff4d4d67c65dffa206fd0f7aa7b9a" + [[package]] name = "windows_x86_64_gnu" version = "0.42.2" @@ -6728,6 +6938,12 @@ version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "53d40abd2583d23e4718fddf1ebec84dbff8381c07cae67ff7768bbf19c6718e" +[[package]] +name = "windows_x86_64_gnu" +version = "0.52.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3d6fa32db2bc4a2f5abeacf2b69f7992cd09dca97498da74a151a3132c26befd" + [[package]] name = "windows_x86_64_gnullvm" version = "0.42.2" @@ -6740,6 +6956,12 @@ version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0b7b52767868a23d5bab768e390dc5f5c55825b6d30b86c844ff2dc7414044cc" +[[package]] +name = "windows_x86_64_gnullvm" +version = "0.52.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1a657e1e9d3f514745a572a6846d3c7aa7dbe1658c056ed9c3344c4109a6949e" + [[package]] name = "windows_x86_64_msvc" version = "0.42.2" @@ -6752,11 +6974,17 @@ version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ed94fce61571a4006852b7389a063ab983c02eb1bb37b47f8272ce92d06d9538" +[[package]] +name = "windows_x86_64_msvc" +version = "0.52.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dff9641d1cd4be8d1a070daf9e3773c5f67e78b4d9d42263020c057706765c04" + [[package]] name = "winnow" -version = 
"0.5.15" +version = "0.5.28" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7c2e3184b9c4e92ad5167ca73039d0c42476302ab603e2fec4487511f38ccefc" +checksum = "6c830786f7720c2fd27a1a0e27a709dbd3c4d009b56d098fc742d4f4eab91fe2" dependencies = [ "memchr", ] @@ -6782,11 +7010,13 @@ dependencies = [ [[package]] name = "xattr" -version = "1.0.1" +version = "1.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f4686009f71ff3e5c4dbcf1a282d0a44db3f021ba69350cd42086b3e5f1c6985" +checksum = "d367426ae76bdfce3d8eaea6e94422afd6def7d46f9c89e2980309115b3c2c41" dependencies = [ "libc", + "linux-raw-sys", + "rustix", ] [[package]] @@ -6799,7 +7029,7 @@ dependencies = [ "lib0", "thiserror", "tokio", - "yrs", + "yrs 0.17.2", ] [[package]] @@ -6826,16 +7056,51 @@ dependencies = [ "atomic_refcell", "lib0", "rand 0.7.3", - "smallstr", + "smallstr 0.2.0", + "smallvec", + "thiserror", +] + +[[package]] +name = "yrs" +version = "0.17.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "68aea14c6c33f2edd8a5ff9415360cfa5b98d90cce30c5ee3be59a8419fb15a9" +dependencies = [ + "atomic_refcell", + "rand 0.7.3", + "serde", + "serde_json", + "smallstr 0.3.0", "smallvec", "thiserror", ] +[[package]] +name = "zerocopy" +version = "0.7.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "306dca4455518f1f31635ec308b6b3e4eb1b11758cefafc782827d0aa7acb5c7" +dependencies = [ + "zerocopy-derive", +] + +[[package]] +name = "zerocopy-derive" +version = "0.7.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "be912bf68235a88fbefd1b73415cb218405958d1655b2ece9035a19920bdf6ba" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.40", +] + [[package]] name = "zeroize" -version = "1.6.0" +version = "1.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2a0956f1ba7c7909bfb66c2e9e4124ab6f6482560f6628b5aaeba39207c9aad9" +checksum = 
"525b4ec142c6b68a2d10f01f7bbf6755599ca3f81ea53b8431b7dd348f5fdb2d" dependencies = [ "zeroize_derive", ] @@ -6848,5 +7113,5 @@ checksum = "ce36e65b0d2999d2aafac989fb249189a141aee1f53c612c1f37d72631959f69" dependencies = [ "proc-macro2", "quote", - "syn 2.0.29", + "syn 2.0.40", ] diff --git a/Cargo.toml b/Cargo.toml index 5bbb2d9569..5dc282f13f 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -6,7 +6,7 @@ resolver = "2" members = [ "bin/council", "bin/cyclone", - "bin/gobbler", + "bin/rebaser", "bin/module-index", "bin/pinga", "bin/sdf", @@ -23,8 +23,8 @@ members = [ "lib/dal", "lib/dal-test", "lib/deadpool-cyclone", - "lib/gobbler-client", - "lib/gobbler-server", + "lib/rebaser-client", + "lib/rebaser-server", "lib/module-index-client", "lib/module-index-server", "lib/nats-subscriber", diff --git a/bin/gobbler/BUCK b/bin/rebaser/BUCK similarity index 76% rename from bin/gobbler/BUCK rename to bin/rebaser/BUCK index ec6f0ca090..3264ee3b83 100644 --- a/bin/gobbler/BUCK +++ b/bin/rebaser/BUCK @@ -5,9 +5,9 @@ load( ) rust_binary( - name = "gobbler", + name = "rebaser", deps = [ - "//lib/gobbler-server:gobbler-server", + "//lib/rebaser-server:rebaser-server", "//lib/telemetry-application-rs:telemetry-application", "//third-party/rust:clap", "//third-party/rust:color-eyre", @@ -21,6 +21,6 @@ rust_binary( docker_image( name = "image", - image_name = "gobbler", - build_deps = ["//bin/gobbler:gobbler"] + image_name = "rebaser", + build_deps = ["//bin/rebaser:rebaser"] ) diff --git a/bin/gobbler/Cargo.toml b/bin/rebaser/Cargo.toml similarity index 76% rename from bin/gobbler/Cargo.toml rename to bin/rebaser/Cargo.toml index ec3fc26da1..29b908ee8a 100644 --- a/bin/gobbler/Cargo.toml +++ b/bin/rebaser/Cargo.toml @@ -1,17 +1,17 @@ [package] -name = "gobbler" +name = "rebaser" version = "0.1.0" edition = "2021" rust-version = "1.64" publish = false [[bin]] -name = "gobbler" +name = "rebaser" path = "src/main.rs" [dependencies] clap = { workspace = true } color-eyre = { workspace = 
true } -gobbler-server = { path = "../../lib/gobbler-server" } +rebaser-server = { path = "../../lib/rebaser-server" } telemetry-application = { path = "../../lib/telemetry-application-rs" } tokio = { workspace = true } diff --git a/bin/gobbler/Dockerfile b/bin/rebaser/Dockerfile similarity index 90% rename from bin/gobbler/Dockerfile rename to bin/rebaser/Dockerfile index 7db829a9dd..0be26bbabc 100644 --- a/bin/gobbler/Dockerfile +++ b/bin/rebaser/Dockerfile @@ -1,6 +1,6 @@ # hadolint ignore=DL3007 FROM nixos/nix:latest AS builder -ARG BIN=gobbler +ARG BIN=rebaser COPY . /workdir WORKDIR /workdir @@ -19,7 +19,7 @@ RUN cp -R $(nix-store --query --requisites result/) /tmp/nix-store-closure RUN ln -snf $(nix-store --query result/)/bin/* /tmp/local-bin/ FROM alpine:3 AS final -ARG BIN=gobbler +ARG BIN=rebaser # hadolint ignore=DL3018 RUN set -eux; \ @@ -34,5 +34,5 @@ COPY --from=builder /tmp/nix-store-closure /nix/store COPY --from=builder /tmp/local-bin/* /usr/local/bin/ ENTRYPOINT [ \ - "/sbin/runuser", "-u", "app", "--", "/usr/local/bin/gobbler" \ + "/sbin/runuser", "-u", "app", "--", "/usr/local/bin/rebaser" \ ] diff --git a/bin/gobbler/src/args.rs b/bin/rebaser/src/args.rs similarity index 94% rename from bin/gobbler/src/args.rs rename to bin/rebaser/src/args.rs index 28ee5b2d5f..b74aa412c7 100644 --- a/bin/gobbler/src/args.rs +++ b/bin/rebaser/src/args.rs @@ -1,7 +1,7 @@ use clap::{ArgAction, Parser}; -use gobbler_server::{Config, ConfigError, ConfigFile, StandardConfigFile}; +use rebaser_server::{Config, ConfigError, ConfigFile, StandardConfigFile}; -const NAME: &str = "gobbler"; +const NAME: &str = "rebaser"; /// Parse, validate, and return the CLI arguments as a typed struct. 
pub(crate) fn parse() -> Args { @@ -45,7 +45,7 @@ pub(crate) struct Args { #[arg(long)] pub(crate) disable_opentelemetry: bool, - /// Cyclone encryption key file location [default: /run/gobbler/cyclone_encryption.key] + /// Cyclone encryption key file location [default: /run/rebaser/cyclone_encryption.key] #[arg(long)] pub(crate) cyclone_encryption_key_path: Option, diff --git a/bin/gobbler/src/main.rs b/bin/rebaser/src/main.rs similarity index 89% rename from bin/gobbler/src/main.rs rename to bin/rebaser/src/main.rs index 121cb07a37..44d9817db0 100644 --- a/bin/gobbler/src/main.rs +++ b/bin/rebaser/src/main.rs @@ -1,5 +1,5 @@ use color_eyre::Result; -use gobbler_server::{Config, Server}; +use rebaser_server::{Config, Server}; use telemetry_application::{ prelude::*, start_tracing_level_signal_handler_task, ApplicationTelemetryClient, TelemetryClient, TelemetryConfig, @@ -14,7 +14,7 @@ fn main() -> Result<()> { let thread_handler = thread_builder.spawn(|| { tokio::runtime::Builder::new_multi_thread() .thread_stack_size(RT_DEFAULT_THREAD_STACK_SIZE) - .thread_name("bin/gobbler-tokio::runtime") + .thread_name("bin/rebaser-tokio::runtime") .enable_all() .build()? 
.block_on(async_main()) @@ -25,10 +25,10 @@ fn main() -> Result<()> { async fn async_main() -> Result<()> { color_eyre::install()?; let config = TelemetryConfig::builder() - .service_name("gobbler") + .service_name("rebaser") .service_namespace("si") .log_env_var_prefix("SI") - .app_modules(vec!["gobbler", "gobbler_server"]) + .app_modules(vec!["rebaser", "rebaser_server"]) .build()?; let telemetry = telemetry_application::init(config)?; let args = args::parse(); diff --git a/component/postgres/BUCK b/component/postgres/BUCK index 37108fd0be..9feb0256ce 100644 --- a/component/postgres/BUCK +++ b/component/postgres/BUCK @@ -28,7 +28,7 @@ docker_image( "--env", "POSTGRES_DB=si", "--env", - "POSTGRES_MULTIPLE_DBS=si_auth,si_test,si_test_dal,si_test_sdf_server,si_test_gobbler", + "POSTGRES_MULTIPLE_DBS=si_auth,si_test,si_test_dal,si_test_sdf_server,si_test_rebaser", "--publish", "5432:5432", ], diff --git a/dev/docker-compose.platform.yml b/dev/docker-compose.platform.yml index f44e54ca43..bd2b9894d0 100644 --- a/dev/docker-compose.platform.yml +++ b/dev/docker-compose.platform.yml @@ -9,7 +9,7 @@ services: - "PGPASSWORD=bugbear" - "POSTGRES_USER=si" - "POSTGRES_DB=si" - - "POSTGRES_MULTIPLE_DBS=si_auth,si_module_index,si_test,si_test_dal,si_test_sdf_server,si_test_gobbler" + - "POSTGRES_MULTIPLE_DBS=si_auth,si_module_index,si_test,si_test_dal,si_test_sdf_server,si_test_rebaser" ports: - "5432:5432" diff --git a/lib/gobbler-client/BUCK b/lib/rebaser-client/BUCK similarity index 85% rename from lib/gobbler-client/BUCK rename to lib/rebaser-client/BUCK index ac1969b699..b648c90c77 100644 --- a/lib/gobbler-client/BUCK +++ b/lib/rebaser-client/BUCK @@ -1,9 +1,9 @@ load("@prelude-si//:macros.bzl", "rust_library") rust_library( - name = "gobbler-client", + name = "rebaser-client", deps = [ - "//lib/gobbler-server:gobbler-server", + "//lib/rebaser-server:rebaser-server", "//lib/si-rabbitmq:si-rabbitmq", "//lib/telemetry-rs:telemetry", "//third-party/rust:remain", diff --git 
a/lib/gobbler-client/Cargo.toml b/lib/rebaser-client/Cargo.toml similarity index 82% rename from lib/gobbler-client/Cargo.toml rename to lib/rebaser-client/Cargo.toml index 2ca468f6b8..0a4cd4ff54 100644 --- a/lib/gobbler-client/Cargo.toml +++ b/lib/rebaser-client/Cargo.toml @@ -1,12 +1,12 @@ [package] -name = "gobbler-client" +name = "rebaser-client" version = "0.1.0" edition = "2021" rust-version = "1.64" publish = false [dependencies] -gobbler-server = { path = "../../lib/gobbler-server" } +rebaser-server = { path = "../../lib/rebaser-server" } remain = { workspace = true } serde = { workspace = true } serde_json = { workspace = true } diff --git a/lib/gobbler-client/src/client.rs b/lib/rebaser-client/src/client.rs similarity index 87% rename from lib/gobbler-client/src/client.rs rename to lib/rebaser-client/src/client.rs index 4df441d79d..8a3a196055 100644 --- a/lib/gobbler-client/src/client.rs +++ b/lib/rebaser-client/src/client.rs @@ -1,7 +1,7 @@ //! This module provides [`Client`], which is used for communicating with a running -//! gobbler [`Server`](gobbler_server::Server). +//! rebaser [`Server`](rebaser_server::Server). -use gobbler_server::{ManagementMessage, ManagementMessageAction, GOBBLER_MANAGEMENT_STREAM}; +use rebaser_server::{ManagementMessage, ManagementMessageAction, REBASER_MANAGEMENT_STREAM}; use serde::Serialize; use si_rabbitmq::{Consumer, ConsumerOffsetSpecification, Environment, Producer}; use std::collections::HashMap; @@ -10,9 +10,9 @@ use ulid::Ulid; use crate::{ClientError, ClientResult}; -const GOBBLER_REPLY_STREAM_PREFIX: &str = "gobbler-reply"; +const REBASER_REPLY_STREAM_PREFIX: &str = "rebaser-reply"; -/// A client for communicating with a running gobbler [`Server`](gobbler_server::Server). +/// A client for communicating with a running rebaser [`Server`](rebaser_server::Server). 
#[allow(missing_debug_implementations)] pub struct Client { management_stream: Stream, @@ -27,15 +27,15 @@ struct Stream { } impl Client { - /// Creates a new [`Client`] to communicate with a running gobbler - /// [`Server`](gobbler_server::Server). + /// Creates a new [`Client`] to communicate with a running rebaser + /// [`Server`](rebaser_server::Server). pub async fn new() -> ClientResult { let environment = Environment::new().await?; // First, create the reply stream. We do not check if it already exists since the reply // stream name is ULID-based. It's unlikely that there will be a collision. let unique_identifier = Ulid::new().to_string(); - let management_reply_stream = format!("gobbler-management-reply-{unique_identifier}"); + let management_reply_stream = format!("rebaser-management-reply-{unique_identifier}"); environment.create_stream(&management_reply_stream).await?; let management_reply_consumer = Consumer::new( &environment, @@ -44,10 +44,10 @@ impl Client { ) .await?; - // Name the producer using the reply stream, but produce to the primary gobbler stream. This + // Name the producer using the reply stream, but produce to the primary rebaser stream. This // may... will... uh... potentially?... be useful for tracing. let management_producer = - Producer::new(&environment, unique_identifier, GOBBLER_MANAGEMENT_STREAM).await?; + Producer::new(&environment, unique_identifier, REBASER_MANAGEMENT_STREAM).await?; Ok(Self { management_stream: Stream { @@ -59,7 +59,7 @@ impl Client { }) } - /// Send a message to a gobbler stream for a change set and block for a reply. + /// Send a message to a rebaser stream for a change set and block for a reply. 
pub async fn send_with_reply( &mut self, message: T, @@ -68,7 +68,7 @@ impl Client { let stream = self .streams .get_mut(&change_set_id) - .ok_or(ClientError::GobblerStreamForChangeSetNotFound)?; + .ok_or(ClientError::RebaserStreamForChangeSetNotFound)?; stream .producer .send_single(message, Some(stream.reply_stream.clone())) @@ -81,7 +81,7 @@ impl Client { Ok(None) } - /// Send a message to the management stream to open a gobbler loop and block for a reply. + /// Send a message to the management stream to open a rebaser loop and block for a reply. pub async fn send_management_open( &mut self, change_set_id: Ulid, @@ -101,7 +101,7 @@ impl Client { let change_set_stream: String = serde_json::from_value(contents)?; let environment = Environment::new().await?; - let reply_stream = format!("{GOBBLER_REPLY_STREAM_PREFIX}-{change_set_id}"); + let reply_stream = format!("{REBASER_REPLY_STREAM_PREFIX}-{change_set_id}"); environment.create_stream(&reply_stream).await?; // FIXME(nick): name the producer properly. @@ -127,7 +127,7 @@ impl Client { Ok(None) } - /// Send a message to the management stream to close a gobbler loop and do not wait for a reply. + /// Send a message to the management stream to close a rebaser loop and do not wait for a reply. pub async fn send_management_close(&mut self, change_set_id: Ulid) -> ClientResult<()> { self.management_stream .producer diff --git a/lib/gobbler-client/src/lib.rs b/lib/rebaser-client/src/lib.rs similarity index 85% rename from lib/gobbler-client/src/lib.rs rename to lib/rebaser-client/src/lib.rs index bdcd625045..dab163217f 100644 --- a/lib/gobbler-client/src/lib.rs +++ b/lib/rebaser-client/src/lib.rs @@ -1,5 +1,5 @@ -//! This crate provides the gobbler [`Client`], which is used for communicating with a running -//! gobbler [`Server`](gobbler_server::Server). +//! This crate provides the rebaser [`Client`], which is used for communicating with a running +//! rebaser [`Server`](rebaser_server::Server). 
#![warn( missing_debug_implementations, @@ -35,10 +35,10 @@ use thiserror::Error; #[remain::sorted] #[derive(Debug, Error)] pub enum ClientError { - #[error("gobbler stream for change set not found")] - GobblerStreamForChangeSetNotFound, #[error("si rabbitmq error: {0}")] Rabbit(#[from] RabbitError), + #[error("rebaser stream for change set not found")] + RebaserStreamForChangeSetNotFound, #[error("serde json error: {0}")] SerdeJson(#[from] serde_json::Error), } @@ -49,7 +49,7 @@ pub type ClientResult = Result; #[cfg(test)] mod tests { use super::*; - use gobbler_server::{ConfigBuilder, Server}; + use rebaser_server::{ConfigBuilder, Server}; use tokio::test; use ulid::Ulid; @@ -85,13 +85,13 @@ mod tests { let _new_stream_to_produce_to = client .send_management_open(change_set_id) .await - .expect("could not create new gobbler loop for change set") + .expect("could not create new rebaser loop for change set") .expect("no message returned"); client .send_management_close(change_set_id) .await - .expect("could not close the gobbler loop for change set"); + .expect("could not close the rebaser loop for change set"); client.close().await; } @@ -104,7 +104,7 @@ mod tests { let _new_stream_to_produce_to = client .send_management_open(change_set_id) .await - .expect("could not create new gobbler loop for change set") + .expect("could not create new rebaser loop for change set") .expect("no message returned"); let contents = "MUSTANG GTD"; @@ -118,7 +118,7 @@ mod tests { client .send_management_close(change_set_id) .await - .expect("could not close the gobbler loop for change set"); + .expect("could not close the rebaser loop for change set"); client.close().await; } diff --git a/lib/gobbler-server/BUCK b/lib/rebaser-server/BUCK similarity index 98% rename from lib/gobbler-server/BUCK rename to lib/rebaser-server/BUCK index 7b6c9e8758..5b97ec8892 100644 --- a/lib/gobbler-server/BUCK +++ b/lib/rebaser-server/BUCK @@ -1,7 +1,7 @@ load("@prelude-si//:macros.bzl", 
"rust_library") rust_library( - name = "gobbler-server", + name = "rebaser-server", deps = [ "//lib/buck2-resources:buck2-resources", "//lib/dal:dal", diff --git a/lib/gobbler-server/Cargo.toml b/lib/rebaser-server/Cargo.toml similarity index 97% rename from lib/gobbler-server/Cargo.toml rename to lib/rebaser-server/Cargo.toml index e095c4a75b..9492702409 100644 --- a/lib/gobbler-server/Cargo.toml +++ b/lib/rebaser-server/Cargo.toml @@ -1,5 +1,5 @@ [package] -name = "gobbler-server" +name = "rebaser-server" version = "0.1.0" edition = "2021" rust-version = "1.64" diff --git a/lib/gobbler-server/src/config.rs b/lib/rebaser-server/src/config.rs similarity index 98% rename from lib/gobbler-server/src/config.rs rename to lib/rebaser-server/src/config.rs index 71f998211f..6fdde41193 100644 --- a/lib/gobbler-server/src/config.rs +++ b/lib/rebaser-server/src/config.rs @@ -128,7 +128,7 @@ impl TryFrom for Config { } fn default_cyclone_encryption_key_path() -> String { - "/run/gobbler/cyclone_encryption.key".to_string() + "/run/rebaser/cyclone_encryption.key".to_string() } fn default_recreate_management_stream() -> bool { diff --git a/lib/gobbler-server/src/lib.rs b/lib/rebaser-server/src/lib.rs similarity index 71% rename from lib/gobbler-server/src/lib.rs rename to lib/rebaser-server/src/lib.rs index 8cea39cd5b..d048be225b 100644 --- a/lib/gobbler-server/src/lib.rs +++ b/lib/rebaser-server/src/lib.rs @@ -1,4 +1,4 @@ -//! This crate provides the gobbler [`Server`]. +//! This crate provides the rebaser [`Server`]. #![warn( missing_debug_implementations, @@ -36,23 +36,23 @@ pub use si_settings::StandardConfigFile; use serde::{Deserialize, Serialize}; use ulid::Ulid; -/// Stream to manage gobbler consumer loops. -pub const GOBBLER_MANAGEMENT_STREAM: &str = "gobbler-management"; +/// Stream to manage rebaser consumer loops. +pub const REBASER_MANAGEMENT_STREAM: &str = "rebaser-management"; -/// Stream prefix for gobbler consumer loops. 
-pub const GOBBLER_STREAM_PREFIX: &str = "gobbler"; +/// Stream prefix for rebaser consumer loops. +pub const REBASER_STREAM_PREFIX: &str = "rebaser"; -/// The action for the gobbler management loop. +/// The action for the rebaser management loop. #[derive(Debug, Serialize, Deserialize)] pub enum ManagementMessageAction { - /// Close the inner gobbler loop for a change set. If it has already been closed, this is a + /// Close the inner rebaser loop for a change set. If it has already been closed, this is a /// no-op. Close, - /// Open the inner gobbler loop for a change set. If one already exists, it is a no-op. + /// Open the inner rebaser loop for a change set. If one already exists, it is a no-op. Open, } -/// The message that the gobbler management consumer expects in the server. +/// The message that the rebaser management consumer expects in the server. #[derive(Debug, Serialize, Deserialize)] pub struct ManagementMessage { /// The ID of the change set wishing to be operated on. diff --git a/lib/gobbler-server/src/server.rs b/lib/rebaser-server/src/server.rs similarity index 91% rename from lib/gobbler-server/src/server.rs rename to lib/rebaser-server/src/server.rs index 59a88ef4bf..02d3711692 100644 --- a/lib/gobbler-server/src/server.rs +++ b/lib/rebaser-server/src/server.rs @@ -25,8 +25,8 @@ use tokio_stream::wrappers::UnboundedReceiverStream; use ulid::Ulid; use veritech_client::{Client as VeritechClient, CycloneEncryptionKey, CycloneEncryptionKeyError}; -use crate::GOBBLER_STREAM_PREFIX; -use crate::{Config, GOBBLER_MANAGEMENT_STREAM}; +use crate::REBASER_STREAM_PREFIX; +use crate::{Config, REBASER_MANAGEMENT_STREAM}; use crate::{ManagementMessage, ManagementMessageAction}; #[allow(missing_docs)] @@ -81,7 +81,7 @@ impl From for ServerError { type ServerResult = Result; -/// The [`Server`] for managing gobbler tasks. +/// The [`Server`] for managing rebaser tasks. 
#[allow(missing_debug_implementations)] pub struct Server { encryption_key: Arc, @@ -105,7 +105,7 @@ pub struct Server { impl Server { /// Build a [`Server`] from a given [`Config`]. - #[instrument(name = "gobbler.init.from_config", skip_all)] + #[instrument(name = "rebaser.init.from_config", skip_all)] pub async fn from_config(config: Config) -> ServerResult { dal::init()?; @@ -127,7 +127,7 @@ impl Server { } /// Build a [`Server`] from information provided via companion services. - #[instrument(name = "gobbler.init.from_services", skip_all)] + #[instrument(name = "rebaser.init.from_services", skip_all)] pub fn from_services( encryption_key: Arc, nats: NatsClient, @@ -196,26 +196,26 @@ impl Server { Ok(Arc::new(CycloneEncryptionKey::load(path).await?)) } - #[instrument(name = "gobbler.init.connect_to_nats", skip_all)] + #[instrument(name = "rebaser.init.connect_to_nats", skip_all)] async fn connect_to_nats(nats_config: &NatsConfig) -> ServerResult { let client = NatsClient::new(nats_config).await?; debug!("successfully connected nats client"); Ok(client) } - #[instrument(name = "gobbler.init.create_pg_pool", skip_all)] + #[instrument(name = "rebaser.init.create_pg_pool", skip_all)] async fn create_pg_pool(pg_pool_config: &PgPoolConfig) -> ServerResult { let pool = PgPool::new(pg_pool_config).await?; debug!("successfully started pg pool (note that not all connections may be healthy)"); Ok(pool) } - #[instrument(name = "gobbler.init.create_veritech_client", skip_all)] + #[instrument(name = "rebaser.init.create_veritech_client", skip_all)] fn create_veritech_client(nats: NatsClient) -> VeritechClient { VeritechClient::new(nats) } - #[instrument(name = "gobbler.init.create_job_processor", skip_all)] + #[instrument(name = "rebaser.init.create_job_processor", skip_all)] fn create_job_processor(nats: NatsClient) -> Box { Box::new(NatsProcessor::new(nats)) as Box } @@ -294,15 +294,15 @@ async fn consume_stream( ); let _ctx_builder = DalContext::builder(services_context, 
false); - // Meta: we can only have one gobbler instance right now due to https://github.com/rabbitmq/rabbitmq-stream-rust-client/issues/130 + // Meta: we can only have one rebaser instance right now due to https://github.com/rabbitmq/rabbitmq-stream-rust-client/issues/130 // // 1) subscribe to "next" for changeset close/create events --> stream for ChangeSetClose or ChangeSetOpen - // --> "gobbler-management" + // --> "rebaser-management" // 2) query db for all named, open changesets // 3) start a subscription for each result for step 2 - // --> "gobbler-" - // 1:N --> "gobbler--reply--" - // (e.g. "gobbler--reply-sdf-") + // --> "rebaser-" + // 1:N --> "rebaser--reply--" + // (e.g. "rebaser--reply-sdf-") // note: requester deletes stream upon reply // // NOTE: QUERY DB FOR OFFSET NUMBER OR GO TO FIRST SPECIFICATION @@ -310,18 +310,18 @@ async fn consume_stream( // Prepare the environment and management stream. let environment = Environment::new().await?; if recreate_management_stream { - environment.delete_stream(GOBBLER_MANAGEMENT_STREAM).await?; + environment.delete_stream(REBASER_MANAGEMENT_STREAM).await?; } - environment.create_stream(GOBBLER_MANAGEMENT_STREAM).await?; + environment.create_stream(REBASER_MANAGEMENT_STREAM).await?; let mut management_consumer = Consumer::new( &environment, - GOBBLER_MANAGEMENT_STREAM, + REBASER_MANAGEMENT_STREAM, ConsumerOffsetSpecification::Next, ) .await?; let management_handle = management_consumer.handle(); - let mut gobbler_handles: HashMap = HashMap::new(); + let mut rebaser_handles: HashMap = HashMap::new(); while let Some(management_delivery) = management_consumer.next().await? 
{ let contents = management_delivery @@ -333,7 +333,7 @@ async fn consume_stream( let mm: ManagementMessage = serde_json::from_value(contents)?; match mm.action { - ManagementMessageAction::Close => match gobbler_handles.remove(&mm.change_set_id) { + ManagementMessageAction::Close => match rebaser_handles.remove(&mm.change_set_id) { Some((stream, handle)) => { if let Err(e) = handle.close().await { error!("{e}"); @@ -348,7 +348,7 @@ async fn consume_stream( ), }, ManagementMessageAction::Open => { - let new_stream = format!("{GOBBLER_STREAM_PREFIX}-{}", mm.change_set_id); + let new_stream = format!("{REBASER_STREAM_PREFIX}-{}", mm.change_set_id); let stream_already_exists = environment.create_stream(&new_stream).await?; // Only create the new stream if it does not already exist. @@ -357,9 +357,9 @@ async fn consume_stream( Consumer::new(&environment, &new_stream, ConsumerOffsetSpecification::Next) .await?; let handle = consumer.handle(); - gobbler_handles.insert(mm.change_set_id, (new_stream.clone(), handle)); + rebaser_handles.insert(mm.change_set_id, (new_stream.clone(), handle)); - tokio::spawn(gobbler_loop_infallible_wrapper(consumer)); + tokio::spawn(rebaser_loop_infallible_wrapper(consumer)); } // Return the requested stream and then close the producer. @@ -370,7 +370,7 @@ async fn consume_stream( } } - for (_, (stream, handle)) in gobbler_handles.drain() { + for (_, (stream, handle)) in rebaser_handles.drain() { if let Err(e) = handle.close().await { error!("{e}"); } @@ -384,13 +384,13 @@ async fn consume_stream( Ok(()) } -async fn gobbler_loop_infallible_wrapper(consumer: Consumer) { - if let Err(e) = gobbler_loop(consumer).await { +async fn rebaser_loop_infallible_wrapper(consumer: Consumer) { + if let Err(e) = rebaser_loop(consumer).await { dbg!(e); } } -async fn gobbler_loop(mut consumer: Consumer) -> ServerResult<()> { +async fn rebaser_loop(mut consumer: Consumer) -> ServerResult<()> { // Create an environment for reply streams. 
let environment = Environment::new().await?; while let Some(delivery) = consumer.next().await? { diff --git a/lib/gobbler-server/tests/integration.rs b/lib/rebaser-server/tests/integration.rs similarity index 75% rename from lib/gobbler-server/tests/integration.rs rename to lib/rebaser-server/tests/integration.rs index 389866fa81..1bfe9c6abf 100644 --- a/lib/gobbler-server/tests/integration.rs +++ b/lib/rebaser-server/tests/integration.rs @@ -4,6 +4,6 @@ //! SI_TEST_BUILTIN_SCHEMAS=none //! ``` -const TEST_PG_DBNAME: &str = "si_test_gobbler"; +const TEST_PG_DBNAME: &str = "si_test_rebaser"; mod integration_test; diff --git a/lib/gobbler-server/tests/integration_test/connection.rs b/lib/rebaser-server/tests/integration_test/connection.rs similarity index 92% rename from lib/gobbler-server/tests/integration_test/connection.rs rename to lib/rebaser-server/tests/integration_test/connection.rs index f6c2e4f290..f070464869 100644 --- a/lib/gobbler-server/tests/integration_test/connection.rs +++ b/lib/rebaser-server/tests/integration_test/connection.rs @@ -1,7 +1,7 @@ use dal::workspace_snapshot::change_set::ChangeSet; use dal::{DalContext, WorkspaceSnapshot}; use si_rabbitmq::Environment; -use si_test_macros::gobbler_test as test; +use si_test_macros::rebaser_test as test; #[test] async fn connect_to_database(ctx: &DalContext) { diff --git a/lib/gobbler-server/tests/integration_test/mod.rs b/lib/rebaser-server/tests/integration_test/mod.rs similarity index 100% rename from lib/gobbler-server/tests/integration_test/mod.rs rename to lib/rebaser-server/tests/integration_test/mod.rs diff --git a/lib/si-test-macros/src/lib.rs b/lib/si-test-macros/src/lib.rs index 4fbd3ed262..0afaf03f03 100644 --- a/lib/si-test-macros/src/lib.rs +++ b/lib/si-test-macros/src/lib.rs @@ -391,11 +391,11 @@ pub fn sdf_test(attr: TokenStream, input: TokenStream) -> TokenStream { sdf_test::expand(item, args).into() } -/// A procedural macro which helps to streamline, setup, and manage gobbler-related 
tests. +/// A procedural macro which helps to streamline, setup, and manage rebaser-related tests. /// /// Currently, this macro is equivalent to [`dal_test`](dal_test()). #[proc_macro_attribute] -pub fn gobbler_test(attr: TokenStream, input: TokenStream) -> TokenStream { +pub fn rebaser_test(attr: TokenStream, input: TokenStream) -> TokenStream { let args = parse_macro_input!(attr as Args); let item = parse_macro_input!(input as ItemFn); dal_test::expand(item, args).into() From 0729b9de629e6b7f3864cc7f2f317509e0b565a1 Mon Sep 17 00:00:00 2001 From: Nick Gerace Date: Thu, 7 Sep 2023 14:53:03 -0400 Subject: [PATCH 19/92] Initial round trip loop of rebaser using graph logic Primary: - Use graph logic in the rebaser via a round-trip loop from a dal integration test - Add rebaser-core to ensure that rebaser-client never relies on the dal - Ensure change set loop deliveries reply to the clients if a failure occurs when processing a delivery - Add timeouts when clients are waiting for rebaser replies - Add rebaser-server to dal-test and si-test-macros Secondary: - Rename temporary change set struct to change set pointers - Add change set pointers table, but allow for unit tests to continue using in-memory versions (i.e. 
use "new_local()" for construction) - Add "query_none" for queries where we expect no rows to be returned Signed-off-by: Nick Gerace --- Cargo.lock | 19 ++- Cargo.toml | 1 + lib/dal-test/BUCK | 1 + lib/dal-test/Cargo.toml | 1 + lib/dal-test/src/lib.rs | 28 ++++ lib/dal/BUCK | 3 +- lib/dal/Cargo.toml | 2 + lib/dal/src/change_set_pointer.rs | 125 ++++++++++++++ lib/dal/src/lib.rs | 1 + .../migrations/U3002__change_set_pointers.sql | 19 +++ .../src/queries/change_set_pointers/find.sql | 3 + .../src/queries/workspace_snapshot/find.sql | 3 + .../find_for_change_set.sql | 5 + lib/dal/src/workspace_snapshot.rs | 98 ++++++++++- lib/dal/src/workspace_snapshot/change_set.rs | 54 ------ lib/dal/src/workspace_snapshot/conflict.rs | 4 +- lib/dal/src/workspace_snapshot/edge_weight.rs | 17 +- lib/dal/src/workspace_snapshot/graph.rs | 101 +++++------ .../src/workspace_snapshot/lamport_clock.rs | 4 +- lib/dal/src/workspace_snapshot/node_weight.rs | 29 ++-- .../node_weight/content_node_weight.rs | 19 ++- .../node_weight/ordering_node_weight.rs | 31 ++-- lib/dal/src/workspace_snapshot/update.rs | 3 +- .../src/workspace_snapshot/vector_clock.rs | 28 ++-- .../tests/integration_test/internal/mod.rs | 1 + .../integration_test/internal/rebaser.rs | 96 +++++++++++ lib/rebaser-client/BUCK | 1 - lib/rebaser-client/Cargo.toml | 3 +- lib/rebaser-client/src/client.rs | 137 +++++++++------ lib/rebaser-client/src/lib.rs | 87 +--------- lib/rebaser-core/BUCK | 16 ++ lib/rebaser-core/Cargo.toml | 15 ++ lib/rebaser-core/src/lib.rs | 131 +++++++++++++++ lib/rebaser-server/Cargo.toml | 1 + lib/rebaser-server/src/config.rs | 8 +- lib/rebaser-server/src/lib.rs | 29 +--- lib/rebaser-server/src/server.rs | 37 +++-- .../src/server/change_set_loop.rs | 126 ++++++++++++++ .../src/server/management_loop.rs | 157 ++++++++++++++++++ .../tests/integration_test/connection.rs | 6 +- lib/si-data-pg/src/lib.rs | 34 ++++ lib/si-rabbitmq/src/delivery.rs | 2 +- lib/si-test-macros/src/dal_test.rs | 36 ++++ 
lib/si-test-macros/src/expand.rs | 57 +++++++ lib/si-test-macros/src/lib.rs | 4 + lib/si-test-macros/src/sdf_test.rs | 36 ++++ 46 files changed, 1275 insertions(+), 344 deletions(-) create mode 100644 lib/dal/src/change_set_pointer.rs create mode 100644 lib/dal/src/migrations/U3002__change_set_pointers.sql create mode 100644 lib/dal/src/queries/change_set_pointers/find.sql create mode 100644 lib/dal/src/queries/workspace_snapshot/find.sql create mode 100644 lib/dal/src/queries/workspace_snapshot/find_for_change_set.sql delete mode 100644 lib/dal/src/workspace_snapshot/change_set.rs create mode 100644 lib/dal/tests/integration_test/internal/rebaser.rs create mode 100644 lib/rebaser-core/BUCK create mode 100644 lib/rebaser-core/Cargo.toml create mode 100644 lib/rebaser-core/src/lib.rs create mode 100644 lib/rebaser-server/src/server/change_set_loop.rs create mode 100644 lib/rebaser-server/src/server/management_loop.rs diff --git a/Cargo.lock b/Cargo.lock index 5119864ccc..869136b84b 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1290,6 +1290,8 @@ dependencies = [ "postgres-types", "pretty_assertions_sorted", "rand 0.8.5", + "rebaser-client", + "rebaser-server", "refinery", "regex", "remain", @@ -1328,6 +1330,7 @@ dependencies = [ "module-index-client", "names", "pinga-server", + "rebaser-server", "remain", "serde", "serde_json", @@ -4087,7 +4090,8 @@ dependencies = [ name = "rebaser-client" version = "0.1.0" dependencies = [ - "rebaser-server", + "log", + "rebaser-core", "remain", "serde", "serde_json", @@ -4098,6 +4102,18 @@ dependencies = [ "ulid", ] +[[package]] +name = "rebaser-core" +version = "0.1.0" +dependencies = [ + "pretty_assertions_sorted", + "rebaser-client", + "rebaser-server", + "serde", + "tokio", + "ulid", +] + [[package]] name = "rebaser-server" version = "0.1.0" @@ -4109,6 +4125,7 @@ dependencies = [ "futures", "nats-subscriber", "pretty_assertions_sorted", + "rebaser-core", "remain", "serde", "serde_json", diff --git a/Cargo.toml b/Cargo.toml index 
5dc282f13f..a02dc0e3ef 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -24,6 +24,7 @@ members = [ "lib/dal-test", "lib/deadpool-cyclone", "lib/rebaser-client", + "lib/rebaser-core", "lib/rebaser-server", "lib/module-index-client", "lib/module-index-server", diff --git a/lib/dal-test/BUCK b/lib/dal-test/BUCK index 13228a2cd5..8267764a6b 100644 --- a/lib/dal-test/BUCK +++ b/lib/dal-test/BUCK @@ -8,6 +8,7 @@ rust_library( "//lib/dal:dal", "//lib/module-index-client:module-index-client", "//lib/pinga-server:pinga-server", + "//lib/rebaser-server:rebaser-server", "//lib/si-crypto:si-crypto", "//lib/si-data-nats:si-data-nats", "//lib/si-data-pg:si-data-pg", diff --git a/lib/dal-test/Cargo.toml b/lib/dal-test/Cargo.toml index 7a59d21518..6fd7749ca0 100644 --- a/lib/dal-test/Cargo.toml +++ b/lib/dal-test/Cargo.toml @@ -16,6 +16,7 @@ lazy_static = { workspace = true } module-index-client = { path = "../../lib/module-index-client" } names = { workspace = true } pinga-server = { path = "../../lib/pinga-server" } +rebaser-server = { path = "../../lib/rebaser-server" } remain = { workspace = true } serde = { workspace = true } serde_json = { workspace = true } diff --git a/lib/dal-test/src/lib.rs b/lib/dal-test/src/lib.rs index cfbabf23b4..4b4842f709 100644 --- a/lib/dal-test/src/lib.rs +++ b/lib/dal-test/src/lib.rs @@ -448,6 +448,31 @@ pub fn pinga_server(services_context: &ServicesContext) -> Result Result { + let _config: rebaser_server::Config = { + let mut config_file = rebaser_server::ConfigFile::default(); + rebaser_server::detect_and_configure_development(&mut config_file) + .wrap_err("failed to detect and configure Rebaser ConfigFile")?; + config_file + .try_into() + .wrap_err("failed to build Rebaser server config")? 
+ }; + + let server = rebaser_server::Server::from_services( + services_context.encryption_key(), + services_context.nats_conn().clone(), + services_context.pg_pool().clone(), + services_context.veritech().clone(), + services_context.job_processor(), + false, + ) + .wrap_err("failed to create Rebaser server")?; + + Ok(server) +} + /// Configures and builds a [`veritech_server::Server`] suitable for running alongside DAL /// object-related tests. pub async fn veritech_server_for_uds_cyclone( @@ -500,6 +525,9 @@ async fn global_setup(test_context_builer: TestContextBuilder) -> Result<()> { let pinga_server_handle = pinga_server.shutdown_handle(); tokio::spawn(pinga_server.run()); + // Do not start up the Rebaser server since we do not need it for initial migrations. + info!("skipping Rebaser server startup and shutdown for initial migrations"); + // Start up a Veritech server as a task exclusively to allow the migrations to run info!("starting Veritech server for initial migrations"); let veritech_server = veritech_server_for_uds_cyclone(test_context.config.nats.clone()).await?; diff --git a/lib/dal/BUCK b/lib/dal/BUCK index 21a449f516..36bff71bae 100644 --- a/lib/dal/BUCK +++ b/lib/dal/BUCK @@ -10,6 +10,7 @@ rust_library( "//lib/council-server:council-server", "//lib/nats-subscriber:nats-subscriber", "//lib/object-tree:object-tree", + "//lib/rebaser-client:rebaser-client", "//lib/si-crypto:si-crypto", "//lib/si-data-nats:si-data-nats", "//lib/si-data-pg:si-data-pg", @@ -21,8 +22,8 @@ rust_library( "//third-party/rust:async-trait", "//third-party/rust:base64", "//third-party/rust:blake3", - "//third-party/rust:ciborium", "//third-party/rust:chrono", + "//third-party/rust:ciborium", "//third-party/rust:convert_case", "//third-party/rust:derive_more", "//third-party/rust:diff", diff --git a/lib/dal/Cargo.toml b/lib/dal/Cargo.toml index df858e42ba..fb192e1bc0 100644 --- a/lib/dal/Cargo.toml +++ b/lib/dal/Cargo.toml @@ -58,4 +58,6 @@ buck2-resources = { path = 
"../../lib/buck2-resources" } dal-test = { path = "../../lib/dal-test" } itertools = { workspace = true } pretty_assertions_sorted = { workspace = true } +rebaser-client = { path = "../../lib/rebaser-client" } +rebaser-server = { path = "../../lib/rebaser-server" } tempfile = { workspace = true } diff --git a/lib/dal/src/change_set_pointer.rs b/lib/dal/src/change_set_pointer.rs new file mode 100644 index 0000000000..c1c10a4877 --- /dev/null +++ b/lib/dal/src/change_set_pointer.rs @@ -0,0 +1,125 @@ +//! The sequel to [`ChangeSets`](crate::ChangeSet). Coming to an SI instance near you! + +use std::sync::{Arc, Mutex}; + +use serde::{Deserialize, Serialize}; +use serde_json::Value; +use si_data_pg::PgError; +use telemetry::prelude::*; +use thiserror::Error; +use ulid::{Generator, Ulid}; + +use crate::workspace_snapshot::WorkspaceSnapshotId; +use crate::{pk, standard_model, DalContext, StandardModelError, Timestamp, TransactionsError}; + +const FIND: &str = include_str!("queries/change_set_pointers/find.sql"); + +#[remain::sorted] +#[derive(Debug, Error)] +pub enum ChangeSetPointerError { + #[error("ulid monotonic error: {0}")] + Monotonic(#[from] ulid::MonotonicError), + #[error("mutex error: {0}")] + Mutex(String), + #[error("pg error: {0}")] + Pg(#[from] PgError), + #[error("serde json error: {0}")] + SerdeJson(#[from] serde_json::Error), + #[error("standard model error: {0}")] + StandardModel(#[from] StandardModelError), + #[error("transactions error: {0}")] + Transactions(#[from] TransactionsError), +} + +pub type ChangeSetPointerResult = Result; + +pk!(ChangeSetPointerId); + +#[derive(Clone, Serialize, Deserialize)] +pub struct ChangeSetPointer { + pub id: ChangeSetPointerId, + #[serde(flatten)] + pub timestamp: Timestamp, + #[serde(skip)] + pub generator: Arc>, + pub workspace_snapshot_id: Option, + pub name: String, +} + +impl ChangeSetPointer { + pub fn new_local() -> ChangeSetPointerResult { + let mut generator = Generator::new(); + let id = 
generator.generate()?; + + Ok(Self { + id: id.into(), + timestamp: Timestamp::now(), + generator: Arc::new(Mutex::new(generator)), + workspace_snapshot_id: None, + name: "".to_string(), + }) + } + + pub async fn new(ctx: &DalContext, name: impl AsRef) -> ChangeSetPointerResult { + let name = name.as_ref(); + let row = ctx + .txns() + .await? + .pg() + .query_one( + "SELECT change_set_pointer_create_v1($1) AS object", + &[&name], + ) + .await?; + let json: Value = row.try_get("object")?; + let object: Self = serde_json::from_value(json)?; + Ok(object) + } + + pub fn generate_ulid(&self) -> ChangeSetPointerResult { + self.generator + .lock() + .map_err(|e| ChangeSetPointerError::Mutex(e.to_string()))? + .generate() + .map_err(Into::into) + } + + pub async fn update_pointer( + &mut self, + ctx: &DalContext, + workspace_snapshot_id: WorkspaceSnapshotId, + ) -> ChangeSetPointerResult<()> { + ctx.txns() + .await? + .pg() + .query_none( + "UPDATE change_set_pointers AS object SET workspace_snapshot_id = $2 WHERE id = $1", + &[&self.id, &workspace_snapshot_id], + ) + .await?; + self.workspace_snapshot_id = Some(workspace_snapshot_id); + Ok(()) + } + + #[instrument(skip_all)] + pub async fn find( + ctx: &DalContext, + change_set_pointer_id: ChangeSetPointerId, + ) -> ChangeSetPointerResult { + let row = ctx + .txns() + .await? + .pg() + .query_one(FIND, &[&change_set_pointer_id]) + .await?; + Ok(standard_model::object_from_row(row)?) 
+ } +} + +impl std::fmt::Debug for ChangeSetPointer { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + f.debug_struct("ChangeSetPointer") + .field("id", &self.id.to_string()) + .finish() + } +} diff --git a/lib/dal/src/lib.rs b/lib/dal/src/lib.rs index 13dcad2a55..f409b197eb 100644 --- a/lib/dal/src/lib.rs +++ b/lib/dal/src/lib.rs @@ -131,6 +131,7 @@ pub mod attribute; pub mod authentication_prototype; pub mod builtins; pub mod change_set; +pub mod change_set_pointer; pub mod change_status; pub mod code_view; pub mod component; diff --git a/lib/dal/src/migrations/U3002__change_set_pointers.sql b/lib/dal/src/migrations/U3002__change_set_pointers.sql new file mode 100644 index 0000000000..d5e2bae19d --- /dev/null +++ b/lib/dal/src/migrations/U3002__change_set_pointers.sql @@ -0,0 +1,19 @@ +CREATE TABLE change_set_pointers +( + id ident NOT NULL DEFAULT ident_create_v1(), + created_at timestamp with time zone NOT NULL DEFAULT CLOCK_TIMESTAMP(), + updated_at timestamp with time zone NOT NULL DEFAULT CLOCK_TIMESTAMP(), + name text NOT NULL, + workspace_snapshot_id ident +); + +CREATE UNIQUE INDEX unique_change_set_pointers ON change_set_pointers (id); + +CREATE OR REPLACE FUNCTION change_set_pointer_create_v1( + this_name text +) RETURNS jsonb AS +$$ +INSERT INTO change_set_pointers (name) +VALUES (this_name) +RETURNING row_to_json(change_set_pointers) AS object; +$$ LANGUAGE SQL VOLATILE; diff --git a/lib/dal/src/queries/change_set_pointers/find.sql b/lib/dal/src/queries/change_set_pointers/find.sql new file mode 100644 index 0000000000..d660730134 --- /dev/null +++ b/lib/dal/src/queries/change_set_pointers/find.sql @@ -0,0 +1,3 @@ +SELECT row_to_json(change_set_pointers.*) AS object +FROM change_set_pointers +WHERE change_set_pointers.id = $1 diff --git a/lib/dal/src/queries/workspace_snapshot/find.sql b/lib/dal/src/queries/workspace_snapshot/find.sql new file mode 100644 index 0000000000..6b0e2840ed --- /dev/null +++ 
b/lib/dal/src/queries/workspace_snapshot/find.sql @@ -0,0 +1,3 @@ +SELECT row_to_json(workspace_snapshots.*) AS object +FROM workspace_snapshots + WHERE workspace_snapshots.id = $1 diff --git a/lib/dal/src/queries/workspace_snapshot/find_for_change_set.sql b/lib/dal/src/queries/workspace_snapshot/find_for_change_set.sql new file mode 100644 index 0000000000..260a220772 --- /dev/null +++ b/lib/dal/src/queries/workspace_snapshot/find_for_change_set.sql @@ -0,0 +1,5 @@ +SELECT row_to_json(workspace_snapshots.*) AS object +FROM workspace_snapshots +JOIN change_set_pointers + ON change_set_pointers.id = $1 + AND change_set_pointers.workspace_snapshot_id = workspace_snapshots.id \ No newline at end of file diff --git a/lib/dal/src/workspace_snapshot.rs b/lib/dal/src/workspace_snapshot.rs index 79abe042c5..6444044329 100644 --- a/lib/dal/src/workspace_snapshot.rs +++ b/lib/dal/src/workspace_snapshot.rs @@ -22,7 +22,6 @@ // clippy::missing_panics_doc // )] -pub mod change_set; pub mod conflict; pub mod edge_weight; pub mod graph; @@ -34,15 +33,25 @@ pub mod vector_clock; use petgraph::prelude::*; use serde::{Deserialize, Serialize}; use serde_json::Value; -use si_data_pg::PgError; +use si_data_pg::{PgError, PgRow}; +use telemetry::prelude::*; use thiserror::Error; use ulid::Ulid; +use crate::change_set_pointer::{ChangeSetPointer, ChangeSetPointerError, ChangeSetPointerId}; +use crate::workspace_snapshot::conflict::Conflict; +use crate::workspace_snapshot::edge_weight::EdgeWeight; +use crate::workspace_snapshot::node_weight::NodeWeight; +use crate::workspace_snapshot::update::Update; use crate::{ + pk, standard_model, workspace_snapshot::{graph::WorkspaceSnapshotGraphError, node_weight::NodeWeightError}, DalContext, StandardModelError, Timestamp, TransactionsError, WorkspaceSnapshotGraph, }; -use change_set::{ChangeSet, ChangeSetError, ChangeSetId}; + +const FIND: &str = include_str!("queries/workspace_snapshot/find.sql"); +const FIND_FOR_CHANGE_SET: &str = + 
include_str!("queries/workspace_snapshot/find_for_change_set.sql"); #[remain::sorted] #[derive(Error, Debug)] @@ -69,11 +78,11 @@ pub enum WorkspaceSnapshotError { pub type WorkspaceSnapshotResult = Result; -pub type WorkspaceSnapshotId = Ulid; +pk!(WorkspaceSnapshotId); #[derive(Debug, Serialize, Deserialize)] pub struct WorkspaceSnapshot { - id: WorkspaceSnapshotId, + pub id: WorkspaceSnapshotId, #[serde(flatten)] timestamp: Timestamp, snapshot: Value, @@ -82,7 +91,10 @@ pub struct WorkspaceSnapshot { } impl WorkspaceSnapshot { - pub async fn new(ctx: &DalContext, change_set: &ChangeSet) -> WorkspaceSnapshotResult { + pub async fn initial( + ctx: &DalContext, + change_set: &ChangeSetPointer, + ) -> WorkspaceSnapshotResult { let snapshot = WorkspaceSnapshotGraph::new(change_set)?; let serialized_snapshot = serde_json::to_value(&snapshot)?; @@ -100,7 +112,7 @@ impl WorkspaceSnapshot { Ok(object) } - pub async fn write(mut self, ctx: &DalContext) -> WorkspaceSnapshotResult { + pub async fn write(&mut self, ctx: &DalContext) -> WorkspaceSnapshotResult<()> { let working_copy = self.working_copy()?; working_copy.cleanup(); @@ -114,9 +126,14 @@ impl WorkspaceSnapshot { &[&serialized_snapshot], ) .await?; + let json: Value = row.try_get("object")?; let object: WorkspaceSnapshot = serde_json::from_value(json)?; - Ok(object) + self.id = object.id; + self.timestamp = object.timestamp; + self.snapshot = object.snapshot; + + Ok(()) } fn working_copy(&mut self) -> WorkspaceSnapshotResult<&mut WorkspaceSnapshotGraph> { @@ -127,4 +144,69 @@ impl WorkspaceSnapshot { .as_mut() .ok_or(WorkspaceSnapshotError::WorkspaceSnapshotGraphMissing) } + + fn snapshot(&self) -> WorkspaceSnapshotResult { + Ok(serde_json::from_value(self.snapshot.clone())?) + } + + pub fn add_node(&mut self, node: NodeWeight) -> WorkspaceSnapshotResult { + Ok(self.working_copy()?.add_node(node)?) 
+ } + + pub fn add_edge( + &mut self, + change_set: &ChangeSetPointer, + from_node_index: NodeIndex, + edge_weight: EdgeWeight, + to_node_index: NodeIndex, + ) -> WorkspaceSnapshotResult { + Ok(self.working_copy()?.add_edge( + change_set, + from_node_index, + edge_weight, + to_node_index, + )?) + } + + pub async fn detect_conflicts_and_updates( + &self, + ctx: &DalContext, + to_rebase_change_set: &ChangeSetPointer, + onto_change_set: &ChangeSetPointer, + ) -> WorkspaceSnapshotResult<(Vec, Vec)> { + let onto: WorkspaceSnapshot = Self::find_for_change_set(ctx, onto_change_set.id).await?; + Ok(self.snapshot()?.detect_conflicts_and_updates( + to_rebase_change_set, + &onto.snapshot()?, + onto_change_set, + )?) + } + + #[instrument(skip_all)] + pub async fn find( + ctx: &DalContext, + workspace_snapshot_id: WorkspaceSnapshotId, + ) -> WorkspaceSnapshotResult { + let row = ctx + .txns() + .await? + .pg() + .query_one(FIND, &[&workspace_snapshot_id]) + .await?; + Ok(standard_model::object_from_row(row)?) + } + + #[instrument(skip_all)] + pub async fn find_for_change_set( + ctx: &DalContext, + change_set_pointer_id: ChangeSetPointerId, + ) -> WorkspaceSnapshotResult { + let row = ctx + .txns() + .await? + .pg() + .query_one(FIND_FOR_CHANGE_SET, &[&change_set_pointer_id]) + .await?; + Ok(standard_model::object_from_row(row)?) + } } diff --git a/lib/dal/src/workspace_snapshot/change_set.rs b/lib/dal/src/workspace_snapshot/change_set.rs deleted file mode 100644 index 9ea9b13123..0000000000 --- a/lib/dal/src/workspace_snapshot/change_set.rs +++ /dev/null @@ -1,54 +0,0 @@ -use std::sync::{Arc, Mutex}; - -use serde::{Deserialize, Serialize}; -use thiserror::Error; -use ulid::{Generator, Ulid}; - -#[derive(Debug, Error)] -pub enum ChangeSetError { - #[error("Mutex error: {0}")] - Mutex(String), - #[error("Ulid Monotonic Error: {0}")] - Monotonic(#[from] ulid::MonotonicError), -} - -pub type ChangeSetResult = Result; - -// FIXME(nick): remove this in favor of the real one. 
-pub type ChangeSetId = Ulid; - -// FIXME(nick): remove this in favor of the real one. -#[derive(Clone, Serialize, Deserialize)] -pub struct ChangeSet { - pub id: ChangeSetId, - #[serde(skip)] - pub generator: Arc>, -} - -impl ChangeSet { - pub fn new() -> ChangeSetResult { - let mut generator = Generator::new(); - let id = generator.generate()?; - - Ok(Self { - id, - generator: Arc::new(Mutex::new(generator)), - }) - } - - pub fn generate_ulid(&self) -> ChangeSetResult { - self.generator - .lock() - .map_err(|e| ChangeSetError::Mutex(e.to_string()))? - .generate() - .map_err(Into::into) - } -} - -impl std::fmt::Debug for ChangeSet { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - f.debug_struct("ChangeSet") - .field("id", &self.id.to_string()) - .finish() - } -} diff --git a/lib/dal/src/workspace_snapshot/conflict.rs b/lib/dal/src/workspace_snapshot/conflict.rs index 06e95cdc1e..dbf71b52fa 100644 --- a/lib/dal/src/workspace_snapshot/conflict.rs +++ b/lib/dal/src/workspace_snapshot/conflict.rs @@ -1,9 +1,11 @@ use petgraph::stable_graph::NodeIndex; +use serde::Deserialize; +use serde::Serialize; /// Describe the type of conflict between the given locations in a /// workspace graph. #[remain::sorted] -#[derive(Debug, Copy, Clone, PartialEq, Eq)] +#[derive(Debug, Copy, Clone, PartialEq, Eq, Deserialize, Serialize)] pub enum Conflict { // TODO(nick,jacob): this variant will not be possible until ordering is in place. 
ChildOrder { diff --git a/lib/dal/src/workspace_snapshot/edge_weight.rs b/lib/dal/src/workspace_snapshot/edge_weight.rs index b36bcc1419..92eac13e96 100644 --- a/lib/dal/src/workspace_snapshot/edge_weight.rs +++ b/lib/dal/src/workspace_snapshot/edge_weight.rs @@ -4,10 +4,8 @@ use chrono::{DateTime, Utc}; use serde::{Deserialize, Serialize}; use thiserror::Error; -use crate::workspace_snapshot::{ - change_set::ChangeSet, - vector_clock::{VectorClock, VectorClockError}, -}; +use crate::change_set_pointer::ChangeSetPointer; +use crate::workspace_snapshot::vector_clock::{VectorClock, VectorClockError}; #[derive(Debug, Error)] pub enum EdgeWeightError { @@ -36,7 +34,10 @@ pub struct EdgeWeight { } impl EdgeWeight { - pub fn increment_vector_clocks(&mut self, change_set: &ChangeSet) -> EdgeWeightResult<()> { + pub fn increment_vector_clocks( + &mut self, + change_set: &ChangeSetPointer, + ) -> EdgeWeightResult<()> { self.vector_clock_write.inc(change_set)?; Ok(()) @@ -46,13 +47,13 @@ impl EdgeWeight { self.kind } - pub fn mark_seen_at(&mut self, change_set: &ChangeSet, seen_at: DateTime) { + pub fn mark_seen_at(&mut self, change_set: &ChangeSetPointer, seen_at: DateTime) { if self.vector_clock_first_seen.entry_for(change_set).is_none() { self.vector_clock_first_seen.inc_to(change_set, seen_at); } } - pub fn new(change_set: &ChangeSet, kind: EdgeWeightKind) -> EdgeWeightResult { + pub fn new(change_set: &ChangeSetPointer, kind: EdgeWeightKind) -> EdgeWeightResult { Ok(Self { kind, vector_clock_first_seen: VectorClock::new(change_set)?, @@ -62,7 +63,7 @@ impl EdgeWeight { pub fn new_with_incremented_vector_clocks( &self, - change_set: &ChangeSet, + change_set: &ChangeSetPointer, ) -> EdgeWeightResult { let mut new_weight = self.clone(); new_weight.increment_vector_clocks(change_set)?; diff --git a/lib/dal/src/workspace_snapshot/graph.rs b/lib/dal/src/workspace_snapshot/graph.rs index e606cc6722..195c84c8a9 100644 --- a/lib/dal/src/workspace_snapshot/graph.rs +++ 
b/lib/dal/src/workspace_snapshot/graph.rs @@ -6,9 +6,9 @@ use telemetry::prelude::*; use thiserror::Error; use ulid::Ulid; +use crate::change_set_pointer::{ChangeSetPointer, ChangeSetPointerError}; use crate::{ workspace_snapshot::{ - change_set::{ChangeSet, ChangeSetError}, conflict::Conflict, edge_weight::{EdgeWeight, EdgeWeightError, EdgeWeightKind}, node_weight::{ContentAddress, NodeWeight, NodeWeightError}, @@ -26,7 +26,7 @@ pub enum WorkspaceSnapshotGraphError { #[error("Cannot compare ordering of container elements between ordered, and un-ordered container: {0:?}, {1:?}")] CannotCompareOrderedAndUnorderedContainers(NodeIndex, NodeIndex), #[error("ChangeSet error: {0}")] - ChangeSet(#[from] ChangeSetError), + ChangeSet(#[from] ChangeSetPointerError), #[error("Action would create a graph cycle")] CreateGraphCycle, #[error("EdgeWeight error: {0}")] @@ -71,7 +71,7 @@ impl std::fmt::Debug for WorkspaceSnapshotGraph { } impl WorkspaceSnapshotGraph { - pub fn new(change_set: &ChangeSet) -> WorkspaceSnapshotGraphResult { + pub fn new(change_set: &ChangeSetPointer) -> WorkspaceSnapshotGraphResult { let mut graph: StableDiGraph = StableDiGraph::with_capacity(1, 0); let root_index = graph.add_node(NodeWeight::new_content( change_set, @@ -88,7 +88,7 @@ impl WorkspaceSnapshotGraph { pub fn add_edge( &mut self, - change_set: &ChangeSet, + change_set: &ChangeSetPointer, from_node_index: NodeIndex, edge_weight: EdgeWeight, to_node_index: NodeIndex, @@ -135,7 +135,7 @@ impl WorkspaceSnapshotGraph { Ok(new_edge_index) } - fn add_node(&mut self, node: NodeWeight) -> WorkspaceSnapshotGraphResult { + pub fn add_node(&mut self, node: NodeWeight) -> WorkspaceSnapshotGraphResult { let new_node_index = self.graph.add_node(node); self.update_merkle_tree_hash(new_node_index)?; @@ -144,7 +144,7 @@ impl WorkspaceSnapshotGraph { fn add_ordered_node( &mut self, - change_set: &ChangeSet, + change_set: &ChangeSetPointer, node: NodeWeight, ) -> WorkspaceSnapshotGraphResult { let 
node_weight_id = node.id(); @@ -222,11 +222,11 @@ impl WorkspaceSnapshotGraph { Ok(new_node_index) } - fn detect_conflicts_and_updates( + pub fn detect_conflicts_and_updates( &self, - to_rebase_change_set: &ChangeSet, + to_rebase_change_set: &ChangeSetPointer, onto: &WorkspaceSnapshotGraph, - onto_change_set: &ChangeSet, + onto_change_set: &ChangeSetPointer, ) -> WorkspaceSnapshotGraphResult<(Vec, Vec)> { let mut conflicts: Vec = Vec::new(); let mut updates: Vec = Vec::new(); @@ -250,9 +250,9 @@ impl WorkspaceSnapshotGraph { fn detect_conflicts_and_updates_process_dfs_event( &self, - to_rebase_change_set: &ChangeSet, + to_rebase_change_set: &ChangeSetPointer, onto: &WorkspaceSnapshotGraph, - onto_change_set: &ChangeSet, + onto_change_set: &ChangeSetPointer, event: DfsEvent, conflicts: &mut Vec, updates: &mut Vec, @@ -476,11 +476,11 @@ impl WorkspaceSnapshotGraph { fn find_ordered_container_membership_conflicts_and_updates( &self, - to_rebase_change_set: &ChangeSet, + to_rebase_change_set: &ChangeSetPointer, to_rebase_container_index: NodeIndex, to_rebase_ordering_index: NodeIndex, onto: &WorkspaceSnapshotGraph, - onto_change_set: &ChangeSet, + onto_change_set: &ChangeSetPointer, onto_container_index: NodeIndex, onto_ordering_index: NodeIndex, ) -> WorkspaceSnapshotGraphResult<(Vec, Vec)> { @@ -725,10 +725,10 @@ impl WorkspaceSnapshotGraph { fn find_unordered_container_membership_conflicts_and_updates( &self, - to_rebase_change_set: &ChangeSet, + to_rebase_change_set: &ChangeSetPointer, to_rebase_container_index: NodeIndex, onto: &WorkspaceSnapshotGraph, - onto_change_set: &ChangeSet, + onto_change_set: &ChangeSetPointer, onto_container_index: NodeIndex, ) -> WorkspaceSnapshotGraphResult<(Vec, Vec)> { #[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)] @@ -967,7 +967,10 @@ impl WorkspaceSnapshotGraph { && algo::has_path_connecting(&self.graph, node, end, None) } - pub fn mark_graph_seen(&mut self, change_set: &ChangeSet) -> WorkspaceSnapshotGraphResult<()> { + pub fn 
mark_graph_seen( + &mut self, + change_set: &ChangeSetPointer, + ) -> WorkspaceSnapshotGraphResult<()> { let seen_at = Utc::now(); for edge in self.graph.edge_weights_mut() { edge.mark_seen_at(change_set, seen_at.clone()); @@ -1033,7 +1036,7 @@ impl WorkspaceSnapshotGraph { /// [`Self::cleanup()`] has run should be considered invalid. fn remove_edge( &mut self, - change_set: &ChangeSet, + change_set: &ChangeSetPointer, source_node_index: NodeIndex, target_node_index: NodeIndex, edge_kind: EdgeWeightKind, @@ -1165,7 +1168,7 @@ impl WorkspaceSnapshotGraph { pub fn update_content( &mut self, - change_set: &ChangeSet, + change_set: &ChangeSetPointer, id: Ulid, new_content_hash: ContentHash, ) -> WorkspaceSnapshotGraphResult<()> { @@ -1180,7 +1183,7 @@ impl WorkspaceSnapshotGraph { pub fn update_order( &mut self, - change_set: &ChangeSet, + change_set: &ChangeSetPointer, container_id: Ulid, new_order: Vec, ) -> WorkspaceSnapshotGraphResult<()> { @@ -1278,7 +1281,7 @@ mod test { #[test] fn new() { - let change_set = ChangeSet::new().expect("Unable to create ChangeSet"); + let change_set = ChangeSetPointer::new_local().expect("Unable to create ChangeSet"); let change_set = &change_set; let graph = WorkspaceSnapshotGraph::new(change_set) .expect("Unable to create WorkspaceSnapshotGraph"); @@ -1287,7 +1290,7 @@ mod test { #[test] fn add_nodes_and_edges() { - let change_set = ChangeSet::new().expect("Unable to create ChangeSet"); + let change_set = ChangeSetPointer::new_local().expect("Unable to create ChangeSet"); let change_set = &change_set; let mut graph = WorkspaceSnapshotGraph::new(change_set) .expect("Unable to create WorkspaceSnapshotGraph"); @@ -1441,7 +1444,7 @@ mod test { #[test] fn cyclic_failure() { - let change_set = ChangeSet::new().expect("Unable to create ChangeSet"); + let change_set = ChangeSetPointer::new_local().expect("Unable to create ChangeSet"); let change_set = &change_set; let mut graph = WorkspaceSnapshotGraph::new(change_set) .expect("Unable to 
create WorkspaceSnapshotGraph"); @@ -1551,7 +1554,7 @@ mod test { #[test] fn update_content() { - let change_set = ChangeSet::new().expect("Unable to create ChangeSet"); + let change_set = ChangeSetPointer::new_local().expect("Unable to create ChangeSet"); let change_set = &change_set; let mut graph = WorkspaceSnapshotGraph::new(change_set) .expect("Unable to create WorkspaceSnapshotGraph"); @@ -1712,7 +1715,7 @@ mod test { #[test] fn detect_conflicts_and_updates_simple_no_conflicts_no_updates_in_base() { - let initial_change_set = ChangeSet::new().expect("Unable to create ChangeSet"); + let initial_change_set = ChangeSetPointer::new_local().expect("Unable to create ChangeSet"); let initial_change_set = &initial_change_set; let mut initial_graph = WorkspaceSnapshotGraph::new(initial_change_set) .expect("Unable to create WorkspaceSnapshotGraph"); @@ -1767,7 +1770,7 @@ mod test { initial_graph.dot(); - let new_change_set = ChangeSet::new().expect("Unable to create ChangeSet"); + let new_change_set = ChangeSetPointer::new_local().expect("Unable to create ChangeSet"); let new_change_set = &new_change_set; let mut new_graph = initial_graph.clone(); @@ -1819,7 +1822,7 @@ mod test { #[test] fn detect_conflicts_and_updates_simple_no_conflicts_with_purely_new_content_in_base() { - let initial_change_set = ChangeSet::new().expect("Unable to create ChangeSet"); + let initial_change_set = ChangeSetPointer::new_local().expect("Unable to create ChangeSet"); let base_change_set = &initial_change_set; let mut base_graph = WorkspaceSnapshotGraph::new(base_change_set) .expect("Unable to create WorkspaceSnapshotGraph"); @@ -1875,7 +1878,7 @@ mod test { println!("Initial base graph (Root {:?}):", base_graph.root_index); base_graph.dot(); - let new_change_set = ChangeSet::new().expect("Unable to create ChangeSet"); + let new_change_set = ChangeSetPointer::new_local().expect("Unable to create ChangeSet"); let new_change_set = &new_change_set; let mut new_graph = base_graph.clone(); @@ 
-1943,7 +1946,7 @@ mod test { #[test] fn detect_conflicts_and_updates_simple_no_conflicts_with_updates_on_both_sides() { - let initial_change_set = ChangeSet::new().expect("Unable to create ChangeSet"); + let initial_change_set = ChangeSetPointer::new_local().expect("Unable to create ChangeSet"); let base_change_set = &initial_change_set; let mut base_graph = WorkspaceSnapshotGraph::new(base_change_set) .expect("Unable to create WorkspaceSnapshotGraph"); @@ -1999,7 +2002,7 @@ mod test { println!("Initial base graph (Root {:?}):", base_graph.root_index); base_graph.dot(); - let new_change_set = ChangeSet::new().expect("Unable to create ChangeSet"); + let new_change_set = ChangeSetPointer::new_local().expect("Unable to create ChangeSet"); let new_change_set = &new_change_set; let mut new_graph = base_graph.clone(); @@ -2106,7 +2109,7 @@ mod test { #[test] fn detect_conflicts_and_updates_simple_with_content_conflict() { - let initial_change_set = ChangeSet::new().expect("Unable to create ChangeSet"); + let initial_change_set = ChangeSetPointer::new_local().expect("Unable to create ChangeSet"); let base_change_set = &initial_change_set; let mut base_graph = WorkspaceSnapshotGraph::new(base_change_set) .expect("Unable to create WorkspaceSnapshotGraph"); @@ -2199,7 +2202,7 @@ mod test { println!("Initial base graph (Root {:?}):", base_graph.root_index); base_graph.dot(); - let new_change_set = ChangeSet::new().expect("Unable to create ChangeSet"); + let new_change_set = ChangeSetPointer::new_local().expect("Unable to create ChangeSet"); let new_change_set = &new_change_set; let mut new_graph = base_graph.clone(); @@ -2247,7 +2250,7 @@ mod test { #[test] fn detect_conflicts_and_updates_simple_with_modify_removed_item_conflict() { - let initial_change_set = ChangeSet::new().expect("Unable to create ChangeSet"); + let initial_change_set = ChangeSetPointer::new_local().expect("Unable to create ChangeSet"); let base_change_set = &initial_change_set; let mut base_graph = 
WorkspaceSnapshotGraph::new(base_change_set) .expect("Unable to create WorkspaceSnapshotGraph"); @@ -2340,7 +2343,7 @@ mod test { println!("Initial base graph (Root {:?}):", base_graph.root_index); base_graph.dot(); - let new_change_set = ChangeSet::new().expect("Unable to create ChangeSet"); + let new_change_set = ChangeSetPointer::new_local().expect("Unable to create ChangeSet"); let new_change_set = &new_change_set; let mut new_graph = base_graph.clone(); @@ -2388,7 +2391,7 @@ mod test { #[test] fn detect_conflicts_and_updates_complex() { - let initial_change_set = ChangeSet::new().expect("Unable to create ChangeSet"); + let initial_change_set = ChangeSetPointer::new_local().expect("Unable to create ChangeSet"); let base_change_set = &initial_change_set; let mut base_graph = WorkspaceSnapshotGraph::new(base_change_set) .expect("Unable to create WorkspaceSnapshotGraph"); @@ -2609,7 +2612,7 @@ mod test { base_graph.dot(); // Create a new change set to cause some problems! - let new_change_set = ChangeSet::new().expect("Unable to create ChangeSet"); + let new_change_set = ChangeSetPointer::new_local().expect("Unable to create ChangeSet"); let new_change_set = &new_change_set; let mut new_graph = base_graph.clone(); @@ -2701,7 +2704,7 @@ mod test { #[test] fn add_ordered_node() { - let change_set = ChangeSet::new().expect("Unable to create ChangeSet"); + let change_set = ChangeSetPointer::new_local().expect("Unable to create ChangeSet"); let change_set = &change_set; let mut graph = WorkspaceSnapshotGraph::new(change_set) .expect("Unable to create WorkspaceSnapshotGraph"); @@ -2901,7 +2904,7 @@ mod test { vec![ ordered_prop_1_index, ordered_prop_2_index, - ordered_prop_3_index + ordered_prop_3_index, ], graph .ordered_children_for_node( @@ -2915,7 +2918,7 @@ mod test { #[test] fn reorder_ordered_node() { - let change_set = ChangeSet::new().expect("Unable to create ChangeSet"); + let change_set = ChangeSetPointer::new_local().expect("Unable to create ChangeSet"); let 
change_set = &change_set; let mut graph = WorkspaceSnapshotGraph::new(change_set) .expect("Unable to create WorkspaceSnapshotGraph"); @@ -3184,7 +3187,7 @@ mod test { #[test] fn remove_ordered_node() { - let change_set = ChangeSet::new().expect("Unable to create ChangeSet"); + let change_set = ChangeSetPointer::new_local().expect("Unable to create ChangeSet"); let change_set = &change_set; let mut graph = WorkspaceSnapshotGraph::new(change_set) .expect("Unable to create WorkspaceSnapshotGraph"); @@ -3472,7 +3475,7 @@ mod test { #[test] fn detect_conflicts_and_updates_simple_ordering_no_conflicts_no_updates_in_base() { - let initial_change_set = ChangeSet::new().expect("Unable to create ChangeSet"); + let initial_change_set = ChangeSetPointer::new_local().expect("Unable to create ChangeSet"); let initial_change_set = &initial_change_set; let mut initial_graph = WorkspaceSnapshotGraph::new(initial_change_set) .expect("Unable to create WorkspaceSnapshotGraph"); @@ -3664,7 +3667,7 @@ mod test { initial_graph.cleanup(); initial_graph.dot(); - let new_change_set = ChangeSet::new().expect("Unable to create ChangeSet"); + let new_change_set = ChangeSetPointer::new_local().expect("Unable to create ChangeSet"); let new_change_set = &new_change_set; let mut new_graph = initial_graph.clone(); @@ -3708,7 +3711,7 @@ mod test { #[test] fn detect_conflicts_and_updates_simple_ordering_no_conflicts_with_updates_in_base() { - let initial_change_set = ChangeSet::new().expect("Unable to create ChangeSet"); + let initial_change_set = ChangeSetPointer::new_local().expect("Unable to create ChangeSet"); let initial_change_set = &initial_change_set; let mut initial_graph = WorkspaceSnapshotGraph::new(initial_change_set) .expect("Unable to create WorkspaceSnapshotGraph"); @@ -3899,7 +3902,7 @@ mod test { initial_graph.dot(); - let new_change_set = ChangeSet::new().expect("Unable to create ChangeSet"); + let new_change_set = ChangeSetPointer::new_local().expect("Unable to create ChangeSet"); 
let new_change_set = &new_change_set; let new_graph = initial_graph.clone(); @@ -3947,7 +3950,7 @@ mod test { destination: initial_graph .get_node_index_by_id(ordered_prop_5_id) .expect("Unable to get NodeIndex"), - edge_weight: new_edge_weight + edge_weight: new_edge_weight, }, Update::ReplaceSubgraph { new: initial_graph @@ -3966,7 +3969,7 @@ mod test { ) .expect("Unable to get old ordering NodeIndex") .expect("Ordering NodeIndex not found"), - } + }, ], updates ); @@ -3974,7 +3977,7 @@ mod test { #[test] fn detect_conflicts_and_updates_simple_ordering_with_conflicting_ordering_updates() { - let initial_change_set = ChangeSet::new().expect("Unable to create ChangeSet"); + let initial_change_set = ChangeSetPointer::new_local().expect("Unable to create ChangeSet"); let initial_change_set = &initial_change_set; let mut initial_graph = WorkspaceSnapshotGraph::new(initial_change_set) .expect("Unable to create WorkspaceSnapshotGraph"); @@ -4165,7 +4168,7 @@ mod test { initial_graph.dot(); - let new_change_set = ChangeSet::new().expect("Unable to create ChangeSet"); + let new_change_set = ChangeSetPointer::new_local().expect("Unable to create ChangeSet"); let new_change_set = &new_change_set; let mut new_graph = initial_graph.clone(); @@ -4251,7 +4254,7 @@ mod test { #[test] fn detect_conflicts_and_updates_simple_ordering_with_no_conflicts_add_in_onto_remove_in_to_rebase( ) { - let initial_change_set = ChangeSet::new().expect("Unable to create ChangeSet"); + let initial_change_set = ChangeSetPointer::new_local().expect("Unable to create ChangeSet"); let initial_change_set = &initial_change_set; let mut initial_graph = WorkspaceSnapshotGraph::new(initial_change_set) .expect("Unable to create WorkspaceSnapshotGraph"); @@ -4446,7 +4449,7 @@ mod test { .expect("Unable to update recently seen information"); // initial_graph.dot(); - let new_change_set = ChangeSet::new().expect("Unable to create ChangeSet"); + let new_change_set = ChangeSetPointer::new_local().expect("Unable 
to create ChangeSet"); let new_change_set = &new_change_set; let mut new_graph = initial_graph.clone(); diff --git a/lib/dal/src/workspace_snapshot/lamport_clock.rs b/lib/dal/src/workspace_snapshot/lamport_clock.rs index 32ae4887c2..125301fd60 100644 --- a/lib/dal/src/workspace_snapshot/lamport_clock.rs +++ b/lib/dal/src/workspace_snapshot/lamport_clock.rs @@ -5,12 +5,12 @@ use serde::{Deserialize, Serialize}; use thiserror::Error; use ulid::Ulid; -use crate::workspace_snapshot::{ChangeSet, ChangeSetError}; +use crate::workspace_snapshot::{ChangeSetPointer, ChangeSetPointerError}; #[derive(Debug, Error)] pub enum LamportClockError { #[error("Change Set error: {0}")] - ChangeSet(#[from] ChangeSetError), + ChangeSet(#[from] ChangeSetPointerError), } pub type LamportClockResult = Result; diff --git a/lib/dal/src/workspace_snapshot/node_weight.rs b/lib/dal/src/workspace_snapshot/node_weight.rs index f1d25fa396..f76a39f32e 100644 --- a/lib/dal/src/workspace_snapshot/node_weight.rs +++ b/lib/dal/src/workspace_snapshot/node_weight.rs @@ -3,10 +3,8 @@ use serde::{Deserialize, Serialize}; use thiserror::Error; use ulid::Ulid; -use crate::workspace_snapshot::{ - change_set::{ChangeSet, ChangeSetError}, - vector_clock::{VectorClock, VectorClockError}, -}; +use crate::change_set_pointer::{ChangeSetPointer, ChangeSetPointerError}; +use crate::workspace_snapshot::vector_clock::{VectorClock, VectorClockError}; use crate::ContentHash; pub use crate::workspace_snapshot::node_weight::content_node_weight::ContentAddress; @@ -25,7 +23,7 @@ pub enum NodeWeightError { #[error("Cannot update root node's content hash")] CannotUpdateRootNodeContentHash, #[error("ChangeSet error: {0}")] - ChangeSet(#[from] ChangeSetError), + ChangeSet(#[from] ChangeSetPointerError), #[error("Incompatible node weights")] IncompatibleNodeWeightVariants, #[error("Vector Clock error: {0}")] @@ -55,7 +53,10 @@ impl NodeWeight { } } - pub fn increment_vector_clock(&mut self, change_set: &ChangeSet) -> 
NodeWeightResult<()> { + pub fn increment_vector_clock( + &mut self, + change_set: &ChangeSetPointer, + ) -> NodeWeightResult<()> { match self { NodeWeight::Content(content_weight) => { content_weight.increment_vector_clock(change_set) @@ -73,7 +74,7 @@ impl NodeWeight { } } - pub fn mark_seen_at(&mut self, change_set: &ChangeSet, seen_at: DateTime) { + pub fn mark_seen_at(&mut self, change_set: &ChangeSetPointer, seen_at: DateTime) { match self { NodeWeight::Content(content_weight) => content_weight.mark_seen_at(change_set, seen_at), NodeWeight::Ordering(ordering_weight) => { @@ -84,7 +85,7 @@ impl NodeWeight { pub fn merge_clocks( &mut self, - change_set: &ChangeSet, + change_set: &ChangeSetPointer, other: &NodeWeight, ) -> NodeWeightResult<()> { match (self, other) { @@ -108,7 +109,7 @@ impl NodeWeight { } pub fn new_content( - change_set: &ChangeSet, + change_set: &ChangeSetPointer, content_id: Ulid, kind: ContentAddress, ) -> NodeWeightResult { @@ -126,7 +127,7 @@ impl NodeWeight { pub fn new_with_incremented_vector_clock( &self, - change_set: &ChangeSet, + change_set: &ChangeSetPointer, ) -> NodeWeightResult { let new_weight = match self { NodeWeight::Content(content_weight) => { @@ -147,7 +148,11 @@ impl NodeWeight { } } - pub fn set_order(&mut self, change_set: &ChangeSet, order: Vec) -> NodeWeightResult<()> { + pub fn set_order( + &mut self, + change_set: &ChangeSetPointer, + order: Vec, + ) -> NodeWeightResult<()> { match self { NodeWeight::Content(_) => Err(NodeWeightError::CannotSetOrderOnKind), NodeWeight::Ordering(ordering_weight) => ordering_weight.set_order(change_set, order), @@ -156,7 +161,7 @@ impl NodeWeight { pub fn set_vector_clock_recently_seen_to( &mut self, - change_set: &ChangeSet, + change_set: &ChangeSetPointer, new_val: DateTime, ) { match self { diff --git a/lib/dal/src/workspace_snapshot/node_weight/content_node_weight.rs b/lib/dal/src/workspace_snapshot/node_weight/content_node_weight.rs index 72b29c1bd2..7e7f9fdc34 100644 --- 
a/lib/dal/src/workspace_snapshot/node_weight/content_node_weight.rs +++ b/lib/dal/src/workspace_snapshot/node_weight/content_node_weight.rs @@ -2,8 +2,8 @@ use chrono::{DateTime, Utc}; use serde::{Deserialize, Serialize}; use ulid::Ulid; +use crate::change_set_pointer::ChangeSetPointer; use crate::workspace_snapshot::{ - change_set::ChangeSet, node_weight::{NodeWeightError, NodeWeightResult}, vector_clock::VectorClock, }; @@ -58,7 +58,7 @@ pub struct ContentNodeWeight { /// starting with this node as the root. Mainly useful in quickly determining "has /// something changed anywhere in this (sub)graph". merkle_tree_hash: ContentHash, - /// The first time a [`ChangeSet`] has "seen" this content. This is useful for determining + /// The first time a [`ChangeSetPointer`] has "seen" this content. This is useful for determining /// whether the absence of this content on one side or the other of a rebase/merge is because /// the content is new, or because one side deleted it. vector_clock_first_seen: VectorClock, @@ -68,7 +68,7 @@ pub struct ContentNodeWeight { impl ContentNodeWeight { pub fn new( - change_set: &ChangeSet, + change_set: &ChangeSetPointer, id: Ulid, content_address: ContentAddress, ) -> NodeWeightResult { @@ -95,7 +95,10 @@ impl ContentNodeWeight { self.id } - pub fn increment_vector_clock(&mut self, change_set: &ChangeSet) -> NodeWeightResult<()> { + pub fn increment_vector_clock( + &mut self, + change_set: &ChangeSetPointer, + ) -> NodeWeightResult<()> { self.vector_clock_write.inc(change_set)?; self.vector_clock_recently_seen.inc(change_set)?; @@ -106,7 +109,7 @@ impl ContentNodeWeight { self.lineage_id } - pub fn mark_seen_at(&mut self, change_set: &ChangeSet, seen_at: DateTime) { + pub fn mark_seen_at(&mut self, change_set: &ChangeSetPointer, seen_at: DateTime) { self.vector_clock_recently_seen .inc_to(change_set, seen_at.clone()); if self.vector_clock_first_seen.entry_for(change_set).is_none() { @@ -116,7 +119,7 @@ impl ContentNodeWeight { pub fn 
merge_clocks( &mut self, - change_set: &ChangeSet, + change_set: &ChangeSetPointer, other: &ContentNodeWeight, ) -> NodeWeightResult<()> { self.vector_clock_write @@ -151,7 +154,7 @@ impl ContentNodeWeight { pub fn new_with_incremented_vector_clock( &self, - change_set: &ChangeSet, + change_set: &ChangeSetPointer, ) -> NodeWeightResult { let mut new_node_weight = self.clone(); new_node_weight.increment_vector_clock(change_set)?; @@ -165,7 +168,7 @@ impl ContentNodeWeight { pub fn set_vector_clock_recently_seen_to( &mut self, - change_set: &ChangeSet, + change_set: &ChangeSetPointer, new_val: DateTime, ) { self.vector_clock_recently_seen.inc_to(change_set, new_val); diff --git a/lib/dal/src/workspace_snapshot/node_weight/ordering_node_weight.rs b/lib/dal/src/workspace_snapshot/node_weight/ordering_node_weight.rs index ecfb3d6f77..e02171c637 100644 --- a/lib/dal/src/workspace_snapshot/node_weight/ordering_node_weight.rs +++ b/lib/dal/src/workspace_snapshot/node_weight/ordering_node_weight.rs @@ -2,10 +2,9 @@ use chrono::{DateTime, Utc}; use serde::{Deserialize, Serialize}; use ulid::Ulid; +use crate::change_set_pointer::ChangeSetPointer; use crate::{ - workspace_snapshot::{ - change_set::ChangeSet, node_weight::NodeWeightResult, vector_clock::VectorClock, - }, + workspace_snapshot::{node_weight::NodeWeightResult, vector_clock::VectorClock}, ContentHash, }; @@ -31,13 +30,19 @@ impl OrderingNodeWeight { self.id } - pub fn increment_seen_vector_clock(&mut self, change_set: &ChangeSet) -> NodeWeightResult<()> { + pub fn increment_seen_vector_clock( + &mut self, + change_set: &ChangeSetPointer, + ) -> NodeWeightResult<()> { self.vector_clock_first_seen.inc(change_set)?; Ok(()) } - pub fn increment_vector_clock(&mut self, change_set: &ChangeSet) -> NodeWeightResult<()> { + pub fn increment_vector_clock( + &mut self, + change_set: &ChangeSetPointer, + ) -> NodeWeightResult<()> { self.vector_clock_write.inc(change_set).map_err(Into::into) } @@ -45,7 +50,7 @@ impl 
OrderingNodeWeight { self.lineage_id } - pub fn mark_seen_at(&mut self, change_set: &ChangeSet, seen_at: DateTime) { + pub fn mark_seen_at(&mut self, change_set: &ChangeSetPointer, seen_at: DateTime) { self.vector_clock_recently_seen .inc_to(change_set, seen_at.clone()); if self.vector_clock_first_seen.entry_for(change_set).is_none() { @@ -55,7 +60,7 @@ impl OrderingNodeWeight { pub fn merge_clocks( &mut self, - change_set: &ChangeSet, + change_set: &ChangeSetPointer, other: &OrderingNodeWeight, ) -> NodeWeightResult<()> { self.vector_clock_write @@ -70,7 +75,7 @@ impl OrderingNodeWeight { self.merkle_tree_hash } - pub fn new(change_set: &ChangeSet) -> NodeWeightResult { + pub fn new(change_set: &ChangeSetPointer) -> NodeWeightResult { Ok(Self { id: change_set.generate_ulid()?, lineage_id: change_set.generate_ulid()?, @@ -82,7 +87,7 @@ impl OrderingNodeWeight { pub fn new_with_incremented_vector_clock( &self, - change_set: &ChangeSet, + change_set: &ChangeSetPointer, ) -> NodeWeightResult { let mut new_ordering_weight = self.clone(); new_ordering_weight.increment_vector_clock(change_set)?; @@ -98,7 +103,11 @@ impl OrderingNodeWeight { self.merkle_tree_hash = new_hash; } - pub fn set_order(&mut self, change_set: &ChangeSet, order: Vec) -> NodeWeightResult<()> { + pub fn set_order( + &mut self, + change_set: &ChangeSetPointer, + order: Vec, + ) -> NodeWeightResult<()> { self.order = order; self.update_content_hash(); self.increment_vector_clock(change_set)?; @@ -108,7 +117,7 @@ impl OrderingNodeWeight { pub fn set_vector_clock_recently_seen_to( &mut self, - change_set: &ChangeSet, + change_set: &ChangeSetPointer, new_val: DateTime, ) { self.vector_clock_recently_seen.inc_to(change_set, new_val); diff --git a/lib/dal/src/workspace_snapshot/update.rs b/lib/dal/src/workspace_snapshot/update.rs index 0087942833..73a73a5f5c 100644 --- a/lib/dal/src/workspace_snapshot/update.rs +++ b/lib/dal/src/workspace_snapshot/update.rs @@ -1,9 +1,10 @@ use petgraph::prelude::*; use 
super::edge_weight::EdgeWeight; +use serde::{Deserialize, Serialize}; #[remain::sorted] -#[derive(Debug, Clone, PartialEq, Eq)] +#[derive(Debug, Clone, PartialEq, Eq, Deserialize, Serialize)] pub enum Update { NewEdge { source: NodeIndex, diff --git a/lib/dal/src/workspace_snapshot/vector_clock.rs b/lib/dal/src/workspace_snapshot/vector_clock.rs index 09a5c2a058..98755e9f11 100644 --- a/lib/dal/src/workspace_snapshot/vector_clock.rs +++ b/lib/dal/src/workspace_snapshot/vector_clock.rs @@ -9,7 +9,7 @@ use ulid::Ulid; use crate::workspace_snapshot::{ lamport_clock::{LamportClock, LamportClockError}, - {ChangeSet, ChangeSetId}, + {ChangeSetPointer, ChangeSetPointerId}, }; #[derive(Debug, Error)] @@ -22,12 +22,12 @@ pub type VectorClockResult = Result; #[derive(Default, Serialize, Deserialize, PartialEq, Eq, Clone)] pub struct VectorClock { - entries: HashMap, + entries: HashMap, } impl VectorClock { - /// Create a new [`VectorClock`] with an entry for [`ChangeSet`]. - pub fn new(change_set: &ChangeSet) -> VectorClockResult { + /// Create a new [`VectorClock`] with an entry for [`ChangeSetPointer`]. 
+ pub fn new(change_set: &ChangeSetPointer) -> VectorClockResult { let lamport_clock = LamportClock::new()?; let mut entries = HashMap::new(); entries.insert(change_set.id, lamport_clock); @@ -35,7 +35,7 @@ impl VectorClock { Ok(VectorClock { entries }) } - pub fn entry_for(&self, change_set: &ChangeSet) -> Option { + pub fn entry_for(&self, change_set: &ChangeSetPointer) -> Option { self.entries.get(&change_set.id).copied() } @@ -43,7 +43,7 @@ impl VectorClock { self.entries.values().any(|v| *v > clock_stamp) } - pub fn inc_to(&mut self, change_set: &ChangeSet, new_clock_value: DateTime) { + pub fn inc_to(&mut self, change_set: &ChangeSetPointer, new_clock_value: DateTime) { if let Some(lamport_clock) = self.entries.get_mut(&change_set.id) { lamport_clock.inc_to(new_clock_value); } else { @@ -52,8 +52,8 @@ impl VectorClock { } } - /// Increment the entry for [`ChangeSet`], adding one if there wasn't one already. - pub fn inc(&mut self, change_set: &ChangeSet) -> VectorClockResult<()> { + /// Increment the entry for [`ChangeSetPointer`], adding one if there wasn't one already. + pub fn inc(&mut self, change_set: &ChangeSetPointer) -> VectorClockResult<()> { if let Some(lamport_clock) = self.entries.get_mut(&change_set.id) { lamport_clock.inc()?; } else { @@ -64,9 +64,13 @@ impl VectorClock { } /// Add all entries in `other` to `self`, taking the most recent value if the entry already - /// exists in `self`, then increment the entry for [`ChangeSet`] (adding one if it is not + /// exists in `self`, then increment the entry for [`ChangeSetPointer`] (adding one if it is not /// already there). 
- pub fn merge(&mut self, change_set: &ChangeSet, other: &VectorClock) -> VectorClockResult<()> { + pub fn merge( + &mut self, + change_set: &ChangeSetPointer, + other: &VectorClock, + ) -> VectorClockResult<()> { for (other_change_set_id, other_lamport_clock) in other.entries.iter() { if let Some(lamport_clock) = self.entries.get_mut(other_change_set_id) { lamport_clock.merge(other_lamport_clock); @@ -80,8 +84,8 @@ impl VectorClock { Ok(()) } - /// Return a new [`VectorClock`] with the entry for [`ChangeSet`] incremented. - pub fn fork(&self, change_set: &ChangeSet) -> VectorClockResult { + /// Return a new [`VectorClock`] with the entry for [`ChangeSetPointer`] incremented. + pub fn fork(&self, change_set: &ChangeSetPointer) -> VectorClockResult { let mut forked = self.clone(); forked.inc(change_set)?; diff --git a/lib/dal/tests/integration_test/internal/mod.rs b/lib/dal/tests/integration_test/internal/mod.rs index 40cb020fc0..db3a310050 100644 --- a/lib/dal/tests/integration_test/internal/mod.rs +++ b/lib/dal/tests/integration_test/internal/mod.rs @@ -16,6 +16,7 @@ mod prop; mod prop_tree; mod property_editor; mod provider; +mod rebaser; mod schema; mod secret; mod socket; diff --git a/lib/dal/tests/integration_test/internal/rebaser.rs b/lib/dal/tests/integration_test/internal/rebaser.rs new file mode 100644 index 0000000000..e00559fe02 --- /dev/null +++ b/lib/dal/tests/integration_test/internal/rebaser.rs @@ -0,0 +1,96 @@ +//! For all tests in this module, provide "SI_TEST_BUILTIN_SCHEMAS=none" as an environment variable. 
+ +use dal::change_set_pointer::ChangeSetPointer; +use dal::content::hash::ContentHash; +use dal::workspace_snapshot::node_weight::{ContentAddress, NodeWeight}; +use dal::{DalContext, Tenancy, Visibility, WorkspacePk, WorkspaceSnapshot}; +use dal_test::test; +use rebaser_client::Client; + +#[test] +async fn simple_rebase(ctx: &mut DalContext) { + ctx.update_visibility(Visibility::new_head(false)); + ctx.update_tenancy(Tenancy::new(WorkspacePk::NONE)); + let ctx = &ctx; + + let mut client = Client::new().await.expect("could not build client"); + + let mut base_change_set = ChangeSetPointer::new(ctx, "main") + .await + .expect("could not create change set"); + let base_change_set = &mut base_change_set; + let mut snapshot = WorkspaceSnapshot::initial(ctx, &base_change_set) + .await + .expect("could not create workspace snapshot"); + + // Add a new node. + snapshot + .add_node( + NodeWeight::new_content( + base_change_set, + base_change_set + .generate_ulid() + .expect("cannot generate ulid"), + ContentAddress::Schema(ContentHash::from("lacy - olivia rodrigo")), + ) + .expect("could not create node weight"), + ) + .expect("could not add node"); + + snapshot.write(ctx).await.expect("could not write snapshot"); + base_change_set + .update_pointer(ctx, snapshot.id) + .await + .expect("could not update pointer"); + + // Create another change set and update. 
+ let mut forked_change_set = ChangeSetPointer::new(ctx, "fork") + .await + .expect("could not create change set"); + let forked_change_set = &mut forked_change_set; + snapshot + .add_node( + NodeWeight::new_content( + forked_change_set, + forked_change_set + .generate_ulid() + .expect("cannot generate ulid"), + ContentAddress::Schema(ContentHash::from("i'm the one - victoria monét")), + ) + .expect("could not create node weight"), + ) + .expect("could not add node"); + snapshot.write(ctx).await.expect("could not write snapshot"); + forked_change_set + .update_pointer(ctx, snapshot.id) + .await + .expect("could not update pointer"); + + // Rebase! + let response = client + .send_management_open_change_set(base_change_set.id.into()) + .await + .expect("could not send management"); + + // TODO(nick): do something useful with this. + dbg!(response); + + ctx.blocking_commit().await.expect("could not do this"); + + let response = client + .send_with_reply( + base_change_set.id.into(), + snapshot.id.into(), + forked_change_set.id.into(), + ) + .await + .expect("could not send"); + + // TODO(nick): do something useful with this. + dbg!(response); + + // TODO(nick): move cleanup to the test harness. 
+ let _ = client + .send_management_close_change_set(base_change_set.id.into()) + .await; +} diff --git a/lib/rebaser-client/BUCK b/lib/rebaser-client/BUCK index b648c90c77..9ea6588332 100644 --- a/lib/rebaser-client/BUCK +++ b/lib/rebaser-client/BUCK @@ -3,7 +3,6 @@ load("@prelude-si//:macros.bzl", "rust_library") rust_library( name = "rebaser-client", deps = [ - "//lib/rebaser-server:rebaser-server", "//lib/si-rabbitmq:si-rabbitmq", "//lib/telemetry-rs:telemetry", "//third-party/rust:remain", diff --git a/lib/rebaser-client/Cargo.toml b/lib/rebaser-client/Cargo.toml index 0a4cd4ff54..b6bb394fab 100644 --- a/lib/rebaser-client/Cargo.toml +++ b/lib/rebaser-client/Cargo.toml @@ -6,7 +6,7 @@ rust-version = "1.64" publish = false [dependencies] -rebaser-server = { path = "../../lib/rebaser-server" } +rebaser-core = { path = "../../lib/rebaser-core" } remain = { workspace = true } serde = { workspace = true } serde_json = { workspace = true } @@ -15,3 +15,4 @@ telemetry = { path = "../../lib/telemetry-rs" } thiserror = { workspace = true } tokio = { workspace = true } ulid = { workspace = true } +log = "0.4.20" diff --git a/lib/rebaser-client/src/client.rs b/lib/rebaser-client/src/client.rs index 8a3a196055..450b4c4576 100644 --- a/lib/rebaser-client/src/client.rs +++ b/lib/rebaser-client/src/client.rs @@ -1,22 +1,28 @@ //! This module provides [`Client`], which is used for communicating with a running //! rebaser [`Server`](rebaser_server::Server). 
-use rebaser_server::{ManagementMessage, ManagementMessageAction, REBASER_MANAGEMENT_STREAM}; -use serde::Serialize; +use rebaser_core::{ + ChangeSetMessage, ChangeSetReplyMessage, ManagementMessage, ManagementMessageAction, + REBASER_MANAGEMENT_STREAM, +}; use si_rabbitmq::{Consumer, ConsumerOffsetSpecification, Environment, Producer}; use std::collections::HashMap; -use telemetry::prelude::{debug, error}; +use std::time::Duration; + +use telemetry::prelude::*; use ulid::Ulid; use crate::{ClientError, ClientResult}; const REBASER_REPLY_STREAM_PREFIX: &str = "rebaser-reply"; +const REPLY_TIMEOUT_SECONDS: u64 = 10; /// A client for communicating with a running rebaser [`Server`](rebaser_server::Server). #[allow(missing_debug_implementations)] pub struct Client { management_stream: Stream, streams: HashMap, + reply_timeout: Duration, } #[allow(missing_debug_implementations)] @@ -56,85 +62,120 @@ impl Client { reply_consumer: management_reply_consumer, }, streams: HashMap::new(), + reply_timeout: Duration::from_secs(REPLY_TIMEOUT_SECONDS), }) } /// Send a message to a rebaser stream for a change set and block for a reply. - pub async fn send_with_reply( + pub async fn send_with_reply( &mut self, - message: T, - change_set_id: Ulid, - ) -> ClientResult> { + change_set_to_update: Ulid, + workspace_snapshot_to_rebase_on_top_of_current_snapshot_being_pointed_at: Ulid, + change_set_that_dictates_changes: Ulid, + ) -> ClientResult { let stream = self .streams - .get_mut(&change_set_id) + .get_mut(&change_set_to_update) .ok_or(ClientError::RebaserStreamForChangeSetNotFound)?; stream .producer - .send_single(message, Some(stream.reply_stream.clone())) + .send_single( + ChangeSetMessage { + change_set_to_update, + workspace_snapshot_to_rebase_on_top_of_current_snapshot_being_pointed_at, + change_set_that_dictates_changes, + }, + Some(stream.reply_stream.clone()), + ) .await?; - if let Some(delivery) = stream.reply_consumer.next().await? 
{ - if let Some(contents) = delivery.message_contents { - return Ok(Some(serde_json::from_value(contents)?)); - } - } - Ok(None) + let maybe_delivery = + match tokio::time::timeout(self.reply_timeout, stream.reply_consumer.next()).await { + Ok(result) => result?, + Err(e) => return Err(ClientError::ReplyTimeout(e)), + }; + + let delivery = maybe_delivery.ok_or(ClientError::EmptyDelivery( + stream.reply_consumer.stream().to_string(), + ))?; + let contents = delivery + .clone() + .message_contents + .ok_or(ClientError::EmptyMessageContentsForDelivery(delivery))?; + + Ok(serde_json::from_value(contents)?) } /// Send a message to the management stream to open a rebaser loop and block for a reply. - pub async fn send_management_open( + pub async fn send_management_open_change_set( &mut self, change_set_id: Ulid, - ) -> ClientResult> { + ) -> ClientResult { self.management_stream .producer .send_single( ManagementMessage { change_set_id, - action: ManagementMessageAction::Open, + action: ManagementMessageAction::OpenChangeSet, }, Some(self.management_stream.reply_stream.clone()), ) .await?; - if let Some(delivery) = self.management_stream.reply_consumer.next().await? { - if let Some(contents) = delivery.message_contents { - let change_set_stream: String = serde_json::from_value(contents)?; - let environment = Environment::new().await?; - let reply_stream = format!("{REBASER_REPLY_STREAM_PREFIX}-{change_set_id}"); - environment.create_stream(&reply_stream).await?; - - // FIXME(nick): name the producer properly. 
- let producer = Producer::new(&environment, "producer", &change_set_stream).await?; - let reply_consumer = Consumer::new( - &environment, - &reply_stream, - ConsumerOffsetSpecification::First, - ) - .await?; - - self.streams.insert( - change_set_id, - Stream { - producer, - reply_stream, - reply_consumer, - }, - ); - return Ok(Some(change_set_stream)); - } - } - Ok(None) + let maybe_delivery = match tokio::time::timeout( + self.reply_timeout, + self.management_stream.reply_consumer.next(), + ) + .await + { + Ok(result) => result?, + Err(e) => return Err(ClientError::ReplyTimeout(e)), + }; + + let delivery = maybe_delivery.ok_or(ClientError::EmptyDelivery( + self.management_stream.reply_consumer.stream().to_string(), + ))?; + let contents = delivery + .clone() + .message_contents + .ok_or(ClientError::EmptyMessageContentsForDelivery(delivery))?; + + let change_set_stream: String = serde_json::from_value(contents)?; + + let environment = Environment::new().await?; + let reply_stream = format!("{REBASER_REPLY_STREAM_PREFIX}-{change_set_id}"); + environment.create_stream(&reply_stream).await?; + + // FIXME(nick): name the producer properly. + let producer = Producer::new(&environment, "producer", &change_set_stream).await?; + let reply_consumer = Consumer::new( + &environment, + &reply_stream, + ConsumerOffsetSpecification::First, + ) + .await?; + + self.streams.insert( + change_set_id, + Stream { + producer, + reply_stream, + reply_consumer, + }, + ); + Ok(change_set_stream) } /// Send a message to the management stream to close a rebaser loop and do not wait for a reply. 
- pub async fn send_management_close(&mut self, change_set_id: Ulid) -> ClientResult<()> { + pub async fn send_management_close_change_set( + &mut self, + change_set_id: Ulid, + ) -> ClientResult<()> { self.management_stream .producer .send_single( ManagementMessage { change_set_id, - action: ManagementMessageAction::Close, + action: ManagementMessageAction::CloseChangeSet, }, Some(self.management_stream.reply_stream.clone()), ) diff --git a/lib/rebaser-client/src/lib.rs b/lib/rebaser-client/src/lib.rs index dab163217f..8797cea599 100644 --- a/lib/rebaser-client/src/lib.rs +++ b/lib/rebaser-client/src/lib.rs @@ -27,99 +27,28 @@ mod client; pub use client::Client; -use si_rabbitmq::RabbitError; +use si_rabbitmq::{Delivery, RabbitError}; use telemetry::prelude::error; use thiserror::Error; +use tokio::time::error::Elapsed; #[allow(missing_docs)] #[remain::sorted] #[derive(Debug, Error)] pub enum ClientError { + #[error("unexpected empty delivery for stream: {0}")] + EmptyDelivery(String), + #[error("empty message contents for delivery: {0:?}")] + EmptyMessageContentsForDelivery(Delivery), #[error("si rabbitmq error: {0}")] Rabbit(#[from] RabbitError), #[error("rebaser stream for change set not found")] RebaserStreamForChangeSetNotFound, + #[error("hit timeout while waiting for message on reply stream: {0}")] + ReplyTimeout(Elapsed), #[error("serde json error: {0}")] SerdeJson(#[from] serde_json::Error), } #[allow(missing_docs)] pub type ClientResult = Result; - -#[cfg(test)] -mod tests { - use super::*; - use rebaser_server::{ConfigBuilder, Server}; - use tokio::test; - use ulid::Ulid; - - async fn test_setup() -> Client { - // FIXME(nick): make this not brittle... make strong! 
- let config = ConfigBuilder::default() - .cyclone_encryption_key_path( - "../../lib/cyclone-server/src/dev.encryption.key" - .try_into() - .expect("could not convert"), - ) - .build() - .expect("could not build config"); - let server = Server::from_config(config) - .await - .expect("could not build server"); - tokio::spawn(server.run()); - - Client::new().await.expect("could not build client") - } - - #[test] - async fn connect() { - let client = test_setup().await; - client.close().await; - } - - #[test] - async fn send_management() { - let mut client = test_setup().await; - - let change_set_id = Ulid::new(); - let _new_stream_to_produce_to = client - .send_management_open(change_set_id) - .await - .expect("could not create new rebaser loop for change set") - .expect("no message returned"); - - client - .send_management_close(change_set_id) - .await - .expect("could not close the rebaser loop for change set"); - - client.close().await; - } - - #[test] - async fn send_management_and_round_trip() { - let mut client = test_setup().await; - - let change_set_id = Ulid::new(); - let _new_stream_to_produce_to = client - .send_management_open(change_set_id) - .await - .expect("could not create new rebaser loop for change set") - .expect("no message returned"); - - let contents = "MUSTANG GTD"; - let message = client - .send_with_reply(contents, change_set_id) - .await - .expect("could not send message") - .expect("no message returned"); - assert_eq!(contents, &message); - - client - .send_management_close(change_set_id) - .await - .expect("could not close the rebaser loop for change set"); - - client.close().await; - } -} diff --git a/lib/rebaser-core/BUCK b/lib/rebaser-core/BUCK new file mode 100644 index 0000000000..282fb67eec --- /dev/null +++ b/lib/rebaser-core/BUCK @@ -0,0 +1,16 @@ +load("@prelude-si//:macros.bzl", "rust_library") + +rust_library( + name = "rebaser-core", + deps = [ + "//lib/rebaser-client:rebaser-client", + "//lib/rebaser-server:rebaser-server", + 
"//third-party/rust:serde", + "//third-party/rust:pretty_assertions_sorted", + "//third-party/rust:tokio", + "//third-party/rust:ulid", + ], + srcs = glob([ + "src/**/*.rs", + ]), +) diff --git a/lib/rebaser-core/Cargo.toml b/lib/rebaser-core/Cargo.toml new file mode 100644 index 0000000000..d4518d3025 --- /dev/null +++ b/lib/rebaser-core/Cargo.toml @@ -0,0 +1,15 @@ +[package] +name = "rebaser-core" +version = "0.1.0" +edition = "2021" +publish = false + +[dependencies] +serde = { workspace = true } +ulid = { workspace = true } + +[dev-dependencies] +tokio = { workspace = true } +rebaser-client = { path = "../../lib/rebaser-client" } +rebaser-server = { path = "../../lib/rebaser-server" } +pretty_assertions_sorted = { workspace = true } diff --git a/lib/rebaser-core/src/lib.rs b/lib/rebaser-core/src/lib.rs new file mode 100644 index 0000000000..f312a926ee --- /dev/null +++ b/lib/rebaser-core/src/lib.rs @@ -0,0 +1,131 @@ +//! This library exists to ensure that rebaser-client does not depend on rebaser-server and vice +//! versa. Keeping the dependency chain intact is important because rebaser-server depends on the +//! dal and the dal (really anyone) must be able to use the rebaser-client. +//! +//! This library also contains tests for rebaser-client and rebaser-server interaction. + +#![warn( + missing_debug_implementations, + missing_docs, + unreachable_pub, + bad_style, + dead_code, + improper_ctypes, + non_shorthand_field_patterns, + no_mangle_generic_items, + overflowing_literals, + path_statements, + patterns_in_fns_without_body, + private_in_public, + unconditional_recursion, + unused, + unused_allocation, + unused_comparisons, + unused_parens, + while_true, + clippy::missing_panics_doc +)] + +use serde::Deserialize; +use serde::Serialize; +use ulid::Ulid; + +/// Stream to manage rebaser consumer loops. +pub const REBASER_MANAGEMENT_STREAM: &str = "rebaser-management"; + +/// The action for the rebaser management loop. 
+#[derive(Debug, Serialize, Deserialize)] +pub enum ManagementMessageAction { + /// Close the inner rebaser loop for a change set. If it has already been closed, this is a + /// no-op. + CloseChangeSet, + /// Open the inner rebaser loop for a change set. If one already exists, it is a no-op. + OpenChangeSet, +} + +/// The message that the rebaser management consumer expects in the server. +#[derive(Debug, Serialize, Deserialize)] +pub struct ManagementMessage { + /// The ID of the change set wishing to be operated on. + pub change_set_id: Ulid, + /// The action to instruct the management loop to perform. + pub action: ManagementMessageAction, +} + +/// The message that the rebaser change set consumer expects in the server. +#[derive(Debug, Serialize, Deserialize)] +pub struct ChangeSetMessage { + /// Corresponds to the change set whose pointer is to be updated. + pub change_set_to_update: Ulid, + /// Corresponds to the workspace snapshot that will be rebased on top of the snapshot that the + /// change set is currently pointing at. + pub workspace_snapshot_to_rebase_on_top_of_current_snapshot_being_pointed_at: Ulid, + /// Corresponds to the change set that's either the base change set, the last change set before + /// edits were made, or the change set that you are trying to “merge” into the base. + pub change_set_that_dictates_changes: Ulid, +} + +/// The message shape that the rebaser change set loop will use for replying to the client. +#[derive(Debug, Serialize, Deserialize)] +pub enum ChangeSetReplyMessage { + /// Processing the delivery was a success. + Success { + /// The results of processing the delivery. + results: String, + }, + /// Processing the delivery was a failure. + Failure { + /// The error encountered when processing the delivery. 
+ error: String, + }, +} + +#[cfg(test)] +mod tests { + + use rebaser_client::Client; + use rebaser_server::{ConfigBuilder, Server}; + use tokio::test; + use ulid::Ulid; + + async fn test_setup() -> Client { + let config = ConfigBuilder::default() + .cyclone_encryption_key_path( + "../../lib/cyclone-server/src/dev.encryption.key" + .try_into() + .expect("could not convert"), + ) + .build() + .expect("could not build config"); + let server = Server::from_config(config) + .await + .expect("could not build server"); + tokio::spawn(server.run()); + + Client::new().await.expect("could not build client") + } + + #[test] + async fn connect() { + let client = test_setup().await; + client.close().await; + } + + #[test] + async fn management() { + let mut client = test_setup().await; + + let change_set_id = Ulid::new(); + let _new_stream_to_produce_to = client + .send_management_open_change_set(change_set_id) + .await + .expect("could not create new rebaser loop for change set"); + + client + .send_management_close_change_set(change_set_id) + .await + .expect("could not close the rebaser loop for change set"); + + client.close().await; + } +} diff --git a/lib/rebaser-server/Cargo.toml b/lib/rebaser-server/Cargo.toml index 9492702409..1c9e32aa5f 100644 --- a/lib/rebaser-server/Cargo.toml +++ b/lib/rebaser-server/Cargo.toml @@ -12,6 +12,7 @@ dal = { path = "../../lib/dal" } derive_builder = { workspace = true } futures = { workspace = true } nats-subscriber = { path = "../../lib/nats-subscriber" } +rebaser-core = { path = "../../lib/rebaser-core" } remain = { workspace = true } serde = { workspace = true } serde_json = { workspace = true } diff --git a/lib/rebaser-server/src/config.rs b/lib/rebaser-server/src/config.rs index 6fdde41193..ddffbbbe2d 100644 --- a/lib/rebaser-server/src/config.rs +++ b/lib/rebaser-server/src/config.rs @@ -35,7 +35,7 @@ impl ConfigError { type Result = std::result::Result; -/// The set of configuration options for building a [`Server`]. 
+#[allow(missing_docs)] #[derive(Debug, Builder)] pub struct Config { #[builder(default = "PgPoolConfig::default()")] @@ -135,8 +135,10 @@ fn default_recreate_management_stream() -> bool { false } -#[allow(clippy::disallowed_methods)] // Used to determine if running in development -fn detect_and_configure_development(config: &mut ConfigFile) -> Result<()> { +/// This function is used to determine the development environment and update the [`ConfigFile`] +/// accordingly. +#[allow(clippy::disallowed_methods)] +pub fn detect_and_configure_development(config: &mut ConfigFile) -> Result<()> { if env::var("BUCK_RUN_BUILD_ID").is_ok() || env::var("BUCK_BUILD_ID").is_ok() { buck2_development(config) } else if let Ok(dir) = env::var("CARGO_MANIFEST_DIR") { diff --git a/lib/rebaser-server/src/lib.rs b/lib/rebaser-server/src/lib.rs index d048be225b..0399f5d20b 100644 --- a/lib/rebaser-server/src/lib.rs +++ b/lib/rebaser-server/src/lib.rs @@ -25,6 +25,7 @@ mod config; mod server; +pub use config::detect_and_configure_development; pub use config::Config; pub use config::ConfigBuilder; pub use config::ConfigError; @@ -32,31 +33,3 @@ pub use config::ConfigFile; pub use server::Server; pub use si_settings::StandardConfig; pub use si_settings::StandardConfigFile; - -use serde::{Deserialize, Serialize}; -use ulid::Ulid; - -/// Stream to manage rebaser consumer loops. -pub const REBASER_MANAGEMENT_STREAM: &str = "rebaser-management"; - -/// Stream prefix for rebaser consumer loops. -pub const REBASER_STREAM_PREFIX: &str = "rebaser"; - -/// The action for the rebaser management loop. -#[derive(Debug, Serialize, Deserialize)] -pub enum ManagementMessageAction { - /// Close the inner rebaser loop for a change set. If it has already been closed, this is a - /// no-op. - Close, - /// Open the inner rebaser loop for a change set. If one already exists, it is a no-op. - Open, -} - -/// The message that the rebaser management consumer expects in the server. 
-#[derive(Debug, Serialize, Deserialize)] -pub struct ManagementMessage { - /// The ID of the change set wishing to be operated on. - pub change_set_id: Ulid, - /// The action to instruct the management loop to perform. - pub action: ManagementMessageAction, -} diff --git a/lib/rebaser-server/src/server.rs b/lib/rebaser-server/src/server.rs index 02d3711692..43669e1b33 100644 --- a/lib/rebaser-server/src/server.rs +++ b/lib/rebaser-server/src/server.rs @@ -1,17 +1,17 @@ -use std::collections::HashMap; -use std::{io, path::Path, sync::Arc}; - +use dal::change_set_pointer::ChangeSetPointerError; +use dal::workspace_snapshot::WorkspaceSnapshotError; use dal::{ - job::consumer::JobConsumerError, DalContext, InitializationError, JobFailureError, - JobQueueProcessor, NatsProcessor, ServicesContext, TransactionsError, + job::consumer::JobConsumerError, InitializationError, JobFailureError, JobQueueProcessor, + NatsProcessor, TransactionsError, }; use futures::{FutureExt, Stream, StreamExt}; use nats_subscriber::SubscriberError; + use si_data_nats::{NatsClient, NatsConfig, NatsError}; use si_data_pg::{PgPool, PgPoolConfig, PgPoolError}; -use si_rabbitmq::{ - Consumer, ConsumerHandle, ConsumerOffsetSpecification, Environment, Producer, RabbitError, -}; +use si_rabbitmq::RabbitError; + +use std::{io, path::Path, sync::Arc}; use telemetry::prelude::*; use thiserror::Error; use tokio::{ @@ -21,18 +21,26 @@ use tokio::{ oneshot, watch, }, }; +<<<<<<< HEAD use tokio_stream::wrappers::UnboundedReceiverStream; use ulid::Ulid; use veritech_client::{Client as VeritechClient, CycloneEncryptionKey, CycloneEncryptionKeyError}; +======= + +use veritech_client::{Client as VeritechClient, EncryptionKey, EncryptionKeyError}; +>>>>>>> cdb8726f3 (Initial round trip loop of rebaser using graph logic) + +use crate::Config; -use crate::REBASER_STREAM_PREFIX; -use crate::{Config, REBASER_MANAGEMENT_STREAM}; -use crate::{ManagementMessage, ManagementMessageAction}; +mod change_set_loop; +mod 
management_loop; #[allow(missing_docs)] #[remain::sorted] #[derive(Debug, Error)] pub enum ServerError { + #[error("change set pointer error: {0}")] + ChangeSetPointer(#[from] ChangeSetPointerError), #[error("error when loading encryption key: {0}")] CycloneEncryptionKey(#[from] CycloneEncryptionKeyError), #[error(transparent)] @@ -59,6 +67,8 @@ pub enum ServerError { Subscriber(#[from] SubscriberError), #[error(transparent)] Transactions(#[from] Box), + #[error("workspace snapshot error: {0}")] + WorkspaceSnapshot(#[from] WorkspaceSnapshotError), } impl From for ServerError { @@ -164,7 +174,7 @@ impl Server { /// The primary function for running the server. This should be called when deciding to run /// the server as a task, in a standalone binary, etc. pub async fn run(self) -> ServerResult<()> { - consume_stream_task( + management_loop::management_loop_infallible_wrapper( self.recreate_management_stream, self.pg_pool, self.nats, @@ -247,6 +257,7 @@ impl Default for ShutdownSource { } } +<<<<<<< HEAD #[allow(clippy::too_many_arguments)] async fn consume_stream_task( recreate_management_stream: bool, @@ -416,6 +427,8 @@ async fn rebaser_loop(mut consumer: Consumer) -> ServerResult<()> { Ok(()) } +======= +>>>>>>> cdb8726f3 (Initial round trip loop of rebaser using graph logic) fn prepare_graceful_shutdown( mut external_shutdown_rx: mpsc::Receiver, shutdown_watch_tx: watch::Sender<()>, diff --git a/lib/rebaser-server/src/server/change_set_loop.rs b/lib/rebaser-server/src/server/change_set_loop.rs new file mode 100644 index 0000000000..3892afc5d4 --- /dev/null +++ b/lib/rebaser-server/src/server/change_set_loop.rs @@ -0,0 +1,126 @@ +use dal::change_set_pointer::ChangeSetPointer; +use dal::{DalContext, DalContextBuilder, Tenancy, Visibility, WorkspacePk, WorkspaceSnapshot}; +use rebaser_core::{ChangeSetMessage, ChangeSetReplyMessage}; +use si_rabbitmq::{Consumer, Delivery, Environment, Producer}; +use telemetry::prelude::*; + +use crate::server::{ServerError, 
ServerResult}; + +pub(crate) async fn change_set_loop_infallible_wrapper( + ctx_builder: DalContextBuilder, + consumer: Consumer, +) { + if let Err(err) = change_set_loop(ctx_builder, consumer).await { + error!(error = ?err, "change set loop failed"); + } +} + +async fn change_set_loop( + ctx_builder: DalContextBuilder, + mut consumer: Consumer, +) -> ServerResult> { + let mut ctx = ctx_builder.build_default().await?; + ctx.update_visibility(Visibility::new_head(false)); + ctx.update_tenancy(Tenancy::new(WorkspacePk::NONE)); + + // Create an environment for reply streams. + let environment = Environment::new().await?; + while let Some(delivery) = consumer.next().await? { + // TODO(nick): first detect conflicts and updates, second perform the updates. + // If conflicts appears, do not perform updates if they exist, and report conflicts back. + // In other words... + // 1) succeed everywhere + // 2) store offset with changeset + // 3) update requester stream w/out waiting for reply + process_delivery_infallible_wrapper(&mut ctx, &environment, consumer.stream(), &delivery) + .await; + } + Ok(None) +} + +// NOTE(nick): reply to whoever sent the message if a failure happens. 
+async fn process_delivery_infallible_wrapper( + ctx: &mut DalContext, + environment: &Environment, + inbound_stream: impl AsRef, + delivery: &Delivery, +) { + let inbound_stream = inbound_stream.as_ref(); + match &delivery.reply_to { + Some(reply_to) => { + if let Err(err) = + process_delivery(ctx, environment, inbound_stream, delivery, reply_to).await + { + error!(error = ?err, "processing delivery failed, attempting to reply"); + match Producer::for_reply(&environment, inbound_stream, reply_to).await { + Ok(mut producer) => { + if let Err(err) = producer + .send_single( + ChangeSetReplyMessage::Failure { + error: format!("{err}"), + }, + None, + ) + .await + { + error!(error = ?err, "sending reply failed"); + } + if let Err(err) = producer.close().await { + error!(error = ?err, "closing reply producer failed"); + } + } + Err(err) => error!(error = ?err, "creating reply producer failed"), + } + } + } + None => error!( + "cannot reply: empty reply field found for delivery: {:?}", + delivery + ), + } +} + +// TODO(nick): use real errors in this function. +async fn process_delivery( + ctx: &mut DalContext, + environment: &Environment, + inbound_stream: impl AsRef, + delivery: &Delivery, + reply_to: impl AsRef, +) -> ServerResult<()> { + let raw_message = match &delivery.message_contents { + Some(found_raw_message) => found_raw_message, + None => return Err(ServerError::MissingManagementMessageReplyTo), + }; + let message: ChangeSetMessage = serde_json::from_value(raw_message.clone())?; + + // ------------------------------------ + // NOTE(nick): the "work" begins below! 
+ // ------------------------------------ + + let to_rebase: WorkspaceSnapshot = WorkspaceSnapshot::find( + ctx, + message + .workspace_snapshot_to_rebase_on_top_of_current_snapshot_being_pointed_at + .into(), + ) + .await?; + let to_rebase_change_set = + ChangeSetPointer::find(ctx, message.change_set_that_dictates_changes.into()).await?; + let onto_change_set = ChangeSetPointer::find(ctx, message.change_set_to_update.into()).await?; + + let (conflicts, updates) = to_rebase + .detect_conflicts_and_updates(ctx, &to_rebase_change_set, &onto_change_set) + .await?; + + // TODO(nick): for now, just send back the conflicts and updates. We'll need to do something + // with those updates later. + let serialized = serde_json::to_value(ChangeSetReplyMessage::Success { + results: format!("{:?} {:?}", conflicts, updates), + })?; + let mut producer = Producer::for_reply(&environment, inbound_stream, reply_to).await?; + producer.send_single(serialized, None).await?; + producer.close().await?; + + Ok(()) +} diff --git a/lib/rebaser-server/src/server/management_loop.rs b/lib/rebaser-server/src/server/management_loop.rs new file mode 100644 index 0000000000..63e25bedb6 --- /dev/null +++ b/lib/rebaser-server/src/server/management_loop.rs @@ -0,0 +1,157 @@ +use dal::{DalContext, JobQueueProcessor, ServicesContext}; + +use rebaser_core::{ManagementMessage, ManagementMessageAction, REBASER_MANAGEMENT_STREAM}; +use si_data_nats::NatsClient; +use si_data_pg::PgPool; +use si_rabbitmq::{Consumer, ConsumerHandle, ConsumerOffsetSpecification, Environment, Producer}; +use std::collections::HashMap; + +use std::sync::Arc; +use telemetry::prelude::*; + +use tokio::sync::watch; +use ulid::Ulid; + +use crate::server::{change_set_loop, ServerError, ServerResult}; + +pub(crate) async fn management_loop_infallible_wrapper( + recreate_management_stream: bool, + pg_pool: PgPool, + nats: NatsClient, + veritech: veritech_client::Client, + job_processor: Box, + encryption_key: Arc, + shutdown_watch_rx: 
watch::Receiver<()>, +) { + if let Err(err) = management_loop( + recreate_management_stream, + pg_pool, + nats, + veritech, + job_processor, + encryption_key, + shutdown_watch_rx, + ) + .await + { + error!(error = ?err, "consuming stream failed"); + } +} + +async fn management_loop( + recreate_management_stream: bool, + pg_pool: PgPool, + nats: NatsClient, + veritech: veritech_client::Client, + job_processor: Box, + encryption_key: Arc, + _shutdown_watch_rx: watch::Receiver<()>, +) -> ServerResult<()> { + let services_context = ServicesContext::new( + pg_pool, + nats.clone(), + job_processor, + veritech.clone(), + encryption_key, + None, + None, + ); + // let ctx_builder = DalContext::builder(services_context, false); + + // Meta: we can only have one rebaser instance right now due to https://github.com/rabbitmq/rabbitmq-stream-rust-client/issues/130 + // + // 1) subscribe to "next" for changeset close/create events --> stream for ChangeSetClose or ChangeSetOpen + // --> "rebaser-management" + // 2) query db for all named, open changesets + // 3) start a subscription for each result for step 2 + // --> "rebaser-" + // 1:N --> "rebaser--reply--" + // (e.g. "rebaser--reply-sdf-") + // note: requester deletes stream upon reply + // + // NOTE: QUERY DB FOR OFFSET NUMBER OR GO TO FIRST SPECIFICATION + + // Prepare the environment and management stream. + let environment = Environment::new().await?; + if recreate_management_stream { + environment.delete_stream(REBASER_MANAGEMENT_STREAM).await?; + } + environment.create_stream(REBASER_MANAGEMENT_STREAM).await?; + + let mut management_consumer = Consumer::new( + &environment, + REBASER_MANAGEMENT_STREAM, + ConsumerOffsetSpecification::Next, + ) + .await?; + let management_handle = management_consumer.handle(); + let mut rebaser_handles: HashMap = HashMap::new(); + + while let Some(management_delivery) = management_consumer.next().await? 
{ + let contents = management_delivery + .message_contents + .ok_or(ServerError::MissingManagementMessageContents)?; + let reply_to = management_delivery + .reply_to + .ok_or(ServerError::MissingManagementMessageReplyTo)?; + let mm: ManagementMessage = serde_json::from_value(contents)?; + + match mm.action { + ManagementMessageAction::CloseChangeSet => { + match rebaser_handles.remove(&mm.change_set_id) { + Some((stream, handle)) => { + if let Err(err) = handle.close().await { + warn!(error = ?err, "closing change set consumer failed"); + } + if let Err(err) = environment.delete_stream(stream).await { + warn!(error = ?err, "deleting change set stream failed"); + } + } + None => debug!( + "did not find handle for change set id ({}) (it have already been closed)", + mm.change_set_id + ), + } + } + ManagementMessageAction::OpenChangeSet => { + // TODO(nick): move stream naming to a centralized system, perhaps behind a unit struct. + let new_stream = format!("rebaser-{}", mm.change_set_id); + let stream_already_exists = environment.create_stream(&new_stream).await?; + + // Only create the new stream and loop if the stream does not already exist. + if !stream_already_exists { + let consumer = + Consumer::new(&environment, &new_stream, ConsumerOffsetSpecification::Next) + .await?; + let handle = consumer.handle(); + rebaser_handles.insert(mm.change_set_id, (new_stream.clone(), handle)); + + let ctx_builder = DalContext::builder(services_context.clone(), false); + tokio::spawn(change_set_loop::change_set_loop_infallible_wrapper( + ctx_builder, + consumer, + )); + } + + // Return the requested stream and then close the producer. + let mut producer = Producer::for_reply(&environment, &new_stream, reply_to).await?; + producer.send_single(new_stream, None).await?; + producer.close().await?; + } + } + } + + // Once the loop is done, perform cleanup. 
+ for (_, (stream, handle)) in rebaser_handles.drain() { + if let Err(err) = handle.close().await { + warn!(error = ?err, "closing change set consumer failed during cleanup"); + } + if let Err(err) = environment.delete_stream(stream).await { + warn!(error = ?err, "deleting change set stream failed during cleanup"); + } + } + if let Err(err) = management_handle.close().await { + warn!(error = ?err, "closing management consumer failed during cleanup"); + } + Ok(()) +} diff --git a/lib/rebaser-server/tests/integration_test/connection.rs b/lib/rebaser-server/tests/integration_test/connection.rs index f070464869..06fa534fd7 100644 --- a/lib/rebaser-server/tests/integration_test/connection.rs +++ b/lib/rebaser-server/tests/integration_test/connection.rs @@ -1,12 +1,12 @@ -use dal::workspace_snapshot::change_set::ChangeSet; +use dal::change_set_pointer::ChangeSetPointer; use dal::{DalContext, WorkspaceSnapshot}; use si_rabbitmq::Environment; use si_test_macros::rebaser_test as test; #[test] async fn connect_to_database(ctx: &DalContext) { - let change_set = ChangeSet::new().expect("could not create change set"); - let _snapshot = WorkspaceSnapshot::new(ctx, &change_set) + let change_set = ChangeSetPointer::new_local().expect("could not create change set"); + let _snapshot = WorkspaceSnapshot::initial(ctx, &change_set) .await .expect("could not create snapshot"); } diff --git a/lib/si-data-pg/src/lib.rs b/lib/si-data-pg/src/lib.rs index 3f89ffccd8..a8f2a1ec79 100644 --- a/lib/si-data-pg/src/lib.rs +++ b/lib/si-data-pg/src/lib.rs @@ -52,6 +52,8 @@ pub enum PgError { "transaction not exclusively referenced when rollback attempted; arc_strong_count={0}" )] TxnRollbackNotExclusive(usize), + #[error("unexpected row returned: {0:?}")] + UnexpectedRow(PgRow), } #[remain::sorted] @@ -2330,6 +2332,38 @@ impl PgSharedTransaction { } } + /// Executes a statement that returns zero rows. + /// + /// Returns an error if the query returns more than zero rows. 
+ /// + /// A statement may contain parameters, specified by `$n`, where `n` is the index of the + /// parameter of the list provided, 1-indexed. + /// + /// The `statement` argument can either be a `Statement`, or a raw query string. If the same + /// statement will be repeatedly executed (perhaps with different query parameters), consider + /// preparing the statement up front with the `prepare` method. + /// + /// # Panics + /// + /// - If the number of parameters provided does not match the number expected. + /// - If the internal transaction has already been consumed which is an internal correctness + /// bug + pub async fn query_none( + &self, + statement: &str, + params: &[&(dyn ToSql + Sync)], + ) -> Result<(), PgError> { + match self.inner.lock().await.borrow_txn().as_ref() { + Some(txn) => match txn.query_opt(statement, params).await? { + None => Ok(()), + Some(row) => Err(PgError::UnexpectedRow(row)), + }, + None => { + unreachable!("txn is only consumed with commit/rollback--this is an internal bug") + } + } + } + /// The maximally flexible version of [`query`]. /// /// A statement may contain parameters, specified by `$n`, where `n` is the index of the diff --git a/lib/si-rabbitmq/src/delivery.rs b/lib/si-rabbitmq/src/delivery.rs index 8287cb9d08..9f2e3987ca 100644 --- a/lib/si-rabbitmq/src/delivery.rs +++ b/lib/si-rabbitmq/src/delivery.rs @@ -5,7 +5,7 @@ use crate::RabbitError; /// This type is a deconstruction of the upstream /// [`Delivery`](rabbitmq_stream_client::types::Delivery) type. -#[derive(Debug)] +#[derive(Debug, Clone)] pub struct Delivery { /// The contents of the message. pub message_contents: Option, diff --git a/lib/si-test-macros/src/dal_test.rs b/lib/si-test-macros/src/dal_test.rs index 5fff086d72..139076dbb1 100644 --- a/lib/si-test-macros/src/dal_test.rs +++ b/lib/si-test-macros/src/dal_test.rs @@ -79,6 +79,11 @@ fn fn_setup<'a>(params: impl Iterator) -> DalTestFnSetup { let var = var.as_ref(); expander.push_arg(parse_quote! 
{#var}); } + "RebaserShutdownHandle" => { + let var = expander.setup_rebaser_shutdown_handle(); + let var = var.as_ref(); + expander.push_arg(parse_quote! {#var}); + } "ServicesContext" => { let var = expander.setup_services_context(); let var = var.as_ref(); @@ -173,6 +178,7 @@ fn fn_setup<'a>(params: impl Iterator) -> DalTestFnSetup { expander.setup_start_veritech_server(); expander.setup_start_pinga_server(); expander.setup_start_council_server(); + expander.setup_start_rebaser_server(); } expander.finish() @@ -200,6 +206,9 @@ struct DalTestFnSetupExpander { pinga_server: Option>, pinga_shutdown_handle: Option>, start_pinga_server: Option<()>, + rebaser_server: Option>, + rebaser_shutdown_handle: Option>, + start_rebaser_server: Option<()>, veritech_server: Option>, veritech_shutdown_handle: Option>, start_veritech_server: Option<()>, @@ -226,6 +235,9 @@ impl DalTestFnSetupExpander { pinga_server: None, pinga_shutdown_handle: None, start_pinga_server: None, + rebaser_server: None, + rebaser_shutdown_handle: None, + start_rebaser_server: None, veritech_server: None, veritech_shutdown_handle: None, start_veritech_server: None, @@ -318,6 +330,30 @@ impl FnSetupExpander for DalTestFnSetupExpander { self.start_pinga_server = value; } + fn rebaser_server(&self) -> Option<&Rc> { + self.rebaser_server.as_ref() + } + + fn set_rebaser_server(&mut self, value: Option>) { + self.rebaser_server = value; + } + + fn rebaser_shutdown_handle(&self) -> Option<&Rc> { + self.rebaser_shutdown_handle.as_ref() + } + + fn set_rebaser_shutdown_handle(&mut self, value: Option>) { + self.rebaser_shutdown_handle = value; + } + + fn start_rebaser_server(&self) -> Option<()> { + self.start_rebaser_server + } + + fn set_start_rebaser_server(&mut self, value: Option<()>) { + self.start_rebaser_server = value; + } + fn veritech_server(&self) -> Option<&Rc> { self.veritech_server.as_ref() } diff --git a/lib/si-test-macros/src/expand.rs b/lib/si-test-macros/src/expand.rs index 
13245912af..2e51584eab 100644 --- a/lib/si-test-macros/src/expand.rs +++ b/lib/si-test-macros/src/expand.rs @@ -224,6 +224,15 @@ pub(crate) trait FnSetupExpander { fn start_pinga_server(&self) -> Option<()>; fn set_start_pinga_server(&mut self, value: Option<()>); + fn rebaser_server(&self) -> Option<&Rc>; + fn set_rebaser_server(&mut self, value: Option>); + + fn rebaser_shutdown_handle(&self) -> Option<&Rc>; + fn set_rebaser_shutdown_handle(&mut self, value: Option>); + + fn start_rebaser_server(&self) -> Option<()>; + fn set_start_rebaser_server(&mut self, value: Option<()>); + fn veritech_server(&self) -> Option<&Rc>; fn set_veritech_server(&mut self, value: Option>); @@ -377,6 +386,54 @@ pub(crate) trait FnSetupExpander { self.set_start_pinga_server(Some(())); } + fn setup_rebaser_server(&mut self) -> Rc { + if let Some(ident) = self.rebaser_server() { + return ident.clone(); + } + + let services_context = self.setup_services_context(); + let services_context = services_context.as_ref(); + + let var = Ident::new("rebaser_server", Span::call_site()); + self.code_extend(quote! { + let #var = ::dal_test::rebaser_server(&#services_context)?; + }); + self.set_rebaser_server(Some(Rc::new(var))); + + self.rebaser_server().unwrap().clone() + } + + fn setup_rebaser_shutdown_handle(&mut self) -> Rc { + if let Some(ident) = self.rebaser_shutdown_handle() { + return ident.clone(); + } + + let rebaser_server = self.setup_rebaser_server(); + let rebaser_server = rebaser_server.as_ref(); + + let var = Ident::new("rebaser_shutdown_handle", Span::call_site()); + self.code_extend(quote! 
{ + let #var = #rebaser_server.shutdown_handle(); + }); + self.set_rebaser_shutdown_handle(Some(Rc::new(var))); + + self.rebaser_shutdown_handle().unwrap().clone() + } + + fn setup_start_rebaser_server(&mut self) { + if self.start_rebaser_server().is_some() { + return; + } + + let rebaser_server = self.setup_rebaser_server(); + let rebaser_server = rebaser_server.as_ref(); + + self.code_extend(quote! { + ::tokio::spawn(#rebaser_server.run()); + }); + self.set_start_rebaser_server(Some(())); + } + fn setup_veritech_server(&mut self) -> Rc { if let Some(ident) = self.veritech_server() { return ident.clone(); diff --git a/lib/si-test-macros/src/lib.rs b/lib/si-test-macros/src/lib.rs index 0afaf03f03..25c637a645 100644 --- a/lib/si-test-macros/src/lib.rs +++ b/lib/si-test-macros/src/lib.rs @@ -149,6 +149,8 @@ fn path_as_string(path: &Path) -> String { /// for a workspace for a visibility which is not in a change set /// * `pinga_handle: PingaShutdownHandle`: the shutdown handle for the Pinga server running /// alongside each test +/// * `rebaser_handle: RebaserShutdownHandle`: the shutdown handle for the rebaser server running +/// alongside each test /// * `services_ctx: ServicesContext`: a services context object, used to create DAL contexts /// * `veritech_handle: VeritechShutdownHandle`: the shutdown handle for the Veritech server /// running alongside each test @@ -325,6 +327,8 @@ pub fn dal_test(attr: TokenStream, input: TokenStream) -> TokenStream { /// for a workspace for a visibility which is not in a change set /// * `pinga_handle: PingaShutdownHandle`: the shutdown handle for the Pinga server running /// alongside each test +/// * `rebaser_handle: RebaserShutdownHandle`: the shutdown handle for the rebaser server running +/// alongside each test /// * `services_ctx: ServicesContext`: a services context object, used to create DAL contexts /// * `veritech_handle: VeritechShutdownHandle`: the shutdown handle for the Veritech server /// running alongside each 
test diff --git a/lib/si-test-macros/src/sdf_test.rs b/lib/si-test-macros/src/sdf_test.rs index 1a9212daa0..bd478ad149 100644 --- a/lib/si-test-macros/src/sdf_test.rs +++ b/lib/si-test-macros/src/sdf_test.rs @@ -95,6 +95,11 @@ fn fn_setup<'a>(params: impl Iterator) -> SdfTestFnSetup { let var = var.as_ref(); expander.push_arg(parse_quote! {#var}); } + "RebaserShutdownHandle" => { + let var = expander.setup_rebaser_shutdown_handle(); + let var = var.as_ref(); + expander.push_arg(parse_quote! {#var}); + } "ServicesContext" => { let var = expander.setup_services_context(); let var = var.as_ref(); @@ -189,6 +194,7 @@ fn fn_setup<'a>(params: impl Iterator) -> SdfTestFnSetup { expander.setup_start_veritech_server(); expander.setup_start_pinga_server(); expander.setup_start_council_server(); + expander.setup_start_rebaser_server(); } expander.finish() @@ -216,6 +222,9 @@ struct SdfTestFnSetupExpander { pinga_server: Option>, pinga_shutdown_handle: Option>, start_pinga_server: Option<()>, + rebaser_server: Option>, + rebaser_shutdown_handle: Option>, + start_rebaser_server: Option<()>, veritech_server: Option>, veritech_shutdown_handle: Option>, start_veritech_server: Option<()>, @@ -248,6 +257,9 @@ impl SdfTestFnSetupExpander { pinga_server: None, pinga_shutdown_handle: None, start_pinga_server: None, + rebaser_server: None, + rebaser_shutdown_handle: None, + start_rebaser_server: None, veritech_server: None, veritech_shutdown_handle: None, start_veritech_server: None, @@ -463,6 +475,30 @@ impl FnSetupExpander for SdfTestFnSetupExpander { self.start_pinga_server = value; } + fn rebaser_server(&self) -> Option<&Rc> { + self.rebaser_server.as_ref() + } + + fn set_rebaser_server(&mut self, value: Option>) { + self.rebaser_server = value; + } + + fn rebaser_shutdown_handle(&self) -> Option<&Rc> { + self.rebaser_shutdown_handle.as_ref() + } + + fn set_rebaser_shutdown_handle(&mut self, value: Option>) { + self.rebaser_shutdown_handle = value; + } + + fn 
start_rebaser_server(&self) -> Option<()> { + self.start_rebaser_server + } + + fn set_start_rebaser_server(&mut self, value: Option<()>) { + self.start_rebaser_server = value; + } + fn veritech_server(&self) -> Option<&Rc> { self.veritech_server.as_ref() } From 1754f6a10d8ded479f3ac0850db494cddda805a2 Mon Sep 17 00:00:00 2001 From: Nick Gerace Date: Mon, 18 Sep 2023 20:56:08 -0400 Subject: [PATCH 20/92] Fix buck2 compilation errors by moving rebaser-client tests Fix buck2 compilation errors by moving rebaser-client tests from unit tests in the rebaser-core crate to integration tests in the rebaser-server crate. Not only does this make more sense from a domain perspective (why would rebaser-core host thoses tests?), but now we can prevent cyclic dependencies by using the integration test buck2 rule for clean separation. Signed-off-by: Nick Gerace --- Cargo.lock | 6 +- lib/dal/BUCK | 1 + lib/rebaser-client/BUCK | 1 + lib/rebaser-client/Cargo.toml | 1 - lib/rebaser-core/BUCK | 4 - lib/rebaser-core/Cargo.toml | 6 - lib/rebaser-core/src/lib.rs | 50 - lib/rebaser-server/BUCK | 3 + lib/rebaser-server/Cargo.toml | 1 + lib/rebaser-server/src/lib.rs | 6 +- .../tests/integration_test/client.rs | 46 + .../tests/integration_test/mod.rs | 1 + third-party/rust/BUCK | 6850 ++++++++++------- third-party/rust/Cargo.lock | 1645 ++-- third-party/rust/Cargo.toml | 46 +- third-party/rust/fixups/semver/fixups.toml | 4 + 16 files changed, 5040 insertions(+), 3631 deletions(-) create mode 100644 lib/rebaser-server/tests/integration_test/client.rs create mode 100644 third-party/rust/fixups/semver/fixups.toml diff --git a/Cargo.lock b/Cargo.lock index 869136b84b..07db076e44 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -4090,7 +4090,6 @@ dependencies = [ name = "rebaser-client" version = "0.1.0" dependencies = [ - "log", "rebaser-core", "remain", "serde", @@ -4106,11 +4105,7 @@ dependencies = [ name = "rebaser-core" version = "0.1.0" dependencies = [ - "pretty_assertions_sorted", - 
"rebaser-client", - "rebaser-server", "serde", - "tokio", "ulid", ] @@ -4125,6 +4120,7 @@ dependencies = [ "futures", "nats-subscriber", "pretty_assertions_sorted", + "rebaser-client", "rebaser-core", "remain", "serde", diff --git a/lib/dal/BUCK b/lib/dal/BUCK index 36bff71bae..b506a351f9 100644 --- a/lib/dal/BUCK +++ b/lib/dal/BUCK @@ -76,6 +76,7 @@ rust_test( name = "test-integration", deps = [ "//lib/dal-test:dal-test", + "//lib/rebaser-client:rebaser-client", "//lib/si-pkg:si-pkg", "//lib/veritech-client:veritech-client", "//third-party/rust:base64", diff --git a/lib/rebaser-client/BUCK b/lib/rebaser-client/BUCK index 9ea6588332..354932a34b 100644 --- a/lib/rebaser-client/BUCK +++ b/lib/rebaser-client/BUCK @@ -3,6 +3,7 @@ load("@prelude-si//:macros.bzl", "rust_library") rust_library( name = "rebaser-client", deps = [ + "//lib/rebaser-core:rebaser-core", "//lib/si-rabbitmq:si-rabbitmq", "//lib/telemetry-rs:telemetry", "//third-party/rust:remain", diff --git a/lib/rebaser-client/Cargo.toml b/lib/rebaser-client/Cargo.toml index b6bb394fab..24674495e2 100644 --- a/lib/rebaser-client/Cargo.toml +++ b/lib/rebaser-client/Cargo.toml @@ -15,4 +15,3 @@ telemetry = { path = "../../lib/telemetry-rs" } thiserror = { workspace = true } tokio = { workspace = true } ulid = { workspace = true } -log = "0.4.20" diff --git a/lib/rebaser-core/BUCK b/lib/rebaser-core/BUCK index 282fb67eec..a0bb494027 100644 --- a/lib/rebaser-core/BUCK +++ b/lib/rebaser-core/BUCK @@ -3,11 +3,7 @@ load("@prelude-si//:macros.bzl", "rust_library") rust_library( name = "rebaser-core", deps = [ - "//lib/rebaser-client:rebaser-client", - "//lib/rebaser-server:rebaser-server", "//third-party/rust:serde", - "//third-party/rust:pretty_assertions_sorted", - "//third-party/rust:tokio", "//third-party/rust:ulid", ], srcs = glob([ diff --git a/lib/rebaser-core/Cargo.toml b/lib/rebaser-core/Cargo.toml index d4518d3025..2733580632 100644 --- a/lib/rebaser-core/Cargo.toml +++ b/lib/rebaser-core/Cargo.toml @@ -7,9 
+7,3 @@ publish = false [dependencies] serde = { workspace = true } ulid = { workspace = true } - -[dev-dependencies] -tokio = { workspace = true } -rebaser-client = { path = "../../lib/rebaser-client" } -rebaser-server = { path = "../../lib/rebaser-server" } -pretty_assertions_sorted = { workspace = true } diff --git a/lib/rebaser-core/src/lib.rs b/lib/rebaser-core/src/lib.rs index f312a926ee..8a6236b66d 100644 --- a/lib/rebaser-core/src/lib.rs +++ b/lib/rebaser-core/src/lib.rs @@ -79,53 +79,3 @@ pub enum ChangeSetReplyMessage { error: String, }, } - -#[cfg(test)] -mod tests { - - use rebaser_client::Client; - use rebaser_server::{ConfigBuilder, Server}; - use tokio::test; - use ulid::Ulid; - - async fn test_setup() -> Client { - let config = ConfigBuilder::default() - .cyclone_encryption_key_path( - "../../lib/cyclone-server/src/dev.encryption.key" - .try_into() - .expect("could not convert"), - ) - .build() - .expect("could not build config"); - let server = Server::from_config(config) - .await - .expect("could not build server"); - tokio::spawn(server.run()); - - Client::new().await.expect("could not build client") - } - - #[test] - async fn connect() { - let client = test_setup().await; - client.close().await; - } - - #[test] - async fn management() { - let mut client = test_setup().await; - - let change_set_id = Ulid::new(); - let _new_stream_to_produce_to = client - .send_management_open_change_set(change_set_id) - .await - .expect("could not create new rebaser loop for change set"); - - client - .send_management_close_change_set(change_set_id) - .await - .expect("could not close the rebaser loop for change set"); - - client.close().await; - } -} diff --git a/lib/rebaser-server/BUCK b/lib/rebaser-server/BUCK index 5b97ec8892..4f857d629e 100644 --- a/lib/rebaser-server/BUCK +++ b/lib/rebaser-server/BUCK @@ -7,6 +7,7 @@ rust_library( "//lib/dal:dal", "//lib/nats-subscriber:nats-subscriber", "//lib/si-crypto:si-crypto", + "//lib/rebaser-core:rebaser-core", 
"//lib/si-data-nats:si-data-nats", "//lib/si-data-pg:si-data-pg", "//lib/si-rabbitmq:si-rabbitmq", @@ -36,6 +37,8 @@ rust_test( deps = [ "//lib/dal:dal", "//lib/dal-test:dal-test", + "//lib/rebaser-client:rebaser-client", + "//lib/rebaser-server:rebaser-server", "//lib/si-pkg:si-pkg", "//lib/si-rabbitmq:si-rabbitmq", "//lib/si-test-macros:si-test-macros", diff --git a/lib/rebaser-server/Cargo.toml b/lib/rebaser-server/Cargo.toml index 1c9e32aa5f..1fad512995 100644 --- a/lib/rebaser-server/Cargo.toml +++ b/lib/rebaser-server/Cargo.toml @@ -34,3 +34,4 @@ veritech-client = { path = "../../lib/veritech-client" } [dev-dependencies] dal-test = { path = "../../lib/dal-test" } pretty_assertions_sorted = { workspace = true } +rebaser-client = { path = "../../lib/rebaser-client" } diff --git a/lib/rebaser-server/src/lib.rs b/lib/rebaser-server/src/lib.rs index 0399f5d20b..eff74462f3 100644 --- a/lib/rebaser-server/src/lib.rs +++ b/lib/rebaser-server/src/lib.rs @@ -22,9 +22,6 @@ clippy::missing_panics_doc )] -mod config; -mod server; - pub use config::detect_and_configure_development; pub use config::Config; pub use config::ConfigBuilder; @@ -33,3 +30,6 @@ pub use config::ConfigFile; pub use server::Server; pub use si_settings::StandardConfig; pub use si_settings::StandardConfigFile; + +mod config; +mod server; diff --git a/lib/rebaser-server/tests/integration_test/client.rs b/lib/rebaser-server/tests/integration_test/client.rs new file mode 100644 index 0000000000..0b845789f5 --- /dev/null +++ b/lib/rebaser-server/tests/integration_test/client.rs @@ -0,0 +1,46 @@ +use tokio::test; +use ulid::Ulid; + +use rebaser_client::Client; +use rebaser_server::{ConfigBuilder, Server}; + +#[test] +async fn connect() { + let client = test_setup().await; + client.close().await; +} + +#[test] +async fn management() { + let mut client = test_setup().await; + + let change_set_id = Ulid::new(); + let _new_stream_to_produce_to = client + .send_management_open_change_set(change_set_id) + .await 
+ .expect("could not create new rebaser loop for change set"); + + client + .send_management_close_change_set(change_set_id) + .await + .expect("could not close the rebaser loop for change set"); + + client.close().await; +} + +async fn test_setup() -> Client { + let config = ConfigBuilder::default() + .cyclone_encryption_key_path( + "../../lib/cyclone-server/src/dev.encryption.key" + .try_into() + .expect("could not convert"), + ) + .build() + .expect("could not build config"); + let server = Server::from_config(config) + .await + .expect("could not build server"); + tokio::spawn(server.run()); + + Client::new().await.expect("could not build client") +} diff --git a/lib/rebaser-server/tests/integration_test/mod.rs b/lib/rebaser-server/tests/integration_test/mod.rs index 6fa1f6955a..ce013a0ed6 100644 --- a/lib/rebaser-server/tests/integration_test/mod.rs +++ b/lib/rebaser-server/tests/integration_test/mod.rs @@ -1 +1,2 @@ +mod client; mod connection; diff --git a/third-party/rust/BUCK b/third-party/rust/BUCK index c7a72c3f3e..ccc10de0b9 100644 --- a/third-party/rust/BUCK +++ b/third-party/rust/BUCK @@ -63,7 +63,7 @@ cargo.rust_library( crate_root = "addr2line-0.21.0.crate/src/lib.rs", edition = "2018", visibility = [], - deps = [":gimli-0.28.0"], + deps = [":gimli-0.28.1"], ) http_archive( @@ -84,18 +84,18 @@ cargo.rust_library( ) http_archive( - name = "ahash-0.7.6.crate", - sha256 = "fcb51a0695d8f838b1ee009b3fbf66bda078cd64590202a864a8f3e8c4315c47", - strip_prefix = "ahash-0.7.6", - urls = ["https://crates.io/api/v1/crates/ahash/0.7.6/download"], + name = "ahash-0.7.7.crate", + sha256 = "5a824f2aa7e75a0c98c5a504fceb80649e9c35265d44525b5f94de4771a395cd", + strip_prefix = "ahash-0.7.7", + urls = ["https://crates.io/api/v1/crates/ahash/0.7.7/download"], visibility = [], ) cargo.rust_library( - name = "ahash-0.7.6", - srcs = [":ahash-0.7.6.crate"], + name = "ahash-0.7.7", + srcs = [":ahash-0.7.7.crate"], crate = "ahash", - crate_root = "ahash-0.7.6.crate/src/lib.rs", 
+ crate_root = "ahash-0.7.7.crate/src/lib.rs", edition = "2018", features = [ "default", @@ -104,38 +104,38 @@ cargo.rust_library( platform = { "linux-arm64": dict( deps = [ - ":getrandom-0.2.10", - ":once_cell-1.18.0", + ":getrandom-0.2.11", + ":once_cell-1.19.0", ], ), "linux-x86_64": dict( deps = [ - ":getrandom-0.2.10", - ":once_cell-1.18.0", + ":getrandom-0.2.11", + ":once_cell-1.19.0", ], ), "macos-arm64": dict( deps = [ - ":getrandom-0.2.10", - ":once_cell-1.18.0", + ":getrandom-0.2.11", + ":once_cell-1.19.0", ], ), "macos-x86_64": dict( deps = [ - ":getrandom-0.2.10", - ":once_cell-1.18.0", + ":getrandom-0.2.11", + ":once_cell-1.19.0", ], ), "windows-gnu": dict( deps = [ - ":getrandom-0.2.10", - ":once_cell-1.18.0", + ":getrandom-0.2.11", + ":once_cell-1.19.0", ], ), "windows-msvc": dict( deps = [ - ":getrandom-0.2.10", - ":once_cell-1.18.0", + ":getrandom-0.2.11", + ":once_cell-1.19.0", ], ), }, @@ -144,59 +144,60 @@ cargo.rust_library( ) http_archive( - name = "ahash-0.8.3.crate", - sha256 = "2c99f64d1e06488f620f932677e24bc6e2897582980441ae90a671415bd7ec2f", - strip_prefix = "ahash-0.8.3", - urls = ["https://crates.io/api/v1/crates/ahash/0.8.3/download"], + name = "ahash-0.8.6.crate", + sha256 = "91429305e9f0a25f6205c5b8e0d2db09e0708a7a6df0f42212bb56c32c8ac97a", + strip_prefix = "ahash-0.8.6", + urls = ["https://crates.io/api/v1/crates/ahash/0.8.6/download"], visibility = [], ) cargo.rust_library( - name = "ahash-0.8.3", - srcs = [":ahash-0.8.3.crate"], + name = "ahash-0.8.6", + srcs = [":ahash-0.8.6.crate"], crate = "ahash", - crate_root = "ahash-0.8.3.crate/src/lib.rs", + crate_root = "ahash-0.8.6.crate/src/lib.rs", edition = "2018", platform = { "linux-arm64": dict( - deps = [":once_cell-1.18.0"], + deps = [":once_cell-1.19.0"], ), "linux-x86_64": dict( - deps = [":once_cell-1.18.0"], + deps = [":once_cell-1.19.0"], ), "macos-arm64": dict( - deps = [":once_cell-1.18.0"], + deps = [":once_cell-1.19.0"], ), "macos-x86_64": dict( - deps = 
[":once_cell-1.18.0"], + deps = [":once_cell-1.19.0"], ), "windows-gnu": dict( - deps = [":once_cell-1.18.0"], + deps = [":once_cell-1.19.0"], ), "windows-msvc": dict( - deps = [":once_cell-1.18.0"], + deps = [":once_cell-1.19.0"], ), }, visibility = [], deps = [ ":cfg-if-1.0.0", ":version_check-0.9.4", + ":zerocopy-0.7.30", ], ) http_archive( - name = "aho-corasick-1.0.4.crate", - sha256 = "6748e8def348ed4d14996fa801f4122cd763fff530258cdc03f64b25f89d3a5a", - strip_prefix = "aho-corasick-1.0.4", - urls = ["https://crates.io/api/v1/crates/aho-corasick/1.0.4/download"], + name = "aho-corasick-1.1.2.crate", + sha256 = "b2969dcb958b36655471fc61f7e416fa76033bdd4bfed0678d8fee1e2d07a1f0", + strip_prefix = "aho-corasick-1.1.2", + urls = ["https://crates.io/api/v1/crates/aho-corasick/1.1.2/download"], visibility = [], ) cargo.rust_library( - name = "aho-corasick-1.0.4", - srcs = [":aho-corasick-1.0.4.crate"], + name = "aho-corasick-1.1.2", + srcs = [":aho-corasick-1.1.2.crate"], crate = "aho_corasick", - crate_root = "aho-corasick-1.0.4.crate/src/lib.rs", + crate_root = "aho-corasick-1.1.2.crate/src/lib.rs", edition = "2021", features = [ "default", @@ -204,7 +205,7 @@ cargo.rust_library( "std", ], visibility = [], - deps = [":memchr-2.5.0"], + deps = [":memchr-2.6.4"], ) http_archive( @@ -247,18 +248,18 @@ cargo.rust_library( ) http_archive( - name = "anstream-0.3.2.crate", - sha256 = "0ca84f3628370c59db74ee214b3263d58f9aadd9b4fe7e711fd87dc452b7f163", - strip_prefix = "anstream-0.3.2", - urls = ["https://crates.io/api/v1/crates/anstream/0.3.2/download"], + name = "anstream-0.6.5.crate", + sha256 = "d664a92ecae85fd0a7392615844904654d1d5f5514837f471ddef4a057aba1b6", + strip_prefix = "anstream-0.6.5", + urls = ["https://crates.io/api/v1/crates/anstream/0.6.5/download"], visibility = [], ) cargo.rust_library( - name = "anstream-0.3.2", - srcs = [":anstream-0.3.2.crate"], + name = "anstream-0.6.5", + srcs = [":anstream-0.6.5.crate"], crate = "anstream", - crate_root = 
"anstream-0.3.2.crate/src/lib.rs", + crate_root = "anstream-0.6.5.crate/src/lib.rs", edition = "2021", features = [ "auto", @@ -267,36 +268,35 @@ cargo.rust_library( ], platform = { "windows-gnu": dict( - deps = [":anstyle-wincon-1.0.2"], + deps = [":anstyle-wincon-3.0.2"], ), "windows-msvc": dict( - deps = [":anstyle-wincon-1.0.2"], + deps = [":anstyle-wincon-3.0.2"], ), }, visibility = [], deps = [ - ":anstyle-1.0.2", - ":anstyle-parse-0.2.1", - ":anstyle-query-1.0.0", + ":anstyle-1.0.4", + ":anstyle-parse-0.2.3", + ":anstyle-query-1.0.2", ":colorchoice-1.0.0", - ":is-terminal-0.4.9", ":utf8parse-0.2.1", ], ) http_archive( - name = "anstyle-1.0.2.crate", - sha256 = "15c4c2c83f81532e5845a733998b6971faca23490340a418e9b72a3ec9de12ea", - strip_prefix = "anstyle-1.0.2", - urls = ["https://crates.io/api/v1/crates/anstyle/1.0.2/download"], + name = "anstyle-1.0.4.crate", + sha256 = "7079075b41f533b8c61d2a4d073c4676e1f8b249ff94a393b0595db304e0dd87", + strip_prefix = "anstyle-1.0.4", + urls = ["https://crates.io/api/v1/crates/anstyle/1.0.4/download"], visibility = [], ) cargo.rust_library( - name = "anstyle-1.0.2", - srcs = [":anstyle-1.0.2.crate"], + name = "anstyle-1.0.4", + srcs = [":anstyle-1.0.4.crate"], crate = "anstyle", - crate_root = "anstyle-1.0.2.crate/src/lib.rs", + crate_root = "anstyle-1.0.4.crate/src/lib.rs", edition = "2021", features = [ "default", @@ -306,18 +306,18 @@ cargo.rust_library( ) http_archive( - name = "anstyle-parse-0.2.1.crate", - sha256 = "938874ff5980b03a87c5524b3ae5b59cf99b1d6bc836848df7bc5ada9643c333", - strip_prefix = "anstyle-parse-0.2.1", - urls = ["https://crates.io/api/v1/crates/anstyle-parse/0.2.1/download"], + name = "anstyle-parse-0.2.3.crate", + sha256 = "c75ac65da39e5fe5ab759307499ddad880d724eed2f6ce5b5e8a26f4f387928c", + strip_prefix = "anstyle-parse-0.2.3", + urls = ["https://crates.io/api/v1/crates/anstyle-parse/0.2.3/download"], visibility = [], ) cargo.rust_library( - name = "anstyle-parse-0.2.1", - srcs = 
[":anstyle-parse-0.2.1.crate"], + name = "anstyle-parse-0.2.3", + srcs = [":anstyle-parse-0.2.3.crate"], crate = "anstyle_parse", - crate_root = "anstyle-parse-0.2.1.crate/src/lib.rs", + crate_root = "anstyle-parse-0.2.3.crate/src/lib.rs", edition = "2021", features = [ "default", @@ -328,54 +328,54 @@ cargo.rust_library( ) http_archive( - name = "anstyle-query-1.0.0.crate", - sha256 = "5ca11d4be1bab0c8bc8734a9aa7bf4ee8316d462a08c6ac5052f888fef5b494b", - strip_prefix = "anstyle-query-1.0.0", - urls = ["https://crates.io/api/v1/crates/anstyle-query/1.0.0/download"], + name = "anstyle-query-1.0.2.crate", + sha256 = "e28923312444cdd728e4738b3f9c9cac739500909bb3d3c94b43551b16517648", + strip_prefix = "anstyle-query-1.0.2", + urls = ["https://crates.io/api/v1/crates/anstyle-query/1.0.2/download"], visibility = [], ) cargo.rust_library( - name = "anstyle-query-1.0.0", - srcs = [":anstyle-query-1.0.0.crate"], + name = "anstyle-query-1.0.2", + srcs = [":anstyle-query-1.0.2.crate"], crate = "anstyle_query", - crate_root = "anstyle-query-1.0.0.crate/src/lib.rs", + crate_root = "anstyle-query-1.0.2.crate/src/lib.rs", edition = "2021", platform = { "windows-gnu": dict( - deps = [":windows-sys-0.48.0"], + deps = [":windows-sys-0.52.0"], ), "windows-msvc": dict( - deps = [":windows-sys-0.48.0"], + deps = [":windows-sys-0.52.0"], ), }, visibility = [], ) http_archive( - name = "anstyle-wincon-1.0.2.crate", - sha256 = "c677ab05e09154296dd37acecd46420c17b9713e8366facafa8fc0885167cf4c", - strip_prefix = "anstyle-wincon-1.0.2", - urls = ["https://crates.io/api/v1/crates/anstyle-wincon/1.0.2/download"], + name = "anstyle-wincon-3.0.2.crate", + sha256 = "1cd54b81ec8d6180e24654d0b371ad22fc3dd083b6ff8ba325b72e00c87660a7", + strip_prefix = "anstyle-wincon-3.0.2", + urls = ["https://crates.io/api/v1/crates/anstyle-wincon/3.0.2/download"], visibility = [], ) cargo.rust_library( - name = "anstyle-wincon-1.0.2", - srcs = [":anstyle-wincon-1.0.2.crate"], + name = "anstyle-wincon-3.0.2", + srcs 
= [":anstyle-wincon-3.0.2.crate"], crate = "anstyle_wincon", - crate_root = "anstyle-wincon-1.0.2.crate/src/lib.rs", + crate_root = "anstyle-wincon-3.0.2.crate/src/lib.rs", edition = "2021", platform = { "windows-gnu": dict( - deps = [":windows-sys-0.48.0"], + deps = [":windows-sys-0.52.0"], ), "windows-msvc": dict( - deps = [":windows-sys-0.48.0"], + deps = [":windows-sys-0.52.0"], ), }, visibility = [], - deps = [":anstyle-1.0.2"], + deps = [":anstyle-1.0.4"], ) http_archive( @@ -467,61 +467,61 @@ cargo.rust_library( }, visibility = [], deps = [ - ":base64-0.21.2", - ":bytes-1.4.0", - ":futures-0.3.28", - ":http-0.2.9", - ":itoa-1.0.9", - ":memchr-2.5.0", - ":nkeys-0.3.1", + ":base64-0.21.5", + ":bytes-1.5.0", + ":futures-0.3.29", + ":http-0.2.11", + ":itoa-1.0.10", + ":memchr-2.6.4", + ":nkeys-0.3.2", ":nuid-0.3.2", - ":once_cell-1.18.0", + ":once_cell-1.19.0", ":rand-0.8.5", - ":regex-1.9.3", + ":regex-1.10.2", ":ring-0.16.20", ":rustls-native-certs-0.6.3", - ":rustls-pemfile-1.0.3", - ":rustls-webpki-0.101.4", - ":serde-1.0.186", - ":serde_json-1.0.105", + ":rustls-pemfile-1.0.4", + ":rustls-webpki-0.101.7", + ":serde-1.0.193", + ":serde_json-1.0.108", ":serde_nanos-0.1.3", - ":serde_repr-0.1.16", - ":thiserror-1.0.47", - ":time-0.3.27", - ":tokio-1.32.0", + ":serde_repr-0.1.17", + ":thiserror-1.0.50", + ":time-0.3.30", + ":tokio-1.35.0", ":tokio-retry-0.3.0", ":tokio-rustls-0.24.1", - ":tracing-0.1.37", - ":url-2.4.0", + ":tracing-0.1.40", + ":url-2.5.0", ], ) alias( name = "async-recursion", - actual = ":async-recursion-1.0.4", + actual = ":async-recursion-1.0.5", visibility = ["PUBLIC"], ) http_archive( - name = "async-recursion-1.0.4.crate", - sha256 = "0e97ce7de6cf12de5d7226c73f5ba9811622f4db3a5b91b55c53e987e5f91cba", - strip_prefix = "async-recursion-1.0.4", - urls = ["https://crates.io/api/v1/crates/async-recursion/1.0.4/download"], + name = "async-recursion-1.0.5.crate", + sha256 = "5fd55a5ba1179988837d24ab4c7cc8ed6efdeff578ede0416b4225a5fca35bd0", + 
strip_prefix = "async-recursion-1.0.5", + urls = ["https://crates.io/api/v1/crates/async-recursion/1.0.5/download"], visibility = [], ) cargo.rust_library( - name = "async-recursion-1.0.4", - srcs = [":async-recursion-1.0.4.crate"], + name = "async-recursion-1.0.5", + srcs = [":async-recursion-1.0.5.crate"], crate = "async_recursion", - crate_root = "async-recursion-1.0.4.crate/src/lib.rs", + crate_root = "async-recursion-1.0.5.crate/src/lib.rs", edition = "2018", proc_macro = True, visibility = [], deps = [ - ":proc-macro2-1.0.66", + ":proc-macro2-1.0.70", ":quote-1.0.33", - ":syn-2.0.29", + ":syn-2.0.40", ], ) @@ -543,7 +543,7 @@ cargo.rust_library( deps = [ ":async-stream-impl-0.3.5", ":futures-core-0.3.29", - ":pin-project-lite-0.2.12", + ":pin-project-lite-0.2.13", ], ) @@ -564,38 +564,38 @@ cargo.rust_library( proc_macro = True, visibility = [], deps = [ - ":proc-macro2-1.0.66", + ":proc-macro2-1.0.70", ":quote-1.0.33", - ":syn-2.0.29", + ":syn-2.0.40", ], ) alias( name = "async-trait", - actual = ":async-trait-0.1.73", + actual = ":async-trait-0.1.74", visibility = ["PUBLIC"], ) http_archive( - name = "async-trait-0.1.73.crate", - sha256 = "bc00ceb34980c03614e35a3a4e218276a0a824e911d07651cd0d858a51e8c0f0", - strip_prefix = "async-trait-0.1.73", - urls = ["https://crates.io/api/v1/crates/async-trait/0.1.73/download"], + name = "async-trait-0.1.74.crate", + sha256 = "a66537f1bb974b254c98ed142ff995236e81b9d0fe4db0575f46612cb15eb0f9", + strip_prefix = "async-trait-0.1.74", + urls = ["https://crates.io/api/v1/crates/async-trait/0.1.74/download"], visibility = [], ) cargo.rust_library( - name = "async-trait-0.1.73", - srcs = [":async-trait-0.1.73.crate"], + name = "async-trait-0.1.74", + srcs = [":async-trait-0.1.74.crate"], crate = "async_trait", - crate_root = "async-trait-0.1.73.crate/src/lib.rs", + crate_root = "async-trait-0.1.74.crate/src/lib.rs", edition = "2021", proc_macro = True, visibility = [], deps = [ - ":proc-macro2-1.0.66", + ":proc-macro2-1.0.70", 
":quote-1.0.33", - ":syn-2.0.29", + ":syn-2.0.40", ], ) @@ -616,11 +616,11 @@ cargo.rust_library( features = ["default"], visibility = [], deps = [ - ":bytes-1.4.0", + ":bytes-1.5.0", ":futures-sink-0.3.29", ":futures-util-0.3.29", - ":memchr-2.5.0", - ":pin-project-lite-0.2.12", + ":memchr-2.6.4", + ":pin-project-lite-0.2.13", ], ) @@ -639,7 +639,7 @@ cargo.rust_library( crate_root = "atoi-1.0.0.crate/src/lib.rs", edition = "2021", visibility = [], - deps = [":num-traits-0.2.16"], + deps = [":num-traits-0.2.17"], ) http_archive( @@ -695,16 +695,16 @@ cargo.rust_library( "webpki-roots", ], named_deps = { - "rustls_opt_dep": ":rustls-0.20.8", + "rustls_opt_dep": ":rustls-0.20.9", }, visibility = [], deps = [ - ":http-0.2.9", + ":http-0.2.11", ":log-0.4.20", - ":serde-1.0.186", - ":serde_json-1.0.105", - ":url-2.4.0", - ":webpki-0.22.0", + ":serde-1.0.193", + ":serde_json-1.0.108", + ":url-2.5.0", + ":webpki-0.22.4", ":webpki-roots-0.22.6", ], ) @@ -752,29 +752,29 @@ cargo.rust_library( ":log-0.4.20", ":quick-xml-0.26.0", ":rust-ini-0.18.0", - ":serde-1.0.186", - ":thiserror-1.0.47", - ":time-0.3.27", - ":url-2.4.0", + ":serde-1.0.193", + ":thiserror-1.0.50", + ":time-0.3.30", + ":url-2.5.0", ], ) http_archive( - name = "aws-region-0.25.3.crate", - sha256 = "056557a61427d0e5ba29dd931031c8ffed4ee7a550e7cd55692a9d8deb0a9dba", - strip_prefix = "aws-region-0.25.3", - urls = ["https://crates.io/api/v1/crates/aws-region/0.25.3/download"], + name = "aws-region-0.25.4.crate", + sha256 = "42fed2b9fca70f2908268d057a607f2a906f47edbf856ea8587de9038d264e22", + strip_prefix = "aws-region-0.25.4", + urls = ["https://crates.io/api/v1/crates/aws-region/0.25.4/download"], visibility = [], ) cargo.rust_library( - name = "aws-region-0.25.3", - srcs = [":aws-region-0.25.3.crate"], + name = "aws-region-0.25.4", + srcs = [":aws-region-0.25.4.crate"], crate = "awsregion", - crate_root = "aws-region-0.25.3.crate/src/lib.rs", + crate_root = "aws-region-0.25.4.crate/src/lib.rs", edition = 
"2018", visibility = [], - deps = [":thiserror-1.0.47"], + deps = [":thiserror-1.0.50"], ) alias( @@ -813,31 +813,31 @@ cargo.rust_library( ], visibility = [], deps = [ - ":async-trait-0.1.73", + ":async-trait-0.1.74", ":axum-core-0.3.4", ":axum-macros-0.3.8", - ":base64-0.21.2", + ":base64-0.21.5", ":bitflags-1.3.2", - ":bytes-1.4.0", + ":bytes-1.5.0", ":futures-util-0.3.29", - ":http-0.2.9", - ":http-body-0.4.5", + ":http-0.2.11", + ":http-body-0.4.6", ":hyper-0.14.27", - ":itoa-1.0.9", - ":matchit-0.7.2", - ":memchr-2.5.0", + ":itoa-1.0.10", + ":matchit-0.7.3", + ":memchr-2.6.4", ":mime-0.3.17", ":multer-2.1.0", - ":percent-encoding-2.3.0", - ":pin-project-lite-0.2.12", - ":serde-1.0.186", - ":serde_json-1.0.105", + ":percent-encoding-2.3.1", + ":pin-project-lite-0.2.13", + ":serde-1.0.193", + ":serde_json-1.0.108", ":serde_path_to_error-0.1.14", ":serde_urlencoded-0.7.1", - ":sha1-0.10.5", + ":sha1-0.10.6", ":sync_wrapper-0.1.2", - ":tokio-1.32.0", - ":tokio-tungstenite-0.20.0", + ":tokio-1.35.0", + ":tokio-tungstenite-0.20.1", ":tower-0.4.13", ":tower-layer-0.3.2", ":tower-service-0.3.2", @@ -860,11 +860,11 @@ cargo.rust_library( edition = "2021", visibility = [], deps = [ - ":async-trait-0.1.73", - ":bytes-1.4.0", + ":async-trait-0.1.74", + ":bytes-1.5.0", ":futures-util-0.3.29", - ":http-0.2.9", - ":http-body-0.4.5", + ":http-0.2.11", + ":http-body-0.4.6", ":mime-0.3.17", ":tower-layer-0.3.2", ":tower-service-0.3.2", @@ -890,9 +890,9 @@ cargo.rust_library( visibility = [], deps = [ ":heck-0.4.1", - ":proc-macro2-1.0.66", + ":proc-macro2-1.0.70", ":quote-1.0.33", - ":syn-2.0.29", + ":syn-2.0.40", ], ) @@ -919,41 +919,41 @@ cargo.rust_library( "linux-arm64": dict( deps = [ ":addr2line-0.21.0", - ":libc-0.2.147", + ":libc-0.2.151", ":miniz_oxide-0.7.1", - ":object-0.32.0", + ":object-0.32.1", ], ), "linux-x86_64": dict( deps = [ ":addr2line-0.21.0", - ":libc-0.2.147", + ":libc-0.2.151", ":miniz_oxide-0.7.1", - ":object-0.32.0", + ":object-0.32.1", ], ), 
"macos-arm64": dict( deps = [ ":addr2line-0.21.0", - ":libc-0.2.147", + ":libc-0.2.151", ":miniz_oxide-0.7.1", - ":object-0.32.0", + ":object-0.32.1", ], ), "macos-x86_64": dict( deps = [ ":addr2line-0.21.0", - ":libc-0.2.147", + ":libc-0.2.151", ":miniz_oxide-0.7.1", - ":object-0.32.0", + ":object-0.32.1", ], ), "windows-gnu": dict( deps = [ ":addr2line-0.21.0", - ":libc-0.2.147", + ":libc-0.2.151", ":miniz_oxide-0.7.1", - ":object-0.32.0", + ":object-0.32.1", ], ), }, @@ -983,7 +983,7 @@ cargo.rust_library( deps = [ ":heck-0.3.3", ":proc-macro-error-1.0.4", - ":proc-macro2-1.0.66", + ":proc-macro2-1.0.70", ":quote-1.0.33", ":syn-1.0.109", ], @@ -1031,24 +1031,24 @@ cargo.rust_library( alias( name = "base64", - actual = ":base64-0.21.2", + actual = ":base64-0.21.5", visibility = ["PUBLIC"], ) http_archive( - name = "base64-0.21.2.crate", - sha256 = "604178f6c5c21f02dc555784810edfb88d34ac2c73b2eae109655649ee73ce3d", - strip_prefix = "base64-0.21.2", - urls = ["https://crates.io/api/v1/crates/base64/0.21.2/download"], + name = "base64-0.21.5.crate", + sha256 = "35636a1494ede3b646cc98f74f8e62c773a38a659ebc777a2cf26b9b74171df9", + strip_prefix = "base64-0.21.5", + urls = ["https://crates.io/api/v1/crates/base64/0.21.5/download"], visibility = [], ) cargo.rust_library( - name = "base64-0.21.2", - srcs = [":base64-0.21.2.crate"], + name = "base64-0.21.5", + srcs = [":base64-0.21.5.crate"], crate = "base64", - crate_root = "base64-0.21.2.crate/src/lib.rs", - edition = "2021", + crate_root = "base64-0.21.5.crate/src/lib.rs", + edition = "2018", features = [ "alloc", "default", @@ -1093,7 +1093,7 @@ cargo.rust_library( deps = [ ":num-bigint-0.4.4", ":num-integer-0.1.45", - ":num-traits-0.2.16", + ":num-traits-0.2.17", ], ) @@ -1133,18 +1133,18 @@ cargo.rust_library( ) http_archive( - name = "bitflags-2.4.0.crate", - sha256 = "b4682ae6287fcf752ecaabbfcc7b6f9b72aa33933dc23a554d853aea8eea8635", - strip_prefix = "bitflags-2.4.0", - urls = 
["https://crates.io/api/v1/crates/bitflags/2.4.0/download"], + name = "bitflags-2.4.1.crate", + sha256 = "327762f6e5a765692301e5bb513e0d9fef63be86bbc14528052b1cd3e6f03e07", + strip_prefix = "bitflags-2.4.1", + urls = ["https://crates.io/api/v1/crates/bitflags/2.4.1/download"], visibility = [], ) cargo.rust_library( - name = "bitflags-2.4.0", - srcs = [":bitflags-2.4.0.crate"], + name = "bitflags-2.4.1", + srcs = [":bitflags-2.4.1.crate"], crate = "bitflags", - crate_root = "bitflags-2.4.0.crate/src/lib.rs", + crate_root = "bitflags-2.4.1.crate/src/lib.rs", edition = "2021", features = ["std"], visibility = [], @@ -1176,14 +1176,14 @@ cargo.rust_library( alias( name = "blake3", - actual = ":blake3-1.4.1", + actual = ":blake3-1.5.0", visibility = ["PUBLIC"], ) http_archive( - name = "blake3-1.4.1.crate", - sha256 = "199c42ab6972d92c9f8995f086273d25c42fc0f7b2a1fcefba465c1352d25ba5", - strip_prefix = "blake3-1.4.1", + name = "blake3-1.5.0.crate", + sha256 = "0231f06152bf547e9c2b5194f247cd97aacf6dcd8b15d8e5ec0663f64580da87", + strip_prefix = "blake3-1.5.0", sub_targets = [ "c/blake3.c", "c/blake3.h", @@ -1204,25 +1204,24 @@ http_archive( "c/blake3_sse41_x86-64_windows_gnu.S", "c/blake3_sse41_x86-64_windows_msvc.asm", ], - urls = ["https://crates.io/api/v1/crates/blake3/1.4.1/download"], + urls = ["https://crates.io/api/v1/crates/blake3/1.5.0/download"], visibility = [], ) cargo.rust_library( - name = "blake3-1.4.1", - srcs = [":blake3-1.4.1.crate"], + name = "blake3-1.5.0", + srcs = [":blake3-1.5.0.crate"], crate = "blake3", - crate_root = "blake3-1.4.1.crate/src/lib.rs", + crate_root = "blake3-1.5.0.crate/src/lib.rs", edition = "2021", features = [ "default", - "digest", "std", ], platform = { "linux-arm64": dict( rustc_flags = ["--cfg=blake3_neon"], - deps = [":blake3-1.4.1-simd_neon-aarch64"], + deps = [":blake3-1.5.0-simd_neon-aarch64"], ), "linux-x86_64": dict( rustc_flags = [ @@ -1231,11 +1230,11 @@ cargo.rust_library( "--cfg=blake3_sse2_ffi", 
"--cfg=blake3_sse41_ffi", ], - deps = [":blake3-1.4.1-simd_x86_unix"], + deps = [":blake3-1.5.0-simd_x86_unix"], ), "macos-arm64": dict( rustc_flags = ["--cfg=blake3_neon"], - deps = [":blake3-1.4.1-simd_neon-aarch64"], + deps = [":blake3-1.5.0-simd_neon-aarch64"], ), "macos-x86_64": dict( rustc_flags = [ @@ -1244,7 +1243,7 @@ cargo.rust_library( "--cfg=blake3_sse2_ffi", "--cfg=blake3_sse41_ffi", ], - deps = [":blake3-1.4.1-simd_x86_unix"], + deps = [":blake3-1.5.0-simd_x86_unix"], ), "windows-gnu": dict( rustc_flags = [ @@ -1253,7 +1252,7 @@ cargo.rust_library( "--cfg=blake3_sse2_ffi", "--cfg=blake3_sse41_ffi", ], - deps = [":blake3-1.4.1-simd_x86_windows_gnu"], + deps = [":blake3-1.5.0-simd_x86_windows_gnu"], ), "windows-msvc": dict( rustc_flags = [ @@ -1262,7 +1261,7 @@ cargo.rust_library( "--cfg=blake3_sse2_ffi", "--cfg=blake3_sse41_ffi", ], - deps = [":blake3-1.4.1-simd_x86_windows_msvc"], + deps = [":blake3-1.5.0-simd_x86_windows_msvc"], ), }, visibility = [], @@ -1271,27 +1270,26 @@ cargo.rust_library( ":arrayvec-0.7.4", ":cfg-if-1.0.0", ":constant_time_eq-0.3.0", - ":digest-0.10.7", ], ) cxx_library( - name = "blake3-1.4.1-simd_neon-aarch64", - srcs = [":blake3-1.4.1.crate[c/blake3_neon.c]"], + name = "blake3-1.5.0-simd_neon-aarch64", + srcs = [":blake3-1.5.0.crate[c/blake3_neon.c]"], headers = [ - ":blake3-1.4.1.crate[c/blake3.h]", - ":blake3-1.4.1.crate[c/blake3_impl.h]", + ":blake3-1.5.0.crate[c/blake3.h]", + ":blake3-1.5.0.crate[c/blake3_impl.h]", ], preferred_linkage = "static", visibility = [], ) cxx_library( - name = "blake3-1.4.1-simd_neon-armv7", - srcs = [":blake3-1.4.1.crate[c/blake3_neon.c]"], + name = "blake3-1.5.0-simd_neon-armv7", + srcs = [":blake3-1.5.0.crate[c/blake3_neon.c]"], headers = [ - ":blake3-1.4.1.crate[c/blake3.h]", - ":blake3-1.4.1.crate[c/blake3_impl.h]", + ":blake3-1.5.0.crate[c/blake3.h]", + ":blake3-1.5.0.crate[c/blake3_impl.h]", ], compiler_flags = [ "-mfpu=neon-vfpv4", @@ -1302,19 +1300,19 @@ cxx_library( ) cxx_library( - 
name = "blake3-1.4.1-simd_x86_unix", + name = "blake3-1.5.0-simd_x86_unix", srcs = [ - ":blake3-1.4.1.crate[c/blake3.c]", - ":blake3-1.4.1.crate[c/blake3_avx2_x86-64_unix.S]", - ":blake3-1.4.1.crate[c/blake3_avx512_x86-64_unix.S]", - ":blake3-1.4.1.crate[c/blake3_dispatch.c]", - ":blake3-1.4.1.crate[c/blake3_portable.c]", - ":blake3-1.4.1.crate[c/blake3_sse2_x86-64_unix.S]", - ":blake3-1.4.1.crate[c/blake3_sse41_x86-64_unix.S]", + ":blake3-1.5.0.crate[c/blake3.c]", + ":blake3-1.5.0.crate[c/blake3_avx2_x86-64_unix.S]", + ":blake3-1.5.0.crate[c/blake3_avx512_x86-64_unix.S]", + ":blake3-1.5.0.crate[c/blake3_dispatch.c]", + ":blake3-1.5.0.crate[c/blake3_portable.c]", + ":blake3-1.5.0.crate[c/blake3_sse2_x86-64_unix.S]", + ":blake3-1.5.0.crate[c/blake3_sse41_x86-64_unix.S]", ], headers = [ - ":blake3-1.4.1.crate[c/blake3.h]", - ":blake3-1.4.1.crate[c/blake3_impl.h]", + ":blake3-1.5.0.crate[c/blake3.h]", + ":blake3-1.5.0.crate[c/blake3_impl.h]", ], compatible_with = [ "prelude//os/constraints:linux", @@ -1329,19 +1327,19 @@ cxx_library( ) cxx_library( - name = "blake3-1.4.1-simd_x86_windows_gnu", + name = "blake3-1.5.0-simd_x86_windows_gnu", srcs = [ - ":blake3-1.4.1.crate[c/blake3.c]", - ":blake3-1.4.1.crate[c/blake3_avx2_x86-64_windows_gnu.S]", - ":blake3-1.4.1.crate[c/blake3_avx512_x86-64_windows_gnu.S]", - ":blake3-1.4.1.crate[c/blake3_dispatch.c]", - ":blake3-1.4.1.crate[c/blake3_portable.c]", - ":blake3-1.4.1.crate[c/blake3_sse2_x86-64_windows_gnu.S]", - ":blake3-1.4.1.crate[c/blake3_sse41_x86-64_windows_gnu.S]", + ":blake3-1.5.0.crate[c/blake3.c]", + ":blake3-1.5.0.crate[c/blake3_avx2_x86-64_windows_gnu.S]", + ":blake3-1.5.0.crate[c/blake3_avx512_x86-64_windows_gnu.S]", + ":blake3-1.5.0.crate[c/blake3_dispatch.c]", + ":blake3-1.5.0.crate[c/blake3_portable.c]", + ":blake3-1.5.0.crate[c/blake3_sse2_x86-64_windows_gnu.S]", + ":blake3-1.5.0.crate[c/blake3_sse41_x86-64_windows_gnu.S]", ], headers = [ - ":blake3-1.4.1.crate[c/blake3.h]", - 
":blake3-1.4.1.crate[c/blake3_impl.h]", + ":blake3-1.5.0.crate[c/blake3.h]", + ":blake3-1.5.0.crate[c/blake3_impl.h]", ], compatible_with = ["prelude//os/constraints:windows"], compiler_flags = [ @@ -1353,19 +1351,19 @@ cxx_library( ) cxx_library( - name = "blake3-1.4.1-simd_x86_windows_msvc", + name = "blake3-1.5.0-simd_x86_windows_msvc", srcs = [ - ":blake3-1.4.1.crate[c/blake3.c]", - ":blake3-1.4.1.crate[c/blake3_avx2_x86-64_windows_msvc.asm]", - ":blake3-1.4.1.crate[c/blake3_avx512_x86-64_windows_msvc.asm]", - ":blake3-1.4.1.crate[c/blake3_dispatch.c]", - ":blake3-1.4.1.crate[c/blake3_portable.c]", - ":blake3-1.4.1.crate[c/blake3_sse2_x86-64_windows_msvc.asm]", - ":blake3-1.4.1.crate[c/blake3_sse41_x86-64_windows_msvc.asm]", + ":blake3-1.5.0.crate[c/blake3.c]", + ":blake3-1.5.0.crate[c/blake3_avx2_x86-64_windows_msvc.asm]", + ":blake3-1.5.0.crate[c/blake3_avx512_x86-64_windows_msvc.asm]", + ":blake3-1.5.0.crate[c/blake3_dispatch.c]", + ":blake3-1.5.0.crate[c/blake3_portable.c]", + ":blake3-1.5.0.crate[c/blake3_sse2_x86-64_windows_msvc.asm]", + ":blake3-1.5.0.crate[c/blake3_sse41_x86-64_windows_msvc.asm]", ], headers = [ - ":blake3-1.4.1.crate[c/blake3.h]", - ":blake3-1.4.1.crate[c/blake3_impl.h]", + ":blake3-1.5.0.crate[c/blake3.h]", + ":blake3-1.5.0.crate[c/blake3_impl.h]", ], compatible_with = ["prelude//os/constraints:windows"], preferred_linkage = "static", @@ -1450,25 +1448,25 @@ cargo.rust_library( }, visibility = [], deps = [ - ":base64-0.21.2", + ":base64-0.21.5", ":bollard-stubs-1.43.0-rc.2", - ":bytes-1.4.0", + ":bytes-1.5.0", ":futures-core-0.3.29", ":futures-util-0.3.29", ":hex-0.4.3", - ":http-0.2.9", + ":http-0.2.11", ":hyper-0.14.27", ":log-0.4.20", - ":pin-project-lite-0.2.12", - ":serde-1.0.186", - ":serde_derive-1.0.186", - ":serde_json-1.0.105", - ":serde_repr-0.1.16", + ":pin-project-lite-0.2.13", + ":serde-1.0.193", + ":serde_derive-1.0.193", + ":serde_json-1.0.108", + ":serde_repr-0.1.17", ":serde_urlencoded-0.7.1", - ":thiserror-1.0.47", 
- ":tokio-1.32.0", - ":tokio-util-0.7.8", - ":url-2.4.0", + ":thiserror-1.0.50", + ":tokio-1.35.0", + ":tokio-util-0.7.10", + ":url-2.5.0", ], ) @@ -1488,127 +1486,90 @@ cargo.rust_library( edition = "2021", visibility = [], deps = [ - ":serde-1.0.186", - ":serde_repr-0.1.16", - ":serde_with-3.3.0", + ":serde-1.0.193", + ":serde_repr-0.1.17", + ":serde_with-3.4.0", ], ) http_archive( - name = "borsh-0.10.3.crate", - sha256 = "4114279215a005bc675e386011e594e1d9b800918cea18fcadadcce864a2046b", - strip_prefix = "borsh-0.10.3", - urls = ["https://crates.io/api/v1/crates/borsh/0.10.3/download"], + name = "borsh-1.2.1.crate", + sha256 = "9897ef0f1bd2362169de6d7e436ea2237dc1085d7d1e4db75f4be34d86f309d1", + strip_prefix = "borsh-1.2.1", + urls = ["https://crates.io/api/v1/crates/borsh/1.2.1/download"], visibility = [], ) cargo.rust_library( - name = "borsh-0.10.3", - srcs = [":borsh-0.10.3.crate"], + name = "borsh-1.2.1", + srcs = [":borsh-1.2.1.crate"], crate = "borsh", - crate_root = "borsh-0.10.3.crate/src/lib.rs", + crate_root = "borsh-1.2.1.crate/src/lib.rs", edition = "2018", env = { - "CARGO_MANIFEST_DIR": "borsh-0.10.3.crate", + "CARGO_MANIFEST_DIR": "borsh-1.2.1.crate", "CARGO_PKG_AUTHORS": "Near Inc ", "CARGO_PKG_DESCRIPTION": "Binary Object Representation Serializer for Hashing\n", "CARGO_PKG_NAME": "borsh", "CARGO_PKG_REPOSITORY": "https://github.com/near/borsh-rs", - "CARGO_PKG_VERSION": "0.10.3", - "CARGO_PKG_VERSION_MAJOR": "0", - "CARGO_PKG_VERSION_MINOR": "10", - "CARGO_PKG_VERSION_PATCH": "3", + "CARGO_PKG_VERSION": "1.2.1", + "CARGO_PKG_VERSION_MAJOR": "1", + "CARGO_PKG_VERSION_MINOR": "2", + "CARGO_PKG_VERSION_PATCH": "1", }, - features = ["std"], - visibility = [], - deps = [ - ":borsh-derive-0.10.3", - ":hashbrown-0.13.2", + features = [ + "borsh-derive", + "derive", + "std", + "unstable__schema", ], -) - -http_archive( - name = "borsh-derive-0.10.3.crate", - sha256 = "0754613691538d51f329cce9af41d7b7ca150bc973056f1156611489475f54f7", - strip_prefix = 
"borsh-derive-0.10.3", - urls = ["https://crates.io/api/v1/crates/borsh-derive/0.10.3/download"], - visibility = [], -) - -cargo.rust_library( - name = "borsh-derive-0.10.3", - srcs = [":borsh-derive-0.10.3.crate"], - crate = "borsh_derive", - crate_root = "borsh-derive-0.10.3.crate/src/lib.rs", - edition = "2018", - proc_macro = True, visibility = [], - deps = [ - ":borsh-derive-internal-0.10.3", - ":borsh-schema-derive-internal-0.10.3", - ":proc-macro-crate-0.1.5", - ":proc-macro2-1.0.66", - ":syn-1.0.109", - ], + deps = [":borsh-derive-1.2.1"], ) http_archive( - name = "borsh-derive-internal-0.10.3.crate", - sha256 = "afb438156919598d2c7bad7e1c0adf3d26ed3840dbc010db1a882a65583ca2fb", - strip_prefix = "borsh-derive-internal-0.10.3", - urls = ["https://crates.io/api/v1/crates/borsh-derive-internal/0.10.3/download"], + name = "borsh-derive-1.2.1.crate", + sha256 = "478b41ff04256c5c8330f3dfdaaae2a5cc976a8e75088bafa4625b0d0208de8c", + strip_prefix = "borsh-derive-1.2.1", + urls = ["https://crates.io/api/v1/crates/borsh-derive/1.2.1/download"], visibility = [], ) cargo.rust_library( - name = "borsh-derive-internal-0.10.3", - srcs = [":borsh-derive-internal-0.10.3.crate"], - crate = "borsh_derive_internal", - crate_root = "borsh-derive-internal-0.10.3.crate/src/lib.rs", + name = "borsh-derive-1.2.1", + srcs = [":borsh-derive-1.2.1.crate"], + crate = "borsh_derive", + crate_root = "borsh-derive-1.2.1.crate/src/lib.rs", edition = "2018", - visibility = [], - deps = [ - ":proc-macro2-1.0.66", - ":quote-1.0.33", - ":syn-1.0.109", + features = [ + "default", + "schema", ], -) - -http_archive( - name = "borsh-schema-derive-internal-0.10.3.crate", - sha256 = "634205cc43f74a1b9046ef87c4540ebda95696ec0f315024860cad7c5b0f5ccd", - strip_prefix = "borsh-schema-derive-internal-0.10.3", - urls = ["https://crates.io/api/v1/crates/borsh-schema-derive-internal/0.10.3/download"], - visibility = [], -) - -cargo.rust_library( - name = "borsh-schema-derive-internal-0.10.3", - srcs = 
[":borsh-schema-derive-internal-0.10.3.crate"], - crate = "borsh_schema_derive_internal", - crate_root = "borsh-schema-derive-internal-0.10.3.crate/src/lib.rs", - edition = "2018", + proc_macro = True, visibility = [], deps = [ - ":proc-macro2-1.0.66", + ":once_cell-1.19.0", + ":proc-macro-crate-2.0.0", + ":proc-macro2-1.0.70", ":quote-1.0.33", - ":syn-1.0.109", + ":syn-2.0.40", + ":syn_derive-0.1.8", ], ) http_archive( - name = "bstr-1.6.0.crate", - sha256 = "6798148dccfbff0fae41c7574d2fa8f1ef3492fba0face179de5d8d447d67b05", - strip_prefix = "bstr-1.6.0", - urls = ["https://crates.io/api/v1/crates/bstr/1.6.0/download"], + name = "bstr-1.8.0.crate", + sha256 = "542f33a8835a0884b006a0c3df3dadd99c0c3f296ed26c2fdc8028e01ad6230c", + strip_prefix = "bstr-1.8.0", + urls = ["https://crates.io/api/v1/crates/bstr/1.8.0/download"], visibility = [], ) cargo.rust_library( - name = "bstr-1.6.0", - srcs = [":bstr-1.6.0.crate"], + name = "bstr-1.8.0", + srcs = [":bstr-1.8.0.crate"], crate = "bstr", - crate_root = "bstr-1.6.0.crate/src/lib.rs", + crate_root = "bstr-1.8.0.crate/src/lib.rs", edition = "2021", features = [ "alloc", @@ -1616,8 +1577,8 @@ cargo.rust_library( ], visibility = [], deps = [ - ":memchr-2.5.0", - ":serde-1.0.186", + ":memchr-2.6.4", + ":serde-1.0.193", ], ) @@ -1668,26 +1629,26 @@ cargo.rust_library( proc_macro = True, visibility = [], deps = [ - ":proc-macro2-1.0.66", + ":proc-macro2-1.0.70", ":quote-1.0.33", ":syn-1.0.109", ], ) http_archive( - name = "byteorder-1.4.3.crate", - sha256 = "14c189c53d098945499cdfa7ecc63567cf3886b3332b312a5b4585d8d3a6a610", - strip_prefix = "byteorder-1.4.3", - urls = ["https://crates.io/api/v1/crates/byteorder/1.4.3/download"], + name = "byteorder-1.5.0.crate", + sha256 = "1fd0f2584146f6f2ef48085050886acf353beff7305ebd1ae69500e27c67f64b", + strip_prefix = "byteorder-1.5.0", + urls = ["https://crates.io/api/v1/crates/byteorder/1.5.0/download"], visibility = [], ) cargo.rust_library( - name = "byteorder-1.4.3", - srcs = 
[":byteorder-1.4.3.crate"], + name = "byteorder-1.5.0", + srcs = [":byteorder-1.5.0.crate"], crate = "byteorder", - crate_root = "byteorder-1.4.3.crate/src/lib.rs", - edition = "2018", + crate_root = "byteorder-1.5.0.crate/src/lib.rs", + edition = "2021", features = [ "default", "i128", @@ -1719,23 +1680,23 @@ cargo.rust_library( alias( name = "bytes", - actual = ":bytes-1.4.0", + actual = ":bytes-1.5.0", visibility = ["PUBLIC"], ) http_archive( - name = "bytes-1.4.0.crate", - sha256 = "89b2fd2a0dcf38d7971e2194b6b6eebab45ae01067456a7fd93d5547a61b70be", - strip_prefix = "bytes-1.4.0", - urls = ["https://crates.io/api/v1/crates/bytes/1.4.0/download"], + name = "bytes-1.5.0.crate", + sha256 = "a2bd12c1caf447e69cd4528f47f94d203fd2582878ecb9e9465484c4148a8223", + strip_prefix = "bytes-1.5.0", + urls = ["https://crates.io/api/v1/crates/bytes/1.5.0/download"], visibility = [], ) cargo.rust_library( - name = "bytes-1.4.0", - srcs = [":bytes-1.4.0.crate"], + name = "bytes-1.5.0", + srcs = [":bytes-1.5.0.crate"], crate = "bytes", - crate_root = "bytes-1.4.0.crate/src/lib.rs", + crate_root = "bytes-1.5.0.crate/src/lib.rs", edition = "2018", features = [ "default", @@ -1743,7 +1704,7 @@ cargo.rust_library( "std", ], visibility = [], - deps = [":serde-1.0.186"], + deps = [":serde-1.0.193"], ) http_archive( @@ -1762,16 +1723,16 @@ cargo.rust_library( edition = "2018", platform = { "linux-arm64": dict( - deps = [":libc-0.2.147"], + deps = [":libc-0.2.151"], ), "linux-x86_64": dict( - deps = [":libc-0.2.147"], + deps = [":libc-0.2.151"], ), "macos-arm64": dict( - deps = [":libc-0.2.147"], + deps = [":libc-0.2.151"], ), "macos-x86_64": dict( - deps = [":libc-0.2.147"], + deps = [":libc-0.2.151"], ), }, visibility = [], @@ -1796,26 +1757,27 @@ cargo.rust_library( alias( name = "chrono", - actual = ":chrono-0.4.26", + actual = ":chrono-0.4.31", visibility = ["PUBLIC"], ) http_archive( - name = "chrono-0.4.26.crate", - sha256 = 
"ec837a71355b28f6556dbd569b37b3f363091c0bd4b2e735674521b4c5fd9bc5", - strip_prefix = "chrono-0.4.26", - urls = ["https://crates.io/api/v1/crates/chrono/0.4.26/download"], + name = "chrono-0.4.31.crate", + sha256 = "7f2c685bad3eb3d45a01354cedb7d5faa66194d1d58ba6e267a8de788f79db38", + strip_prefix = "chrono-0.4.31", + urls = ["https://crates.io/api/v1/crates/chrono/0.4.31/download"], visibility = [], ) cargo.rust_library( - name = "chrono-0.4.26", - srcs = [":chrono-0.4.26.crate"], + name = "chrono-0.4.31", + srcs = [":chrono-0.4.31.crate"], crate = "chrono", - crate_root = "chrono-0.4.26.crate/src/lib.rs", + crate_root = "chrono-0.4.31.crate/src/lib.rs", edition = "2021", features = [ "alloc", + "android-tzdata", "clock", "default", "iana-time-zone", @@ -1823,36 +1785,35 @@ cargo.rust_library( "oldtime", "serde", "std", - "time", "wasm-bindgen", "wasmbind", "winapi", + "windows-targets", ], platform = { "linux-arm64": dict( - deps = [":iana-time-zone-0.1.57"], + deps = [":iana-time-zone-0.1.58"], ), "linux-x86_64": dict( - deps = [":iana-time-zone-0.1.57"], + deps = [":iana-time-zone-0.1.58"], ), "macos-arm64": dict( - deps = [":iana-time-zone-0.1.57"], + deps = [":iana-time-zone-0.1.58"], ), "macos-x86_64": dict( - deps = [":iana-time-zone-0.1.57"], + deps = [":iana-time-zone-0.1.58"], ), "windows-gnu": dict( - deps = [":winapi-0.3.9"], + deps = [":windows-targets-0.48.5"], ), "windows-msvc": dict( - deps = [":winapi-0.3.9"], + deps = [":windows-targets-0.48.5"], ), }, visibility = [], deps = [ - ":num-traits-0.2.16", - ":serde-1.0.186", - ":time-0.1.45", + ":num-traits-0.2.17", + ":serde-1.0.193", ], ) @@ -1884,7 +1845,7 @@ cargo.rust_library( deps = [ ":ciborium-io-0.2.1", ":ciborium-ll-0.2.1", - ":serde-1.0.186", + ":serde-1.0.193", ], ) @@ -1932,23 +1893,23 @@ cargo.rust_library( alias( name = "clap", - actual = ":clap-4.3.24", + actual = ":clap-4.4.11", visibility = ["PUBLIC"], ) http_archive( - name = "clap-4.3.24.crate", - sha256 = 
"fb690e81c7840c0d7aade59f242ea3b41b9bc27bcd5997890e7702ae4b32e487", - strip_prefix = "clap-4.3.24", - urls = ["https://crates.io/api/v1/crates/clap/4.3.24/download"], + name = "clap-4.4.11.crate", + sha256 = "bfaff671f6b22ca62406885ece523383b9b64022e341e53e009a62ebc47a45f2", + strip_prefix = "clap-4.4.11", + urls = ["https://crates.io/api/v1/crates/clap/4.4.11/download"], visibility = [], ) cargo.rust_library( - name = "clap-4.3.24", - srcs = [":clap-4.3.24.crate"], + name = "clap-4.4.11", + srcs = [":clap-4.4.11.crate"], crate = "clap", - crate_root = "clap-4.3.24.crate/src/lib.rs", + crate_root = "clap-4.4.11.crate/src/lib.rs", edition = "2021", features = [ "color", @@ -1964,25 +1925,24 @@ cargo.rust_library( ], visibility = [], deps = [ - ":clap_builder-4.3.24", - ":clap_derive-4.3.12", - ":once_cell-1.18.0", + ":clap_builder-4.4.11", + ":clap_derive-4.4.7", ], ) http_archive( - name = "clap_builder-4.3.24.crate", - sha256 = "5ed2e96bc16d8d740f6f48d663eddf4b8a0983e79210fd55479b7bcd0a69860e", - strip_prefix = "clap_builder-4.3.24", - urls = ["https://crates.io/api/v1/crates/clap_builder/4.3.24/download"], + name = "clap_builder-4.4.11.crate", + sha256 = "a216b506622bb1d316cd51328dce24e07bdff4a6128a47c7e7fad11878d5adbb", + strip_prefix = "clap_builder-4.4.11", + urls = ["https://crates.io/api/v1/crates/clap_builder/4.4.11/download"], visibility = [], ) cargo.rust_library( - name = "clap_builder-4.3.24", - srcs = [":clap_builder-4.3.24.crate"], + name = "clap_builder-4.4.11", + srcs = [":clap_builder-4.4.11.crate"], crate = "clap_builder", - crate_root = "clap_builder-4.3.24.crate/src/lib.rs", + crate_root = "clap_builder-4.4.11.crate/src/lib.rs", edition = "2021", features = [ "color", @@ -1996,92 +1956,92 @@ cargo.rust_library( ], visibility = [], deps = [ - ":anstream-0.3.2", - ":anstyle-1.0.2", - ":clap_lex-0.5.0", + ":anstream-0.6.5", + ":anstyle-1.0.4", + ":clap_lex-0.6.0", ":strsim-0.10.0", - ":terminal_size-0.2.6", + ":terminal_size-0.3.0", ], ) 
http_archive( - name = "clap_derive-4.3.12.crate", - sha256 = "54a9bb5758fc5dfe728d1019941681eccaf0cf8a4189b692a0ee2f2ecf90a050", - strip_prefix = "clap_derive-4.3.12", - urls = ["https://crates.io/api/v1/crates/clap_derive/4.3.12/download"], + name = "clap_derive-4.4.7.crate", + sha256 = "cf9804afaaf59a91e75b022a30fb7229a7901f60c755489cc61c9b423b836442", + strip_prefix = "clap_derive-4.4.7", + urls = ["https://crates.io/api/v1/crates/clap_derive/4.4.7/download"], visibility = [], ) cargo.rust_library( - name = "clap_derive-4.3.12", - srcs = [":clap_derive-4.3.12.crate"], + name = "clap_derive-4.4.7", + srcs = [":clap_derive-4.4.7.crate"], crate = "clap_derive", - crate_root = "clap_derive-4.3.12.crate/src/lib.rs", + crate_root = "clap_derive-4.4.7.crate/src/lib.rs", edition = "2021", features = ["default"], proc_macro = True, visibility = [], deps = [ ":heck-0.4.1", - ":proc-macro2-1.0.66", + ":proc-macro2-1.0.70", ":quote-1.0.33", - ":syn-2.0.29", + ":syn-2.0.40", ], ) http_archive( - name = "clap_lex-0.5.0.crate", - sha256 = "2da6da31387c7e4ef160ffab6d5e7f00c42626fe39aea70a7b0f1773f7dd6c1b", - strip_prefix = "clap_lex-0.5.0", - urls = ["https://crates.io/api/v1/crates/clap_lex/0.5.0/download"], + name = "clap_lex-0.6.0.crate", + sha256 = "702fc72eb24e5a1e48ce58027a675bc24edd52096d5397d4aea7c6dd9eca0bd1", + strip_prefix = "clap_lex-0.6.0", + urls = ["https://crates.io/api/v1/crates/clap_lex/0.6.0/download"], visibility = [], ) cargo.rust_library( - name = "clap_lex-0.5.0", - srcs = [":clap_lex-0.5.0.crate"], + name = "clap_lex-0.6.0", + srcs = [":clap_lex-0.6.0.crate"], crate = "clap_lex", - crate_root = "clap_lex-0.5.0.crate/src/lib.rs", + crate_root = "clap_lex-0.6.0.crate/src/lib.rs", edition = "2021", visibility = [], ) http_archive( - name = "coarsetime-0.1.23.crate", - sha256 = "a90d114103adbc625300f346d4d09dfb4ab1c4a8df6868435dd903392ecf4354", - strip_prefix = "coarsetime-0.1.23", - urls = ["https://crates.io/api/v1/crates/coarsetime/0.1.23/download"], + 
name = "coarsetime-0.1.33.crate", + sha256 = "71367d3385c716342014ad17e3d19f7788ae514885a1f4c24f500260fb365e1a", + strip_prefix = "coarsetime-0.1.33", + urls = ["https://crates.io/api/v1/crates/coarsetime/0.1.33/download"], visibility = [], ) cargo.rust_library( - name = "coarsetime-0.1.23", - srcs = [":coarsetime-0.1.23.crate"], + name = "coarsetime-0.1.33", + srcs = [":coarsetime-0.1.33.crate"], crate = "coarsetime", - crate_root = "coarsetime-0.1.23.crate/src/lib.rs", + crate_root = "coarsetime-0.1.33.crate/src/lib.rs", edition = "2018", platform = { "linux-arm64": dict( - deps = [":libc-0.2.147"], + deps = [":libc-0.2.151"], ), "linux-x86_64": dict( - deps = [":libc-0.2.147"], + deps = [":libc-0.2.151"], ), "macos-arm64": dict( - deps = [":libc-0.2.147"], + deps = [":libc-0.2.151"], ), "macos-x86_64": dict( - deps = [":libc-0.2.147"], + deps = [":libc-0.2.151"], ), "windows-gnu": dict( - deps = [":libc-0.2.147"], + deps = [":libc-0.2.151"], ), "windows-msvc": dict( - deps = [":libc-0.2.147"], + deps = [":libc-0.2.151"], ), }, visibility = [], - deps = [":once_cell-1.18.0"], + deps = [":once_cell-1.19.0"], ) alias( @@ -2114,34 +2074,34 @@ cargo.rust_library( visibility = [], deps = [ ":backtrace-0.3.69", - ":color-spantrace-0.2.0", - ":eyre-0.6.8", + ":color-spantrace-0.2.1", + ":eyre-0.6.10", ":indenter-0.3.3", - ":once_cell-1.18.0", + ":once_cell-1.19.0", ":owo-colors-3.5.0", ":tracing-error-0.2.0", ], ) http_archive( - name = "color-spantrace-0.2.0.crate", - sha256 = "1ba75b3d9449ecdccb27ecbc479fdc0b87fa2dd43d2f8298f9bf0e59aacc8dce", - strip_prefix = "color-spantrace-0.2.0", - urls = ["https://crates.io/api/v1/crates/color-spantrace/0.2.0/download"], + name = "color-spantrace-0.2.1.crate", + sha256 = "cd6be1b2a7e382e2b98b43b2adcca6bb0e465af0bdd38123873ae61eb17a72c2", + strip_prefix = "color-spantrace-0.2.1", + urls = ["https://crates.io/api/v1/crates/color-spantrace/0.2.1/download"], visibility = [], ) cargo.rust_library( - name = "color-spantrace-0.2.0", - 
srcs = [":color-spantrace-0.2.0.crate"], + name = "color-spantrace-0.2.1", + srcs = [":color-spantrace-0.2.1.crate"], crate = "color_spantrace", - crate_root = "color-spantrace-0.2.0.crate/src/lib.rs", + crate_root = "color-spantrace-0.2.1.crate/src/lib.rs", edition = "2018", visibility = [], deps = [ - ":once_cell-1.18.0", + ":once_cell-1.19.0", ":owo-colors-3.5.0", - ":tracing-core-0.1.31", + ":tracing-core-0.1.32", ":tracing-error-0.2.0", ], ) @@ -2165,23 +2125,23 @@ cargo.rust_library( alias( name = "colored", - actual = ":colored-2.0.4", + actual = ":colored-2.1.0", visibility = ["PUBLIC"], ) http_archive( - name = "colored-2.0.4.crate", - sha256 = "2674ec482fbc38012cf31e6c42ba0177b431a0cb6f15fe40efa5aab1bda516f6", - strip_prefix = "colored-2.0.4", - urls = ["https://crates.io/api/v1/crates/colored/2.0.4/download"], + name = "colored-2.1.0.crate", + sha256 = "cbf2150cce219b664a8a70df7a1f933836724b503f8a413af9365b4dcc4d90b8", + strip_prefix = "colored-2.1.0", + urls = ["https://crates.io/api/v1/crates/colored/2.1.0/download"], visibility = [], ) cargo.rust_library( - name = "colored-2.0.4", - srcs = [":colored-2.0.4.crate"], + name = "colored-2.1.0", + srcs = [":colored-2.1.0.crate"], crate = "colored", - crate_root = "colored-2.0.4.crate/src/lib.rs", + crate_root = "colored-2.1.0.crate/src/lib.rs", edition = "2021", platform = { "windows-gnu": dict( @@ -2192,31 +2152,28 @@ cargo.rust_library( ), }, visibility = [], - deps = [ - ":is-terminal-0.4.9", - ":lazy_static-1.4.0", - ], + deps = [":lazy_static-1.4.0"], ) alias( name = "comfy-table", - actual = ":comfy-table-7.0.1", + actual = ":comfy-table-7.1.0", visibility = ["PUBLIC"], ) http_archive( - name = "comfy-table-7.0.1.crate", - sha256 = "9ab77dbd8adecaf3f0db40581631b995f312a8a5ae3aa9993188bb8f23d83a5b", - strip_prefix = "comfy-table-7.0.1", - urls = ["https://crates.io/api/v1/crates/comfy-table/7.0.1/download"], + name = "comfy-table-7.1.0.crate", + sha256 = 
"7c64043d6c7b7a4c58e39e7efccfdea7b93d885a795d0c054a69dbbf4dd52686", + strip_prefix = "comfy-table-7.1.0", + urls = ["https://crates.io/api/v1/crates/comfy-table/7.1.0/download"], visibility = [], ) cargo.rust_library( - name = "comfy-table-7.0.1", - srcs = [":comfy-table-7.0.1.crate"], + name = "comfy-table-7.1.0", + srcs = [":comfy-table-7.1.0.crate"], crate = "comfy_table", - crate_root = "comfy-table-7.0.1.crate/src/lib.rs", + crate_root = "comfy-table-7.1.0.crate/src/lib.rs", edition = "2021", features = [ "console", @@ -2225,44 +2182,63 @@ cargo.rust_library( "default", "tty", ], + platform = { + "linux-arm64": dict( + deps = [":crossterm-0.27.0"], + ), + "linux-x86_64": dict( + deps = [":crossterm-0.27.0"], + ), + "macos-arm64": dict( + deps = [":crossterm-0.27.0"], + ), + "macos-x86_64": dict( + deps = [":crossterm-0.27.0"], + ), + "windows-gnu": dict( + deps = [":crossterm-0.27.0"], + ), + "windows-msvc": dict( + deps = [":crossterm-0.27.0"], + ), + }, visibility = [], deps = [ ":console-0.15.7", - ":crossterm-0.26.1", - ":strum-0.24.1", - ":strum_macros-0.24.3", - ":unicode-width-0.1.10", + ":strum-0.25.0", + ":strum_macros-0.25.3", + ":unicode-width-0.1.11", ], ) alias( name = "config", - actual = ":config-0.13.3", + actual = ":config-0.13.4", visibility = ["PUBLIC"], ) http_archive( - name = "config-0.13.3.crate", - sha256 = "d379af7f68bfc21714c6c7dea883544201741d2ce8274bb12fa54f89507f52a7", - strip_prefix = "config-0.13.3", - urls = ["https://crates.io/api/v1/crates/config/0.13.3/download"], + name = "config-0.13.4.crate", + sha256 = "23738e11972c7643e4ec947840fc463b6a571afcd3e735bdfce7d03c7a784aca", + strip_prefix = "config-0.13.4", + urls = ["https://crates.io/api/v1/crates/config/0.13.4/download"], visibility = [], ) cargo.rust_library( - name = "config-0.13.3", - srcs = [":config-0.13.3.crate"], + name = "config-0.13.4", + srcs = [":config-0.13.4.crate"], crate = "config", - crate_root = "config-0.13.3.crate/src/lib.rs", + crate_root = 
"config-0.13.4.crate/src/lib.rs", edition = "2018", features = ["toml"], visibility = [], deps = [ - ":async-trait-0.1.73", + ":async-trait-0.1.74", ":lazy_static-1.4.0", ":nom-7.1.3", ":pathdiff-0.2.1", - ":serde-1.0.186", + ":serde-1.0.193", ":toml-0.5.11", ], ) @@ -2309,8 +2285,8 @@ cargo.rust_library( visibility = [], deps = [ ":lazy_static-1.4.0", - ":libc-0.2.147", - ":unicode-width-0.1.10", + ":libc-0.2.151", + ":unicode-width-0.1.11", ], ) @@ -2399,21 +2375,21 @@ cargo.rust_library( }, visibility = [], deps = [ - ":chrono-0.4.26", - ":flate2-1.0.27", + ":chrono-0.4.31", + ":flate2-1.0.28", ":futures-util-0.3.29", - ":http-0.2.9", + ":http-0.2.11", ":hyper-0.14.27", ":log-0.4.20", ":mime-0.3.17", ":paste-1.0.14", ":pin-project-1.1.3", - ":serde-1.0.186", - ":serde_json-1.0.105", + ":serde-1.0.193", + ":serde_json-1.0.108", ":tar-0.4.40", - ":thiserror-1.0.47", - ":tokio-1.32.0", - ":url-2.4.0", + ":thiserror-1.0.50", + ":tokio-1.35.0", + ":url-2.5.0", ], ) @@ -2451,21 +2427,21 @@ cargo.rust_library( }, visibility = [], deps = [ - ":chrono-0.4.26", - ":flate2-1.0.27", + ":chrono-0.4.31", + ":flate2-1.0.28", ":futures-util-0.3.29", - ":http-0.2.9", + ":http-0.2.11", ":hyper-0.14.27", ":log-0.4.20", ":mime-0.3.17", ":paste-1.0.14", ":pin-project-1.1.3", - ":serde-1.0.186", - ":serde_json-1.0.105", + ":serde-1.0.193", + ":serde_json-1.0.108", ":tar-0.4.40", - ":thiserror-1.0.47", - ":tokio-1.32.0", - ":url-2.4.0", + ":thiserror-1.0.50", + ":tokio-1.35.0", + ":url-2.5.0", ], ) @@ -2511,63 +2487,71 @@ cargo.rust_library( ) http_archive( - name = "core-foundation-0.9.3.crate", - sha256 = "194a7a9e6de53fa55116934067c844d9d749312f75c6f6d0980e8c252f8c2146", - strip_prefix = "core-foundation-0.9.3", - urls = ["https://crates.io/api/v1/crates/core-foundation/0.9.3/download"], + name = "core-foundation-0.9.4.crate", + sha256 = "91e195e091a93c46f7102ec7818a2aa394e1e1771c3ab4825963fa03e45afb8f", + strip_prefix = "core-foundation-0.9.4", + urls = 
["https://crates.io/api/v1/crates/core-foundation/0.9.4/download"], visibility = [], ) cargo.rust_library( - name = "core-foundation-0.9.3", - srcs = [":core-foundation-0.9.3.crate"], + name = "core-foundation-0.9.4", + srcs = [":core-foundation-0.9.4.crate"], crate = "core_foundation", - crate_root = "core-foundation-0.9.3.crate/src/lib.rs", - edition = "2015", + crate_root = "core-foundation-0.9.4.crate/src/lib.rs", + edition = "2018", + features = [ + "default", + "link", + ], visibility = [], deps = [ - ":core-foundation-sys-0.8.4", - ":libc-0.2.147", + ":core-foundation-sys-0.8.6", + ":libc-0.2.151", ], ) http_archive( - name = "core-foundation-sys-0.8.4.crate", - sha256 = "e496a50fda8aacccc86d7529e2c1e0892dbd0f898a6b5645b5561b89c3210efa", - strip_prefix = "core-foundation-sys-0.8.4", - urls = ["https://crates.io/api/v1/crates/core-foundation-sys/0.8.4/download"], + name = "core-foundation-sys-0.8.6.crate", + sha256 = "06ea2b9bc92be3c2baa9334a323ebca2d6f074ff852cd1d7b11064035cd3868f", + strip_prefix = "core-foundation-sys-0.8.6", + urls = ["https://crates.io/api/v1/crates/core-foundation-sys/0.8.6/download"], visibility = [], ) cargo.rust_library( - name = "core-foundation-sys-0.8.4", - srcs = [":core-foundation-sys-0.8.4.crate"], + name = "core-foundation-sys-0.8.6", + srcs = [":core-foundation-sys-0.8.6.crate"], crate = "core_foundation_sys", - crate_root = "core-foundation-sys-0.8.4.crate/src/lib.rs", - edition = "2015", + crate_root = "core-foundation-sys-0.8.6.crate/src/lib.rs", + edition = "2018", + features = [ + "default", + "link", + ], visibility = [], ) http_archive( - name = "cpufeatures-0.2.9.crate", - sha256 = "a17b76ff3a4162b0b27f354a0c87015ddad39d35f9c0c36607a3bdd175dde1f1", - strip_prefix = "cpufeatures-0.2.9", - urls = ["https://crates.io/api/v1/crates/cpufeatures/0.2.9/download"], + name = "cpufeatures-0.2.11.crate", + sha256 = "ce420fe07aecd3e67c5f910618fe65e94158f6dcc0adf44e00d69ce2bdfe0fd0", + strip_prefix = "cpufeatures-0.2.11", + urls = 
["https://crates.io/api/v1/crates/cpufeatures/0.2.11/download"], visibility = [], ) cargo.rust_library( - name = "cpufeatures-0.2.9", - srcs = [":cpufeatures-0.2.9.crate"], + name = "cpufeatures-0.2.11", + srcs = [":cpufeatures-0.2.11.crate"], crate = "cpufeatures", - crate_root = "cpufeatures-0.2.9.crate/src/lib.rs", + crate_root = "cpufeatures-0.2.11.crate/src/lib.rs", edition = "2018", platform = { "linux-arm64": dict( - deps = [":libc-0.2.147"], + deps = [":libc-0.2.151"], ), "macos-arm64": dict( - deps = [":libc-0.2.147"], + deps = [":libc-0.2.151"], ), }, visibility = [], @@ -2627,6 +2611,87 @@ cargo.rust_library( ], ) +http_archive( + name = "crossbeam-deque-0.8.3.crate", + sha256 = "ce6fd6f855243022dcecf8702fef0c297d4338e226845fe067f6341ad9fa0cef", + strip_prefix = "crossbeam-deque-0.8.3", + urls = ["https://crates.io/api/v1/crates/crossbeam-deque/0.8.3/download"], + visibility = [], +) + +cargo.rust_library( + name = "crossbeam-deque-0.8.3", + srcs = [":crossbeam-deque-0.8.3.crate"], + crate = "crossbeam_deque", + crate_root = "crossbeam-deque-0.8.3.crate/src/lib.rs", + edition = "2018", + features = [ + "crossbeam-epoch", + "crossbeam-utils", + "default", + "std", + ], + visibility = [], + deps = [ + ":cfg-if-1.0.0", + ":crossbeam-epoch-0.9.15", + ":crossbeam-utils-0.8.16", + ], +) + +http_archive( + name = "crossbeam-epoch-0.9.15.crate", + sha256 = "ae211234986c545741a7dc064309f67ee1e5ad243d0e48335adc0484d960bcc7", + strip_prefix = "crossbeam-epoch-0.9.15", + urls = ["https://crates.io/api/v1/crates/crossbeam-epoch/0.9.15/download"], + visibility = [], +) + +cargo.rust_library( + name = "crossbeam-epoch-0.9.15", + srcs = [":crossbeam-epoch-0.9.15.crate"], + crate = "crossbeam_epoch", + crate_root = "crossbeam-epoch-0.9.15.crate/src/lib.rs", + edition = "2018", + features = [ + "alloc", + "std", + ], + rustc_flags = ["@$(location :crossbeam-epoch-0.9.15-build-script-run[rustc_flags])"], + visibility = [], + deps = [ + ":cfg-if-1.0.0", + 
":crossbeam-utils-0.8.16", + ":memoffset-0.9.0", + ":scopeguard-1.2.0", + ], +) + +cargo.rust_binary( + name = "crossbeam-epoch-0.9.15-build-script-build", + srcs = [":crossbeam-epoch-0.9.15.crate"], + crate = "build_script_build", + crate_root = "crossbeam-epoch-0.9.15.crate/build.rs", + edition = "2018", + features = [ + "alloc", + "std", + ], + visibility = [], + deps = [":autocfg-1.1.0"], +) + +buildscript_run( + name = "crossbeam-epoch-0.9.15-build-script-run", + package_name = "crossbeam-epoch", + buildscript_rule = ":crossbeam-epoch-0.9.15-build-script-build", + features = [ + "alloc", + "std", + ], + version = "0.9.15", +) + http_archive( name = "crossbeam-queue-0.3.8.crate", sha256 = "d1cfb3ea8a53f37c40dea2c7bedcbd88bdfae54f5e2175d6ecaff1c988353add", @@ -2704,32 +2769,32 @@ cargo.rust_library( platform = { "linux-arm64": dict( deps = [ - ":libc-0.2.147", - ":mio-0.8.8", + ":libc-0.2.151", + ":mio-0.8.10", ":signal-hook-0.3.17", ":signal-hook-mio-0.2.3", ], ), "linux-x86_64": dict( deps = [ - ":libc-0.2.147", - ":mio-0.8.8", + ":libc-0.2.151", + ":mio-0.8.10", ":signal-hook-0.3.17", ":signal-hook-mio-0.2.3", ], ), "macos-arm64": dict( deps = [ - ":libc-0.2.147", - ":mio-0.8.8", + ":libc-0.2.151", + ":mio-0.8.10", ":signal-hook-0.3.17", ":signal-hook-mio-0.2.3", ], ), "macos-x86_64": dict( deps = [ - ":libc-0.2.147", - ":mio-0.8.8", + ":libc-0.2.151", + ":mio-0.8.10", ":signal-hook-0.3.17", ":signal-hook-mio-0.2.3", ], @@ -2755,51 +2820,32 @@ cargo.rust_library( ) http_archive( - name = "crossterm-0.26.1.crate", - sha256 = "a84cda67535339806297f1b331d6dd6320470d2a0fe65381e79ee9e156dd3d13", - strip_prefix = "crossterm-0.26.1", - urls = ["https://crates.io/api/v1/crates/crossterm/0.26.1/download"], + name = "crossterm-0.27.0.crate", + sha256 = "f476fe445d41c9e991fd07515a6f463074b782242ccf4a5b7b1d1012e70824df", + strip_prefix = "crossterm-0.27.0", + urls = ["https://crates.io/api/v1/crates/crossterm/0.27.0/download"], visibility = [], ) cargo.rust_library( - 
name = "crossterm-0.26.1", - srcs = [":crossterm-0.26.1.crate"], + name = "crossterm-0.27.0", + srcs = [":crossterm-0.27.0.crate"], crate = "crossterm", - crate_root = "crossterm-0.26.1.crate/src/lib.rs", + crate_root = "crossterm-0.27.0.crate/src/lib.rs", edition = "2021", + features = ["windows"], platform = { "linux-arm64": dict( - deps = [ - ":libc-0.2.147", - ":mio-0.8.8", - ":signal-hook-0.3.17", - ":signal-hook-mio-0.2.3", - ], + deps = [":libc-0.2.151"], ), "linux-x86_64": dict( - deps = [ - ":libc-0.2.147", - ":mio-0.8.8", - ":signal-hook-0.3.17", - ":signal-hook-mio-0.2.3", - ], + deps = [":libc-0.2.151"], ), "macos-arm64": dict( - deps = [ - ":libc-0.2.147", - ":mio-0.8.8", - ":signal-hook-0.3.17", - ":signal-hook-mio-0.2.3", - ], + deps = [":libc-0.2.151"], ), "macos-x86_64": dict( - deps = [ - ":libc-0.2.147", - ":mio-0.8.8", - ":signal-hook-0.3.17", - ":signal-hook-mio-0.2.3", - ], + deps = [":libc-0.2.151"], ), "windows-gnu": dict( deps = [ @@ -2816,7 +2862,7 @@ cargo.rust_library( }, visibility = [], deps = [ - ":bitflags-1.3.2", + ":bitflags-2.4.1", ":parking_lot-0.12.1", ], ) @@ -2847,18 +2893,18 @@ cargo.rust_library( ) http_archive( - name = "crypto-bigint-0.5.2.crate", - sha256 = "cf4c2f4e1afd912bc40bfd6fed5d9dc1f288e0ba01bfcc835cc5bc3eb13efe15", - strip_prefix = "crypto-bigint-0.5.2", - urls = ["https://crates.io/api/v1/crates/crypto-bigint/0.5.2/download"], + name = "crypto-bigint-0.5.5.crate", + sha256 = "0dc92fb57ca44df6db8059111ab3af99a63d5d0f8375d9972e319a379c6bab76", + strip_prefix = "crypto-bigint-0.5.5", + urls = ["https://crates.io/api/v1/crates/crypto-bigint/0.5.5/download"], visibility = [], ) cargo.rust_library( - name = "crypto-bigint-0.5.2", - srcs = [":crypto-bigint-0.5.2.crate"], + name = "crypto-bigint-0.5.5", + srcs = [":crypto-bigint-0.5.5.crate"], crate = "crypto_bigint", - crate_root = "crypto-bigint-0.5.2.crate/src/lib.rs", + crate_root = "crypto-bigint-0.5.5.crate/src/lib.rs", edition = "2021", features = [ 
"generic-array", @@ -2870,7 +2916,7 @@ cargo.rust_library( ":generic-array-0.14.7", ":rand_core-0.6.4", ":subtle-2.5.0", - ":zeroize-1.6.0", + ":zeroize-1.7.0", ], ) @@ -2892,7 +2938,7 @@ cargo.rust_library( visibility = [], deps = [ ":generic-array-0.14.7", - ":typenum-1.16.0", + ":typenum-1.17.0", ], ) @@ -2945,11 +2991,128 @@ cargo.rust_library( features = ["u64_backend"], visibility = [], deps = [ - ":byteorder-1.4.3", + ":byteorder-1.5.0", ":digest-0.9.0", ":rand_core-0.5.1", ":subtle-2.5.0", - ":zeroize-1.6.0", + ":zeroize-1.7.0", + ], +) + +http_archive( + name = "curve25519-dalek-4.1.1.crate", + sha256 = "e89b8c6a2e4b1f45971ad09761aafb85514a84744b67a95e32c3cc1352d1f65c", + strip_prefix = "curve25519-dalek-4.1.1", + urls = ["https://crates.io/api/v1/crates/curve25519-dalek/4.1.1/download"], + visibility = [], +) + +cargo.rust_library( + name = "curve25519-dalek-4.1.1", + srcs = [":curve25519-dalek-4.1.1.crate"], + crate = "curve25519_dalek", + crate_root = "curve25519-dalek-4.1.1.crate/src/lib.rs", + edition = "2021", + env = { + "CARGO_MANIFEST_DIR": "curve25519-dalek-4.1.1.crate", + "CARGO_PKG_AUTHORS": "Isis Lovecruft :Henry de Valence ", + "CARGO_PKG_DESCRIPTION": "A pure-Rust implementation of group operations on ristretto255 and Curve25519", + "CARGO_PKG_NAME": "curve25519-dalek", + "CARGO_PKG_REPOSITORY": "https://github.com/dalek-cryptography/curve25519-dalek/tree/main/curve25519-dalek", + "CARGO_PKG_VERSION": "4.1.1", + "CARGO_PKG_VERSION_MAJOR": "4", + "CARGO_PKG_VERSION_MINOR": "1", + "CARGO_PKG_VERSION_PATCH": "1", + }, + features = ["digest"], + platform = { + "linux-x86_64": dict( + deps = [ + ":cpufeatures-0.2.11", + ":curve25519-dalek-derive-0.1.1", + ], + ), + "macos-x86_64": dict( + deps = [ + ":cpufeatures-0.2.11", + ":curve25519-dalek-derive-0.1.1", + ], + ), + "windows-gnu": dict( + deps = [ + ":cpufeatures-0.2.11", + ":curve25519-dalek-derive-0.1.1", + ], + ), + "windows-msvc": dict( + deps = [ + ":cpufeatures-0.2.11", + 
":curve25519-dalek-derive-0.1.1", + ], + ), + }, + rustc_flags = ["@$(location :curve25519-dalek-4.1.1-build-script-run[rustc_flags])"], + visibility = [], + deps = [ + ":cfg-if-1.0.0", + ":digest-0.10.7", + ":subtle-2.5.0", + ], +) + +cargo.rust_binary( + name = "curve25519-dalek-4.1.1-build-script-build", + srcs = [":curve25519-dalek-4.1.1.crate"], + crate = "build_script_build", + crate_root = "curve25519-dalek-4.1.1.crate/build.rs", + edition = "2021", + env = { + "CARGO_MANIFEST_DIR": "curve25519-dalek-4.1.1.crate", + "CARGO_PKG_AUTHORS": "Isis Lovecruft :Henry de Valence ", + "CARGO_PKG_DESCRIPTION": "A pure-Rust implementation of group operations on ristretto255 and Curve25519", + "CARGO_PKG_NAME": "curve25519-dalek", + "CARGO_PKG_REPOSITORY": "https://github.com/dalek-cryptography/curve25519-dalek/tree/main/curve25519-dalek", + "CARGO_PKG_VERSION": "4.1.1", + "CARGO_PKG_VERSION_MAJOR": "4", + "CARGO_PKG_VERSION_MINOR": "1", + "CARGO_PKG_VERSION_PATCH": "1", + }, + features = ["digest"], + visibility = [], + deps = [ + ":platforms-3.2.0", + ":rustc_version-0.4.0", + ], +) + +buildscript_run( + name = "curve25519-dalek-4.1.1-build-script-run", + package_name = "curve25519-dalek", + buildscript_rule = ":curve25519-dalek-4.1.1-build-script-build", + features = ["digest"], + version = "4.1.1", +) + +http_archive( + name = "curve25519-dalek-derive-0.1.1.crate", + sha256 = "f46882e17999c6cc590af592290432be3bce0428cb0d5f8b6715e4dc7b383eb3", + strip_prefix = "curve25519-dalek-derive-0.1.1", + urls = ["https://crates.io/api/v1/crates/curve25519-dalek-derive/0.1.1/download"], + visibility = [], +) + +cargo.rust_library( + name = "curve25519-dalek-derive-0.1.1", + srcs = [":curve25519-dalek-derive-0.1.1.crate"], + crate = "curve25519_dalek_derive", + crate_root = "curve25519-dalek-derive-0.1.1.crate/src/lib.rs", + edition = "2021", + proc_macro = True, + visibility = [], + deps = [ + ":proc-macro2-1.0.70", + ":quote-1.0.33", + ":syn-2.0.40", ], ) @@ -3025,7 +3188,7 @@ 
cargo.rust_library( deps = [ ":fnv-1.0.7", ":ident_case-1.0.1", - ":proc-macro2-1.0.66", + ":proc-macro2-1.0.70", ":quote-1.0.33", ":strsim-0.10.0", ":syn-1.0.109", @@ -3054,10 +3217,10 @@ cargo.rust_library( deps = [ ":fnv-1.0.7", ":ident_case-1.0.1", - ":proc-macro2-1.0.66", + ":proc-macro2-1.0.70", ":quote-1.0.33", ":strsim-0.10.0", - ":syn-2.0.29", + ":syn-2.0.40", ], ) @@ -3103,7 +3266,7 @@ cargo.rust_library( deps = [ ":darling_core-0.20.3", ":quote-1.0.33", - ":syn-2.0.29", + ":syn-2.0.40", ], ) @@ -3126,47 +3289,47 @@ cargo.rust_library( ":anyhow-1.0.75", ":html-escape-0.2.13", ":nom-7.1.3", - ":ordered-float-2.10.0", + ":ordered-float-2.10.1", ], ) http_archive( - name = "dashmap-5.5.1.crate", - sha256 = "edd72493923899c6f10c641bdbdeddc7183d6396641d99c1a0d1597f37f92e28", - strip_prefix = "dashmap-5.5.1", - urls = ["https://crates.io/api/v1/crates/dashmap/5.5.1/download"], + name = "dashmap-5.5.3.crate", + sha256 = "978747c1d849a7d2ee5e8adc0159961c48fb7e5db2f06af6723b80123bb53856", + strip_prefix = "dashmap-5.5.3", + urls = ["https://crates.io/api/v1/crates/dashmap/5.5.3/download"], visibility = [], ) cargo.rust_library( - name = "dashmap-5.5.1", - srcs = [":dashmap-5.5.1.crate"], + name = "dashmap-5.5.3", + srcs = [":dashmap-5.5.3.crate"], crate = "dashmap", - crate_root = "dashmap-5.5.1.crate/src/lib.rs", + crate_root = "dashmap-5.5.3.crate/src/lib.rs", edition = "2018", visibility = [], deps = [ ":cfg-if-1.0.0", - ":hashbrown-0.14.0", - ":lock_api-0.4.10", - ":once_cell-1.18.0", - ":parking_lot_core-0.9.8", + ":hashbrown-0.14.3", + ":lock_api-0.4.11", + ":once_cell-1.19.0", + ":parking_lot_core-0.9.9", ], ) http_archive( - name = "data-encoding-2.4.0.crate", - sha256 = "c2e66c9d817f1720209181c316d28635c050fa304f9c79e47a520882661b7308", - strip_prefix = "data-encoding-2.4.0", - urls = ["https://crates.io/api/v1/crates/data-encoding/2.4.0/download"], + name = "data-encoding-2.5.0.crate", + sha256 = 
"7e962a19be5cfc3f3bf6dd8f61eb50107f356ad6270fbb3ed41476571db78be5", + strip_prefix = "data-encoding-2.5.0", + urls = ["https://crates.io/api/v1/crates/data-encoding/2.5.0/download"], visibility = [], ) cargo.rust_library( - name = "data-encoding-2.4.0", - srcs = [":data-encoding-2.4.0.crate"], + name = "data-encoding-2.5.0", + srcs = [":data-encoding-2.5.0.crate"], crate = "data_encoding", - crate_root = "data-encoding-2.4.0.crate/src/lib.rs", + crate_root = "data-encoding-2.5.0.crate/src/lib.rs", edition = "2018", features = [ "alloc", @@ -3205,11 +3368,11 @@ cargo.rust_library( ], visibility = [], deps = [ - ":async-trait-0.1.73", - ":deadpool-runtime-0.1.2", + ":async-trait-0.1.74", + ":deadpool-runtime-0.1.3", ":num_cpus-1.16.0", ":retain_mut-0.1.9", - ":tokio-1.32.0", + ":tokio-1.35.0", ], ) @@ -3241,28 +3404,28 @@ cargo.rust_library( deps = [ ":deadpool-0.9.5", ":log-0.4.20", - ":tokio-1.32.0", - ":tokio-postgres-0.7.9", + ":tokio-1.35.0", + ":tokio-postgres-0.7.10", ], ) http_archive( - name = "deadpool-runtime-0.1.2.crate", - sha256 = "eaa37046cc0f6c3cc6090fbdbf73ef0b8ef4cfcc37f6befc0020f63e8cf121e1", - strip_prefix = "deadpool-runtime-0.1.2", - urls = ["https://crates.io/api/v1/crates/deadpool-runtime/0.1.2/download"], + name = "deadpool-runtime-0.1.3.crate", + sha256 = "63dfa964fe2a66f3fde91fc70b267fe193d822c7e603e2a675a49a7f46ad3f49", + strip_prefix = "deadpool-runtime-0.1.3", + urls = ["https://crates.io/api/v1/crates/deadpool-runtime/0.1.3/download"], visibility = [], ) cargo.rust_library( - name = "deadpool-runtime-0.1.2", - srcs = [":deadpool-runtime-0.1.2.crate"], + name = "deadpool-runtime-0.1.3", + srcs = [":deadpool-runtime-0.1.3.crate"], crate = "deadpool_runtime", - crate_root = "deadpool-runtime-0.1.2.crate/src/lib.rs", + crate_root = "deadpool-runtime-0.1.3.crate/src/lib.rs", edition = "2018", features = ["tokio_1"], named_deps = { - "tokio_1": ":tokio-1.32.0", + "tokio_1": ":tokio-1.35.0", }, visibility = [], ) @@ -3318,7 +3481,7 @@ 
cargo.rust_library( deps = [ ":const-oid-0.9.5", ":pem-rfc7468-0.6.0", - ":zeroize-1.6.0", + ":zeroize-1.7.0", ], ) @@ -3347,31 +3510,35 @@ cargo.rust_library( deps = [ ":const-oid-0.9.5", ":pem-rfc7468-0.7.0", - ":zeroize-1.6.0", + ":zeroize-1.7.0", ], ) http_archive( - name = "deranged-0.3.8.crate", - sha256 = "f2696e8a945f658fd14dc3b87242e6b80cd0f36ff04ea560fa39082368847946", - strip_prefix = "deranged-0.3.8", - urls = ["https://crates.io/api/v1/crates/deranged/0.3.8/download"], + name = "deranged-0.3.10.crate", + sha256 = "8eb30d70a07a3b04884d2677f06bec33509dc67ca60d92949e5535352d3191dc", + strip_prefix = "deranged-0.3.10", + urls = ["https://crates.io/api/v1/crates/deranged/0.3.10/download"], visibility = [], ) cargo.rust_library( - name = "deranged-0.3.8", - srcs = [":deranged-0.3.8.crate"], + name = "deranged-0.3.10", + srcs = [":deranged-0.3.10.crate"], crate = "deranged", - crate_root = "deranged-0.3.8.crate/src/lib.rs", + crate_root = "deranged-0.3.10.crate/src/lib.rs", edition = "2021", features = [ "alloc", + "powerfmt", "serde", "std", ], visibility = [], - deps = [":serde-1.0.186"], + deps = [ + ":powerfmt-0.2.0", + ":serde-1.0.193", + ], ) alias( @@ -3419,7 +3586,7 @@ cargo.rust_library( visibility = [], deps = [ ":darling-0.14.4", - ":proc-macro2-1.0.66", + ":proc-macro2-1.0.70", ":quote-1.0.33", ":syn-1.0.109", ], @@ -3499,7 +3666,7 @@ cargo.rust_library( visibility = [], deps = [ ":convert_case-0.4.0", - ":proc-macro2-1.0.66", + ":proc-macro2-1.0.70", ":quote-1.0.33", ":syn-1.0.109", ], @@ -3638,16 +3805,16 @@ cargo.rust_library( edition = "2015", platform = { "linux-arm64": dict( - deps = [":libc-0.2.147"], + deps = [":libc-0.2.151"], ), "linux-x86_64": dict( - deps = [":libc-0.2.147"], + deps = [":libc-0.2.151"], ), "macos-arm64": dict( - deps = [":libc-0.2.147"], + deps = [":libc-0.2.151"], ), "macos-x86_64": dict( - deps = [":libc-0.2.147"], + deps = [":libc-0.2.151"], ), "windows-gnu": dict( deps = [":winapi-0.3.9"], @@ -3675,16 +3842,16 @@ 
cargo.rust_library( edition = "2015", platform = { "linux-arm64": dict( - deps = [":libc-0.2.147"], + deps = [":libc-0.2.151"], ), "linux-x86_64": dict( - deps = [":libc-0.2.147"], + deps = [":libc-0.2.151"], ), "macos-arm64": dict( - deps = [":libc-0.2.147"], + deps = [":libc-0.2.151"], ), "macos-x86_64": dict( - deps = [":libc-0.2.147"], + deps = [":libc-0.2.151"], ), "windows-gnu": dict( deps = [":windows-sys-0.48.0"], @@ -3734,21 +3901,21 @@ cargo.rust_library( deps = [ ":asynchronous-codec-0.6.2", ":base64-0.13.1", - ":byteorder-1.4.3", - ":bytes-1.4.0", - ":chrono-0.4.26", + ":byteorder-1.5.0", + ":bytes-1.5.0", + ":chrono-0.4.31", ":containers-api-0.9.0", ":docker-api-stubs-0.6.0", ":futures-util-0.3.29", - ":http-0.2.9", + ":http-0.2.11", ":hyper-0.14.27", ":log-0.4.20", ":paste-1.0.14", - ":serde-1.0.186", - ":serde_json-1.0.105", + ":serde-1.0.193", + ":serde_json-1.0.108", ":tar-0.4.40", - ":thiserror-1.0.47", - ":url-2.4.0", + ":thiserror-1.0.50", + ":url-2.5.0", ], ) @@ -3760,9 +3927,9 @@ cargo.rust_library( edition = "2021", visibility = [], deps = [ - ":chrono-0.4.26", - ":serde-1.0.186", - ":serde_json-1.0.105", + ":chrono-0.4.31", + ":serde-1.0.193", + ":serde_json-1.0.108", ":serde_with-2.3.3", ], ) @@ -3786,40 +3953,40 @@ cargo.rust_library( alias( name = "dyn-clone", - actual = ":dyn-clone-1.0.13", + actual = ":dyn-clone-1.0.16", visibility = ["PUBLIC"], ) http_archive( - name = "dyn-clone-1.0.13.crate", - sha256 = "bbfc4744c1b8f2a09adc0e55242f60b1af195d88596bd8700be74418c056c555", - strip_prefix = "dyn-clone-1.0.13", - urls = ["https://crates.io/api/v1/crates/dyn-clone/1.0.13/download"], + name = "dyn-clone-1.0.16.crate", + sha256 = "545b22097d44f8a9581187cdf93de7a71e4722bf51200cfaba810865b49a495d", + strip_prefix = "dyn-clone-1.0.16", + urls = ["https://crates.io/api/v1/crates/dyn-clone/1.0.16/download"], visibility = [], ) cargo.rust_library( - name = "dyn-clone-1.0.13", - srcs = [":dyn-clone-1.0.13.crate"], + name = "dyn-clone-1.0.16", + 
srcs = [":dyn-clone-1.0.16.crate"], crate = "dyn_clone", - crate_root = "dyn-clone-1.0.13.crate/src/lib.rs", + crate_root = "dyn-clone-1.0.16.crate/src/lib.rs", edition = "2018", visibility = [], ) http_archive( - name = "ecdsa-0.16.8.crate", - sha256 = "a4b1e0c257a9e9f25f90ff76d7a68360ed497ee519c8e428d1825ef0000799d4", - strip_prefix = "ecdsa-0.16.8", - urls = ["https://crates.io/api/v1/crates/ecdsa/0.16.8/download"], + name = "ecdsa-0.16.9.crate", + sha256 = "ee27f32b5c5292967d2d4a9d7f1e0b0aed2c15daded5a60300e4abb9d8020bca", + strip_prefix = "ecdsa-0.16.9", + urls = ["https://crates.io/api/v1/crates/ecdsa/0.16.9/download"], visibility = [], ) cargo.rust_library( - name = "ecdsa-0.16.8", - srcs = [":ecdsa-0.16.8.crate"], + name = "ecdsa-0.16.9", + srcs = [":ecdsa-0.16.9.crate"], crate = "ecdsa", - crate_root = "ecdsa-0.16.8.crate/src/lib.rs", + crate_root = "ecdsa-0.16.9.crate/src/lib.rs", edition = "2021", features = [ "alloc", @@ -3839,10 +4006,10 @@ cargo.rust_library( deps = [ ":der-0.7.8", ":digest-0.10.7", - ":elliptic-curve-0.13.5", + ":elliptic-curve-0.13.8", ":rfc6979-0.4.0", - ":signature-2.1.0", - ":spki-0.7.2", + ":signature-2.2.0", + ":spki-0.7.3", ], ) @@ -3864,6 +4031,24 @@ cargo.rust_library( deps = [":signature-1.6.4"], ) +http_archive( + name = "ed25519-2.2.3.crate", + sha256 = "115531babc129696a58c64a4fef0a8bf9e9698629fb97e9e40767d235cfbcd53", + strip_prefix = "ed25519-2.2.3", + urls = ["https://crates.io/api/v1/crates/ed25519/2.2.3/download"], + visibility = [], +) + +cargo.rust_library( + name = "ed25519-2.2.3", + srcs = [":ed25519-2.2.3.crate"], + crate = "ed25519", + crate_root = "ed25519-2.2.3.crate/src/lib.rs", + edition = "2021", + visibility = [], + deps = [":signature-2.2.0"], +) + http_archive( name = "ed25519-compact-2.0.4.crate", sha256 = "6a3d382e8464107391c8706b4c14b087808ecb909f6c15c34114bc42e53a9e4c", @@ -3890,7 +4075,7 @@ cargo.rust_library( visibility = [], deps = [ ":ct-codecs-1.1.1", - ":getrandom-0.2.10", + 
":getrandom-0.2.11", ], ) @@ -3914,23 +4099,51 @@ cargo.rust_library( ":curve25519-dalek-3.2.0", ":ed25519-1.5.3", ":sha2-0.9.9", - ":zeroize-1.6.0", + ":zeroize-1.7.0", + ], +) + +http_archive( + name = "ed25519-dalek-2.1.0.crate", + sha256 = "1f628eaec48bfd21b865dc2950cfa014450c01d2fa2b69a86c2fd5844ec523c0", + strip_prefix = "ed25519-dalek-2.1.0", + urls = ["https://crates.io/api/v1/crates/ed25519-dalek/2.1.0/download"], + visibility = [], +) + +cargo.rust_library( + name = "ed25519-dalek-2.1.0", + srcs = [":ed25519-dalek-2.1.0.crate"], + crate = "ed25519_dalek", + crate_root = "ed25519-dalek-2.1.0.crate/src/lib.rs", + edition = "2021", + features = [ + "digest", + "signature", + ], + visibility = [], + deps = [ + ":curve25519-dalek-4.1.1", + ":ed25519-2.2.3", + ":sha2-0.10.8", + ":signature-2.2.0", + ":subtle-2.5.0", ], ) http_archive( - name = "educe-0.4.22.crate", - sha256 = "079044df30bb07de7d846d41a184c4b00e66ebdac93ee459253474f3a47e50ae", - strip_prefix = "educe-0.4.22", - urls = ["https://crates.io/api/v1/crates/educe/0.4.22/download"], + name = "educe-0.4.23.crate", + sha256 = "0f0042ff8246a363dbe77d2ceedb073339e85a804b9a47636c6e016a9a32c05f", + strip_prefix = "educe-0.4.23", + urls = ["https://crates.io/api/v1/crates/educe/0.4.23/download"], visibility = [], ) cargo.rust_library( - name = "educe-0.4.22", - srcs = [":educe-0.4.22.crate"], + name = "educe-0.4.23", + srcs = [":educe-0.4.23.crate"], crate = "educe", - crate_root = "educe-0.4.22.crate/src/lib.rs", + crate_root = "educe-0.4.23.crate/src/lib.rs", edition = "2021", features = [ "Debug", @@ -3939,8 +4152,8 @@ cargo.rust_library( proc_macro = True, visibility = [], deps = [ - ":enum-ordinalize-3.1.13", - ":proc-macro2-1.0.66", + ":enum-ordinalize-3.1.15", + ":proc-macro2-1.0.70", ":quote-1.0.33", ":syn-1.0.109", ], @@ -3968,18 +4181,18 @@ cargo.rust_library( ) http_archive( - name = "elliptic-curve-0.13.5.crate", - sha256 = "968405c8fdc9b3bf4df0a6638858cc0b52462836ab6b1c87377785dd09cf1c0b", - 
strip_prefix = "elliptic-curve-0.13.5", - urls = ["https://crates.io/api/v1/crates/elliptic-curve/0.13.5/download"], + name = "elliptic-curve-0.13.8.crate", + sha256 = "b5e6043086bf7973472e0c7dff2142ea0b680d30e18d9cc40f267efbf222bd47", + strip_prefix = "elliptic-curve-0.13.8", + urls = ["https://crates.io/api/v1/crates/elliptic-curve/0.13.8/download"], visibility = [], ) cargo.rust_library( - name = "elliptic-curve-0.13.5", - srcs = [":elliptic-curve-0.13.5.crate"], + name = "elliptic-curve-0.13.8", + srcs = [":elliptic-curve-0.13.8.crate"], crate = "elliptic_curve", - crate_root = "elliptic-curve-0.13.5.crate/src/lib.rs", + crate_root = "elliptic-curve-0.13.8.crate/src/lib.rs", edition = "2021", features = [ "alloc", @@ -3997,7 +4210,7 @@ cargo.rust_library( visibility = [], deps = [ ":base16ct-0.2.0", - ":crypto-bigint-0.5.2", + ":crypto-bigint-0.5.5", ":digest-0.10.7", ":ff-0.13.0", ":generic-array-0.14.7", @@ -4008,7 +4221,7 @@ cargo.rust_library( ":rand_core-0.6.4", ":sec1-0.7.3", ":subtle-2.5.0", - ":zeroize-1.6.0", + ":zeroize-1.7.0", ], ) @@ -4056,27 +4269,27 @@ cargo.rust_library( ) http_archive( - name = "enum-ordinalize-3.1.13.crate", - sha256 = "e4f76552f53cefc9a7f64987c3701b99d982f7690606fd67de1d09712fbf52f1", - strip_prefix = "enum-ordinalize-3.1.13", - urls = ["https://crates.io/api/v1/crates/enum-ordinalize/3.1.13/download"], + name = "enum-ordinalize-3.1.15.crate", + sha256 = "1bf1fa3f06bbff1ea5b1a9c7b14aa992a39657db60a2759457328d7e058f49ee", + strip_prefix = "enum-ordinalize-3.1.15", + urls = ["https://crates.io/api/v1/crates/enum-ordinalize/3.1.15/download"], visibility = [], ) cargo.rust_library( - name = "enum-ordinalize-3.1.13", - srcs = [":enum-ordinalize-3.1.13.crate"], + name = "enum-ordinalize-3.1.15", + srcs = [":enum-ordinalize-3.1.15.crate"], crate = "enum_ordinalize", - crate_root = "enum-ordinalize-3.1.13.crate/src/lib.rs", + crate_root = "enum-ordinalize-3.1.15.crate/src/lib.rs", edition = "2021", proc_macro = True, visibility = [], 
deps = [ ":num-bigint-0.4.4", - ":num-traits-0.2.16", - ":proc-macro2-1.0.66", + ":num-traits-0.2.17", + ":proc-macro2-1.0.70", ":quote-1.0.33", - ":syn-2.0.29", + ":syn-2.0.40", ], ) @@ -4098,37 +4311,38 @@ cargo.rust_library( ) http_archive( - name = "errno-0.3.2.crate", - sha256 = "6b30f669a7961ef1631673d2766cc92f52d64f7ef354d4fe0ddfd30ed52f0f4f", - strip_prefix = "errno-0.3.2", - urls = ["https://crates.io/api/v1/crates/errno/0.3.2/download"], + name = "errno-0.3.8.crate", + sha256 = "a258e46cdc063eb8519c00b9fc845fc47bcfca4130e2f08e88665ceda8474245", + strip_prefix = "errno-0.3.8", + urls = ["https://crates.io/api/v1/crates/errno/0.3.8/download"], visibility = [], ) cargo.rust_library( - name = "errno-0.3.2", - srcs = [":errno-0.3.2.crate"], + name = "errno-0.3.8", + srcs = [":errno-0.3.8.crate"], crate = "errno", - crate_root = "errno-0.3.2.crate/src/lib.rs", + crate_root = "errno-0.3.8.crate/src/lib.rs", edition = "2018", + features = ["std"], platform = { "linux-arm64": dict( - deps = [":libc-0.2.147"], + deps = [":libc-0.2.151"], ), "linux-x86_64": dict( - deps = [":libc-0.2.147"], + deps = [":libc-0.2.151"], ), "macos-arm64": dict( - deps = [":libc-0.2.147"], + deps = [":libc-0.2.151"], ), "macos-x86_64": dict( - deps = [":libc-0.2.147"], + deps = [":libc-0.2.151"], ), "windows-gnu": dict( - deps = [":windows-sys-0.48.0"], + deps = [":windows-sys-0.52.0"], ), "windows-msvc": dict( - deps = [":windows-sys-0.48.0"], + deps = [":windows-sys-0.52.0"], ), }, visibility = [], @@ -4152,18 +4366,18 @@ cargo.rust_library( ) http_archive( - name = "eyre-0.6.8.crate", - sha256 = "4c2b6b5a29c02cdc822728b7d7b8ae1bab3e3b05d44522770ddd49722eeac7eb", - strip_prefix = "eyre-0.6.8", - urls = ["https://crates.io/api/v1/crates/eyre/0.6.8/download"], + name = "eyre-0.6.10.crate", + sha256 = "8bbb8258be8305fb0237d7b295f47bb24ff1b136a535f473baf40e70468515aa", + strip_prefix = "eyre-0.6.10", + urls = ["https://crates.io/api/v1/crates/eyre/0.6.10/download"], visibility = [], ) 
cargo.rust_library( - name = "eyre-0.6.8", - srcs = [":eyre-0.6.8.crate"], + name = "eyre-0.6.10", + srcs = [":eyre-0.6.10.crate"], crate = "eyre", - crate_root = "eyre-0.6.8.crate/src/lib.rs", + crate_root = "eyre-0.6.10.crate/src/lib.rs", edition = "2018", features = [ "auto-install", @@ -4173,7 +4387,7 @@ cargo.rust_library( visibility = [], deps = [ ":indenter-0.3.3", - ":once_cell-1.18.0", + ":once_cell-1.19.0", ], ) @@ -4216,18 +4430,18 @@ cargo.rust_library( ) http_archive( - name = "fastrand-2.0.0.crate", - sha256 = "6999dc1837253364c2ebb0704ba97994bd874e8f195d665c50b7548f6ea92764", - strip_prefix = "fastrand-2.0.0", - urls = ["https://crates.io/api/v1/crates/fastrand/2.0.0/download"], + name = "fastrand-2.0.1.crate", + sha256 = "25cbce373ec4653f1a01a31e8a5e5ec0c622dc27ff9c4e6606eefef5cbbed4a5", + strip_prefix = "fastrand-2.0.1", + urls = ["https://crates.io/api/v1/crates/fastrand/2.0.1/download"], visibility = [], ) cargo.rust_library( - name = "fastrand-2.0.0", - srcs = [":fastrand-2.0.0.crate"], + name = "fastrand-2.0.1", + srcs = [":fastrand-2.0.1.crate"], crate = "fastrand", - crate_root = "fastrand-2.0.0.crate/src/lib.rs", + crate_root = "fastrand-2.0.1.crate/src/lib.rs", edition = "2018", features = [ "alloc", @@ -4260,43 +4474,65 @@ cargo.rust_library( ) http_archive( - name = "filetime-0.2.22.crate", - sha256 = "d4029edd3e734da6fe05b6cd7bd2960760a616bd2ddd0d59a0124746d6272af0", - strip_prefix = "filetime-0.2.22", - urls = ["https://crates.io/api/v1/crates/filetime/0.2.22/download"], + name = "filetime-0.2.23.crate", + sha256 = "1ee447700ac8aa0b2f2bd7bc4462ad686ba06baa6727ac149a2d6277f0d240fd", + strip_prefix = "filetime-0.2.23", + urls = ["https://crates.io/api/v1/crates/filetime/0.2.23/download"], visibility = [], ) cargo.rust_library( - name = "filetime-0.2.22", - srcs = [":filetime-0.2.22.crate"], + name = "filetime-0.2.23", + srcs = [":filetime-0.2.23.crate"], crate = "filetime", - crate_root = "filetime-0.2.22.crate/src/lib.rs", + crate_root = 
"filetime-0.2.23.crate/src/lib.rs", edition = "2018", platform = { "linux-arm64": dict( - deps = [":libc-0.2.147"], + deps = [":libc-0.2.151"], ), "linux-x86_64": dict( - deps = [":libc-0.2.147"], + deps = [":libc-0.2.151"], ), "macos-arm64": dict( - deps = [":libc-0.2.147"], + deps = [":libc-0.2.151"], ), "macos-x86_64": dict( - deps = [":libc-0.2.147"], + deps = [":libc-0.2.151"], ), "windows-gnu": dict( - deps = [":windows-sys-0.48.0"], + deps = [":windows-sys-0.52.0"], ), "windows-msvc": dict( - deps = [":windows-sys-0.48.0"], + deps = [":windows-sys-0.52.0"], ), }, visibility = [], deps = [":cfg-if-1.0.0"], ) +http_archive( + name = "finl_unicode-1.2.0.crate", + sha256 = "8fcfdc7a0362c9f4444381a9e697c79d435fe65b52a37466fc2c1184cee9edc6", + strip_prefix = "finl_unicode-1.2.0", + urls = ["https://crates.io/api/v1/crates/finl_unicode/1.2.0/download"], + visibility = [], +) + +cargo.rust_library( + name = "finl_unicode-1.2.0", + srcs = [":finl_unicode-1.2.0.crate"], + crate = "finl_unicode", + crate_root = "finl_unicode-1.2.0.crate/src/lib.rs", + edition = "2021", + features = [ + "categories", + "default", + "grapheme_clusters", + ], + visibility = [], +) + http_archive( name = "fixedbitset-0.4.2.crate", sha256 = "0ce7134b9999ecaf8bcd65542e436736ef32ddca1b3e06094cb6ec5755203b80", @@ -4316,23 +4552,23 @@ cargo.rust_library( alias( name = "flate2", - actual = ":flate2-1.0.27", + actual = ":flate2-1.0.28", visibility = ["PUBLIC"], ) http_archive( - name = "flate2-1.0.27.crate", - sha256 = "c6c98ee8095e9d1dcbf2fcc6d95acccb90d1c81db1e44725c6a984b1dbdfb010", - strip_prefix = "flate2-1.0.27", - urls = ["https://crates.io/api/v1/crates/flate2/1.0.27/download"], + name = "flate2-1.0.28.crate", + sha256 = "46303f565772937ffe1d394a4fac6f411c6013172fadde9dcdb1e147a086940e", + strip_prefix = "flate2-1.0.28", + urls = ["https://crates.io/api/v1/crates/flate2/1.0.28/download"], visibility = [], ) cargo.rust_library( - name = "flate2-1.0.27", - srcs = [":flate2-1.0.27.crate"], + 
name = "flate2-1.0.28", + srcs = [":flate2-1.0.28.crate"], crate = "flate2", - crate_root = "flate2-1.0.27.crate/src/lib.rs", + crate_root = "flate2-1.0.28.crate/src/lib.rs", edition = "2018", features = [ "any_impl", @@ -4369,18 +4605,18 @@ cargo.rust_library( ) http_archive( - name = "form_urlencoded-1.2.0.crate", - sha256 = "a62bc1cf6f830c2ec14a513a9fb124d0a213a629668a4186f329db21fe045652", - strip_prefix = "form_urlencoded-1.2.0", - urls = ["https://crates.io/api/v1/crates/form_urlencoded/1.2.0/download"], + name = "form_urlencoded-1.2.1.crate", + sha256 = "e13624c2627564efccf4934284bdd98cbaa14e79b0b5a141218e507b3a823456", + strip_prefix = "form_urlencoded-1.2.1", + urls = ["https://crates.io/api/v1/crates/form_urlencoded/1.2.1/download"], visibility = [], ) cargo.rust_library( - name = "form_urlencoded-1.2.0", - srcs = [":form_urlencoded-1.2.0.crate"], + name = "form_urlencoded-1.2.1", + srcs = [":form_urlencoded-1.2.1.crate"], crate = "form_urlencoded", - crate_root = "form_urlencoded-1.2.0.crate/src/lib.rs", + crate_root = "form_urlencoded-1.2.1.crate/src/lib.rs", edition = "2018", features = [ "alloc", @@ -4388,7 +4624,7 @@ cargo.rust_library( "std", ], visibility = [], - deps = [":percent-encoding-2.3.0"], + deps = [":percent-encoding-2.3.1"], ) http_archive( @@ -4410,23 +4646,23 @@ cargo.rust_library( alias( name = "futures", - actual = ":futures-0.3.28", + actual = ":futures-0.3.29", visibility = ["PUBLIC"], ) http_archive( - name = "futures-0.3.28.crate", - sha256 = "23342abe12aba583913b2e62f22225ff9c950774065e4bfb61a19cd9770fec40", - strip_prefix = "futures-0.3.28", - urls = ["https://crates.io/api/v1/crates/futures/0.3.28/download"], + name = "futures-0.3.29.crate", + sha256 = "da0290714b38af9b4a7b094b8a37086d1b4e61f2df9122c3cad2577669145335", + strip_prefix = "futures-0.3.29", + urls = ["https://crates.io/api/v1/crates/futures/0.3.29/download"], visibility = [], ) cargo.rust_library( - name = "futures-0.3.28", - srcs = [":futures-0.3.28.crate"], + 
name = "futures-0.3.29", + srcs = [":futures-0.3.29.crate"], crate = "futures", - crate_root = "futures-0.3.28.crate/src/lib.rs", + crate_root = "futures-0.3.29.crate/src/lib.rs", edition = "2018", features = [ "alloc", @@ -4440,7 +4676,7 @@ cargo.rust_library( deps = [ ":futures-channel-0.3.29", ":futures-core-0.3.29", - ":futures-executor-0.3.28", + ":futures-executor-0.3.29", ":futures-io-0.3.29", ":futures-sink-0.3.29", ":futures-task-0.3.29", @@ -4521,18 +4757,18 @@ cargo.rust_library( ) http_archive( - name = "futures-executor-0.3.28.crate", - sha256 = "ccecee823288125bd88b4d7f565c9e58e41858e47ab72e8ea2d64e93624386e0", - strip_prefix = "futures-executor-0.3.28", - urls = ["https://crates.io/api/v1/crates/futures-executor/0.3.28/download"], + name = "futures-executor-0.3.29.crate", + sha256 = "0f4fb8693db0cf099eadcca0efe2a5a22e4550f98ed16aba6c48700da29597bc", + strip_prefix = "futures-executor-0.3.29", + urls = ["https://crates.io/api/v1/crates/futures-executor/0.3.29/download"], visibility = [], ) cargo.rust_library( - name = "futures-executor-0.3.28", - srcs = [":futures-executor-0.3.28.crate"], + name = "futures-executor-0.3.29", + srcs = [":futures-executor-0.3.29.crate"], crate = "futures_executor", - crate_root = "futures-executor-0.3.28.crate/src/lib.rs", + crate_root = "futures-executor-0.3.29.crate/src/lib.rs", edition = "2018", features = [ "default", @@ -4569,7 +4805,7 @@ cargo.rust_library( visibility = [], deps = [ ":futures-core-0.3.29", - ":lock_api-0.4.10", + ":lock_api-0.4.11", ":parking_lot-0.11.2", ], ) @@ -4630,10 +4866,10 @@ cargo.rust_library( ":fastrand-1.9.0", ":futures-core-0.3.29", ":futures-io-0.3.29", - ":memchr-2.5.0", - ":parking-2.1.0", - ":pin-project-lite-0.2.12", - ":waker-fn-1.1.0", + ":memchr-2.6.4", + ":parking-2.2.0", + ":pin-project-lite-0.2.13", + ":waker-fn-1.1.1", ], ) @@ -4654,9 +4890,9 @@ cargo.rust_library( proc_macro = True, visibility = [], deps = [ - ":proc-macro2-1.0.66", + ":proc-macro2-1.0.70", 
":quote-1.0.33", - ":syn-2.0.29", + ":syn-2.0.40", ], ) @@ -4763,8 +4999,8 @@ cargo.rust_library( ":futures-macro-0.3.29", ":futures-sink-0.3.29", ":futures-task-0.3.29", - ":memchr-2.5.0", - ":pin-project-lite-0.2.12", + ":memchr-2.6.4", + ":pin-project-lite-0.2.13", ":pin-utils-0.1.0", ":slab-0.4.9", ], @@ -4788,8 +5024,8 @@ cargo.rust_library( visibility = [], deps = [ ":bytes-0.5.6", - ":futures-0.3.28", - ":memchr-2.5.0", + ":futures-0.3.29", + ":memchr-2.6.4", ":pin-project-0.4.30", ], ) @@ -4814,8 +5050,8 @@ cargo.rust_library( ], visibility = [], deps = [ - ":typenum-1.16.0", - ":zeroize-1.6.0", + ":typenum-1.17.0", + ":zeroize-1.7.0", ], ) @@ -4841,16 +5077,16 @@ cargo.rust_library( ], platform = { "linux-arm64": dict( - deps = [":libc-0.2.147"], + deps = [":libc-0.2.151"], ), "linux-x86_64": dict( - deps = [":libc-0.2.147"], + deps = [":libc-0.2.151"], ), "macos-arm64": dict( - deps = [":libc-0.2.147"], + deps = [":libc-0.2.151"], ), "macos-x86_64": dict( - deps = [":libc-0.2.147"], + deps = [":libc-0.2.151"], ), }, visibility = [], @@ -4858,18 +5094,18 @@ cargo.rust_library( ) http_archive( - name = "getrandom-0.2.10.crate", - sha256 = "be4136b2a15dd319360be1c07d9933517ccf0be8f16bf62a3bee4f0d618df427", - strip_prefix = "getrandom-0.2.10", - urls = ["https://crates.io/api/v1/crates/getrandom/0.2.10/download"], + name = "getrandom-0.2.11.crate", + sha256 = "fe9006bed769170c11f845cf00c7c1e9092aeb3f268e007c3e760ac68008070f", + strip_prefix = "getrandom-0.2.11", + urls = ["https://crates.io/api/v1/crates/getrandom/0.2.11/download"], visibility = [], ) cargo.rust_library( - name = "getrandom-0.2.10", - srcs = [":getrandom-0.2.10.crate"], + name = "getrandom-0.2.11", + srcs = [":getrandom-0.2.11.crate"], crate = "getrandom", - crate_root = "getrandom-0.2.10.crate/src/lib.rs", + crate_root = "getrandom-0.2.11.crate/src/lib.rs", edition = "2018", features = [ "custom", @@ -4877,16 +5113,16 @@ cargo.rust_library( ], platform = { "linux-arm64": dict( - deps = 
[":libc-0.2.147"], + deps = [":libc-0.2.151"], ), "linux-x86_64": dict( - deps = [":libc-0.2.147"], + deps = [":libc-0.2.151"], ), "macos-arm64": dict( - deps = [":libc-0.2.147"], + deps = [":libc-0.2.151"], ), "macos-x86_64": dict( - deps = [":libc-0.2.147"], + deps = [":libc-0.2.151"], ), }, visibility = [], @@ -4894,18 +5130,18 @@ cargo.rust_library( ) http_archive( - name = "gimli-0.28.0.crate", - sha256 = "6fb8d784f27acf97159b40fc4db5ecd8aa23b9ad5ef69cdd136d3bc80665f0c0", - strip_prefix = "gimli-0.28.0", - urls = ["https://crates.io/api/v1/crates/gimli/0.28.0/download"], + name = "gimli-0.28.1.crate", + sha256 = "4271d37baee1b8c7e4b708028c57d816cf9d2434acb33a549475f78c181f6253", + strip_prefix = "gimli-0.28.1", + urls = ["https://crates.io/api/v1/crates/gimli/0.28.1/download"], visibility = [], ) cargo.rust_library( - name = "gimli-0.28.0", - srcs = [":gimli-0.28.0.crate"], + name = "gimli-0.28.1", + srcs = [":gimli-0.28.1.crate"], crate = "gimli", - crate_root = "gimli-0.28.0.crate/src/lib.rs", + crate_root = "gimli-0.28.1.crate/src/lib.rs", edition = "2018", features = [ "read", @@ -4915,30 +5151,30 @@ cargo.rust_library( ) http_archive( - name = "globset-0.4.13.crate", - sha256 = "759c97c1e17c55525b57192c06a267cda0ac5210b222d6b82189a2338fa1c13d", - strip_prefix = "globset-0.4.13", - urls = ["https://crates.io/api/v1/crates/globset/0.4.13/download"], + name = "globset-0.4.14.crate", + sha256 = "57da3b9b5b85bd66f31093f8c408b90a74431672542466497dcbdfdc02034be1", + strip_prefix = "globset-0.4.14", + urls = ["https://crates.io/api/v1/crates/globset/0.4.14/download"], visibility = [], ) cargo.rust_library( - name = "globset-0.4.13", - srcs = [":globset-0.4.13.crate"], + name = "globset-0.4.14", + srcs = [":globset-0.4.14.crate"], crate = "globset", - crate_root = "globset-0.4.13.crate/src/lib.rs", - edition = "2018", + crate_root = "globset-0.4.14.crate/src/lib.rs", + edition = "2021", features = [ "default", "log", ], visibility = [], deps = [ - 
":aho-corasick-1.0.4", - ":bstr-1.6.0", - ":fnv-1.0.7", + ":aho-corasick-1.1.2", + ":bstr-1.8.0", ":log-0.4.20", - ":regex-1.9.3", + ":regex-automata-0.4.3", + ":regex-syntax-0.8.2", ], ) @@ -4966,32 +5202,32 @@ cargo.rust_library( ) http_archive( - name = "h2-0.3.21.crate", - sha256 = "91fc23aa11be92976ef4729127f1a74adf36d8436f7816b185d18df956790833", - strip_prefix = "h2-0.3.21", - urls = ["https://crates.io/api/v1/crates/h2/0.3.21/download"], + name = "h2-0.3.22.crate", + sha256 = "4d6250322ef6e60f93f9a2162799302cd6f68f79f6e5d85c8c16f14d1d958178", + strip_prefix = "h2-0.3.22", + urls = ["https://crates.io/api/v1/crates/h2/0.3.22/download"], visibility = [], ) cargo.rust_library( - name = "h2-0.3.21", - srcs = [":h2-0.3.21.crate"], + name = "h2-0.3.22", + srcs = [":h2-0.3.22.crate"], crate = "h2", - crate_root = "h2-0.3.21.crate/src/lib.rs", + crate_root = "h2-0.3.22.crate/src/lib.rs", edition = "2018", visibility = [], deps = [ - ":bytes-1.4.0", + ":bytes-1.5.0", ":fnv-1.0.7", ":futures-core-0.3.29", ":futures-sink-0.3.29", ":futures-util-0.3.29", - ":http-0.2.9", - ":indexmap-1.9.3", + ":http-0.2.11", + ":indexmap-2.1.0", ":slab-0.4.9", - ":tokio-1.32.0", - ":tokio-util-0.7.8", - ":tracing-0.1.37", + ":tokio-1.35.0", + ":tokio-util-0.7.10", + ":tracing-0.1.40", ], ) @@ -5033,45 +5269,22 @@ cargo.rust_library( "raw", ], visibility = [], - deps = [":ahash-0.7.6"], -) - -http_archive( - name = "hashbrown-0.13.2.crate", - sha256 = "43a3c133739dddd0d2990f9a4bdf8eb4b21ef50e4851ca85ab661199821d510e", - strip_prefix = "hashbrown-0.13.2", - urls = ["https://crates.io/api/v1/crates/hashbrown/0.13.2/download"], - visibility = [], -) - -cargo.rust_library( - name = "hashbrown-0.13.2", - srcs = [":hashbrown-0.13.2.crate"], - crate = "hashbrown", - crate_root = "hashbrown-0.13.2.crate/src/lib.rs", - edition = "2021", - features = [ - "ahash", - "default", - "inline-more", - ], - visibility = [], - deps = [":ahash-0.8.3"], + deps = [":ahash-0.7.7"], ) http_archive( - name = 
"hashbrown-0.14.0.crate", - sha256 = "2c6201b9ff9fd90a5a3bac2e56a830d0caa509576f0e503818ee82c181b3437a", - strip_prefix = "hashbrown-0.14.0", - urls = ["https://crates.io/api/v1/crates/hashbrown/0.14.0/download"], + name = "hashbrown-0.14.3.crate", + sha256 = "290f1a1d9242c78d09ce40a5e87e7554ee637af1351968159f4952f028f75604", + strip_prefix = "hashbrown-0.14.3", + urls = ["https://crates.io/api/v1/crates/hashbrown/0.14.3/download"], visibility = [], ) cargo.rust_library( - name = "hashbrown-0.14.0", - srcs = [":hashbrown-0.14.0.crate"], + name = "hashbrown-0.14.3", + srcs = [":hashbrown-0.14.3.crate"], crate = "hashbrown", - crate_root = "hashbrown-0.14.0.crate/src/lib.rs", + crate_root = "hashbrown-0.14.3.crate/src/lib.rs", edition = "2021", features = [ "ahash", @@ -5082,27 +5295,27 @@ cargo.rust_library( ], visibility = [], deps = [ - ":ahash-0.8.3", + ":ahash-0.8.6", ":allocator-api2-0.2.16", ], ) http_archive( - name = "hashlink-0.8.3.crate", - sha256 = "312f66718a2d7789ffef4f4b7b213138ed9f1eb3aa1d0d82fc99f88fb3ffd26f", - strip_prefix = "hashlink-0.8.3", - urls = ["https://crates.io/api/v1/crates/hashlink/0.8.3/download"], + name = "hashlink-0.8.4.crate", + sha256 = "e8094feaf31ff591f651a2664fb9cfd92bba7a60ce3197265e9482ebe753c8f7", + strip_prefix = "hashlink-0.8.4", + urls = ["https://crates.io/api/v1/crates/hashlink/0.8.4/download"], visibility = [], ) cargo.rust_library( - name = "hashlink-0.8.3", - srcs = [":hashlink-0.8.3.crate"], + name = "hashlink-0.8.4", + srcs = [":hashlink-0.8.4.crate"], crate = "hashlink", - crate_root = "hashlink-0.8.3.crate/src/lib.rs", + crate_root = "hashlink-0.8.4.crate/src/lib.rs", edition = "2018", visibility = [], - deps = [":hashbrown-0.14.0"], + deps = [":hashbrown-0.14.3"], ) http_archive( @@ -5280,6 +5493,31 @@ cargo.rust_library( visibility = [], ) +http_archive( + name = "home-0.5.5.crate", + sha256 = "5444c27eef6923071f7ebcc33e3444508466a76f7a2b93da00ed6e19f30c1ddb", + strip_prefix = "home-0.5.5", + urls = 
["https://crates.io/api/v1/crates/home/0.5.5/download"], + visibility = [], +) + +cargo.rust_library( + name = "home-0.5.5", + srcs = [":home-0.5.5.crate"], + crate = "home", + crate_root = "home-0.5.5.crate/src/lib.rs", + edition = "2018", + platform = { + "windows-gnu": dict( + deps = [":windows-sys-0.48.0"], + ), + "windows-msvc": dict( + deps = [":windows-sys-0.48.0"], + ), + }, + visibility = [], +) + http_archive( name = "html-escape-0.2.13.crate", sha256 = "6d1ad449764d627e22bfd7cd5e8868264fc9236e07c752972b4080cd351cb476", @@ -5299,56 +5537,56 @@ cargo.rust_library( "std", ], visibility = [], - deps = [":utf8-width-0.1.6"], + deps = [":utf8-width-0.1.7"], ) alias( name = "http", - actual = ":http-0.2.9", + actual = ":http-0.2.11", visibility = ["PUBLIC"], ) http_archive( - name = "http-0.2.9.crate", - sha256 = "bd6effc99afb63425aff9b05836f029929e345a6148a14b7ecd5ab67af944482", - strip_prefix = "http-0.2.9", - urls = ["https://crates.io/api/v1/crates/http/0.2.9/download"], + name = "http-0.2.11.crate", + sha256 = "8947b1a6fad4393052c7ba1f4cd97bed3e953a95c79c92ad9b051a04611d9fbb", + strip_prefix = "http-0.2.11", + urls = ["https://crates.io/api/v1/crates/http/0.2.11/download"], visibility = [], ) cargo.rust_library( - name = "http-0.2.9", - srcs = [":http-0.2.9.crate"], + name = "http-0.2.11", + srcs = [":http-0.2.11.crate"], crate = "http", - crate_root = "http-0.2.9.crate/src/lib.rs", + crate_root = "http-0.2.11.crate/src/lib.rs", edition = "2018", visibility = [], deps = [ - ":bytes-1.4.0", + ":bytes-1.5.0", ":fnv-1.0.7", - ":itoa-1.0.9", + ":itoa-1.0.10", ], ) http_archive( - name = "http-body-0.4.5.crate", - sha256 = "d5f38f16d184e36f2408a55281cd658ecbd3ca05cce6d6510a176eca393e26d1", - strip_prefix = "http-body-0.4.5", - urls = ["https://crates.io/api/v1/crates/http-body/0.4.5/download"], + name = "http-body-0.4.6.crate", + sha256 = "7ceab25649e9960c0311ea418d17bee82c0dcec1bd053b5f9a66e265a693bed2", + strip_prefix = "http-body-0.4.6", + urls = 
["https://crates.io/api/v1/crates/http-body/0.4.6/download"], visibility = [], ) cargo.rust_library( - name = "http-body-0.4.5", - srcs = [":http-body-0.4.5.crate"], + name = "http-body-0.4.6", + srcs = [":http-body-0.4.6.crate"], crate = "http_body", - crate_root = "http-body-0.4.5.crate/src/lib.rs", + crate_root = "http-body-0.4.6.crate/src/lib.rs", edition = "2018", visibility = [], deps = [ - ":bytes-1.4.0", - ":http-0.2.9", - ":pin-project-lite-0.2.12", + ":bytes-1.5.0", + ":http-0.2.11", + ":pin-project-lite-0.2.13", ], ) @@ -5442,46 +5680,46 @@ cargo.rust_library( ], visibility = [], deps = [ - ":bytes-1.4.0", + ":bytes-1.5.0", ":futures-channel-0.3.29", ":futures-core-0.3.29", ":futures-util-0.3.29", - ":h2-0.3.21", - ":http-0.2.9", - ":http-body-0.4.5", + ":h2-0.3.22", + ":http-0.2.11", + ":http-body-0.4.6", ":httparse-1.8.0", ":httpdate-1.0.3", - ":itoa-1.0.9", - ":pin-project-lite-0.2.12", - ":socket2-0.4.9", - ":tokio-1.32.0", + ":itoa-1.0.10", + ":pin-project-lite-0.2.13", + ":socket2-0.4.10", + ":tokio-1.35.0", ":tower-service-0.3.2", - ":tracing-0.1.37", + ":tracing-0.1.40", ":want-0.3.1", ], ) http_archive( - name = "hyper-rustls-0.24.1.crate", - sha256 = "8d78e1e73ec14cf7375674f74d7dde185c8206fd9dea6fb6295e8a98098aaa97", - strip_prefix = "hyper-rustls-0.24.1", - urls = ["https://crates.io/api/v1/crates/hyper-rustls/0.24.1/download"], + name = "hyper-rustls-0.24.2.crate", + sha256 = "ec3efd23720e2049821a693cbc7e65ea87c72f1c58ff2f9522ff332b1491e590", + strip_prefix = "hyper-rustls-0.24.2", + urls = ["https://crates.io/api/v1/crates/hyper-rustls/0.24.2/download"], visibility = [], ) cargo.rust_library( - name = "hyper-rustls-0.24.1", - srcs = [":hyper-rustls-0.24.1.crate"], + name = "hyper-rustls-0.24.2", + srcs = [":hyper-rustls-0.24.2.crate"], crate = "hyper_rustls", - crate_root = "hyper-rustls-0.24.1.crate/src/lib.rs", + crate_root = "hyper-rustls-0.24.2.crate/src/lib.rs", edition = "2021", visibility = [], deps = [ ":futures-util-0.3.29", - 
":http-0.2.9", + ":http-0.2.11", ":hyper-0.14.27", - ":rustls-0.21.6", - ":tokio-1.32.0", + ":rustls-0.21.10", + ":tokio-1.35.0", ":tokio-rustls-0.24.1", ], ) @@ -5503,8 +5741,8 @@ cargo.rust_library( visibility = [], deps = [ ":hyper-0.14.27", - ":pin-project-lite-0.2.12", - ":tokio-1.32.0", + ":pin-project-lite-0.2.13", + ":tokio-1.35.0", ":tokio-io-timeout-1.2.0", ], ) @@ -5532,37 +5770,37 @@ cargo.rust_library( ":hex-0.4.3", ":hyper-0.14.27", ":pin-project-1.1.3", - ":tokio-1.32.0", + ":tokio-1.35.0", ], ) http_archive( - name = "iana-time-zone-0.1.57.crate", - sha256 = "2fad5b825842d2b38bd206f3e81d6957625fd7f0a361e345c30e01a0ae2dd613", - strip_prefix = "iana-time-zone-0.1.57", - urls = ["https://crates.io/api/v1/crates/iana-time-zone/0.1.57/download"], + name = "iana-time-zone-0.1.58.crate", + sha256 = "8326b86b6cff230b97d0d312a6c40a60726df3332e721f72a1b035f451663b20", + strip_prefix = "iana-time-zone-0.1.58", + urls = ["https://crates.io/api/v1/crates/iana-time-zone/0.1.58/download"], visibility = [], ) cargo.rust_library( - name = "iana-time-zone-0.1.57", - srcs = [":iana-time-zone-0.1.57.crate"], + name = "iana-time-zone-0.1.58", + srcs = [":iana-time-zone-0.1.58.crate"], crate = "iana_time_zone", - crate_root = "iana-time-zone-0.1.57.crate/src/lib.rs", + crate_root = "iana-time-zone-0.1.58.crate/src/lib.rs", edition = "2018", features = ["fallback"], platform = { "macos-arm64": dict( - deps = [":core-foundation-sys-0.8.4"], + deps = [":core-foundation-sys-0.8.6"], ), "macos-x86_64": dict( - deps = [":core-foundation-sys-0.8.4"], + deps = [":core-foundation-sys-0.8.6"], ), "windows-gnu": dict( - deps = [":windows-0.48.0"], + deps = [":windows-core-0.51.1"], ), "windows-msvc": dict( - deps = [":windows-0.48.0"], + deps = [":windows-core-0.51.1"], ), }, visibility = [], @@ -5586,18 +5824,18 @@ cargo.rust_library( ) http_archive( - name = "idna-0.4.0.crate", - sha256 = "7d20d6b07bfbc108882d88ed8e37d39636dcc260e15e30c45e6ba089610b917c", - strip_prefix = 
"idna-0.4.0", - urls = ["https://crates.io/api/v1/crates/idna/0.4.0/download"], + name = "idna-0.5.0.crate", + sha256 = "634d9b1461af396cad843f47fdba5597a4f9e6ddd4bfb6ff5d85028c25cb12f6", + strip_prefix = "idna-0.5.0", + urls = ["https://crates.io/api/v1/crates/idna/0.5.0/download"], visibility = [], ) cargo.rust_library( - name = "idna-0.4.0", - srcs = [":idna-0.4.0.crate"], + name = "idna-0.5.0", + srcs = [":idna-0.5.0.crate"], crate = "idna", - crate_root = "idna-0.4.0.crate/src/lib.rs", + crate_root = "idna-0.5.0.crate/src/lib.rs", edition = "2018", features = [ "alloc", @@ -5606,7 +5844,7 @@ cargo.rust_library( ], visibility = [], deps = [ - ":unicode-bidi-0.3.13", + ":unicode-bidi-0.3.14", ":unicode-normalization-0.1.22", ], ) @@ -5634,48 +5872,47 @@ cargo.rust_library( proc_macro = True, visibility = [], deps = [ - ":ignore-0.4.20", - ":proc-macro2-1.0.66", + ":ignore-0.4.21", + ":proc-macro2-1.0.70", ":quote-1.0.33", - ":serde-1.0.186", + ":serde-1.0.193", ":syn-1.0.109", - ":toml-0.7.6", + ":toml-0.7.8", ":unicode-xid-0.2.4", ], ) http_archive( - name = "ignore-0.4.20.crate", - sha256 = "dbe7873dab538a9a44ad79ede1faf5f30d49f9a5c883ddbab48bce81b64b7492", - strip_prefix = "ignore-0.4.20", - urls = ["https://crates.io/api/v1/crates/ignore/0.4.20/download"], + name = "ignore-0.4.21.crate", + sha256 = "747ad1b4ae841a78e8aba0d63adbfbeaea26b517b63705d47856b73015d27060", + strip_prefix = "ignore-0.4.21", + urls = ["https://crates.io/api/v1/crates/ignore/0.4.21/download"], visibility = [], ) cargo.rust_library( - name = "ignore-0.4.20", - srcs = [":ignore-0.4.20.crate"], + name = "ignore-0.4.21", + srcs = [":ignore-0.4.21.crate"], crate = "ignore", - crate_root = "ignore-0.4.20.crate/src/lib.rs", - edition = "2018", + crate_root = "ignore-0.4.21.crate/src/lib.rs", + edition = "2021", platform = { "windows-gnu": dict( - deps = [":winapi-util-0.1.5"], + deps = [":winapi-util-0.1.6"], ), "windows-msvc": dict( - deps = [":winapi-util-0.1.5"], + deps = 
[":winapi-util-0.1.6"], ), }, visibility = [], deps = [ - ":globset-0.4.13", - ":lazy_static-1.4.0", + ":crossbeam-deque-0.8.3", + ":globset-0.4.14", ":log-0.4.20", - ":memchr-2.5.0", - ":regex-1.9.3", + ":memchr-2.6.4", + ":regex-automata-0.4.3", ":same-file-1.0.6", - ":thread_local-1.1.7", - ":walkdir-2.3.3", + ":walkdir-2.4.0", ], ) @@ -5720,23 +5957,23 @@ cargo.rust_library( visibility = [], deps = [ ":hashbrown-0.12.3", - ":serde-1.0.186", + ":serde-1.0.193", ], ) http_archive( - name = "indexmap-2.0.0.crate", - sha256 = "d5477fe2230a79769d8dc68e0eabf5437907c0457a5614a9e8dddb67f65eb65d", - strip_prefix = "indexmap-2.0.0", - urls = ["https://crates.io/api/v1/crates/indexmap/2.0.0/download"], + name = "indexmap-2.1.0.crate", + sha256 = "d530e1a18b1cb4c484e6e34556a0d948706958449fca0cab753d649f2bce3d1f", + strip_prefix = "indexmap-2.1.0", + urls = ["https://crates.io/api/v1/crates/indexmap/2.1.0/download"], visibility = [], ) cargo.rust_library( - name = "indexmap-2.0.0", - srcs = [":indexmap-2.0.0.crate"], + name = "indexmap-2.1.0", + srcs = [":indexmap-2.1.0.crate"], crate = "indexmap", - crate_root = "indexmap-2.0.0.crate/src/lib.rs", + crate_root = "indexmap-2.1.0.crate/src/lib.rs", edition = "2021", features = [ "default", @@ -5747,31 +5984,31 @@ cargo.rust_library( visibility = [], deps = [ ":equivalent-1.0.1", - ":hashbrown-0.14.0", - ":serde-1.0.186", + ":hashbrown-0.14.3", + ":serde-1.0.193", ], ) alias( name = "indicatif", - actual = ":indicatif-0.17.6", + actual = ":indicatif-0.17.7", visibility = ["PUBLIC"], ) http_archive( - name = "indicatif-0.17.6.crate", - sha256 = "0b297dc40733f23a0e52728a58fa9489a5b7638a324932de16b41adc3ef80730", - strip_prefix = "indicatif-0.17.6", - urls = ["https://crates.io/api/v1/crates/indicatif/0.17.6/download"], + name = "indicatif-0.17.7.crate", + sha256 = "fb28741c9db9a713d93deb3bb9515c20788cef5815265bee4980e87bde7e0f25", + strip_prefix = "indicatif-0.17.7", + urls = 
["https://crates.io/api/v1/crates/indicatif/0.17.7/download"], visibility = [], ) cargo.rust_library( - name = "indicatif-0.17.6", - srcs = [":indicatif-0.17.6.crate"], + name = "indicatif-0.17.7", + srcs = [":indicatif-0.17.7.crate"], crate = "indicatif", - crate_root = "indicatif-0.17.6.crate/src/lib.rs", - edition = "2018", + crate_root = "indicatif-0.17.7.crate/src/lib.rs", + edition = "2021", features = [ "default", "unicode-width", @@ -5780,30 +6017,30 @@ cargo.rust_library( deps = [ ":console-0.15.7", ":number_prefix-0.4.0", - ":portable-atomic-1.4.2", - ":unicode-width-0.1.10", + ":portable-atomic-1.6.0", + ":unicode-width-0.1.11", ], ) alias( name = "indoc", - actual = ":indoc-2.0.3", + actual = ":indoc-2.0.4", visibility = ["PUBLIC"], ) http_archive( - name = "indoc-2.0.3.crate", - sha256 = "2c785eefb63ebd0e33416dfcb8d6da0bf27ce752843a45632a67bf10d4d4b5c4", - strip_prefix = "indoc-2.0.3", - urls = ["https://crates.io/api/v1/crates/indoc/2.0.3/download"], + name = "indoc-2.0.4.crate", + sha256 = "1e186cfbae8084e513daff4240b4797e342f988cecda4fb6c939150f96315fd8", + strip_prefix = "indoc-2.0.4", + urls = ["https://crates.io/api/v1/crates/indoc/2.0.4/download"], visibility = [], ) cargo.rust_library( - name = "indoc-2.0.3", - srcs = [":indoc-2.0.3.crate"], + name = "indoc-2.0.4", + srcs = [":indoc-2.0.4.crate"], crate = "indoc", - crate_root = "indoc-2.0.3.crate/src/lib.rs", + crate_root = "indoc-2.0.4.crate/src/lib.rs", edition = "2021", proc_macro = True, visibility = [], @@ -5838,12 +6075,12 @@ cargo.rust_library( deps = [ ":bitflags-1.3.2", ":crossterm-0.25.0", - ":dyn-clone-1.0.13", + ":dyn-clone-1.0.16", ":lazy_static-1.4.0", ":newline-converter-0.2.2", - ":thiserror-1.0.47", + ":thiserror-1.0.50", ":unicode-segmentation-1.10.1", - ":unicode-width-0.1.10", + ":unicode-width-0.1.11", ], ) @@ -5866,61 +6103,18 @@ cargo.rust_library( ) http_archive( - name = "io-lifetimes-1.0.11.crate", - sha256 = 
"eae7b9aee968036d54dce06cebaefd919e4472e753296daccd6d344e3e2df0c2", - strip_prefix = "io-lifetimes-1.0.11", - urls = ["https://crates.io/api/v1/crates/io-lifetimes/1.0.11/download"], - visibility = [], -) - -cargo.rust_library( - name = "io-lifetimes-1.0.11", - srcs = [":io-lifetimes-1.0.11.crate"], - crate = "io_lifetimes", - crate_root = "io-lifetimes-1.0.11.crate/src/lib.rs", - edition = "2018", - features = [ - "close", - "hermit-abi", - "libc", - "windows-sys", - ], - platform = { - "linux-arm64": dict( - deps = [":libc-0.2.147"], - ), - "linux-x86_64": dict( - deps = [":libc-0.2.147"], - ), - "macos-arm64": dict( - deps = [":libc-0.2.147"], - ), - "macos-x86_64": dict( - deps = [":libc-0.2.147"], - ), - "windows-gnu": dict( - deps = [":windows-sys-0.48.0"], - ), - "windows-msvc": dict( - deps = [":windows-sys-0.48.0"], - ), - }, - visibility = [], -) - -http_archive( - name = "ipnet-2.8.0.crate", - sha256 = "28b29a3cd74f0f4598934efe3aeba42bae0eb4680554128851ebbecb02af14e6", - strip_prefix = "ipnet-2.8.0", - urls = ["https://crates.io/api/v1/crates/ipnet/2.8.0/download"], + name = "ipnet-2.9.0.crate", + sha256 = "8f518f335dce6725a761382244631d86cf0ccb2863413590b31338feb467f9c3", + strip_prefix = "ipnet-2.9.0", + urls = ["https://crates.io/api/v1/crates/ipnet/2.9.0/download"], visibility = [], ) cargo.rust_library( - name = "ipnet-2.8.0", - srcs = [":ipnet-2.8.0.crate"], + name = "ipnet-2.9.0", + srcs = [":ipnet-2.9.0.crate"], crate = "ipnet", - crate_root = "ipnet-2.8.0.crate/src/lib.rs", + crate_root = "ipnet-2.9.0.crate/src/lib.rs", edition = "2018", features = [ "default", @@ -5944,44 +6138,7 @@ cargo.rust_library( crate_root = "is-docker-0.2.0.crate/src/lib.rs", edition = "2015", visibility = [], - deps = [":once_cell-1.18.0"], -) - -http_archive( - name = "is-terminal-0.4.9.crate", - sha256 = "cb0889898416213fab133e1d33a0e5858a48177452750691bde3666d0fdbaf8b", - strip_prefix = "is-terminal-0.4.9", - urls = 
["https://crates.io/api/v1/crates/is-terminal/0.4.9/download"], - visibility = [], -) - -cargo.rust_library( - name = "is-terminal-0.4.9", - srcs = [":is-terminal-0.4.9.crate"], - crate = "is_terminal", - crate_root = "is-terminal-0.4.9.crate/src/lib.rs", - edition = "2018", - platform = { - "linux-arm64": dict( - deps = [":rustix-0.38.8"], - ), - "linux-x86_64": dict( - deps = [":rustix-0.38.8"], - ), - "macos-arm64": dict( - deps = [":rustix-0.38.8"], - ), - "macos-x86_64": dict( - deps = [":rustix-0.38.8"], - ), - "windows-gnu": dict( - deps = [":windows-sys-0.48.0"], - ), - "windows-msvc": dict( - deps = [":windows-sys-0.48.0"], - ), - }, - visibility = [], + deps = [":once_cell-1.19.0"], ) http_archive( @@ -6001,7 +6158,7 @@ cargo.rust_library( visibility = [], deps = [ ":is-docker-0.2.0", - ":once_cell-1.18.0", + ":once_cell-1.19.0", ], ) @@ -6035,78 +6192,101 @@ cargo.rust_library( ) http_archive( - name = "itoa-1.0.9.crate", - sha256 = "af150ab688ff2122fcef229be89cb50dd66af9e01a4ff320cc137eecc9bacc38", - strip_prefix = "itoa-1.0.9", - urls = ["https://crates.io/api/v1/crates/itoa/1.0.9/download"], + name = "itertools-0.12.0.crate", + sha256 = "25db6b064527c5d482d0423354fcd07a89a2dfe07b67892e62411946db7f07b0", + strip_prefix = "itertools-0.12.0", + urls = ["https://crates.io/api/v1/crates/itertools/0.12.0/download"], + visibility = [], +) + +cargo.rust_library( + name = "itertools-0.12.0", + srcs = [":itertools-0.12.0.crate"], + crate = "itertools", + crate_root = "itertools-0.12.0.crate/src/lib.rs", + edition = "2018", + features = [ + "default", + "use_alloc", + "use_std", + ], + visibility = [], + deps = [":either-1.9.0"], +) + +http_archive( + name = "itoa-1.0.10.crate", + sha256 = "b1a46d1a171d865aa5f83f92695765caa047a9b4cbae2cbf37dbd613a793fd4c", + strip_prefix = "itoa-1.0.10", + urls = ["https://crates.io/api/v1/crates/itoa/1.0.10/download"], visibility = [], ) cargo.rust_library( - name = "itoa-1.0.9", - srcs = [":itoa-1.0.9.crate"], + name = 
"itoa-1.0.10", + srcs = [":itoa-1.0.10.crate"], crate = "itoa", - crate_root = "itoa-1.0.9.crate/src/lib.rs", + crate_root = "itoa-1.0.10.crate/src/lib.rs", edition = "2018", visibility = [], ) alias( name = "jwt-simple", - actual = ":jwt-simple-0.11.6", + actual = ":jwt-simple-0.11.9", visibility = ["PUBLIC"], ) http_archive( - name = "jwt-simple-0.11.6.crate", - sha256 = "733741e7bcd1532b56c9ba6c698c069f274f3782ad956f0d2c7f31650cedaa1b", - strip_prefix = "jwt-simple-0.11.6", - urls = ["https://crates.io/api/v1/crates/jwt-simple/0.11.6/download"], + name = "jwt-simple-0.11.9.crate", + sha256 = "357892bb32159d763abdea50733fadcb9a8e1c319a9aa77592db8555d05af83e", + strip_prefix = "jwt-simple-0.11.9", + urls = ["https://crates.io/api/v1/crates/jwt-simple/0.11.9/download"], visibility = [], ) cargo.rust_library( - name = "jwt-simple-0.11.6", - srcs = [":jwt-simple-0.11.6.crate"], + name = "jwt-simple-0.11.9", + srcs = [":jwt-simple-0.11.9.crate"], crate = "jwt_simple", - crate_root = "jwt-simple-0.11.6.crate/src/lib.rs", + crate_root = "jwt-simple-0.11.9.crate/src/lib.rs", edition = "2018", visibility = [], deps = [ ":anyhow-1.0.75", ":binstring-0.1.1", - ":coarsetime-0.1.23", + ":coarsetime-0.1.33", ":ct-codecs-1.1.1", ":ed25519-compact-2.0.4", ":hmac-sha1-compact-1.1.4", ":hmac-sha256-1.1.7", ":hmac-sha512-1.1.5", - ":k256-0.13.1", + ":k256-0.13.2", ":p256-0.13.2", ":p384-0.13.0", ":rand-0.8.5", ":rsa-0.7.2", - ":serde-1.0.186", - ":serde_json-1.0.105", + ":serde-1.0.193", + ":serde_json-1.0.108", ":spki-0.6.0", - ":thiserror-1.0.47", - ":zeroize-1.6.0", + ":thiserror-1.0.50", + ":zeroize-1.7.0", ], ) http_archive( - name = "k256-0.13.1.crate", - sha256 = "cadb76004ed8e97623117f3df85b17aaa6626ab0b0831e6573f104df16cd1bcc", - strip_prefix = "k256-0.13.1", - urls = ["https://crates.io/api/v1/crates/k256/0.13.1/download"], + name = "k256-0.13.2.crate", + sha256 = "3f01b677d82ef7a676aa37e099defd83a28e15687112cafdd112d60236b6115b", + strip_prefix = "k256-0.13.2", + urls = 
["https://crates.io/api/v1/crates/k256/0.13.2/download"], visibility = [], ) cargo.rust_library( - name = "k256-0.13.1", - srcs = [":k256-0.13.1.crate"], + name = "k256-0.13.2", + srcs = [":k256-0.13.2.crate"], crate = "k256", - crate_root = "k256-0.13.1.crate/src/lib.rs", + crate_root = "k256-0.13.2.crate/src/lib.rs", edition = "2021", features = [ "alloc", @@ -6126,15 +6306,15 @@ cargo.rust_library( "std", ], named_deps = { - "ecdsa_core": ":ecdsa-0.16.8", + "ecdsa_core": ":ecdsa-0.16.9", }, visibility = [], deps = [ ":cfg-if-1.0.0", - ":elliptic-curve-0.13.5", - ":once_cell-1.18.0", - ":sha2-0.10.7", - ":signature-2.1.0", + ":elliptic-curve-0.13.8", + ":once_cell-1.19.0", + ":sha2-0.10.8", + ":signature-2.2.0", ], ) @@ -6181,37 +6361,37 @@ cargo.rust_library( crate_root = "lib0-0.16.10.crate/src/lib.rs", edition = "2018", visibility = [], - deps = [":thiserror-1.0.47"], + deps = [":thiserror-1.0.50"], ) http_archive( - name = "libc-0.2.147.crate", - sha256 = "b4668fb0ea861c1df094127ac5f1da3409a82116a4ba74fca2e58ef927159bb3", - strip_prefix = "libc-0.2.147", - urls = ["https://crates.io/api/v1/crates/libc/0.2.147/download"], + name = "libc-0.2.151.crate", + sha256 = "302d7ab3130588088d277783b1e2d2e10c9e9e4a16dd9050e6ec93fb3e7048f4", + strip_prefix = "libc-0.2.151", + urls = ["https://crates.io/api/v1/crates/libc/0.2.151/download"], visibility = [], ) cargo.rust_library( - name = "libc-0.2.147", - srcs = [":libc-0.2.147.crate"], + name = "libc-0.2.151", + srcs = [":libc-0.2.151.crate"], crate = "libc", - crate_root = "libc-0.2.147.crate/src/lib.rs", + crate_root = "libc-0.2.151.crate/src/lib.rs", edition = "2015", features = [ "default", "extra_traits", "std", ], - rustc_flags = ["@$(location :libc-0.2.147-build-script-run[rustc_flags])"], + rustc_flags = ["@$(location :libc-0.2.151-build-script-run[rustc_flags])"], visibility = [], ) cargo.rust_binary( - name = "libc-0.2.147-build-script-build", - srcs = [":libc-0.2.147.crate"], + name = 
"libc-0.2.151-build-script-build", + srcs = [":libc-0.2.151.crate"], crate = "build_script_build", - crate_root = "libc-0.2.147.crate/build.rs", + crate_root = "libc-0.2.151.crate/build.rs", edition = "2015", features = [ "default", @@ -6222,30 +6402,30 @@ cargo.rust_binary( ) buildscript_run( - name = "libc-0.2.147-build-script-run", + name = "libc-0.2.151-build-script-run", package_name = "libc", - buildscript_rule = ":libc-0.2.147-build-script-build", + buildscript_rule = ":libc-0.2.151-build-script-build", features = [ "default", "extra_traits", "std", ], - version = "0.2.147", + version = "0.2.151", ) http_archive( - name = "libm-0.2.7.crate", - sha256 = "f7012b1bbb0719e1097c47611d3898568c546d597c2e74d66f6087edd5233ff4", - strip_prefix = "libm-0.2.7", - urls = ["https://crates.io/api/v1/crates/libm/0.2.7/download"], + name = "libm-0.2.8.crate", + sha256 = "4ec2a862134d2a7d32d7983ddcdd1c4923530833c9f2ea1a44fc5fa473989058", + strip_prefix = "libm-0.2.8", + urls = ["https://crates.io/api/v1/crates/libm/0.2.8/download"], visibility = [], ) cargo.rust_library( - name = "libm-0.2.7", - srcs = [":libm-0.2.7.crate"], + name = "libm-0.2.8", + srcs = [":libm-0.2.8.crate"], crate = "libm", - crate_root = "libm-0.2.7.crate/src/lib.rs", + crate_root = "libm-0.2.8.crate/src/lib.rs", edition = "2018", features = ["default"], preferred_linkage = "static", @@ -6504,7 +6684,7 @@ cargo.rust_library( edition = "2015", visibility = [], deps = [ - ":libc-0.2.147", + ":libc-0.2.151", ":libsodium-sys-0.2.7-libsodium", ], ) @@ -6760,64 +6940,43 @@ cxx_library( ) http_archive( - name = "linux-raw-sys-0.3.8.crate", - sha256 = "ef53942eb7bf7ff43a617b3e2c1c4a5ecf5944a7c1bc12d7ee39bbb15e5c1519", - strip_prefix = "linux-raw-sys-0.3.8", - urls = ["https://crates.io/api/v1/crates/linux-raw-sys/0.3.8/download"], - visibility = [], -) - -cargo.rust_library( - name = "linux-raw-sys-0.3.8", - srcs = [":linux-raw-sys-0.3.8.crate"], - crate = "linux_raw_sys", - crate_root = 
"linux-raw-sys-0.3.8.crate/src/lib.rs", - edition = "2018", - features = [ - "errno", - "general", - "ioctl", - "no_std", - ], - visibility = [], -) - -http_archive( - name = "linux-raw-sys-0.4.5.crate", - sha256 = "57bcfdad1b858c2db7c38303a6d2ad4dfaf5eb53dfeb0910128b2c26d6158503", - strip_prefix = "linux-raw-sys-0.4.5", - urls = ["https://crates.io/api/v1/crates/linux-raw-sys/0.4.5/download"], + name = "linux-raw-sys-0.4.12.crate", + sha256 = "c4cd1a83af159aa67994778be9070f0ae1bd732942279cabb14f86f986a21456", + strip_prefix = "linux-raw-sys-0.4.12", + urls = ["https://crates.io/api/v1/crates/linux-raw-sys/0.4.12/download"], visibility = [], ) cargo.rust_library( - name = "linux-raw-sys-0.4.5", - srcs = [":linux-raw-sys-0.4.5.crate"], + name = "linux-raw-sys-0.4.12", + srcs = [":linux-raw-sys-0.4.12.crate"], crate = "linux_raw_sys", - crate_root = "linux-raw-sys-0.4.5.crate/src/lib.rs", + crate_root = "linux-raw-sys-0.4.12.crate/src/lib.rs", edition = "2021", features = [ + "elf", "errno", "general", "ioctl", "no_std", + "std", ], visibility = [], ) http_archive( - name = "lock_api-0.4.10.crate", - sha256 = "c1cc9717a20b1bb222f333e6a92fd32f7d8a18ddc5a3191a11af45dcbf4dcd16", - strip_prefix = "lock_api-0.4.10", - urls = ["https://crates.io/api/v1/crates/lock_api/0.4.10/download"], + name = "lock_api-0.4.11.crate", + sha256 = "3c168f8615b12bc01f9c17e2eb0cc07dcae1940121185446edc3744920e8ef45", + strip_prefix = "lock_api-0.4.11", + urls = ["https://crates.io/api/v1/crates/lock_api/0.4.11/download"], visibility = [], ) cargo.rust_library( - name = "lock_api-0.4.10", - srcs = [":lock_api-0.4.10.crate"], + name = "lock_api-0.4.11", + srcs = [":lock_api-0.4.11.crate"], crate = "lock_api", - crate_root = "lock_api-0.4.10.crate/src/lib.rs", + crate_root = "lock_api-0.4.11.crate/src/lib.rs", edition = "2018", features = [ "atomic_usize", @@ -6864,18 +7023,18 @@ cargo.rust_library( ) http_archive( - name = "matchit-0.7.2.crate", - sha256 = 
"ed1202b2a6f884ae56f04cff409ab315c5ce26b5e58d7412e484f01fd52f52ef", - strip_prefix = "matchit-0.7.2", - urls = ["https://crates.io/api/v1/crates/matchit/0.7.2/download"], + name = "matchit-0.7.3.crate", + sha256 = "0e7465ac9959cc2b1404e8e2367b43684a6d13790fe23056cc8c6c5a6b7bcb94", + strip_prefix = "matchit-0.7.3", + urls = ["https://crates.io/api/v1/crates/matchit/0.7.3/download"], visibility = [], ) cargo.rust_library( - name = "matchit-0.7.2", - srcs = [":matchit-0.7.2.crate"], + name = "matchit-0.7.3", + srcs = [":matchit-0.7.3.crate"], crate = "matchit", - crate_root = "matchit-0.7.2.crate/src/lib.rs", + crate_root = "matchit-0.7.3.crate/src/lib.rs", edition = "2021", features = ["default"], visibility = [], @@ -6899,32 +7058,35 @@ cargo.rust_library( proc_macro = True, visibility = [], deps = [ - ":proc-macro2-1.0.66", + ":proc-macro2-1.0.70", ":quote-1.0.33", ":syn-1.0.109", ], ) http_archive( - name = "md-5-0.10.5.crate", - sha256 = "6365506850d44bff6e2fbcb5176cf63650e48bd45ef2fe2665ae1570e0f4b9ca", - strip_prefix = "md-5-0.10.5", - urls = ["https://crates.io/api/v1/crates/md-5/0.10.5/download"], + name = "md-5-0.10.6.crate", + sha256 = "d89e7ee0cfbedfc4da3340218492196241d89eefb6dab27de5df917a6d2e78cf", + strip_prefix = "md-5-0.10.6", + urls = ["https://crates.io/api/v1/crates/md-5/0.10.6/download"], visibility = [], ) cargo.rust_library( - name = "md-5-0.10.5", - srcs = [":md-5-0.10.5.crate"], + name = "md-5-0.10.6", + srcs = [":md-5-0.10.6.crate"], crate = "md5", - crate_root = "md-5-0.10.5.crate/src/lib.rs", + crate_root = "md-5-0.10.6.crate/src/lib.rs", edition = "2018", features = [ "default", "std", ], visibility = [], - deps = [":digest-0.10.7"], + deps = [ + ":cfg-if-1.0.0", + ":digest-0.10.7", + ], ) http_archive( @@ -6949,20 +7111,21 @@ cargo.rust_library( ) http_archive( - name = "memchr-2.5.0.crate", - sha256 = "2dffe52ecf27772e601905b7522cb4ef790d2cc203488bbd0e2fe85fcb74566d", - strip_prefix = "memchr-2.5.0", - urls = 
["https://crates.io/api/v1/crates/memchr/2.5.0/download"], + name = "memchr-2.6.4.crate", + sha256 = "f665ee40bc4a3c5590afb1e9677db74a508659dfd71e126420da8274909a0167", + strip_prefix = "memchr-2.6.4", + urls = ["https://crates.io/api/v1/crates/memchr/2.6.4/download"], visibility = [], ) cargo.rust_library( - name = "memchr-2.5.0", - srcs = [":memchr-2.5.0.crate"], + name = "memchr-2.6.4", + srcs = [":memchr-2.6.4.crate"], crate = "memchr", - crate_root = "memchr-2.5.0.crate/src/lib.rs", - edition = "2018", + crate_root = "memchr-2.6.4.crate/src/lib.rs", + edition = "2021", features = [ + "alloc", "default", "std", ], @@ -6986,16 +7149,16 @@ cargo.rust_library( features = ["stable_deref_trait"], platform = { "linux-arm64": dict( - deps = [":libc-0.2.147"], + deps = [":libc-0.2.151"], ), "linux-x86_64": dict( - deps = [":libc-0.2.147"], + deps = [":libc-0.2.151"], ), "macos-arm64": dict( - deps = [":libc-0.2.147"], + deps = [":libc-0.2.151"], ), "macos-x86_64": dict( - deps = [":libc-0.2.147"], + deps = [":libc-0.2.151"], ), }, visibility = [], @@ -7038,6 +7201,24 @@ cargo.rust_library( visibility = [], ) +http_archive( + name = "memoffset-0.9.0.crate", + sha256 = "5a634b1c61a95585bd15607c6ab0c4e5b226e695ff2800ba0cdccddf208c406c", + strip_prefix = "memoffset-0.9.0", + urls = ["https://crates.io/api/v1/crates/memoffset/0.9.0/download"], + visibility = [], +) + +cargo.rust_library( + name = "memoffset-0.9.0", + srcs = [":memoffset-0.9.0.crate"], + crate = "memoffset", + crate_root = "memoffset-0.9.0.crate/src/lib.rs", + edition = "2015", + features = ["default"], + visibility = [], +) + http_archive( name = "mime-0.3.17.crate", sha256 = "6877bb514081ee2a7ff5ef9de3281f14a4dd4bceac4c09388074a6b5df8a139a", @@ -7134,18 +7315,18 @@ cargo.rust_library( ) http_archive( - name = "mio-0.8.8.crate", - sha256 = "927a765cd3fc26206e66b296465fa9d3e5ab003e651c1b3c060e7956d96b19d2", - strip_prefix = "mio-0.8.8", - urls = ["https://crates.io/api/v1/crates/mio/0.8.8/download"], + name 
= "mio-0.8.10.crate", + sha256 = "8f3d0b296e374a4e6f3c7b0a1f5a51d748a0d34c85e7dc48fc3fa9a87657fe09", + strip_prefix = "mio-0.8.10", + urls = ["https://crates.io/api/v1/crates/mio/0.8.10/download"], visibility = [], ) cargo.rust_library( - name = "mio-0.8.8", - srcs = [":mio-0.8.8.crate"], + name = "mio-0.8.10", + srcs = [":mio-0.8.10.crate"], crate = "mio", - crate_root = "mio-0.8.8.crate/src/lib.rs", + crate_root = "mio-0.8.10.crate/src/lib.rs", edition = "2018", features = [ "default", @@ -7156,16 +7337,16 @@ cargo.rust_library( ], platform = { "linux-arm64": dict( - deps = [":libc-0.2.147"], + deps = [":libc-0.2.151"], ), "linux-x86_64": dict( - deps = [":libc-0.2.147"], + deps = [":libc-0.2.151"], ), "macos-arm64": dict( - deps = [":libc-0.2.147"], + deps = [":libc-0.2.151"], ), "macos-x86_64": dict( - deps = [":libc-0.2.147"], + deps = [":libc-0.2.151"], ), "windows-gnu": dict( deps = [":windows-sys-0.48.0"], @@ -7195,13 +7376,13 @@ cargo.rust_library( features = ["default"], visibility = [], deps = [ - ":bytes-1.4.0", + ":bytes-1.5.0", ":encoding_rs-0.8.33", ":futures-util-0.3.29", - ":http-0.2.9", + ":http-0.2.11", ":httparse-1.8.0", ":log-0.4.20", - ":memchr-2.5.0", + ":memchr-2.6.4", ":mime-0.3.17", ":spin-0.9.8", ":version_check-0.9.4", @@ -7381,29 +7562,29 @@ cargo.rust_library( deps = [ ":bitflags-1.3.2", ":cfg-if-1.0.0", - ":libc-0.2.147", + ":libc-0.2.151", ], ) alias( name = "nix", - actual = ":nix-0.26.2", + actual = ":nix-0.26.4", visibility = ["PUBLIC"], ) http_archive( - name = "nix-0.26.2.crate", - sha256 = "bfdda3d196821d6af13126e40375cdf7da646a96114af134d5f417a9a1dc8e1a", - strip_prefix = "nix-0.26.2", - urls = ["https://crates.io/api/v1/crates/nix/0.26.2/download"], + name = "nix-0.26.4.crate", + sha256 = "598beaf3cc6fdd9a5dfb1630c2800c7acd31df7aaf0f565796fba2b53ca1af1b", + strip_prefix = "nix-0.26.4", + urls = ["https://crates.io/api/v1/crates/nix/0.26.4/download"], visibility = [], ) cargo.rust_library( - name = "nix-0.26.2", - srcs = 
[":nix-0.26.2.crate"], + name = "nix-0.26.4", + srcs = [":nix-0.26.4.crate"], crate = "nix", - crate_root = "nix-0.26.2.crate/src/lib.rs", + crate_root = "nix-0.26.4.crate/src/lib.rs", edition = "2018", features = [ "acct", @@ -7466,9 +7647,8 @@ cargo.rust_library( deps = [ ":bitflags-1.3.2", ":cfg-if-1.0.0", - ":libc-0.2.147", + ":libc-0.2.151", ":pin-utils-0.1.0", - ":static_assertions-1.1.0", ], ) @@ -7494,8 +7674,8 @@ cargo.rust_library( edition = "2021", visibility = [], deps = [ - ":byteorder-1.4.3", - ":data-encoding-2.4.0", + ":byteorder-1.5.0", + ":data-encoding-2.5.0", ":ed25519-dalek-1.0.1", ":log-0.4.20", ":rand-0.8.5", @@ -7504,28 +7684,28 @@ cargo.rust_library( ) http_archive( - name = "nkeys-0.3.1.crate", - sha256 = "3e9261eb915c785ea65708bc45ef43507ea46914e1a73f1412d1a38aba967c8e", - strip_prefix = "nkeys-0.3.1", - urls = ["https://crates.io/api/v1/crates/nkeys/0.3.1/download"], + name = "nkeys-0.3.2.crate", + sha256 = "aad178aad32087b19042ee36dfd450b73f5f934fbfb058b59b198684dfec4c47", + strip_prefix = "nkeys-0.3.2", + urls = ["https://crates.io/api/v1/crates/nkeys/0.3.2/download"], visibility = [], ) cargo.rust_library( - name = "nkeys-0.3.1", - srcs = [":nkeys-0.3.1.crate"], + name = "nkeys-0.3.2", + srcs = [":nkeys-0.3.2.crate"], crate = "nkeys", - crate_root = "nkeys-0.3.1.crate/src/lib.rs", + crate_root = "nkeys-0.3.2.crate/src/lib.rs", edition = "2021", visibility = [], deps = [ - ":byteorder-1.4.3", - ":data-encoding-2.4.0", - ":ed25519-1.5.3", - ":ed25519-dalek-1.0.1", + ":byteorder-1.5.0", + ":data-encoding-2.5.0", + ":ed25519-2.2.3", + ":ed25519-dalek-2.1.0", ":log-0.4.20", ":rand-0.8.5", - ":signatory-0.23.2", + ":signatory-0.27.1", ], ) @@ -7550,7 +7730,7 @@ cargo.rust_library( ], visibility = [], deps = [ - ":memchr-2.5.0", + ":memchr-2.6.4", ":minimal-lexical-0.2.1", ], ) @@ -7627,7 +7807,7 @@ cargo.rust_library( visibility = [], deps = [ ":num-integer-0.1.45", - ":num-traits-0.2.16", + ":num-traits-0.2.17", ], ) @@ -7680,15 +7860,15 
@@ cargo.rust_library( rustc_flags = ["@$(location :num-bigint-dig-0.8.4-build-script-run[rustc_flags])"], visibility = [], deps = [ - ":byteorder-1.4.3", + ":byteorder-1.5.0", ":lazy_static-1.4.0", - ":libm-0.2.7", + ":libm-0.2.8", ":num-integer-0.1.45", ":num-iter-0.1.43", - ":num-traits-0.2.16", + ":num-traits-0.2.17", ":rand-0.8.5", - ":smallvec-1.11.0", - ":zeroize-1.6.0", + ":smallvec-1.11.2", + ":zeroize-1.7.0", ], ) @@ -7743,7 +7923,7 @@ cargo.rust_library( ], rustc_flags = ["@$(location :num-integer-0.1.45-build-script-run[rustc_flags])"], visibility = [], - deps = [":num-traits-0.2.16"], + deps = [":num-traits-0.2.17"], ) cargo.rust_binary( @@ -7791,7 +7971,7 @@ cargo.rust_library( visibility = [], deps = [ ":num-integer-0.1.45", - ":num-traits-0.2.16", + ":num-traits-0.2.17", ], ) @@ -7813,18 +7993,18 @@ buildscript_run( ) http_archive( - name = "num-traits-0.2.16.crate", - sha256 = "f30b0abd723be7e2ffca1272140fac1a2f084c77ec3e123c192b66af1ee9e6c2", - strip_prefix = "num-traits-0.2.16", - urls = ["https://crates.io/api/v1/crates/num-traits/0.2.16/download"], + name = "num-traits-0.2.17.crate", + sha256 = "39e3200413f237f41ab11ad6d161bc7239c84dcb631773ccd7de3dfe4b5c267c", + strip_prefix = "num-traits-0.2.17", + urls = ["https://crates.io/api/v1/crates/num-traits/0.2.17/download"], visibility = [], ) cargo.rust_library( - name = "num-traits-0.2.16", - srcs = [":num-traits-0.2.16.crate"], + name = "num-traits-0.2.17", + srcs = [":num-traits-0.2.17.crate"], crate = "num_traits", - crate_root = "num-traits-0.2.16.crate/src/lib.rs", + crate_root = "num-traits-0.2.17.crate/src/lib.rs", edition = "2018", features = [ "default", @@ -7832,16 +8012,16 @@ cargo.rust_library( "libm", "std", ], - rustc_flags = ["@$(location :num-traits-0.2.16-build-script-run[rustc_flags])"], + rustc_flags = ["@$(location :num-traits-0.2.17-build-script-run[rustc_flags])"], visibility = [], - deps = [":libm-0.2.7"], + deps = [":libm-0.2.8"], ) cargo.rust_binary( - name = 
"num-traits-0.2.16-build-script-build", - srcs = [":num-traits-0.2.16.crate"], + name = "num-traits-0.2.17-build-script-build", + srcs = [":num-traits-0.2.17.crate"], crate = "build_script_build", - crate_root = "num-traits-0.2.16.crate/build.rs", + crate_root = "num-traits-0.2.17.crate/build.rs", edition = "2018", features = [ "default", @@ -7854,16 +8034,16 @@ cargo.rust_binary( ) buildscript_run( - name = "num-traits-0.2.16-build-script-run", + name = "num-traits-0.2.17-build-script-run", package_name = "num-traits", - buildscript_rule = ":num-traits-0.2.16-build-script-build", + buildscript_rule = ":num-traits-0.2.17-build-script-build", features = [ "default", "i128", "libm", "std", ], - version = "0.2.16", + version = "0.2.17", ) alias( @@ -7888,16 +8068,16 @@ cargo.rust_library( edition = "2015", platform = { "linux-arm64": dict( - deps = [":libc-0.2.147"], + deps = [":libc-0.2.151"], ), "linux-x86_64": dict( - deps = [":libc-0.2.147"], + deps = [":libc-0.2.151"], ), "macos-arm64": dict( - deps = [":libc-0.2.147"], + deps = [":libc-0.2.151"], ), "macos-x86_64": dict( - deps = [":libc-0.2.147"], + deps = [":libc-0.2.151"], ), }, visibility = [], @@ -7947,9 +8127,9 @@ cargo.rust_library( visibility = [], deps = [ ":proc-macro-crate-1.3.1", - ":proc-macro2-1.0.66", + ":proc-macro2-1.0.70", ":quote-1.0.33", - ":syn-2.0.29", + ":syn-2.0.40", ], ) @@ -7975,18 +8155,18 @@ cargo.rust_library( ) http_archive( - name = "object-0.32.0.crate", - sha256 = "77ac5bbd07aea88c60a577a1ce218075ffd59208b2d7ca97adf9bfc5aeb21ebe", - strip_prefix = "object-0.32.0", - urls = ["https://crates.io/api/v1/crates/object/0.32.0/download"], + name = "object-0.32.1.crate", + sha256 = "9cf5f9dd3933bd50a9e1f149ec995f39ae2c496d31fd772c1fd45ebc27e902b0", + strip_prefix = "object-0.32.1", + urls = ["https://crates.io/api/v1/crates/object/0.32.1/download"], visibility = [], ) cargo.rust_library( - name = "object-0.32.0", - srcs = [":object-0.32.0.crate"], + name = "object-0.32.1", + srcs = 
[":object-0.32.1.crate"], crate = "object", - crate_root = "object-0.32.0.crate/src/lib.rs", + crate_root = "object-0.32.1.crate/src/lib.rs", edition = "2018", features = [ "archive", @@ -7998,28 +8178,28 @@ cargo.rust_library( "unaligned", ], visibility = [], - deps = [":memchr-2.5.0"], + deps = [":memchr-2.6.4"], ) alias( name = "once_cell", - actual = ":once_cell-1.18.0", + actual = ":once_cell-1.19.0", visibility = ["PUBLIC"], ) http_archive( - name = "once_cell-1.18.0.crate", - sha256 = "dd8b5dd2ae5ed71462c540258bedcb51965123ad7e7ccf4b9a8cafaa4a63576d", - strip_prefix = "once_cell-1.18.0", - urls = ["https://crates.io/api/v1/crates/once_cell/1.18.0/download"], + name = "once_cell-1.19.0.crate", + sha256 = "3fdb12b2476b595f9358c5161aa467c2438859caa136dec86c26fdd2efe17b92", + strip_prefix = "once_cell-1.19.0", + urls = ["https://crates.io/api/v1/crates/once_cell/1.19.0/download"], visibility = [], ) cargo.rust_library( - name = "once_cell-1.18.0", - srcs = [":once_cell-1.18.0.crate"], + name = "once_cell-1.19.0", + srcs = [":once_cell-1.19.0.crate"], crate = "once_cell", - crate_root = "once_cell-1.18.0.crate/src/lib.rs", + crate_root = "once_cell-1.19.0.crate/src/lib.rs", edition = "2021", features = [ "alloc", @@ -8050,48 +8230,48 @@ cargo.rust_library( alias( name = "open", - actual = ":open-5.0.0", + actual = ":open-5.0.1", visibility = ["PUBLIC"], ) http_archive( - name = "open-5.0.0.crate", - sha256 = "cfabf1927dce4d6fdf563d63328a0a506101ced3ec780ca2135747336c98cef8", - strip_prefix = "open-5.0.0", - urls = ["https://crates.io/api/v1/crates/open/5.0.0/download"], + name = "open-5.0.1.crate", + sha256 = "90878fb664448b54c4e592455ad02831e23a3f7e157374a8b95654731aac7349", + strip_prefix = "open-5.0.1", + urls = ["https://crates.io/api/v1/crates/open/5.0.1/download"], visibility = [], ) cargo.rust_library( - name = "open-5.0.0", - srcs = [":open-5.0.0.crate"], + name = "open-5.0.1", + srcs = [":open-5.0.1.crate"], crate = "open", - crate_root = 
"open-5.0.0.crate/src/lib.rs", + crate_root = "open-5.0.1.crate/src/lib.rs", edition = "2018", platform = { "linux-arm64": dict( deps = [ ":is-wsl-0.4.0", - ":libc-0.2.147", + ":libc-0.2.151", ":pathdiff-0.2.1", ], ), "linux-x86_64": dict( deps = [ ":is-wsl-0.4.0", - ":libc-0.2.147", + ":libc-0.2.151", ":pathdiff-0.2.1", ], ), "macos-arm64": dict( deps = [ - ":libc-0.2.147", + ":libc-0.2.151", ":pathdiff-0.2.1", ], ), "macos-x86_64": dict( deps = [ - ":libc-0.2.147", + ":libc-0.2.151", ":pathdiff-0.2.1", ], ), @@ -8191,15 +8371,15 @@ cargo.rust_library( ], visibility = [], deps = [ - ":async-trait-0.1.73", - ":futures-0.3.28", + ":async-trait-0.1.74", + ":futures-0.3.29", ":futures-util-0.3.29", - ":http-0.2.9", + ":http-0.2.11", ":opentelemetry-0.18.0", ":opentelemetry-proto-0.1.0", ":prost-0.11.9", - ":thiserror-1.0.47", - ":tokio-1.32.0", + ":thiserror-1.0.50", + ":tokio-1.35.0", ":tonic-0.8.3", ], ) @@ -8231,7 +8411,7 @@ cargo.rust_library( ], visibility = [], deps = [ - ":futures-0.3.28", + ":futures-0.3.29", ":futures-util-0.3.29", ":opentelemetry-0.18.0", ":prost-0.11.9", @@ -8323,9 +8503,9 @@ cargo.rust_library( ":futures-channel-0.3.29", ":futures-util-0.3.29", ":indexmap-1.9.3", - ":once_cell-1.18.0", - ":pin-project-lite-0.2.12", - ":thiserror-1.0.47", + ":once_cell-1.19.0", + ":pin-project-lite-0.2.13", + ":thiserror-1.0.50", ], ) @@ -8370,19 +8550,19 @@ cargo.rust_library( ], visibility = [], deps = [ - ":async-trait-0.1.73", + ":async-trait-0.1.74", ":crossbeam-channel-0.5.8", - ":dashmap-5.5.1", + ":dashmap-5.5.3", ":fnv-1.0.7", ":futures-channel-0.3.29", - ":futures-executor-0.3.28", + ":futures-executor-0.3.29", ":futures-util-0.3.29", - ":once_cell-1.18.0", + ":once_cell-1.19.0", ":opentelemetry_api-0.18.0", - ":percent-encoding-2.3.0", + ":percent-encoding-2.3.1", ":rand-0.8.5", - ":thiserror-1.0.47", - ":tokio-1.32.0", + ":thiserror-1.0.50", + ":tokio-1.35.0", ":tokio-stream-0.1.14", ], ) @@ -8405,47 +8585,47 @@ cargo.rust_library( ) 
http_archive( - name = "ordered-float-2.10.0.crate", - sha256 = "7940cf2ca942593318d07fcf2596cdca60a85c9e7fab408a5e21a4f9dcd40d87", - strip_prefix = "ordered-float-2.10.0", - urls = ["https://crates.io/api/v1/crates/ordered-float/2.10.0/download"], + name = "ordered-float-2.10.1.crate", + sha256 = "68f19d67e5a2795c94e73e0bb1cc1a7edeb2e28efd39e2e1c9b7a40c1108b11c", + strip_prefix = "ordered-float-2.10.1", + urls = ["https://crates.io/api/v1/crates/ordered-float/2.10.1/download"], visibility = [], ) cargo.rust_library( - name = "ordered-float-2.10.0", - srcs = [":ordered-float-2.10.0.crate"], + name = "ordered-float-2.10.1", + srcs = [":ordered-float-2.10.1.crate"], crate = "ordered_float", - crate_root = "ordered-float-2.10.0.crate/src/lib.rs", + crate_root = "ordered-float-2.10.1.crate/src/lib.rs", edition = "2018", features = [ "default", "std", ], visibility = [], - deps = [":num-traits-0.2.16"], + deps = [":num-traits-0.2.17"], ) http_archive( - name = "ordered-float-3.7.0.crate", - sha256 = "2fc2dbde8f8a79f2102cc474ceb0ad68e3b80b85289ea62389b60e66777e4213", - strip_prefix = "ordered-float-3.7.0", - urls = ["https://crates.io/api/v1/crates/ordered-float/3.7.0/download"], + name = "ordered-float-3.9.2.crate", + sha256 = "f1e1c390732d15f1d48471625cd92d154e66db2c56645e29a9cd26f4699f72dc", + strip_prefix = "ordered-float-3.9.2", + urls = ["https://crates.io/api/v1/crates/ordered-float/3.9.2/download"], visibility = [], ) cargo.rust_library( - name = "ordered-float-3.7.0", - srcs = [":ordered-float-3.7.0.crate"], + name = "ordered-float-3.9.2", + srcs = [":ordered-float-3.9.2.crate"], crate = "ordered_float", - crate_root = "ordered-float-3.7.0.crate/src/lib.rs", + crate_root = "ordered-float-3.9.2.crate/src/lib.rs", edition = "2021", features = [ "default", "std", ], visibility = [], - deps = [":num-traits-0.2.16"], + deps = [":num-traits-0.2.17"], ) http_archive( @@ -8520,7 +8700,7 @@ cargo.rust_library( deps = [ ":Inflector-0.11.4", ":proc-macro-error-1.0.4", - 
":proc-macro2-1.0.66", + ":proc-macro2-1.0.70", ":quote-1.0.33", ":syn-1.0.109", ], @@ -8588,13 +8768,13 @@ cargo.rust_library( "std", ], named_deps = { - "ecdsa_core": ":ecdsa-0.16.8", + "ecdsa_core": ":ecdsa-0.16.9", }, visibility = [], deps = [ - ":elliptic-curve-0.13.5", - ":primeorder-0.13.2", - ":sha2-0.10.7", + ":elliptic-curve-0.13.8", + ":primeorder-0.13.6", + ":sha2-0.10.8", ], ) @@ -8627,29 +8807,29 @@ cargo.rust_library( "std", ], named_deps = { - "ecdsa_core": ":ecdsa-0.16.8", + "ecdsa_core": ":ecdsa-0.16.9", }, visibility = [], deps = [ - ":elliptic-curve-0.13.5", - ":primeorder-0.13.2", - ":sha2-0.10.7", + ":elliptic-curve-0.13.8", + ":primeorder-0.13.6", + ":sha2-0.10.8", ], ) http_archive( - name = "parking-2.1.0.crate", - sha256 = "14f2252c834a40ed9bb5422029649578e63aa341ac401f74e719dd1afda8394e", - strip_prefix = "parking-2.1.0", - urls = ["https://crates.io/api/v1/crates/parking/2.1.0/download"], + name = "parking-2.2.0.crate", + sha256 = "bb813b8af86854136c6922af0598d719255ecb2179515e6e7730d468f05c9cae", + strip_prefix = "parking-2.2.0", + urls = ["https://crates.io/api/v1/crates/parking/2.2.0/download"], visibility = [], ) cargo.rust_library( - name = "parking-2.1.0", - srcs = [":parking-2.1.0.crate"], + name = "parking-2.2.0", + srcs = [":parking-2.2.0.crate"], crate = "parking", - crate_root = "parking-2.1.0.crate/src/lib.rs", + crate_root = "parking-2.2.0.crate/src/lib.rs", edition = "2018", visibility = [], ) @@ -8672,7 +8852,7 @@ cargo.rust_library( visibility = [], deps = [ ":instant-0.1.12", - ":lock_api-0.4.10", + ":lock_api-0.4.11", ":parking_lot_core-0.8.6", ], ) @@ -8694,8 +8874,8 @@ cargo.rust_library( features = ["default"], visibility = [], deps = [ - ":lock_api-0.4.10", - ":parking_lot_core-0.9.8", + ":lock_api-0.4.11", + ":parking_lot_core-0.9.9", ], ) @@ -8715,16 +8895,16 @@ cargo.rust_library( edition = "2018", platform = { "linux-arm64": dict( - deps = [":libc-0.2.147"], + deps = [":libc-0.2.151"], ), "linux-x86_64": dict( - 
deps = [":libc-0.2.147"], + deps = [":libc-0.2.151"], ), "macos-arm64": dict( - deps = [":libc-0.2.147"], + deps = [":libc-0.2.151"], ), "macos-x86_64": dict( - deps = [":libc-0.2.147"], + deps = [":libc-0.2.151"], ), "windows-gnu": dict( deps = [":winapi-0.3.9"], @@ -8737,36 +8917,36 @@ cargo.rust_library( deps = [ ":cfg-if-1.0.0", ":instant-0.1.12", - ":smallvec-1.11.0", + ":smallvec-1.11.2", ], ) http_archive( - name = "parking_lot_core-0.9.8.crate", - sha256 = "93f00c865fe7cabf650081affecd3871070f26767e7b2070a3ffae14c654b447", - strip_prefix = "parking_lot_core-0.9.8", - urls = ["https://crates.io/api/v1/crates/parking_lot_core/0.9.8/download"], + name = "parking_lot_core-0.9.9.crate", + sha256 = "4c42a9226546d68acdd9c0a280d17ce19bfe27a46bf68784e4066115788d008e", + strip_prefix = "parking_lot_core-0.9.9", + urls = ["https://crates.io/api/v1/crates/parking_lot_core/0.9.9/download"], visibility = [], ) cargo.rust_library( - name = "parking_lot_core-0.9.8", - srcs = [":parking_lot_core-0.9.8.crate"], + name = "parking_lot_core-0.9.9", + srcs = [":parking_lot_core-0.9.9.crate"], crate = "parking_lot_core", - crate_root = "parking_lot_core-0.9.8.crate/src/lib.rs", + crate_root = "parking_lot_core-0.9.9.crate/src/lib.rs", edition = "2018", platform = { "linux-arm64": dict( - deps = [":libc-0.2.147"], + deps = [":libc-0.2.151"], ), "linux-x86_64": dict( - deps = [":libc-0.2.147"], + deps = [":libc-0.2.151"], ), "macos-arm64": dict( - deps = [":libc-0.2.147"], + deps = [":libc-0.2.151"], ), "macos-x86_64": dict( - deps = [":libc-0.2.147"], + deps = [":libc-0.2.151"], ), "windows-gnu": dict( deps = [":windows-targets-0.48.5"], @@ -8778,7 +8958,7 @@ cargo.rust_library( visibility = [], deps = [ ":cfg-if-1.0.0", - ":smallvec-1.11.0", + ":smallvec-1.11.2", ], ) @@ -8887,18 +9067,18 @@ cargo.rust_library( ) http_archive( - name = "percent-encoding-2.3.0.crate", - sha256 = "9b2a4787296e9989611394c33f193f676704af1686e70b8f8033ab5ba9a35a94", - strip_prefix = 
"percent-encoding-2.3.0", - urls = ["https://crates.io/api/v1/crates/percent-encoding/2.3.0/download"], + name = "percent-encoding-2.3.1.crate", + sha256 = "e3148f5046208a5d56bcfc03053e3ca6334e51da8dfb19b6cdc8b306fae3283e", + strip_prefix = "percent-encoding-2.3.1", + urls = ["https://crates.io/api/v1/crates/percent-encoding/2.3.1/download"], visibility = [], ) cargo.rust_library( - name = "percent-encoding-2.3.0", - srcs = [":percent-encoding-2.3.0.crate"], + name = "percent-encoding-2.3.1", + srcs = [":percent-encoding-2.3.1.crate"], crate = "percent_encoding", - crate_root = "percent-encoding-2.3.0.crate/src/lib.rs", + crate_root = "percent-encoding-2.3.1.crate/src/lib.rs", edition = "2018", features = [ "alloc", @@ -8940,9 +9120,9 @@ cargo.rust_library( visibility = [], deps = [ ":fixedbitset-0.4.2", - ":indexmap-2.0.0", - ":serde-1.0.186", - ":serde_derive-1.0.186", + ":indexmap-2.1.0", + ":serde-1.0.193", + ":serde_derive-1.0.193", ], ) @@ -9040,7 +9220,7 @@ cargo.rust_library( proc_macro = True, visibility = [], deps = [ - ":proc-macro2-1.0.66", + ":proc-macro2-1.0.70", ":quote-1.0.33", ":syn-1.0.109", ], @@ -9063,31 +9243,31 @@ cargo.rust_library( proc_macro = True, visibility = [], deps = [ - ":proc-macro2-1.0.66", + ":proc-macro2-1.0.70", ":quote-1.0.33", - ":syn-2.0.29", + ":syn-2.0.40", ], ) alias( name = "pin-project-lite", - actual = ":pin-project-lite-0.2.12", + actual = ":pin-project-lite-0.2.13", visibility = ["PUBLIC"], ) http_archive( - name = "pin-project-lite-0.2.12.crate", - sha256 = "12cc1b0bf1727a77a54b6654e7b5f1af8604923edc8b81885f8ec92f9e3f0a05", - strip_prefix = "pin-project-lite-0.2.12", - urls = ["https://crates.io/api/v1/crates/pin-project-lite/0.2.12/download"], + name = "pin-project-lite-0.2.13.crate", + sha256 = "8afb450f006bf6385ca15ef45d71d2288452bc3683ce2e2cacc0d18e4be60b58", + strip_prefix = "pin-project-lite-0.2.13", + urls = ["https://crates.io/api/v1/crates/pin-project-lite/0.2.13/download"], visibility = [], ) 
cargo.rust_library( - name = "pin-project-lite-0.2.12", - srcs = [":pin-project-lite-0.2.12.crate"], + name = "pin-project-lite-0.2.13", + srcs = [":pin-project-lite-0.2.13.crate"], crate = "pin_project_lite", - crate_root = "pin-project-lite-0.2.12.crate/src/lib.rs", + crate_root = "pin-project-lite-0.2.13.crate/src/lib.rs", edition = "2018", visibility = [], ) @@ -9135,7 +9315,7 @@ cargo.rust_library( ":der-0.6.1", ":pkcs8-0.9.0", ":spki-0.6.0", - ":zeroize-1.6.0", + ":zeroize-1.7.0", ], ) @@ -9161,7 +9341,7 @@ cargo.rust_library( visibility = [], deps = [ ":der-0.7.8", - ":spki-0.7.2", + ":spki-0.7.3", ], ) @@ -9191,7 +9371,7 @@ cargo.rust_library( ":der-0.4.5", ":pem-rfc7468-0.2.3", ":spki-0.4.1", - ":zeroize-1.6.0", + ":zeroize-1.7.0", ], ) @@ -9238,6 +9418,27 @@ cargo.rust_library( visibility = [], ) +http_archive( + name = "platforms-3.2.0.crate", + sha256 = "14e6ab3f592e6fb464fc9712d8d6e6912de6473954635fd76a589d832cffcbb0", + strip_prefix = "platforms-3.2.0", + urls = ["https://crates.io/api/v1/crates/platforms/3.2.0/download"], + visibility = [], +) + +cargo.rust_library( + name = "platforms-3.2.0", + srcs = [":platforms-3.2.0.crate"], + crate = "platforms", + crate_root = "platforms-3.2.0.crate/src/lib.rs", + edition = "2018", + features = [ + "default", + "std", + ], + visibility = [], +) + alias( name = "podman-api", actual = ":podman-api-0.10.0", @@ -9265,22 +9466,22 @@ cargo.rust_library( visibility = [], deps = [ ":base64-0.13.1", - ":byteorder-1.4.3", - ":bytes-1.4.0", - ":chrono-0.4.26", + ":byteorder-1.5.0", + ":bytes-1.5.0", + ":chrono-0.4.31", ":containers-api-0.8.0", - ":flate2-1.0.27", + ":flate2-1.0.28", ":futures-util-0.3.29", ":futures_codec-0.4.1", ":log-0.4.20", ":paste-1.0.14", ":podman-api-stubs-0.9.0", - ":serde-1.0.186", - ":serde_json-1.0.105", + ":serde-1.0.193", + ":serde_json-1.0.108", ":tar-0.4.40", - ":thiserror-1.0.47", - ":tokio-1.32.0", - ":url-2.4.0", + ":thiserror-1.0.50", + ":tokio-1.35.0", + ":url-2.5.0", ], ) @@ -9300,61 
+9501,61 @@ cargo.rust_library( edition = "2021", visibility = [], deps = [ - ":chrono-0.4.26", - ":serde-1.0.186", - ":serde_json-1.0.105", + ":chrono-0.4.31", + ":serde-1.0.193", + ":serde_json-1.0.108", ], ) http_archive( - name = "portable-atomic-1.4.2.crate", - sha256 = "f32154ba0af3a075eefa1eda8bb414ee928f62303a54ea85b8d6638ff1a6ee9e", - strip_prefix = "portable-atomic-1.4.2", - urls = ["https://crates.io/api/v1/crates/portable-atomic/1.4.2/download"], + name = "portable-atomic-1.6.0.crate", + sha256 = "7170ef9988bc169ba16dd36a7fa041e5c4cbeb6a35b76d4c03daded371eae7c0", + strip_prefix = "portable-atomic-1.6.0", + urls = ["https://crates.io/api/v1/crates/portable-atomic/1.6.0/download"], visibility = [], ) cargo.rust_library( - name = "portable-atomic-1.4.2", - srcs = [":portable-atomic-1.4.2.crate"], + name = "portable-atomic-1.6.0", + srcs = [":portable-atomic-1.6.0.crate"], crate = "portable_atomic", - crate_root = "portable-atomic-1.4.2.crate/src/lib.rs", + crate_root = "portable-atomic-1.6.0.crate/src/lib.rs", edition = "2018", env = { - "CARGO_MANIFEST_DIR": "portable-atomic-1.4.2.crate", + "CARGO_MANIFEST_DIR": "portable-atomic-1.6.0.crate", "CARGO_PKG_AUTHORS": "", "CARGO_PKG_DESCRIPTION": "Portable atomic types including support for 128-bit atomics, atomic float, etc.\n", "CARGO_PKG_NAME": "portable-atomic", "CARGO_PKG_REPOSITORY": "https://github.com/taiki-e/portable-atomic", - "CARGO_PKG_VERSION": "1.4.2", + "CARGO_PKG_VERSION": "1.6.0", "CARGO_PKG_VERSION_MAJOR": "1", - "CARGO_PKG_VERSION_MINOR": "4", - "CARGO_PKG_VERSION_PATCH": "2", + "CARGO_PKG_VERSION_MINOR": "6", + "CARGO_PKG_VERSION_PATCH": "0", }, features = [ "default", "fallback", ], - rustc_flags = ["@$(location :portable-atomic-1.4.2-build-script-run[rustc_flags])"], + rustc_flags = ["@$(location :portable-atomic-1.6.0-build-script-run[rustc_flags])"], visibility = [], ) cargo.rust_binary( - name = "portable-atomic-1.4.2-build-script-build", - srcs = [":portable-atomic-1.4.2.crate"], + 
name = "portable-atomic-1.6.0-build-script-build", + srcs = [":portable-atomic-1.6.0.crate"], crate = "build_script_build", - crate_root = "portable-atomic-1.4.2.crate/build.rs", + crate_root = "portable-atomic-1.6.0.crate/build.rs", edition = "2018", env = { - "CARGO_MANIFEST_DIR": "portable-atomic-1.4.2.crate", + "CARGO_MANIFEST_DIR": "portable-atomic-1.6.0.crate", "CARGO_PKG_AUTHORS": "", "CARGO_PKG_DESCRIPTION": "Portable atomic types including support for 128-bit atomics, atomic float, etc.\n", "CARGO_PKG_NAME": "portable-atomic", "CARGO_PKG_REPOSITORY": "https://github.com/taiki-e/portable-atomic", - "CARGO_PKG_VERSION": "1.4.2", + "CARGO_PKG_VERSION": "1.6.0", "CARGO_PKG_VERSION_MAJOR": "1", - "CARGO_PKG_VERSION_MINOR": "4", - "CARGO_PKG_VERSION_PATCH": "2", + "CARGO_PKG_VERSION_MINOR": "6", + "CARGO_PKG_VERSION_PATCH": "0", }, features = [ "default", @@ -9364,14 +9565,14 @@ cargo.rust_binary( ) buildscript_run( - name = "portable-atomic-1.4.2-build-script-run", + name = "portable-atomic-1.6.0-build-script-run", package_name = "portable-atomic", - buildscript_rule = ":portable-atomic-1.4.2-build-script-build", + buildscript_rule = ":portable-atomic-1.6.0-build-script-build", features = [ "default", "fallback", ], - version = "1.4.2", + version = "1.6.0", ) http_archive( @@ -9392,9 +9593,9 @@ cargo.rust_library( visibility = [], deps = [ ":heck-0.4.1", - ":proc-macro2-1.0.66", + ":proc-macro2-1.0.70", ":quote-1.0.33", - ":syn-2.0.29", + ":syn-2.0.40", ], ) @@ -9415,16 +9616,16 @@ cargo.rust_library( features = ["default"], visibility = [], deps = [ - ":base64-0.21.2", - ":byteorder-1.4.3", - ":bytes-1.4.0", + ":base64-0.21.5", + ":byteorder-1.5.0", + ":bytes-1.5.0", ":fallible-iterator-0.2.0", ":hmac-0.12.1", - ":md-5-0.10.5", - ":memchr-2.5.0", + ":md-5-0.10.6", + ":memchr-2.6.4", ":rand-0.8.5", - ":sha2-0.10.7", - ":stringprep-0.1.3", + ":sha2-0.10.8", + ":stringprep-0.1.4", ], ) @@ -9458,19 +9659,36 @@ cargo.rust_library( "with-serde_json-1", ], named_deps 
= { - "chrono_04": ":chrono-0.4.26", - "serde_1": ":serde-1.0.186", - "serde_json_1": ":serde_json-1.0.105", + "chrono_04": ":chrono-0.4.31", + "serde_1": ":serde-1.0.193", + "serde_json_1": ":serde_json-1.0.108", }, visibility = [], deps = [ - ":bytes-1.4.0", + ":bytes-1.5.0", ":fallible-iterator-0.2.0", ":postgres-derive-0.4.5", ":postgres-protocol-0.6.6", ], ) +http_archive( + name = "powerfmt-0.2.0.crate", + sha256 = "439ee305def115ba05938db6eb1644ff94165c5ab5e9420d1c1bcedbba909391", + strip_prefix = "powerfmt-0.2.0", + urls = ["https://crates.io/api/v1/crates/powerfmt/0.2.0/download"], + visibility = [], +) + +cargo.rust_library( + name = "powerfmt-0.2.0", + srcs = [":powerfmt-0.2.0.crate"], + crate = "powerfmt", + crate_root = "powerfmt-0.2.0.crate/src/lib.rs", + edition = "2021", + visibility = [], +) + http_archive( name = "ppv-lite86-0.2.17.crate", sha256 = "5b40af805b3121feab8a3c29f04d8ad262fa8e0561883e7653e024ae4479e6de", @@ -9571,45 +9789,27 @@ cargo.rust_library( }, visibility = [], deps = [ - ":proc-macro2-1.0.66", + ":proc-macro2-1.0.70", ":syn-1.0.109", ], ) http_archive( - name = "primeorder-0.13.2.crate", - sha256 = "3c2fcef82c0ec6eefcc179b978446c399b3cdf73c392c35604e399eee6df1ee3", - strip_prefix = "primeorder-0.13.2", - urls = ["https://crates.io/api/v1/crates/primeorder/0.13.2/download"], + name = "primeorder-0.13.6.crate", + sha256 = "353e1ca18966c16d9deb1c69278edbc5f194139612772bd9537af60ac231e1e6", + strip_prefix = "primeorder-0.13.6", + urls = ["https://crates.io/api/v1/crates/primeorder/0.13.6/download"], visibility = [], ) cargo.rust_library( - name = "primeorder-0.13.2", - srcs = [":primeorder-0.13.2.crate"], + name = "primeorder-0.13.6", + srcs = [":primeorder-0.13.6.crate"], crate = "primeorder", - crate_root = "primeorder-0.13.2.crate/src/lib.rs", + crate_root = "primeorder-0.13.6.crate/src/lib.rs", edition = "2021", visibility = [], - deps = [":elliptic-curve-0.13.5"], -) - -http_archive( - name = "proc-macro-crate-0.1.5.crate", - 
sha256 = "1d6ea3c4595b96363c13943497db34af4460fb474a95c43f4446ad341b8c9785", - strip_prefix = "proc-macro-crate-0.1.5", - urls = ["https://crates.io/api/v1/crates/proc-macro-crate/0.1.5/download"], - visibility = [], -) - -cargo.rust_library( - name = "proc-macro-crate-0.1.5", - srcs = [":proc-macro-crate-0.1.5.crate"], - crate = "proc_macro_crate", - crate_root = "proc-macro-crate-0.1.5.crate/src/lib.rs", - edition = "2018", - visibility = [], - deps = [":toml-0.5.11"], + deps = [":elliptic-curve-0.13.8"], ) http_archive( @@ -9628,11 +9828,29 @@ cargo.rust_library( edition = "2021", visibility = [], deps = [ - ":once_cell-1.18.0", - ":toml_edit-0.19.14", + ":once_cell-1.19.0", + ":toml_edit-0.19.15", ], ) +http_archive( + name = "proc-macro-crate-2.0.0.crate", + sha256 = "7e8366a6159044a37876a2b9817124296703c586a5c92e2c53751fa06d8d43e8", + strip_prefix = "proc-macro-crate-2.0.0", + urls = ["https://crates.io/api/v1/crates/proc-macro-crate/2.0.0/download"], + visibility = [], +) + +cargo.rust_library( + name = "proc-macro-crate-2.0.0", + srcs = [":proc-macro-crate-2.0.0.crate"], + crate = "proc_macro_crate", + crate_root = "proc-macro-crate-2.0.0.crate/src/lib.rs", + edition = "2021", + visibility = [], + deps = [":toml_edit-0.20.7"], +) + http_archive( name = "proc-macro-error-1.0.4.crate", sha256 = "da25490ff9892aab3fcf7c36f08cfb902dd3e71ca0f9f9517bea02a73a5ce38c", @@ -9656,7 +9874,7 @@ cargo.rust_library( visibility = [], deps = [ ":proc-macro-error-attr-1.0.4", - ":proc-macro2-1.0.66", + ":proc-macro2-1.0.70", ":quote-1.0.33", ":syn-1.0.109", ], @@ -9706,45 +9924,45 @@ cargo.rust_library( proc_macro = True, visibility = [], deps = [ - ":proc-macro2-1.0.66", + ":proc-macro2-1.0.70", ":quote-1.0.33", ], ) alias( name = "proc-macro2", - actual = ":proc-macro2-1.0.66", + actual = ":proc-macro2-1.0.70", visibility = ["PUBLIC"], ) http_archive( - name = "proc-macro2-1.0.66.crate", - sha256 = "18fb31db3f9bddb2ea821cde30a9f70117e3f119938b5ee630b7403aa6e2ead9", - 
strip_prefix = "proc-macro2-1.0.66", - urls = ["https://crates.io/api/v1/crates/proc-macro2/1.0.66/download"], + name = "proc-macro2-1.0.70.crate", + sha256 = "39278fbbf5fb4f646ce651690877f89d1c5811a3d4acb27700c1cb3cdb78fd3b", + strip_prefix = "proc-macro2-1.0.70", + urls = ["https://crates.io/api/v1/crates/proc-macro2/1.0.70/download"], visibility = [], ) cargo.rust_library( - name = "proc-macro2-1.0.66", - srcs = [":proc-macro2-1.0.66.crate"], + name = "proc-macro2-1.0.70", + srcs = [":proc-macro2-1.0.70.crate"], crate = "proc_macro2", - crate_root = "proc-macro2-1.0.66.crate/src/lib.rs", + crate_root = "proc-macro2-1.0.70.crate/src/lib.rs", edition = "2021", features = [ "default", "proc-macro", ], - rustc_flags = ["@$(location :proc-macro2-1.0.66-build-script-run[rustc_flags])"], + rustc_flags = ["@$(location :proc-macro2-1.0.70-build-script-run[rustc_flags])"], visibility = [], - deps = [":unicode-ident-1.0.11"], + deps = [":unicode-ident-1.0.12"], ) cargo.rust_binary( - name = "proc-macro2-1.0.66-build-script-build", - srcs = [":proc-macro2-1.0.66.crate"], + name = "proc-macro2-1.0.70-build-script-build", + srcs = [":proc-macro2-1.0.70.crate"], crate = "build_script_build", - crate_root = "proc-macro2-1.0.66.crate/build.rs", + crate_root = "proc-macro2-1.0.70.crate/build.rs", edition = "2021", features = [ "default", @@ -9754,14 +9972,14 @@ cargo.rust_binary( ) buildscript_run( - name = "proc-macro2-1.0.66-build-script-run", + name = "proc-macro2-1.0.70-build-script-run", package_name = "proc-macro2", - buildscript_rule = ":proc-macro2-1.0.66-build-script-build", + buildscript_rule = ":proc-macro2-1.0.70-build-script-build", features = [ "default", "proc-macro", ], - version = "1.0.66", + version = "1.0.70", ) http_archive( @@ -9785,7 +10003,7 @@ cargo.rust_library( ], visibility = [], deps = [ - ":bytes-1.4.0", + ":bytes-1.5.0", ":prost-derive-0.11.9", ], ) @@ -9812,7 +10030,7 @@ cargo.rust_library( ], visibility = [], deps = [ - ":bytes-1.4.0", + 
":bytes-1.5.0", ":heck-0.4.1", ":itertools-0.10.5", ":lazy_static-1.4.0", @@ -9822,10 +10040,10 @@ cargo.rust_library( ":prettyplease-0.1.25", ":prost-0.11.9", ":prost-types-0.11.9", - ":regex-1.9.3", + ":regex-1.10.2", ":syn-1.0.109", - ":tempfile-3.8.0", - ":which-4.4.0", + ":tempfile-3.8.1", + ":which-4.4.2", ], ) @@ -9848,7 +10066,7 @@ cargo.rust_library( deps = [ ":anyhow-1.0.75", ":itertools-0.10.5", - ":proc-macro2-1.0.66", + ":proc-macro2-1.0.70", ":quote-1.0.33", ":syn-1.0.109", ], @@ -9908,7 +10126,7 @@ cargo.rust_library( proc_macro = True, visibility = [], deps = [ - ":proc-macro2-1.0.66", + ":proc-macro2-1.0.70", ":quote-1.0.33", ":syn-1.0.109", ], @@ -9935,8 +10153,8 @@ cargo.rust_library( ], visibility = [], deps = [ - ":memchr-2.5.0", - ":serde-1.0.186", + ":memchr-2.6.4", + ":serde-1.0.193", ], ) @@ -9965,7 +10183,7 @@ cargo.rust_library( "proc-macro", ], visibility = [], - deps = [":proc-macro2-1.0.66"], + deps = [":proc-macro2-1.0.70"], ) alias( @@ -9990,21 +10208,21 @@ cargo.rust_library( edition = "2018", visibility = [], deps = [ - ":async-trait-0.1.73", - ":bytes-1.4.0", - ":dashmap-5.5.1", - ":futures-0.3.28", + ":async-trait-0.1.74", + ":bytes-1.5.0", + ":dashmap-5.5.3", + ":futures-0.3.29", ":pin-project-1.1.3", ":rabbitmq-stream-protocol-0.3.0", ":rand-0.8.5", - ":rustls-pemfile-1.0.3", - ":thiserror-1.0.47", - ":tokio-1.32.0", + ":rustls-pemfile-1.0.4", + ":thiserror-1.0.50", + ":tokio-1.35.0", ":tokio-rustls-0.24.1", ":tokio-stream-0.1.14", - ":tokio-util-0.7.8", - ":tracing-0.1.37", - ":url-2.4.0", + ":tokio-util-0.7.10", + ":tracing-0.1.40", + ":url-2.5.0", ], ) @@ -10024,12 +10242,12 @@ cargo.rust_library( edition = "2018", visibility = [], deps = [ - ":byteorder-1.4.3", - ":chrono-0.4.26", + ":byteorder-1.5.0", + ":chrono-0.4.31", ":derive_more-0.99.17", ":num_enum-0.6.1", - ":ordered-float-3.7.0", - ":uuid-1.4.1", + ":ordered-float-3.9.2", + ":uuid-1.6.1", ], ) @@ -10079,25 +10297,25 @@ cargo.rust_library( platform = { 
"linux-arm64": dict( deps = [ - ":libc-0.2.147", + ":libc-0.2.151", ":rand_chacha-0.2.2", ], ), "linux-x86_64": dict( deps = [ - ":libc-0.2.147", + ":libc-0.2.151", ":rand_chacha-0.2.2", ], ), "macos-arm64": dict( deps = [ - ":libc-0.2.147", + ":libc-0.2.151", ":rand_chacha-0.2.2", ], ), "macos-x86_64": dict( deps = [ - ":libc-0.2.147", + ":libc-0.2.151", ":rand_chacha-0.2.2", ], ), @@ -10144,16 +10362,16 @@ cargo.rust_library( ], platform = { "linux-arm64": dict( - deps = [":libc-0.2.147"], + deps = [":libc-0.2.151"], ), "linux-x86_64": dict( - deps = [":libc-0.2.147"], + deps = [":libc-0.2.151"], ), "macos-arm64": dict( - deps = [":libc-0.2.147"], + deps = [":libc-0.2.151"], ), "macos-x86_64": dict( - deps = [":libc-0.2.147"], + deps = [":libc-0.2.151"], ), }, visibility = [], @@ -10250,28 +10468,28 @@ cargo.rust_library( "std", ], visibility = [], - deps = [":getrandom-0.2.10"], + deps = [":getrandom-0.2.11"], ) alias( name = "refinery", - actual = ":refinery-0.8.10", + actual = ":refinery-0.8.11", visibility = ["PUBLIC"], ) http_archive( - name = "refinery-0.8.10.crate", - sha256 = "cdb0436d0dd7bd8d4fce1e828751fa79742b08e35f27cfea7546f8a322b5ef24", - strip_prefix = "refinery-0.8.10", - urls = ["https://crates.io/api/v1/crates/refinery/0.8.10/download"], + name = "refinery-0.8.11.crate", + sha256 = "529664dbccc0a296947615c997a857912d72d1c44be1fafb7bae54ecfa7a8c24", + strip_prefix = "refinery-0.8.11", + urls = ["https://crates.io/api/v1/crates/refinery/0.8.11/download"], visibility = [], ) cargo.rust_library( - name = "refinery-0.8.10", - srcs = [":refinery-0.8.10.crate"], + name = "refinery-0.8.11", + srcs = [":refinery-0.8.11.crate"], crate = "refinery", - crate_root = "refinery-0.8.10.crate/src/lib.rs", + crate_root = "refinery-0.8.11.crate/src/lib.rs", edition = "2018", features = [ "default", @@ -10279,24 +10497,24 @@ cargo.rust_library( ], visibility = [], deps = [ - ":refinery-core-0.8.10", - ":refinery-macros-0.8.10", + ":refinery-core-0.8.11", + 
":refinery-macros-0.8.11", ], ) http_archive( - name = "refinery-core-0.8.10.crate", - sha256 = "19206547cd047e8f4dfa6b20c30d3ecaf24be05841b6aa0aa926a47a3d0662bb", - strip_prefix = "refinery-core-0.8.10", - urls = ["https://crates.io/api/v1/crates/refinery-core/0.8.10/download"], + name = "refinery-core-0.8.11.crate", + sha256 = "e895cb870cf06e92318cbbeb701f274d022d5ca87a16fa8244e291cd035ef954", + strip_prefix = "refinery-core-0.8.11", + urls = ["https://crates.io/api/v1/crates/refinery-core/0.8.11/download"], visibility = [], ) cargo.rust_library( - name = "refinery-core-0.8.10", - srcs = [":refinery-core-0.8.10.crate"], + name = "refinery-core-0.8.11", + srcs = [":refinery-core-0.8.11.crate"], crate = "refinery_core", - crate_root = "refinery-core-0.8.10.crate/src/lib.rs", + crate_root = "refinery-core-0.8.11.crate/src/lib.rs", edition = "2018", features = [ "default", @@ -10305,67 +10523,67 @@ cargo.rust_library( ], visibility = [], deps = [ - ":async-trait-0.1.73", + ":async-trait-0.1.74", ":cfg-if-1.0.0", ":lazy_static-1.4.0", ":log-0.4.20", - ":regex-1.9.3", - ":serde-1.0.186", - ":siphasher-0.3.11", - ":thiserror-1.0.47", - ":time-0.3.27", - ":tokio-1.32.0", - ":tokio-postgres-0.7.9", - ":toml-0.7.6", - ":url-2.4.0", - ":walkdir-2.3.3", + ":regex-1.10.2", + ":serde-1.0.193", + ":siphasher-1.0.0", + ":thiserror-1.0.50", + ":time-0.3.30", + ":tokio-1.35.0", + ":tokio-postgres-0.7.10", + ":toml-0.7.8", + ":url-2.5.0", + ":walkdir-2.4.0", ], ) http_archive( - name = "refinery-macros-0.8.10.crate", - sha256 = "d94d4b9241859ba19eaa5c04c86e782eb3aa0aae2c5868e0cfa90c856e58a174", - strip_prefix = "refinery-macros-0.8.10", - urls = ["https://crates.io/api/v1/crates/refinery-macros/0.8.10/download"], + name = "refinery-macros-0.8.11.crate", + sha256 = "123e8b80f8010c3ae38330c81e76938fc7adf6cdbfbaad20295bb8c22718b4f1", + strip_prefix = "refinery-macros-0.8.11", + urls = ["https://crates.io/api/v1/crates/refinery-macros/0.8.11/download"], visibility = [], ) 
cargo.rust_library( - name = "refinery-macros-0.8.10", - srcs = [":refinery-macros-0.8.10.crate"], + name = "refinery-macros-0.8.11", + srcs = [":refinery-macros-0.8.11.crate"], crate = "refinery_macros", - crate_root = "refinery-macros-0.8.10.crate/src/lib.rs", + crate_root = "refinery-macros-0.8.11.crate/src/lib.rs", edition = "2018", proc_macro = True, visibility = [], deps = [ - ":proc-macro2-1.0.66", + ":proc-macro2-1.0.70", ":quote-1.0.33", - ":refinery-core-0.8.10", - ":regex-1.9.3", - ":syn-2.0.29", + ":refinery-core-0.8.11", + ":regex-1.10.2", + ":syn-2.0.40", ], ) alias( name = "regex", - actual = ":regex-1.9.3", + actual = ":regex-1.10.2", visibility = ["PUBLIC"], ) http_archive( - name = "regex-1.9.3.crate", - sha256 = "81bc1d4caf89fac26a70747fe603c130093b53c773888797a6329091246d651a", - strip_prefix = "regex-1.9.3", - urls = ["https://crates.io/api/v1/crates/regex/1.9.3/download"], + name = "regex-1.10.2.crate", + sha256 = "380b951a9c5e80ddfd6136919eef32310721aa4aacd4889a8d39124b026ab343", + strip_prefix = "regex-1.10.2", + urls = ["https://crates.io/api/v1/crates/regex/1.10.2/download"], visibility = [], ) cargo.rust_library( - name = "regex-1.9.3", - srcs = [":regex-1.9.3.crate"], + name = "regex-1.10.2", + srcs = [":regex-1.10.2.crate"], crate = "regex", - crate_root = "regex-1.9.3.crate/src/lib.rs", + crate_root = "regex-1.10.2.crate/src/lib.rs", edition = "2021", features = [ "default", @@ -10388,10 +10606,10 @@ cargo.rust_library( ], visibility = [], deps = [ - ":aho-corasick-1.0.4", - ":memchr-2.5.0", - ":regex-automata-0.3.6", - ":regex-syntax-0.7.4", + ":aho-corasick-1.1.2", + ":memchr-2.6.4", + ":regex-automata-0.4.3", + ":regex-syntax-0.8.2", ], ) @@ -10419,27 +10637,29 @@ cargo.rust_library( ) http_archive( - name = "regex-automata-0.3.6.crate", - sha256 = "fed1ceff11a1dddaee50c9dc8e4938bd106e9d89ae372f192311e7da498e3b69", - strip_prefix = "regex-automata-0.3.6", - urls = ["https://crates.io/api/v1/crates/regex-automata/0.3.6/download"], + 
name = "regex-automata-0.4.3.crate", + sha256 = "5f804c7828047e88b2d32e2d7fe5a105da8ee3264f01902f796c8e067dc2483f", + strip_prefix = "regex-automata-0.4.3", + urls = ["https://crates.io/api/v1/crates/regex-automata/0.4.3/download"], visibility = [], ) cargo.rust_library( - name = "regex-automata-0.3.6", - srcs = [":regex-automata-0.3.6.crate"], + name = "regex-automata-0.4.3", + srcs = [":regex-automata-0.4.3.crate"], crate = "regex_automata", - crate_root = "regex-automata-0.3.6.crate/src/lib.rs", + crate_root = "regex-automata-0.4.3.crate/src/lib.rs", edition = "2021", features = [ "alloc", "dfa-onepass", "hybrid", "meta", + "nfa", "nfa-backtrack", "nfa-pikevm", "nfa-thompson", + "perf", "perf-inline", "perf-literal", "perf-literal-multisubstring", @@ -10458,9 +10678,9 @@ cargo.rust_library( ], visibility = [], deps = [ - ":aho-corasick-1.0.4", - ":memchr-2.5.0", - ":regex-syntax-0.7.4", + ":aho-corasick-1.1.2", + ":memchr-2.6.4", + ":regex-syntax-0.8.2", ], ) @@ -10493,18 +10713,18 @@ cargo.rust_library( ) http_archive( - name = "regex-syntax-0.7.4.crate", - sha256 = "e5ea92a5b6195c6ef2a0295ea818b312502c6fc94dde986c5553242e18fd4ce2", - strip_prefix = "regex-syntax-0.7.4", - urls = ["https://crates.io/api/v1/crates/regex-syntax/0.7.4/download"], + name = "regex-syntax-0.8.2.crate", + sha256 = "c08c74e62047bb2de4ff487b251e4a92e24f48745648451635cec7d591162d9f", + strip_prefix = "regex-syntax-0.8.2", + urls = ["https://crates.io/api/v1/crates/regex-syntax/0.8.2/download"], visibility = [], ) cargo.rust_library( - name = "regex-syntax-0.7.4", - srcs = [":regex-syntax-0.7.4.crate"], + name = "regex-syntax-0.8.2", + srcs = [":regex-syntax-0.8.2.crate"], crate = "regex_syntax", - crate_root = "regex-syntax-0.7.4.crate/src/lib.rs", + crate_root = "regex-syntax-0.8.2.crate/src/lib.rs", edition = "2021", features = [ "default", @@ -10544,25 +10764,25 @@ cargo.rust_library( proc_macro = True, visibility = [], deps = [ - ":proc-macro2-1.0.66", + ":proc-macro2-1.0.70", 
":quote-1.0.33", - ":syn-2.0.29", + ":syn-2.0.40", ], ) http_archive( - name = "rend-0.4.0.crate", - sha256 = "581008d2099240d37fb08d77ad713bcaec2c4d89d50b5b21a8bb1996bbab68ab", - strip_prefix = "rend-0.4.0", - urls = ["https://crates.io/api/v1/crates/rend/0.4.0/download"], + name = "rend-0.4.1.crate", + sha256 = "a2571463863a6bd50c32f94402933f03457a3fbaf697a707c5be741e459f08fd", + strip_prefix = "rend-0.4.1", + urls = ["https://crates.io/api/v1/crates/rend/0.4.1/download"], visibility = [], ) cargo.rust_library( - name = "rend-0.4.0", - srcs = [":rend-0.4.0.crate"], + name = "rend-0.4.1", + srcs = [":rend-0.4.1.crate"], crate = "rend", - crate_root = "rend-0.4.0.crate/src/lib.rs", + crate_root = "rend-0.4.1.crate/src/lib.rs", edition = "2018", features = [ "bytecheck", @@ -10574,23 +10794,23 @@ cargo.rust_library( alias( name = "reqwest", - actual = ":reqwest-0.11.20", + actual = ":reqwest-0.11.22", visibility = ["PUBLIC"], ) http_archive( - name = "reqwest-0.11.20.crate", - sha256 = "3e9ad3fe7488d7e34558a2033d45a0c90b72d97b4f80705666fea71472e2e6a1", - strip_prefix = "reqwest-0.11.20", - urls = ["https://crates.io/api/v1/crates/reqwest/0.11.20/download"], + name = "reqwest-0.11.22.crate", + sha256 = "046cd98826c46c2ac8ddecae268eb5c2e58628688a5fc7a2643704a73faba95b", + strip_prefix = "reqwest-0.11.22", + urls = ["https://crates.io/api/v1/crates/reqwest/0.11.22/download"], visibility = [], ) cargo.rust_library( - name = "reqwest-0.11.20", - srcs = [":reqwest-0.11.20.crate"], + name = "reqwest-0.11.22", + srcs = [":reqwest-0.11.22.crate"], crate = "reqwest", - crate_root = "reqwest-0.11.20.crate/src/lib.rs", + crate_root = "reqwest-0.11.22.crate/src/lib.rs", edition = "2018", features = [ "__rustls", @@ -10614,145 +10834,147 @@ cargo.rust_library( "linux-arm64": dict( deps = [ ":encoding_rs-0.8.33", - ":h2-0.3.21", - ":http-body-0.4.5", + ":h2-0.3.22", + ":http-body-0.4.6", ":hyper-0.14.27", - ":hyper-rustls-0.24.1", - ":ipnet-2.8.0", + ":hyper-rustls-0.24.2", + 
":ipnet-2.9.0", ":log-0.4.20", ":mime-0.3.17", - ":once_cell-1.18.0", - ":percent-encoding-2.3.0", - ":pin-project-lite-0.2.12", - ":rustls-0.21.6", - ":rustls-pemfile-1.0.3", - ":tokio-1.32.0", + ":once_cell-1.19.0", + ":percent-encoding-2.3.1", + ":pin-project-lite-0.2.13", + ":rustls-0.21.10", + ":rustls-pemfile-1.0.4", + ":tokio-1.35.0", ":tokio-rustls-0.24.1", - ":tokio-util-0.7.8", - ":webpki-roots-0.25.2", + ":tokio-util-0.7.10", + ":webpki-roots-0.25.3", ], ), "linux-x86_64": dict( deps = [ ":encoding_rs-0.8.33", - ":h2-0.3.21", - ":http-body-0.4.5", + ":h2-0.3.22", + ":http-body-0.4.6", ":hyper-0.14.27", - ":hyper-rustls-0.24.1", - ":ipnet-2.8.0", + ":hyper-rustls-0.24.2", + ":ipnet-2.9.0", ":log-0.4.20", ":mime-0.3.17", - ":once_cell-1.18.0", - ":percent-encoding-2.3.0", - ":pin-project-lite-0.2.12", - ":rustls-0.21.6", - ":rustls-pemfile-1.0.3", - ":tokio-1.32.0", + ":once_cell-1.19.0", + ":percent-encoding-2.3.1", + ":pin-project-lite-0.2.13", + ":rustls-0.21.10", + ":rustls-pemfile-1.0.4", + ":tokio-1.35.0", ":tokio-rustls-0.24.1", - ":tokio-util-0.7.8", - ":webpki-roots-0.25.2", + ":tokio-util-0.7.10", + ":webpki-roots-0.25.3", ], ), "macos-arm64": dict( deps = [ ":encoding_rs-0.8.33", - ":h2-0.3.21", - ":http-body-0.4.5", + ":h2-0.3.22", + ":http-body-0.4.6", ":hyper-0.14.27", - ":hyper-rustls-0.24.1", - ":ipnet-2.8.0", + ":hyper-rustls-0.24.2", + ":ipnet-2.9.0", ":log-0.4.20", ":mime-0.3.17", - ":once_cell-1.18.0", - ":percent-encoding-2.3.0", - ":pin-project-lite-0.2.12", - ":rustls-0.21.6", - ":rustls-pemfile-1.0.3", - ":tokio-1.32.0", + ":once_cell-1.19.0", + ":percent-encoding-2.3.1", + ":pin-project-lite-0.2.13", + ":rustls-0.21.10", + ":rustls-pemfile-1.0.4", + ":system-configuration-0.5.1", + ":tokio-1.35.0", ":tokio-rustls-0.24.1", - ":tokio-util-0.7.8", - ":webpki-roots-0.25.2", + ":tokio-util-0.7.10", + ":webpki-roots-0.25.3", ], ), "macos-x86_64": dict( deps = [ ":encoding_rs-0.8.33", - ":h2-0.3.21", - ":http-body-0.4.5", + ":h2-0.3.22", 
+ ":http-body-0.4.6", ":hyper-0.14.27", - ":hyper-rustls-0.24.1", - ":ipnet-2.8.0", + ":hyper-rustls-0.24.2", + ":ipnet-2.9.0", ":log-0.4.20", ":mime-0.3.17", - ":once_cell-1.18.0", - ":percent-encoding-2.3.0", - ":pin-project-lite-0.2.12", - ":rustls-0.21.6", - ":rustls-pemfile-1.0.3", - ":tokio-1.32.0", + ":once_cell-1.19.0", + ":percent-encoding-2.3.1", + ":pin-project-lite-0.2.13", + ":rustls-0.21.10", + ":rustls-pemfile-1.0.4", + ":system-configuration-0.5.1", + ":tokio-1.35.0", ":tokio-rustls-0.24.1", - ":tokio-util-0.7.8", - ":webpki-roots-0.25.2", + ":tokio-util-0.7.10", + ":webpki-roots-0.25.3", ], ), "windows-gnu": dict( deps = [ ":encoding_rs-0.8.33", - ":h2-0.3.21", - ":http-body-0.4.5", + ":h2-0.3.22", + ":http-body-0.4.6", ":hyper-0.14.27", - ":hyper-rustls-0.24.1", - ":ipnet-2.8.0", + ":hyper-rustls-0.24.2", + ":ipnet-2.9.0", ":log-0.4.20", ":mime-0.3.17", - ":once_cell-1.18.0", - ":percent-encoding-2.3.0", - ":pin-project-lite-0.2.12", - ":rustls-0.21.6", - ":rustls-pemfile-1.0.3", - ":tokio-1.32.0", + ":once_cell-1.19.0", + ":percent-encoding-2.3.1", + ":pin-project-lite-0.2.13", + ":rustls-0.21.10", + ":rustls-pemfile-1.0.4", + ":tokio-1.35.0", ":tokio-rustls-0.24.1", - ":tokio-util-0.7.8", - ":webpki-roots-0.25.2", + ":tokio-util-0.7.10", + ":webpki-roots-0.25.3", ":winreg-0.50.0", ], ), "windows-msvc": dict( deps = [ ":encoding_rs-0.8.33", - ":h2-0.3.21", - ":http-body-0.4.5", + ":h2-0.3.22", + ":http-body-0.4.6", ":hyper-0.14.27", - ":hyper-rustls-0.24.1", - ":ipnet-2.8.0", + ":hyper-rustls-0.24.2", + ":ipnet-2.9.0", ":log-0.4.20", ":mime-0.3.17", - ":once_cell-1.18.0", - ":percent-encoding-2.3.0", - ":pin-project-lite-0.2.12", - ":rustls-0.21.6", - ":rustls-pemfile-1.0.3", - ":tokio-1.32.0", + ":once_cell-1.19.0", + ":percent-encoding-2.3.1", + ":pin-project-lite-0.2.13", + ":rustls-0.21.10", + ":rustls-pemfile-1.0.4", + ":tokio-1.35.0", ":tokio-rustls-0.24.1", - ":tokio-util-0.7.8", - ":webpki-roots-0.25.2", + ":tokio-util-0.7.10", + 
":webpki-roots-0.25.3", ":winreg-0.50.0", ], ), }, visibility = [], deps = [ - ":base64-0.21.2", - ":bytes-1.4.0", + ":base64-0.21.5", + ":bytes-1.5.0", ":futures-core-0.3.29", ":futures-util-0.3.29", - ":http-0.2.9", + ":http-0.2.11", ":mime_guess-2.0.4", - ":serde-1.0.186", - ":serde_json-1.0.105", + ":serde-1.0.193", + ":serde_json-1.0.108", ":serde_urlencoded-0.7.1", ":tower-service-0.3.2", - ":url-2.4.0", + ":url-2.5.0", ], ) @@ -10908,16 +11130,16 @@ cargo.rust_library( platform = { "linux-arm64": dict( deps = [ - ":libc-0.2.147", - ":once_cell-1.18.0", + ":libc-0.2.151", + ":once_cell-1.19.0", ":ring-0.16.20-ring-c-asm-elf-aarch64", ":spin-0.5.2", ], ), "linux-x86_64": dict( deps = [ - ":libc-0.2.147", - ":once_cell-1.18.0", + ":libc-0.2.151", + ":once_cell-1.19.0", ":ring-0.16.20-ring-c-asm-elf-x86_84", ":spin-0.5.2", ], @@ -11327,58 +11549,563 @@ cxx_library( ) http_archive( - name = "rkyv-0.7.42.crate", - sha256 = "0200c8230b013893c0b2d6213d6ec64ed2b9be2e0e016682b7224ff82cff5c58", - strip_prefix = "rkyv-0.7.42", - urls = ["https://crates.io/api/v1/crates/rkyv/0.7.42/download"], - visibility = [], -) - -cargo.rust_library( - name = "rkyv-0.7.42", - srcs = [":rkyv-0.7.42.crate"], - crate = "rkyv", - crate_root = "rkyv-0.7.42.crate/src/lib.rs", - edition = "2021", - features = [ - "alloc", - "hashbrown", - "size_32", - "std", - ], - visibility = [], - deps = [ - ":bitvec-1.0.1", - ":bytecheck-0.6.11", - ":hashbrown-0.12.3", - ":ptr_meta-0.1.4", - ":rend-0.4.0", - ":rkyv_derive-0.7.42", - ":seahash-4.1.0", - ":tinyvec-1.6.0", - ":uuid-1.4.1", - ], -) - -http_archive( - name = "rkyv_derive-0.7.42.crate", - sha256 = "b2e06b915b5c230a17d7a736d1e2e63ee753c256a8614ef3f5147b13a4f5541d", - strip_prefix = "rkyv_derive-0.7.42", - urls = ["https://crates.io/api/v1/crates/rkyv_derive/0.7.42/download"], - visibility = [], -) - -cargo.rust_library( - name = "rkyv_derive-0.7.42", - srcs = [":rkyv_derive-0.7.42.crate"], - crate = "rkyv_derive", - crate_root = 
"rkyv_derive-0.7.42.crate/src/lib.rs", - edition = "2021", - features = ["default"], + name = "ring-0.17.7.crate", + sha256 = "688c63d65483050968b2a8937f7995f443e27041a0f7700aa59b0822aedebb74", + strip_prefix = "ring-0.17.7", + sub_targets = [ + "crypto/constant_time_test.c", + "crypto/cpu_intel.c", + "crypto/crypto.c", + "crypto/curve25519/curve25519.c", + "crypto/curve25519/curve25519_64_adx.c", + "crypto/curve25519/curve25519_tables.h", + "crypto/curve25519/internal.h", + "crypto/fipsmodule/aes/aes_nohw.c", + "crypto/fipsmodule/bn/internal.h", + "crypto/fipsmodule/bn/montgomery.c", + "crypto/fipsmodule/bn/montgomery_inv.c", + "crypto/fipsmodule/ec/ecp_nistz.c", + "crypto/fipsmodule/ec/ecp_nistz.h", + "crypto/fipsmodule/ec/ecp_nistz384.h", + "crypto/fipsmodule/ec/ecp_nistz384.inl", + "crypto/fipsmodule/ec/gfp_p256.c", + "crypto/fipsmodule/ec/gfp_p384.c", + "crypto/fipsmodule/ec/p256.c", + "crypto/fipsmodule/ec/p256-nistz.c", + "crypto/fipsmodule/ec/p256-nistz.h", + "crypto/fipsmodule/ec/p256-nistz-table.h", + "crypto/fipsmodule/ec/p256_shared.h", + "crypto/fipsmodule/ec/p256_table.h", + "crypto/fipsmodule/ec/util.h", + "crypto/internal.h", + "crypto/limbs/limbs.c", + "crypto/limbs/limbs.h", + "crypto/limbs/limbs.inl", + "crypto/mem.c", + "crypto/poly1305/internal.h", + "crypto/poly1305/poly1305.c", + "crypto/poly1305/poly1305_arm.c", + "crypto/poly1305/poly1305_vec.c", + "include/ring-core/aes.h", + "include/ring-core/arm_arch.h", + "include/ring-core/asm_base.h", + "include/ring-core/base.h", + "include/ring-core/check.h", + "include/ring-core/mem.h", + "include/ring-core/poly1305.h", + "include/ring-core/target.h", + "include/ring-core/type_check.h", + "pregenerated/aesni-gcm-x86_64-elf.S", + "pregenerated/aesni-gcm-x86_64-macosx.S", + "pregenerated/aesni-x86_64-elf.S", + "pregenerated/aesni-x86_64-macosx.S", + "pregenerated/aesv8-armx-ios64.S", + "pregenerated/aesv8-armx-linux64.S", + "pregenerated/armv8-mont-ios64.S", + "pregenerated/armv8-mont-linux64.S", + 
"pregenerated/chacha-armv8-ios64.S", + "pregenerated/chacha-armv8-linux64.S", + "pregenerated/chacha-x86_64-elf.S", + "pregenerated/chacha-x86_64-macosx.S", + "pregenerated/chacha20_poly1305_armv8-ios64.S", + "pregenerated/chacha20_poly1305_armv8-linux64.S", + "pregenerated/chacha20_poly1305_x86_64-elf.S", + "pregenerated/chacha20_poly1305_x86_64-macosx.S", + "pregenerated/ghash-neon-armv8-ios64.S", + "pregenerated/ghash-neon-armv8-linux64.S", + "pregenerated/ghash-x86_64-elf.S", + "pregenerated/ghash-x86_64-macosx.S", + "pregenerated/ghashv8-armx-ios64.S", + "pregenerated/ghashv8-armx-linux64.S", + "pregenerated/p256-armv8-asm-ios64.S", + "pregenerated/p256-armv8-asm-linux64.S", + "pregenerated/p256-x86_64-asm-elf.S", + "pregenerated/p256-x86_64-asm-macosx.S", + "pregenerated/sha256-armv8-ios64.S", + "pregenerated/sha256-armv8-linux64.S", + "pregenerated/sha256-x86_64-elf.S", + "pregenerated/sha256-x86_64-macosx.S", + "pregenerated/sha512-armv8-ios64.S", + "pregenerated/sha512-armv8-linux64.S", + "pregenerated/sha512-x86_64-elf.S", + "pregenerated/sha512-x86_64-macosx.S", + "pregenerated/vpaes-armv8-ios64.S", + "pregenerated/vpaes-armv8-linux64.S", + "pregenerated/vpaes-x86_64-elf.S", + "pregenerated/vpaes-x86_64-macosx.S", + "pregenerated/x86_64-mont-elf.S", + "pregenerated/x86_64-mont-macosx.S", + "pregenerated/x86_64-mont5-elf.S", + "pregenerated/x86_64-mont5-macosx.S", + "third_party/fiat/curve25519_32.h", + "third_party/fiat/curve25519_64.h", + "third_party/fiat/curve25519_64_adx.h", + "third_party/fiat/curve25519_64_msvc.h", + "third_party/fiat/p256_32.h", + "third_party/fiat/p256_64.h", + "third_party/fiat/p256_64_msvc.h", + ], + urls = ["https://crates.io/api/v1/crates/ring/0.17.7/download"], + visibility = [], +) + +cargo.rust_library( + name = "ring-0.17.7", + srcs = [":ring-0.17.7.crate"], + crate = "ring", + crate_root = "ring-0.17.7.crate/src/lib.rs", + edition = "2021", + features = [ + "alloc", + "default", + "dev_urandom_fallback", + ], + platform 
= { + "linux-arm64": dict( + deps = [ + ":libc-0.2.151", + ":ring-0.17.7-ring-c-asm-elf-aarch64", + ":spin-0.9.8", + ], + ), + "linux-x86_64": dict( + deps = [ + ":ring-0.17.7-ring-c-asm-elf-x86_84", + ":spin-0.9.8", + ], + ), + "macos-arm64": dict( + deps = [ + ":ring-0.17.7-ring-c-asm-macos-arm64", + ":spin-0.9.8", + ], + ), + "macos-x86_64": dict( + deps = [ + ":ring-0.17.7-ring-c-asm-macos-x86_64", + ":spin-0.9.8", + ], + ), + "windows-gnu": dict( + deps = [ + ":ring-0.17.7-ring-c-win-x86_84", + ":spin-0.9.8", + ], + ), + "windows-msvc": dict( + deps = [ + ":ring-0.17.7-ring-c-win-x86_84", + ":spin-0.9.8", + ], + ), + }, + visibility = [], + deps = [ + ":getrandom-0.2.11", + ":untrusted-0.9.0", + ], +) + +cxx_library( + name = "ring-0.17.7-ring-c-asm-elf-aarch64", + srcs = [ + ":ring-0.17.7.crate[crypto/constant_time_test.c]", + ":ring-0.17.7.crate[crypto/cpu_intel.c]", + ":ring-0.17.7.crate[crypto/crypto.c]", + ":ring-0.17.7.crate[crypto/curve25519/curve25519.c]", + ":ring-0.17.7.crate[crypto/curve25519/curve25519_64_adx.c]", + ":ring-0.17.7.crate[crypto/fipsmodule/aes/aes_nohw.c]", + ":ring-0.17.7.crate[crypto/fipsmodule/bn/montgomery.c]", + ":ring-0.17.7.crate[crypto/fipsmodule/bn/montgomery_inv.c]", + ":ring-0.17.7.crate[crypto/fipsmodule/ec/ecp_nistz.c]", + ":ring-0.17.7.crate[crypto/fipsmodule/ec/gfp_p256.c]", + ":ring-0.17.7.crate[crypto/fipsmodule/ec/gfp_p384.c]", + ":ring-0.17.7.crate[crypto/fipsmodule/ec/p256.c]", + ":ring-0.17.7.crate[crypto/fipsmodule/ec/p256-nistz.c]", + ":ring-0.17.7.crate[crypto/limbs/limbs.c]", + ":ring-0.17.7.crate[crypto/mem.c]", + ":ring-0.17.7.crate[crypto/poly1305/poly1305.c]", + ":ring-0.17.7.crate[crypto/poly1305/poly1305_arm.c]", + ":ring-0.17.7.crate[crypto/poly1305/poly1305_vec.c]", + ":ring-0.17.7.crate[pregenerated/aesv8-armx-linux64.S]", + ":ring-0.17.7.crate[pregenerated/armv8-mont-linux64.S]", + ":ring-0.17.7.crate[pregenerated/chacha-armv8-linux64.S]", + 
":ring-0.17.7.crate[pregenerated/chacha20_poly1305_armv8-linux64.S]", + ":ring-0.17.7.crate[pregenerated/ghash-neon-armv8-linux64.S]", + ":ring-0.17.7.crate[pregenerated/ghashv8-armx-linux64.S]", + ":ring-0.17.7.crate[pregenerated/p256-armv8-asm-linux64.S]", + ":ring-0.17.7.crate[pregenerated/sha256-armv8-linux64.S]", + ":ring-0.17.7.crate[pregenerated/sha512-armv8-linux64.S]", + ":ring-0.17.7.crate[pregenerated/vpaes-armv8-linux64.S]", + ], + headers = [ + ":ring-0.17.7.crate[crypto/curve25519/curve25519_tables.h]", + ":ring-0.17.7.crate[crypto/curve25519/internal.h]", + ":ring-0.17.7.crate[crypto/fipsmodule/bn/internal.h]", + ":ring-0.17.7.crate[crypto/fipsmodule/ec/ecp_nistz.h]", + ":ring-0.17.7.crate[crypto/fipsmodule/ec/ecp_nistz384.h]", + ":ring-0.17.7.crate[crypto/fipsmodule/ec/ecp_nistz384.inl]", + ":ring-0.17.7.crate[crypto/fipsmodule/ec/p256-nistz.h]", + ":ring-0.17.7.crate[crypto/fipsmodule/ec/p256-nistz-table.h]", + ":ring-0.17.7.crate[crypto/fipsmodule/ec/p256_shared.h]", + ":ring-0.17.7.crate[crypto/fipsmodule/ec/p256_table.h]", + ":ring-0.17.7.crate[crypto/fipsmodule/ec/util.h]", + ":ring-0.17.7.crate[crypto/internal.h]", + ":ring-0.17.7.crate[crypto/limbs/limbs.h]", + ":ring-0.17.7.crate[crypto/limbs/limbs.inl]", + ":ring-0.17.7.crate[crypto/poly1305/internal.h]", + ":ring-0.17.7.crate[include/ring-core/aes.h]", + ":ring-0.17.7.crate[include/ring-core/arm_arch.h]", + ":ring-0.17.7.crate[include/ring-core/asm_base.h]", + ":ring-0.17.7.crate[include/ring-core/base.h]", + ":ring-0.17.7.crate[include/ring-core/check.h]", + ":ring-0.17.7.crate[include/ring-core/mem.h]", + ":ring-0.17.7.crate[include/ring-core/poly1305.h]", + ":ring-0.17.7.crate[include/ring-core/target.h]", + ":ring-0.17.7.crate[include/ring-core/type_check.h]", + ":ring-0.17.7.crate[third_party/fiat/curve25519_32.h]", + ":ring-0.17.7.crate[third_party/fiat/curve25519_64.h]", + ":ring-0.17.7.crate[third_party/fiat/curve25519_64_adx.h]", + 
":ring-0.17.7.crate[third_party/fiat/curve25519_64_msvc.h]", + ":ring-0.17.7.crate[third_party/fiat/p256_32.h]", + ":ring-0.17.7.crate[third_party/fiat/p256_64.h]", + ":ring-0.17.7.crate[third_party/fiat/p256_64_msvc.h]", + ], + compiler_flags = ["-Wno-error"], + preferred_linkage = "static", + preprocessor_flags = ["-I$(location :ring-0.17.7.crate)/include"], + visibility = [], +) + +cxx_library( + name = "ring-0.17.7-ring-c-asm-elf-x86_84", + srcs = [ + ":ring-0.17.7.crate[crypto/constant_time_test.c]", + ":ring-0.17.7.crate[crypto/cpu_intel.c]", + ":ring-0.17.7.crate[crypto/crypto.c]", + ":ring-0.17.7.crate[crypto/curve25519/curve25519.c]", + ":ring-0.17.7.crate[crypto/curve25519/curve25519_64_adx.c]", + ":ring-0.17.7.crate[crypto/fipsmodule/aes/aes_nohw.c]", + ":ring-0.17.7.crate[crypto/fipsmodule/bn/montgomery.c]", + ":ring-0.17.7.crate[crypto/fipsmodule/bn/montgomery_inv.c]", + ":ring-0.17.7.crate[crypto/fipsmodule/ec/ecp_nistz.c]", + ":ring-0.17.7.crate[crypto/fipsmodule/ec/gfp_p256.c]", + ":ring-0.17.7.crate[crypto/fipsmodule/ec/gfp_p384.c]", + ":ring-0.17.7.crate[crypto/fipsmodule/ec/p256.c]", + ":ring-0.17.7.crate[crypto/fipsmodule/ec/p256-nistz.c]", + ":ring-0.17.7.crate[crypto/limbs/limbs.c]", + ":ring-0.17.7.crate[crypto/mem.c]", + ":ring-0.17.7.crate[crypto/poly1305/poly1305.c]", + ":ring-0.17.7.crate[crypto/poly1305/poly1305_arm.c]", + ":ring-0.17.7.crate[crypto/poly1305/poly1305_vec.c]", + ":ring-0.17.7.crate[pregenerated/aesni-gcm-x86_64-elf.S]", + ":ring-0.17.7.crate[pregenerated/aesni-x86_64-elf.S]", + ":ring-0.17.7.crate[pregenerated/chacha-x86_64-elf.S]", + ":ring-0.17.7.crate[pregenerated/chacha20_poly1305_x86_64-elf.S]", + ":ring-0.17.7.crate[pregenerated/ghash-x86_64-elf.S]", + ":ring-0.17.7.crate[pregenerated/p256-x86_64-asm-elf.S]", + ":ring-0.17.7.crate[pregenerated/sha256-x86_64-elf.S]", + ":ring-0.17.7.crate[pregenerated/sha512-x86_64-elf.S]", + ":ring-0.17.7.crate[pregenerated/vpaes-x86_64-elf.S]", + 
":ring-0.17.7.crate[pregenerated/x86_64-mont-elf.S]", + ":ring-0.17.7.crate[pregenerated/x86_64-mont5-elf.S]", + ], + headers = [ + ":ring-0.17.7.crate[crypto/curve25519/curve25519_tables.h]", + ":ring-0.17.7.crate[crypto/curve25519/internal.h]", + ":ring-0.17.7.crate[crypto/fipsmodule/bn/internal.h]", + ":ring-0.17.7.crate[crypto/fipsmodule/ec/ecp_nistz.h]", + ":ring-0.17.7.crate[crypto/fipsmodule/ec/ecp_nistz384.h]", + ":ring-0.17.7.crate[crypto/fipsmodule/ec/ecp_nistz384.inl]", + ":ring-0.17.7.crate[crypto/fipsmodule/ec/p256-nistz.h]", + ":ring-0.17.7.crate[crypto/fipsmodule/ec/p256-nistz-table.h]", + ":ring-0.17.7.crate[crypto/fipsmodule/ec/p256_shared.h]", + ":ring-0.17.7.crate[crypto/fipsmodule/ec/p256_table.h]", + ":ring-0.17.7.crate[crypto/fipsmodule/ec/util.h]", + ":ring-0.17.7.crate[crypto/internal.h]", + ":ring-0.17.7.crate[crypto/limbs/limbs.h]", + ":ring-0.17.7.crate[crypto/limbs/limbs.inl]", + ":ring-0.17.7.crate[crypto/poly1305/internal.h]", + ":ring-0.17.7.crate[include/ring-core/aes.h]", + ":ring-0.17.7.crate[include/ring-core/arm_arch.h]", + ":ring-0.17.7.crate[include/ring-core/asm_base.h]", + ":ring-0.17.7.crate[include/ring-core/base.h]", + ":ring-0.17.7.crate[include/ring-core/check.h]", + ":ring-0.17.7.crate[include/ring-core/mem.h]", + ":ring-0.17.7.crate[include/ring-core/poly1305.h]", + ":ring-0.17.7.crate[include/ring-core/target.h]", + ":ring-0.17.7.crate[include/ring-core/type_check.h]", + ":ring-0.17.7.crate[third_party/fiat/curve25519_32.h]", + ":ring-0.17.7.crate[third_party/fiat/curve25519_64.h]", + ":ring-0.17.7.crate[third_party/fiat/curve25519_64_adx.h]", + ":ring-0.17.7.crate[third_party/fiat/curve25519_64_msvc.h]", + ":ring-0.17.7.crate[third_party/fiat/p256_32.h]", + ":ring-0.17.7.crate[third_party/fiat/p256_64.h]", + ":ring-0.17.7.crate[third_party/fiat/p256_64_msvc.h]", + ], + compiler_flags = ["-Wno-error"], + preferred_linkage = "static", + preprocessor_flags = ["-I$(location :ring-0.17.7.crate)/include"], + visibility = 
[], +) + +cxx_library( + name = "ring-0.17.7-ring-c-asm-macos-arm64", + srcs = [ + ":ring-0.17.7.crate[crypto/constant_time_test.c]", + ":ring-0.17.7.crate[crypto/cpu_intel.c]", + ":ring-0.17.7.crate[crypto/crypto.c]", + ":ring-0.17.7.crate[crypto/curve25519/curve25519.c]", + ":ring-0.17.7.crate[crypto/curve25519/curve25519_64_adx.c]", + ":ring-0.17.7.crate[crypto/fipsmodule/aes/aes_nohw.c]", + ":ring-0.17.7.crate[crypto/fipsmodule/bn/montgomery.c]", + ":ring-0.17.7.crate[crypto/fipsmodule/bn/montgomery_inv.c]", + ":ring-0.17.7.crate[crypto/fipsmodule/ec/ecp_nistz.c]", + ":ring-0.17.7.crate[crypto/fipsmodule/ec/gfp_p256.c]", + ":ring-0.17.7.crate[crypto/fipsmodule/ec/gfp_p384.c]", + ":ring-0.17.7.crate[crypto/fipsmodule/ec/p256.c]", + ":ring-0.17.7.crate[crypto/fipsmodule/ec/p256-nistz.c]", + ":ring-0.17.7.crate[crypto/limbs/limbs.c]", + ":ring-0.17.7.crate[crypto/mem.c]", + ":ring-0.17.7.crate[crypto/poly1305/poly1305.c]", + ":ring-0.17.7.crate[crypto/poly1305/poly1305_arm.c]", + ":ring-0.17.7.crate[crypto/poly1305/poly1305_vec.c]", + ":ring-0.17.7.crate[pregenerated/aesv8-armx-ios64.S]", + ":ring-0.17.7.crate[pregenerated/armv8-mont-ios64.S]", + ":ring-0.17.7.crate[pregenerated/chacha-armv8-ios64.S]", + ":ring-0.17.7.crate[pregenerated/chacha20_poly1305_armv8-ios64.S]", + ":ring-0.17.7.crate[pregenerated/ghash-neon-armv8-ios64.S]", + ":ring-0.17.7.crate[pregenerated/ghashv8-armx-ios64.S]", + ":ring-0.17.7.crate[pregenerated/p256-armv8-asm-ios64.S]", + ":ring-0.17.7.crate[pregenerated/sha256-armv8-ios64.S]", + ":ring-0.17.7.crate[pregenerated/sha512-armv8-ios64.S]", + ":ring-0.17.7.crate[pregenerated/vpaes-armv8-ios64.S]", + ], + headers = [ + ":ring-0.17.7.crate[crypto/curve25519/curve25519_tables.h]", + ":ring-0.17.7.crate[crypto/curve25519/internal.h]", + ":ring-0.17.7.crate[crypto/fipsmodule/bn/internal.h]", + ":ring-0.17.7.crate[crypto/fipsmodule/ec/ecp_nistz.h]", + ":ring-0.17.7.crate[crypto/fipsmodule/ec/ecp_nistz384.h]", + 
":ring-0.17.7.crate[crypto/fipsmodule/ec/ecp_nistz384.inl]", + ":ring-0.17.7.crate[crypto/fipsmodule/ec/p256-nistz.h]", + ":ring-0.17.7.crate[crypto/fipsmodule/ec/p256-nistz-table.h]", + ":ring-0.17.7.crate[crypto/fipsmodule/ec/p256_shared.h]", + ":ring-0.17.7.crate[crypto/fipsmodule/ec/p256_table.h]", + ":ring-0.17.7.crate[crypto/fipsmodule/ec/util.h]", + ":ring-0.17.7.crate[crypto/internal.h]", + ":ring-0.17.7.crate[crypto/limbs/limbs.h]", + ":ring-0.17.7.crate[crypto/limbs/limbs.inl]", + ":ring-0.17.7.crate[crypto/poly1305/internal.h]", + ":ring-0.17.7.crate[include/ring-core/aes.h]", + ":ring-0.17.7.crate[include/ring-core/arm_arch.h]", + ":ring-0.17.7.crate[include/ring-core/asm_base.h]", + ":ring-0.17.7.crate[include/ring-core/base.h]", + ":ring-0.17.7.crate[include/ring-core/check.h]", + ":ring-0.17.7.crate[include/ring-core/mem.h]", + ":ring-0.17.7.crate[include/ring-core/poly1305.h]", + ":ring-0.17.7.crate[include/ring-core/target.h]", + ":ring-0.17.7.crate[include/ring-core/type_check.h]", + ":ring-0.17.7.crate[third_party/fiat/curve25519_32.h]", + ":ring-0.17.7.crate[third_party/fiat/curve25519_64.h]", + ":ring-0.17.7.crate[third_party/fiat/curve25519_64_adx.h]", + ":ring-0.17.7.crate[third_party/fiat/curve25519_64_msvc.h]", + ":ring-0.17.7.crate[third_party/fiat/p256_32.h]", + ":ring-0.17.7.crate[third_party/fiat/p256_64.h]", + ":ring-0.17.7.crate[third_party/fiat/p256_64_msvc.h]", + ], + compiler_flags = ["-Wno-error"], + preferred_linkage = "static", + preprocessor_flags = ["-I$(location :ring-0.17.7.crate)/include"], + visibility = [], +) + +cxx_library( + name = "ring-0.17.7-ring-c-asm-macos-x86_64", + srcs = [ + ":ring-0.17.7.crate[crypto/constant_time_test.c]", + ":ring-0.17.7.crate[crypto/cpu_intel.c]", + ":ring-0.17.7.crate[crypto/crypto.c]", + ":ring-0.17.7.crate[crypto/curve25519/curve25519.c]", + ":ring-0.17.7.crate[crypto/curve25519/curve25519_64_adx.c]", + ":ring-0.17.7.crate[crypto/fipsmodule/aes/aes_nohw.c]", + 
":ring-0.17.7.crate[crypto/fipsmodule/bn/montgomery.c]", + ":ring-0.17.7.crate[crypto/fipsmodule/bn/montgomery_inv.c]", + ":ring-0.17.7.crate[crypto/fipsmodule/ec/ecp_nistz.c]", + ":ring-0.17.7.crate[crypto/fipsmodule/ec/gfp_p256.c]", + ":ring-0.17.7.crate[crypto/fipsmodule/ec/gfp_p384.c]", + ":ring-0.17.7.crate[crypto/fipsmodule/ec/p256.c]", + ":ring-0.17.7.crate[crypto/fipsmodule/ec/p256-nistz.c]", + ":ring-0.17.7.crate[crypto/limbs/limbs.c]", + ":ring-0.17.7.crate[crypto/mem.c]", + ":ring-0.17.7.crate[crypto/poly1305/poly1305.c]", + ":ring-0.17.7.crate[crypto/poly1305/poly1305_arm.c]", + ":ring-0.17.7.crate[crypto/poly1305/poly1305_vec.c]", + ":ring-0.17.7.crate[pregenerated/aesni-gcm-x86_64-macosx.S]", + ":ring-0.17.7.crate[pregenerated/aesni-x86_64-macosx.S]", + ":ring-0.17.7.crate[pregenerated/chacha-x86_64-macosx.S]", + ":ring-0.17.7.crate[pregenerated/chacha20_poly1305_x86_64-macosx.S]", + ":ring-0.17.7.crate[pregenerated/ghash-x86_64-macosx.S]", + ":ring-0.17.7.crate[pregenerated/p256-x86_64-asm-macosx.S]", + ":ring-0.17.7.crate[pregenerated/sha256-x86_64-macosx.S]", + ":ring-0.17.7.crate[pregenerated/sha512-x86_64-macosx.S]", + ":ring-0.17.7.crate[pregenerated/vpaes-x86_64-macosx.S]", + ":ring-0.17.7.crate[pregenerated/x86_64-mont-macosx.S]", + ":ring-0.17.7.crate[pregenerated/x86_64-mont5-macosx.S]", + ], + headers = [ + ":ring-0.17.7.crate[crypto/curve25519/curve25519_tables.h]", + ":ring-0.17.7.crate[crypto/curve25519/internal.h]", + ":ring-0.17.7.crate[crypto/fipsmodule/bn/internal.h]", + ":ring-0.17.7.crate[crypto/fipsmodule/ec/ecp_nistz.h]", + ":ring-0.17.7.crate[crypto/fipsmodule/ec/ecp_nistz384.h]", + ":ring-0.17.7.crate[crypto/fipsmodule/ec/ecp_nistz384.inl]", + ":ring-0.17.7.crate[crypto/fipsmodule/ec/p256-nistz.h]", + ":ring-0.17.7.crate[crypto/fipsmodule/ec/p256-nistz-table.h]", + ":ring-0.17.7.crate[crypto/fipsmodule/ec/p256_shared.h]", + ":ring-0.17.7.crate[crypto/fipsmodule/ec/p256_table.h]", + 
":ring-0.17.7.crate[crypto/fipsmodule/ec/util.h]", + ":ring-0.17.7.crate[crypto/internal.h]", + ":ring-0.17.7.crate[crypto/limbs/limbs.h]", + ":ring-0.17.7.crate[crypto/limbs/limbs.inl]", + ":ring-0.17.7.crate[crypto/poly1305/internal.h]", + ":ring-0.17.7.crate[include/ring-core/aes.h]", + ":ring-0.17.7.crate[include/ring-core/arm_arch.h]", + ":ring-0.17.7.crate[include/ring-core/asm_base.h]", + ":ring-0.17.7.crate[include/ring-core/base.h]", + ":ring-0.17.7.crate[include/ring-core/check.h]", + ":ring-0.17.7.crate[include/ring-core/mem.h]", + ":ring-0.17.7.crate[include/ring-core/poly1305.h]", + ":ring-0.17.7.crate[include/ring-core/target.h]", + ":ring-0.17.7.crate[include/ring-core/type_check.h]", + ":ring-0.17.7.crate[third_party/fiat/curve25519_32.h]", + ":ring-0.17.7.crate[third_party/fiat/curve25519_64.h]", + ":ring-0.17.7.crate[third_party/fiat/curve25519_64_adx.h]", + ":ring-0.17.7.crate[third_party/fiat/curve25519_64_msvc.h]", + ":ring-0.17.7.crate[third_party/fiat/p256_32.h]", + ":ring-0.17.7.crate[third_party/fiat/p256_64.h]", + ":ring-0.17.7.crate[third_party/fiat/p256_64_msvc.h]", + ], + compiler_flags = ["-Wno-error"], + preferred_linkage = "static", + preprocessor_flags = ["-I$(location :ring-0.17.7.crate)/include"], + visibility = [], +) + +cxx_library( + name = "ring-0.17.7-ring-c-win-x86_84", + srcs = [ + ":ring-0.17.7.crate[crypto/constant_time_test.c]", + ":ring-0.17.7.crate[crypto/cpu_intel.c]", + ":ring-0.17.7.crate[crypto/crypto.c]", + ":ring-0.17.7.crate[crypto/curve25519/curve25519.c]", + ":ring-0.17.7.crate[crypto/curve25519/curve25519_64_adx.c]", + ":ring-0.17.7.crate[crypto/fipsmodule/aes/aes_nohw.c]", + ":ring-0.17.7.crate[crypto/fipsmodule/bn/montgomery.c]", + ":ring-0.17.7.crate[crypto/fipsmodule/bn/montgomery_inv.c]", + ":ring-0.17.7.crate[crypto/fipsmodule/ec/ecp_nistz.c]", + ":ring-0.17.7.crate[crypto/fipsmodule/ec/gfp_p256.c]", + ":ring-0.17.7.crate[crypto/fipsmodule/ec/gfp_p384.c]", + 
":ring-0.17.7.crate[crypto/fipsmodule/ec/p256.c]", + ":ring-0.17.7.crate[crypto/fipsmodule/ec/p256-nistz.c]", + ":ring-0.17.7.crate[crypto/limbs/limbs.c]", + ":ring-0.17.7.crate[crypto/mem.c]", + ":ring-0.17.7.crate[crypto/poly1305/poly1305.c]", + ":ring-0.17.7.crate[crypto/poly1305/poly1305_arm.c]", + ":ring-0.17.7.crate[crypto/poly1305/poly1305_vec.c]", + ], + headers = [ + ":ring-0.17.7.crate[crypto/curve25519/curve25519_tables.h]", + ":ring-0.17.7.crate[crypto/curve25519/internal.h]", + ":ring-0.17.7.crate[crypto/fipsmodule/bn/internal.h]", + ":ring-0.17.7.crate[crypto/fipsmodule/ec/ecp_nistz.h]", + ":ring-0.17.7.crate[crypto/fipsmodule/ec/ecp_nistz384.h]", + ":ring-0.17.7.crate[crypto/fipsmodule/ec/ecp_nistz384.inl]", + ":ring-0.17.7.crate[crypto/fipsmodule/ec/p256-nistz.h]", + ":ring-0.17.7.crate[crypto/fipsmodule/ec/p256-nistz-table.h]", + ":ring-0.17.7.crate[crypto/fipsmodule/ec/p256_shared.h]", + ":ring-0.17.7.crate[crypto/fipsmodule/ec/p256_table.h]", + ":ring-0.17.7.crate[crypto/fipsmodule/ec/util.h]", + ":ring-0.17.7.crate[crypto/internal.h]", + ":ring-0.17.7.crate[crypto/limbs/limbs.h]", + ":ring-0.17.7.crate[crypto/limbs/limbs.inl]", + ":ring-0.17.7.crate[crypto/poly1305/internal.h]", + ":ring-0.17.7.crate[include/ring-core/aes.h]", + ":ring-0.17.7.crate[include/ring-core/arm_arch.h]", + ":ring-0.17.7.crate[include/ring-core/asm_base.h]", + ":ring-0.17.7.crate[include/ring-core/base.h]", + ":ring-0.17.7.crate[include/ring-core/check.h]", + ":ring-0.17.7.crate[include/ring-core/mem.h]", + ":ring-0.17.7.crate[include/ring-core/poly1305.h]", + ":ring-0.17.7.crate[include/ring-core/target.h]", + ":ring-0.17.7.crate[include/ring-core/type_check.h]", + ":ring-0.17.7.crate[third_party/fiat/curve25519_32.h]", + ":ring-0.17.7.crate[third_party/fiat/curve25519_64.h]", + ":ring-0.17.7.crate[third_party/fiat/curve25519_64_adx.h]", + ":ring-0.17.7.crate[third_party/fiat/curve25519_64_msvc.h]", + ":ring-0.17.7.crate[third_party/fiat/p256_32.h]", + 
":ring-0.17.7.crate[third_party/fiat/p256_64.h]", + ":ring-0.17.7.crate[third_party/fiat/p256_64_msvc.h]", + ], + compiler_flags = ["-Wno-error"], + preferred_linkage = "static", + preprocessor_flags = ["-I$(location :ring-0.17.7.crate)/include"], + visibility = [], +) + +http_archive( + name = "rkyv-0.7.42.crate", + sha256 = "0200c8230b013893c0b2d6213d6ec64ed2b9be2e0e016682b7224ff82cff5c58", + strip_prefix = "rkyv-0.7.42", + urls = ["https://crates.io/api/v1/crates/rkyv/0.7.42/download"], + visibility = [], +) + +cargo.rust_library( + name = "rkyv-0.7.42", + srcs = [":rkyv-0.7.42.crate"], + crate = "rkyv", + crate_root = "rkyv-0.7.42.crate/src/lib.rs", + edition = "2021", + features = [ + "alloc", + "hashbrown", + "size_32", + "std", + ], + visibility = [], + deps = [ + ":bitvec-1.0.1", + ":bytecheck-0.6.11", + ":hashbrown-0.12.3", + ":ptr_meta-0.1.4", + ":rend-0.4.1", + ":rkyv_derive-0.7.42", + ":seahash-4.1.0", + ":tinyvec-1.6.0", + ":uuid-1.6.1", + ], +) + +http_archive( + name = "rkyv_derive-0.7.42.crate", + sha256 = "b2e06b915b5c230a17d7a736d1e2e63ee753c256a8614ef3f5147b13a4f5541d", + strip_prefix = "rkyv_derive-0.7.42", + urls = ["https://crates.io/api/v1/crates/rkyv_derive/0.7.42/download"], + visibility = [], +) + +cargo.rust_library( + name = "rkyv_derive-0.7.42", + srcs = [":rkyv_derive-0.7.42.crate"], + crate = "rkyv_derive", + crate_root = "rkyv_derive-0.7.42.crate/src/lib.rs", + edition = "2021", + features = ["default"], proc_macro = True, visibility = [], deps = [ - ":proc-macro2-1.0.66", + ":proc-macro2-1.0.70", ":quote-1.0.33", ":syn-1.0.109", ], @@ -11408,18 +12135,18 @@ cargo.rust_library( }, visibility = [], deps = [ - ":byteorder-1.4.3", + ":byteorder-1.5.0", ":digest-0.10.7", ":num-integer-0.1.45", ":num-iter-0.1.43", - ":num-traits-0.2.16", + ":num-traits-0.2.17", ":pkcs1-0.4.1", ":pkcs8-0.9.0", ":rand_core-0.6.4", ":signature-1.6.4", - ":smallvec-1.11.0", + ":smallvec-1.11.2", ":subtle-2.5.0", - ":zeroize-1.6.0", + ":zeroize-1.7.0", ], ) @@ 
-11475,49 +12202,49 @@ cargo.rust_library( ], visibility = [], deps = [ - ":async-trait-0.1.73", + ":async-trait-0.1.74", ":aws-creds-0.34.1", - ":aws-region-0.25.3", + ":aws-region-0.25.4", ":base64-0.13.1", - ":bytes-1.4.0", + ":bytes-1.5.0", ":cfg-if-1.0.0", - ":futures-0.3.28", + ":futures-0.3.29", ":hex-0.4.3", ":hmac-0.12.1", - ":http-0.2.9", + ":http-0.2.11", ":log-0.4.20", ":maybe-async-0.2.7", ":md5-0.7.0", - ":percent-encoding-2.3.0", + ":percent-encoding-2.3.1", ":quick-xml-0.26.0", - ":reqwest-0.11.20", - ":serde-1.0.186", - ":serde_derive-1.0.186", - ":sha2-0.10.7", - ":thiserror-1.0.47", - ":time-0.3.27", - ":tokio-1.32.0", + ":reqwest-0.11.22", + ":serde-1.0.193", + ":serde_derive-1.0.193", + ":sha2-0.10.8", + ":thiserror-1.0.50", + ":time-0.3.30", + ":tokio-1.35.0", ":tokio-stream-0.1.14", - ":url-2.4.0", + ":url-2.5.0", ], ) http_archive( - name = "rust_decimal-1.32.0.crate", - sha256 = "a4c4216490d5a413bc6d10fa4742bd7d4955941d062c0ef873141d6b0e7b30fd", - strip_prefix = "rust_decimal-1.32.0", - urls = ["https://crates.io/api/v1/crates/rust_decimal/1.32.0/download"], + name = "rust_decimal-1.33.1.crate", + sha256 = "06676aec5ccb8fc1da723cc8c0f9a46549f21ebb8753d3915c6c41db1e7f1dc4", + strip_prefix = "rust_decimal-1.33.1", + urls = ["https://crates.io/api/v1/crates/rust_decimal/1.33.1/download"], visibility = [], ) cargo.rust_library( - name = "rust_decimal-1.32.0", - srcs = [":rust_decimal-1.32.0.crate"], + name = "rust_decimal-1.33.1", + srcs = [":rust_decimal-1.33.1.crate"], crate = "rust_decimal", - crate_root = "rust_decimal-1.32.0.crate/src/lib.rs", + crate_root = "rust_decimal-1.33.1.crate/src/lib.rs", edition = "2021", env = { - "OUT_DIR": "$(location :rust_decimal-1.32.0-build-script-run[out_dir])", + "OUT_DIR": "$(location :rust_decimal-1.33.1-build-script-run[out_dir])", }, features = [ "default", @@ -11527,21 +12254,21 @@ cargo.rust_library( visibility = [], deps = [ ":arrayvec-0.7.4", - ":borsh-0.10.3", - ":bytes-1.4.0", - 
":num-traits-0.2.16", + ":borsh-1.2.1", + ":bytes-1.5.0", + ":num-traits-0.2.17", ":rand-0.8.5", ":rkyv-0.7.42", - ":serde-1.0.186", - ":serde_json-1.0.105", + ":serde-1.0.193", + ":serde_json-1.0.108", ], ) cargo.rust_binary( - name = "rust_decimal-1.32.0-build-script-build", - srcs = [":rust_decimal-1.32.0.crate"], + name = "rust_decimal-1.33.1-build-script-build", + srcs = [":rust_decimal-1.33.1.crate"], crate = "build_script_build", - crate_root = "rust_decimal-1.32.0.crate/build.rs", + crate_root = "rust_decimal-1.33.1.crate/build.rs", edition = "2021", features = [ "default", @@ -11552,15 +12279,15 @@ cargo.rust_binary( ) buildscript_run( - name = "rust_decimal-1.32.0-build-script-run", + name = "rust_decimal-1.33.1-build-script-run", package_name = "rust_decimal", - buildscript_rule = ":rust_decimal-1.32.0-build-script-build", + buildscript_rule = ":rust_decimal-1.33.1-build-script-build", features = [ "default", "serde", "std", ], - version = "1.32.0", + version = "1.33.1", ) http_archive( @@ -11581,120 +12308,39 @@ cargo.rust_library( ) http_archive( - name = "rustix-0.37.23.crate", - sha256 = "4d69718bf81c6127a49dc64e44a742e8bb9213c0ff8869a22c308f84c1d4ab06", - strip_prefix = "rustix-0.37.23", - urls = ["https://crates.io/api/v1/crates/rustix/0.37.23/download"], + name = "rustc_version-0.4.0.crate", + sha256 = "bfa0f585226d2e68097d4f95d113b15b83a82e819ab25717ec0590d9584ef366", + strip_prefix = "rustc_version-0.4.0", + urls = ["https://crates.io/api/v1/crates/rustc_version/0.4.0/download"], visibility = [], ) cargo.rust_library( - name = "rustix-0.37.23", - srcs = [":rustix-0.37.23.crate"], - crate = "rustix", - crate_root = "rustix-0.37.23.crate/src/lib.rs", + name = "rustc_version-0.4.0", + srcs = [":rustc_version-0.4.0.crate"], + crate = "rustc_version", + crate_root = "rustc_version-0.4.0.crate/src/lib.rs", edition = "2018", - features = [ - "default", - "io-lifetimes", - "libc", - "std", - "termios", - "use-libc-auxv", - ], - platform = { - 
"linux-arm64": dict( - deps = [ - ":libc-0.2.147", - ":linux-raw-sys-0.3.8", - ], - ), - "linux-x86_64": dict( - deps = [ - ":libc-0.2.147", - ":linux-raw-sys-0.3.8", - ], - ), - "macos-arm64": dict( - named_deps = { - "libc_errno": ":errno-0.3.2", - }, - deps = [":libc-0.2.147"], - ), - "macos-x86_64": dict( - named_deps = { - "libc_errno": ":errno-0.3.2", - }, - deps = [":libc-0.2.147"], - ), - "windows-gnu": dict( - named_deps = { - "libc_errno": ":errno-0.3.2", - }, - deps = [":windows-sys-0.48.0"], - ), - "windows-msvc": dict( - named_deps = { - "libc_errno": ":errno-0.3.2", - }, - deps = [":windows-sys-0.48.0"], - ), - }, - rustc_flags = ["@$(location :rustix-0.37.23-build-script-run[rustc_flags])"], - visibility = [], - deps = [ - ":bitflags-1.3.2", - ":io-lifetimes-1.0.11", - ], -) - -cargo.rust_binary( - name = "rustix-0.37.23-build-script-build", - srcs = [":rustix-0.37.23.crate"], - crate = "build_script_build", - crate_root = "rustix-0.37.23.crate/build.rs", - edition = "2018", - features = [ - "default", - "io-lifetimes", - "libc", - "std", - "termios", - "use-libc-auxv", - ], visibility = [], -) - -buildscript_run( - name = "rustix-0.37.23-build-script-run", - package_name = "rustix", - buildscript_rule = ":rustix-0.37.23-build-script-build", - features = [ - "default", - "io-lifetimes", - "libc", - "std", - "termios", - "use-libc-auxv", - ], - version = "0.37.23", + deps = [":semver-1.0.20"], ) http_archive( - name = "rustix-0.38.8.crate", - sha256 = "19ed4fa021d81c8392ce04db050a3da9a60299050b7ae1cf482d862b54a7218f", - strip_prefix = "rustix-0.38.8", - urls = ["https://crates.io/api/v1/crates/rustix/0.38.8/download"], + name = "rustix-0.38.28.crate", + sha256 = "72e572a5e8ca657d7366229cdde4bd14c4eb5499a9573d4d366fe1b599daa316", + strip_prefix = "rustix-0.38.28", + urls = ["https://crates.io/api/v1/crates/rustix/0.38.28/download"], visibility = [], ) cargo.rust_library( - name = "rustix-0.38.8", - srcs = [":rustix-0.38.8.crate"], + name = 
"rustix-0.38.28", + srcs = [":rustix-0.38.28.crate"], crate = "rustix", - crate_root = "rustix-0.38.8.crate/src/lib.rs", + crate_root = "rustix-0.38.28.crate/src/lib.rs", edition = "2021", features = [ + "alloc", "default", "fs", "std", @@ -11703,48 +12349,61 @@ cargo.rust_library( ], platform = { "linux-arm64": dict( - deps = [":linux-raw-sys-0.4.5"], + named_deps = { + "libc_errno": ":errno-0.3.8", + }, + deps = [ + ":libc-0.2.151", + ":linux-raw-sys-0.4.12", + ], ), "linux-x86_64": dict( - deps = [":linux-raw-sys-0.4.5"], + named_deps = { + "libc_errno": ":errno-0.3.8", + }, + deps = [ + ":libc-0.2.151", + ":linux-raw-sys-0.4.12", + ], ), "macos-arm64": dict( named_deps = { - "libc_errno": ":errno-0.3.2", + "libc_errno": ":errno-0.3.8", }, - deps = [":libc-0.2.147"], + deps = [":libc-0.2.151"], ), "macos-x86_64": dict( named_deps = { - "libc_errno": ":errno-0.3.2", + "libc_errno": ":errno-0.3.8", }, - deps = [":libc-0.2.147"], + deps = [":libc-0.2.151"], ), "windows-gnu": dict( named_deps = { - "libc_errno": ":errno-0.3.2", + "libc_errno": ":errno-0.3.8", }, - deps = [":windows-sys-0.48.0"], + deps = [":windows-sys-0.52.0"], ), "windows-msvc": dict( named_deps = { - "libc_errno": ":errno-0.3.2", + "libc_errno": ":errno-0.3.8", }, - deps = [":windows-sys-0.48.0"], + deps = [":windows-sys-0.52.0"], ), }, - rustc_flags = ["@$(location :rustix-0.38.8-build-script-run[rustc_flags])"], + rustc_flags = ["@$(location :rustix-0.38.28-build-script-run[rustc_flags])"], visibility = [], - deps = [":bitflags-2.4.0"], + deps = [":bitflags-2.4.1"], ) cargo.rust_binary( - name = "rustix-0.38.8-build-script-build", - srcs = [":rustix-0.38.8.crate"], + name = "rustix-0.38.28-build-script-build", + srcs = [":rustix-0.38.28.crate"], crate = "build_script_build", - crate_root = "rustix-0.38.8.crate/build.rs", + crate_root = "rustix-0.38.28.crate/build.rs", edition = "2021", features = [ + "alloc", "default", "fs", "std", @@ -11755,32 +12414,33 @@ cargo.rust_binary( ) 
buildscript_run( - name = "rustix-0.38.8-build-script-run", + name = "rustix-0.38.28-build-script-run", package_name = "rustix", - buildscript_rule = ":rustix-0.38.8-build-script-build", + buildscript_rule = ":rustix-0.38.28-build-script-build", features = [ + "alloc", "default", "fs", "std", "termios", "use-libc-auxv", ], - version = "0.38.8", + version = "0.38.28", ) http_archive( - name = "rustls-0.20.8.crate", - sha256 = "fff78fc74d175294f4e83b28343315ffcfb114b156f0185e9741cb5570f50e2f", - strip_prefix = "rustls-0.20.8", - urls = ["https://crates.io/api/v1/crates/rustls/0.20.8/download"], + name = "rustls-0.20.9.crate", + sha256 = "1b80e3dec595989ea8510028f30c408a4630db12c9cbb8de34203b89d6577e99", + strip_prefix = "rustls-0.20.9", + urls = ["https://crates.io/api/v1/crates/rustls/0.20.9/download"], visibility = [], ) cargo.rust_library( - name = "rustls-0.20.8", - srcs = [":rustls-0.20.8.crate"], + name = "rustls-0.20.9", + srcs = [":rustls-0.20.9.crate"], crate = "rustls", - crate_root = "rustls-0.20.8.crate/src/lib.rs", + crate_root = "rustls-0.20.9.crate/src/lib.rs", edition = "2018", features = [ "dangerous_configuration", @@ -11793,30 +12453,30 @@ cargo.rust_library( deps = [ ":log-0.4.20", ":ring-0.16.20", - ":sct-0.7.0", - ":webpki-0.22.0", + ":sct-0.7.1", + ":webpki-0.22.4", ], ) alias( name = "rustls", - actual = ":rustls-0.21.6", + actual = ":rustls-0.21.10", visibility = ["PUBLIC"], ) http_archive( - name = "rustls-0.21.6.crate", - sha256 = "1d1feddffcfcc0b33f5c6ce9a29e341e4cd59c3f78e7ee45f4a40c038b1d6cbb", - strip_prefix = "rustls-0.21.6", - urls = ["https://crates.io/api/v1/crates/rustls/0.21.6/download"], + name = "rustls-0.21.10.crate", + sha256 = "f9d5a6813c0759e4609cd494e8e725babae6a2ca7b62a5536a13daaec6fcb7ba", + strip_prefix = "rustls-0.21.10", + urls = ["https://crates.io/api/v1/crates/rustls/0.21.10/download"], visibility = [], ) cargo.rust_library( - name = "rustls-0.21.6", - srcs = [":rustls-0.21.6.crate"], + name = "rustls-0.21.10", + 
srcs = [":rustls-0.21.10.crate"], crate = "rustls", - crate_root = "rustls-0.21.6.crate/src/lib.rs", + crate_root = "rustls-0.21.10.crate/src/lib.rs", edition = "2021", features = [ "dangerous_configuration", @@ -11828,9 +12488,9 @@ cargo.rust_library( visibility = [], deps = [ ":log-0.4.20", - ":ring-0.16.20", - ":rustls-webpki-0.101.4", - ":sct-0.7.0", + ":ring-0.17.7", + ":rustls-webpki-0.101.7", + ":sct-0.7.1", ], ) @@ -11869,40 +12529,40 @@ cargo.rust_library( ), }, visibility = [], - deps = [":rustls-pemfile-1.0.3"], + deps = [":rustls-pemfile-1.0.4"], ) http_archive( - name = "rustls-pemfile-1.0.3.crate", - sha256 = "2d3987094b1d07b653b7dfdc3f70ce9a1da9c51ac18c1b06b662e4f9a0e9f4b2", - strip_prefix = "rustls-pemfile-1.0.3", - urls = ["https://crates.io/api/v1/crates/rustls-pemfile/1.0.3/download"], + name = "rustls-pemfile-1.0.4.crate", + sha256 = "1c74cae0a4cf6ccbbf5f359f08efdf8ee7e1dc532573bf0db71968cb56b1448c", + strip_prefix = "rustls-pemfile-1.0.4", + urls = ["https://crates.io/api/v1/crates/rustls-pemfile/1.0.4/download"], visibility = [], ) cargo.rust_library( - name = "rustls-pemfile-1.0.3", - srcs = [":rustls-pemfile-1.0.3.crate"], + name = "rustls-pemfile-1.0.4", + srcs = [":rustls-pemfile-1.0.4.crate"], crate = "rustls_pemfile", - crate_root = "rustls-pemfile-1.0.3.crate/src/lib.rs", + crate_root = "rustls-pemfile-1.0.4.crate/src/lib.rs", edition = "2018", visibility = [], - deps = [":base64-0.21.2"], + deps = [":base64-0.21.5"], ) http_archive( - name = "rustls-webpki-0.101.4.crate", - sha256 = "7d93931baf2d282fff8d3a532bbfd7653f734643161b87e3e01e59a04439bf0d", - strip_prefix = "rustls-webpki-0.101.4", - urls = ["https://crates.io/api/v1/crates/rustls-webpki/0.101.4/download"], + name = "rustls-webpki-0.101.7.crate", + sha256 = "8b6275d1ee7a1cd780b64aca7726599a1dbc893b1e64144529e55c3c2f745765", + strip_prefix = "rustls-webpki-0.101.7", + urls = ["https://crates.io/api/v1/crates/rustls-webpki/0.101.7/download"], visibility = [], ) 
cargo.rust_library( - name = "rustls-webpki-0.101.4", - srcs = [":rustls-webpki-0.101.4.crate"], + name = "rustls-webpki-0.101.7", + srcs = [":rustls-webpki-0.101.7.crate"], crate = "webpki", - crate_root = "rustls-webpki-0.101.4.crate/src/lib.rs", + crate_root = "rustls-webpki-0.101.7.crate/src/lib.rs", edition = "2021", features = [ "alloc", @@ -11911,8 +12571,8 @@ cargo.rust_library( ], visibility = [], deps = [ - ":ring-0.16.20", - ":untrusted-0.7.1", + ":ring-0.17.7", + ":untrusted-0.9.0", ], ) @@ -11954,18 +12614,18 @@ buildscript_run( ) http_archive( - name = "ryu-1.0.15.crate", - sha256 = "1ad4cc8da4ef723ed60bced201181d83791ad433213d8c24efffda1eec85d741", - strip_prefix = "ryu-1.0.15", - urls = ["https://crates.io/api/v1/crates/ryu/1.0.15/download"], + name = "ryu-1.0.16.crate", + sha256 = "f98d2aa92eebf49b69786be48e4477826b256916e84a57ff2a4f21923b48eb4c", + strip_prefix = "ryu-1.0.16", + urls = ["https://crates.io/api/v1/crates/ryu/1.0.16/download"], visibility = [], ) cargo.rust_library( - name = "ryu-1.0.15", - srcs = [":ryu-1.0.15.crate"], + name = "ryu-1.0.16", + srcs = [":ryu-1.0.16.crate"], crate = "ryu", - crate_root = "ryu-1.0.15.crate/src/lib.rs", + crate_root = "ryu-1.0.16.crate/src/lib.rs", edition = "2018", visibility = [], ) @@ -11986,10 +12646,10 @@ cargo.rust_library( edition = "2018", platform = { "windows-gnu": dict( - deps = [":winapi-util-0.1.5"], + deps = [":winapi-util-0.1.6"], ), "windows-msvc": dict( - deps = [":winapi-util-0.1.5"], + deps = [":winapi-util-0.1.6"], ), }, visibility = [], @@ -12031,23 +12691,23 @@ cargo.rust_library( ) http_archive( - name = "sct-0.7.0.crate", - sha256 = "d53dcdb7c9f8158937a7981b48accfd39a43af418591a5d008c7b22b5e1b7ca4", - strip_prefix = "sct-0.7.0", - urls = ["https://crates.io/api/v1/crates/sct/0.7.0/download"], + name = "sct-0.7.1.crate", + sha256 = "da046153aa2352493d6cb7da4b6e5c0c057d8a1d0a9aa8560baffdd945acd414", + strip_prefix = "sct-0.7.1", + urls = 
["https://crates.io/api/v1/crates/sct/0.7.1/download"], visibility = [], ) cargo.rust_library( - name = "sct-0.7.0", - srcs = [":sct-0.7.0.crate"], + name = "sct-0.7.1", + srcs = [":sct-0.7.1.crate"], crate = "sct", - crate_root = "sct-0.7.0.crate/src/lib.rs", - edition = "2018", + crate_root = "sct-0.7.1.crate/src/lib.rs", + edition = "2021", visibility = [], deps = [ - ":ring-0.16.20", - ":untrusted-0.7.1", + ":ring-0.17.7", + ":untrusted-0.9.0", ], ) @@ -12098,25 +12758,25 @@ cargo.rust_library( visibility = [], deps = [ ":async-stream-0.3.5", - ":async-trait-0.1.73", + ":async-trait-0.1.74", ":bigdecimal-0.3.1", - ":chrono-0.4.26", - ":futures-0.3.28", + ":chrono-0.4.31", + ":futures-0.3.29", ":log-0.4.20", ":ouroboros-0.15.6", - ":rust_decimal-1.32.0", + ":rust_decimal-1.33.1", ":sea-orm-macros-0.11.3", ":sea-query-0.28.5", ":sea-query-binder-0.3.1", ":sea-strum-0.23.0", - ":serde-1.0.186", - ":serde_json-1.0.105", + ":serde-1.0.193", + ":serde_json-1.0.108", ":sqlx-0.6.3", - ":thiserror-1.0.47", - ":time-0.3.27", - ":tracing-0.1.37", - ":url-2.4.0", - ":uuid-1.4.1", + ":thiserror-1.0.50", + ":time-0.3.30", + ":tracing-0.1.40", + ":url-2.5.0", + ":uuid-1.6.1", ], ) @@ -12139,7 +12799,7 @@ cargo.rust_library( deps = [ ":bae-0.1.7", ":heck-0.3.3", - ":proc-macro2-1.0.66", + ":proc-macro2-1.0.70", ":quote-1.0.33", ":syn-1.0.109", ], @@ -12183,12 +12843,12 @@ cargo.rust_library( visibility = [], deps = [ ":bigdecimal-0.3.1", - ":chrono-0.4.26", - ":rust_decimal-1.32.0", + ":chrono-0.4.31", + ":rust_decimal-1.33.1", ":sea-query-derive-0.3.0", - ":serde_json-1.0.105", - ":time-0.3.27", - ":uuid-1.4.1", + ":serde_json-1.0.108", + ":time-0.3.30", + ":uuid-1.6.1", ], ) @@ -12226,13 +12886,13 @@ cargo.rust_library( visibility = [], deps = [ ":bigdecimal-0.3.1", - ":chrono-0.4.26", - ":rust_decimal-1.32.0", + ":chrono-0.4.31", + ":rust_decimal-1.33.1", ":sea-query-0.28.5", - ":serde_json-1.0.105", + ":serde_json-1.0.108", ":sqlx-0.6.3", - ":time-0.3.27", - ":uuid-1.4.1", 
+ ":time-0.3.30", + ":uuid-1.6.1", ], ) @@ -12254,10 +12914,10 @@ cargo.rust_library( visibility = [], deps = [ ":heck-0.4.1", - ":proc-macro2-1.0.66", + ":proc-macro2-1.0.70", ":quote-1.0.33", ":syn-1.0.109", - ":thiserror-1.0.47", + ":thiserror-1.0.50", ], ) @@ -12303,7 +12963,7 @@ cargo.rust_library( visibility = [], deps = [ ":heck-0.3.3", - ":proc-macro2-1.0.66", + ":proc-macro2-1.0.70", ":quote-1.0.33", ":rustversion-1.0.14", ":syn-1.0.109", @@ -12360,7 +13020,7 @@ cargo.rust_library( ":generic-array-0.14.7", ":pkcs8-0.10.2", ":subtle-2.5.0", - ":zeroize-1.6.0", + ":zeroize-1.7.0", ], ) @@ -12385,9 +13045,9 @@ cargo.rust_library( visibility = [], deps = [ ":bitflags-1.3.2", - ":core-foundation-0.9.3", - ":core-foundation-sys-0.8.4", - ":libc-0.2.147", + ":core-foundation-0.9.4", + ":core-foundation-sys-0.8.6", + ":libc-0.2.151", ":security-framework-sys-2.9.1", ], ) @@ -12409,8 +13069,8 @@ cargo.rust_library( features = ["OSX_10_9"], visibility = [], deps = [ - ":core-foundation-sys-0.8.4", - ":libc-0.2.147", + ":core-foundation-sys-0.8.6", + ":libc-0.2.151", ], ) @@ -12441,28 +13101,96 @@ cargo.rust_library( ), }, visibility = [], - deps = [":tempfile-3.8.0"], + deps = [":tempfile-3.8.1"], +) + +http_archive( + name = "semver-1.0.20.crate", + sha256 = "836fa6a3e1e547f9a2c4040802ec865b5d85f4014efe00555d7090a3dcaa1090", + strip_prefix = "semver-1.0.20", + urls = ["https://crates.io/api/v1/crates/semver/1.0.20/download"], + visibility = [], +) + +cargo.rust_library( + name = "semver-1.0.20", + srcs = [":semver-1.0.20.crate"], + crate = "semver", + crate_root = "semver-1.0.20.crate/src/lib.rs", + edition = "2018", + env = { + "CARGO_MANIFEST_DIR": "semver-1.0.20.crate", + "CARGO_PKG_AUTHORS": "David Tolnay ", + "CARGO_PKG_DESCRIPTION": "Parser and evaluator for Cargo's flavor of Semantic Versioning", + "CARGO_PKG_NAME": "semver", + "CARGO_PKG_REPOSITORY": "https://github.com/dtolnay/semver", + "CARGO_PKG_VERSION": "1.0.20", + "CARGO_PKG_VERSION_MAJOR": "1", + 
"CARGO_PKG_VERSION_MINOR": "0", + "CARGO_PKG_VERSION_PATCH": "20", + }, + features = [ + "default", + "std", + ], + rustc_flags = ["@$(location :semver-1.0.20-build-script-run[rustc_flags])"], + visibility = [], +) + +cargo.rust_binary( + name = "semver-1.0.20-build-script-build", + srcs = [":semver-1.0.20.crate"], + crate = "build_script_build", + crate_root = "semver-1.0.20.crate/build.rs", + edition = "2018", + env = { + "CARGO_MANIFEST_DIR": "semver-1.0.20.crate", + "CARGO_PKG_AUTHORS": "David Tolnay ", + "CARGO_PKG_DESCRIPTION": "Parser and evaluator for Cargo's flavor of Semantic Versioning", + "CARGO_PKG_NAME": "semver", + "CARGO_PKG_REPOSITORY": "https://github.com/dtolnay/semver", + "CARGO_PKG_VERSION": "1.0.20", + "CARGO_PKG_VERSION_MAJOR": "1", + "CARGO_PKG_VERSION_MINOR": "0", + "CARGO_PKG_VERSION_PATCH": "20", + }, + features = [ + "default", + "std", + ], + visibility = [], +) + +buildscript_run( + name = "semver-1.0.20-build-script-run", + package_name = "semver", + buildscript_rule = ":semver-1.0.20-build-script-build", + features = [ + "default", + "std", + ], + version = "1.0.20", ) alias( name = "serde", - actual = ":serde-1.0.186", + actual = ":serde-1.0.193", visibility = ["PUBLIC"], ) http_archive( - name = "serde-1.0.186.crate", - sha256 = "9f5db24220c009de9bd45e69fb2938f4b6d2df856aa9304ce377b3180f83b7c1", - strip_prefix = "serde-1.0.186", - urls = ["https://crates.io/api/v1/crates/serde/1.0.186/download"], + name = "serde-1.0.193.crate", + sha256 = "25dd9975e68d0cb5aa1120c288333fc98731bd1dd12f561e468ea4728c042b89", + strip_prefix = "serde-1.0.193", + urls = ["https://crates.io/api/v1/crates/serde/1.0.193/download"], visibility = [], ) cargo.rust_library( - name = "serde-1.0.186", - srcs = [":serde-1.0.186.crate"], + name = "serde-1.0.193", + srcs = [":serde-1.0.193.crate"], crate = "serde", - crate_root = "serde-1.0.186.crate/src/lib.rs", + crate_root = "serde-1.0.193.crate/src/lib.rs", edition = "2018", features = [ "alloc", @@ -12473,28 
+13201,28 @@ cargo.rust_library( "std", ], visibility = [], - deps = [":serde_derive-1.0.186"], + deps = [":serde_derive-1.0.193"], ) alias( name = "serde-aux", - actual = ":serde-aux-4.2.0", + actual = ":serde-aux-4.3.1", visibility = ["PUBLIC"], ) http_archive( - name = "serde-aux-4.2.0.crate", - sha256 = "c3dfe1b7eb6f9dcf011bd6fad169cdeaae75eda0d61b1a99a3f015b41b0cae39", - strip_prefix = "serde-aux-4.2.0", - urls = ["https://crates.io/api/v1/crates/serde-aux/4.2.0/download"], + name = "serde-aux-4.3.1.crate", + sha256 = "184eba62ebddb71658697c8b08822edee89970bf318c5362189f0de27f85b498", + strip_prefix = "serde-aux-4.3.1", + urls = ["https://crates.io/api/v1/crates/serde-aux/4.3.1/download"], visibility = [], ) cargo.rust_library( - name = "serde-aux-4.2.0", - srcs = [":serde-aux-4.2.0.crate"], + name = "serde-aux-4.3.1", + srcs = [":serde-aux-4.3.1.crate"], crate = "serde_aux", - crate_root = "serde-aux-4.2.0.crate/src/lib.rs", + crate_root = "serde-aux-4.3.1.crate/src/lib.rs", edition = "2021", features = [ "chrono", @@ -12502,55 +13230,55 @@ cargo.rust_library( ], visibility = [], deps = [ - ":chrono-0.4.26", - ":serde-1.0.186", - ":serde_json-1.0.105", + ":chrono-0.4.31", + ":serde-1.0.193", + ":serde_json-1.0.108", ], ) http_archive( - name = "serde_derive-1.0.186.crate", - sha256 = "5ad697f7e0b65af4983a4ce8f56ed5b357e8d3c36651bf6a7e13639c17b8e670", - strip_prefix = "serde_derive-1.0.186", - urls = ["https://crates.io/api/v1/crates/serde_derive/1.0.186/download"], + name = "serde_derive-1.0.193.crate", + sha256 = "43576ca501357b9b071ac53cdc7da8ef0cbd9493d8df094cd821777ea6e894d3", + strip_prefix = "serde_derive-1.0.193", + urls = ["https://crates.io/api/v1/crates/serde_derive/1.0.193/download"], visibility = [], ) cargo.rust_library( - name = "serde_derive-1.0.186", - srcs = [":serde_derive-1.0.186.crate"], + name = "serde_derive-1.0.193", + srcs = [":serde_derive-1.0.193.crate"], crate = "serde_derive", - crate_root = "serde_derive-1.0.186.crate/src/lib.rs", 
+ crate_root = "serde_derive-1.0.193.crate/src/lib.rs", edition = "2015", features = ["default"], proc_macro = True, visibility = [], deps = [ - ":proc-macro2-1.0.66", + ":proc-macro2-1.0.70", ":quote-1.0.33", - ":syn-2.0.29", + ":syn-2.0.40", ], ) alias( name = "serde_json", - actual = ":serde_json-1.0.105", + actual = ":serde_json-1.0.108", visibility = ["PUBLIC"], ) http_archive( - name = "serde_json-1.0.105.crate", - sha256 = "693151e1ac27563d6dbcec9dee9fbd5da8539b20fa14ad3752b2e6d363ace360", - strip_prefix = "serde_json-1.0.105", - urls = ["https://crates.io/api/v1/crates/serde_json/1.0.105/download"], + name = "serde_json-1.0.108.crate", + sha256 = "3d1c7e3eac408d115102c4c24ad393e0821bb3a5df4d506a80f85f7a742a526b", + strip_prefix = "serde_json-1.0.108", + urls = ["https://crates.io/api/v1/crates/serde_json/1.0.108/download"], visibility = [], ) cargo.rust_library( - name = "serde_json-1.0.105", - srcs = [":serde_json-1.0.105.crate"], + name = "serde_json-1.0.108", + srcs = [":serde_json-1.0.108.crate"], crate = "serde_json", - crate_root = "serde_json-1.0.105.crate/src/lib.rs", + crate_root = "serde_json-1.0.108.crate/src/lib.rs", edition = "2021", features = [ "alloc", @@ -12562,10 +13290,10 @@ cargo.rust_library( ], visibility = [], deps = [ - ":indexmap-2.0.0", - ":itoa-1.0.9", - ":ryu-1.0.15", - ":serde-1.0.186", + ":indexmap-2.1.0", + ":itoa-1.0.10", + ":ryu-1.0.16", + ":serde-1.0.193", ], ) @@ -12584,7 +13312,7 @@ cargo.rust_library( crate_root = "serde_nanos-0.1.3.crate/src/lib.rs", edition = "2018", visibility = [], - deps = [":serde-1.0.186"], + deps = [":serde-1.0.193"], ) http_archive( @@ -12603,51 +13331,51 @@ cargo.rust_library( edition = "2021", visibility = [], deps = [ - ":itoa-1.0.9", - ":serde-1.0.186", + ":itoa-1.0.10", + ":serde-1.0.193", ], ) http_archive( - name = "serde_repr-0.1.16.crate", - sha256 = "8725e1dfadb3a50f7e5ce0b1a540466f6ed3fe7a0fca2ac2b8b831d31316bd00", - strip_prefix = "serde_repr-0.1.16", - urls = 
["https://crates.io/api/v1/crates/serde_repr/0.1.16/download"], + name = "serde_repr-0.1.17.crate", + sha256 = "3081f5ffbb02284dda55132aa26daecedd7372a42417bbbab6f14ab7d6bb9145", + strip_prefix = "serde_repr-0.1.17", + urls = ["https://crates.io/api/v1/crates/serde_repr/0.1.17/download"], visibility = [], ) cargo.rust_library( - name = "serde_repr-0.1.16", - srcs = [":serde_repr-0.1.16.crate"], + name = "serde_repr-0.1.17", + srcs = [":serde_repr-0.1.17.crate"], crate = "serde_repr", - crate_root = "serde_repr-0.1.16.crate/src/lib.rs", + crate_root = "serde_repr-0.1.17.crate/src/lib.rs", edition = "2021", proc_macro = True, visibility = [], deps = [ - ":proc-macro2-1.0.66", + ":proc-macro2-1.0.70", ":quote-1.0.33", - ":syn-2.0.29", + ":syn-2.0.40", ], ) http_archive( - name = "serde_spanned-0.6.3.crate", - sha256 = "96426c9936fd7a0124915f9185ea1d20aa9445cc9821142f0a73bc9207a2e186", - strip_prefix = "serde_spanned-0.6.3", - urls = ["https://crates.io/api/v1/crates/serde_spanned/0.6.3/download"], + name = "serde_spanned-0.6.4.crate", + sha256 = "12022b835073e5b11e90a14f86838ceb1c8fb0325b72416845c487ac0fa95e80", + strip_prefix = "serde_spanned-0.6.4", + urls = ["https://crates.io/api/v1/crates/serde_spanned/0.6.4/download"], visibility = [], ) cargo.rust_library( - name = "serde_spanned-0.6.3", - srcs = [":serde_spanned-0.6.3.crate"], + name = "serde_spanned-0.6.4", + srcs = [":serde_spanned-0.6.4.crate"], crate = "serde_spanned", - crate_root = "serde_spanned-0.6.3.crate/src/lib.rs", + crate_root = "serde_spanned-0.6.4.crate/src/lib.rs", edition = "2021", features = ["serde"], visibility = [], - deps = [":serde-1.0.186"], + deps = [":serde-1.0.193"], ) alias( @@ -12672,8 +13400,8 @@ cargo.rust_library( edition = "2018", visibility = [], deps = [ - ":serde-1.0.186", - ":url-2.4.0", + ":serde-1.0.193", + ":url-2.5.0", ], ) @@ -12693,10 +13421,10 @@ cargo.rust_library( edition = "2018", visibility = [], deps = [ - ":form_urlencoded-1.2.0", - ":itoa-1.0.9", - 
":ryu-1.0.15", - ":serde-1.0.186", + ":form_urlencoded-1.2.1", + ":itoa-1.0.10", + ":ryu-1.0.16", + ":serde-1.0.193", ], ) @@ -12721,39 +13449,39 @@ cargo.rust_library( "std", ], named_deps = { - "chrono_0_4": ":chrono-0.4.26", + "chrono_0_4": ":chrono-0.4.31", "indexmap_1": ":indexmap-1.9.3", - "time_0_3": ":time-0.3.27", + "time_0_3": ":time-0.3.30", }, visibility = [], deps = [ ":base64-0.13.1", ":hex-0.4.3", - ":serde-1.0.186", - ":serde_json-1.0.105", + ":serde-1.0.193", + ":serde_json-1.0.108", ":serde_with_macros-2.3.3", ], ) alias( name = "serde_with", - actual = ":serde_with-3.3.0", + actual = ":serde_with-3.4.0", visibility = ["PUBLIC"], ) http_archive( - name = "serde_with-3.3.0.crate", - sha256 = "1ca3b16a3d82c4088f343b7480a93550b3eabe1a358569c2dfe38bbcead07237", - strip_prefix = "serde_with-3.3.0", - urls = ["https://crates.io/api/v1/crates/serde_with/3.3.0/download"], + name = "serde_with-3.4.0.crate", + sha256 = "64cd236ccc1b7a29e7e2739f27c0b2dd199804abc4290e32f59f3b68d6405c23", + strip_prefix = "serde_with-3.4.0", + urls = ["https://crates.io/api/v1/crates/serde_with/3.4.0/download"], visibility = [], ) cargo.rust_library( - name = "serde_with-3.3.0", - srcs = [":serde_with-3.3.0.crate"], + name = "serde_with-3.4.0", + srcs = [":serde_with-3.4.0.crate"], crate = "serde_with", - crate_root = "serde_with-3.3.0.crate/src/lib.rs", + crate_root = "serde_with-3.4.0.crate/src/lib.rs", edition = "2021", features = [ "alloc", @@ -12762,18 +13490,18 @@ cargo.rust_library( "std", ], named_deps = { - "chrono_0_4": ":chrono-0.4.26", + "chrono_0_4": ":chrono-0.4.31", "indexmap_1": ":indexmap-1.9.3", - "indexmap_2": ":indexmap-2.0.0", - "time_0_3": ":time-0.3.27", + "indexmap_2": ":indexmap-2.1.0", + "time_0_3": ":time-0.3.30", }, visibility = [], deps = [ - ":base64-0.21.2", + ":base64-0.21.5", ":hex-0.4.3", - ":serde-1.0.186", - ":serde_json-1.0.105", - ":serde_with_macros-3.3.0", + ":serde-1.0.193", + ":serde_json-1.0.108", + ":serde_with_macros-3.4.0", ], ) @@ 
-12795,79 +13523,79 @@ cargo.rust_library( visibility = [], deps = [ ":darling-0.20.3", - ":proc-macro2-1.0.66", + ":proc-macro2-1.0.70", ":quote-1.0.33", - ":syn-2.0.29", + ":syn-2.0.40", ], ) http_archive( - name = "serde_with_macros-3.3.0.crate", - sha256 = "2e6be15c453eb305019bfa438b1593c731f36a289a7853f7707ee29e870b3b3c", - strip_prefix = "serde_with_macros-3.3.0", - urls = ["https://crates.io/api/v1/crates/serde_with_macros/3.3.0/download"], + name = "serde_with_macros-3.4.0.crate", + sha256 = "93634eb5f75a2323b16de4748022ac4297f9e76b6dced2be287a099f41b5e788", + strip_prefix = "serde_with_macros-3.4.0", + urls = ["https://crates.io/api/v1/crates/serde_with_macros/3.4.0/download"], visibility = [], ) cargo.rust_library( - name = "serde_with_macros-3.3.0", - srcs = [":serde_with_macros-3.3.0.crate"], + name = "serde_with_macros-3.4.0", + srcs = [":serde_with_macros-3.4.0.crate"], crate = "serde_with_macros", - crate_root = "serde_with_macros-3.3.0.crate/src/lib.rs", + crate_root = "serde_with_macros-3.4.0.crate/src/lib.rs", edition = "2021", proc_macro = True, visibility = [], deps = [ ":darling-0.20.3", - ":proc-macro2-1.0.66", + ":proc-macro2-1.0.70", ":quote-1.0.33", - ":syn-2.0.29", + ":syn-2.0.40", ], ) alias( name = "serde_yaml", - actual = ":serde_yaml-0.9.25", + actual = ":serde_yaml-0.9.27", visibility = ["PUBLIC"], ) http_archive( - name = "serde_yaml-0.9.25.crate", - sha256 = "1a49e178e4452f45cb61d0cd8cebc1b0fafd3e41929e996cef79aa3aca91f574", - strip_prefix = "serde_yaml-0.9.25", - urls = ["https://crates.io/api/v1/crates/serde_yaml/0.9.25/download"], + name = "serde_yaml-0.9.27.crate", + sha256 = "3cc7a1570e38322cfe4154732e5110f887ea57e22b76f4bfd32b5bdd3368666c", + strip_prefix = "serde_yaml-0.9.27", + urls = ["https://crates.io/api/v1/crates/serde_yaml/0.9.27/download"], visibility = [], ) cargo.rust_library( - name = "serde_yaml-0.9.25", - srcs = [":serde_yaml-0.9.25.crate"], + name = "serde_yaml-0.9.27", + srcs = [":serde_yaml-0.9.27.crate"], 
crate = "serde_yaml", - crate_root = "serde_yaml-0.9.25.crate/src/lib.rs", + crate_root = "serde_yaml-0.9.27.crate/src/lib.rs", edition = "2021", visibility = [], deps = [ - ":indexmap-2.0.0", - ":itoa-1.0.9", - ":ryu-1.0.15", - ":serde-1.0.186", + ":indexmap-2.1.0", + ":itoa-1.0.10", + ":ryu-1.0.16", + ":serde-1.0.193", ":unsafe-libyaml-0.2.9", ], ) http_archive( - name = "sha1-0.10.5.crate", - sha256 = "f04293dc80c3993519f2d7f6f511707ee7094fe0c6d3406feb330cdb3540eba3", - strip_prefix = "sha1-0.10.5", - urls = ["https://crates.io/api/v1/crates/sha1/0.10.5/download"], + name = "sha1-0.10.6.crate", + sha256 = "e3bf829a2d51ab4a5ddf1352d8470c140cadc8301b2ae1789db023f01cedd6ba", + strip_prefix = "sha1-0.10.6", + urls = ["https://crates.io/api/v1/crates/sha1/0.10.6/download"], visibility = [], ) cargo.rust_library( - name = "sha1-0.10.5", - srcs = [":sha1-0.10.5.crate"], + name = "sha1-0.10.6", + srcs = [":sha1-0.10.6.crate"], crate = "sha1", - crate_root = "sha1-0.10.5.crate/src/lib.rs", + crate_root = "sha1-0.10.6.crate/src/lib.rs", edition = "2018", features = [ "default", @@ -12875,22 +13603,22 @@ cargo.rust_library( ], platform = { "linux-arm64": dict( - deps = [":cpufeatures-0.2.9"], + deps = [":cpufeatures-0.2.11"], ), "linux-x86_64": dict( - deps = [":cpufeatures-0.2.9"], + deps = [":cpufeatures-0.2.11"], ), "macos-arm64": dict( - deps = [":cpufeatures-0.2.9"], + deps = [":cpufeatures-0.2.11"], ), "macos-x86_64": dict( - deps = [":cpufeatures-0.2.9"], + deps = [":cpufeatures-0.2.11"], ), "windows-gnu": dict( - deps = [":cpufeatures-0.2.9"], + deps = [":cpufeatures-0.2.11"], ), "windows-msvc": dict( - deps = [":cpufeatures-0.2.9"], + deps = [":cpufeatures-0.2.11"], ), }, visibility = [], @@ -12901,18 +13629,18 @@ cargo.rust_library( ) http_archive( - name = "sha2-0.10.7.crate", - sha256 = "479fb9d862239e610720565ca91403019f2f00410f1864c5aa7479b950a76ed8", - strip_prefix = "sha2-0.10.7", - urls = ["https://crates.io/api/v1/crates/sha2/0.10.7/download"], + name = 
"sha2-0.10.8.crate", + sha256 = "793db75ad2bcafc3ffa7c68b215fee268f537982cd901d132f89c6343f3a3dc8", + strip_prefix = "sha2-0.10.8", + urls = ["https://crates.io/api/v1/crates/sha2/0.10.8/download"], visibility = [], ) cargo.rust_library( - name = "sha2-0.10.7", - srcs = [":sha2-0.10.7.crate"], + name = "sha2-0.10.8", + srcs = [":sha2-0.10.8.crate"], crate = "sha2", - crate_root = "sha2-0.10.7.crate/src/lib.rs", + crate_root = "sha2-0.10.8.crate/src/lib.rs", edition = "2018", features = [ "default", @@ -12920,22 +13648,22 @@ cargo.rust_library( ], platform = { "linux-arm64": dict( - deps = [":cpufeatures-0.2.9"], + deps = [":cpufeatures-0.2.11"], ), "linux-x86_64": dict( - deps = [":cpufeatures-0.2.9"], + deps = [":cpufeatures-0.2.11"], ), "macos-arm64": dict( - deps = [":cpufeatures-0.2.9"], + deps = [":cpufeatures-0.2.11"], ), "macos-x86_64": dict( - deps = [":cpufeatures-0.2.9"], + deps = [":cpufeatures-0.2.11"], ), "windows-gnu": dict( - deps = [":cpufeatures-0.2.9"], + deps = [":cpufeatures-0.2.11"], ), "windows-msvc": dict( - deps = [":cpufeatures-0.2.9"], + deps = [":cpufeatures-0.2.11"], ), }, visibility = [], @@ -12961,22 +13689,22 @@ cargo.rust_library( edition = "2018", platform = { "linux-arm64": dict( - deps = [":cpufeatures-0.2.9"], + deps = [":cpufeatures-0.2.11"], ), "linux-x86_64": dict( - deps = [":cpufeatures-0.2.9"], + deps = [":cpufeatures-0.2.11"], ), "macos-arm64": dict( - deps = [":cpufeatures-0.2.9"], + deps = [":cpufeatures-0.2.11"], ), "macos-x86_64": dict( - deps = [":cpufeatures-0.2.9"], + deps = [":cpufeatures-0.2.11"], ), "windows-gnu": dict( - deps = [":cpufeatures-0.2.9"], + deps = [":cpufeatures-0.2.11"], ), "windows-msvc": dict( - deps = [":cpufeatures-0.2.9"], + deps = [":cpufeatures-0.2.11"], ), }, visibility = [], @@ -12989,18 +13717,18 @@ cargo.rust_library( ) http_archive( - name = "sharded-slab-0.1.4.crate", - sha256 = "900fba806f70c630b0a382d0d825e17a0f19fcd059a2ade1ff237bcddf446b31", - strip_prefix = "sharded-slab-0.1.4", - 
urls = ["https://crates.io/api/v1/crates/sharded-slab/0.1.4/download"], + name = "sharded-slab-0.1.7.crate", + sha256 = "f40ca3c46823713e0d4209592e8d6e826aa57e928f09752619fc696c499637f6", + strip_prefix = "sharded-slab-0.1.7", + urls = ["https://crates.io/api/v1/crates/sharded-slab/0.1.7/download"], visibility = [], ) cargo.rust_library( - name = "sharded-slab-0.1.4", - srcs = [":sharded-slab-0.1.4.crate"], + name = "sharded-slab-0.1.7", + srcs = [":sharded-slab-0.1.7.crate"], crate = "sharded_slab", - crate_root = "sharded-slab-0.1.4.crate/src/lib.rs", + crate_root = "sharded-slab-0.1.7.crate/src/lib.rs", edition = "2018", visibility = [], deps = [":lazy_static-1.4.0"], @@ -13027,7 +13755,7 @@ cargo.rust_library( ], visibility = [], deps = [ - ":libc-0.2.147", + ":libc-0.2.151", ":signal-hook-registry-1.4.1", ], ) @@ -13051,11 +13779,11 @@ cargo.rust_library( "support-v0_8", ], named_deps = { - "mio_0_8": ":mio-0.8.8", + "mio_0_8": ":mio-0.8.10", }, visibility = [], deps = [ - ":libc-0.2.147", + ":libc-0.2.151", ":signal-hook-0.3.17", ], ) @@ -13075,7 +13803,7 @@ cargo.rust_library( crate_root = "signal-hook-registry-1.4.1.crate/src/lib.rs", edition = "2015", visibility = [], - deps = [":libc-0.2.147"], + deps = [":libc-0.2.151"], ) http_archive( @@ -13101,7 +13829,34 @@ cargo.rust_library( ":pkcs8-0.7.6", ":rand_core-0.6.4", ":signature-1.6.4", - ":zeroize-1.6.0", + ":zeroize-1.7.0", + ], +) + +http_archive( + name = "signatory-0.27.1.crate", + sha256 = "c1e303f8205714074f6068773f0e29527e0453937fe837c9717d066635b65f31", + strip_prefix = "signatory-0.27.1", + urls = ["https://crates.io/api/v1/crates/signatory/0.27.1/download"], + visibility = [], +) + +cargo.rust_library( + name = "signatory-0.27.1", + srcs = [":signatory-0.27.1.crate"], + crate = "signatory", + crate_root = "signatory-0.27.1.crate/src/lib.rs", + edition = "2021", + features = [ + "default", + "std", + ], + visibility = [], + deps = [ + ":pkcs8-0.10.2", + ":rand_core-0.6.4", + ":signature-2.2.0", 
+ ":zeroize-1.7.0", ], ) @@ -13135,18 +13890,18 @@ cargo.rust_library( ) http_archive( - name = "signature-2.1.0.crate", - sha256 = "5e1788eed21689f9cf370582dfc467ef36ed9c707f073528ddafa8d83e3b8500", - strip_prefix = "signature-2.1.0", - urls = ["https://crates.io/api/v1/crates/signature/2.1.0/download"], + name = "signature-2.2.0.crate", + sha256 = "77549399552de45a898a580c1b41d445bf730df867cc44e6c0233bbc4b8329de", + strip_prefix = "signature-2.2.0", + urls = ["https://crates.io/api/v1/crates/signature/2.2.0/download"], visibility = [], ) cargo.rust_library( - name = "signature-2.1.0", - srcs = [":signature-2.1.0.crate"], + name = "signature-2.2.0", + srcs = [":signature-2.2.0.crate"], crate = "signature", - crate_root = "signature-2.1.0.crate/src/lib.rs", + crate_root = "signature-2.2.0.crate/src/lib.rs", edition = "2021", features = [ "alloc", @@ -13200,6 +13955,27 @@ cargo.rust_library( visibility = [], ) +http_archive( + name = "siphasher-1.0.0.crate", + sha256 = "54ac45299ccbd390721be55b412d41931911f654fa99e2cb8bfb57184b2061fe", + strip_prefix = "siphasher-1.0.0", + urls = ["https://crates.io/api/v1/crates/siphasher/1.0.0/download"], + visibility = [], +) + +cargo.rust_library( + name = "siphasher-1.0.0", + srcs = [":siphasher-1.0.0.crate"], + crate = "siphasher", + crate_root = "siphasher-1.0.0.crate/src/lib.rs", + edition = "2018", + features = [ + "default", + "std", + ], + visibility = [], +) + http_archive( name = "slab-0.4.9.crate", sha256 = "8f92a496fb766b417c996b9c5e57daf2f7ad3b0bebe1ccfca4856390e3d3bb67", @@ -13237,22 +14013,41 @@ cargo.rust_library( edition = "2018", features = ["union"], visibility = [], - deps = [":smallvec-1.11.0"], + deps = [":smallvec-1.11.2"], ) http_archive( - name = "smallvec-1.11.0.crate", - sha256 = "62bb4feee49fdd9f707ef802e22365a35de4b7b299de4763d44bfea899442ff9", - strip_prefix = "smallvec-1.11.0", - urls = ["https://crates.io/api/v1/crates/smallvec/1.11.0/download"], + name = "smallstr-0.3.0.crate", + sha256 = 
"63b1aefdf380735ff8ded0b15f31aab05daf1f70216c01c02a12926badd1df9d", + strip_prefix = "smallstr-0.3.0", + urls = ["https://crates.io/api/v1/crates/smallstr/0.3.0/download"], visibility = [], ) cargo.rust_library( - name = "smallvec-1.11.0", - srcs = [":smallvec-1.11.0.crate"], + name = "smallstr-0.3.0", + srcs = [":smallstr-0.3.0.crate"], + crate = "smallstr", + crate_root = "smallstr-0.3.0.crate/src/lib.rs", + edition = "2018", + features = ["union"], + visibility = [], + deps = [":smallvec-1.11.2"], +) + +http_archive( + name = "smallvec-1.11.2.crate", + sha256 = "4dccd0940a2dcdf68d092b8cbab7dc0ad8fa938bf95787e1b916b0e3d0e8e970", + strip_prefix = "smallvec-1.11.2", + urls = ["https://crates.io/api/v1/crates/smallvec/1.11.2/download"], + visibility = [], +) + +cargo.rust_library( + name = "smallvec-1.11.2", + srcs = [":smallvec-1.11.2.crate"], crate = "smallvec", - crate_root = "smallvec-1.11.0.crate/src/lib.rs", + crate_root = "smallvec-1.11.2.crate/src/lib.rs", edition = "2018", features = [ "const_generics", @@ -13263,32 +14058,32 @@ cargo.rust_library( ) http_archive( - name = "socket2-0.4.9.crate", - sha256 = "64a4a911eed85daf18834cfaa86a79b7d266ff93ff5ba14005426219480ed662", - strip_prefix = "socket2-0.4.9", - urls = ["https://crates.io/api/v1/crates/socket2/0.4.9/download"], + name = "socket2-0.4.10.crate", + sha256 = "9f7916fc008ca5542385b89a3d3ce689953c143e9304a9bf8beec1de48994c0d", + strip_prefix = "socket2-0.4.10", + urls = ["https://crates.io/api/v1/crates/socket2/0.4.10/download"], visibility = [], ) cargo.rust_library( - name = "socket2-0.4.9", - srcs = [":socket2-0.4.9.crate"], + name = "socket2-0.4.10", + srcs = [":socket2-0.4.10.crate"], crate = "socket2", - crate_root = "socket2-0.4.9.crate/src/lib.rs", + crate_root = "socket2-0.4.10.crate/src/lib.rs", edition = "2018", features = ["all"], platform = { "linux-arm64": dict( - deps = [":libc-0.2.147"], + deps = [":libc-0.2.151"], ), "linux-x86_64": dict( - deps = [":libc-0.2.147"], + deps = 
[":libc-0.2.151"], ), "macos-arm64": dict( - deps = [":libc-0.2.147"], + deps = [":libc-0.2.151"], ), "macos-x86_64": dict( - deps = [":libc-0.2.147"], + deps = [":libc-0.2.151"], ), "windows-gnu": dict( deps = [":winapi-0.3.9"], @@ -13301,32 +14096,32 @@ cargo.rust_library( ) http_archive( - name = "socket2-0.5.3.crate", - sha256 = "2538b18701741680e0322a2302176d3253a35388e2e62f172f64f4f16605f877", - strip_prefix = "socket2-0.5.3", - urls = ["https://crates.io/api/v1/crates/socket2/0.5.3/download"], + name = "socket2-0.5.5.crate", + sha256 = "7b5fac59a5cb5dd637972e5fca70daf0523c9067fcdc4842f053dae04a18f8e9", + strip_prefix = "socket2-0.5.5", + urls = ["https://crates.io/api/v1/crates/socket2/0.5.5/download"], visibility = [], ) cargo.rust_library( - name = "socket2-0.5.3", - srcs = [":socket2-0.5.3.crate"], + name = "socket2-0.5.5", + srcs = [":socket2-0.5.5.crate"], crate = "socket2", - crate_root = "socket2-0.5.3.crate/src/lib.rs", + crate_root = "socket2-0.5.5.crate/src/lib.rs", edition = "2021", features = ["all"], platform = { "linux-arm64": dict( - deps = [":libc-0.2.147"], + deps = [":libc-0.2.151"], ), "linux-x86_64": dict( - deps = [":libc-0.2.147"], + deps = [":libc-0.2.151"], ), "macos-arm64": dict( - deps = [":libc-0.2.147"], + deps = [":libc-0.2.151"], ), "macos-x86_64": dict( - deps = [":libc-0.2.147"], + deps = [":libc-0.2.151"], ), "windows-gnu": dict( deps = [":windows-sys-0.48.0"], @@ -13366,9 +14161,9 @@ cargo.rust_library( visibility = [], deps = [ ":ed25519-1.5.3", - ":libc-0.2.147", + ":libc-0.2.151", ":libsodium-sys-0.2.7", - ":serde-1.0.186", + ":serde-1.0.193", ], ) @@ -13405,6 +14200,7 @@ cargo.rust_library( edition = "2015", features = [ "mutex", + "once", "spin_mutex", ], visibility = [], @@ -13456,18 +14252,18 @@ cargo.rust_library( ) http_archive( - name = "spki-0.7.2.crate", - sha256 = "9d1e996ef02c474957d681f1b05213dfb0abab947b446a62d37770b23500184a", - strip_prefix = "spki-0.7.2", - urls = 
["https://crates.io/api/v1/crates/spki/0.7.2/download"], + name = "spki-0.7.3.crate", + sha256 = "d91ed6c858b01f942cd56b37a94b3e0a1798290327d1236e4d9cf4eaca44d29d", + strip_prefix = "spki-0.7.3", + urls = ["https://crates.io/api/v1/crates/spki/0.7.3/download"], visibility = [], ) cargo.rust_library( - name = "spki-0.7.2", - srcs = [":spki-0.7.2.crate"], + name = "spki-0.7.3", + srcs = [":spki-0.7.3.crate"], crate = "spki", - crate_root = "spki-0.7.2.crate/src/lib.rs", + crate_root = "spki-0.7.3.crate/src/lib.rs", edition = "2021", features = [ "alloc", @@ -13482,22 +14278,22 @@ cargo.rust_library( ) http_archive( - name = "sqlformat-0.2.1.crate", - sha256 = "0c12bc9199d1db8234678b7051747c07f517cdcf019262d1847b94ec8b1aee3e", - strip_prefix = "sqlformat-0.2.1", - urls = ["https://crates.io/api/v1/crates/sqlformat/0.2.1/download"], + name = "sqlformat-0.2.3.crate", + sha256 = "ce81b7bd7c4493975347ef60d8c7e8b742d4694f4c49f93e0a12ea263938176c", + strip_prefix = "sqlformat-0.2.3", + urls = ["https://crates.io/api/v1/crates/sqlformat/0.2.3/download"], visibility = [], ) cargo.rust_library( - name = "sqlformat-0.2.1", - srcs = [":sqlformat-0.2.1.crate"], + name = "sqlformat-0.2.3", + srcs = [":sqlformat-0.2.3.crate"], crate = "sqlformat", - crate_root = "sqlformat-0.2.1.crate/src/lib.rs", + crate_root = "sqlformat-0.2.3.crate/src/lib.rs", edition = "2021", visibility = [], deps = [ - ":itertools-0.10.5", + ":itertools-0.12.0", ":nom-7.1.3", ":unicode_categories-0.1.1", ], @@ -13586,13 +14382,13 @@ cargo.rust_library( }, visibility = [], deps = [ - ":ahash-0.7.6", + ":ahash-0.7.7", ":atoi-1.0.0", ":base64-0.13.1", ":bitflags-1.3.2", - ":byteorder-1.4.3", - ":bytes-1.4.0", - ":chrono-0.4.26", + ":byteorder-1.5.0", + ":bytes-1.5.0", + ":chrono-0.4.31", ":crossbeam-queue-0.3.8", ":dirs-4.0.0", ":dotenvy-0.15.7", @@ -13602,37 +14398,37 @@ cargo.rust_library( ":futures-core-0.3.29", ":futures-intrusive-0.4.2", ":futures-util-0.3.29", - ":hashlink-0.8.3", + ":hashlink-0.8.4", 
":hex-0.4.3", ":hkdf-0.12.3", ":hmac-0.12.1", ":indexmap-1.9.3", - ":itoa-1.0.9", - ":libc-0.2.147", + ":itoa-1.0.10", + ":libc-0.2.151", ":log-0.4.20", - ":md-5-0.10.5", - ":memchr-2.5.0", + ":md-5-0.10.6", + ":memchr-2.6.4", ":num-bigint-0.4.4", - ":once_cell-1.18.0", + ":once_cell-1.19.0", ":paste-1.0.14", - ":percent-encoding-2.3.0", + ":percent-encoding-2.3.1", ":rand-0.8.5", - ":rust_decimal-1.32.0", - ":rustls-0.20.8", - ":rustls-pemfile-1.0.3", - ":serde-1.0.186", - ":serde_json-1.0.105", - ":sha1-0.10.5", - ":sha2-0.10.7", - ":smallvec-1.11.0", - ":sqlformat-0.2.1", + ":rust_decimal-1.33.1", + ":rustls-0.20.9", + ":rustls-pemfile-1.0.4", + ":serde-1.0.193", + ":serde_json-1.0.108", + ":sha1-0.10.6", + ":sha2-0.10.8", + ":smallvec-1.11.2", + ":sqlformat-0.2.3", ":sqlx-rt-0.6.3", - ":stringprep-0.1.3", - ":thiserror-1.0.47", - ":time-0.3.27", + ":stringprep-0.1.4", + ":thiserror-1.0.50", + ":time-0.3.30", ":tokio-stream-0.1.14", - ":url-2.4.0", - ":uuid-1.4.1", + ":url-2.5.0", + ":uuid-1.6.1", ":webpki-roots-0.22.6", ":whoami-1.4.1", ], @@ -13670,14 +14466,14 @@ cargo.rust_library( ":dotenvy-0.15.7", ":either-1.9.0", ":heck-0.4.1", - ":once_cell-1.18.0", - ":proc-macro2-1.0.66", + ":once_cell-1.19.0", + ":proc-macro2-1.0.70", ":quote-1.0.33", - ":serde_json-1.0.105", + ":serde_json-1.0.108", ":sqlx-core-0.6.3", ":sqlx-rt-0.6.3", ":syn-1.0.109", - ":url-2.4.0", + ":url-2.5.0", ], ) @@ -13705,8 +14501,8 @@ cargo.rust_library( ], visibility = [], deps = [ - ":once_cell-1.18.0", - ":tokio-1.32.0", + ":once_cell-1.19.0", + ":tokio-1.35.0", ":tokio-rustls-0.23.4", ], ) @@ -13733,23 +14529,6 @@ cargo.rust_library( visibility = [], ) -http_archive( - name = "static_assertions-1.1.0.crate", - sha256 = "a2eb9349b6444b326872e140eb1cf5e7c522154d69e7a0ffb0fb81c06b37543f", - strip_prefix = "static_assertions-1.1.0", - urls = ["https://crates.io/api/v1/crates/static_assertions/1.1.0/download"], - visibility = [], -) - -cargo.rust_library( - name = 
"static_assertions-1.1.0", - srcs = [":static_assertions-1.1.0.crate"], - crate = "static_assertions", - crate_root = "static_assertions-1.1.0.crate/src/lib.rs", - edition = "2015", - visibility = [], -) - alias( name = "stream-cancel", actual = ":stream-cancel-0.8.1", @@ -13774,27 +14553,28 @@ cargo.rust_library( deps = [ ":futures-core-0.3.29", ":pin-project-1.1.3", - ":tokio-1.32.0", + ":tokio-1.35.0", ], ) http_archive( - name = "stringprep-0.1.3.crate", - sha256 = "db3737bde7edce97102e0e2b15365bf7a20bfdb5f60f4f9e8d7004258a51a8da", - strip_prefix = "stringprep-0.1.3", - urls = ["https://crates.io/api/v1/crates/stringprep/0.1.3/download"], + name = "stringprep-0.1.4.crate", + sha256 = "bb41d74e231a107a1b4ee36bd1214b11285b77768d2e3824aedafa988fd36ee6", + strip_prefix = "stringprep-0.1.4", + urls = ["https://crates.io/api/v1/crates/stringprep/0.1.4/download"], visibility = [], ) cargo.rust_library( - name = "stringprep-0.1.3", - srcs = [":stringprep-0.1.3.crate"], + name = "stringprep-0.1.4", + srcs = [":stringprep-0.1.4.crate"], crate = "stringprep", - crate_root = "stringprep-0.1.3.crate/src/lib.rs", + crate_root = "stringprep-0.1.4.crate/src/lib.rs", edition = "2015", visibility = [], deps = [ - ":unicode-bidi-0.3.13", + ":finl_unicode-1.2.0", + ":unicode-bidi-0.3.14", ":unicode-normalization-0.1.22", ], ) @@ -13846,6 +14626,27 @@ cargo.rust_library( deps = [":strum_macros-0.24.3"], ) +http_archive( + name = "strum-0.25.0.crate", + sha256 = "290d54ea6f91c969195bdbcd7442c8c2a2ba87da8bf60a7ee86a235d4bc1e125", + strip_prefix = "strum-0.25.0", + urls = ["https://crates.io/api/v1/crates/strum/0.25.0/download"], + visibility = [], +) + +cargo.rust_library( + name = "strum-0.25.0", + srcs = [":strum-0.25.0.crate"], + crate = "strum", + crate_root = "strum-0.25.0.crate/src/lib.rs", + edition = "2018", + features = [ + "default", + "std", + ], + visibility = [], +) + http_archive( name = "strum_macros-0.24.3.crate", sha256 = 
"1e385be0d24f186b4ce2f9982191e7101bb737312ad61c1f2f984f34bcf85d59", @@ -13864,13 +14665,38 @@ cargo.rust_library( visibility = [], deps = [ ":heck-0.4.1", - ":proc-macro2-1.0.66", + ":proc-macro2-1.0.70", ":quote-1.0.33", ":rustversion-1.0.14", ":syn-1.0.109", ], ) +http_archive( + name = "strum_macros-0.25.3.crate", + sha256 = "23dc1fa9ac9c169a78ba62f0b841814b7abae11bdd047b9c58f893439e309ea0", + strip_prefix = "strum_macros-0.25.3", + urls = ["https://crates.io/api/v1/crates/strum_macros/0.25.3/download"], + visibility = [], +) + +cargo.rust_library( + name = "strum_macros-0.25.3", + srcs = [":strum_macros-0.25.3.crate"], + crate = "strum_macros", + crate_root = "strum_macros-0.25.3.crate/src/lib.rs", + edition = "2018", + proc_macro = True, + visibility = [], + deps = [ + ":heck-0.4.1", + ":proc-macro2-1.0.70", + ":quote-1.0.33", + ":rustversion-1.0.14", + ":syn-2.0.40", + ], +) + http_archive( name = "subtle-2.5.0.crate", sha256 = "81cdd64d312baedb58e21336b31bc043b77e01cc99033ce76ef539f78e965ebc", @@ -13908,7 +14734,6 @@ cargo.rust_library( "default", "derive", "extra-traits", - "fold", "full", "parsing", "printing", @@ -13918,50 +14743,79 @@ cargo.rust_library( ], visibility = [], deps = [ - ":proc-macro2-1.0.66", + ":proc-macro2-1.0.70", ":quote-1.0.33", - ":unicode-ident-1.0.11", + ":unicode-ident-1.0.12", ], ) alias( name = "syn", - actual = ":syn-2.0.29", + actual = ":syn-2.0.40", visibility = ["PUBLIC"], ) http_archive( - name = "syn-2.0.29.crate", - sha256 = "c324c494eba9d92503e6f1ef2e6df781e78f6a7705a0202d9801b198807d518a", - strip_prefix = "syn-2.0.29", - urls = ["https://crates.io/api/v1/crates/syn/2.0.29/download"], + name = "syn-2.0.40.crate", + sha256 = "13fa70a4ee923979ffb522cacce59d34421ebdea5625e1073c4326ef9d2dd42e", + strip_prefix = "syn-2.0.40", + urls = ["https://crates.io/api/v1/crates/syn/2.0.40/download"], + visibility = [], +) + +cargo.rust_library( + name = "syn-2.0.40", + srcs = [":syn-2.0.40.crate"], + crate = "syn", + crate_root = 
"syn-2.0.40.crate/src/lib.rs", + edition = "2021", + features = [ + "clone-impls", + "default", + "derive", + "extra-traits", + "fold", + "full", + "parsing", + "printing", + "proc-macro", + "quote", + "visit", + "visit-mut", + ], + visibility = [], + deps = [ + ":proc-macro2-1.0.70", + ":quote-1.0.33", + ":unicode-ident-1.0.12", + ], +) + +http_archive( + name = "syn_derive-0.1.8.crate", + sha256 = "1329189c02ff984e9736652b1631330da25eaa6bc639089ed4915d25446cbe7b", + strip_prefix = "syn_derive-0.1.8", + urls = ["https://crates.io/api/v1/crates/syn_derive/0.1.8/download"], visibility = [], ) cargo.rust_library( - name = "syn-2.0.29", - srcs = [":syn-2.0.29.crate"], - crate = "syn", - crate_root = "syn-2.0.29.crate/src/lib.rs", + name = "syn_derive-0.1.8", + srcs = [":syn_derive-0.1.8.crate"], + crate = "syn_derive", + crate_root = "syn_derive-0.1.8.crate/src/lib.rs", edition = "2021", features = [ - "clone-impls", "default", - "derive", - "extra-traits", "full", - "parsing", - "printing", - "proc-macro", - "quote", - "visit", - "visit-mut", ], + proc_macro = True, visibility = [], deps = [ - ":proc-macro2-1.0.66", + ":proc-macro-error-1.0.4", + ":proc-macro2-1.0.70", ":quote-1.0.33", - ":unicode-ident-1.0.11", + ":syn-2.0.40", ], ) @@ -13982,6 +14836,88 @@ cargo.rust_library( visibility = [], ) +http_archive( + name = "system-configuration-0.5.1.crate", + sha256 = "ba3a3adc5c275d719af8cb4272ea1c4a6d668a777f37e115f6d11ddbc1c8e0e7", + strip_prefix = "system-configuration-0.5.1", + urls = ["https://crates.io/api/v1/crates/system-configuration/0.5.1/download"], + visibility = [], +) + +cargo.rust_library( + name = "system-configuration-0.5.1", + srcs = [":system-configuration-0.5.1.crate"], + crate = "system_configuration", + crate_root = "system-configuration-0.5.1.crate/src/lib.rs", + edition = "2021", + visibility = [], + deps = [ + ":bitflags-1.3.2", + ":core-foundation-0.9.4", + ":system-configuration-sys-0.5.0", + ], +) + +http_archive( + name = 
"system-configuration-sys-0.5.0.crate", + sha256 = "a75fb188eb626b924683e3b95e3a48e63551fcfb51949de2f06a9d91dbee93c9", + strip_prefix = "system-configuration-sys-0.5.0", + urls = ["https://crates.io/api/v1/crates/system-configuration-sys/0.5.0/download"], + visibility = [], +) + +cargo.rust_library( + name = "system-configuration-sys-0.5.0", + srcs = [":system-configuration-sys-0.5.0.crate"], + crate = "system_configuration_sys", + crate_root = "system-configuration-sys-0.5.0.crate/src/lib.rs", + edition = "2021", + env = { + "CARGO_MANIFEST_DIR": "system-configuration-sys-0.5.0.crate", + "CARGO_PKG_AUTHORS": "Mullvad VPN", + "CARGO_PKG_DESCRIPTION": "Low level bindings to SystemConfiguration framework for macOS", + "CARGO_PKG_NAME": "system-configuration-sys", + "CARGO_PKG_REPOSITORY": "https://github.com/mullvad/system-configuration-rs", + "CARGO_PKG_VERSION": "0.5.0", + "CARGO_PKG_VERSION_MAJOR": "0", + "CARGO_PKG_VERSION_MINOR": "5", + "CARGO_PKG_VERSION_PATCH": "0", + }, + rustc_flags = ["@$(location :system-configuration-sys-0.5.0-build-script-run[rustc_flags])"], + visibility = [], + deps = [ + ":core-foundation-sys-0.8.6", + ":libc-0.2.151", + ], +) + +cargo.rust_binary( + name = "system-configuration-sys-0.5.0-build-script-build", + srcs = [":system-configuration-sys-0.5.0.crate"], + crate = "build_script_build", + crate_root = "system-configuration-sys-0.5.0.crate/build.rs", + edition = "2021", + env = { + "CARGO_MANIFEST_DIR": "system-configuration-sys-0.5.0.crate", + "CARGO_PKG_AUTHORS": "Mullvad VPN", + "CARGO_PKG_DESCRIPTION": "Low level bindings to SystemConfiguration framework for macOS", + "CARGO_PKG_NAME": "system-configuration-sys", + "CARGO_PKG_REPOSITORY": "https://github.com/mullvad/system-configuration-rs", + "CARGO_PKG_VERSION": "0.5.0", + "CARGO_PKG_VERSION_MAJOR": "0", + "CARGO_PKG_VERSION_MINOR": "5", + "CARGO_PKG_VERSION_PATCH": "0", + }, + visibility = [], +) + +buildscript_run( + name = 
"system-configuration-sys-0.5.0-build-script-run", + package_name = "system-configuration-sys", + buildscript_rule = ":system-configuration-sys-0.5.0-build-script-build", + version = "0.5.0", +) + http_archive( name = "tap-1.0.1.crate", sha256 = "55937e1799185b12863d447f42597ed69d9928686b8d88a1df17376a097d8369", @@ -14026,31 +14962,31 @@ cargo.rust_library( platform = { "linux-arm64": dict( deps = [ - ":libc-0.2.147", - ":xattr-1.0.1", + ":libc-0.2.151", + ":xattr-1.1.2", ], ), "linux-x86_64": dict( deps = [ - ":libc-0.2.147", - ":xattr-1.0.1", + ":libc-0.2.151", + ":xattr-1.1.2", ], ), "macos-arm64": dict( deps = [ - ":libc-0.2.147", - ":xattr-1.0.1", + ":libc-0.2.151", + ":xattr-1.1.2", ], ), "macos-x86_64": dict( deps = [ - ":libc-0.2.147", - ":xattr-1.0.1", + ":libc-0.2.151", + ":xattr-1.1.2", ], ), }, visibility = [], - deps = [":filetime-0.2.22"], + deps = [":filetime-0.2.23"], ) http_archive( @@ -14073,36 +15009,36 @@ cargo.rust_library( alias( name = "tempfile", - actual = ":tempfile-3.8.0", + actual = ":tempfile-3.8.1", visibility = ["PUBLIC"], ) http_archive( - name = "tempfile-3.8.0.crate", - sha256 = "cb94d2f3cc536af71caac6b6fcebf65860b347e7ce0cc9ebe8f70d3e521054ef", - strip_prefix = "tempfile-3.8.0", - urls = ["https://crates.io/api/v1/crates/tempfile/3.8.0/download"], + name = "tempfile-3.8.1.crate", + sha256 = "7ef1adac450ad7f4b3c28589471ade84f25f731a7a0fe30d71dfa9f60fd808e5", + strip_prefix = "tempfile-3.8.1", + urls = ["https://crates.io/api/v1/crates/tempfile/3.8.1/download"], visibility = [], ) cargo.rust_library( - name = "tempfile-3.8.0", - srcs = [":tempfile-3.8.0.crate"], + name = "tempfile-3.8.1", + srcs = [":tempfile-3.8.1.crate"], crate = "tempfile", - crate_root = "tempfile-3.8.0.crate/src/lib.rs", + crate_root = "tempfile-3.8.1.crate/src/lib.rs", edition = "2018", platform = { "linux-arm64": dict( - deps = [":rustix-0.38.8"], + deps = [":rustix-0.38.28"], ), "linux-x86_64": dict( - deps = [":rustix-0.38.8"], + deps = [":rustix-0.38.28"], 
), "macos-arm64": dict( - deps = [":rustix-0.38.8"], + deps = [":rustix-0.38.28"], ), "macos-x86_64": dict( - deps = [":rustix-0.38.8"], + deps = [":rustix-0.38.28"], ), "windows-gnu": dict( deps = [":windows-sys-0.48.0"], @@ -14114,36 +15050,36 @@ cargo.rust_library( visibility = [], deps = [ ":cfg-if-1.0.0", - ":fastrand-2.0.0", + ":fastrand-2.0.1", ], ) http_archive( - name = "terminal_size-0.2.6.crate", - sha256 = "8e6bf6f19e9f8ed8d4048dc22981458ebcf406d67e94cd422e5ecd73d63b3237", - strip_prefix = "terminal_size-0.2.6", - urls = ["https://crates.io/api/v1/crates/terminal_size/0.2.6/download"], + name = "terminal_size-0.3.0.crate", + sha256 = "21bebf2b7c9e0a515f6e0f8c51dc0f8e4696391e6f1ff30379559f8365fb0df7", + strip_prefix = "terminal_size-0.3.0", + urls = ["https://crates.io/api/v1/crates/terminal_size/0.3.0/download"], visibility = [], ) cargo.rust_library( - name = "terminal_size-0.2.6", - srcs = [":terminal_size-0.2.6.crate"], + name = "terminal_size-0.3.0", + srcs = [":terminal_size-0.3.0.crate"], crate = "terminal_size", - crate_root = "terminal_size-0.2.6.crate/src/lib.rs", - edition = "2018", + crate_root = "terminal_size-0.3.0.crate/src/lib.rs", + edition = "2021", platform = { "linux-arm64": dict( - deps = [":rustix-0.37.23"], + deps = [":rustix-0.38.28"], ), "linux-x86_64": dict( - deps = [":rustix-0.37.23"], + deps = [":rustix-0.38.28"], ), "macos-arm64": dict( - deps = [":rustix-0.37.23"], + deps = [":rustix-0.38.28"], ), "macos-x86_64": dict( - deps = [":rustix-0.37.23"], + deps = [":rustix-0.38.28"], ), "windows-gnu": dict( deps = [":windows-sys-0.48.0"], @@ -14157,31 +15093,56 @@ cargo.rust_library( alias( name = "test-log", - actual = ":test-log-0.2.12", + actual = ":test-log-0.2.14", visibility = ["PUBLIC"], ) http_archive( - name = "test-log-0.2.12.crate", - sha256 = "d9601d162c1d77e62c1ea0bc8116cd1caf143ce3af947536c3c9052a1677fe0c", - strip_prefix = "test-log-0.2.12", - urls = ["https://crates.io/api/v1/crates/test-log/0.2.12/download"], + 
name = "test-log-0.2.14.crate", + sha256 = "6159ab4116165c99fc88cce31f99fa2c9dbe08d3691cb38da02fc3b45f357d2b", + strip_prefix = "test-log-0.2.14", + urls = ["https://crates.io/api/v1/crates/test-log/0.2.14/download"], visibility = [], ) cargo.rust_library( - name = "test-log-0.2.12", - srcs = [":test-log-0.2.12.crate"], + name = "test-log-0.2.14", + srcs = [":test-log-0.2.14.crate"], crate = "test_log", - crate_root = "test-log-0.2.12.crate/src/lib.rs", - edition = "2018", + crate_root = "test-log-0.2.14.crate/src/lib.rs", + edition = "2021", + features = [ + "trace", + "tracing-subscriber", + ], + visibility = [], + deps = [ + ":test-log-macros-0.2.14", + ":tracing-subscriber-0.3.18", + ], +) + +http_archive( + name = "test-log-macros-0.2.14.crate", + sha256 = "7ba277e77219e9eea169e8508942db1bf5d8a41ff2db9b20aab5a5aadc9fa25d", + strip_prefix = "test-log-macros-0.2.14", + urls = ["https://crates.io/api/v1/crates/test-log-macros/0.2.14/download"], + visibility = [], +) + +cargo.rust_library( + name = "test-log-macros-0.2.14", + srcs = [":test-log-macros-0.2.14.crate"], + crate = "test_log_macros", + crate_root = "test-log-macros-0.2.14.crate/src/lib.rs", + edition = "2021", features = ["trace"], proc_macro = True, visibility = [], deps = [ - ":proc-macro2-1.0.66", + ":proc-macro2-1.0.70", ":quote-1.0.33", - ":syn-1.0.109", + ":syn-2.0.40", ], ) @@ -14194,20 +15155,20 @@ cargo.rust_binary( visibility = [], deps = [ ":async-nats-0.31.0", - ":async-recursion-1.0.4", - ":async-trait-0.1.73", + ":async-recursion-1.0.5", + ":async-trait-0.1.74", ":axum-0.6.20", - ":base64-0.21.2", - ":blake3-1.4.1", + ":base64-0.21.5", + ":blake3-1.5.0", ":bollard-0.15.0", - ":bytes-1.4.0", - ":chrono-0.4.26", + ":bytes-1.5.0", + ":chrono-0.4.31", ":ciborium-0.2.1", - ":clap-4.3.24", + ":clap-4.4.11", ":color-eyre-0.6.2", - ":colored-2.0.4", - ":comfy-table-7.0.1", - ":config-0.13.3", + ":colored-2.1.0", + ":comfy-table-7.1.0", + ":config-0.13.4", ":console-0.15.7", ":convert_case-0.6.0", 
":crossbeam-channel-0.5.8", @@ -14218,27 +15179,27 @@ cargo.rust_binary( ":diff-0.1.13", ":directories-5.0.1", ":docker-api-0.14.0", - ":dyn-clone-1.0.13", - ":flate2-1.0.27", - ":futures-0.3.28", + ":dyn-clone-1.0.16", + ":flate2-1.0.28", + ":futures-0.3.29", ":futures-lite-1.13.0", ":hex-0.4.3", - ":http-0.2.9", + ":http-0.2.11", ":hyper-0.14.27", ":hyperlocal-0.8.0", ":iftree-1.0.4", - ":indicatif-0.17.6", - ":indoc-2.0.3", + ":indicatif-0.17.7", + ":indoc-2.0.4", ":inquire-0.6.2", ":itertools-0.10.5", - ":jwt-simple-0.11.6", + ":jwt-simple-0.11.9", ":lazy_static-1.4.0", ":names-0.14.0", - ":nix-0.26.2", + ":nix-0.26.4", ":nkeys-0.2.0", ":num_cpus-1.16.0", - ":once_cell-1.18.0", - ":open-5.0.0", + ":once_cell-1.19.0", + ":open-5.0.1", ":opentelemetry-0.18.0", ":opentelemetry-otlp-0.11.0", ":opentelemetry-semantic-conventions-0.10.0", @@ -14246,53 +15207,53 @@ cargo.rust_binary( ":paste-1.0.14", ":pathdiff-0.2.1", ":petgraph-0.6.4", - ":pin-project-lite-0.2.12", + ":pin-project-lite-0.2.13", ":podman-api-0.10.0", ":postgres-types-0.2.6", ":pretty_assertions_sorted-1.2.3", - ":proc-macro2-1.0.66", + ":proc-macro2-1.0.70", ":quote-1.0.33", ":rabbitmq-stream-client-0.3.0", ":rand-0.8.5", - ":refinery-0.8.10", - ":regex-1.9.3", + ":refinery-0.8.11", + ":regex-1.10.2", ":remain-0.2.11", - ":reqwest-0.11.20", + ":reqwest-0.11.22", ":rust-s3-0.33.0", - ":rustls-0.21.6", + ":rustls-0.21.10", ":sea-orm-0.11.3", ":self-replace-1.3.5", - ":serde-1.0.186", - ":serde-aux-4.2.0", - ":serde_json-1.0.105", + ":serde-1.0.193", + ":serde-aux-4.3.1", + ":serde_json-1.0.108", ":serde_url_params-0.2.1", - ":serde_with-3.3.0", - ":serde_yaml-0.9.25", + ":serde_with-3.4.0", + ":serde_yaml-0.9.27", ":sodiumoxide-0.2.7", ":stream-cancel-0.8.1", ":strum-0.24.1", - ":syn-2.0.29", + ":syn-2.0.40", ":tar-0.4.40", - ":tempfile-3.8.0", - ":test-log-0.2.12", - ":thiserror-1.0.47", - ":tokio-1.32.0", - ":tokio-postgres-0.7.9", + ":tempfile-3.8.1", + ":test-log-0.2.14", + ":thiserror-1.0.50", + 
":tokio-1.35.0", + ":tokio-postgres-0.7.10", ":tokio-serde-0.8.0", ":tokio-stream-0.1.14", ":tokio-test-0.4.3", ":tokio-tungstenite-0.18.0", - ":tokio-util-0.7.8", + ":tokio-util-0.7.10", ":tokio-vsock-0.4.0", - ":toml-0.7.6", + ":toml-0.7.8", ":tower-0.4.13", - ":tower-http-0.4.3", - ":tracing-0.1.37", + ":tower-http-0.4.4", + ":tracing-0.1.40", ":tracing-opentelemetry-0.18.0", - ":tracing-subscriber-0.3.17", - ":ulid-1.0.0", - ":url-2.4.0", - ":uuid-1.4.1", + ":tracing-subscriber-0.3.18", + ":ulid-1.1.0", + ":url-2.5.0", + ":uuid-1.6.1", ":vfs-0.9.0", ":vfs-tar-0.4.0", ":y-sync-0.3.1", @@ -14302,48 +15263,48 @@ cargo.rust_binary( alias( name = "thiserror", - actual = ":thiserror-1.0.47", + actual = ":thiserror-1.0.50", visibility = ["PUBLIC"], ) http_archive( - name = "thiserror-1.0.47.crate", - sha256 = "97a802ec30afc17eee47b2855fc72e0c4cd62be9b4efe6591edde0ec5bd68d8f", - strip_prefix = "thiserror-1.0.47", - urls = ["https://crates.io/api/v1/crates/thiserror/1.0.47/download"], + name = "thiserror-1.0.50.crate", + sha256 = "f9a7210f5c9a7156bb50aa36aed4c95afb51df0df00713949448cf9e97d382d2", + strip_prefix = "thiserror-1.0.50", + urls = ["https://crates.io/api/v1/crates/thiserror/1.0.50/download"], visibility = [], ) cargo.rust_library( - name = "thiserror-1.0.47", - srcs = [":thiserror-1.0.47.crate"], + name = "thiserror-1.0.50", + srcs = [":thiserror-1.0.50.crate"], crate = "thiserror", - crate_root = "thiserror-1.0.47.crate/src/lib.rs", + crate_root = "thiserror-1.0.50.crate/src/lib.rs", edition = "2021", visibility = [], - deps = [":thiserror-impl-1.0.47"], + deps = [":thiserror-impl-1.0.50"], ) http_archive( - name = "thiserror-impl-1.0.47.crate", - sha256 = "6bb623b56e39ab7dcd4b1b98bb6c8f8d907ed255b18de254088016b27a8ee19b", - strip_prefix = "thiserror-impl-1.0.47", - urls = ["https://crates.io/api/v1/crates/thiserror-impl/1.0.47/download"], + name = "thiserror-impl-1.0.50.crate", + sha256 = "266b2e40bc00e5a6c09c3584011e08b06f123c00362c92b975ba9843aaaa14b8", + 
strip_prefix = "thiserror-impl-1.0.50", + urls = ["https://crates.io/api/v1/crates/thiserror-impl/1.0.50/download"], visibility = [], ) cargo.rust_library( - name = "thiserror-impl-1.0.47", - srcs = [":thiserror-impl-1.0.47.crate"], + name = "thiserror-impl-1.0.50", + srcs = [":thiserror-impl-1.0.50.crate"], crate = "thiserror_impl", - crate_root = "thiserror-impl-1.0.47.crate/src/lib.rs", + crate_root = "thiserror-impl-1.0.50.crate/src/lib.rs", edition = "2021", proc_macro = True, visibility = [], deps = [ - ":proc-macro2-1.0.66", + ":proc-macro2-1.0.70", ":quote-1.0.33", - ":syn-2.0.29", + ":syn-2.0.40", ], ) @@ -14364,49 +15325,23 @@ cargo.rust_library( visibility = [], deps = [ ":cfg-if-1.0.0", - ":once_cell-1.18.0", + ":once_cell-1.19.0", ], ) http_archive( - name = "time-0.1.45.crate", - sha256 = "1b797afad3f312d1c66a56d11d0316f916356d11bd158fbc6ca6389ff6bf805a", - strip_prefix = "time-0.1.45", - urls = ["https://crates.io/api/v1/crates/time/0.1.45/download"], - visibility = [], -) - -cargo.rust_library( - name = "time-0.1.45", - srcs = [":time-0.1.45.crate"], - crate = "time", - crate_root = "time-0.1.45.crate/src/lib.rs", - edition = "2015", - platform = { - "windows-gnu": dict( - deps = [":winapi-0.3.9"], - ), - "windows-msvc": dict( - deps = [":winapi-0.3.9"], - ), - }, - visibility = [], - deps = [":libc-0.2.147"], -) - -http_archive( - name = "time-0.3.27.crate", - sha256 = "0bb39ee79a6d8de55f48f2293a830e040392f1c5f16e336bdd1788cd0aadce07", - strip_prefix = "time-0.3.27", - urls = ["https://crates.io/api/v1/crates/time/0.3.27/download"], + name = "time-0.3.30.crate", + sha256 = "c4a34ab300f2dee6e562c10a046fc05e358b29f9bf92277f30c3c8d82275f6f5", + strip_prefix = "time-0.3.30", + urls = ["https://crates.io/api/v1/crates/time/0.3.30/download"], visibility = [], ) cargo.rust_library( - name = "time-0.3.27", - srcs = [":time-0.3.27.crate"], + name = "time-0.3.30", + srcs = [":time-0.3.30.crate"], crate = "time", - crate_root = "time-0.3.27.crate/src/lib.rs", 
+ crate_root = "time-0.3.30.crate/src/lib.rs", edition = "2021", features = [ "alloc", @@ -14420,44 +15355,45 @@ cargo.rust_library( ], visibility = [], deps = [ - ":deranged-0.3.8", - ":itoa-1.0.9", - ":serde-1.0.186", - ":time-core-0.1.1", - ":time-macros-0.2.13", + ":deranged-0.3.10", + ":itoa-1.0.10", + ":powerfmt-0.2.0", + ":serde-1.0.193", + ":time-core-0.1.2", + ":time-macros-0.2.15", ], ) http_archive( - name = "time-core-0.1.1.crate", - sha256 = "7300fbefb4dadc1af235a9cef3737cea692a9d97e1b9cbcd4ebdae6f8868e6fb", - strip_prefix = "time-core-0.1.1", - urls = ["https://crates.io/api/v1/crates/time-core/0.1.1/download"], + name = "time-core-0.1.2.crate", + sha256 = "ef927ca75afb808a4d64dd374f00a2adf8d0fcff8e7b184af886c3c87ec4a3f3", + strip_prefix = "time-core-0.1.2", + urls = ["https://crates.io/api/v1/crates/time-core/0.1.2/download"], visibility = [], ) cargo.rust_library( - name = "time-core-0.1.1", - srcs = [":time-core-0.1.1.crate"], + name = "time-core-0.1.2", + srcs = [":time-core-0.1.2.crate"], crate = "time_core", - crate_root = "time-core-0.1.1.crate/src/lib.rs", + crate_root = "time-core-0.1.2.crate/src/lib.rs", edition = "2021", visibility = [], ) http_archive( - name = "time-macros-0.2.13.crate", - sha256 = "733d258752e9303d392b94b75230d07b0b9c489350c69b851fc6c065fde3e8f9", - strip_prefix = "time-macros-0.2.13", - urls = ["https://crates.io/api/v1/crates/time-macros/0.2.13/download"], + name = "time-macros-0.2.15.crate", + sha256 = "4ad70d68dba9e1f8aceda7aa6711965dfec1cac869f311a51bd08b3a2ccbce20", + strip_prefix = "time-macros-0.2.15", + urls = ["https://crates.io/api/v1/crates/time-macros/0.2.15/download"], visibility = [], ) cargo.rust_library( - name = "time-macros-0.2.13", - srcs = [":time-macros-0.2.13.crate"], + name = "time-macros-0.2.15", + srcs = [":time-macros-0.2.15.crate"], crate = "time_macros", - crate_root = "time-macros-0.2.13.crate/src/lib.rs", + crate_root = "time-macros-0.2.15.crate/src/lib.rs", edition = "2021", features = [ 
"formatting", @@ -14466,7 +15402,7 @@ cargo.rust_library( ], proc_macro = True, visibility = [], - deps = [":time-core-0.1.1"], + deps = [":time-core-0.1.2"], ) http_archive( @@ -14511,23 +15447,23 @@ cargo.rust_library( alias( name = "tokio", - actual = ":tokio-1.32.0", + actual = ":tokio-1.35.0", visibility = ["PUBLIC"], ) http_archive( - name = "tokio-1.32.0.crate", - sha256 = "17ed6077ed6cd6c74735e21f37eb16dc3935f96878b1fe961074089cc80893f9", - strip_prefix = "tokio-1.32.0", - urls = ["https://crates.io/api/v1/crates/tokio/1.32.0/download"], + name = "tokio-1.35.0.crate", + sha256 = "841d45b238a16291a4e1584e61820b8ae57d696cc5015c459c229ccc6990cc1c", + strip_prefix = "tokio-1.35.0", + urls = ["https://crates.io/api/v1/crates/tokio/1.35.0/download"], visibility = [], ) cargo.rust_library( - name = "tokio-1.32.0", - srcs = [":tokio-1.32.0.crate"], + name = "tokio-1.35.0", + srcs = [":tokio-1.35.0.crate"], crate = "tokio", - crate_root = "tokio-1.32.0.crate/src/lib.rs", + crate_root = "tokio-1.35.0.crate/src/lib.rs", edition = "2021", features = [ "bytes", @@ -14558,41 +15494,41 @@ cargo.rust_library( platform = { "linux-arm64": dict( deps = [ - ":libc-0.2.147", + ":libc-0.2.151", ":signal-hook-registry-1.4.1", - ":socket2-0.5.3", + ":socket2-0.5.5", ], ), "linux-x86_64": dict( deps = [ - ":libc-0.2.147", + ":libc-0.2.151", ":signal-hook-registry-1.4.1", - ":socket2-0.5.3", + ":socket2-0.5.5", ], ), "macos-arm64": dict( deps = [ - ":libc-0.2.147", + ":libc-0.2.151", ":signal-hook-registry-1.4.1", - ":socket2-0.5.3", + ":socket2-0.5.5", ], ), "macos-x86_64": dict( deps = [ - ":libc-0.2.147", + ":libc-0.2.151", ":signal-hook-registry-1.4.1", - ":socket2-0.5.3", + ":socket2-0.5.5", ], ), "windows-gnu": dict( deps = [ - ":socket2-0.5.3", + ":socket2-0.5.5", ":windows-sys-0.48.0", ], ), "windows-msvc": dict( deps = [ - ":socket2-0.5.3", + ":socket2-0.5.5", ":windows-sys-0.48.0", ], ), @@ -14603,12 +15539,12 @@ cargo.rust_library( ], visibility = [], deps = [ - 
":bytes-1.4.0", - ":mio-0.8.8", + ":bytes-1.5.0", + ":mio-0.8.10", ":num_cpus-1.16.0", ":parking_lot-0.12.1", - ":pin-project-lite-0.2.12", - ":tokio-macros-2.1.0", + ":pin-project-lite-0.2.13", + ":tokio-macros-2.2.0", ":tracing", ], ) @@ -14629,53 +15565,53 @@ cargo.rust_library( edition = "2018", visibility = [], deps = [ - ":pin-project-lite-0.2.12", - ":tokio-1.32.0", + ":pin-project-lite-0.2.13", + ":tokio-1.35.0", ], ) http_archive( - name = "tokio-macros-2.1.0.crate", - sha256 = "630bdcf245f78637c13ec01ffae6187cca34625e8c63150d424b59e55af2675e", - strip_prefix = "tokio-macros-2.1.0", - urls = ["https://crates.io/api/v1/crates/tokio-macros/2.1.0/download"], + name = "tokio-macros-2.2.0.crate", + sha256 = "5b8a1e28f2deaa14e508979454cb3a223b10b938b45af148bc0986de36f1923b", + strip_prefix = "tokio-macros-2.2.0", + urls = ["https://crates.io/api/v1/crates/tokio-macros/2.2.0/download"], visibility = [], ) cargo.rust_library( - name = "tokio-macros-2.1.0", - srcs = [":tokio-macros-2.1.0.crate"], + name = "tokio-macros-2.2.0", + srcs = [":tokio-macros-2.2.0.crate"], crate = "tokio_macros", - crate_root = "tokio-macros-2.1.0.crate/src/lib.rs", - edition = "2018", + crate_root = "tokio-macros-2.2.0.crate/src/lib.rs", + edition = "2021", proc_macro = True, visibility = [], deps = [ - ":proc-macro2-1.0.66", + ":proc-macro2-1.0.70", ":quote-1.0.33", - ":syn-2.0.29", + ":syn-2.0.40", ], ) alias( name = "tokio-postgres", - actual = ":tokio-postgres-0.7.9", + actual = ":tokio-postgres-0.7.10", visibility = ["PUBLIC"], ) http_archive( - name = "tokio-postgres-0.7.9.crate", - sha256 = "000387915083ea6406ee44b50ca74813aba799fe682a7689e382bf9e13b74ce9", - strip_prefix = "tokio-postgres-0.7.9", - urls = ["https://crates.io/api/v1/crates/tokio-postgres/0.7.9/download"], + name = "tokio-postgres-0.7.10.crate", + sha256 = "d340244b32d920260ae7448cb72b6e238bddc3d4f7603394e7dd46ed8e48f5b8", + strip_prefix = "tokio-postgres-0.7.10", + urls = 
["https://crates.io/api/v1/crates/tokio-postgres/0.7.10/download"], visibility = [], ) cargo.rust_library( - name = "tokio-postgres-0.7.9", - srcs = [":tokio-postgres-0.7.9.crate"], + name = "tokio-postgres-0.7.10", + srcs = [":tokio-postgres-0.7.10.crate"], crate = "tokio_postgres", - crate_root = "tokio-postgres-0.7.9.crate/src/lib.rs", + crate_root = "tokio-postgres-0.7.10.crate/src/lib.rs", edition = "2018", features = [ "default", @@ -14685,42 +15621,42 @@ cargo.rust_library( ], platform = { "linux-arm64": dict( - deps = [":socket2-0.5.3"], + deps = [":socket2-0.5.5"], ), "linux-x86_64": dict( - deps = [":socket2-0.5.3"], + deps = [":socket2-0.5.5"], ), "macos-arm64": dict( - deps = [":socket2-0.5.3"], + deps = [":socket2-0.5.5"], ), "macos-x86_64": dict( - deps = [":socket2-0.5.3"], + deps = [":socket2-0.5.5"], ), "windows-gnu": dict( - deps = [":socket2-0.5.3"], + deps = [":socket2-0.5.5"], ), "windows-msvc": dict( - deps = [":socket2-0.5.3"], + deps = [":socket2-0.5.5"], ), }, visibility = [], deps = [ - ":async-trait-0.1.73", - ":byteorder-1.4.3", - ":bytes-1.4.0", + ":async-trait-0.1.74", + ":byteorder-1.5.0", + ":bytes-1.5.0", ":fallible-iterator-0.2.0", ":futures-channel-0.3.29", ":futures-util-0.3.29", ":log-0.4.20", ":parking_lot-0.12.1", - ":percent-encoding-2.3.0", + ":percent-encoding-2.3.1", ":phf-0.11.2", - ":pin-project-lite-0.2.12", + ":pin-project-lite-0.2.13", ":postgres-protocol-0.6.6", ":postgres-types-0.2.6", ":rand-0.8.5", - ":tokio-1.32.0", - ":tokio-util-0.7.8", + ":tokio-1.35.0", + ":tokio-util-0.7.10", ":whoami-1.4.1", ], ) @@ -14743,7 +15679,7 @@ cargo.rust_library( deps = [ ":pin-project-1.1.3", ":rand-0.8.5", - ":tokio-1.32.0", + ":tokio-1.35.0", ], ) @@ -14768,9 +15704,9 @@ cargo.rust_library( ], visibility = [], deps = [ - ":rustls-0.20.8", - ":tokio-1.32.0", - ":webpki-0.22.0", + ":rustls-0.20.9", + ":tokio-1.35.0", + ":webpki-0.22.4", ], ) @@ -14795,8 +15731,8 @@ cargo.rust_library( ], visibility = [], deps = [ - 
":rustls-0.21.6", - ":tokio-1.32.0", + ":rustls-0.21.10", + ":tokio-1.35.0", ], ) @@ -14828,13 +15764,13 @@ cargo.rust_library( ], visibility = [], deps = [ - ":bytes-1.4.0", - ":educe-0.4.22", + ":bytes-1.5.0", + ":educe-0.4.23", ":futures-core-0.3.29", ":futures-sink-0.3.29", ":pin-project-1.1.3", - ":serde-1.0.186", - ":serde_json-1.0.105", + ":serde-1.0.193", + ":serde_json-1.0.108", ], ) @@ -14866,8 +15802,8 @@ cargo.rust_library( visibility = [], deps = [ ":futures-core-0.3.29", - ":pin-project-lite-0.2.12", - ":tokio-1.32.0", + ":pin-project-lite-0.2.13", + ":tokio-1.35.0", ], ) @@ -14894,9 +15830,9 @@ cargo.rust_library( visibility = [], deps = [ ":async-stream-0.3.5", - ":bytes-1.4.0", + ":bytes-1.5.0", ":futures-core-0.3.29", - ":tokio-1.32.0", + ":tokio-1.35.0", ":tokio-stream-0.1.14", ], ) @@ -14931,24 +15867,24 @@ cargo.rust_library( deps = [ ":futures-util-0.3.29", ":log-0.4.20", - ":tokio-1.32.0", + ":tokio-1.35.0", ":tungstenite-0.18.0", ], ) http_archive( - name = "tokio-tungstenite-0.20.0.crate", - sha256 = "2b2dbec703c26b00d74844519606ef15d09a7d6857860f84ad223dec002ddea2", - strip_prefix = "tokio-tungstenite-0.20.0", - urls = ["https://crates.io/api/v1/crates/tokio-tungstenite/0.20.0/download"], + name = "tokio-tungstenite-0.20.1.crate", + sha256 = "212d5dcb2a1ce06d81107c3d0ffa3121fe974b73f068c8282cb1c32328113b6c", + strip_prefix = "tokio-tungstenite-0.20.1", + urls = ["https://crates.io/api/v1/crates/tokio-tungstenite/0.20.1/download"], visibility = [], ) cargo.rust_library( - name = "tokio-tungstenite-0.20.0", - srcs = [":tokio-tungstenite-0.20.0.crate"], + name = "tokio-tungstenite-0.20.1", + srcs = [":tokio-tungstenite-0.20.1.crate"], crate = "tokio_tungstenite", - crate_root = "tokio-tungstenite-0.20.0.crate/src/lib.rs", + crate_root = "tokio-tungstenite-0.20.1.crate/src/lib.rs", edition = "2018", features = [ "connect", @@ -14960,30 +15896,30 @@ cargo.rust_library( deps = [ ":futures-util-0.3.29", ":log-0.4.20", - ":tokio-1.32.0", - 
":tungstenite-0.20.0", + ":tokio-1.35.0", + ":tungstenite-0.20.1", ], ) alias( name = "tokio-util", - actual = ":tokio-util-0.7.8", + actual = ":tokio-util-0.7.10", visibility = ["PUBLIC"], ) http_archive( - name = "tokio-util-0.7.8.crate", - sha256 = "806fe8c2c87eccc8b3267cbae29ed3ab2d0bd37fca70ab622e46aaa9375ddb7d", - strip_prefix = "tokio-util-0.7.8", - urls = ["https://crates.io/api/v1/crates/tokio-util/0.7.8/download"], + name = "tokio-util-0.7.10.crate", + sha256 = "5419f34732d9eb6ee4c3578b7989078579b7f039cbbb9ca2c4da015749371e15", + strip_prefix = "tokio-util-0.7.10", + urls = ["https://crates.io/api/v1/crates/tokio-util/0.7.10/download"], visibility = [], ) cargo.rust_library( - name = "tokio-util-0.7.8", - srcs = [":tokio-util-0.7.8.crate"], + name = "tokio-util-0.7.10", + srcs = [":tokio-util-0.7.10.crate"], crate = "tokio_util", - crate_root = "tokio-util-0.7.8.crate/src/lib.rs", + crate_root = "tokio-util-0.7.10.crate/src/lib.rs", edition = "2021", features = [ "codec", @@ -14993,12 +15929,12 @@ cargo.rust_library( ], visibility = [], deps = [ - ":bytes-1.4.0", + ":bytes-1.5.0", ":futures-core-0.3.29", ":futures-sink-0.3.29", - ":pin-project-lite-0.2.12", - ":tokio-1.32.0", - ":tracing-0.1.37", + ":pin-project-lite-0.2.13", + ":tokio-1.35.0", + ":tracing-0.1.40", ], ) @@ -15024,10 +15960,10 @@ cargo.rust_library( edition = "2018", visibility = [], deps = [ - ":bytes-1.4.0", - ":futures-0.3.28", - ":libc-0.2.147", - ":tokio-1.32.0", + ":bytes-1.5.0", + ":futures-0.3.29", + ":libc-0.2.151", + ":tokio-1.35.0", ":vsock-0.3.0", ], ) @@ -15048,28 +15984,28 @@ cargo.rust_library( edition = "2018", features = ["default"], visibility = [], - deps = [":serde-1.0.186"], + deps = [":serde-1.0.193"], ) alias( name = "toml", - actual = ":toml-0.7.6", + actual = ":toml-0.7.8", visibility = ["PUBLIC"], ) http_archive( - name = "toml-0.7.6.crate", - sha256 = "c17e963a819c331dcacd7ab957d80bc2b9a9c1e71c804826d2f283dd65306542", - strip_prefix = "toml-0.7.6", - urls = 
["https://crates.io/api/v1/crates/toml/0.7.6/download"], + name = "toml-0.7.8.crate", + sha256 = "dd79e69d3b627db300ff956027cc6c3798cef26d22526befdfcd12feeb6d2257", + strip_prefix = "toml-0.7.8", + urls = ["https://crates.io/api/v1/crates/toml/0.7.8/download"], visibility = [], ) cargo.rust_library( - name = "toml-0.7.6", - srcs = [":toml-0.7.6.crate"], + name = "toml-0.7.8", + srcs = [":toml-0.7.8.crate"], crate = "toml", - crate_root = "toml-0.7.6.crate/src/lib.rs", + crate_root = "toml-0.7.8.crate/src/lib.rs", edition = "2021", features = [ "default", @@ -15078,45 +16014,45 @@ cargo.rust_library( ], visibility = [], deps = [ - ":serde-1.0.186", - ":serde_spanned-0.6.3", - ":toml_datetime-0.6.3", - ":toml_edit-0.19.14", + ":serde-1.0.193", + ":serde_spanned-0.6.4", + ":toml_datetime-0.6.5", + ":toml_edit-0.19.15", ], ) http_archive( - name = "toml_datetime-0.6.3.crate", - sha256 = "7cda73e2f1397b1262d6dfdcef8aafae14d1de7748d66822d3bfeeb6d03e5e4b", - strip_prefix = "toml_datetime-0.6.3", - urls = ["https://crates.io/api/v1/crates/toml_datetime/0.6.3/download"], + name = "toml_datetime-0.6.5.crate", + sha256 = "3550f4e9685620ac18a50ed434eb3aec30db8ba93b0287467bca5826ea25baf1", + strip_prefix = "toml_datetime-0.6.5", + urls = ["https://crates.io/api/v1/crates/toml_datetime/0.6.5/download"], visibility = [], ) cargo.rust_library( - name = "toml_datetime-0.6.3", - srcs = [":toml_datetime-0.6.3.crate"], + name = "toml_datetime-0.6.5", + srcs = [":toml_datetime-0.6.5.crate"], crate = "toml_datetime", - crate_root = "toml_datetime-0.6.3.crate/src/lib.rs", + crate_root = "toml_datetime-0.6.5.crate/src/lib.rs", edition = "2021", features = ["serde"], visibility = [], - deps = [":serde-1.0.186"], + deps = [":serde-1.0.193"], ) http_archive( - name = "toml_edit-0.19.14.crate", - sha256 = "f8123f27e969974a3dfba720fdb560be359f57b44302d280ba72e76a74480e8a", - strip_prefix = "toml_edit-0.19.14", - urls = ["https://crates.io/api/v1/crates/toml_edit/0.19.14/download"], + name = 
"toml_edit-0.19.15.crate", + sha256 = "1b5bb770da30e5cbfde35a2d7b9b8a2c4b8ef89548a7a6aeab5c9a576e3e7421", + strip_prefix = "toml_edit-0.19.15", + urls = ["https://crates.io/api/v1/crates/toml_edit/0.19.15/download"], visibility = [], ) cargo.rust_library( - name = "toml_edit-0.19.14", - srcs = [":toml_edit-0.19.14.crate"], + name = "toml_edit-0.19.15", + srcs = [":toml_edit-0.19.15.crate"], crate = "toml_edit", - crate_root = "toml_edit-0.19.14.crate/src/lib.rs", + crate_root = "toml_edit-0.19.15.crate/src/lib.rs", edition = "2021", features = [ "default", @@ -15124,11 +16060,34 @@ cargo.rust_library( ], visibility = [], deps = [ - ":indexmap-2.0.0", - ":serde-1.0.186", - ":serde_spanned-0.6.3", - ":toml_datetime-0.6.3", - ":winnow-0.5.15", + ":indexmap-2.1.0", + ":serde-1.0.193", + ":serde_spanned-0.6.4", + ":toml_datetime-0.6.5", + ":winnow-0.5.28", + ], +) + +http_archive( + name = "toml_edit-0.20.7.crate", + sha256 = "70f427fce4d84c72b5b732388bf4a9f4531b53f74e2887e3ecb2481f68f66d81", + strip_prefix = "toml_edit-0.20.7", + urls = ["https://crates.io/api/v1/crates/toml_edit/0.20.7/download"], + visibility = [], +) + +cargo.rust_library( + name = "toml_edit-0.20.7", + srcs = [":toml_edit-0.20.7.crate"], + crate = "toml_edit", + crate_root = "toml_edit-0.20.7.crate/src/lib.rs", + edition = "2021", + features = ["default"], + visibility = [], + deps = [ + ":indexmap-2.1.0", + ":toml_datetime-0.6.5", + ":winnow-0.5.28", ], ) @@ -15180,27 +16139,27 @@ cargo.rust_library( visibility = [], deps = [ ":async-stream-0.3.5", - ":async-trait-0.1.73", + ":async-trait-0.1.74", ":axum-0.6.20", ":base64-0.13.1", - ":bytes-1.4.0", + ":bytes-1.5.0", ":futures-core-0.3.29", ":futures-util-0.3.29", - ":h2-0.3.21", - ":http-0.2.9", - ":http-body-0.4.5", + ":h2-0.3.22", + ":http-0.2.11", + ":http-body-0.4.6", ":hyper-0.14.27", ":hyper-timeout-0.4.1", - ":percent-encoding-2.3.0", + ":percent-encoding-2.3.1", ":pin-project-1.1.3", ":prost-derive-0.11.9", - ":tokio-1.32.0", + 
":tokio-1.35.0", ":tokio-stream-0.1.14", - ":tokio-util-0.7.8", + ":tokio-util-0.7.10", ":tower-0.4.13", ":tower-layer-0.3.2", ":tower-service-0.3.2", - ":tracing-0.1.37", + ":tracing-0.1.40", ":tracing-futures-0.2.5", ], ) @@ -15228,7 +16187,7 @@ cargo.rust_library( visibility = [], deps = [ ":prettyplease-0.1.25", - ":proc-macro2-1.0.66", + ":proc-macro2-1.0.70", ":prost-build-0.11.9", ":quote-1.0.33", ":syn-1.0.109", @@ -15285,36 +16244,36 @@ cargo.rust_library( ":futures-util-0.3.29", ":indexmap-1.9.3", ":pin-project-1.1.3", - ":pin-project-lite-0.2.12", + ":pin-project-lite-0.2.13", ":rand-0.8.5", ":slab-0.4.9", - ":tokio-1.32.0", - ":tokio-util-0.7.8", + ":tokio-1.35.0", + ":tokio-util-0.7.10", ":tower-layer-0.3.2", ":tower-service-0.3.2", - ":tracing-0.1.37", + ":tracing-0.1.40", ], ) alias( name = "tower-http", - actual = ":tower-http-0.4.3", + actual = ":tower-http-0.4.4", visibility = ["PUBLIC"], ) http_archive( - name = "tower-http-0.4.3.crate", - sha256 = "55ae70283aba8d2a8b411c695c437fe25b8b5e44e23e780662002fc72fb47a82", - strip_prefix = "tower-http-0.4.3", - urls = ["https://crates.io/api/v1/crates/tower-http/0.4.3/download"], + name = "tower-http-0.4.4.crate", + sha256 = "61c5bb1d698276a2443e5ecfabc1008bf15a36c12e6a7176e7bf089ea9131140", + strip_prefix = "tower-http-0.4.4", + urls = ["https://crates.io/api/v1/crates/tower-http/0.4.4/download"], visibility = [], ) cargo.rust_library( - name = "tower-http-0.4.3", - srcs = [":tower-http-0.4.3.crate"], + name = "tower-http-0.4.4", + srcs = [":tower-http-0.4.4.crate"], crate = "tower_http", - crate_root = "tower-http-0.4.3.crate/src/lib.rs", + crate_root = "tower-http-0.4.4.crate/src/lib.rs", edition = "2018", features = [ "cors", @@ -15324,17 +16283,17 @@ cargo.rust_library( ], visibility = [], deps = [ - ":bitflags-2.4.0", - ":bytes-1.4.0", + ":bitflags-2.4.1", + ":bytes-1.5.0", ":futures-core-0.3.29", ":futures-util-0.3.29", - ":http-0.2.9", - ":http-body-0.4.5", + ":http-0.2.11", + ":http-body-0.4.6", 
":http-range-header-0.3.1", - ":pin-project-lite-0.2.12", + ":pin-project-lite-0.2.13", ":tower-layer-0.3.2", ":tower-service-0.3.2", - ":tracing-0.1.37", + ":tracing-0.1.40", ], ) @@ -15374,23 +16333,23 @@ cargo.rust_library( alias( name = "tracing", - actual = ":tracing-0.1.37", + actual = ":tracing-0.1.40", visibility = ["PUBLIC"], ) http_archive( - name = "tracing-0.1.37.crate", - sha256 = "8ce8c33a8d48bd45d624a6e523445fd21ec13d3653cd51f681abf67418f54eb8", - strip_prefix = "tracing-0.1.37", - urls = ["https://crates.io/api/v1/crates/tracing/0.1.37/download"], + name = "tracing-0.1.40.crate", + sha256 = "c3523ab5a71916ccf420eebdf5521fcef02141234bbc0b8a49f2fdc4544364ef", + strip_prefix = "tracing-0.1.40", + urls = ["https://crates.io/api/v1/crates/tracing/0.1.40/download"], visibility = [], ) cargo.rust_library( - name = "tracing-0.1.37", - srcs = [":tracing-0.1.37.crate"], + name = "tracing-0.1.40", + srcs = [":tracing-0.1.40.crate"], crate = "tracing", - crate_root = "tracing-0.1.37.crate/src/lib.rs", + crate_root = "tracing-0.1.40.crate/src/lib.rs", edition = "2018", features = [ "attributes", @@ -15401,50 +16360,49 @@ cargo.rust_library( ], visibility = [], deps = [ - ":cfg-if-1.0.0", ":log-0.4.20", - ":pin-project-lite-0.2.12", - ":tracing-attributes-0.1.26", - ":tracing-core-0.1.31", + ":pin-project-lite-0.2.13", + ":tracing-attributes-0.1.27", + ":tracing-core-0.1.32", ], ) http_archive( - name = "tracing-attributes-0.1.26.crate", - sha256 = "5f4f31f56159e98206da9efd823404b79b6ef3143b4a7ab76e67b1751b25a4ab", - strip_prefix = "tracing-attributes-0.1.26", - urls = ["https://crates.io/api/v1/crates/tracing-attributes/0.1.26/download"], + name = "tracing-attributes-0.1.27.crate", + sha256 = "34704c8d6ebcbc939824180af020566b01a7c01f80641264eba0999f6c2b6be7", + strip_prefix = "tracing-attributes-0.1.27", + urls = ["https://crates.io/api/v1/crates/tracing-attributes/0.1.27/download"], visibility = [], ) cargo.rust_library( - name = "tracing-attributes-0.1.26", - 
srcs = [":tracing-attributes-0.1.26.crate"], + name = "tracing-attributes-0.1.27", + srcs = [":tracing-attributes-0.1.27.crate"], crate = "tracing_attributes", - crate_root = "tracing-attributes-0.1.26.crate/src/lib.rs", + crate_root = "tracing-attributes-0.1.27.crate/src/lib.rs", edition = "2018", proc_macro = True, visibility = [], deps = [ - ":proc-macro2-1.0.66", + ":proc-macro2-1.0.70", ":quote-1.0.33", - ":syn-2.0.29", + ":syn-2.0.40", ], ) http_archive( - name = "tracing-core-0.1.31.crate", - sha256 = "0955b8137a1df6f1a2e9a37d8a6656291ff0297c1a97c24e0d8425fe2312f79a", - strip_prefix = "tracing-core-0.1.31", - urls = ["https://crates.io/api/v1/crates/tracing-core/0.1.31/download"], + name = "tracing-core-0.1.32.crate", + sha256 = "c06d3da6113f116aaee68e4d601191614c9053067f9ab7f6edbcb161237daa54", + strip_prefix = "tracing-core-0.1.32", + urls = ["https://crates.io/api/v1/crates/tracing-core/0.1.32/download"], visibility = [], ) cargo.rust_library( - name = "tracing-core-0.1.31", - srcs = [":tracing-core-0.1.31.crate"], + name = "tracing-core-0.1.32", + srcs = [":tracing-core-0.1.32.crate"], crate = "tracing_core", - crate_root = "tracing-core-0.1.31.crate/src/lib.rs", + crate_root = "tracing-core-0.1.32.crate/src/lib.rs", edition = "2018", features = [ "default", @@ -15453,7 +16411,7 @@ cargo.rust_library( "valuable", ], visibility = [], - deps = [":once_cell-1.18.0"], + deps = [":once_cell-1.19.0"], ) http_archive( @@ -15476,8 +16434,8 @@ cargo.rust_library( ], visibility = [], deps = [ - ":tracing-0.1.37", - ":tracing-subscriber-0.3.17", + ":tracing-0.1.40", + ":tracing-subscriber-0.3.18", ], ) @@ -15504,23 +16462,45 @@ cargo.rust_library( visibility = [], deps = [ ":pin-project-1.1.3", - ":tracing-0.1.37", + ":tracing-0.1.40", + ], +) + +http_archive( + name = "tracing-log-0.1.4.crate", + sha256 = "f751112709b4e791d8ce53e32c4ed2d353565a795ce84da2285393f41557bdf2", + strip_prefix = "tracing-log-0.1.4", + urls = 
["https://crates.io/api/v1/crates/tracing-log/0.1.4/download"], + visibility = [], +) + +cargo.rust_library( + name = "tracing-log-0.1.4", + srcs = [":tracing-log-0.1.4.crate"], + crate = "tracing_log", + crate_root = "tracing-log-0.1.4.crate/src/lib.rs", + edition = "2018", + visibility = [], + deps = [ + ":log-0.4.20", + ":once_cell-1.19.0", + ":tracing-core-0.1.32", ], ) http_archive( - name = "tracing-log-0.1.3.crate", - sha256 = "78ddad33d2d10b1ed7eb9d1f518a5674713876e97e5bb9b7345a7984fbb4f922", - strip_prefix = "tracing-log-0.1.3", - urls = ["https://crates.io/api/v1/crates/tracing-log/0.1.3/download"], + name = "tracing-log-0.2.0.crate", + sha256 = "ee855f1f400bd0e5c02d150ae5de3840039a3f54b025156404e34c23c03f47c3", + strip_prefix = "tracing-log-0.2.0", + urls = ["https://crates.io/api/v1/crates/tracing-log/0.2.0/download"], visibility = [], ) cargo.rust_library( - name = "tracing-log-0.1.3", - srcs = [":tracing-log-0.1.3.crate"], + name = "tracing-log-0.2.0", + srcs = [":tracing-log-0.2.0.crate"], crate = "tracing_log", - crate_root = "tracing-log-0.1.3.crate/src/lib.rs", + crate_root = "tracing-log-0.2.0.crate/src/lib.rs", edition = "2018", features = [ "log-tracer", @@ -15528,9 +16508,9 @@ cargo.rust_library( ], visibility = [], deps = [ - ":lazy_static-1.4.0", ":log-0.4.20", - ":tracing-core-0.1.31", + ":once_cell-1.19.0", + ":tracing-core-0.1.32", ], ) @@ -15572,34 +16552,34 @@ cargo.rust_library( ], visibility = [], deps = [ - ":once_cell-1.18.0", + ":once_cell-1.19.0", ":opentelemetry-0.18.0", - ":tracing-0.1.37", - ":tracing-core-0.1.31", - ":tracing-log-0.1.3", - ":tracing-subscriber-0.3.17", + ":tracing-0.1.40", + ":tracing-core-0.1.32", + ":tracing-log-0.1.4", + ":tracing-subscriber-0.3.18", ], ) alias( name = "tracing-subscriber", - actual = ":tracing-subscriber-0.3.17", + actual = ":tracing-subscriber-0.3.18", visibility = ["PUBLIC"], ) http_archive( - name = "tracing-subscriber-0.3.17.crate", - sha256 = 
"30a651bc37f915e81f087d86e62a18eec5f79550c7faff886f7090b4ea757c77", - strip_prefix = "tracing-subscriber-0.3.17", - urls = ["https://crates.io/api/v1/crates/tracing-subscriber/0.3.17/download"], + name = "tracing-subscriber-0.3.18.crate", + sha256 = "ad0f048c97dbd9faa9b7df56362b8ebcaa52adb06b498c050d2f4e32f90a7a8b", + strip_prefix = "tracing-subscriber-0.3.18", + urls = ["https://crates.io/api/v1/crates/tracing-subscriber/0.3.18/download"], visibility = [], ) cargo.rust_library( - name = "tracing-subscriber-0.3.17", - srcs = [":tracing-subscriber-0.3.17.crate"], + name = "tracing-subscriber-0.3.18", + srcs = [":tracing-subscriber-0.3.18.crate"], crate = "tracing_subscriber", - crate_root = "tracing-subscriber-0.3.17.crate/src/lib.rs", + crate_root = "tracing-subscriber-0.3.18.crate/src/lib.rs", edition = "2018", features = [ "alloc", @@ -15623,30 +16603,30 @@ cargo.rust_library( deps = [ ":matchers-0.1.0", ":nu-ansi-term-0.46.0", - ":once_cell-1.18.0", - ":regex-1.9.3", - ":sharded-slab-0.1.4", - ":smallvec-1.11.0", + ":once_cell-1.19.0", + ":regex-1.10.2", + ":sharded-slab-0.1.7", + ":smallvec-1.11.2", ":thread_local-1.1.7", - ":tracing-0.1.37", - ":tracing-core-0.1.31", - ":tracing-log-0.1.3", + ":tracing-0.1.40", + ":tracing-core-0.1.32", + ":tracing-log-0.2.0", ], ) http_archive( - name = "try-lock-0.2.4.crate", - sha256 = "3528ecfd12c466c6f163363caf2d02a71161dd5e1cc6ae7b34207ea2d42d81ed", - strip_prefix = "try-lock-0.2.4", - urls = ["https://crates.io/api/v1/crates/try-lock/0.2.4/download"], + name = "try-lock-0.2.5.crate", + sha256 = "e421abadd41a4225275504ea4d6566923418b7f05506fbc9c0fe86ba7396114b", + strip_prefix = "try-lock-0.2.5", + urls = ["https://crates.io/api/v1/crates/try-lock/0.2.5/download"], visibility = [], ) cargo.rust_library( - name = "try-lock-0.2.4", - srcs = [":try-lock-0.2.4.crate"], + name = "try-lock-0.2.5", + srcs = [":try-lock-0.2.5.crate"], crate = "try_lock", - crate_root = "try-lock-0.2.4.crate/src/lib.rs", + crate_root = 
"try-lock-0.2.5.crate/src/lib.rs", edition = "2015", visibility = [], ) @@ -15676,32 +16656,32 @@ cargo.rust_library( visibility = [], deps = [ ":base64-0.13.1", - ":byteorder-1.4.3", - ":bytes-1.4.0", - ":http-0.2.9", + ":byteorder-1.5.0", + ":bytes-1.5.0", + ":http-0.2.11", ":httparse-1.8.0", ":log-0.4.20", ":rand-0.8.5", - ":sha1-0.10.5", - ":thiserror-1.0.47", - ":url-2.4.0", + ":sha1-0.10.6", + ":thiserror-1.0.50", + ":url-2.5.0", ":utf-8-0.7.6", ], ) http_archive( - name = "tungstenite-0.20.0.crate", - sha256 = "e862a1c4128df0112ab625f55cd5c934bcb4312ba80b39ae4b4835a3fd58e649", - strip_prefix = "tungstenite-0.20.0", - urls = ["https://crates.io/api/v1/crates/tungstenite/0.20.0/download"], + name = "tungstenite-0.20.1.crate", + sha256 = "9e3dac10fd62eaf6617d3a904ae222845979aec67c615d1c842b4002c7666fb9", + strip_prefix = "tungstenite-0.20.1", + urls = ["https://crates.io/api/v1/crates/tungstenite/0.20.1/download"], visibility = [], ) cargo.rust_library( - name = "tungstenite-0.20.0", - srcs = [":tungstenite-0.20.0.crate"], + name = "tungstenite-0.20.1", + srcs = [":tungstenite-0.20.1.crate"], crate = "tungstenite", - crate_root = "tungstenite-0.20.0.crate/src/lib.rs", + crate_root = "tungstenite-0.20.1.crate/src/lib.rs", edition = "2018", features = [ "data-encoding", @@ -15713,78 +16693,78 @@ cargo.rust_library( ], visibility = [], deps = [ - ":byteorder-1.4.3", - ":bytes-1.4.0", - ":data-encoding-2.4.0", - ":http-0.2.9", + ":byteorder-1.5.0", + ":bytes-1.5.0", + ":data-encoding-2.5.0", + ":http-0.2.11", ":httparse-1.8.0", ":log-0.4.20", ":rand-0.8.5", - ":sha1-0.10.5", - ":thiserror-1.0.47", - ":url-2.4.0", + ":sha1-0.10.6", + ":thiserror-1.0.50", + ":url-2.5.0", ":utf-8-0.7.6", ], ) http_archive( - name = "typenum-1.16.0.crate", - sha256 = "497961ef93d974e23eb6f433eb5fe1b7930b659f06d12dec6fc44a8f554c0bba", - strip_prefix = "typenum-1.16.0", - urls = ["https://crates.io/api/v1/crates/typenum/1.16.0/download"], + name = "typenum-1.17.0.crate", + sha256 = 
"42ff0bf0c66b8238c6f3b578df37d0b7848e55df8577b3f74f92a69acceeb825", + strip_prefix = "typenum-1.17.0", + urls = ["https://crates.io/api/v1/crates/typenum/1.17.0/download"], visibility = [], ) cargo.rust_library( - name = "typenum-1.16.0", - srcs = [":typenum-1.16.0.crate"], + name = "typenum-1.17.0", + srcs = [":typenum-1.17.0.crate"], crate = "typenum", - crate_root = "typenum-1.16.0.crate/src/lib.rs", + crate_root = "typenum-1.17.0.crate/src/lib.rs", edition = "2018", env = { - "OUT_DIR": "$(location :typenum-1.16.0-build-script-main-run[out_dir])", + "OUT_DIR": "$(location :typenum-1.17.0-build-script-main-run[out_dir])", }, features = ["force_unix_path_separator"], visibility = [], ) cargo.rust_binary( - name = "typenum-1.16.0-build-script-main", - srcs = [":typenum-1.16.0.crate"], + name = "typenum-1.17.0-build-script-main", + srcs = [":typenum-1.17.0.crate"], crate = "build_script_main", - crate_root = "typenum-1.16.0.crate/build/main.rs", + crate_root = "typenum-1.17.0.crate/build/main.rs", edition = "2018", features = ["force_unix_path_separator"], visibility = [], ) buildscript_run( - name = "typenum-1.16.0-build-script-main-run", + name = "typenum-1.17.0-build-script-main-run", package_name = "typenum", - buildscript_rule = ":typenum-1.16.0-build-script-main", + buildscript_rule = ":typenum-1.17.0-build-script-main", features = ["force_unix_path_separator"], - version = "1.16.0", + version = "1.17.0", ) alias( name = "ulid", - actual = ":ulid-1.0.0", + actual = ":ulid-1.1.0", visibility = ["PUBLIC"], ) http_archive( - name = "ulid-1.0.0.crate", - sha256 = "13a3aaa69b04e5b66cc27309710a569ea23593612387d67daaf102e73aa974fd", - strip_prefix = "ulid-1.0.0", - urls = ["https://crates.io/api/v1/crates/ulid/1.0.0/download"], + name = "ulid-1.1.0.crate", + sha256 = "7e37c4b6cbcc59a8dcd09a6429fbc7890286bcbb79215cea7b38a3c4c0921d93", + strip_prefix = "ulid-1.1.0", + urls = ["https://crates.io/api/v1/crates/ulid/1.1.0/download"], visibility = [], ) 
cargo.rust_library( - name = "ulid-1.0.0", - srcs = [":ulid-1.0.0.crate"], + name = "ulid-1.1.0", + srcs = [":ulid-1.1.0.crate"], crate = "ulid", - crate_root = "ulid-1.0.0.crate/src/lib.rs", + crate_root = "ulid-1.1.0.crate/src/lib.rs", edition = "2018", features = [ "default", @@ -15795,7 +16775,7 @@ cargo.rust_library( visibility = [], deps = [ ":rand-0.8.5", - ":serde-1.0.186", + ":serde-1.0.193", ], ) @@ -15836,18 +16816,18 @@ buildscript_run( ) http_archive( - name = "unicode-bidi-0.3.13.crate", - sha256 = "92888ba5573ff080736b3648696b70cafad7d250551175acbaa4e0385b3e1460", - strip_prefix = "unicode-bidi-0.3.13", - urls = ["https://crates.io/api/v1/crates/unicode-bidi/0.3.13/download"], + name = "unicode-bidi-0.3.14.crate", + sha256 = "6f2528f27a9eb2b21e69c95319b30bd0efd85d09c379741b0f78ea1d86be2416", + strip_prefix = "unicode-bidi-0.3.14", + urls = ["https://crates.io/api/v1/crates/unicode-bidi/0.3.14/download"], visibility = [], ) cargo.rust_library( - name = "unicode-bidi-0.3.13", - srcs = [":unicode-bidi-0.3.13.crate"], + name = "unicode-bidi-0.3.14", + srcs = [":unicode-bidi-0.3.14.crate"], crate = "unicode_bidi", - crate_root = "unicode-bidi-0.3.13.crate/src/lib.rs", + crate_root = "unicode-bidi-0.3.14.crate/src/lib.rs", edition = "2018", features = [ "default", @@ -15858,18 +16838,18 @@ cargo.rust_library( ) http_archive( - name = "unicode-ident-1.0.11.crate", - sha256 = "301abaae475aa91687eb82514b328ab47a211a533026cb25fc3e519b86adfc3c", - strip_prefix = "unicode-ident-1.0.11", - urls = ["https://crates.io/api/v1/crates/unicode-ident/1.0.11/download"], + name = "unicode-ident-1.0.12.crate", + sha256 = "3354b9ac3fae1ff6755cb6db53683adb661634f67557942dea4facebec0fee4b", + strip_prefix = "unicode-ident-1.0.12", + urls = ["https://crates.io/api/v1/crates/unicode-ident/1.0.12/download"], visibility = [], ) cargo.rust_library( - name = "unicode-ident-1.0.11", - srcs = [":unicode-ident-1.0.11.crate"], + name = "unicode-ident-1.0.12", + srcs = 
[":unicode-ident-1.0.12.crate"], crate = "unicode_ident", - crate_root = "unicode-ident-1.0.11.crate/src/lib.rs", + crate_root = "unicode-ident-1.0.12.crate/src/lib.rs", edition = "2018", visibility = [], ) @@ -15914,18 +16894,18 @@ cargo.rust_library( ) http_archive( - name = "unicode-width-0.1.10.crate", - sha256 = "c0edd1e5b14653f783770bce4a4dabb4a5108a5370a5f5d8cfe8710c361f6c8b", - strip_prefix = "unicode-width-0.1.10", - urls = ["https://crates.io/api/v1/crates/unicode-width/0.1.10/download"], + name = "unicode-width-0.1.11.crate", + sha256 = "e51733f11c9c4f72aa0c160008246859e340b00807569a0da0e7a1079b27ba85", + strip_prefix = "unicode-width-0.1.11", + urls = ["https://crates.io/api/v1/crates/unicode-width/0.1.11/download"], visibility = [], ) cargo.rust_library( - name = "unicode-width-0.1.10", - srcs = [":unicode-width-0.1.10.crate"], + name = "unicode-width-0.1.11", + srcs = [":unicode-width-0.1.11.crate"], crate = "unicode_width", - crate_root = "unicode-width-0.1.10.crate/src/lib.rs", + crate_root = "unicode-width-0.1.11.crate/src/lib.rs", edition = "2015", features = ["default"], visibility = [], @@ -16000,25 +16980,42 @@ cargo.rust_library( visibility = [], ) +http_archive( + name = "untrusted-0.9.0.crate", + sha256 = "8ecb6da28b8a351d773b68d5825ac39017e680750f980f3a1a85cd8dd28a47c1", + strip_prefix = "untrusted-0.9.0", + urls = ["https://crates.io/api/v1/crates/untrusted/0.9.0/download"], + visibility = [], +) + +cargo.rust_library( + name = "untrusted-0.9.0", + srcs = [":untrusted-0.9.0.crate"], + crate = "untrusted", + crate_root = "untrusted-0.9.0.crate/src/lib.rs", + edition = "2018", + visibility = [], +) + alias( name = "url", - actual = ":url-2.4.0", + actual = ":url-2.5.0", visibility = ["PUBLIC"], ) http_archive( - name = "url-2.4.0.crate", - sha256 = "50bff7831e19200a85b17131d085c25d7811bc4e186efdaf54bbd132994a88cb", - strip_prefix = "url-2.4.0", - urls = ["https://crates.io/api/v1/crates/url/2.4.0/download"], + name = "url-2.5.0.crate", + 
sha256 = "31e6302e3bb753d46e83516cae55ae196fc0c309407cf11ab35cc51a4c2a4633", + strip_prefix = "url-2.5.0", + urls = ["https://crates.io/api/v1/crates/url/2.5.0/download"], visibility = [], ) cargo.rust_library( - name = "url-2.4.0", - srcs = [":url-2.4.0.crate"], + name = "url-2.5.0", + srcs = [":url-2.5.0.crate"], crate = "url", - crate_root = "url-2.4.0.crate/src/lib.rs", + crate_root = "url-2.5.0.crate/src/lib.rs", edition = "2018", features = [ "default", @@ -16026,10 +17023,10 @@ cargo.rust_library( ], visibility = [], deps = [ - ":form_urlencoded-1.2.0", - ":idna-0.4.0", - ":percent-encoding-2.3.0", - ":serde-1.0.186", + ":form_urlencoded-1.2.1", + ":idna-0.5.0", + ":percent-encoding-2.3.1", + ":serde-1.0.193", ], ) @@ -16051,18 +17048,18 @@ cargo.rust_library( ) http_archive( - name = "utf8-width-0.1.6.crate", - sha256 = "5190c9442dcdaf0ddd50f37420417d219ae5261bbf5db120d0f9bab996c9cba1", - strip_prefix = "utf8-width-0.1.6", - urls = ["https://crates.io/api/v1/crates/utf8-width/0.1.6/download"], + name = "utf8-width-0.1.7.crate", + sha256 = "86bd8d4e895da8537e5315b8254664e6b769c4ff3db18321b297a1e7004392e3", + strip_prefix = "utf8-width-0.1.7", + urls = ["https://crates.io/api/v1/crates/utf8-width/0.1.7/download"], visibility = [], ) cargo.rust_library( - name = "utf8-width-0.1.6", - srcs = [":utf8-width-0.1.6.crate"], + name = "utf8-width-0.1.7", + srcs = [":utf8-width-0.1.7.crate"], crate = "utf8_width", - crate_root = "utf8-width-0.1.6.crate/src/lib.rs", + crate_root = "utf8-width-0.1.7.crate/src/lib.rs", edition = "2021", visibility = [], ) @@ -16087,23 +17084,23 @@ cargo.rust_library( alias( name = "uuid", - actual = ":uuid-1.4.1", + actual = ":uuid-1.6.1", visibility = ["PUBLIC"], ) http_archive( - name = "uuid-1.4.1.crate", - sha256 = "79daa5ed5740825c40b389c5e50312b9c86df53fccd33f281df655642b43869d", - strip_prefix = "uuid-1.4.1", - urls = ["https://crates.io/api/v1/crates/uuid/1.4.1/download"], + name = "uuid-1.6.1.crate", + sha256 = 
"5e395fcf16a7a3d8127ec99782007af141946b4795001f876d54fb0d55978560", + strip_prefix = "uuid-1.6.1", + urls = ["https://crates.io/api/v1/crates/uuid/1.6.1/download"], visibility = [], ) cargo.rust_library( - name = "uuid-1.4.1", - srcs = [":uuid-1.4.1.crate"], + name = "uuid-1.6.1", + srcs = [":uuid-1.6.1.crate"], crate = "uuid", - crate_root = "uuid-1.4.1.crate/src/lib.rs", + crate_root = "uuid-1.6.1.crate/src/lib.rs", edition = "2018", features = [ "default", @@ -16115,8 +17112,8 @@ cargo.rust_library( ], visibility = [], deps = [ - ":getrandom-0.2.10", - ":serde-1.0.186", + ":getrandom-0.2.11", + ":serde-1.0.193", ], ) @@ -16209,48 +17206,48 @@ cargo.rust_library( edition = "2018", visibility = [], deps = [ - ":libc-0.2.147", + ":libc-0.2.151", ":nix-0.24.3", ], ) http_archive( - name = "waker-fn-1.1.0.crate", - sha256 = "9d5b2c62b4012a3e1eca5a7e077d13b3bf498c4073e33ccd58626607748ceeca", - strip_prefix = "waker-fn-1.1.0", - urls = ["https://crates.io/api/v1/crates/waker-fn/1.1.0/download"], + name = "waker-fn-1.1.1.crate", + sha256 = "f3c4517f54858c779bbcbf228f4fca63d121bf85fbecb2dc578cdf4a39395690", + strip_prefix = "waker-fn-1.1.1", + urls = ["https://crates.io/api/v1/crates/waker-fn/1.1.1/download"], visibility = [], ) cargo.rust_library( - name = "waker-fn-1.1.0", - srcs = [":waker-fn-1.1.0.crate"], + name = "waker-fn-1.1.1", + srcs = [":waker-fn-1.1.1.crate"], crate = "waker_fn", - crate_root = "waker-fn-1.1.0.crate/src/lib.rs", + crate_root = "waker-fn-1.1.1.crate/src/lib.rs", edition = "2018", visibility = [], ) http_archive( - name = "walkdir-2.3.3.crate", - sha256 = "36df944cda56c7d8d8b7496af378e6b16de9284591917d307c9b4d313c44e698", - strip_prefix = "walkdir-2.3.3", - urls = ["https://crates.io/api/v1/crates/walkdir/2.3.3/download"], + name = "walkdir-2.4.0.crate", + sha256 = "d71d857dc86794ca4c280d616f7da00d2dbfd8cd788846559a6813e6aa4b54ee", + strip_prefix = "walkdir-2.4.0", + urls = ["https://crates.io/api/v1/crates/walkdir/2.4.0/download"], visibility 
= [], ) cargo.rust_library( - name = "walkdir-2.3.3", - srcs = [":walkdir-2.3.3.crate"], + name = "walkdir-2.4.0", + srcs = [":walkdir-2.4.0.crate"], crate = "walkdir", - crate_root = "walkdir-2.3.3.crate/src/lib.rs", + crate_root = "walkdir-2.4.0.crate/src/lib.rs", edition = "2018", platform = { "windows-gnu": dict( - deps = [":winapi-util-0.1.5"], + deps = [":winapi-util-0.1.6"], ), "windows-msvc": dict( - deps = [":winapi-util-0.1.5"], + deps = [":winapi-util-0.1.6"], ), }, visibility = [], @@ -16272,22 +17269,22 @@ cargo.rust_library( crate_root = "want-0.3.1.crate/src/lib.rs", edition = "2018", visibility = [], - deps = [":try-lock-0.2.4"], + deps = [":try-lock-0.2.5"], ) http_archive( - name = "webpki-0.22.0.crate", - sha256 = "f095d78192e208183081cc07bc5515ef55216397af48b873e5edcd72637fa1bd", - strip_prefix = "webpki-0.22.0", - urls = ["https://crates.io/api/v1/crates/webpki/0.22.0/download"], + name = "webpki-0.22.4.crate", + sha256 = "ed63aea5ce73d0ff405984102c42de94fc55a6b75765d621c65262469b3c9b53", + strip_prefix = "webpki-0.22.4", + urls = ["https://crates.io/api/v1/crates/webpki/0.22.4/download"], visibility = [], ) cargo.rust_library( - name = "webpki-0.22.0", - srcs = [":webpki-0.22.0.crate"], + name = "webpki-0.22.4", + srcs = [":webpki-0.22.4.crate"], crate = "webpki", - crate_root = "webpki-0.22.0.crate/src/lib.rs", + crate_root = "webpki-0.22.4.crate/src/lib.rs", edition = "2018", features = [ "alloc", @@ -16295,8 +17292,8 @@ cargo.rust_library( ], visibility = [], deps = [ - ":ring-0.16.20", - ":untrusted-0.7.1", + ":ring-0.17.7", + ":untrusted-0.9.0", ], ) @@ -16315,52 +17312,70 @@ cargo.rust_library( crate_root = "webpki-roots-0.22.6.crate/src/lib.rs", edition = "2018", visibility = [], - deps = [":webpki-0.22.0"], + deps = [":webpki-0.22.4"], ) http_archive( - name = "webpki-roots-0.25.2.crate", - sha256 = "14247bb57be4f377dfb94c72830b8ce8fc6beac03cf4bf7b9732eadd414123fc", - strip_prefix = "webpki-roots-0.25.2", - urls = 
["https://crates.io/api/v1/crates/webpki-roots/0.25.2/download"], + name = "webpki-roots-0.25.3.crate", + sha256 = "1778a42e8b3b90bff8d0f5032bf22250792889a5cdc752aa0020c84abe3aaf10", + strip_prefix = "webpki-roots-0.25.3", + urls = ["https://crates.io/api/v1/crates/webpki-roots/0.25.3/download"], visibility = [], ) cargo.rust_library( - name = "webpki-roots-0.25.2", - srcs = [":webpki-roots-0.25.2.crate"], + name = "webpki-roots-0.25.3", + srcs = [":webpki-roots-0.25.3.crate"], crate = "webpki_roots", - crate_root = "webpki-roots-0.25.2.crate/src/lib.rs", + crate_root = "webpki-roots-0.25.3.crate/src/lib.rs", edition = "2018", visibility = [], ) http_archive( - name = "which-4.4.0.crate", - sha256 = "2441c784c52b289a054b7201fc93253e288f094e2f4be9058343127c4226a269", - strip_prefix = "which-4.4.0", - urls = ["https://crates.io/api/v1/crates/which/4.4.0/download"], + name = "which-4.4.2.crate", + sha256 = "87ba24419a2078cd2b0f2ede2691b6c66d8e47836da3b6db8265ebad47afbfc7", + strip_prefix = "which-4.4.2", + urls = ["https://crates.io/api/v1/crates/which/4.4.2/download"], visibility = [], ) cargo.rust_library( - name = "which-4.4.0", - srcs = [":which-4.4.0.crate"], + name = "which-4.4.2", + srcs = [":which-4.4.2.crate"], crate = "which", - crate_root = "which-4.4.0.crate/src/lib.rs", - edition = "2018", + crate_root = "which-4.4.2.crate/src/lib.rs", + edition = "2021", platform = { + "linux-arm64": dict( + deps = [":home-0.5.5"], + ), + "linux-x86_64": dict( + deps = [":home-0.5.5"], + ), + "macos-arm64": dict( + deps = [":home-0.5.5"], + ), + "macos-x86_64": dict( + deps = [":home-0.5.5"], + ), "windows-gnu": dict( - deps = [":once_cell-1.18.0"], + deps = [ + ":home-0.5.5", + ":once_cell-1.19.0", + ], ), "windows-msvc": dict( - deps = [":once_cell-1.18.0"], + deps = [ + ":home-0.5.5", + ":once_cell-1.19.0", + ], ), }, visibility = [], deps = [ ":either-1.9.0", - ":libc-0.2.147", + ":rustix-0.38.28", ], ) @@ -16408,19 +17423,15 @@ cargo.rust_library( "handleapi", 
"impl-default", "knownfolders", - "minwinbase", "minwindef", - "ntdef", "ntsecapi", "ntstatus", "objbase", "processenv", - "profileapi", "shlobj", "std", "synchapi", "sysinfoapi", - "timezoneapi", "winbase", "wincon", "winerror", @@ -16455,19 +17466,15 @@ cargo.rust_binary( "handleapi", "impl-default", "knownfolders", - "minwinbase", "minwindef", - "ntdef", "ntsecapi", "ntstatus", "objbase", "processenv", - "profileapi", "shlobj", "std", "synchapi", "sysinfoapi", - "timezoneapi", "winbase", "wincon", "winerror", @@ -16491,19 +17498,15 @@ buildscript_run( "handleapi", "impl-default", "knownfolders", - "minwinbase", "minwindef", - "ntdef", "ntsecapi", "ntstatus", "objbase", "processenv", - "profileapi", "shlobj", "std", "synchapi", "sysinfoapi", - "timezoneapi", "winbase", "wincon", "winerror", @@ -16517,19 +17520,19 @@ buildscript_run( ) http_archive( - name = "winapi-util-0.1.5.crate", - sha256 = "70ec6ce85bb158151cae5e5c87f95a8e97d2c0c4b001223f33a334e3ce5de178", - strip_prefix = "winapi-util-0.1.5", - urls = ["https://crates.io/api/v1/crates/winapi-util/0.1.5/download"], + name = "winapi-util-0.1.6.crate", + sha256 = "f29e6f9198ba0d26b4c9f07dbe6f9ed633e1f3d5b8b414090084349e46a52596", + strip_prefix = "winapi-util-0.1.6", + urls = ["https://crates.io/api/v1/crates/winapi-util/0.1.6/download"], visibility = [], ) cargo.rust_library( - name = "winapi-util-0.1.5", - srcs = [":winapi-util-0.1.5.crate"], + name = "winapi-util-0.1.6", + srcs = [":winapi-util-0.1.6.crate"], crate = "winapi_util", - crate_root = "winapi-util-0.1.5.crate/src/lib.rs", - edition = "2018", + crate_root = "winapi-util-0.1.6.crate/src/lib.rs", + edition = "2021", platform = { "windows-gnu": dict( deps = [":winapi-0.3.9"], @@ -16589,23 +17592,20 @@ third_party_rust_prebuilt_cxx_library( ) http_archive( - name = "windows-0.48.0.crate", - sha256 = "e686886bc078bc1b0b600cac0147aadb815089b6e4da64016cbd754b6342700f", - strip_prefix = "windows-0.48.0", - urls = 
["https://crates.io/api/v1/crates/windows/0.48.0/download"], + name = "windows-core-0.51.1.crate", + sha256 = "f1f8cf84f35d2db49a46868f947758c7a1138116f7fac3bc844f43ade1292e64", + strip_prefix = "windows-core-0.51.1", + urls = ["https://crates.io/api/v1/crates/windows-core/0.51.1/download"], visibility = [], ) cargo.rust_library( - name = "windows-0.48.0", - srcs = [":windows-0.48.0.crate"], - crate = "windows", - crate_root = "windows-0.48.0.crate/src/lib.rs", - edition = "2018", - features = [ - "Globalization", - "default", - ], + name = "windows-core-0.51.1", + srcs = [":windows-core-0.51.1.crate"], + crate = "windows_core", + crate_root = "windows-core-0.51.1.crate/src/lib.rs", + edition = "2021", + features = ["default"], visibility = [], deps = [":windows-targets-0.48.5"], ) @@ -16677,8 +17677,6 @@ cargo.rust_library( "Win32", "Win32_Foundation", "Win32_Globalization", - "Win32_NetworkManagement", - "Win32_NetworkManagement_IpHelper", "Win32_Networking", "Win32_Networking_WinSock", "Win32_Security", @@ -16711,6 +17709,40 @@ cargo.rust_library( deps = [":windows-targets-0.48.5"], ) +http_archive( + name = "windows-sys-0.52.0.crate", + sha256 = "282be5f36a8ce781fad8c8ae18fa3f9beff57ec1b52cb3de0789201425d9a33d", + strip_prefix = "windows-sys-0.52.0", + urls = ["https://crates.io/api/v1/crates/windows-sys/0.52.0/download"], + visibility = [], +) + +cargo.rust_library( + name = "windows-sys-0.52.0", + srcs = [":windows-sys-0.52.0.crate"], + crate = "windows_sys", + crate_root = "windows-sys-0.52.0.crate/src/lib.rs", + edition = "2021", + features = [ + "Win32", + "Win32_Foundation", + "Win32_NetworkManagement", + "Win32_NetworkManagement_IpHelper", + "Win32_Networking", + "Win32_Networking_WinSock", + "Win32_Storage", + "Win32_Storage_FileSystem", + "Win32_System", + "Win32_System_Console", + "Win32_System_Diagnostics", + "Win32_System_Diagnostics_Debug", + "Win32_System_Threading", + "default", + ], + visibility = [], + deps = [":windows-targets-0.52.0"], +) + 
http_archive( name = "windows-targets-0.42.2.crate", sha256 = "8e5180c00cd44c9b1c88adb3693291f1cd93605ded80c250a75d472756b4d071", @@ -16764,6 +17796,34 @@ cargo.rust_library( visibility = [], ) +http_archive( + name = "windows-targets-0.52.0.crate", + sha256 = "8a18201040b24831fbb9e4eb208f8892e1f50a37feb53cc7ff887feb8f50e7cd", + strip_prefix = "windows-targets-0.52.0", + urls = ["https://crates.io/api/v1/crates/windows-targets/0.52.0/download"], + visibility = [], +) + +cargo.rust_library( + name = "windows-targets-0.52.0", + srcs = [":windows-targets-0.52.0.crate"], + crate = "windows_targets", + crate_root = "windows-targets-0.52.0.crate/src/lib.rs", + edition = "2021", + platform = { + "linux-x86_64": dict( + deps = [":windows_x86_64_gnu-0.52.0"], + ), + "windows-gnu": dict( + deps = [":windows_x86_64_gnu-0.52.0"], + ), + "windows-msvc": dict( + deps = [":windows_x86_64_msvc-0.52.0"], + ), + }, + visibility = [], +) + http_archive( name = "windows_x86_64_gnu-0.42.2.crate", sha256 = "8de912b8b8feb55c064867cf047dda097f92d51efad5b491dfb98f6bbb70cb36", @@ -16813,6 +17873,23 @@ cargo.rust_library( visibility = [], ) +http_archive( + name = "windows_x86_64_gnu-0.52.0.crate", + sha256 = "3d6fa32db2bc4a2f5abeacf2b69f7992cd09dca97498da74a151a3132c26befd", + strip_prefix = "windows_x86_64_gnu-0.52.0", + urls = ["https://crates.io/api/v1/crates/windows_x86_64_gnu/0.52.0/download"], + visibility = [], +) + +cargo.rust_library( + name = "windows_x86_64_gnu-0.52.0", + srcs = [":windows_x86_64_gnu-0.52.0.crate"], + crate = "windows_x86_64_gnu", + crate_root = "windows_x86_64_gnu-0.52.0.crate/src/lib.rs", + edition = "2021", + visibility = [], +) + http_archive( name = "windows_x86_64_msvc-0.42.2.crate", sha256 = "9aec5da331524158c6d1a4ac0ab1541149c0b9505fde06423b02f5ef0106b9f0", @@ -16863,18 +17940,35 @@ cargo.rust_library( ) http_archive( - name = "winnow-0.5.15.crate", - sha256 = "7c2e3184b9c4e92ad5167ca73039d0c42476302ab603e2fec4487511f38ccefc", - strip_prefix = 
"winnow-0.5.15", - urls = ["https://crates.io/api/v1/crates/winnow/0.5.15/download"], + name = "windows_x86_64_msvc-0.52.0.crate", + sha256 = "dff9641d1cd4be8d1a070daf9e3773c5f67e78b4d9d42263020c057706765c04", + strip_prefix = "windows_x86_64_msvc-0.52.0", + urls = ["https://crates.io/api/v1/crates/windows_x86_64_msvc/0.52.0/download"], + visibility = [], +) + +cargo.rust_library( + name = "windows_x86_64_msvc-0.52.0", + srcs = [":windows_x86_64_msvc-0.52.0.crate"], + crate = "windows_x86_64_msvc", + crate_root = "windows_x86_64_msvc-0.52.0.crate/src/lib.rs", + edition = "2021", + visibility = [], +) + +http_archive( + name = "winnow-0.5.28.crate", + sha256 = "6c830786f7720c2fd27a1a0e27a709dbd3c4d009b56d098fc742d4f4eab91fe2", + strip_prefix = "winnow-0.5.28", + urls = ["https://crates.io/api/v1/crates/winnow/0.5.28/download"], visibility = [], ) cargo.rust_library( - name = "winnow-0.5.15", - srcs = [":winnow-0.5.15.crate"], + name = "winnow-0.5.28", + srcs = [":winnow-0.5.28.crate"], crate = "winnow", - crate_root = "winnow-0.5.15.crate/src/lib.rs", + crate_root = "winnow-0.5.28.crate/src/lib.rs", edition = "2021", features = [ "alloc", @@ -16882,7 +17976,7 @@ cargo.rust_library( "std", ], visibility = [], - deps = [":memchr-2.5.0"], + deps = [":memchr-2.6.4"], ) http_archive( @@ -16925,25 +18019,39 @@ cargo.rust_library( ) http_archive( - name = "xattr-1.0.1.crate", - sha256 = "f4686009f71ff3e5c4dbcf1a282d0a44db3f021ba69350cd42086b3e5f1c6985", - strip_prefix = "xattr-1.0.1", - urls = ["https://crates.io/api/v1/crates/xattr/1.0.1/download"], + name = "xattr-1.1.2.crate", + sha256 = "d367426ae76bdfce3d8eaea6e94422afd6def7d46f9c89e2980309115b3c2c41", + strip_prefix = "xattr-1.1.2", + urls = ["https://crates.io/api/v1/crates/xattr/1.1.2/download"], visibility = [], ) cargo.rust_library( - name = "xattr-1.0.1", - srcs = [":xattr-1.0.1.crate"], + name = "xattr-1.1.2", + srcs = [":xattr-1.1.2.crate"], crate = "xattr", - crate_root = "xattr-1.0.1.crate/src/lib.rs", + 
crate_root = "xattr-1.1.2.crate/src/lib.rs", edition = "2021", features = [ "default", "unsupported", ], + platform = { + "linux-arm64": dict( + deps = [":linux-raw-sys-0.4.12"], + ), + "linux-x86_64": dict( + deps = [":linux-raw-sys-0.4.12"], + ), + "macos-arm64": dict( + deps = [":libc-0.2.151"], + ), + "macos-x86_64": dict( + deps = [":libc-0.2.151"], + ), + }, visibility = [], - deps = [":libc-0.2.147"], + deps = [":rustix-0.38.28"], ) alias( @@ -16971,9 +18079,9 @@ cargo.rust_library( deps = [ ":futures-util-0.3.29", ":lib0-0.16.10", - ":thiserror-1.0.47", - ":tokio-1.32.0", - ":yrs-0.16.10", + ":thiserror-1.0.50", + ":tokio-1.35.0", + ":yrs-0.17.2", ], ) @@ -17020,24 +18128,68 @@ cargo.rust_library( ":lib0-0.16.10", ":rand-0.7.3", ":smallstr-0.2.0", - ":smallvec-1.11.0", - ":thiserror-1.0.47", + ":smallvec-1.11.2", + ":thiserror-1.0.50", + ], +) + +http_archive( + name = "yrs-0.17.2.crate", + sha256 = "68aea14c6c33f2edd8a5ff9415360cfa5b98d90cce30c5ee3be59a8419fb15a9", + strip_prefix = "yrs-0.17.2", + urls = ["https://crates.io/api/v1/crates/yrs/0.17.2/download"], + visibility = [], +) + +cargo.rust_library( + name = "yrs-0.17.2", + srcs = [":yrs-0.17.2.crate"], + crate = "yrs", + crate_root = "yrs-0.17.2.crate/src/lib.rs", + edition = "2018", + visibility = [], + deps = [ + ":atomic_refcell-0.1.13", + ":rand-0.7.3", + ":serde-1.0.193", + ":serde_json-1.0.108", + ":smallstr-0.3.0", + ":smallvec-1.11.2", + ":thiserror-1.0.50", ], ) http_archive( - name = "zeroize-1.6.0.crate", - sha256 = "2a0956f1ba7c7909bfb66c2e9e4124ab6f6482560f6628b5aaeba39207c9aad9", - strip_prefix = "zeroize-1.6.0", - urls = ["https://crates.io/api/v1/crates/zeroize/1.6.0/download"], + name = "zerocopy-0.7.30.crate", + sha256 = "306dca4455518f1f31635ec308b6b3e4eb1b11758cefafc782827d0aa7acb5c7", + strip_prefix = "zerocopy-0.7.30", + urls = ["https://crates.io/api/v1/crates/zerocopy/0.7.30/download"], + visibility = [], +) + +cargo.rust_library( + name = "zerocopy-0.7.30", + srcs = 
[":zerocopy-0.7.30.crate"], + crate = "zerocopy", + crate_root = "zerocopy-0.7.30.crate/src/lib.rs", + edition = "2018", + features = ["simd"], + visibility = [], +) + +http_archive( + name = "zeroize-1.7.0.crate", + sha256 = "525b4ec142c6b68a2d10f01f7bbf6755599ca3f81ea53b8431b7dd348f5fdb2d", + strip_prefix = "zeroize-1.7.0", + urls = ["https://crates.io/api/v1/crates/zeroize/1.7.0/download"], visibility = [], ) cargo.rust_library( - name = "zeroize-1.6.0", - srcs = [":zeroize-1.6.0.crate"], + name = "zeroize-1.7.0", + srcs = [":zeroize-1.7.0.crate"], crate = "zeroize", - crate_root = "zeroize-1.6.0.crate/src/lib.rs", + crate_root = "zeroize-1.7.0.crate/src/lib.rs", edition = "2021", features = [ "alloc", @@ -17065,8 +18217,8 @@ cargo.rust_library( proc_macro = True, visibility = [], deps = [ - ":proc-macro2-1.0.66", + ":proc-macro2-1.0.70", ":quote-1.0.33", - ":syn-2.0.29", + ":syn-2.0.40", ], ) diff --git a/third-party/rust/Cargo.lock b/third-party/rust/Cargo.lock index a40b91beb5..ce8c5121b6 100644 --- a/third-party/rust/Cargo.lock +++ b/third-party/rust/Cargo.lock @@ -25,31 +25,32 @@ checksum = "f26201604c87b1e01bd3d98f8d5d9a8fcbb815e8cedb41ffccbeb4bf593a35fe" [[package]] name = "ahash" -version = "0.7.6" +version = "0.7.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fcb51a0695d8f838b1ee009b3fbf66bda078cd64590202a864a8f3e8c4315c47" +checksum = "5a824f2aa7e75a0c98c5a504fceb80649e9c35265d44525b5f94de4771a395cd" dependencies = [ - "getrandom 0.2.10", + "getrandom 0.2.11", "once_cell", "version_check", ] [[package]] name = "ahash" -version = "0.8.3" +version = "0.8.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2c99f64d1e06488f620f932677e24bc6e2897582980441ae90a671415bd7ec2f" +checksum = "91429305e9f0a25f6205c5b8e0d2db09e0708a7a6df0f42212bb56c32c8ac97a" dependencies = [ "cfg-if", "once_cell", "version_check", + "zerocopy", ] [[package]] name = "aho-corasick" -version = "1.0.4" +version = "1.1.2" 
source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6748e8def348ed4d14996fa801f4122cd763fff530258cdc03f64b25f89d3a5a" +checksum = "b2969dcb958b36655471fc61f7e416fa76033bdd4bfed0678d8fee1e2d07a1f0" dependencies = [ "memchr", ] @@ -83,51 +84,50 @@ dependencies = [ [[package]] name = "anstream" -version = "0.3.2" +version = "0.6.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0ca84f3628370c59db74ee214b3263d58f9aadd9b4fe7e711fd87dc452b7f163" +checksum = "d664a92ecae85fd0a7392615844904654d1d5f5514837f471ddef4a057aba1b6" dependencies = [ "anstyle", "anstyle-parse", "anstyle-query", "anstyle-wincon", "colorchoice", - "is-terminal", "utf8parse", ] [[package]] name = "anstyle" -version = "1.0.2" +version = "1.0.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "15c4c2c83f81532e5845a733998b6971faca23490340a418e9b72a3ec9de12ea" +checksum = "7079075b41f533b8c61d2a4d073c4676e1f8b249ff94a393b0595db304e0dd87" [[package]] name = "anstyle-parse" -version = "0.2.1" +version = "0.2.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "938874ff5980b03a87c5524b3ae5b59cf99b1d6bc836848df7bc5ada9643c333" +checksum = "c75ac65da39e5fe5ab759307499ddad880d724eed2f6ce5b5e8a26f4f387928c" dependencies = [ "utf8parse", ] [[package]] name = "anstyle-query" -version = "1.0.0" +version = "1.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5ca11d4be1bab0c8bc8734a9aa7bf4ee8316d462a08c6ac5052f888fef5b494b" +checksum = "e28923312444cdd728e4738b3f9c9cac739500909bb3d3c94b43551b16517648" dependencies = [ - "windows-sys 0.48.0", + "windows-sys 0.52.0", ] [[package]] name = "anstyle-wincon" -version = "1.0.2" +version = "3.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c677ab05e09154296dd37acecd46420c17b9713e8366facafa8fc0885167cf4c" +checksum = "1cd54b81ec8d6180e24654d0b371ad22fc3dd083b6ff8ba325b72e00c87660a7" dependencies = 
[ "anstyle", - "windows-sys 0.48.0", + "windows-sys 0.52.0", ] [[package]] @@ -154,18 +154,18 @@ version = "0.31.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8257238e2a3629ee5618502a75d1b91f8017c24638c75349fc8d2d80cf1f7c4c" dependencies = [ - "base64 0.21.2", - "bytes 1.4.0", + "base64 0.21.5", + "bytes 1.5.0", "futures", "http", "itoa", "memchr", - "nkeys 0.3.1", + "nkeys 0.3.2", "nuid", "once_cell", "rand 0.8.5", "regex", - "ring", + "ring 0.16.20", "rustls-native-certs", "rustls-pemfile", "rustls-webpki", @@ -174,7 +174,7 @@ dependencies = [ "serde_nanos", "serde_repr", "thiserror", - "time 0.3.27", + "time", "tokio", "tokio-retry", "tokio-rustls 0.24.1", @@ -184,13 +184,13 @@ dependencies = [ [[package]] name = "async-recursion" -version = "1.0.4" +version = "1.0.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0e97ce7de6cf12de5d7226c73f5ba9811622f4db3a5b91b55c53e987e5f91cba" +checksum = "5fd55a5ba1179988837d24ab4c7cc8ed6efdeff578ede0416b4225a5fca35bd0" dependencies = [ "proc-macro2", "quote", - "syn 2.0.29", + "syn 2.0.40", ] [[package]] @@ -212,18 +212,18 @@ checksum = "16e62a023e7c117e27523144c5d2459f4397fcc3cab0085af8e2224f643a0193" dependencies = [ "proc-macro2", "quote", - "syn 2.0.29", + "syn 2.0.40", ] [[package]] name = "async-trait" -version = "0.1.73" +version = "0.1.74" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bc00ceb34980c03614e35a3a4e218276a0a824e911d07651cd0d858a51e8c0f0" +checksum = "a66537f1bb974b254c98ed142ff995236e81b9d0fe4db0575f46612cb15eb0f9" dependencies = [ "proc-macro2", "quote", - "syn 2.0.29", + "syn 2.0.40", ] [[package]] @@ -232,7 +232,7 @@ version = "0.6.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4057f2c32adbb2fc158e22fb38433c8e9bbf76b75a4732c7c0cbaf695fb65568" dependencies = [ - "bytes 1.4.0", + "bytes 1.5.0", "futures-sink", "futures-util", "memchr", @@ -262,7 +262,7 @@ checksum = 
"1fcf00bc6d5abb29b5f97e3c61a90b6d3caa12f3faf897d4a3e3607c050a35a7" dependencies = [ "http", "log", - "rustls 0.20.8", + "rustls 0.20.9", "serde", "serde_json", "url", @@ -289,15 +289,15 @@ dependencies = [ "rust-ini", "serde", "thiserror", - "time 0.3.27", + "time", "url", ] [[package]] name = "aws-region" -version = "0.25.3" +version = "0.25.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "056557a61427d0e5ba29dd931031c8ffed4ee7a550e7cd55692a9d8deb0a9dba" +checksum = "42fed2b9fca70f2908268d057a607f2a906f47edbf856ea8587de9038d264e22" dependencies = [ "thiserror", ] @@ -311,9 +311,9 @@ dependencies = [ "async-trait", "axum-core", "axum-macros", - "base64 0.21.2", + "base64 0.21.5", "bitflags 1.3.2", - "bytes 1.4.0", + "bytes 1.5.0", "futures-util", "http", "http-body", @@ -333,7 +333,7 @@ dependencies = [ "sha1", "sync_wrapper", "tokio", - "tokio-tungstenite 0.20.0", + "tokio-tungstenite 0.20.1", "tower", "tower-layer", "tower-service", @@ -346,7 +346,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "759fa577a247914fd3f7f76d62972792636412fbfd634cd452f6a385a74d2d2c" dependencies = [ "async-trait", - "bytes 1.4.0", + "bytes 1.5.0", "futures-util", "http", "http-body", @@ -365,7 +365,7 @@ dependencies = [ "heck 0.4.1", "proc-macro2", "quote", - "syn 2.0.29", + "syn 2.0.40", ] [[package]] @@ -410,9 +410,9 @@ checksum = "9e1b586273c5702936fe7b7d6896644d8be71e6314cfe09d3167c95f712589e8" [[package]] name = "base64" -version = "0.21.2" +version = "0.21.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "604178f6c5c21f02dc555784810edfb88d34ac2c73b2eae109655649ee73ce3d" +checksum = "35636a1494ede3b646cc98f74f8e62c773a38a659ebc777a2cf26b9b74171df9" [[package]] name = "base64ct" @@ -445,9 +445,9 @@ checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a" [[package]] name = "bitflags" -version = "2.4.0" +version = "2.4.1" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "b4682ae6287fcf752ecaabbfcc7b6f9b72aa33933dc23a554d853aea8eea8635" +checksum = "327762f6e5a765692301e5bb513e0d9fef63be86bbc14528052b1cd3e6f03e07" [[package]] name = "bitvec" @@ -463,16 +463,15 @@ dependencies = [ [[package]] name = "blake3" -version = "1.4.1" +version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "199c42ab6972d92c9f8995f086273d25c42fc0f7b2a1fcefba465c1352d25ba5" +checksum = "0231f06152bf547e9c2b5194f247cd97aacf6dcd8b15d8e5ec0663f64580da87" dependencies = [ "arrayref", "arrayvec", "cc", "cfg-if", "constant_time_eq", - "digest 0.10.7", ] [[package]] @@ -499,9 +498,9 @@ version = "0.15.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f03db470b3c0213c47e978da93200259a1eb4dae2e5512cba9955e2b540a6fc6" dependencies = [ - "base64 0.21.2", + "base64 0.21.5", "bollard-stubs", - "bytes 1.4.0", + "bytes 1.5.0", "futures-core", "futures-util", "hex", @@ -530,59 +529,38 @@ checksum = "b58071e8fd9ec1e930efd28e3a90c1251015872a2ce49f81f36421b86466932e" dependencies = [ "serde", "serde_repr", - "serde_with 3.3.0", + "serde_with 3.4.0", ] [[package]] name = "borsh" -version = "0.10.3" +version = "1.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4114279215a005bc675e386011e594e1d9b800918cea18fcadadcce864a2046b" +checksum = "9897ef0f1bd2362169de6d7e436ea2237dc1085d7d1e4db75f4be34d86f309d1" dependencies = [ "borsh-derive", - "hashbrown 0.13.2", + "cfg_aliases", ] [[package]] name = "borsh-derive" -version = "0.10.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0754613691538d51f329cce9af41d7b7ca150bc973056f1156611489475f54f7" -dependencies = [ - "borsh-derive-internal", - "borsh-schema-derive-internal", - "proc-macro-crate 0.1.5", - "proc-macro2", - "syn 1.0.109", -] - -[[package]] -name = "borsh-derive-internal" -version = "0.10.3" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "afb438156919598d2c7bad7e1c0adf3d26ed3840dbc010db1a882a65583ca2fb" -dependencies = [ - "proc-macro2", - "quote", - "syn 1.0.109", -] - -[[package]] -name = "borsh-schema-derive-internal" -version = "0.10.3" +version = "1.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "634205cc43f74a1b9046ef87c4540ebda95696ec0f315024860cad7c5b0f5ccd" +checksum = "478b41ff04256c5c8330f3dfdaaae2a5cc976a8e75088bafa4625b0d0208de8c" dependencies = [ + "once_cell", + "proc-macro-crate 2.0.0", "proc-macro2", "quote", - "syn 1.0.109", + "syn 2.0.40", + "syn_derive", ] [[package]] name = "bstr" -version = "1.6.0" +version = "1.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6798148dccfbff0fae41c7574d2fa8f1ef3492fba0face179de5d8d447d67b05" +checksum = "542f33a8835a0884b006a0c3df3dadd99c0c3f296ed26c2fdc8028e01ad6230c" dependencies = [ "memchr", "serde", @@ -590,9 +568,9 @@ dependencies = [ [[package]] name = "bumpalo" -version = "3.13.0" +version = "3.14.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a3e2c3daef883ecc1b5d58c15adae93470a91d425f3532ba1695849656af3fc1" +checksum = "7f30e7476521f6f8af1a1c4c0b8cc94f0bee37d91763d0ca2665f299b6cd8aec" [[package]] name = "bytecheck" @@ -618,9 +596,9 @@ dependencies = [ [[package]] name = "byteorder" -version = "1.4.3" +version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "14c189c53d098945499cdfa7ecc63567cf3886b3332b312a5b4585d8d3a6a610" +checksum = "1fd0f2584146f6f2ef48085050886acf353beff7305ebd1ae69500e27c67f64b" [[package]] name = "bytes" @@ -630,9 +608,9 @@ checksum = "0e4cec68f03f32e44924783795810fa50a7035d8c8ebe78580ad7e6c703fba38" [[package]] name = "bytes" -version = "1.4.0" +version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "89b2fd2a0dcf38d7971e2194b6b6eebab45ae01067456a7fd93d5547a61b70be" 
+checksum = "a2bd12c1caf447e69cd4528f47f94d203fd2582878ecb9e9465484c4148a8223" dependencies = [ "serde", ] @@ -652,20 +630,25 @@ version = "1.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" +[[package]] +name = "cfg_aliases" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fd16c4719339c4530435d38e511904438d07cce7950afa3718a84ac36c10e89e" + [[package]] name = "chrono" -version = "0.4.26" +version = "0.4.31" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ec837a71355b28f6556dbd569b37b3f363091c0bd4b2e735674521b4c5fd9bc5" +checksum = "7f2c685bad3eb3d45a01354cedb7d5faa66194d1d58ba6e267a8de788f79db38" dependencies = [ "android-tzdata", "iana-time-zone", "js-sys", "num-traits", "serde", - "time 0.1.45", "wasm-bindgen", - "winapi", + "windows-targets 0.48.5", ] [[package]] @@ -697,20 +680,19 @@ dependencies = [ [[package]] name = "clap" -version = "4.3.24" +version = "4.4.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fb690e81c7840c0d7aade59f242ea3b41b9bc27bcd5997890e7702ae4b32e487" +checksum = "bfaff671f6b22ca62406885ece523383b9b64022e341e53e009a62ebc47a45f2" dependencies = [ "clap_builder", "clap_derive", - "once_cell", ] [[package]] name = "clap_builder" -version = "4.3.24" +version = "4.4.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5ed2e96bc16d8d740f6f48d663eddf4b8a0983e79210fd55479b7bcd0a69860e" +checksum = "a216b506622bb1d316cd51328dce24e07bdff4a6128a47c7e7fad11878d5adbb" dependencies = [ "anstream", "anstyle", @@ -721,27 +703,27 @@ dependencies = [ [[package]] name = "clap_derive" -version = "4.3.12" +version = "4.4.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "54a9bb5758fc5dfe728d1019941681eccaf0cf8a4189b692a0ee2f2ecf90a050" +checksum = 
"cf9804afaaf59a91e75b022a30fb7229a7901f60c755489cc61c9b423b836442" dependencies = [ "heck 0.4.1", "proc-macro2", "quote", - "syn 2.0.29", + "syn 2.0.40", ] [[package]] name = "clap_lex" -version = "0.5.0" +version = "0.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2da6da31387c7e4ef160ffab6d5e7f00c42626fe39aea70a7b0f1773f7dd6c1b" +checksum = "702fc72eb24e5a1e48ce58027a675bc24edd52096d5397d4aea7c6dd9eca0bd1" [[package]] name = "coarsetime" -version = "0.1.23" +version = "0.1.33" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a90d114103adbc625300f346d4d09dfb4ab1c4a8df6868435dd903392ecf4354" +checksum = "71367d3385c716342014ad17e3d19f7788ae514885a1f4c24f500260fb365e1a" dependencies = [ "libc", "once_cell", @@ -766,9 +748,9 @@ dependencies = [ [[package]] name = "color-spantrace" -version = "0.2.0" +version = "0.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1ba75b3d9449ecdccb27ecbc479fdc0b87fa2dd43d2f8298f9bf0e59aacc8dce" +checksum = "cd6be1b2a7e382e2b98b43b2adcca6bb0e465af0bdd38123873ae61eb17a72c2" dependencies = [ "once_cell", "owo-colors", @@ -784,33 +766,32 @@ checksum = "acbf1af155f9b9ef647e42cdc158db4b64a1b61f743629225fde6f3e0be2a7c7" [[package]] name = "colored" -version = "2.0.4" +version = "2.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2674ec482fbc38012cf31e6c42ba0177b431a0cb6f15fe40efa5aab1bda516f6" +checksum = "cbf2150cce219b664a8a70df7a1f933836724b503f8a413af9365b4dcc4d90b8" dependencies = [ - "is-terminal", "lazy_static", "windows-sys 0.48.0", ] [[package]] name = "comfy-table" -version = "7.0.1" +version = "7.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9ab77dbd8adecaf3f0db40581631b995f312a8a5ae3aa9993188bb8f23d83a5b" +checksum = "7c64043d6c7b7a4c58e39e7efccfdea7b93d885a795d0c054a69dbbf4dd52686" dependencies = [ "console", - "crossterm 0.26.1", - "strum", - "strum_macros", + 
"crossterm 0.27.0", + "strum 0.25.0", + "strum_macros 0.25.3", "unicode-width", ] [[package]] name = "config" -version = "0.13.3" +version = "0.13.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d379af7f68bfc21714c6c7dea883544201741d2ce8274bb12fa54f89507f52a7" +checksum = "23738e11972c7643e4ec947840fc463b6a571afcd3e735bdfce7d03c7a784aca" dependencies = [ "async-trait", "lazy_static", @@ -916,9 +897,9 @@ dependencies = [ [[package]] name = "core-foundation" -version = "0.9.3" +version = "0.9.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "194a7a9e6de53fa55116934067c844d9d749312f75c6f6d0980e8c252f8c2146" +checksum = "91e195e091a93c46f7102ec7818a2aa394e1e1771c3ab4825963fa03e45afb8f" dependencies = [ "core-foundation-sys", "libc", @@ -926,15 +907,15 @@ dependencies = [ [[package]] name = "core-foundation-sys" -version = "0.8.4" +version = "0.8.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e496a50fda8aacccc86d7529e2c1e0892dbd0f898a6b5645b5561b89c3210efa" +checksum = "06ea2b9bc92be3c2baa9334a323ebca2d6f074ff852cd1d7b11064035cd3868f" [[package]] name = "cpufeatures" -version = "0.2.9" +version = "0.2.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a17b76ff3a4162b0b27f354a0c87015ddad39d35f9c0c36607a3bdd175dde1f1" +checksum = "ce420fe07aecd3e67c5f910618fe65e94158f6dcc0adf44e00d69ce2bdfe0fd0" dependencies = [ "libc", ] @@ -958,6 +939,30 @@ dependencies = [ "crossbeam-utils", ] +[[package]] +name = "crossbeam-deque" +version = "0.8.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ce6fd6f855243022dcecf8702fef0c297d4338e226845fe067f6341ad9fa0cef" +dependencies = [ + "cfg-if", + "crossbeam-epoch", + "crossbeam-utils", +] + +[[package]] +name = "crossbeam-epoch" +version = "0.9.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"ae211234986c545741a7dc064309f67ee1e5ad243d0e48335adc0484d960bcc7" +dependencies = [ + "autocfg", + "cfg-if", + "crossbeam-utils", + "memoffset 0.9.0", + "scopeguard", +] + [[package]] name = "crossbeam-queue" version = "0.3.8" @@ -995,17 +1000,14 @@ dependencies = [ [[package]] name = "crossterm" -version = "0.26.1" +version = "0.27.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a84cda67535339806297f1b331d6dd6320470d2a0fe65381e79ee9e156dd3d13" +checksum = "f476fe445d41c9e991fd07515a6f463074b782242ccf4a5b7b1d1012e70824df" dependencies = [ - "bitflags 1.3.2", + "bitflags 2.4.1", "crossterm_winapi", "libc", - "mio", "parking_lot 0.12.1", - "signal-hook", - "signal-hook-mio", "winapi", ] @@ -1020,9 +1022,9 @@ dependencies = [ [[package]] name = "crypto-bigint" -version = "0.5.2" +version = "0.5.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cf4c2f4e1afd912bc40bfd6fed5d9dc1f288e0ba01bfcc835cc5bc3eb13efe15" +checksum = "0dc92fb57ca44df6db8059111ab3af99a63d5d0f8375d9972e319a379c6bab76" dependencies = [ "generic-array", "rand_core 0.6.4", @@ -1059,6 +1061,33 @@ dependencies = [ "zeroize", ] +[[package]] +name = "curve25519-dalek" +version = "4.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e89b8c6a2e4b1f45971ad09761aafb85514a84744b67a95e32c3cc1352d1f65c" +dependencies = [ + "cfg-if", + "cpufeatures", + "curve25519-dalek-derive", + "digest 0.10.7", + "fiat-crypto", + "platforms", + "rustc_version", + "subtle", +] + +[[package]] +name = "curve25519-dalek-derive" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f46882e17999c6cc590af592290432be3bce0428cb0d5f8b6715e4dc7b383eb3" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.40", +] + [[package]] name = "darling" version = "0.14.4" @@ -1104,7 +1133,7 @@ dependencies = [ "proc-macro2", "quote", "strsim", - "syn 2.0.29", + "syn 2.0.40", ] [[package]] @@ -1126,7 +1155,7 
@@ checksum = "836a9bbc7ad63342d6d6e7b815ccab164bc77a2d95d84bc3117a8c0d5c98e2d5" dependencies = [ "darling_core 0.20.3", "quote", - "syn 2.0.29", + "syn 2.0.40", ] [[package]] @@ -1138,27 +1167,27 @@ dependencies = [ "anyhow", "html-escape", "nom", - "ordered-float 2.10.0", + "ordered-float 2.10.1", ] [[package]] name = "dashmap" -version = "5.5.1" +version = "5.5.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "edd72493923899c6f10c641bdbdeddc7183d6396641d99c1a0d1597f37f92e28" +checksum = "978747c1d849a7d2ee5e8adc0159961c48fb7e5db2f06af6723b80123bb53856" dependencies = [ "cfg-if", - "hashbrown 0.14.0", + "hashbrown 0.14.3", "lock_api", "once_cell", - "parking_lot_core 0.9.8", + "parking_lot_core 0.9.9", ] [[package]] name = "data-encoding" -version = "2.4.0" +version = "2.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c2e66c9d817f1720209181c316d28635c050fa304f9c79e47a520882661b7308" +checksum = "7e962a19be5cfc3f3bf6dd8f61eb50107f356ad6270fbb3ed41476571db78be5" [[package]] name = "deadpool" @@ -1187,9 +1216,9 @@ dependencies = [ [[package]] name = "deadpool-runtime" -version = "0.1.2" +version = "0.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eaa37046cc0f6c3cc6090fbdbf73ef0b8ef4cfcc37f6befc0020f63e8cf121e1" +checksum = "63dfa964fe2a66f3fde91fc70b267fe193d822c7e603e2a675a49a7f46ad3f49" dependencies = [ "tokio", ] @@ -1227,10 +1256,11 @@ dependencies = [ [[package]] name = "deranged" -version = "0.3.8" +version = "0.3.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f2696e8a945f658fd14dc3b87242e6b80cd0f36ff04ea560fa39082368847946" +checksum = "8eb30d70a07a3b04884d2677f06bec33509dc67ca60d92949e5535352d3191dc" dependencies = [ + "powerfmt", "serde", ] @@ -1360,7 +1390,7 @@ dependencies = [ "asynchronous-codec", "base64 0.13.1", "byteorder", - "bytes 1.4.0", + "bytes 1.5.0", "chrono", "containers-api 0.9.0", "docker-api-stubs", @@ 
-1395,22 +1425,22 @@ checksum = "1aaf95b3e5c8f23aa320147307562d361db0ae0d51242340f558153b4eb2439b" [[package]] name = "dyn-clone" -version = "1.0.13" +version = "1.0.16" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bbfc4744c1b8f2a09adc0e55242f60b1af195d88596bd8700be74418c056c555" +checksum = "545b22097d44f8a9581187cdf93de7a71e4722bf51200cfaba810865b49a495d" [[package]] name = "ecdsa" -version = "0.16.8" +version = "0.16.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a4b1e0c257a9e9f25f90ff76d7a68360ed497ee519c8e428d1825ef0000799d4" +checksum = "ee27f32b5c5292967d2d4a9d7f1e0b0aed2c15daded5a60300e4abb9d8020bca" dependencies = [ "der 0.7.8", "digest 0.10.7", "elliptic-curve", "rfc6979", - "signature 2.1.0", - "spki 0.7.2", + "signature 2.2.0", + "spki 0.7.3", ] [[package]] @@ -1422,6 +1452,15 @@ dependencies = [ "signature 1.6.4", ] +[[package]] +name = "ed25519" +version = "2.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "115531babc129696a58c64a4fef0a8bf9e9698629fb97e9e40767d235cfbcd53" +dependencies = [ + "signature 2.2.0", +] + [[package]] name = "ed25519-compact" version = "2.0.4" @@ -1429,7 +1468,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6a3d382e8464107391c8706b4c14b087808ecb909f6c15c34114bc42e53a9e4c" dependencies = [ "ct-codecs", - "getrandom 0.2.10", + "getrandom 0.2.11", ] [[package]] @@ -1438,17 +1477,30 @@ version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c762bae6dcaf24c4c84667b8579785430908723d5c889f469d76a41d59cc7a9d" dependencies = [ - "curve25519-dalek", - "ed25519", + "curve25519-dalek 3.2.0", + "ed25519 1.5.3", "sha2 0.9.9", "zeroize", ] +[[package]] +name = "ed25519-dalek" +version = "2.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1f628eaec48bfd21b865dc2950cfa014450c01d2fa2b69a86c2fd5844ec523c0" +dependencies = [ + 
"curve25519-dalek 4.1.1", + "ed25519 2.2.3", + "sha2 0.10.8", + "signature 2.2.0", + "subtle", +] + [[package]] name = "educe" -version = "0.4.22" +version = "0.4.23" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "079044df30bb07de7d846d41a184c4b00e66ebdac93ee459253474f3a47e50ae" +checksum = "0f0042ff8246a363dbe77d2ceedb073339e85a804b9a47636c6e016a9a32c05f" dependencies = [ "enum-ordinalize", "proc-macro2", @@ -1464,9 +1516,9 @@ checksum = "a26ae43d7bcc3b814de94796a5e736d4029efb0ee900c12e2d54c993ad1a1e07" [[package]] name = "elliptic-curve" -version = "0.13.5" +version = "0.13.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "968405c8fdc9b3bf4df0a6638858cc0b52462836ab6b1c87377785dd09cf1c0b" +checksum = "b5e6043086bf7973472e0c7dff2142ea0b680d30e18d9cc40f267efbf222bd47" dependencies = [ "base16ct", "crypto-bigint", @@ -1500,15 +1552,15 @@ dependencies = [ [[package]] name = "enum-ordinalize" -version = "3.1.13" +version = "3.1.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e4f76552f53cefc9a7f64987c3701b99d982f7690606fd67de1d09712fbf52f1" +checksum = "1bf1fa3f06bbff1ea5b1a9c7b14aa992a39657db60a2759457328d7e058f49ee" dependencies = [ "num-bigint", "num-traits", "proc-macro2", "quote", - "syn 2.0.29", + "syn 2.0.40", ] [[package]] @@ -1519,23 +1571,12 @@ checksum = "5443807d6dff69373d433ab9ef5378ad8df50ca6298caf15de6e52e24aaf54d5" [[package]] name = "errno" -version = "0.3.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6b30f669a7961ef1631673d2766cc92f52d64f7ef354d4fe0ddfd30ed52f0f4f" -dependencies = [ - "errno-dragonfly", - "libc", - "windows-sys 0.48.0", -] - -[[package]] -name = "errno-dragonfly" -version = "0.1.2" +version = "0.3.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "aa68f1b12764fab894d2755d2518754e71b4fd80ecfb822714a1206c2aab39bf" +checksum = 
"a258e46cdc063eb8519c00b9fc845fc47bcfca4130e2f08e88665ceda8474245" dependencies = [ - "cc", "libc", + "windows-sys 0.52.0", ] [[package]] @@ -1546,9 +1587,9 @@ checksum = "0206175f82b8d6bf6652ff7d71a1e27fd2e4efde587fd368662814d6ec1d9ce0" [[package]] name = "eyre" -version = "0.6.8" +version = "0.6.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4c2b6b5a29c02cdc822728b7d7b8ae1bab3e3b05d44522770ddd49722eeac7eb" +checksum = "8bbb8258be8305fb0237d7b295f47bb24ff1b136a535f473baf40e70468515aa" dependencies = [ "indenter", "once_cell", @@ -1571,9 +1612,9 @@ dependencies = [ [[package]] name = "fastrand" -version = "2.0.0" +version = "2.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6999dc1837253364c2ebb0704ba97994bd874e8f195d665c50b7548f6ea92764" +checksum = "25cbce373ec4653f1a01a31e8a5e5ec0c622dc27ff9c4e6606eefef5cbbed4a5" [[package]] name = "ff" @@ -1585,18 +1626,30 @@ dependencies = [ "subtle", ] +[[package]] +name = "fiat-crypto" +version = "0.2.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "27573eac26f4dd11e2b1916c3fe1baa56407c83c71a773a8ba17ec0bca03b6b7" + [[package]] name = "filetime" -version = "0.2.22" +version = "0.2.23" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d4029edd3e734da6fe05b6cd7bd2960760a616bd2ddd0d59a0124746d6272af0" +checksum = "1ee447700ac8aa0b2f2bd7bc4462ad686ba06baa6727ac149a2d6277f0d240fd" dependencies = [ "cfg-if", "libc", - "redox_syscall 0.3.5", - "windows-sys 0.48.0", + "redox_syscall 0.4.1", + "windows-sys 0.52.0", ] +[[package]] +name = "finl_unicode" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8fcfdc7a0362c9f4444381a9e697c79d435fe65b52a37466fc2c1184cee9edc6" + [[package]] name = "fixedbitset" version = "0.4.2" @@ -1605,9 +1658,9 @@ checksum = "0ce7134b9999ecaf8bcd65542e436736ef32ddca1b3e06094cb6ec5755203b80" [[package]] name = "flate2" -version = 
"1.0.27" +version = "1.0.28" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c6c98ee8095e9d1dcbf2fcc6d95acccb90d1c81db1e44725c6a984b1dbdfb010" +checksum = "46303f565772937ffe1d394a4fac6f411c6013172fadde9dcdb1e147a086940e" dependencies = [ "crc32fast", "miniz_oxide", @@ -1621,9 +1674,9 @@ checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1" [[package]] name = "form_urlencoded" -version = "1.2.0" +version = "1.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a62bc1cf6f830c2ec14a513a9fb124d0a213a629668a4186f329db21fe045652" +checksum = "e13624c2627564efccf4934284bdd98cbaa14e79b0b5a141218e507b3a823456" dependencies = [ "percent-encoding", ] @@ -1636,9 +1689,9 @@ checksum = "e6d5a32815ae3f33302d95fdcb2ce17862f8c65363dcfd29360480ba1001fc9c" [[package]] name = "futures" -version = "0.3.28" +version = "0.3.29" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "23342abe12aba583913b2e62f22225ff9c950774065e4bfb61a19cd9770fec40" +checksum = "da0290714b38af9b4a7b094b8a37086d1b4e61f2df9122c3cad2577669145335" dependencies = [ "futures-channel", "futures-core", @@ -1667,9 +1720,9 @@ checksum = "eb1d22c66e66d9d72e1758f0bd7d4fd0bee04cad842ee34587d68c07e45d088c" [[package]] name = "futures-executor" -version = "0.3.28" +version = "0.3.29" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ccecee823288125bd88b4d7f565c9e58e41858e47ab72e8ea2d64e93624386e0" +checksum = "0f4fb8693db0cf099eadcca0efe2a5a22e4550f98ed16aba6c48700da29597bc" dependencies = [ "futures-core", "futures-task", @@ -1716,7 +1769,7 @@ checksum = "53b153fd91e4b0147f4aced87be237c98248656bb01050b96bf3ee89220a8ddb" dependencies = [ "proc-macro2", "quote", - "syn 2.0.29", + "syn 2.0.40", ] [[package]] @@ -1787,9 +1840,9 @@ dependencies = [ [[package]] name = "getrandom" -version = "0.2.10" +version = "0.2.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum 
= "be4136b2a15dd319360be1c07d9933517ccf0be8f16bf62a3bee4f0d618df427" +checksum = "fe9006bed769170c11f845cf00c7c1e9092aeb3f268e007c3e760ac68008070f" dependencies = [ "cfg-if", "libc", @@ -1798,21 +1851,21 @@ dependencies = [ [[package]] name = "gimli" -version = "0.28.0" +version = "0.28.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6fb8d784f27acf97159b40fc4db5ecd8aa23b9ad5ef69cdd136d3bc80665f0c0" +checksum = "4271d37baee1b8c7e4b708028c57d816cf9d2434acb33a549475f78c181f6253" [[package]] name = "globset" -version = "0.4.13" +version = "0.4.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "759c97c1e17c55525b57192c06a267cda0ac5210b222d6b82189a2338fa1c13d" +checksum = "57da3b9b5b85bd66f31093f8c408b90a74431672542466497dcbdfdc02034be1" dependencies = [ "aho-corasick", "bstr", - "fnv", "log", - "regex", + "regex-automata 0.4.3", + "regex-syntax 0.8.2", ] [[package]] @@ -1828,17 +1881,17 @@ dependencies = [ [[package]] name = "h2" -version = "0.3.21" +version = "0.3.22" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "91fc23aa11be92976ef4729127f1a74adf36d8436f7816b185d18df956790833" +checksum = "4d6250322ef6e60f93f9a2162799302cd6f68f79f6e5d85c8c16f14d1d958178" dependencies = [ - "bytes 1.4.0", + "bytes 1.5.0", "fnv", "futures-core", "futures-sink", "futures-util", "http", - "indexmap 1.9.3", + "indexmap 2.1.0", "slab", "tokio", "tokio-util", @@ -1857,35 +1910,26 @@ version = "0.12.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8a9ee70c43aaf417c914396645a0fa852624801b24ebb7ae78fe8272889ac888" dependencies = [ - "ahash 0.7.6", -] - -[[package]] -name = "hashbrown" -version = "0.13.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "43a3c133739dddd0d2990f9a4bdf8eb4b21ef50e4851ca85ab661199821d510e" -dependencies = [ - "ahash 0.8.3", + "ahash 0.7.7", ] [[package]] name = "hashbrown" -version = "0.14.0" +version = "0.14.3" 
source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2c6201b9ff9fd90a5a3bac2e56a830d0caa509576f0e503818ee82c181b3437a" +checksum = "290f1a1d9242c78d09ce40a5e87e7554ee637af1351968159f4952f028f75604" dependencies = [ - "ahash 0.8.3", + "ahash 0.8.6", "allocator-api2", ] [[package]] name = "hashlink" -version = "0.8.3" +version = "0.8.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "312f66718a2d7789ffef4f4b7b213138ed9f1eb3aa1d0d82fc99f88fb3ffd26f" +checksum = "e8094feaf31ff591f651a2664fb9cfd92bba7a60ce3197265e9482ebe753c8f7" dependencies = [ - "hashbrown 0.14.0", + "hashbrown 0.14.3", ] [[package]] @@ -1908,9 +1952,9 @@ dependencies = [ [[package]] name = "hermit-abi" -version = "0.3.2" +version = "0.3.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "443144c8cdadd93ebf52ddb4056d257f5b52c04d3c804e657d19eb73fc33668b" +checksum = "d77f7ec81a6d05a3abb01ab6eb7590f6083d08449fe5a1c8b1e620283546ccb7" [[package]] name = "hex" @@ -1960,6 +2004,15 @@ dependencies = [ "digest 0.10.7", ] +[[package]] +name = "home" +version = "0.5.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5444c27eef6923071f7ebcc33e3444508466a76f7a2b93da00ed6e19f30c1ddb" +dependencies = [ + "windows-sys 0.48.0", +] + [[package]] name = "html-escape" version = "0.2.13" @@ -1971,22 +2024,22 @@ dependencies = [ [[package]] name = "http" -version = "0.2.9" +version = "0.2.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bd6effc99afb63425aff9b05836f029929e345a6148a14b7ecd5ab67af944482" +checksum = "8947b1a6fad4393052c7ba1f4cd97bed3e953a95c79c92ad9b051a04611d9fbb" dependencies = [ - "bytes 1.4.0", + "bytes 1.5.0", "fnv", "itoa", ] [[package]] name = "http-body" -version = "0.4.5" +version = "0.4.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d5f38f16d184e36f2408a55281cd658ecbd3ca05cce6d6510a176eca393e26d1" +checksum = 
"7ceab25649e9960c0311ea418d17bee82c0dcec1bd053b5f9a66e265a693bed2" dependencies = [ - "bytes 1.4.0", + "bytes 1.5.0", "http", "pin-project-lite", ] @@ -2015,7 +2068,7 @@ version = "0.14.27" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ffb1cfd654a8219eaef89881fdb3bb3b1cdc5fa75ded05d6933b2b382e395468" dependencies = [ - "bytes 1.4.0", + "bytes 1.5.0", "futures-channel", "futures-core", "futures-util", @@ -2026,7 +2079,7 @@ dependencies = [ "httpdate", "itoa", "pin-project-lite", - "socket2 0.4.9", + "socket2 0.4.10", "tokio", "tower-service", "tracing", @@ -2035,14 +2088,14 @@ dependencies = [ [[package]] name = "hyper-rustls" -version = "0.24.1" +version = "0.24.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8d78e1e73ec14cf7375674f74d7dde185c8206fd9dea6fb6295e8a98098aaa97" +checksum = "ec3efd23720e2049821a693cbc7e65ea87c72f1c58ff2f9522ff332b1491e590" dependencies = [ "futures-util", "http", "hyper", - "rustls 0.21.6", + "rustls 0.21.10", "tokio", "tokio-rustls 0.24.1", ] @@ -2073,16 +2126,16 @@ dependencies = [ [[package]] name = "iana-time-zone" -version = "0.1.57" +version = "0.1.58" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2fad5b825842d2b38bd206f3e81d6957625fd7f0a361e345c30e01a0ae2dd613" +checksum = "8326b86b6cff230b97d0d312a6c40a60726df3332e721f72a1b035f451663b20" dependencies = [ "android_system_properties", "core-foundation-sys", "iana-time-zone-haiku", "js-sys", "wasm-bindgen", - "windows", + "windows-core", ] [[package]] @@ -2102,9 +2155,9 @@ checksum = "b9e0384b61958566e926dc50660321d12159025e767c18e043daf26b70104c39" [[package]] name = "idna" -version = "0.4.0" +version = "0.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7d20d6b07bfbc108882d88ed8e37d39636dcc260e15e30c45e6ba089610b917c" +checksum = "634d9b1461af396cad843f47fdba5597a4f9e6ddd4bfb6ff5d85028c25cb12f6" dependencies = [ "unicode-bidi", "unicode-normalization", 
@@ -2121,23 +2174,22 @@ dependencies = [ "quote", "serde", "syn 1.0.109", - "toml 0.7.6", + "toml 0.7.8", "unicode-xid", ] [[package]] name = "ignore" -version = "0.4.20" +version = "0.4.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dbe7873dab538a9a44ad79ede1faf5f30d49f9a5c883ddbab48bce81b64b7492" +checksum = "747ad1b4ae841a78e8aba0d63adbfbeaea26b517b63705d47856b73015d27060" dependencies = [ + "crossbeam-deque", "globset", - "lazy_static", "log", "memchr", - "regex", + "regex-automata 0.4.3", "same-file", - "thread_local", "walkdir", "winapi-util", ] @@ -2161,20 +2213,20 @@ dependencies = [ [[package]] name = "indexmap" -version = "2.0.0" +version = "2.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d5477fe2230a79769d8dc68e0eabf5437907c0457a5614a9e8dddb67f65eb65d" +checksum = "d530e1a18b1cb4c484e6e34556a0d948706958449fca0cab753d649f2bce3d1f" dependencies = [ "equivalent", - "hashbrown 0.14.0", + "hashbrown 0.14.3", "serde", ] [[package]] name = "indicatif" -version = "0.17.6" +version = "0.17.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0b297dc40733f23a0e52728a58fa9489a5b7638a324932de16b41adc3ef80730" +checksum = "fb28741c9db9a713d93deb3bb9515c20788cef5815265bee4980e87bde7e0f25" dependencies = [ "console", "instant", @@ -2185,9 +2237,9 @@ dependencies = [ [[package]] name = "indoc" -version = "2.0.3" +version = "2.0.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2c785eefb63ebd0e33416dfcb8d6da0bf27ce752843a45632a67bf10d4d4b5c4" +checksum = "1e186cfbae8084e513daff4240b4797e342f988cecda4fb6c939150f96315fd8" [[package]] name = "inquire" @@ -2214,22 +2266,11 @@ dependencies = [ "cfg-if", ] -[[package]] -name = "io-lifetimes" -version = "1.0.11" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eae7b9aee968036d54dce06cebaefd919e4472e753296daccd6d344e3e2df0c2" -dependencies = [ - "hermit-abi", - "libc", - 
"windows-sys 0.48.0", -] - [[package]] name = "ipnet" -version = "2.8.0" +version = "2.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "28b29a3cd74f0f4598934efe3aeba42bae0eb4680554128851ebbecb02af14e6" +checksum = "8f518f335dce6725a761382244631d86cf0ccb2863413590b31338feb467f9c3" [[package]] name = "is-docker" @@ -2240,17 +2281,6 @@ dependencies = [ "once_cell", ] -[[package]] -name = "is-terminal" -version = "0.4.9" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cb0889898416213fab133e1d33a0e5858a48177452750691bde3666d0fdbaf8b" -dependencies = [ - "hermit-abi", - "rustix 0.38.8", - "windows-sys 0.48.0", -] - [[package]] name = "is-wsl" version = "0.4.0" @@ -2270,26 +2300,35 @@ dependencies = [ "either", ] +[[package]] +name = "itertools" +version = "0.12.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "25db6b064527c5d482d0423354fcd07a89a2dfe07b67892e62411946db7f07b0" +dependencies = [ + "either", +] + [[package]] name = "itoa" -version = "1.0.9" +version = "1.0.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "af150ab688ff2122fcef229be89cb50dd66af9e01a4ff320cc137eecc9bacc38" +checksum = "b1a46d1a171d865aa5f83f92695765caa047a9b4cbae2cbf37dbd613a793fd4c" [[package]] name = "js-sys" -version = "0.3.64" +version = "0.3.66" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c5f195fe497f702db0f318b07fdd68edb16955aed830df8363d837542f8f935a" +checksum = "cee9c64da59eae3b50095c18d3e74f8b73c0b86d2792824ff01bbce68ba229ca" dependencies = [ "wasm-bindgen", ] [[package]] name = "jwt-simple" -version = "0.11.6" +version = "0.11.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "733741e7bcd1532b56c9ba6c698c069f274f3782ad956f0d2c7f31650cedaa1b" +checksum = "357892bb32159d763abdea50733fadcb9a8e1c319a9aa77592db8555d05af83e" dependencies = [ "anyhow", "binstring", @@ -2313,16 +2352,16 @@ dependencies 
= [ [[package]] name = "k256" -version = "0.13.1" +version = "0.13.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cadb76004ed8e97623117f3df85b17aaa6626ab0b0831e6573f104df16cd1bcc" +checksum = "3f01b677d82ef7a676aa37e099defd83a28e15687112cafdd112d60236b6115b" dependencies = [ "cfg-if", "ecdsa", "elliptic-curve", "once_cell", - "sha2 0.10.7", - "signature 2.1.0", + "sha2 0.10.8", + "signature 2.2.0", ] [[package]] @@ -2345,15 +2384,26 @@ dependencies = [ [[package]] name = "libc" -version = "0.2.147" +version = "0.2.151" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b4668fb0ea861c1df094127ac5f1da3409a82116a4ba74fca2e58ef927159bb3" +checksum = "302d7ab3130588088d277783b1e2d2e10c9e9e4a16dd9050e6ec93fb3e7048f4" [[package]] name = "libm" -version = "0.2.7" +version = "0.2.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4ec2a862134d2a7d32d7983ddcdd1c4923530833c9f2ea1a44fc5fa473989058" + +[[package]] +name = "libredox" +version = "0.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f7012b1bbb0719e1097c47611d3898568c546d597c2e74d66f6087edd5233ff4" +checksum = "85c833ca1e66078851dba29046874e38f08b2c883700aa29a03ddd3b23814ee8" +dependencies = [ + "bitflags 2.4.1", + "libc", + "redox_syscall 0.4.1", +] [[package]] name = "libsodium-sys" @@ -2369,21 +2419,15 @@ dependencies = [ [[package]] name = "linux-raw-sys" -version = "0.3.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ef53942eb7bf7ff43a617b3e2c1c4a5ecf5944a7c1bc12d7ee39bbb15e5c1519" - -[[package]] -name = "linux-raw-sys" -version = "0.4.5" +version = "0.4.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "57bcfdad1b858c2db7c38303a6d2ad4dfaf5eb53dfeb0910128b2c26d6158503" +checksum = "c4cd1a83af159aa67994778be9070f0ae1bd732942279cabb14f86f986a21456" [[package]] name = "lock_api" -version = "0.4.10" +version = "0.4.11" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "c1cc9717a20b1bb222f333e6a92fd32f7d8a18ddc5a3191a11af45dcbf4dcd16" +checksum = "3c168f8615b12bc01f9c17e2eb0cc07dcae1940121185446edc3744920e8ef45" dependencies = [ "autocfg", "scopeguard", @@ -2406,9 +2450,9 @@ dependencies = [ [[package]] name = "matchit" -version = "0.7.2" +version = "0.7.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ed1202b2a6f884ae56f04cff409ab315c5ce26b5e58d7412e484f01fd52f52ef" +checksum = "0e7465ac9959cc2b1404e8e2367b43684a6d13790fe23056cc8c6c5a6b7bcb94" [[package]] name = "maybe-async" @@ -2423,10 +2467,11 @@ dependencies = [ [[package]] name = "md-5" -version = "0.10.5" +version = "0.10.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6365506850d44bff6e2fbcb5176cf63650e48bd45ef2fe2665ae1570e0f4b9ca" +checksum = "d89e7ee0cfbedfc4da3340218492196241d89eefb6dab27de5df917a6d2e78cf" dependencies = [ + "cfg-if", "digest 0.10.7", ] @@ -2438,9 +2483,9 @@ checksum = "490cc448043f947bae3cbee9c203358d62dbee0db12107a74be5c30ccfd09771" [[package]] name = "memchr" -version = "2.5.0" +version = "2.6.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2dffe52ecf27772e601905b7522cb4ef790d2cc203488bbd0e2fe85fcb74566d" +checksum = "f665ee40bc4a3c5590afb1e9677db74a508659dfd71e126420da8274909a0167" [[package]] name = "memmap2" @@ -2470,6 +2515,15 @@ dependencies = [ "autocfg", ] +[[package]] +name = "memoffset" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5a634b1c61a95585bd15607c6ab0c4e5b226e695ff2800ba0cdccddf208c406c" +dependencies = [ + "autocfg", +] + [[package]] name = "mime" version = "0.3.17" @@ -2503,9 +2557,9 @@ dependencies = [ [[package]] name = "mio" -version = "0.8.8" +version = "0.8.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "927a765cd3fc26206e66b296465fa9d3e5ab003e651c1b3c060e7956d96b19d2" 
+checksum = "8f3d0b296e374a4e6f3c7b0a1f5a51d748a0d34c85e7dc48fc3fa9a87657fe09" dependencies = [ "libc", "log", @@ -2519,7 +2573,7 @@ version = "2.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "01acbdc23469fd8fe07ab135923371d5f5a422fbf9c522158677c8eb15bc51c2" dependencies = [ - "bytes 1.4.0", + "bytes 1.5.0", "encoding_rs", "futures-util", "http", @@ -2569,16 +2623,15 @@ dependencies = [ [[package]] name = "nix" -version = "0.26.2" +version = "0.26.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bfdda3d196821d6af13126e40375cdf7da646a96114af134d5f417a9a1dc8e1a" +checksum = "598beaf3cc6fdd9a5dfb1630c2800c7acd31df7aaf0f565796fba2b53ca1af1b" dependencies = [ "bitflags 1.3.2", "cfg-if", "libc", "memoffset 0.7.1", "pin-utils", - "static_assertions", ] [[package]] @@ -2589,27 +2642,27 @@ checksum = "0e66a7cd1358277b2a6f77078e70aea7315ff2f20db969cc61153103ec162594" dependencies = [ "byteorder", "data-encoding", - "ed25519-dalek", - "getrandom 0.2.10", + "ed25519-dalek 1.0.1", + "getrandom 0.2.11", "log", "rand 0.8.5", - "signatory", + "signatory 0.23.2", ] [[package]] name = "nkeys" -version = "0.3.1" +version = "0.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3e9261eb915c785ea65708bc45ef43507ea46914e1a73f1412d1a38aba967c8e" +checksum = "aad178aad32087b19042ee36dfd450b73f5f934fbfb058b59b198684dfec4c47" dependencies = [ "byteorder", "data-encoding", - "ed25519", - "ed25519-dalek", - "getrandom 0.2.10", + "ed25519 2.2.3", + "ed25519-dalek 2.1.0", + "getrandom 0.2.11", "log", "rand 0.8.5", - "signatory", + "signatory 0.27.1", ] [[package]] @@ -2693,9 +2746,9 @@ dependencies = [ [[package]] name = "num-traits" -version = "0.2.16" +version = "0.2.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f30b0abd723be7e2ffca1272140fac1a2f084c77ec3e123c192b66af1ee9e6c2" +checksum = "39e3200413f237f41ab11ad6d161bc7239c84dcb631773ccd7de3dfe4b5c267c" 
dependencies = [ "autocfg", "libm", @@ -2729,7 +2782,7 @@ dependencies = [ "proc-macro-crate 1.3.1", "proc-macro2", "quote", - "syn 2.0.29", + "syn 2.0.40", ] [[package]] @@ -2740,18 +2793,18 @@ checksum = "830b246a0e5f20af87141b25c173cd1b609bd7779a4617d6ec582abaf90870f3" [[package]] name = "object" -version = "0.32.0" +version = "0.32.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "77ac5bbd07aea88c60a577a1ce218075ffd59208b2d7ca97adf9bfc5aeb21ebe" +checksum = "9cf5f9dd3933bd50a9e1f149ec995f39ae2c496d31fd772c1fd45ebc27e902b0" dependencies = [ "memchr", ] [[package]] name = "once_cell" -version = "1.18.0" +version = "1.19.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dd8b5dd2ae5ed71462c540258bedcb51965123ad7e7ccf4b9a8cafaa4a63576d" +checksum = "3fdb12b2476b595f9358c5161aa467c2438859caa136dec86c26fdd2efe17b92" [[package]] name = "opaque-debug" @@ -2761,9 +2814,9 @@ checksum = "624a8340c38c1b80fd549087862da4ba43e08858af025b236e509b6649fc13d5" [[package]] name = "open" -version = "5.0.0" +version = "5.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cfabf1927dce4d6fdf563d63328a0a506101ced3ec780ca2135747336c98cef8" +checksum = "90878fb664448b54c4e592455ad02831e23a3f7e157374a8b95654731aac7349" dependencies = [ "is-wsl", "libc", @@ -2873,18 +2926,18 @@ checksum = "04744f49eae99ab78e0d5c0b603ab218f515ea8cfe5a456d7629ad883a3b6e7d" [[package]] name = "ordered-float" -version = "2.10.0" +version = "2.10.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7940cf2ca942593318d07fcf2596cdca60a85c9e7fab408a5e21a4f9dcd40d87" +checksum = "68f19d67e5a2795c94e73e0bb1cc1a7edeb2e28efd39e2e1c9b7a40c1108b11c" dependencies = [ "num-traits", ] [[package]] name = "ordered-float" -version = "3.7.0" +version = "3.9.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2fc2dbde8f8a79f2102cc474ceb0ad68e3b80b85289ea62389b60e66777e4213" 
+checksum = "f1e1c390732d15f1d48471625cd92d154e66db2c56645e29a9cd26f4699f72dc" dependencies = [ "num-traits", ] @@ -2943,7 +2996,7 @@ dependencies = [ "ecdsa", "elliptic-curve", "primeorder", - "sha2 0.10.7", + "sha2 0.10.8", ] [[package]] @@ -2955,14 +3008,14 @@ dependencies = [ "ecdsa", "elliptic-curve", "primeorder", - "sha2 0.10.7", + "sha2 0.10.8", ] [[package]] name = "parking" -version = "2.1.0" +version = "2.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "14f2252c834a40ed9bb5422029649578e63aa341ac401f74e719dd1afda8394e" +checksum = "bb813b8af86854136c6922af0598d719255ecb2179515e6e7730d468f05c9cae" [[package]] name = "parking_lot" @@ -2982,7 +3035,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3742b2c103b9f06bc9fff0a37ff4912935851bee6d36f3c02bcc755bcfec228f" dependencies = [ "lock_api", - "parking_lot_core 0.9.8", + "parking_lot_core 0.9.9", ] [[package]] @@ -3001,13 +3054,13 @@ dependencies = [ [[package]] name = "parking_lot_core" -version = "0.9.8" +version = "0.9.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "93f00c865fe7cabf650081affecd3871070f26767e7b2070a3ffae14c654b447" +checksum = "4c42a9226546d68acdd9c0a280d17ce19bfe27a46bf68784e4066115788d008e" dependencies = [ "cfg-if", "libc", - "redox_syscall 0.3.5", + "redox_syscall 0.4.1", "smallvec", "windows-targets 0.48.5", ] @@ -3053,9 +3106,9 @@ dependencies = [ [[package]] name = "percent-encoding" -version = "2.3.0" +version = "2.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9b2a4787296e9989611394c33f193f676704af1686e70b8f8033ab5ba9a35a94" +checksum = "e3148f5046208a5d56bcfc03053e3ca6334e51da8dfb19b6cdc8b306fae3283e" [[package]] name = "petgraph" @@ -3064,7 +3117,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e1d3afd2628e69da2be385eb6f2fd57c8ac7977ceeff6dc166ff1657b0e386a9" dependencies = [ "fixedbitset", - "indexmap 2.0.0", + 
"indexmap 2.1.0", "serde", "serde_derive", ] @@ -3084,7 +3137,7 @@ version = "0.11.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "90fcb95eef784c2ac79119d1dd819e162b5da872ce6f3c3abe1e8ca1c082f72b" dependencies = [ - "siphasher", + "siphasher 0.3.11", ] [[package]] @@ -3124,14 +3177,14 @@ checksum = "4359fd9c9171ec6e8c62926d6faaf553a8dc3f64e1507e76da7911b4f6a04405" dependencies = [ "proc-macro2", "quote", - "syn 2.0.29", + "syn 2.0.40", ] [[package]] name = "pin-project-lite" -version = "0.2.12" +version = "0.2.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "12cc1b0bf1727a77a54b6654e7b5f1af8604923edc8b81885f8ec92f9e3f0a05" +checksum = "8afb450f006bf6385ca15ef45d71d2288452bc3683ce2e2cacc0d18e4be60b58" [[package]] name = "pin-utils" @@ -3180,7 +3233,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f950b2377845cebe5cf8b5165cb3cc1a5e0fa5cfa3e1f7f55707d8fd82e0a7b7" dependencies = [ "der 0.7.8", - "spki 0.7.2", + "spki 0.7.3", ] [[package]] @@ -3189,6 +3242,12 @@ version = "0.3.27" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "26072860ba924cbfa98ea39c8c19b4dd6a4a25423dbdf219c1eca91aa0cf6964" +[[package]] +name = "platforms" +version = "3.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "14e6ab3f592e6fb464fc9712d8d6e6912de6473954635fd76a589d832cffcbb0" + [[package]] name = "podman-api" version = "0.10.0" @@ -3197,7 +3256,7 @@ checksum = "4d0ade207138f12695cb4be3b590283f1cf764c5c4909f39966c4b4b0dba7c1e" dependencies = [ "base64 0.13.1", "byteorder", - "bytes 1.4.0", + "bytes 1.5.0", "chrono", "containers-api 0.8.0", "flate2", @@ -3227,9 +3286,9 @@ dependencies = [ [[package]] name = "portable-atomic" -version = "1.4.2" +version = "1.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f32154ba0af3a075eefa1eda8bb414ee928f62303a54ea85b8d6638ff1a6ee9e" +checksum = 
"7170ef9988bc169ba16dd36a7fa041e5c4cbeb6a35b76d4c03daded371eae7c0" [[package]] name = "postgres-derive" @@ -3240,7 +3299,7 @@ dependencies = [ "heck 0.4.1", "proc-macro2", "quote", - "syn 2.0.29", + "syn 2.0.40", ] [[package]] @@ -3249,15 +3308,15 @@ version = "0.6.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "49b6c5ef183cd3ab4ba005f1ca64c21e8bd97ce4699cfea9e8d9a2c4958ca520" dependencies = [ - "base64 0.21.2", + "base64 0.21.5", "byteorder", - "bytes 1.4.0", + "bytes 1.5.0", "fallible-iterator", "hmac", "md-5", "memchr", "rand 0.8.5", - "sha2 0.10.7", + "sha2 0.10.8", "stringprep", ] @@ -3267,7 +3326,7 @@ version = "0.2.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8d2234cdee9408b523530a9b6d2d6b373d1db34f6a8e51dc03ded1828d7fb67c" dependencies = [ - "bytes 1.4.0", + "bytes 1.5.0", "chrono", "fallible-iterator", "postgres-derive", @@ -3276,6 +3335,12 @@ dependencies = [ "serde_json", ] +[[package]] +name = "powerfmt" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "439ee305def115ba05938db6eb1644ff94165c5ab5e9420d1c1bcedbba909391" + [[package]] name = "ppv-lite86" version = "0.2.17" @@ -3314,30 +3379,30 @@ dependencies = [ [[package]] name = "primeorder" -version = "0.13.2" +version = "0.13.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3c2fcef82c0ec6eefcc179b978446c399b3cdf73c392c35604e399eee6df1ee3" +checksum = "353e1ca18966c16d9deb1c69278edbc5f194139612772bd9537af60ac231e1e6" dependencies = [ "elliptic-curve", ] [[package]] name = "proc-macro-crate" -version = "0.1.5" +version = "1.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1d6ea3c4595b96363c13943497db34af4460fb474a95c43f4446ad341b8c9785" +checksum = "7f4c021e1093a56626774e81216a4ce732a735e5bad4868a03f3ed65ca0c3919" dependencies = [ - "toml 0.5.11", + "once_cell", + "toml_edit 0.19.15", ] [[package]] name = "proc-macro-crate" 
-version = "1.3.1" +version = "2.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7f4c021e1093a56626774e81216a4ce732a735e5bad4868a03f3ed65ca0c3919" +checksum = "7e8366a6159044a37876a2b9817124296703c586a5c92e2c53751fa06d8d43e8" dependencies = [ - "once_cell", - "toml_edit", + "toml_edit 0.20.7", ] [[package]] @@ -3366,9 +3431,9 @@ dependencies = [ [[package]] name = "proc-macro2" -version = "1.0.66" +version = "1.0.70" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "18fb31db3f9bddb2ea821cde30a9f70117e3f119938b5ee630b7403aa6e2ead9" +checksum = "39278fbbf5fb4f646ce651690877f89d1c5811a3d4acb27700c1cb3cdb78fd3b" dependencies = [ "unicode-ident", ] @@ -3379,7 +3444,7 @@ version = "0.11.9" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0b82eaa1d779e9a4bc1c3217db8ffbeabaae1dca241bf70183242128d48681cd" dependencies = [ - "bytes 1.4.0", + "bytes 1.5.0", "prost-derive", ] @@ -3389,9 +3454,9 @@ version = "0.11.9" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "119533552c9a7ffacc21e099c24a0ac8bb19c2a2a3f363de84cd9b844feab270" dependencies = [ - "bytes 1.4.0", + "bytes 1.5.0", "heck 0.4.1", - "itertools", + "itertools 0.10.5", "lazy_static", "log", "multimap", @@ -3412,7 +3477,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e5d2d8d10f3c6ded6da8b05b5fb3b8a5082514344d56c9f871412d29b4e075b4" dependencies = [ "anyhow", - "itertools", + "itertools 0.10.5", "proc-macro2", "quote", "syn 1.0.109", @@ -3473,7 +3538,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "735d6cecec44dc27382b962309c05de47d40727ff9898a501ec8fadec76be9a8" dependencies = [ "async-trait", - "bytes 1.4.0", + "bytes 1.5.0", "dashmap", "futures", "pin-project 1.1.3", @@ -3499,7 +3564,7 @@ dependencies = [ "chrono", "derive_more", "num_enum", - "ordered-float 3.7.0", + "ordered-float 3.9.2", "uuid", ] @@ -3568,7 +3633,7 @@ version = 
"0.6.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c" dependencies = [ - "getrandom 0.2.10", + "getrandom 0.2.11", ] [[package]] @@ -3591,29 +3656,29 @@ dependencies = [ [[package]] name = "redox_syscall" -version = "0.3.5" +version = "0.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "567664f262709473930a4bf9e51bf2ebf3348f2e748ccc50dea20646858f8f29" +checksum = "4722d768eff46b75989dd134e5c353f0d6296e5aaa3132e776cbdb56be7731aa" dependencies = [ "bitflags 1.3.2", ] [[package]] name = "redox_users" -version = "0.4.3" +version = "0.4.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b033d837a7cf162d7993aded9304e30a83213c648b6e389db233191f891e5c2b" +checksum = "a18479200779601e498ada4e8c1e1f50e3ee19deb0259c25825a98b5603b2cb4" dependencies = [ - "getrandom 0.2.10", - "redox_syscall 0.2.16", + "getrandom 0.2.11", + "libredox", "thiserror", ] [[package]] name = "refinery" -version = "0.8.10" +version = "0.8.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cdb0436d0dd7bd8d4fce1e828751fa79742b08e35f27cfea7546f8a322b5ef24" +checksum = "529664dbccc0a296947615c997a857912d72d1c44be1fafb7bae54ecfa7a8c24" dependencies = [ "refinery-core", "refinery-macros", @@ -3621,9 +3686,9 @@ dependencies = [ [[package]] name = "refinery-core" -version = "0.8.10" +version = "0.8.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "19206547cd047e8f4dfa6b20c30d3ecaf24be05841b6aa0aa926a47a3d0662bb" +checksum = "e895cb870cf06e92318cbbeb701f274d022d5ca87a16fa8244e291cd035ef954" dependencies = [ "async-trait", "cfg-if", @@ -3631,39 +3696,39 @@ dependencies = [ "log", "regex", "serde", - "siphasher", + "siphasher 1.0.0", "thiserror", - "time 0.3.27", + "time", "tokio", "tokio-postgres", - "toml 0.7.6", + "toml 0.7.8", "url", "walkdir", ] [[package]] name = "refinery-macros" 
-version = "0.8.10" +version = "0.8.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d94d4b9241859ba19eaa5c04c86e782eb3aa0aae2c5868e0cfa90c856e58a174" +checksum = "123e8b80f8010c3ae38330c81e76938fc7adf6cdbfbaad20295bb8c22718b4f1" dependencies = [ "proc-macro2", "quote", "refinery-core", "regex", - "syn 2.0.29", + "syn 2.0.40", ] [[package]] name = "regex" -version = "1.9.3" +version = "1.10.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "81bc1d4caf89fac26a70747fe603c130093b53c773888797a6329091246d651a" +checksum = "380b951a9c5e80ddfd6136919eef32310721aa4aacd4889a8d39124b026ab343" dependencies = [ "aho-corasick", "memchr", - "regex-automata 0.3.6", - "regex-syntax 0.7.4", + "regex-automata 0.4.3", + "regex-syntax 0.8.2", ] [[package]] @@ -3677,13 +3742,13 @@ dependencies = [ [[package]] name = "regex-automata" -version = "0.3.6" +version = "0.4.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fed1ceff11a1dddaee50c9dc8e4938bd106e9d89ae372f192311e7da498e3b69" +checksum = "5f804c7828047e88b2d32e2d7fe5a105da8ee3264f01902f796c8e067dc2483f" dependencies = [ "aho-corasick", "memchr", - "regex-syntax 0.7.4", + "regex-syntax 0.8.2", ] [[package]] @@ -3694,9 +3759,9 @@ checksum = "f162c6dd7b008981e4d40210aca20b4bd0f9b60ca9271061b07f78537722f2e1" [[package]] name = "regex-syntax" -version = "0.7.4" +version = "0.8.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e5ea92a5b6195c6ef2a0295ea818b312502c6fc94dde986c5553242e18fd4ce2" +checksum = "c08c74e62047bb2de4ff487b251e4a92e24f48745648451635cec7d591162d9f" [[package]] name = "remain" @@ -3706,26 +3771,26 @@ checksum = "bce3a7139d2ee67d07538ee5dba997364fbc243e7e7143e96eb830c74bfaa082" dependencies = [ "proc-macro2", "quote", - "syn 2.0.29", + "syn 2.0.40", ] [[package]] name = "rend" -version = "0.4.0" +version = "0.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"581008d2099240d37fb08d77ad713bcaec2c4d89d50b5b21a8bb1996bbab68ab" +checksum = "a2571463863a6bd50c32f94402933f03457a3fbaf697a707c5be741e459f08fd" dependencies = [ "bytecheck", ] [[package]] name = "reqwest" -version = "0.11.20" +version = "0.11.22" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3e9ad3fe7488d7e34558a2033d45a0c90b72d97b4f80705666fea71472e2e6a1" +checksum = "046cd98826c46c2ac8ddecae268eb5c2e58628688a5fc7a2643704a73faba95b" dependencies = [ - "base64 0.21.2", - "bytes 1.4.0", + "base64 0.21.5", + "bytes 1.5.0", "encoding_rs", "futures-core", "futures-util", @@ -3742,11 +3807,12 @@ dependencies = [ "once_cell", "percent-encoding", "pin-project-lite", - "rustls 0.21.6", + "rustls 0.21.10", "rustls-pemfile", "serde", "serde_json", "serde_urlencoded", + "system-configuration", "tokio", "tokio-rustls 0.24.1", "tokio-util", @@ -3756,7 +3822,7 @@ dependencies = [ "wasm-bindgen-futures", "wasm-streams", "web-sys", - "webpki-roots 0.25.2", + "webpki-roots 0.25.3", "winreg", ] @@ -3786,11 +3852,25 @@ dependencies = [ "libc", "once_cell", "spin 0.5.2", - "untrusted", + "untrusted 0.7.1", "web-sys", "winapi", ] +[[package]] +name = "ring" +version = "0.17.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "688c63d65483050968b2a8937f7995f443e27041a0f7700aa59b0822aedebb74" +dependencies = [ + "cc", + "getrandom 0.2.11", + "libc", + "spin 0.9.8", + "untrusted 0.9.0", + "windows-sys 0.48.0", +] + [[package]] name = "rkyv" version = "0.7.42" @@ -3860,7 +3940,7 @@ dependencies = [ "aws-creds", "aws-region", "base64 0.13.1", - "bytes 1.4.0", + "bytes 1.5.0", "cfg-if", "futures", "hex", @@ -3874,9 +3954,9 @@ dependencies = [ "reqwest", "serde", "serde_derive", - "sha2 0.10.7", + "sha2 0.10.8", "thiserror", - "time 0.3.27", + "time", "tokio", "tokio-stream", "url", @@ -3884,13 +3964,13 @@ dependencies = [ [[package]] name = "rust_decimal" -version = "1.32.0" +version = "1.33.1" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "a4c4216490d5a413bc6d10fa4742bd7d4955941d062c0ef873141d6b0e7b30fd" +checksum = "06676aec5ccb8fc1da723cc8c0f9a46549f21ebb8753d3915c6c41db1e7f1dc4" dependencies = [ "arrayvec", "borsh", - "bytes 1.4.0", + "bytes 1.5.0", "num-traits", "rand 0.8.5", "rkyv", @@ -3915,51 +3995,37 @@ dependencies = [ [[package]] name = "rustix" -version = "0.37.23" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4d69718bf81c6127a49dc64e44a742e8bb9213c0ff8869a22c308f84c1d4ab06" -dependencies = [ - "bitflags 1.3.2", - "errno", - "io-lifetimes", - "libc", - "linux-raw-sys 0.3.8", - "windows-sys 0.48.0", -] - -[[package]] -name = "rustix" -version = "0.38.8" +version = "0.38.28" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "19ed4fa021d81c8392ce04db050a3da9a60299050b7ae1cf482d862b54a7218f" +checksum = "72e572a5e8ca657d7366229cdde4bd14c4eb5499a9573d4d366fe1b599daa316" dependencies = [ - "bitflags 2.4.0", + "bitflags 2.4.1", "errno", "libc", - "linux-raw-sys 0.4.5", - "windows-sys 0.48.0", + "linux-raw-sys", + "windows-sys 0.52.0", ] [[package]] name = "rustls" -version = "0.20.8" +version = "0.20.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fff78fc74d175294f4e83b28343315ffcfb114b156f0185e9741cb5570f50e2f" +checksum = "1b80e3dec595989ea8510028f30c408a4630db12c9cbb8de34203b89d6577e99" dependencies = [ "log", - "ring", + "ring 0.16.20", "sct", "webpki", ] [[package]] name = "rustls" -version = "0.21.6" +version = "0.21.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1d1feddffcfcc0b33f5c6ce9a29e341e4cd59c3f78e7ee45f4a40c038b1d6cbb" +checksum = "f9d5a6813c0759e4609cd494e8e725babae6a2ca7b62a5536a13daaec6fcb7ba" dependencies = [ "log", - "ring", + "ring 0.17.7", "rustls-webpki", "sct", ] @@ -3978,21 +4044,21 @@ dependencies = [ [[package]] name = "rustls-pemfile" -version = "1.0.3" +version = "1.0.4" 
source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2d3987094b1d07b653b7dfdc3f70ce9a1da9c51ac18c1b06b662e4f9a0e9f4b2" +checksum = "1c74cae0a4cf6ccbbf5f359f08efdf8ee7e1dc532573bf0db71968cb56b1448c" dependencies = [ - "base64 0.21.2", + "base64 0.21.5", ] [[package]] name = "rustls-webpki" -version = "0.101.4" +version = "0.101.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7d93931baf2d282fff8d3a532bbfd7653f734643161b87e3e01e59a04439bf0d" +checksum = "8b6275d1ee7a1cd780b64aca7726599a1dbc893b1e64144529e55c3c2f745765" dependencies = [ - "ring", - "untrusted", + "ring 0.17.7", + "untrusted 0.9.0", ] [[package]] @@ -4003,9 +4069,9 @@ checksum = "7ffc183a10b4478d04cbbbfc96d0873219d962dd5accaff2ffbd4ceb7df837f4" [[package]] name = "ryu" -version = "1.0.15" +version = "1.0.16" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1ad4cc8da4ef723ed60bced201181d83791ad433213d8c24efffda1eec85d741" +checksum = "f98d2aa92eebf49b69786be48e4477826b256916e84a57ff2a4f21923b48eb4c" [[package]] name = "same-file" @@ -4033,12 +4099,12 @@ checksum = "94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49" [[package]] name = "sct" -version = "0.7.0" +version = "0.7.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d53dcdb7c9f8158937a7981b48accfd39a43af418591a5d008c7b22b5e1b7ca4" +checksum = "da046153aa2352493d6cb7da4b6e5c0c057d8a1d0a9aa8560baffdd945acd414" dependencies = [ - "ring", - "untrusted", + "ring 0.17.7", + "untrusted 0.9.0", ] [[package]] @@ -4063,7 +4129,7 @@ dependencies = [ "serde_json", "sqlx", "thiserror", - "time 0.3.27", + "time", "tracing", "url", "uuid", @@ -4093,7 +4159,7 @@ dependencies = [ "rust_decimal", "sea-query-derive", "serde_json", - "time 0.3.27", + "time", "uuid", ] @@ -4109,7 +4175,7 @@ dependencies = [ "sea-query", "serde_json", "sqlx", - "time 0.3.27", + "time", "uuid", ] @@ -4203,24 +4269,24 @@ dependencies = [ [[package]] name = 
"semver" -version = "1.0.18" +version = "1.0.20" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b0293b4b29daaf487284529cc2f5675b8e57c61f70167ba415a463651fd6a918" +checksum = "836fa6a3e1e547f9a2c4040802ec865b5d85f4014efe00555d7090a3dcaa1090" [[package]] name = "serde" -version = "1.0.186" +version = "1.0.193" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9f5db24220c009de9bd45e69fb2938f4b6d2df856aa9304ce377b3180f83b7c1" +checksum = "25dd9975e68d0cb5aa1120c288333fc98731bd1dd12f561e468ea4728c042b89" dependencies = [ "serde_derive", ] [[package]] name = "serde-aux" -version = "4.2.0" +version = "4.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c3dfe1b7eb6f9dcf011bd6fad169cdeaae75eda0d61b1a99a3f015b41b0cae39" +checksum = "184eba62ebddb71658697c8b08822edee89970bf318c5362189f0de27f85b498" dependencies = [ "chrono", "serde", @@ -4229,22 +4295,22 @@ dependencies = [ [[package]] name = "serde_derive" -version = "1.0.186" +version = "1.0.193" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5ad697f7e0b65af4983a4ce8f56ed5b357e8d3c36651bf6a7e13639c17b8e670" +checksum = "43576ca501357b9b071ac53cdc7da8ef0cbd9493d8df094cd821777ea6e894d3" dependencies = [ "proc-macro2", "quote", - "syn 2.0.29", + "syn 2.0.40", ] [[package]] name = "serde_json" -version = "1.0.105" +version = "1.0.108" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "693151e1ac27563d6dbcec9dee9fbd5da8539b20fa14ad3752b2e6d363ace360" +checksum = "3d1c7e3eac408d115102c4c24ad393e0821bb3a5df4d506a80f85f7a742a526b" dependencies = [ - "indexmap 2.0.0", + "indexmap 2.1.0", "itoa", "ryu", "serde", @@ -4271,20 +4337,20 @@ dependencies = [ [[package]] name = "serde_repr" -version = "0.1.16" +version = "0.1.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8725e1dfadb3a50f7e5ce0b1a540466f6ed3fe7a0fca2ac2b8b831d31316bd00" +checksum = 
"3081f5ffbb02284dda55132aa26daecedd7372a42417bbbab6f14ab7d6bb9145" dependencies = [ "proc-macro2", "quote", - "syn 2.0.29", + "syn 2.0.40", ] [[package]] name = "serde_spanned" -version = "0.6.3" +version = "0.6.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "96426c9936fd7a0124915f9185ea1d20aa9445cc9821142f0a73bc9207a2e186" +checksum = "12022b835073e5b11e90a14f86838ceb1c8fb0325b72416845c487ac0fa95e80" dependencies = [ "serde", ] @@ -4324,24 +4390,24 @@ dependencies = [ "serde", "serde_json", "serde_with_macros 2.3.3", - "time 0.3.27", + "time", ] [[package]] name = "serde_with" -version = "3.3.0" +version = "3.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1ca3b16a3d82c4088f343b7480a93550b3eabe1a358569c2dfe38bbcead07237" +checksum = "64cd236ccc1b7a29e7e2739f27c0b2dd199804abc4290e32f59f3b68d6405c23" dependencies = [ - "base64 0.21.2", + "base64 0.21.5", "chrono", "hex", "indexmap 1.9.3", - "indexmap 2.0.0", + "indexmap 2.1.0", "serde", "serde_json", - "serde_with_macros 3.3.0", - "time 0.3.27", + "serde_with_macros 3.4.0", + "time", ] [[package]] @@ -4353,28 +4419,28 @@ dependencies = [ "darling 0.20.3", "proc-macro2", "quote", - "syn 2.0.29", + "syn 2.0.40", ] [[package]] name = "serde_with_macros" -version = "3.3.0" +version = "3.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2e6be15c453eb305019bfa438b1593c731f36a289a7853f7707ee29e870b3b3c" +checksum = "93634eb5f75a2323b16de4748022ac4297f9e76b6dced2be287a099f41b5e788" dependencies = [ "darling 0.20.3", "proc-macro2", "quote", - "syn 2.0.29", + "syn 2.0.40", ] [[package]] name = "serde_yaml" -version = "0.9.25" +version = "0.9.27" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1a49e178e4452f45cb61d0cd8cebc1b0fafd3e41929e996cef79aa3aca91f574" +checksum = "3cc7a1570e38322cfe4154732e5110f887ea57e22b76f4bfd32b5bdd3368666c" dependencies = [ - "indexmap 2.0.0", + "indexmap 2.1.0", 
"itoa", "ryu", "serde", @@ -4383,9 +4449,9 @@ dependencies = [ [[package]] name = "sha1" -version = "0.10.5" +version = "0.10.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f04293dc80c3993519f2d7f6f511707ee7094fe0c6d3406feb330cdb3540eba3" +checksum = "e3bf829a2d51ab4a5ddf1352d8470c140cadc8301b2ae1789db023f01cedd6ba" dependencies = [ "cfg-if", "cpufeatures", @@ -4407,9 +4473,9 @@ dependencies = [ [[package]] name = "sha2" -version = "0.10.7" +version = "0.10.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "479fb9d862239e610720565ca91403019f2f00410f1864c5aa7479b950a76ed8" +checksum = "793db75ad2bcafc3ffa7c68b215fee268f537982cd901d132f89c6343f3a3dc8" dependencies = [ "cfg-if", "cpufeatures", @@ -4418,9 +4484,9 @@ dependencies = [ [[package]] name = "sharded-slab" -version = "0.1.4" +version = "0.1.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "900fba806f70c630b0a382d0d825e17a0f19fcd059a2ade1ff237bcddf446b31" +checksum = "f40ca3c46823713e0d4209592e8d6e826aa57e928f09752619fc696c499637f6" dependencies = [ "lazy_static", ] @@ -4467,6 +4533,18 @@ dependencies = [ "zeroize", ] +[[package]] +name = "signatory" +version = "0.27.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c1e303f8205714074f6068773f0e29527e0453937fe837c9717d066635b65f31" +dependencies = [ + "pkcs8 0.10.2", + "rand_core 0.6.4", + "signature 2.2.0", + "zeroize", +] + [[package]] name = "signature" version = "1.6.4" @@ -4479,9 +4557,9 @@ dependencies = [ [[package]] name = "signature" -version = "2.1.0" +version = "2.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5e1788eed21689f9cf370582dfc467ef36ed9c707f073528ddafa8d83e3b8500" +checksum = "77549399552de45a898a580c1b41d445bf730df867cc44e6c0233bbc4b8329de" dependencies = [ "digest 0.10.7", "rand_core 0.6.4", @@ -4499,6 +4577,12 @@ version = "0.3.11" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "38b58827f4464d87d377d175e90bf58eb00fd8716ff0a62f80356b5e61555d0d" +[[package]] +name = "siphasher" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "54ac45299ccbd390721be55b412d41931911f654fa99e2cb8bfb57184b2061fe" + [[package]] name = "slab" version = "0.4.9" @@ -4517,17 +4601,26 @@ dependencies = [ "smallvec", ] +[[package]] +name = "smallstr" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "63b1aefdf380735ff8ded0b15f31aab05daf1f70216c01c02a12926badd1df9d" +dependencies = [ + "smallvec", +] + [[package]] name = "smallvec" -version = "1.11.0" +version = "1.11.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "62bb4feee49fdd9f707ef802e22365a35de4b7b299de4763d44bfea899442ff9" +checksum = "4dccd0940a2dcdf68d092b8cbab7dc0ad8fa938bf95787e1b916b0e3d0e8e970" [[package]] name = "socket2" -version = "0.4.9" +version = "0.4.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "64a4a911eed85daf18834cfaa86a79b7d266ff93ff5ba14005426219480ed662" +checksum = "9f7916fc008ca5542385b89a3d3ce689953c143e9304a9bf8beec1de48994c0d" dependencies = [ "libc", "winapi", @@ -4535,9 +4628,9 @@ dependencies = [ [[package]] name = "socket2" -version = "0.5.3" +version = "0.5.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2538b18701741680e0322a2302176d3253a35388e2e62f172f64f4f16605f877" +checksum = "7b5fac59a5cb5dd637972e5fca70daf0523c9067fcdc4842f053dae04a18f8e9" dependencies = [ "libc", "windows-sys 0.48.0", @@ -4549,7 +4642,7 @@ version = "0.2.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e26be3acb6c2d9a7aac28482586a7856436af4cfe7100031d219de2d2ecb0028" dependencies = [ - "ed25519", + "ed25519 1.5.3", "libc", "libsodium-sys", "serde", @@ -4588,9 +4681,9 @@ dependencies = [ [[package]] name = "spki" -version 
= "0.7.2" +version = "0.7.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9d1e996ef02c474957d681f1b05213dfb0abab947b446a62d37770b23500184a" +checksum = "d91ed6c858b01f942cd56b37a94b3e0a1798290327d1236e4d9cf4eaca44d29d" dependencies = [ "base64ct", "der 0.7.8", @@ -4598,11 +4691,11 @@ dependencies = [ [[package]] name = "sqlformat" -version = "0.2.1" +version = "0.2.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0c12bc9199d1db8234678b7051747c07f517cdcf019262d1847b94ec8b1aee3e" +checksum = "ce81b7bd7c4493975347ef60d8c7e8b742d4694f4c49f93e0a12ea263938176c" dependencies = [ - "itertools", + "itertools 0.12.0", "nom", "unicode_categories", ] @@ -4623,13 +4716,13 @@ version = "0.6.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fa8241483a83a3f33aa5fff7e7d9def398ff9990b2752b6c6112b83c6d246029" dependencies = [ - "ahash 0.7.6", + "ahash 0.7.7", "atoi", "base64 0.13.1", "bigdecimal", "bitflags 1.3.2", "byteorder", - "bytes 1.4.0", + "bytes 1.5.0", "chrono", "crossbeam-queue", "dirs", @@ -4656,18 +4749,18 @@ dependencies = [ "percent-encoding", "rand 0.8.5", "rust_decimal", - "rustls 0.20.8", + "rustls 0.20.9", "rustls-pemfile", "serde", "serde_json", "sha1", - "sha2 0.10.7", + "sha2 0.10.8", "smallvec", "sqlformat", "sqlx-rt", "stringprep", "thiserror", - "time 0.3.27", + "time", "tokio-stream", "url", "uuid", @@ -4711,12 +4804,6 @@ version = "1.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a8f112729512f8e442d81f95a8a7ddf2b7c6b8a1a6f509a95864142b30cab2d3" -[[package]] -name = "static_assertions" -version = "1.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a2eb9349b6444b326872e140eb1cf5e7c522154d69e7a0ffb0fb81c06b37543f" - [[package]] name = "stream-cancel" version = "0.8.1" @@ -4730,10 +4817,11 @@ dependencies = [ [[package]] name = "stringprep" -version = "0.1.3" +version = "0.1.4" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "db3737bde7edce97102e0e2b15365bf7a20bfdb5f60f4f9e8d7004258a51a8da" +checksum = "bb41d74e231a107a1b4ee36bd1214b11285b77768d2e3824aedafa988fd36ee6" dependencies = [ + "finl_unicode", "unicode-bidi", "unicode-normalization", ] @@ -4750,9 +4838,15 @@ version = "0.24.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "063e6045c0e62079840579a7e47a355ae92f60eb74daaf156fb1e84ba164e63f" dependencies = [ - "strum_macros", + "strum_macros 0.24.3", ] +[[package]] +name = "strum" +version = "0.25.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "290d54ea6f91c969195bdbcd7442c8c2a2ba87da8bf60a7ee86a235d4bc1e125" + [[package]] name = "strum_macros" version = "0.24.3" @@ -4766,6 +4860,19 @@ dependencies = [ "syn 1.0.109", ] +[[package]] +name = "strum_macros" +version = "0.25.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "23dc1fa9ac9c169a78ba62f0b841814b7abae11bdd047b9c58f893439e309ea0" +dependencies = [ + "heck 0.4.1", + "proc-macro2", + "quote", + "rustversion", + "syn 2.0.40", +] + [[package]] name = "subtle" version = "2.5.0" @@ -4785,21 +4892,54 @@ dependencies = [ [[package]] name = "syn" -version = "2.0.29" +version = "2.0.40" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c324c494eba9d92503e6f1ef2e6df781e78f6a7705a0202d9801b198807d518a" +checksum = "13fa70a4ee923979ffb522cacce59d34421ebdea5625e1073c4326ef9d2dd42e" dependencies = [ "proc-macro2", "quote", "unicode-ident", ] +[[package]] +name = "syn_derive" +version = "0.1.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1329189c02ff984e9736652b1631330da25eaa6bc639089ed4915d25446cbe7b" +dependencies = [ + "proc-macro-error", + "proc-macro2", + "quote", + "syn 2.0.40", +] + [[package]] name = "sync_wrapper" version = "0.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"2047c6ded9c721764247e62cd3b03c09ffc529b2ba5b10ec482ae507a4a70160" +[[package]] +name = "system-configuration" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ba3a3adc5c275d719af8cb4272ea1c4a6d668a777f37e115f6d11ddbc1c8e0e7" +dependencies = [ + "bitflags 1.3.2", + "core-foundation", + "system-configuration-sys", +] + +[[package]] +name = "system-configuration-sys" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a75fb188eb626b924683e3b95e3a48e63551fcfb51949de2f06a9d91dbee93c9" +dependencies = [ + "core-foundation-sys", + "libc", +] + [[package]] name = "tap" version = "1.0.1" @@ -4828,36 +4968,46 @@ dependencies = [ [[package]] name = "tempfile" -version = "3.8.0" +version = "3.8.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cb94d2f3cc536af71caac6b6fcebf65860b347e7ce0cc9ebe8f70d3e521054ef" +checksum = "7ef1adac450ad7f4b3c28589471ade84f25f731a7a0fe30d71dfa9f60fd808e5" dependencies = [ "cfg-if", - "fastrand 2.0.0", - "redox_syscall 0.3.5", - "rustix 0.38.8", + "fastrand 2.0.1", + "redox_syscall 0.4.1", + "rustix", "windows-sys 0.48.0", ] [[package]] name = "terminal_size" -version = "0.2.6" +version = "0.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8e6bf6f19e9f8ed8d4048dc22981458ebcf406d67e94cd422e5ecd73d63b3237" +checksum = "21bebf2b7c9e0a515f6e0f8c51dc0f8e4696391e6f1ff30379559f8365fb0df7" dependencies = [ - "rustix 0.37.23", + "rustix", "windows-sys 0.48.0", ] [[package]] name = "test-log" -version = "0.2.12" +version = "0.2.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d9601d162c1d77e62c1ea0bc8116cd1caf143ce3af947536c3c9052a1677fe0c" +checksum = "6159ab4116165c99fc88cce31f99fa2c9dbe08d3691cb38da02fc3b45f357d2b" +dependencies = [ + "test-log-macros", + "tracing-subscriber", +] + +[[package]] +name = "test-log-macros" +version = "0.2.14" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "7ba277e77219e9eea169e8508942db1bf5d8a41ff2db9b20aab5a5aadc9fa25d" dependencies = [ "proc-macro2", "quote", - "syn 1.0.109", + "syn 2.0.40", ] [[package]] @@ -4868,10 +5018,10 @@ dependencies = [ "async-recursion", "async-trait", "axum", - "base64 0.21.2", + "base64 0.21.5", "blake3", "bollard", - "bytes 1.4.0", + "bytes 1.5.0", "chrono", "ciborium", "clap", @@ -4901,11 +5051,11 @@ dependencies = [ "indicatif", "indoc", "inquire", - "itertools", + "itertools 0.10.5", "jwt-simple", "lazy_static", "names", - "nix 0.26.2", + "nix 0.26.4", "nkeys 0.2.0", "num_cpus", "once_cell", @@ -4930,19 +5080,19 @@ dependencies = [ "remain", "reqwest", "rust-s3", - "rustls 0.21.6", + "rustls 0.21.10", "sea-orm", "self-replace", "serde", "serde-aux", "serde_json", "serde_url_params", - "serde_with 3.3.0", + "serde_with 3.4.0", "serde_yaml", "sodiumoxide", "stream-cancel", - "strum", - "syn 2.0.29", + "strum 0.24.1", + "syn 2.0.40", "tar", "tempfile", "test-log", @@ -4955,7 +5105,7 @@ dependencies = [ "tokio-tungstenite 0.18.0", "tokio-util", "tokio-vsock", - "toml 0.7.6", + "toml 0.7.8", "tower", "tower-http", "tracing", @@ -4967,27 +5117,27 @@ dependencies = [ "vfs", "vfs-tar", "y-sync", - "yrs", + "yrs 0.16.10", ] [[package]] name = "thiserror" -version = "1.0.47" +version = "1.0.50" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "97a802ec30afc17eee47b2855fc72e0c4cd62be9b4efe6591edde0ec5bd68d8f" +checksum = "f9a7210f5c9a7156bb50aa36aed4c95afb51df0df00713949448cf9e97d382d2" dependencies = [ "thiserror-impl", ] [[package]] name = "thiserror-impl" -version = "1.0.47" +version = "1.0.50" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6bb623b56e39ab7dcd4b1b98bb6c8f8d907ed255b18de254088016b27a8ee19b" +checksum = "266b2e40bc00e5a6c09c3584011e08b06f123c00362c92b975ba9843aaaa14b8" dependencies = [ "proc-macro2", "quote", - "syn 2.0.29", + "syn 2.0.40", ] 
[[package]] @@ -5002,23 +5152,13 @@ dependencies = [ [[package]] name = "time" -version = "0.1.45" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1b797afad3f312d1c66a56d11d0316f916356d11bd158fbc6ca6389ff6bf805a" -dependencies = [ - "libc", - "wasi 0.10.0+wasi-snapshot-preview1", - "winapi", -] - -[[package]] -name = "time" -version = "0.3.27" +version = "0.3.30" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0bb39ee79a6d8de55f48f2293a830e040392f1c5f16e336bdd1788cd0aadce07" +checksum = "c4a34ab300f2dee6e562c10a046fc05e358b29f9bf92277f30c3c8d82275f6f5" dependencies = [ "deranged", "itoa", + "powerfmt", "serde", "time-core", "time-macros", @@ -5026,15 +5166,15 @@ dependencies = [ [[package]] name = "time-core" -version = "0.1.1" +version = "0.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7300fbefb4dadc1af235a9cef3737cea692a9d97e1b9cbcd4ebdae6f8868e6fb" +checksum = "ef927ca75afb808a4d64dd374f00a2adf8d0fcff8e7b184af886c3c87ec4a3f3" [[package]] name = "time-macros" -version = "0.2.13" +version = "0.2.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "733d258752e9303d392b94b75230d07b0b9c489350c69b851fc6c065fde3e8f9" +checksum = "4ad70d68dba9e1f8aceda7aa6711965dfec1cac869f311a51bd08b3a2ccbce20" dependencies = [ "time-core", ] @@ -5056,19 +5196,19 @@ checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20" [[package]] name = "tokio" -version = "1.32.0" +version = "1.35.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "17ed6077ed6cd6c74735e21f37eb16dc3935f96878b1fe961074089cc80893f9" +checksum = "841d45b238a16291a4e1584e61820b8ae57d696cc5015c459c229ccc6990cc1c" dependencies = [ "backtrace", - "bytes 1.4.0", + "bytes 1.5.0", "libc", "mio", "num_cpus", "parking_lot 0.12.1", "pin-project-lite", "signal-hook-registry", - "socket2 0.5.3", + "socket2 0.5.5", "tokio-macros", "windows-sys 0.48.0", ] @@ 
-5085,24 +5225,24 @@ dependencies = [ [[package]] name = "tokio-macros" -version = "2.1.0" +version = "2.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "630bdcf245f78637c13ec01ffae6187cca34625e8c63150d424b59e55af2675e" +checksum = "5b8a1e28f2deaa14e508979454cb3a223b10b938b45af148bc0986de36f1923b" dependencies = [ "proc-macro2", "quote", - "syn 2.0.29", + "syn 2.0.40", ] [[package]] name = "tokio-postgres" -version = "0.7.9" +version = "0.7.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "000387915083ea6406ee44b50ca74813aba799fe682a7689e382bf9e13b74ce9" +checksum = "d340244b32d920260ae7448cb72b6e238bddc3d4f7603394e7dd46ed8e48f5b8" dependencies = [ "async-trait", "byteorder", - "bytes 1.4.0", + "bytes 1.5.0", "fallible-iterator", "futures-channel", "futures-util", @@ -5114,7 +5254,7 @@ dependencies = [ "postgres-protocol", "postgres-types", "rand 0.8.5", - "socket2 0.5.3", + "socket2 0.5.5", "tokio", "tokio-util", "whoami", @@ -5137,7 +5277,7 @@ version = "0.23.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c43ee83903113e03984cb9e5cebe6c04a5116269e900e3ddba8f068a62adda59" dependencies = [ - "rustls 0.20.8", + "rustls 0.20.9", "tokio", "webpki", ] @@ -5148,7 +5288,7 @@ version = "0.24.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c28327cf380ac148141087fbfb9de9d7bd4e84ab5d2c28fbc911d753de8a7081" dependencies = [ - "rustls 0.21.6", + "rustls 0.21.10", "tokio", ] @@ -5158,7 +5298,7 @@ version = "0.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "911a61637386b789af998ee23f50aa30d5fd7edcec8d6d3dedae5e5815205466" dependencies = [ - "bytes 1.4.0", + "bytes 1.5.0", "educe", "futures-core", "futures-sink", @@ -5185,7 +5325,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e89b3cbabd3ae862100094ae433e1def582cf86451b4e9bf83aa7ac1d8a7d719" dependencies = [ "async-stream", - "bytes 
1.4.0", + "bytes 1.5.0", "futures-core", "tokio", "tokio-stream", @@ -5205,23 +5345,23 @@ dependencies = [ [[package]] name = "tokio-tungstenite" -version = "0.20.0" +version = "0.20.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2b2dbec703c26b00d74844519606ef15d09a7d6857860f84ad223dec002ddea2" +checksum = "212d5dcb2a1ce06d81107c3d0ffa3121fe974b73f068c8282cb1c32328113b6c" dependencies = [ "futures-util", "log", "tokio", - "tungstenite 0.20.0", + "tungstenite 0.20.1", ] [[package]] name = "tokio-util" -version = "0.7.8" +version = "0.7.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "806fe8c2c87eccc8b3267cbae29ed3ab2d0bd37fca70ab622e46aaa9375ddb7d" +checksum = "5419f34732d9eb6ee4c3578b7989078579b7f039cbbb9ca2c4da015749371e15" dependencies = [ - "bytes 1.4.0", + "bytes 1.5.0", "futures-core", "futures-sink", "pin-project-lite", @@ -5235,7 +5375,7 @@ version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "52a15c15b1bc91f90902347eff163b5b682643aff0c8e972912cca79bd9208dd" dependencies = [ - "bytes 1.4.0", + "bytes 1.5.0", "futures", "libc", "tokio", @@ -5253,38 +5393,49 @@ dependencies = [ [[package]] name = "toml" -version = "0.7.6" +version = "0.7.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c17e963a819c331dcacd7ab957d80bc2b9a9c1e71c804826d2f283dd65306542" +checksum = "dd79e69d3b627db300ff956027cc6c3798cef26d22526befdfcd12feeb6d2257" dependencies = [ "serde", "serde_spanned", "toml_datetime", - "toml_edit", + "toml_edit 0.19.15", ] [[package]] name = "toml_datetime" -version = "0.6.3" +version = "0.6.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7cda73e2f1397b1262d6dfdcef8aafae14d1de7748d66822d3bfeeb6d03e5e4b" +checksum = "3550f4e9685620ac18a50ed434eb3aec30db8ba93b0287467bca5826ea25baf1" dependencies = [ "serde", ] [[package]] name = "toml_edit" -version = "0.19.14" +version = "0.19.15" source 
= "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f8123f27e969974a3dfba720fdb560be359f57b44302d280ba72e76a74480e8a" +checksum = "1b5bb770da30e5cbfde35a2d7b9b8a2c4b8ef89548a7a6aeab5c9a576e3e7421" dependencies = [ - "indexmap 2.0.0", + "indexmap 2.1.0", "serde", "serde_spanned", "toml_datetime", "winnow", ] +[[package]] +name = "toml_edit" +version = "0.20.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "70f427fce4d84c72b5b732388bf4a9f4531b53f74e2887e3ecb2481f68f66d81" +dependencies = [ + "indexmap 2.1.0", + "toml_datetime", + "winnow", +] + [[package]] name = "tonic" version = "0.8.3" @@ -5295,7 +5446,7 @@ dependencies = [ "async-trait", "axum", "base64 0.13.1", - "bytes 1.4.0", + "bytes 1.5.0", "futures-core", "futures-util", "h2", @@ -5352,12 +5503,12 @@ dependencies = [ [[package]] name = "tower-http" -version = "0.4.3" +version = "0.4.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "55ae70283aba8d2a8b411c695c437fe25b8b5e44e23e780662002fc72fb47a82" +checksum = "61c5bb1d698276a2443e5ecfabc1008bf15a36c12e6a7176e7bf089ea9131140" dependencies = [ - "bitflags 2.4.0", - "bytes 1.4.0", + "bitflags 2.4.1", + "bytes 1.5.0", "futures-core", "futures-util", "http", @@ -5383,11 +5534,10 @@ checksum = "b6bc1c9ce2b5135ac7f93c72918fc37feb872bdc6a5533a8b85eb4b86bfdae52" [[package]] name = "tracing" -version = "0.1.37" +version = "0.1.40" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8ce8c33a8d48bd45d624a6e523445fd21ec13d3653cd51f681abf67418f54eb8" +checksum = "c3523ab5a71916ccf420eebdf5521fcef02141234bbc0b8a49f2fdc4544364ef" dependencies = [ - "cfg-if", "log", "pin-project-lite", "tracing-attributes", @@ -5396,20 +5546,20 @@ dependencies = [ [[package]] name = "tracing-attributes" -version = "0.1.26" +version = "0.1.27" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5f4f31f56159e98206da9efd823404b79b6ef3143b4a7ab76e67b1751b25a4ab" 
+checksum = "34704c8d6ebcbc939824180af020566b01a7c01f80641264eba0999f6c2b6be7" dependencies = [ "proc-macro2", "quote", - "syn 2.0.29", + "syn 2.0.40", ] [[package]] name = "tracing-core" -version = "0.1.31" +version = "0.1.32" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0955b8137a1df6f1a2e9a37d8a6656291ff0297c1a97c24e0d8425fe2312f79a" +checksum = "c06d3da6113f116aaee68e4d601191614c9053067f9ab7f6edbcb161237daa54" dependencies = [ "once_cell", "valuable", @@ -5437,12 +5587,23 @@ dependencies = [ [[package]] name = "tracing-log" -version = "0.1.3" +version = "0.1.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "78ddad33d2d10b1ed7eb9d1f518a5674713876e97e5bb9b7345a7984fbb4f922" +checksum = "f751112709b4e791d8ce53e32c4ed2d353565a795ce84da2285393f41557bdf2" +dependencies = [ + "log", + "once_cell", + "tracing-core", +] + +[[package]] +name = "tracing-log" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ee855f1f400bd0e5c02d150ae5de3840039a3f54b025156404e34c23c03f47c3" dependencies = [ - "lazy_static", "log", + "once_cell", "tracing-core", ] @@ -5456,15 +5617,15 @@ dependencies = [ "opentelemetry", "tracing", "tracing-core", - "tracing-log", + "tracing-log 0.1.4", "tracing-subscriber", ] [[package]] name = "tracing-subscriber" -version = "0.3.17" +version = "0.3.18" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "30a651bc37f915e81f087d86e62a18eec5f79550c7faff886f7090b4ea757c77" +checksum = "ad0f048c97dbd9faa9b7df56362b8ebcaa52adb06b498c050d2f4e32f90a7a8b" dependencies = [ "matchers", "nu-ansi-term", @@ -5475,14 +5636,14 @@ dependencies = [ "thread_local", "tracing", "tracing-core", - "tracing-log", + "tracing-log 0.2.0", ] [[package]] name = "try-lock" -version = "0.2.4" +version = "0.2.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"3528ecfd12c466c6f163363caf2d02a71161dd5e1cc6ae7b34207ea2d42d81ed" +checksum = "e421abadd41a4225275504ea4d6566923418b7f05506fbc9c0fe86ba7396114b" [[package]] name = "tungstenite" @@ -5492,7 +5653,7 @@ checksum = "30ee6ab729cd4cf0fd55218530c4522ed30b7b6081752839b68fcec8d0960788" dependencies = [ "base64 0.13.1", "byteorder", - "bytes 1.4.0", + "bytes 1.5.0", "http", "httparse", "log", @@ -5505,12 +5666,12 @@ dependencies = [ [[package]] name = "tungstenite" -version = "0.20.0" +version = "0.20.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e862a1c4128df0112ab625f55cd5c934bcb4312ba80b39ae4b4835a3fd58e649" +checksum = "9e3dac10fd62eaf6617d3a904ae222845979aec67c615d1c842b4002c7666fb9" dependencies = [ "byteorder", - "bytes 1.4.0", + "bytes 1.5.0", "data-encoding", "http", "httparse", @@ -5524,15 +5685,15 @@ dependencies = [ [[package]] name = "typenum" -version = "1.16.0" +version = "1.17.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "497961ef93d974e23eb6f433eb5fe1b7930b659f06d12dec6fc44a8f554c0bba" +checksum = "42ff0bf0c66b8238c6f3b578df37d0b7848e55df8577b3f74f92a69acceeb825" [[package]] name = "ulid" -version = "1.0.0" +version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "13a3aaa69b04e5b66cc27309710a569ea23593612387d67daaf102e73aa974fd" +checksum = "7e37c4b6cbcc59a8dcd09a6429fbc7890286bcbb79215cea7b38a3c4c0921d93" dependencies = [ "rand 0.8.5", "serde", @@ -5549,15 +5710,15 @@ dependencies = [ [[package]] name = "unicode-bidi" -version = "0.3.13" +version = "0.3.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "92888ba5573ff080736b3648696b70cafad7d250551175acbaa4e0385b3e1460" +checksum = "6f2528f27a9eb2b21e69c95319b30bd0efd85d09c379741b0f78ea1d86be2416" [[package]] name = "unicode-ident" -version = "1.0.11" +version = "1.0.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"301abaae475aa91687eb82514b328ab47a211a533026cb25fc3e519b86adfc3c" +checksum = "3354b9ac3fae1ff6755cb6db53683adb661634f67557942dea4facebec0fee4b" [[package]] name = "unicode-normalization" @@ -5576,9 +5737,9 @@ checksum = "1dd624098567895118886609431a7c3b8f516e41d30e0643f03d94592a147e36" [[package]] name = "unicode-width" -version = "0.1.10" +version = "0.1.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c0edd1e5b14653f783770bce4a4dabb4a5108a5370a5f5d8cfe8710c361f6c8b" +checksum = "e51733f11c9c4f72aa0c160008246859e340b00807569a0da0e7a1079b27ba85" [[package]] name = "unicode-xid" @@ -5604,11 +5765,17 @@ version = "0.7.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a156c684c91ea7d62626509bce3cb4e1d9ed5c4d978f7b4352658f96a4c26b4a" +[[package]] +name = "untrusted" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8ecb6da28b8a351d773b68d5825ac39017e680750f980f3a1a85cd8dd28a47c1" + [[package]] name = "url" -version = "2.4.0" +version = "2.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "50bff7831e19200a85b17131d085c25d7811bc4e186efdaf54bbd132994a88cb" +checksum = "31e6302e3bb753d46e83516cae55ae196fc0c309407cf11ab35cc51a4c2a4633" dependencies = [ "form_urlencoded", "idna", @@ -5624,9 +5791,9 @@ checksum = "09cc8ee72d2a9becf2f2febe0205bbed8fc6615b7cb429ad062dc7b7ddd036a9" [[package]] name = "utf8-width" -version = "0.1.6" +version = "0.1.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5190c9442dcdaf0ddd50f37420417d219ae5261bbf5db120d0f9bab996c9cba1" +checksum = "86bd8d4e895da8537e5315b8254664e6b769c4ff3db18321b297a1e7004392e3" [[package]] name = "utf8parse" @@ -5636,11 +5803,11 @@ checksum = "711b9620af191e0cdc7468a8d14e709c3dcdb115b36f838e601583af800a370a" [[package]] name = "uuid" -version = "1.4.1" +version = "1.6.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum 
= "79daa5ed5740825c40b389c5e50312b9c86df53fccd33f281df655642b43869d" +checksum = "5e395fcf16a7a3d8127ec99782007af141946b4795001f876d54fb0d55978560" dependencies = [ - "getrandom 0.2.10", + "getrandom 0.2.11", "serde", ] @@ -5686,15 +5853,15 @@ dependencies = [ [[package]] name = "waker-fn" -version = "1.1.0" +version = "1.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9d5b2c62b4012a3e1eca5a7e077d13b3bf498c4073e33ccd58626607748ceeca" +checksum = "f3c4517f54858c779bbcbf228f4fca63d121bf85fbecb2dc578cdf4a39395690" [[package]] name = "walkdir" -version = "2.3.3" +version = "2.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "36df944cda56c7d8d8b7496af378e6b16de9284591917d307c9b4d313c44e698" +checksum = "d71d857dc86794ca4c280d616f7da00d2dbfd8cd788846559a6813e6aa4b54ee" dependencies = [ "same-file", "winapi-util", @@ -5715,12 +5882,6 @@ version = "0.9.0+wasi-snapshot-preview1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "cccddf32554fecc6acb585f82a32a72e28b48f8c4c1883ddfeeeaa96f7d8e519" -[[package]] -name = "wasi" -version = "0.10.0+wasi-snapshot-preview1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1a143597ca7c7793eff794def352d41792a93c481eb1042423ff7ff72ba2c31f" - [[package]] name = "wasi" version = "0.11.0+wasi-snapshot-preview1" @@ -5729,9 +5890,9 @@ checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423" [[package]] name = "wasm-bindgen" -version = "0.2.87" +version = "0.2.89" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7706a72ab36d8cb1f80ffbf0e071533974a60d0a308d01a5d0375bf60499a342" +checksum = "0ed0d4f68a3015cc185aff4db9506a015f4b96f95303897bfa23f846db54064e" dependencies = [ "cfg-if", "wasm-bindgen-macro", @@ -5739,24 +5900,24 @@ dependencies = [ [[package]] name = "wasm-bindgen-backend" -version = "0.2.87" +version = "0.2.89" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "5ef2b6d3c510e9625e5fe6f509ab07d66a760f0885d858736483c32ed7809abd" +checksum = "1b56f625e64f3a1084ded111c4d5f477df9f8c92df113852fa5a374dbda78826" dependencies = [ "bumpalo", "log", "once_cell", "proc-macro2", "quote", - "syn 2.0.29", + "syn 2.0.40", "wasm-bindgen-shared", ] [[package]] name = "wasm-bindgen-futures" -version = "0.4.37" +version = "0.4.39" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c02dbc21516f9f1f04f187958890d7e6026df8d16540b7ad9492bc34a67cea03" +checksum = "ac36a15a220124ac510204aec1c3e5db8a22ab06fd6706d881dc6149f8ed9a12" dependencies = [ "cfg-if", "js-sys", @@ -5766,9 +5927,9 @@ dependencies = [ [[package]] name = "wasm-bindgen-macro" -version = "0.2.87" +version = "0.2.89" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dee495e55982a3bd48105a7b947fd2a9b4a8ae3010041b9e0faab3f9cd028f1d" +checksum = "0162dbf37223cd2afce98f3d0785506dcb8d266223983e4b5b525859e6e182b2" dependencies = [ "quote", "wasm-bindgen-macro-support", @@ -5776,22 +5937,22 @@ dependencies = [ [[package]] name = "wasm-bindgen-macro-support" -version = "0.2.87" +version = "0.2.89" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "54681b18a46765f095758388f2d0cf16eb8d4169b639ab575a8f5693af210c7b" +checksum = "f0eb82fcb7930ae6219a7ecfd55b217f5f0893484b7a13022ebb2b2bf20b5283" dependencies = [ "proc-macro2", "quote", - "syn 2.0.29", + "syn 2.0.40", "wasm-bindgen-backend", "wasm-bindgen-shared", ] [[package]] name = "wasm-bindgen-shared" -version = "0.2.87" +version = "0.2.89" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ca6ad05a4870b2bf5fe995117d3728437bd27d7cd5f06f13c17443ef369775a1" +checksum = "7ab9b36309365056cd639da3134bf87fa8f3d86008abf99e612384a6eecd459f" [[package]] name = "wasm-streams" @@ -5808,9 +5969,9 @@ dependencies = [ [[package]] name = "web-sys" -version = "0.3.64" +version = 
"0.3.66" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9b85cbef8c220a6abc02aefd892dfc0fc23afb1c6a426316ec33253a3877249b" +checksum = "50c24a44ec86bb68fbecd1b3efed7e85ea5621b39b35ef2766b66cd984f8010f" dependencies = [ "js-sys", "wasm-bindgen", @@ -5818,12 +5979,12 @@ dependencies = [ [[package]] name = "webpki" -version = "0.22.0" +version = "0.22.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f095d78192e208183081cc07bc5515ef55216397af48b873e5edcd72637fa1bd" +checksum = "ed63aea5ce73d0ff405984102c42de94fc55a6b75765d621c65262469b3c9b53" dependencies = [ - "ring", - "untrusted", + "ring 0.17.7", + "untrusted 0.9.0", ] [[package]] @@ -5837,19 +5998,20 @@ dependencies = [ [[package]] name = "webpki-roots" -version = "0.25.2" +version = "0.25.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "14247bb57be4f377dfb94c72830b8ce8fc6beac03cf4bf7b9732eadd414123fc" +checksum = "1778a42e8b3b90bff8d0f5032bf22250792889a5cdc752aa0020c84abe3aaf10" [[package]] name = "which" -version = "4.4.0" +version = "4.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2441c784c52b289a054b7201fc93253e288f094e2f4be9058343127c4226a269" +checksum = "87ba24419a2078cd2b0f2ede2691b6c66d8e47836da3b6db8265ebad47afbfc7" dependencies = [ "either", - "libc", + "home", "once_cell", + "rustix", ] [[package]] @@ -5880,9 +6042,9 @@ checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6" [[package]] name = "winapi-util" -version = "0.1.5" +version = "0.1.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "70ec6ce85bb158151cae5e5c87f95a8e97d2c0c4b001223f33a334e3ce5de178" +checksum = "f29e6f9198ba0d26b4c9f07dbe6f9ed633e1f3d5b8b414090084349e46a52596" dependencies = [ "winapi", ] @@ -5894,10 +6056,10 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" [[package]] -name = "windows" -version = "0.48.0" +name = "windows-core" +version = "0.51.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e686886bc078bc1b0b600cac0147aadb815089b6e4da64016cbd754b6342700f" +checksum = "f1f8cf84f35d2db49a46868f947758c7a1138116f7fac3bc844f43ade1292e64" dependencies = [ "windows-targets 0.48.5", ] @@ -5920,6 +6082,15 @@ dependencies = [ "windows-targets 0.48.5", ] +[[package]] +name = "windows-sys" +version = "0.52.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "282be5f36a8ce781fad8c8ae18fa3f9beff57ec1b52cb3de0789201425d9a33d" +dependencies = [ + "windows-targets 0.52.0", +] + [[package]] name = "windows-targets" version = "0.42.2" @@ -5950,6 +6121,21 @@ dependencies = [ "windows_x86_64_msvc 0.48.5", ] +[[package]] +name = "windows-targets" +version = "0.52.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8a18201040b24831fbb9e4eb208f8892e1f50a37feb53cc7ff887feb8f50e7cd" +dependencies = [ + "windows_aarch64_gnullvm 0.52.0", + "windows_aarch64_msvc 0.52.0", + "windows_i686_gnu 0.52.0", + "windows_i686_msvc 0.52.0", + "windows_x86_64_gnu 0.52.0", + "windows_x86_64_gnullvm 0.52.0", + "windows_x86_64_msvc 0.52.0", +] + [[package]] name = "windows_aarch64_gnullvm" version = "0.42.2" @@ -5962,6 +6148,12 @@ version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2b38e32f0abccf9987a4e3079dfb67dcd799fb61361e53e2882c3cbaf0d905d8" +[[package]] +name = "windows_aarch64_gnullvm" +version = "0.52.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cb7764e35d4db8a7921e09562a0304bf2f93e0a51bfccee0bd0bb0b666b015ea" + [[package]] name = "windows_aarch64_msvc" version = "0.42.2" @@ -5974,6 +6166,12 @@ version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"dc35310971f3b2dbbf3f0690a219f40e2d9afcf64f9ab7cc1be722937c26b4bc" +[[package]] +name = "windows_aarch64_msvc" +version = "0.52.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bbaa0368d4f1d2aaefc55b6fcfee13f41544ddf36801e793edbbfd7d7df075ef" + [[package]] name = "windows_i686_gnu" version = "0.42.2" @@ -5986,6 +6184,12 @@ version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a75915e7def60c94dcef72200b9a8e58e5091744960da64ec734a6c6e9b3743e" +[[package]] +name = "windows_i686_gnu" +version = "0.52.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a28637cb1fa3560a16915793afb20081aba2c92ee8af57b4d5f28e4b3e7df313" + [[package]] name = "windows_i686_msvc" version = "0.42.2" @@ -5998,6 +6202,12 @@ version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8f55c233f70c4b27f66c523580f78f1004e8b5a8b659e05a4eb49d4166cca406" +[[package]] +name = "windows_i686_msvc" +version = "0.52.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ffe5e8e31046ce6230cc7215707b816e339ff4d4d67c65dffa206fd0f7aa7b9a" + [[package]] name = "windows_x86_64_gnu" version = "0.42.2" @@ -6010,6 +6220,12 @@ version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "53d40abd2583d23e4718fddf1ebec84dbff8381c07cae67ff7768bbf19c6718e" +[[package]] +name = "windows_x86_64_gnu" +version = "0.52.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3d6fa32db2bc4a2f5abeacf2b69f7992cd09dca97498da74a151a3132c26befd" + [[package]] name = "windows_x86_64_gnullvm" version = "0.42.2" @@ -6022,6 +6238,12 @@ version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0b7b52767868a23d5bab768e390dc5f5c55825b6d30b86c844ff2dc7414044cc" +[[package]] +name = "windows_x86_64_gnullvm" +version = "0.52.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "1a657e1e9d3f514745a572a6846d3c7aa7dbe1658c056ed9c3344c4109a6949e" + [[package]] name = "windows_x86_64_msvc" version = "0.42.2" @@ -6034,11 +6256,17 @@ version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ed94fce61571a4006852b7389a063ab983c02eb1bb37b47f8272ce92d06d9538" +[[package]] +name = "windows_x86_64_msvc" +version = "0.52.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dff9641d1cd4be8d1a070daf9e3773c5f67e78b4d9d42263020c057706765c04" + [[package]] name = "winnow" -version = "0.5.15" +version = "0.5.28" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7c2e3184b9c4e92ad5167ca73039d0c42476302ab603e2fec4487511f38ccefc" +checksum = "6c830786f7720c2fd27a1a0e27a709dbd3c4d009b56d098fc742d4f4eab91fe2" dependencies = [ "memchr", ] @@ -6064,11 +6292,13 @@ dependencies = [ [[package]] name = "xattr" -version = "1.0.1" +version = "1.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f4686009f71ff3e5c4dbcf1a282d0a44db3f021ba69350cd42086b3e5f1c6985" +checksum = "d367426ae76bdfce3d8eaea6e94422afd6def7d46f9c89e2980309115b3c2c41" dependencies = [ "libc", + "linux-raw-sys", + "rustix", ] [[package]] @@ -6081,7 +6311,7 @@ dependencies = [ "lib0", "thiserror", "tokio", - "yrs", + "yrs 0.17.2", ] [[package]] @@ -6099,16 +6329,51 @@ dependencies = [ "atomic_refcell", "lib0", "rand 0.7.3", - "smallstr", + "smallstr 0.2.0", + "smallvec", + "thiserror", +] + +[[package]] +name = "yrs" +version = "0.17.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "68aea14c6c33f2edd8a5ff9415360cfa5b98d90cce30c5ee3be59a8419fb15a9" +dependencies = [ + "atomic_refcell", + "rand 0.7.3", + "serde", + "serde_json", + "smallstr 0.3.0", "smallvec", "thiserror", ] +[[package]] +name = "zerocopy" +version = "0.7.30" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "306dca4455518f1f31635ec308b6b3e4eb1b11758cefafc782827d0aa7acb5c7" +dependencies = [ + "zerocopy-derive", +] + +[[package]] +name = "zerocopy-derive" +version = "0.7.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "be912bf68235a88fbefd1b73415cb218405958d1655b2ece9035a19920bdf6ba" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.40", +] + [[package]] name = "zeroize" -version = "1.6.0" +version = "1.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2a0956f1ba7c7909bfb66c2e9e4124ab6f6482560f6628b5aaeba39207c9aad9" +checksum = "525b4ec142c6b68a2d10f01f7bbf6755599ca3f81ea53b8431b7dd348f5fdb2d" dependencies = [ "zeroize_derive", ] @@ -6121,5 +6386,5 @@ checksum = "ce36e65b0d2999d2aafac989fb249189a141aee1f53c612c1f37d72631959f69" dependencies = [ "proc-macro2", "quote", - "syn 2.0.29", + "syn 2.0.40", ] diff --git a/third-party/rust/Cargo.toml b/third-party/rust/Cargo.toml index 8e09e8cdac..ae489d63f6 100644 --- a/third-party/rust/Cargo.toml +++ b/third-party/rust/Cargo.toml @@ -33,9 +33,9 @@ clap = { version = "4.2.7", features = ["derive", "color", "env", "wrap_help"] } color-eyre = "0.6.2" colored = "2.0.4" comfy-table = { version = "7.0.1", features = [ - "crossterm", - "tty", - "custom_styling", + "crossterm", + "tty", + "custom_styling", ] } config = { version = "0.13.3", default-features = false, features = ["toml"] } console = "0.15.7" @@ -55,13 +55,13 @@ futures-lite = "1.13.0" hex = "0.4.3" http = "0.2.9" hyper = { version = "0.14.26", features = [ - "client", - "http1", - "runtime", - "server", + "client", + "http1", + "runtime", + "server", ] } hyperlocal = { version = "0.8.0", default-features = false, features = [ - "client", + "client", ] } iftree = "1.0.4" indicatif = "0.17.5" @@ -77,8 +77,8 @@ num_cpus = "1.15.0" once_cell = "1.17.1" open = "5.0.0" opentelemetry = { version = "~0.18.0", features = [ - "rt-tokio", 
- "trace", + "rt-tokio", + "trace", ] } # pinned, pending new release of tracing-opentelemetry, 0.18 opentelemetry-otlp = "~0.11.0" # pinned, pending new release of tracing-opentelemetry, post 0.18 opentelemetry-semantic-conventions = "~0.10.0" # pinned, pending new release of tracing-opentelemetry, post 0.18 @@ -98,20 +98,20 @@ refinery = { version = "0.8.9", features = ["tokio-postgres"] } regex = "1.8.1" remain = "0.2.8" reqwest = { version = "0.11.17", default-features = false, features = [ - "rustls-tls", - "json", - "multipart", + "rustls-tls", + "json", + "multipart", ] } rust-s3 = { version = "0.33.0", default-features = false, features = [ - "tokio-rustls-tls", + "tokio-rustls-tls", ] } rustls = "0.21.6" # pinned, pending update from tokio-rustls for async-nats sea-orm = { version = "0.11", features = [ - "sqlx-postgres", - "runtime-tokio-rustls", - "macros", - "with-chrono", - "debug-print", + "sqlx-postgres", + "runtime-tokio-rustls", + "macros", + "with-chrono", + "debug-print", ] } self-replace = "1.3.5" serde = { version = "1.0.160", features = ["derive", "rc"] } @@ -127,14 +127,14 @@ syn = { version = "2.0.15", features = ["full", "extra-traits"] } tar = "0.4.38" tempfile = "3.5.0" test-log = { version = "0.2.11", default-features = false, features = [ - "trace", + "trace", ] } thiserror = "1.0.40" tokio = { version = "1.28.0", features = ["full"] } tokio-postgres = { version = "0.7.8", features = [ - "runtime", - "with-chrono-0_4", - "with-serde_json-1", + "runtime", + "with-chrono-0_4", + "with-serde_json-1", ] } tokio-serde = { version = "0.8.0", features = ["json"] } tokio-stream = "0.1.14" diff --git a/third-party/rust/fixups/semver/fixups.toml b/third-party/rust/fixups/semver/fixups.toml new file mode 100644 index 0000000000..46dfe3a3ac --- /dev/null +++ b/third-party/rust/fixups/semver/fixups.toml @@ -0,0 +1,4 @@ +cargo_env = true + +[[buildscript]] +[buildscript.rustc_flags] From 48a7e59f499775956ba70a72dca75bbf1e1ab760 Mon Sep 17 00:00:00 
2001 From: Jacob Helwig Date: Thu, 31 Aug 2023 07:53:46 -0700 Subject: [PATCH 21/92] Use consistent naming for EdgeWeightKind variants The `EdgeWeightKind` variants aside from `Uses` were all using more imperative style naming. This renames `Uses` to `Use` to more closely match that style of naming. --- lib/dal/src/workspace_snapshot/edge_weight.rs | 3 +- lib/dal/src/workspace_snapshot/graph.rs | 224 +++++++++--------- 2 files changed, 114 insertions(+), 113 deletions(-) diff --git a/lib/dal/src/workspace_snapshot/edge_weight.rs b/lib/dal/src/workspace_snapshot/edge_weight.rs index 92eac13e96..8c24284eee 100644 --- a/lib/dal/src/workspace_snapshot/edge_weight.rs +++ b/lib/dal/src/workspace_snapshot/edge_weight.rs @@ -15,6 +15,7 @@ pub enum EdgeWeightError { pub type EdgeWeightResult = Result; +#[remain::sorted] #[derive(Default, Debug, Serialize, Deserialize, Clone, Copy, PartialEq, Eq, Hash)] pub enum EdgeWeightKind { /// Used to record the order that the elements of a container should be presented in. @@ -23,7 +24,7 @@ pub enum EdgeWeightKind { /// Schema variants "use" props. Props "use" functions, and other props. Modules /// "use" functions, schemas, and eventually(?) components. 
#[default] - Uses, + Use, } #[derive(Debug, Serialize, Deserialize, Clone, PartialEq, Eq)] diff --git a/lib/dal/src/workspace_snapshot/graph.rs b/lib/dal/src/workspace_snapshot/graph.rs index 195c84c8a9..62a905c73b 100644 --- a/lib/dal/src/workspace_snapshot/graph.rs +++ b/lib/dal/src/workspace_snapshot/graph.rs @@ -1339,7 +1339,7 @@ mod test { .add_edge( change_set, graph.root_index, - EdgeWeight::new(change_set, EdgeWeightKind::Uses) + EdgeWeight::new(change_set, EdgeWeightKind::Use) .expect("Unable to create EdgeWeight"), component_index, ) @@ -1348,7 +1348,7 @@ mod test { .add_edge( change_set, graph.root_index, - EdgeWeight::new(change_set, EdgeWeightKind::Uses) + EdgeWeight::new(change_set, EdgeWeightKind::Use) .expect("Unable to create EdgeWeight"), schema_index, ) @@ -1359,7 +1359,7 @@ mod test { graph .get_node_index_by_id(schema_id) .expect("Cannot get NodeIndex"), - EdgeWeight::new(change_set, EdgeWeightKind::Uses) + EdgeWeight::new(change_set, EdgeWeightKind::Use) .expect("Unable to create EdgeWeight"), schema_variant_index, ) @@ -1370,7 +1370,7 @@ mod test { graph .get_node_index_by_id(component_id) .expect("Cannot get NodeIndex"), - EdgeWeight::new(change_set, EdgeWeightKind::Uses) + EdgeWeight::new(change_set, EdgeWeightKind::Use) .expect("Unable to create EdgeWeight"), graph .get_node_index_by_id(schema_variant_id) @@ -1409,7 +1409,7 @@ mod test { .add_edge( change_set, graph.root_index, - EdgeWeight::new(change_set, EdgeWeightKind::Uses) + EdgeWeight::new(change_set, EdgeWeightKind::Use) .expect("Unable to create EdgeWeight"), func_index, ) @@ -1420,7 +1420,7 @@ mod test { graph .get_node_index_by_id(schema_variant_id) .expect("Cannot get NodeIndex"), - EdgeWeight::new(change_set, EdgeWeightKind::Uses) + EdgeWeight::new(change_set, EdgeWeightKind::Use) .expect("Unable to create EdgeWeight"), prop_index, ) @@ -1431,7 +1431,7 @@ mod test { graph .get_node_index_by_id(prop_id) .expect("Cannot get NodeIndex"), - EdgeWeight::new(change_set, 
EdgeWeightKind::Uses) + EdgeWeight::new(change_set, EdgeWeightKind::Use) .expect("Unable to create EdgeWeight"), graph .get_node_index_by_id(func_id) @@ -1493,7 +1493,7 @@ mod test { .add_edge( change_set, graph.root_index, - EdgeWeight::new(change_set, EdgeWeightKind::Uses) + EdgeWeight::new(change_set, EdgeWeightKind::Use) .expect("Unable to create EdgeWeight"), initial_component_node_index, ) @@ -1502,7 +1502,7 @@ mod test { .add_edge( change_set, graph.root_index, - EdgeWeight::new(change_set, EdgeWeightKind::Uses) + EdgeWeight::new(change_set, EdgeWeightKind::Use) .expect("Unable to create EdgeWeight"), initial_schema_node_index, ) @@ -1513,7 +1513,7 @@ mod test { graph .get_node_index_by_id(schema_id) .expect("Cannot find NodeIndex"), - EdgeWeight::new(change_set, EdgeWeightKind::Uses) + EdgeWeight::new(change_set, EdgeWeightKind::Use) .expect("Unable to create EdgeWeight"), initial_schema_variant_node_index, ) @@ -1524,7 +1524,7 @@ mod test { graph .get_node_index_by_id(component_id) .expect("Cannot find NodeIndex"), - EdgeWeight::new(change_set, EdgeWeightKind::Uses) + EdgeWeight::new(change_set, EdgeWeightKind::Use) .expect("Unable to create EdgeWeight"), graph .get_node_index_by_id(schema_variant_id) @@ -1541,7 +1541,7 @@ mod test { graph .get_node_index_by_id(schema_variant_id) .expect("Cannot find NodeIndex"), - EdgeWeight::new(change_set, EdgeWeightKind::Uses) + EdgeWeight::new(change_set, EdgeWeightKind::Use) .expect("Unable to create EdgeWeight"), graph .get_node_index_by_id(component_id) @@ -1599,7 +1599,7 @@ mod test { .add_edge( change_set, graph.root_index, - EdgeWeight::new(change_set, EdgeWeightKind::Uses) + EdgeWeight::new(change_set, EdgeWeightKind::Use) .expect("Unable to create EdgeWeight"), component_index, ) @@ -1608,7 +1608,7 @@ mod test { .add_edge( change_set, graph.root_index, - EdgeWeight::new(change_set, EdgeWeightKind::Uses) + EdgeWeight::new(change_set, EdgeWeightKind::Use) .expect("Unable to create EdgeWeight"), schema_index, ) 
@@ -1619,7 +1619,7 @@ mod test { graph .get_node_index_by_id(schema_id) .expect("Cannot get NodeIndex"), - EdgeWeight::new(change_set, EdgeWeightKind::Uses) + EdgeWeight::new(change_set, EdgeWeightKind::Use) .expect("Unable to create EdgeWeight"), schema_variant_index, ) @@ -1630,7 +1630,7 @@ mod test { graph .get_node_index_by_id(component_id) .expect("Cannot get NodeIndex"), - EdgeWeight::new(change_set, EdgeWeightKind::Uses) + EdgeWeight::new(change_set, EdgeWeightKind::Use) .expect("Unable to create EdgeWeight"), graph .get_node_index_by_id(schema_variant_id) @@ -1751,7 +1751,7 @@ mod test { .add_edge( initial_change_set, initial_graph.root_index, - EdgeWeight::new(initial_change_set, EdgeWeightKind::Uses) + EdgeWeight::new(initial_change_set, EdgeWeightKind::Use) .expect("Unable to create EdgeWeight"), schema_index, ) @@ -1762,7 +1762,7 @@ mod test { initial_graph .get_node_index_by_id(schema_id) .expect("Cannot get NodeIndex"), - EdgeWeight::new(initial_change_set, EdgeWeightKind::Uses) + EdgeWeight::new(initial_change_set, EdgeWeightKind::Use) .expect("Unable to create EdgeWeight"), schema_variant_index, ) @@ -1791,7 +1791,7 @@ mod test { .add_edge( new_change_set, new_graph.root_index, - EdgeWeight::new(new_change_set, EdgeWeightKind::Uses) + EdgeWeight::new(new_change_set, EdgeWeightKind::Use) .expect("Unable to create EdgeWeight"), component_index, ) @@ -1802,7 +1802,7 @@ mod test { new_graph .get_node_index_by_id(component_id) .expect("Cannot get NodeIndex"), - EdgeWeight::new(new_change_set, EdgeWeightKind::Uses) + EdgeWeight::new(new_change_set, EdgeWeightKind::Use) .expect("Unable to create EdgeWeight"), new_graph .get_node_index_by_id(schema_variant_id) @@ -1858,7 +1858,7 @@ mod test { .add_edge( base_change_set, base_graph.root_index, - EdgeWeight::new(base_change_set, EdgeWeightKind::Uses) + EdgeWeight::new(base_change_set, EdgeWeightKind::Use) .expect("Unable to create EdgeWeight"), schema_index, ) @@ -1869,7 +1869,7 @@ mod test { base_graph 
.get_node_index_by_id(schema_id) .expect("Cannot get NodeIndex"), - EdgeWeight::new(base_change_set, EdgeWeightKind::Uses) + EdgeWeight::new(base_change_set, EdgeWeightKind::Use) .expect("Unable to create EdgeWeight"), schema_variant_index, ) @@ -1899,7 +1899,7 @@ mod test { .add_edge( base_change_set, base_graph.root_index, - EdgeWeight::new(base_change_set, EdgeWeightKind::Uses) + EdgeWeight::new(base_change_set, EdgeWeightKind::Use) .expect("Unable to create EdgeWeight"), new_onto_component_index, ) @@ -1910,7 +1910,7 @@ mod test { base_graph .get_node_index_by_id(new_onto_component_id) .expect("Unable to get NodeIndex"), - EdgeWeight::new(base_change_set, EdgeWeightKind::Uses) + EdgeWeight::new(base_change_set, EdgeWeightKind::Use) .expect("Unable to create EdgeWeight"), base_graph .get_node_index_by_id(schema_variant_id) @@ -1938,7 +1938,7 @@ mod test { }] => { assert_eq!(new_graph.root_index, *source); assert_eq!(new_onto_component_index, *destination); - assert_eq!(EdgeWeightKind::Uses, edge_weight.kind()); + assert_eq!(EdgeWeightKind::Use, edge_weight.kind()); } other => panic!("Unexpected updates: {:?}", other), } @@ -1982,7 +1982,7 @@ mod test { .add_edge( base_change_set, base_graph.root_index, - EdgeWeight::new(base_change_set, EdgeWeightKind::Uses) + EdgeWeight::new(base_change_set, EdgeWeightKind::Use) .expect("Unable to create EdgeWeight"), schema_index, ) @@ -1993,7 +1993,7 @@ mod test { base_graph .get_node_index_by_id(schema_id) .expect("Cannot get NodeIndex"), - EdgeWeight::new(base_change_set, EdgeWeightKind::Uses) + EdgeWeight::new(base_change_set, EdgeWeightKind::Use) .expect("Unable to create EdgeWeight"), schema_variant_index, ) @@ -2023,7 +2023,7 @@ mod test { .add_edge( new_change_set, new_graph.root_index, - EdgeWeight::new(new_change_set, EdgeWeightKind::Uses) + EdgeWeight::new(new_change_set, EdgeWeightKind::Use) .expect("Unable to create EdgeWeight"), component_index, ) @@ -2034,7 +2034,7 @@ mod test { new_graph 
.get_node_index_by_id(component_id) .expect("Cannot get NodeIndex"), - EdgeWeight::new(new_change_set, EdgeWeightKind::Uses) + EdgeWeight::new(new_change_set, EdgeWeightKind::Use) .expect("Unable to create EdgeWeight"), new_graph .get_node_index_by_id(schema_variant_id) @@ -2062,7 +2062,7 @@ mod test { .add_edge( base_change_set, base_graph.root_index, - EdgeWeight::new(base_change_set, EdgeWeightKind::Uses) + EdgeWeight::new(base_change_set, EdgeWeightKind::Use) .expect("Unable to create EdgeWeight"), new_onto_component_index, ) @@ -2073,7 +2073,7 @@ mod test { base_graph .get_node_index_by_id(new_onto_component_id) .expect("Unable to get NodeIndex"), - EdgeWeight::new(base_change_set, EdgeWeightKind::Uses) + EdgeWeight::new(base_change_set, EdgeWeightKind::Use) .expect("Unable to create EdgeWeight"), base_graph .get_node_index_by_id(schema_variant_id) @@ -2101,7 +2101,7 @@ mod test { }] => { assert_eq!(new_graph.root_index, *source); assert_eq!(new_onto_component_index, *destination); - assert_eq!(EdgeWeightKind::Uses, edge_weight.kind()); + assert_eq!(EdgeWeightKind::Use, edge_weight.kind()); } other => panic!("Unexpected updates: {:?}", other), } @@ -2145,7 +2145,7 @@ mod test { .add_edge( base_change_set, base_graph.root_index, - EdgeWeight::new(base_change_set, EdgeWeightKind::Uses) + EdgeWeight::new(base_change_set, EdgeWeightKind::Use) .expect("Unable to create EdgeWeight"), schema_index, ) @@ -2156,7 +2156,7 @@ mod test { base_graph .get_node_index_by_id(schema_id) .expect("Cannot get NodeIndex"), - EdgeWeight::new(base_change_set, EdgeWeightKind::Uses) + EdgeWeight::new(base_change_set, EdgeWeightKind::Use) .expect("Unable to create EdgeWeight"), schema_variant_index, ) @@ -2179,7 +2179,7 @@ mod test { .add_edge( base_change_set, base_graph.root_index, - EdgeWeight::new(base_change_set, EdgeWeightKind::Uses) + EdgeWeight::new(base_change_set, EdgeWeightKind::Use) .expect("Unable to create EdgeWeight"), component_index, ) @@ -2190,7 +2190,7 @@ mod test { 
base_graph .get_node_index_by_id(component_id) .expect("Cannot get NodeIndex"), - EdgeWeight::new(base_change_set, EdgeWeightKind::Uses) + EdgeWeight::new(base_change_set, EdgeWeightKind::Use) .expect("Unable to create EdgeWeight"), base_graph .get_node_index_by_id(schema_variant_id) @@ -2286,7 +2286,7 @@ mod test { .add_edge( base_change_set, base_graph.root_index, - EdgeWeight::new(base_change_set, EdgeWeightKind::Uses) + EdgeWeight::new(base_change_set, EdgeWeightKind::Use) .expect("Unable to create EdgeWeight"), schema_index, ) @@ -2297,7 +2297,7 @@ mod test { base_graph .get_node_index_by_id(schema_id) .expect("Cannot get NodeIndex"), - EdgeWeight::new(base_change_set, EdgeWeightKind::Uses) + EdgeWeight::new(base_change_set, EdgeWeightKind::Use) .expect("Unable to create EdgeWeight"), schema_variant_index, ) @@ -2320,7 +2320,7 @@ mod test { .add_edge( base_change_set, base_graph.root_index, - EdgeWeight::new(base_change_set, EdgeWeightKind::Uses) + EdgeWeight::new(base_change_set, EdgeWeightKind::Use) .expect("Unable to create EdgeWeight"), component_index, ) @@ -2331,7 +2331,7 @@ mod test { base_graph .get_node_index_by_id(component_id) .expect("Cannot get NodeIndex"), - EdgeWeight::new(base_change_set, EdgeWeightKind::Uses) + EdgeWeight::new(base_change_set, EdgeWeightKind::Use) .expect("Unable to create EdgeWeight"), base_graph .get_node_index_by_id(schema_variant_id) @@ -2354,7 +2354,7 @@ mod test { base_graph .get_node_index_by_id(component_id) .expect("Unable to get NodeIndex"), - EdgeWeightKind::Uses, + EdgeWeightKind::Use, ) .expect("Unable to remove Component A"); @@ -2414,7 +2414,7 @@ mod test { .add_edge( base_change_set, base_graph.root_index, - EdgeWeight::new(base_change_set, EdgeWeightKind::Uses) + EdgeWeight::new(base_change_set, EdgeWeightKind::Use) .expect("Unable to create EdgeWeight"), docker_image_schema_index, ) @@ -2440,7 +2440,7 @@ mod test { base_graph .get_node_index_by_id(docker_image_schema_id) .expect("Cannot get NodeIndex"), - 
EdgeWeight::new(base_change_set, EdgeWeightKind::Uses) + EdgeWeight::new(base_change_set, EdgeWeightKind::Use) .expect("Unable to create EdgeWeight"), docker_image_schema_variant_index, ) @@ -2464,7 +2464,7 @@ mod test { .add_edge( base_change_set, base_graph.root_index, - EdgeWeight::new(base_change_set, EdgeWeightKind::Uses) + EdgeWeight::new(base_change_set, EdgeWeightKind::Use) .expect("Unable to create EdgeWeight"), nginx_docker_image_component_index, ) @@ -2475,7 +2475,7 @@ mod test { base_graph .get_node_index_by_id(nginx_docker_image_component_id) .expect("Cannot get NodeIndex"), - EdgeWeight::new(base_change_set, EdgeWeightKind::Uses) + EdgeWeight::new(base_change_set, EdgeWeightKind::Use) .expect("Unable to create EdgeWeight"), base_graph .get_node_index_by_id(docker_image_schema_variant_id) @@ -2501,7 +2501,7 @@ mod test { .add_edge( base_change_set, base_graph.root_index, - EdgeWeight::new(base_change_set, EdgeWeightKind::Uses) + EdgeWeight::new(base_change_set, EdgeWeightKind::Use) .expect("Unable to create EdgeWeight"), alpine_component_index, ) @@ -2512,7 +2512,7 @@ mod test { base_graph .get_node_index_by_id(alpine_component_id) .expect("Cannot get NodeIndex"), - EdgeWeight::new(base_change_set, EdgeWeightKind::Uses) + EdgeWeight::new(base_change_set, EdgeWeightKind::Use) .expect("Unable to create EdgeWeight"), base_graph .get_node_index_by_id(docker_image_schema_variant_id) @@ -2538,7 +2538,7 @@ mod test { .add_edge( base_change_set, base_graph.root_index, - EdgeWeight::new(base_change_set, EdgeWeightKind::Uses) + EdgeWeight::new(base_change_set, EdgeWeightKind::Use) .expect("Unable to create EdgeWeight"), butane_schema_index, ) @@ -2564,7 +2564,7 @@ mod test { base_graph .get_node_index_by_id(butane_schema_id) .expect("Cannot get NodeIndex"), - EdgeWeight::new(base_change_set, EdgeWeightKind::Uses) + EdgeWeight::new(base_change_set, EdgeWeightKind::Use) .expect("Unable to create EdgeWeight"), butane_schema_variant_index, ) @@ -2588,7 +2588,7 @@ 
mod test { .add_edge( base_change_set, base_graph.root_index, - EdgeWeight::new(base_change_set, EdgeWeightKind::Uses) + EdgeWeight::new(base_change_set, EdgeWeightKind::Use) .expect("Unable to create EdgeWeight"), nginx_butane_node_index, ) @@ -2599,7 +2599,7 @@ mod test { base_graph .get_node_index_by_id(nginx_butane_component_id) .expect("Cannot get NodeIndex"), - EdgeWeight::new(base_change_set, EdgeWeightKind::Uses) + EdgeWeight::new(base_change_set, EdgeWeightKind::Use) .expect("Unable to create EdgeWeight"), base_graph .get_node_index_by_id(butane_schema_variant_id) @@ -2624,7 +2624,7 @@ mod test { base_graph .get_node_index_by_id(nginx_butane_component_id) .expect("Unable to get NodeIndex"), - EdgeWeightKind::Uses, + EdgeWeightKind::Use, ) .expect("Unable to update the component"); new_graph @@ -2740,7 +2740,7 @@ mod test { .add_edge( change_set, graph.root_index, - EdgeWeight::new(change_set, EdgeWeightKind::Uses) + EdgeWeight::new(change_set, EdgeWeightKind::Use) .expect("Unable to create EdgeWeight"), schema_index, ) @@ -2751,7 +2751,7 @@ mod test { graph .get_node_index_by_id(schema_id) .expect("Cannot get NodeIndex"), - EdgeWeight::new(change_set, EdgeWeightKind::Uses) + EdgeWeight::new(change_set, EdgeWeightKind::Use) .expect("Unable to create EdgeWeight"), schema_variant_index, ) @@ -2774,7 +2774,7 @@ mod test { .add_edge( change_set, graph.root_index, - EdgeWeight::new(change_set, EdgeWeightKind::Uses) + EdgeWeight::new(change_set, EdgeWeightKind::Use) .expect("Unable to create EdgeWeight"), func_index, ) @@ -2800,7 +2800,7 @@ mod test { graph .get_node_index_by_id(schema_variant_id) .expect("Cannot get NodeIndex"), - EdgeWeight::new(change_set, EdgeWeightKind::Uses) + EdgeWeight::new(change_set, EdgeWeightKind::Use) .expect("Unable to create EdgeWeight"), prop_index, ) @@ -2810,7 +2810,7 @@ mod test { graph .get_node_index_by_id(prop_id) .expect("Cannot get NodeIndex"), - EdgeWeight::new(change_set, EdgeWeightKind::Uses) + 
EdgeWeight::new(change_set, EdgeWeightKind::Use) .expect("Unable to create EdgeWeight"), graph .get_node_index_by_id(func_id) @@ -2840,7 +2840,7 @@ mod test { graph .get_node_index_by_id(prop_id) .expect("Unable to get NodeWeight for prop"), - EdgeWeight::new(change_set, EdgeWeightKind::Uses) + EdgeWeight::new(change_set, EdgeWeightKind::Use) .expect("Unable to create uses edge weight"), ordered_prop_1_index, ) @@ -2866,7 +2866,7 @@ mod test { graph .get_node_index_by_id(prop_id) .expect("Unable to get NodeWeight for prop"), - EdgeWeight::new(change_set, EdgeWeightKind::Uses) + EdgeWeight::new(change_set, EdgeWeightKind::Use) .expect("Unable to create uses edge weight"), ordered_prop_2_index, ) @@ -2892,7 +2892,7 @@ mod test { graph .get_node_index_by_id(prop_id) .expect("Unable to get NodeWeight for prop"), - EdgeWeight::new(change_set, EdgeWeightKind::Uses) + EdgeWeight::new(change_set, EdgeWeightKind::Use) .expect("Unable to create uses edge weight"), ordered_prop_3_index, ) @@ -2954,7 +2954,7 @@ mod test { .add_edge( change_set, graph.root_index, - EdgeWeight::new(change_set, EdgeWeightKind::Uses) + EdgeWeight::new(change_set, EdgeWeightKind::Use) .expect("Unable to create EdgeWeight"), schema_index, ) @@ -2965,7 +2965,7 @@ mod test { graph .get_node_index_by_id(schema_id) .expect("Cannot get NodeIndex"), - EdgeWeight::new(change_set, EdgeWeightKind::Uses) + EdgeWeight::new(change_set, EdgeWeightKind::Use) .expect("Unable to create EdgeWeight"), schema_variant_index, ) @@ -2988,7 +2988,7 @@ mod test { .add_edge( change_set, graph.root_index, - EdgeWeight::new(change_set, EdgeWeightKind::Uses) + EdgeWeight::new(change_set, EdgeWeightKind::Use) .expect("Unable to create EdgeWeight"), func_index, ) @@ -3014,7 +3014,7 @@ mod test { graph .get_node_index_by_id(schema_variant_id) .expect("Cannot get NodeIndex"), - EdgeWeight::new(change_set, EdgeWeightKind::Uses) + EdgeWeight::new(change_set, EdgeWeightKind::Use) .expect("Unable to create EdgeWeight"), prop_index, ) 
@@ -3024,7 +3024,7 @@ mod test { graph .get_node_index_by_id(prop_id) .expect("Cannot get NodeIndex"), - EdgeWeight::new(change_set, EdgeWeightKind::Uses) + EdgeWeight::new(change_set, EdgeWeightKind::Use) .expect("Unable to create EdgeWeight"), graph .get_node_index_by_id(func_id) @@ -3054,7 +3054,7 @@ mod test { graph .get_node_index_by_id(prop_id) .expect("Unable to get NodeWeight for prop"), - EdgeWeight::new(change_set, EdgeWeightKind::Uses) + EdgeWeight::new(change_set, EdgeWeightKind::Use) .expect("Unable to create uses edge weight"), ordered_prop_1_index, ) @@ -3080,7 +3080,7 @@ mod test { graph .get_node_index_by_id(prop_id) .expect("Unable to get NodeWeight for prop"), - EdgeWeight::new(change_set, EdgeWeightKind::Uses) + EdgeWeight::new(change_set, EdgeWeightKind::Use) .expect("Unable to create uses edge weight"), ordered_prop_2_index, ) @@ -3106,7 +3106,7 @@ mod test { graph .get_node_index_by_id(prop_id) .expect("Unable to get NodeWeight for prop"), - EdgeWeight::new(change_set, EdgeWeightKind::Uses) + EdgeWeight::new(change_set, EdgeWeightKind::Use) .expect("Unable to create uses edge weight"), ordered_prop_3_index, ) @@ -3132,7 +3132,7 @@ mod test { graph .get_node_index_by_id(prop_id) .expect("Unable to get NodeWeight for prop"), - EdgeWeight::new(change_set, EdgeWeightKind::Uses) + EdgeWeight::new(change_set, EdgeWeightKind::Use) .expect("Unable to create uses edge weight"), ordered_prop_4_index, ) @@ -3223,7 +3223,7 @@ mod test { .add_edge( change_set, graph.root_index, - EdgeWeight::new(change_set, EdgeWeightKind::Uses) + EdgeWeight::new(change_set, EdgeWeightKind::Use) .expect("Unable to create EdgeWeight"), schema_index, ) @@ -3234,7 +3234,7 @@ mod test { graph .get_node_index_by_id(schema_id) .expect("Cannot get NodeIndex"), - EdgeWeight::new(change_set, EdgeWeightKind::Uses) + EdgeWeight::new(change_set, EdgeWeightKind::Use) .expect("Unable to create EdgeWeight"), schema_variant_index, ) @@ -3257,7 +3257,7 @@ mod test { .add_edge( change_set, 
graph.root_index, - EdgeWeight::new(change_set, EdgeWeightKind::Uses) + EdgeWeight::new(change_set, EdgeWeightKind::Use) .expect("Unable to create EdgeWeight"), func_index, ) @@ -3283,7 +3283,7 @@ mod test { graph .get_node_index_by_id(schema_variant_id) .expect("Cannot get NodeIndex"), - EdgeWeight::new(change_set, EdgeWeightKind::Uses) + EdgeWeight::new(change_set, EdgeWeightKind::Use) .expect("Unable to create EdgeWeight"), prop_index, ) @@ -3293,7 +3293,7 @@ mod test { graph .get_node_index_by_id(prop_id) .expect("Cannot get NodeIndex"), - EdgeWeight::new(change_set, EdgeWeightKind::Uses) + EdgeWeight::new(change_set, EdgeWeightKind::Use) .expect("Unable to create EdgeWeight"), graph .get_node_index_by_id(func_id) @@ -3323,7 +3323,7 @@ mod test { graph .get_node_index_by_id(prop_id) .expect("Unable to get NodeWeight for prop"), - EdgeWeight::new(change_set, EdgeWeightKind::Uses) + EdgeWeight::new(change_set, EdgeWeightKind::Use) .expect("Unable to create uses edge weight"), ordered_prop_1_index, ) @@ -3349,7 +3349,7 @@ mod test { graph .get_node_index_by_id(prop_id) .expect("Unable to get NodeWeight for prop"), - EdgeWeight::new(change_set, EdgeWeightKind::Uses) + EdgeWeight::new(change_set, EdgeWeightKind::Use) .expect("Unable to create uses edge weight"), ordered_prop_2_index, ) @@ -3375,7 +3375,7 @@ mod test { graph .get_node_index_by_id(prop_id) .expect("Unable to get NodeWeight for prop"), - EdgeWeight::new(change_set, EdgeWeightKind::Uses) + EdgeWeight::new(change_set, EdgeWeightKind::Use) .expect("Unable to create uses edge weight"), ordered_prop_3_index, ) @@ -3401,7 +3401,7 @@ mod test { graph .get_node_index_by_id(prop_id) .expect("Unable to get NodeWeight for prop"), - EdgeWeight::new(change_set, EdgeWeightKind::Uses) + EdgeWeight::new(change_set, EdgeWeightKind::Use) .expect("Unable to create uses edge weight"), ordered_prop_4_index, ) @@ -3433,7 +3433,7 @@ mod test { .get_node_index_by_id(prop_id) .expect("Unable to get NodeIndex for prop"), 
ordered_prop_2_index, - EdgeWeightKind::Uses, + EdgeWeightKind::Use, ) .expect("Unable to update order of prop's children"); @@ -3511,7 +3511,7 @@ mod test { .add_edge( initial_change_set, initial_graph.root_index, - EdgeWeight::new(initial_change_set, EdgeWeightKind::Uses) + EdgeWeight::new(initial_change_set, EdgeWeightKind::Use) .expect("Unable to create EdgeWeight"), schema_index, ) @@ -3522,7 +3522,7 @@ mod test { initial_graph .get_node_index_by_id(schema_id) .expect("Cannot get NodeIndex"), - EdgeWeight::new(initial_change_set, EdgeWeightKind::Uses) + EdgeWeight::new(initial_change_set, EdgeWeightKind::Use) .expect("Unable to create EdgeWeight"), schema_variant_index, ) @@ -3550,7 +3550,7 @@ mod test { initial_graph .get_node_index_by_id(schema_variant_id) .expect("Unable to get NodeIndex"), - EdgeWeight::new(initial_change_set, EdgeWeightKind::Uses) + EdgeWeight::new(initial_change_set, EdgeWeightKind::Use) .expect("Unable to create EdgeWeight"), container_prop_index, ) @@ -3577,7 +3577,7 @@ mod test { initial_graph .get_node_index_by_id(container_prop_id) .expect("Unable to get NodeIndex"), - EdgeWeight::new(initial_change_set, EdgeWeightKind::Uses) + EdgeWeight::new(initial_change_set, EdgeWeightKind::Use) .expect("Unable to create EdgeWeight"), ordered_prop_1_index, ) @@ -3604,7 +3604,7 @@ mod test { initial_graph .get_node_index_by_id(container_prop_id) .expect("Unable to get NodeIndex"), - EdgeWeight::new(initial_change_set, EdgeWeightKind::Uses) + EdgeWeight::new(initial_change_set, EdgeWeightKind::Use) .expect("Unable to create EdgeWeight"), ordered_prop_2_index, ) @@ -3631,7 +3631,7 @@ mod test { initial_graph .get_node_index_by_id(container_prop_id) .expect("Unable to get NodeIndex"), - EdgeWeight::new(initial_change_set, EdgeWeightKind::Uses) + EdgeWeight::new(initial_change_set, EdgeWeightKind::Use) .expect("Unable to create EdgeWeight"), ordered_prop_3_index, ) @@ -3658,7 +3658,7 @@ mod test { initial_graph 
.get_node_index_by_id(container_prop_id) .expect("Unable to get NodeIndex"), - EdgeWeight::new(initial_change_set, EdgeWeightKind::Uses) + EdgeWeight::new(initial_change_set, EdgeWeightKind::Use) .expect("Unable to create EdgeWeight"), ordered_prop_4_index, ) @@ -3692,7 +3692,7 @@ mod test { new_graph .get_node_index_by_id(container_prop_id) .expect("Unable to get NodeIndex"), - EdgeWeight::new(new_change_set, EdgeWeightKind::Uses) + EdgeWeight::new(new_change_set, EdgeWeightKind::Use) .expect("Unable to create EdgeWeight"), ordered_prop_5_index, ) @@ -3747,7 +3747,7 @@ mod test { .add_edge( initial_change_set, initial_graph.root_index, - EdgeWeight::new(initial_change_set, EdgeWeightKind::Uses) + EdgeWeight::new(initial_change_set, EdgeWeightKind::Use) .expect("Unable to create EdgeWeight"), schema_index, ) @@ -3758,7 +3758,7 @@ mod test { initial_graph .get_node_index_by_id(schema_id) .expect("Cannot get NodeIndex"), - EdgeWeight::new(initial_change_set, EdgeWeightKind::Uses) + EdgeWeight::new(initial_change_set, EdgeWeightKind::Use) .expect("Unable to create EdgeWeight"), schema_variant_index, ) @@ -3786,7 +3786,7 @@ mod test { initial_graph .get_node_index_by_id(schema_variant_id) .expect("Unable to get NodeIndex"), - EdgeWeight::new(initial_change_set, EdgeWeightKind::Uses) + EdgeWeight::new(initial_change_set, EdgeWeightKind::Use) .expect("Unable to create EdgeWeight"), container_prop_index, ) @@ -3813,7 +3813,7 @@ mod test { initial_graph .get_node_index_by_id(container_prop_id) .expect("Unable to get NodeIndex"), - EdgeWeight::new(initial_change_set, EdgeWeightKind::Uses) + EdgeWeight::new(initial_change_set, EdgeWeightKind::Use) .expect("Unable to create EdgeWeight"), ordered_prop_1_index, ) @@ -3840,7 +3840,7 @@ mod test { initial_graph .get_node_index_by_id(container_prop_id) .expect("Unable to get NodeIndex"), - EdgeWeight::new(initial_change_set, EdgeWeightKind::Uses) + EdgeWeight::new(initial_change_set, EdgeWeightKind::Use) .expect("Unable to create 
EdgeWeight"), ordered_prop_2_index, ) @@ -3867,7 +3867,7 @@ mod test { initial_graph .get_node_index_by_id(container_prop_id) .expect("Unable to get NodeIndex"), - EdgeWeight::new(initial_change_set, EdgeWeightKind::Uses) + EdgeWeight::new(initial_change_set, EdgeWeightKind::Use) .expect("Unable to create EdgeWeight"), ordered_prop_3_index, ) @@ -3894,7 +3894,7 @@ mod test { initial_graph .get_node_index_by_id(container_prop_id) .expect("Unable to get NodeIndex"), - EdgeWeight::new(initial_change_set, EdgeWeightKind::Uses) + EdgeWeight::new(initial_change_set, EdgeWeightKind::Use) .expect("Unable to create EdgeWeight"), ordered_prop_4_index, ) @@ -3921,7 +3921,7 @@ mod test { .expect("Unable to create NodeWeight"), ) .expect("Unable to add ordered prop 5"); - let new_edge_weight = EdgeWeight::new(initial_change_set, EdgeWeightKind::Uses) + let new_edge_weight = EdgeWeight::new(initial_change_set, EdgeWeightKind::Use) .expect("Unable to create EdgeWeight"); initial_graph .add_edge( @@ -4013,7 +4013,7 @@ mod test { .add_edge( initial_change_set, initial_graph.root_index, - EdgeWeight::new(initial_change_set, EdgeWeightKind::Uses) + EdgeWeight::new(initial_change_set, EdgeWeightKind::Use) .expect("Unable to create EdgeWeight"), schema_index, ) @@ -4024,7 +4024,7 @@ mod test { initial_graph .get_node_index_by_id(schema_id) .expect("Cannot get NodeIndex"), - EdgeWeight::new(initial_change_set, EdgeWeightKind::Uses) + EdgeWeight::new(initial_change_set, EdgeWeightKind::Use) .expect("Unable to create EdgeWeight"), schema_variant_index, ) @@ -4052,7 +4052,7 @@ mod test { initial_graph .get_node_index_by_id(schema_variant_id) .expect("Unable to get NodeIndex"), - EdgeWeight::new(initial_change_set, EdgeWeightKind::Uses) + EdgeWeight::new(initial_change_set, EdgeWeightKind::Use) .expect("Unable to create EdgeWeight"), container_prop_index, ) @@ -4079,7 +4079,7 @@ mod test { initial_graph .get_node_index_by_id(container_prop_id) .expect("Unable to get NodeIndex"), - 
EdgeWeight::new(initial_change_set, EdgeWeightKind::Uses) + EdgeWeight::new(initial_change_set, EdgeWeightKind::Use) .expect("Unable to create EdgeWeight"), ordered_prop_1_index, ) @@ -4106,7 +4106,7 @@ mod test { initial_graph .get_node_index_by_id(container_prop_id) .expect("Unable to get NodeIndex"), - EdgeWeight::new(initial_change_set, EdgeWeightKind::Uses) + EdgeWeight::new(initial_change_set, EdgeWeightKind::Use) .expect("Unable to create EdgeWeight"), ordered_prop_2_index, ) @@ -4133,7 +4133,7 @@ mod test { initial_graph .get_node_index_by_id(container_prop_id) .expect("Unable to get NodeIndex"), - EdgeWeight::new(initial_change_set, EdgeWeightKind::Uses) + EdgeWeight::new(initial_change_set, EdgeWeightKind::Use) .expect("Unable to create EdgeWeight"), ordered_prop_3_index, ) @@ -4160,7 +4160,7 @@ mod test { initial_graph .get_node_index_by_id(container_prop_id) .expect("Unable to get NodeIndex"), - EdgeWeight::new(initial_change_set, EdgeWeightKind::Uses) + EdgeWeight::new(initial_change_set, EdgeWeightKind::Use) .expect("Unable to create EdgeWeight"), ordered_prop_4_index, ) @@ -4197,7 +4197,7 @@ mod test { .expect("Unable to create NodeWeight"), ) .expect("Unable to add ordered prop 5"); - let new_edge_weight = EdgeWeight::new(initial_change_set, EdgeWeightKind::Uses) + let new_edge_weight = EdgeWeight::new(initial_change_set, EdgeWeightKind::Use) .expect("Unable to create EdgeWeight"); initial_graph .add_edge( @@ -4290,7 +4290,7 @@ mod test { .add_edge( initial_change_set, initial_graph.root_index, - EdgeWeight::new(initial_change_set, EdgeWeightKind::Uses) + EdgeWeight::new(initial_change_set, EdgeWeightKind::Use) .expect("Unable to create EdgeWeight"), schema_index, ) @@ -4301,7 +4301,7 @@ mod test { initial_graph .get_node_index_by_id(schema_id) .expect("Cannot get NodeIndex"), - EdgeWeight::new(initial_change_set, EdgeWeightKind::Uses) + EdgeWeight::new(initial_change_set, EdgeWeightKind::Use) .expect("Unable to create EdgeWeight"), 
schema_variant_index, ) @@ -4329,7 +4329,7 @@ mod test { initial_graph .get_node_index_by_id(schema_variant_id) .expect("Unable to get NodeIndex"), - EdgeWeight::new(initial_change_set, EdgeWeightKind::Uses) + EdgeWeight::new(initial_change_set, EdgeWeightKind::Use) .expect("Unable to create EdgeWeight"), container_prop_index, ) @@ -4356,7 +4356,7 @@ mod test { initial_graph .get_node_index_by_id(container_prop_id) .expect("Unable to get NodeIndex"), - EdgeWeight::new(initial_change_set, EdgeWeightKind::Uses) + EdgeWeight::new(initial_change_set, EdgeWeightKind::Use) .expect("Unable to create EdgeWeight"), ordered_prop_1_index, ) @@ -4383,7 +4383,7 @@ mod test { initial_graph .get_node_index_by_id(container_prop_id) .expect("Unable to get NodeIndex"), - EdgeWeight::new(initial_change_set, EdgeWeightKind::Uses) + EdgeWeight::new(initial_change_set, EdgeWeightKind::Use) .expect("Unable to create EdgeWeight"), ordered_prop_2_index, ) @@ -4410,7 +4410,7 @@ mod test { initial_graph .get_node_index_by_id(container_prop_id) .expect("Unable to get NodeIndex"), - EdgeWeight::new(initial_change_set, EdgeWeightKind::Uses) + EdgeWeight::new(initial_change_set, EdgeWeightKind::Use) .expect("Unable to create EdgeWeight"), ordered_prop_3_index, ) @@ -4437,7 +4437,7 @@ mod test { initial_graph .get_node_index_by_id(container_prop_id) .expect("Unable to get NodeIndex"), - EdgeWeight::new(initial_change_set, EdgeWeightKind::Uses) + EdgeWeight::new(initial_change_set, EdgeWeightKind::Use) .expect("Unable to create EdgeWeight"), ordered_prop_4_index, ) @@ -4460,7 +4460,7 @@ mod test { .get_node_index_by_id(container_prop_id) .expect("Unable to get container NodeIndex"), ordered_prop_2_index, - EdgeWeightKind::Uses, + EdgeWeightKind::Use, ) .expect("Unable to remove container prop -> prop 2 edge"); @@ -4480,7 +4480,7 @@ mod test { ) .expect("Unable to add ordered prop 5"); - let new_edge_weight = EdgeWeight::new(initial_change_set, EdgeWeightKind::Uses) + let new_edge_weight = 
EdgeWeight::new(initial_change_set, EdgeWeightKind::Use) .expect("Unable to create EdgeWeight"); initial_graph .add_edge( From d4ffe5f00c65fb893d6f1aaad9c52782396b5ed0 Mon Sep 17 00:00:00 2001 From: Jacob Helwig Date: Thu, 31 Aug 2023 13:16:55 -0700 Subject: [PATCH 22/92] Fix naming of variable & debug text in ContentNodeWeight The "kind" was changed to also include the content hash a while back, and the combined kind + hash is what we're calling a `ContentAddress`. This renames the variable to reflect this refactoring. The `Debug` implementation for `ContentNodeWeight` was copied from before `NodeWeight` was refactored into being a wrapper enum around inner types, so now we're using the correct name for the struct in the debug output. --- .../workspace_snapshot/node_weight/content_node_weight.rs | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/lib/dal/src/workspace_snapshot/node_weight/content_node_weight.rs b/lib/dal/src/workspace_snapshot/node_weight/content_node_weight.rs index 7e7f9fdc34..f4bb6dd36d 100644 --- a/lib/dal/src/workspace_snapshot/node_weight/content_node_weight.rs +++ b/lib/dal/src/workspace_snapshot/node_weight/content_node_weight.rs @@ -137,7 +137,7 @@ impl ContentNodeWeight { } pub fn new_content_hash(&mut self, content_hash: ContentHash) -> NodeWeightResult<()> { - let new_kind = match &self.content_address { + let new_address = match &self.content_address { ContentAddress::Component(_) => ContentAddress::Component(content_hash), ContentAddress::Func(_) => ContentAddress::Func(content_hash), ContentAddress::FuncArg(_) => ContentAddress::FuncArg(content_hash), @@ -147,7 +147,7 @@ impl ContentNodeWeight { ContentAddress::SchemaVariant(_) => ContentAddress::SchemaVariant(content_hash), }; - self.content_address = new_kind; + self.content_address = new_address; Ok(()) } @@ -189,7 +189,7 @@ impl ContentNodeWeight { impl std::fmt::Debug for ContentNodeWeight { fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { -
f.debug_struct("NodeWeight") + f.debug_struct("ContentNodeWeight") .field("id", &self.id.to_string()) .field("lineage_id", &self.lineage_id.to_string()) .field("content_address", &self.content_address) From 85a499618de4178abbddee59a7ef7474815fd059 Mon Sep 17 00:00:00 2001 From: Jacob Helwig Date: Thu, 31 Aug 2023 13:25:18 -0700 Subject: [PATCH 23/92] Clean up use statements in node weights for workspace snapshot graph --- lib/dal/src/workspace_snapshot/node_weight.rs | 8 +++++--- .../node_weight/content_node_weight.rs | 12 +++++++----- 2 files changed, 12 insertions(+), 8 deletions(-) diff --git a/lib/dal/src/workspace_snapshot/node_weight.rs b/lib/dal/src/workspace_snapshot/node_weight.rs index f76a39f32e..12485138f2 100644 --- a/lib/dal/src/workspace_snapshot/node_weight.rs +++ b/lib/dal/src/workspace_snapshot/node_weight.rs @@ -3,9 +3,11 @@ use serde::{Deserialize, Serialize}; use thiserror::Error; use ulid::Ulid; -use crate::change_set_pointer::{ChangeSetPointer, ChangeSetPointerError}; -use crate::workspace_snapshot::vector_clock::{VectorClock, VectorClockError}; -use crate::ContentHash; +use crate::{ + change_set_pointer::{ChangeSetPointer, ChangeSetPointerError}, + workspace_snapshot::vector_clock::{VectorClock, VectorClockError}, + ContentHash, +}; pub use crate::workspace_snapshot::node_weight::content_node_weight::ContentAddress; pub use content_node_weight::ContentNodeWeight; diff --git a/lib/dal/src/workspace_snapshot/node_weight/content_node_weight.rs b/lib/dal/src/workspace_snapshot/node_weight/content_node_weight.rs index f4bb6dd36d..15328c627f 100644 --- a/lib/dal/src/workspace_snapshot/node_weight/content_node_weight.rs +++ b/lib/dal/src/workspace_snapshot/node_weight/content_node_weight.rs @@ -2,12 +2,14 @@ use chrono::{DateTime, Utc}; use serde::{Deserialize, Serialize}; use ulid::Ulid; -use crate::change_set_pointer::ChangeSetPointer; -use crate::workspace_snapshot::{ - node_weight::{NodeWeightError, NodeWeightResult}, - 
vector_clock::VectorClock, +use crate::{ + change_set_pointer::ChangeSetPointer, + workspace_snapshot::{ + node_weight::{NodeWeightError, NodeWeightResult}, + vector_clock::VectorClock, + }, + ContentHash, }; -use crate::ContentHash; pub type LineageId = Ulid; From 6c305955c8126b9d3794a66e415c8bb74ad4f845 Mon Sep 17 00:00:00 2001 From: Jacob Helwig Date: Tue, 19 Sep 2023 08:56:42 -0700 Subject: [PATCH 24/92] Initial AttributeView support in WorkspaceSnapshotGraph --- lib/dal/src/content.rs | 2 + lib/dal/src/workspace_snapshot.rs | 10 +- .../src/workspace_snapshot/content_address.rs | 40 + lib/dal/src/workspace_snapshot/edge_weight.rs | 19 +- lib/dal/src/workspace_snapshot/graph.rs | 1881 ++++++++++++++--- lib/dal/src/workspace_snapshot/node_weight.rs | 58 +- .../node_weight/content_node_weight.rs | 56 +- .../node_weight/ordering_node_weight.rs | 15 +- .../node_weight/prop_node_weight.rs | 238 +++ 9 files changed, 1973 insertions(+), 346 deletions(-) create mode 100644 lib/dal/src/workspace_snapshot/content_address.rs create mode 100644 lib/dal/src/workspace_snapshot/node_weight/prop_node_weight.rs diff --git a/lib/dal/src/content.rs b/lib/dal/src/content.rs index 2330f4a9e6..77f62186fe 100644 --- a/lib/dal/src/content.rs +++ b/lib/dal/src/content.rs @@ -4,3 +4,5 @@ pub mod hash; pub mod pair; pub mod store; + +pub use store::Store; diff --git a/lib/dal/src/workspace_snapshot.rs b/lib/dal/src/workspace_snapshot.rs index 6444044329..818f0e5bb7 100644 --- a/lib/dal/src/workspace_snapshot.rs +++ b/lib/dal/src/workspace_snapshot.rs @@ -23,6 +23,7 @@ // )] pub mod conflict; +pub mod content_address; pub mod edge_weight; pub mod graph; pub mod lamport_clock; @@ -160,12 +161,9 @@ impl WorkspaceSnapshot { edge_weight: EdgeWeight, to_node_index: NodeIndex, ) -> WorkspaceSnapshotResult { - Ok(self.working_copy()?.add_edge( - change_set, - from_node_index, - edge_weight, - to_node_index, - )?) + Ok(self + .working_copy()? 
+ .add_edge(from_node_index, edge_weight, to_node_index)?) } pub async fn detect_conflicts_and_updates( diff --git a/lib/dal/src/workspace_snapshot/content_address.rs b/lib/dal/src/workspace_snapshot/content_address.rs new file mode 100644 index 0000000000..9644808bd0 --- /dev/null +++ b/lib/dal/src/workspace_snapshot/content_address.rs @@ -0,0 +1,40 @@ +use serde::{Deserialize, Serialize}; + +use crate::content::hash::ContentHash; + +#[remain::sorted] +#[derive(Debug, Serialize, Deserialize, Copy, Clone, PartialEq, Eq)] +/// The type of the object, and the content-addressable-storage address (content hash) +/// of the object itself. +pub enum ContentAddress { + AttributePrototype(ContentHash), + AttributeValue(ContentHash), + Component(ContentHash), + ExternalProvider(ContentHash), + Func(ContentHash), + FuncArg(ContentHash), + InternalProvider(ContentHash), + Prop(ContentHash), + Root, + Schema(ContentHash), + SchemaVariant(ContentHash), +} + +impl ContentAddress { + pub fn content_hash(&self) -> ContentHash { + match self { + ContentAddress::AttributePrototype(id) => Some(*id), + ContentAddress::AttributeValue(id) => Some(*id), + ContentAddress::Component(id) => Some(*id), + ContentAddress::ExternalProvider(id) => Some(*id), + ContentAddress::FuncArg(id) => Some(*id), + ContentAddress::Func(id) => Some(*id), + ContentAddress::InternalProvider(id) => Some(*id), + ContentAddress::Prop(id) => Some(*id), + ContentAddress::Root => None, + ContentAddress::Schema(id) => Some(*id), + ContentAddress::SchemaVariant(id) => Some(*id), + } + .unwrap_or_default() + } +} diff --git a/lib/dal/src/workspace_snapshot/edge_weight.rs b/lib/dal/src/workspace_snapshot/edge_weight.rs index 8c24284eee..b1c94aaf86 100644 --- a/lib/dal/src/workspace_snapshot/edge_weight.rs +++ b/lib/dal/src/workspace_snapshot/edge_weight.rs @@ -16,10 +16,23 @@ pub enum EdgeWeightError { pub type EdgeWeightResult = Result; #[remain::sorted] -#[derive(Default, Debug, Serialize, Deserialize, Clone, Copy, 
PartialEq, Eq, Hash)] +#[derive(Default, Debug, Serialize, Deserialize, Clone, PartialEq, Eq, Hash)] pub enum EdgeWeightKind { + /// An argument to a function defined by an [`AttributePrototype`][crate::AttributePrototype], + /// including the name of the argument to the function. + Argument(String), + /// An [`AttributeValue`] "contained" by another [`AttributeValue`], such as an entry in an + /// array/map, or a field of an object. The optional [`String`] represents the key of the entry + /// in a map. + Contain(Option), + /// Used when the target/destination of an edge is an [`InternalProvider`], or an + /// [`ExternalProvider`]. + DataProvider, /// Used to record the order that the elements of a container should be presented in. Ordering, + Prop, + Prototype, + Proxy, /// Workspaces "use" functions, modules, schemas. Schemas "use" schema variants. /// Schema variants "use" props. Props "use" functions, and other props. Modules /// "use" functions, schemas, and eventually(?) components. 
@@ -44,8 +57,8 @@ impl EdgeWeight { Ok(()) } - pub fn kind(&self) -> EdgeWeightKind { - self.kind + pub fn kind(&self) -> &EdgeWeightKind { + &self.kind } pub fn mark_seen_at(&mut self, change_set: &ChangeSetPointer, seen_at: DateTime) { diff --git a/lib/dal/src/workspace_snapshot/graph.rs b/lib/dal/src/workspace_snapshot/graph.rs index 62a905c73b..cd19a48adc 100644 --- a/lib/dal/src/workspace_snapshot/graph.rs +++ b/lib/dal/src/workspace_snapshot/graph.rs @@ -1,23 +1,25 @@ use chrono::Utc; use petgraph::{algo, prelude::*, visit::DfsEvent}; use serde::{Deserialize, Serialize}; -use std::collections::{HashMap, HashSet}; +use std::collections::{HashMap, HashSet, VecDeque}; use telemetry::prelude::*; use thiserror::Error; use ulid::Ulid; use crate::change_set_pointer::{ChangeSetPointer, ChangeSetPointerError}; use crate::{ + content::{self, store::StoreError}, workspace_snapshot::{ conflict::Conflict, + content_address::ContentAddress, edge_weight::{EdgeWeight, EdgeWeightError, EdgeWeightKind}, - node_weight::{ContentAddress, NodeWeight, NodeWeightError}, + node_weight::{NodeWeight, NodeWeightError, OrderingNodeWeight}, update::Update, }, ContentHash, }; -use super::node_weight::OrderingNodeWeight; +pub type LineageId = Ulid; #[allow(clippy::large_enum_variant)] #[remain::sorted] @@ -27,6 +29,10 @@ pub enum WorkspaceSnapshotGraphError { CannotCompareOrderedAndUnorderedContainers(NodeIndex, NodeIndex), #[error("ChangeSet error: {0}")] ChangeSet(#[from] ChangeSetPointerError), + #[error("Unable to retrieve content for ContentHash")] + ContentMissingForContentHash, + #[error("Content store error: {0}")] + ContentStore(#[from] StoreError), #[error("Action would create a graph cycle")] CreateGraphCycle, #[error("EdgeWeight error: {0}")] @@ -37,14 +43,20 @@ pub enum WorkspaceSnapshotGraphError { GraphTraversal(petgraph::visit::DfsEvent), #[error("Incompatible node types")] IncompatibleNodeTypes, + #[error("Invalid value graph")] + InvalidValueGraph, #[error("NodeWeight 
error: {0}")] NodeWeight(#[from] NodeWeightError), #[error("node weight not found")] NodeWeightNotFound, #[error("Node with ID {0} not found")] NodeWithIdNotFound(Ulid), + #[error("No Prop found for NodeIndex {0:?}")] + NoPropFound(NodeIndex), #[error("NodeIndex has too many Ordering children: {0:?}")] TooManyOrderingForNode(NodeIndex), + #[error("NodeIndex has too many Prop children: {0:?}")] + TooManyPropForNode(NodeIndex), #[error("Unable to add node to the graph")] UnableToAddNode, #[error("Workspace Snapshot has conflicts and must be rebased")] @@ -82,19 +94,54 @@ impl WorkspaceSnapshotGraph { Ok(Self { root_index, graph }) } - pub fn node_count(&self) -> usize { - self.graph.node_count() + pub fn add_edge( + &mut self, + from_node_index: NodeIndex, + mut edge_weight: EdgeWeight, + to_node_index: NodeIndex, + ) -> WorkspaceSnapshotGraphResult { + // Temporarily add the edge to the existing tree to see if it would create a cycle. + let temp_edge = self + .graph + .update_edge(from_node_index, to_node_index, edge_weight.clone()); + let would_create_a_cycle = !self.is_acyclic_directed(); + self.graph.remove_edge(temp_edge); + if would_create_a_cycle { + return Err(WorkspaceSnapshotGraphError::CreateGraphCycle); + } + + // Because outgoing edges are part of a node's identity, we create a new "from" node + // as we are effectively writing to that node (we'll need to update the merkle tree + // hash), and everything in the graph should be treated as copy-on-write. + let new_from_node_index = self.copy_node_index(from_node_index)?; + + // Add the new edge to the new version of the "from" node. + let new_edge_index = + self.graph + .update_edge(new_from_node_index, to_node_index, edge_weight); + self.update_merkle_tree_hash(new_from_node_index)?; + + // Update the rest of the graph to reflect the new node/edge. 
+ self.replace_references(from_node_index, new_from_node_index)?; + + Ok(new_edge_index) } - pub fn add_edge( + pub fn add_node(&mut self, node: NodeWeight) -> WorkspaceSnapshotGraphResult { + let new_node_index = self.graph.add_node(node); + self.update_merkle_tree_hash(new_node_index)?; + + Ok(new_node_index) + } + + pub fn add_ordered_edge( &mut self, change_set: &ChangeSetPointer, from_node_index: NodeIndex, edge_weight: EdgeWeight, to_node_index: NodeIndex, ) -> WorkspaceSnapshotGraphResult { - let new_edge_index = - self.add_unordered_edge(from_node_index, edge_weight, to_node_index)?; + let new_edge_index = self.add_edge(from_node_index, edge_weight, to_node_index)?; let (new_from_node_index, _) = self .graph @@ -135,13 +182,6 @@ impl WorkspaceSnapshotGraph { Ok(new_edge_index) } - pub fn add_node(&mut self, node: NodeWeight) -> WorkspaceSnapshotGraphResult { - let new_node_index = self.graph.add_node(node); - self.update_merkle_tree_hash(new_node_index)?; - - Ok(new_node_index) - } - fn add_ordered_node( &mut self, change_set: &ChangeSetPointer, @@ -152,7 +192,6 @@ impl WorkspaceSnapshotGraph { let ordering_node_index = self.add_node(NodeWeight::Ordering(OrderingNodeWeight::new(change_set)?))?; self.add_edge( - change_set, new_node_index, EdgeWeight::new(change_set, EdgeWeightKind::Ordering)?, ordering_node_index, @@ -172,37 +211,165 @@ impl WorkspaceSnapshotGraph { Err(WorkspaceSnapshotGraphError::UnableToAddNode) } - pub fn add_unordered_edge( - &mut self, - from_node_index: NodeIndex, - mut edge_weight: EdgeWeight, - to_node_index: NodeIndex, - ) -> WorkspaceSnapshotGraphResult { - // Temporarily add the edge to the existing tree to see if it would create a cycle. 
- let temp_edge = self - .graph - .update_edge(from_node_index, to_node_index, edge_weight.clone()); - let would_create_a_cycle = !self.is_acyclic_directed(); - self.graph.remove_edge(temp_edge); - if would_create_a_cycle { - return Err(WorkspaceSnapshotGraphError::CreateGraphCycle); - } + pub fn attribute_value_view( + &self, + content_store: &content::Store, + root_index: NodeIndex, + ) -> WorkspaceSnapshotGraphResult { + let mut view = serde_json::json![{}]; + let mut nodes_to_add = VecDeque::from([(root_index, "".to_string())]); + + while let Some((current_node_index, write_location)) = nodes_to_add.pop_front() { + let current_node_weight = self.get_node_weight(current_node_index)?; + let current_node_content: serde_json::Value = content_store + .get(¤t_node_weight.content_hash())? + .ok_or(WorkspaceSnapshotGraphError::ContentMissingForContentHash)?; + // We don't need to care what kind the prop is, since assigning a value via + // `pointer_mut` completely overwrites the existing value, regardless of any + // pre-existing data types. + let view_pointer = match view.pointer_mut(&write_location) { + Some(pointer) => { + *pointer = current_node_content.clone(); + pointer + } + None => { + // This is an error, and really shouldn't ever happen. + dbg!(view, write_location, current_node_content); + todo!(); + } + }; - // Because outgoing edges are part of a node's identity, we create a new "from" node - // as we are effectively writing to that node (we'll need to update the merkle tree - // hash), and everything in the graph should be treated as copy-on-write. - let new_from_node_index = self.copy_node_index(from_node_index)?; + if current_node_content.is_null() { + // If the value we just inserted is "null", then there shouldn't be any child + // values, so don't bother looking for them in the graph to be able to add + // them to the work queue. + continue; + } - // Add the new edge to the new version of the "from" node. 
- let new_edge_index = - self.graph - .update_edge(new_from_node_index, to_node_index, edge_weight); - self.update_merkle_tree_hash(new_from_node_index)?; + // Find the ordering if there is one, so we can add the children in the proper order. + if let Some(child_ordering) = self.ordered_children_for_node(current_node_index)? { + for (child_position_index, &child_node_index) in child_ordering.iter().enumerate() { + // `.enumerate()` gives us 1-indexed, but we need 0-indexed. + dbg!(child_position_index, child_node_index); - // Update the rest of the graph to reflect the new node/edge. - self.replace_references(from_node_index, new_from_node_index)?; + // We insert a JSON `Null` as a "place holder" for the write location. We need + // it to exist to be able to get a `pointer_mut` to it on the next time around, + // but we don't really care what it is, since we're going to completely + // overwrite it anyway. + for edge in self + .graph + .edges_connecting(current_node_index, child_node_index) + { + let child_position = match edge.weight().kind() { + EdgeWeightKind::Contain(Some(key)) => { + view_pointer + .as_object_mut() + .ok_or(WorkspaceSnapshotGraphError::InvalidValueGraph)? + .insert(key.clone(), serde_json::json![null]); + key.clone() + } + EdgeWeightKind::Contain(None) => { + if current_node_content.is_array() { + view_pointer + .as_array_mut() + .ok_or(WorkspaceSnapshotGraphError::InvalidValueGraph)? + .push(serde_json::json![null]); + child_position_index.to_string() + } else { + // Get prop name + if let NodeWeight::Prop(prop_weight) = self.get_node_weight( + self.prop_node_index_for_node_index(child_node_index)? + .ok_or(WorkspaceSnapshotGraphError::NoPropFound( + child_node_index, + ))?, + )? { + view_pointer + .as_object_mut() + .ok_or(WorkspaceSnapshotGraphError::InvalidValueGraph)? 
+ .insert( + prop_weight.name().to_string(), + serde_json::json![null], + ); + prop_weight.name().to_string() + } else { + return Err(WorkspaceSnapshotGraphError::InvalidValueGraph); + } + } + } + _ => continue, + }; + let child_write_location = format!("{}/{}", write_location, child_position); + nodes_to_add.push_back(dbg!((child_node_index, child_write_location))); + } + } + } else { + // The child nodes aren't explicitly ordered, so we'll need to come up with one of + // our own. We'll sort the nodes by their `NodeIndex`, which means that when a + // write last happened to them (or anywhere further towards the leaves) will + // determine their sorting in oldest to most recent order. + let mut child_index_to_position = HashMap::new(); + let mut child_indexes = Vec::new(); + let mut outgoing_edges = self.graph.edges_directed(current_node_index, Outgoing); + while let Some(edge_ref) = outgoing_edges.next() { + match edge_ref.weight().kind() { + EdgeWeightKind::Contain(Some(key)) => { + view_pointer + .as_object_mut() + .ok_or(WorkspaceSnapshotGraphError::InvalidValueGraph)? + .insert(key.clone(), serde_json::json![null]); + child_index_to_position.insert(edge_ref.target(), key.clone()); + child_indexes.push(edge_ref.target()); + } + EdgeWeightKind::Contain(None) => { + child_indexes.push(edge_ref.target()); + if current_node_content.is_array() { + view_pointer + .as_array_mut() + .ok_or(WorkspaceSnapshotGraphError::InvalidValueGraph)? + .push(serde_json::json![null]); + } else { + // Get prop name + if let NodeWeight::Prop(prop_weight) = self.get_node_weight( + self.prop_node_index_for_node_index(edge_ref.target())? + .ok_or(WorkspaceSnapshotGraphError::NoPropFound( + edge_ref.target(), + ))?, + )? { + view_pointer + .as_object_mut() + .ok_or(WorkspaceSnapshotGraphError::InvalidValueGraph)? 
+ .insert( + prop_weight.name().to_string(), + serde_json::json![null], + ); + child_index_to_position + .insert(edge_ref.target(), prop_weight.name().to_string()); + child_indexes.push(edge_ref.target()); + } else { + return Err(WorkspaceSnapshotGraphError::InvalidValueGraph); + } + } + } + _ => continue, + } + } + child_indexes.sort(); - Ok(new_edge_index) + for (child_position_index, child_node_index) in child_indexes.iter().enumerate() { + if let Some(key) = child_index_to_position.get(child_node_index) { + nodes_to_add + .push_back((*child_node_index, format!("{}/{}", write_location, key))); + } else { + nodes_to_add.push_back(( + *child_node_index, + format!("{}/{}", write_location, child_position_index), + )); + } + } + } + } + + Ok(view) } pub fn cleanup(&mut self) { @@ -731,7 +898,7 @@ impl WorkspaceSnapshotGraph { onto_change_set: &ChangeSetPointer, onto_container_index: NodeIndex, ) -> WorkspaceSnapshotGraphResult<(Vec, Vec)> { - #[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)] + #[derive(Debug, Clone, Hash, PartialEq, Eq)] struct UniqueEdgeInfo { pub kind: EdgeWeightKind, pub target_lineage: Ulid, @@ -754,7 +921,7 @@ impl WorkspaceSnapshotGraph { let target_node_weight = self.get_node_weight(edgeref.target())?; to_rebase_edges.insert( UniqueEdgeInfo { - kind: edgeref.weight().kind(), + kind: edgeref.weight().kind().clone(), target_lineage: target_node_weight.lineage_id(), }, EdgeInfo { @@ -769,7 +936,7 @@ impl WorkspaceSnapshotGraph { let target_node_weight = onto.get_node_weight(edgeref.target())?; onto_edges.insert( UniqueEdgeInfo { - kind: edgeref.weight().kind(), + kind: edgeref.weight().kind().clone(), target_lineage: target_node_weight.lineage_id(), }, EdgeInfo { @@ -982,10 +1149,18 @@ impl WorkspaceSnapshotGraph { Ok(()) } + pub fn node_count(&self) -> usize { + self.graph.node_count() + } + + /// Returns an `Option>`. 
If there is an ordering node, then the return will be a + /// [`Some`], where the [`Vec`] is populated with the [`NodeIndex`] of the nodes specified by + /// the ordering node, in the order defined by the ordering node. If there is not an ordering + /// node, then the return will be [`None`]. pub fn ordered_children_for_node( &self, container_node_index: NodeIndex, - ) -> WorkspaceSnapshotGraphResult> { + ) -> WorkspaceSnapshotGraphResult>> { let mut ordered_child_indexes = Vec::new(); if let Some(container_ordering_index) = self.ordering_node_index_for_container(container_node_index)? @@ -1007,9 +1182,11 @@ impl WorkspaceSnapshotGraph { )?); } } + } else { + return Ok(None); } - Ok(ordered_child_indexes) + Ok(Some(ordered_child_indexes)) } pub fn ordering_node_index_for_container( @@ -1030,6 +1207,18 @@ impl WorkspaceSnapshotGraph { Ok(onto_ordering_node_indexes.get(0).copied()) } + pub fn prop_node_index_for_node_index( + &self, + node_index: NodeIndex, + ) -> WorkspaceSnapshotGraphResult> { + let prop_node_indexes = prop_node_indexes_for_node_index(self, node_index); + if prop_node_indexes.len() > 1 { + error!("Too many prop nodes found for NodeIndex {:?}", node_index); + return Err(WorkspaceSnapshotGraphError::TooManyPropForNode(node_index)); + } + Ok(prop_node_indexes.get(0).copied()) + } + /// [`StableGraph`] guarantees the stability of [`NodeIndex`] across removals, however there /// are **NO** guarantees around the stability of [`EdgeIndex`] across removals. If /// [`Self::cleanup()`] has been called, then any [`EdgeIndex`] found before @@ -1049,7 +1238,7 @@ impl WorkspaceSnapshotGraph { .graph .edges_connecting(new_source_node_index, target_node_index) { - if edgeref.weight().kind() == edge_kind { + if edgeref.weight().kind() == &edge_kind { edges_to_remove.push(edgeref.id()); } } @@ -1204,7 +1393,7 @@ impl WorkspaceSnapshotGraph { let mut hasher = ContentHash::hasher(); hasher.update( self.get_node_weight(node_index_to_update)? 
- .content_hash() + .node_hash() .to_string() .as_bytes(), ); @@ -1221,6 +1410,10 @@ impl WorkspaceSnapshotGraph { ordered_neighbors.sort(); for neighbor_node in ordered_neighbors { + // TODO: This needs to take into account the edge weight(s) between the node being + // updated, and the neighboring node, as important information may be encoded in + // the edge weight itself such as the name of the function argument, or the key + // of an entry in a map. hasher.update( self.graph .node_weight(neighbor_node) @@ -1249,16 +1442,12 @@ fn ordering_node_indexes_for_node_index( .graph .edges_directed(node_index, Outgoing) .filter_map(|edge_reference| { - if edge_reference.weight().kind() == EdgeWeightKind::Ordering { - if let Some((_, destination_node_index)) = - snapshot.graph.edge_endpoints(edge_reference.id()) - { - if matches!( - snapshot.get_node_weight(destination_node_index), - Ok(NodeWeight::Ordering(_)) - ) { - return Some(destination_node_index); - } + if edge_reference.weight().kind() == &EdgeWeightKind::Ordering { + if matches!( + snapshot.get_node_weight(edge_reference.target()), + Ok(NodeWeight::Ordering(_)) + ) { + return Some(edge_reference.target()); } } @@ -1267,10 +1456,31 @@ fn ordering_node_indexes_for_node_index( .collect() } +fn prop_node_indexes_for_node_index( + snapshot: &WorkspaceSnapshotGraph, + node_index: NodeIndex, +) -> Vec { + snapshot + .graph + .edges_directed(node_index, Outgoing) + .filter_map(|edge_reference| { + if edge_reference.weight().kind() == &EdgeWeightKind::Prop { + if matches!( + snapshot.get_node_weight(edge_reference.target()), + Ok(NodeWeight::Prop(_)) + ) { + return Some(edge_reference.target()); + } + } + None + }) + .collect() +} + #[cfg(test)] mod test { use super::*; - use crate::{ComponentId, ContentHash, FuncId, PropId, SchemaId, SchemaVariantId}; + use crate::{ComponentId, ContentHash, FuncId, PropId, PropKind, SchemaId, SchemaVariantId}; use pretty_assertions_sorted::assert_eq; #[derive(Debug, PartialEq)] @@ 
-1295,7 +1505,7 @@ mod test { let mut graph = WorkspaceSnapshotGraph::new(change_set) .expect("Unable to create WorkspaceSnapshotGraph"); - let schema_id = change_set.generate_ulid().expect("Cannot generate Ulid"); + let schema_id = change_set.generate_ulid().expect("Unable to generate Ulid"); let schema_index = graph .add_node( NodeWeight::new_content( @@ -1308,7 +1518,7 @@ mod test { .expect("Unable to create NodeWeight"), ) .expect("Unable to add schema"); - let schema_variant_id = change_set.generate_ulid().expect("Cannot generate Ulid"); + let schema_variant_id = change_set.generate_ulid().expect("Unable to generate Ulid"); let schema_variant_index = graph .add_node( NodeWeight::new_content( @@ -1321,7 +1531,7 @@ mod test { .expect("Unable to create NodeWeight"), ) .expect("Unable to add schema variant"); - let component_id = change_set.generate_ulid().expect("Cannot generate Ulid"); + let component_id = change_set.generate_ulid().expect("Unable to generate Ulid"); let component_index = graph .add_node( NodeWeight::new_content( @@ -1337,7 +1547,6 @@ mod test { graph .add_edge( - change_set, graph.root_index, EdgeWeight::new(change_set, EdgeWeightKind::Use) .expect("Unable to create EdgeWeight"), @@ -1346,7 +1555,6 @@ mod test { .expect("Unable to add root -> component edge"); graph .add_edge( - change_set, graph.root_index, EdgeWeight::new(change_set, EdgeWeightKind::Use) .expect("Unable to create EdgeWeight"), @@ -1355,10 +1563,9 @@ mod test { .expect("Unable to add root -> schema edge"); graph .add_edge( - change_set, graph .get_node_index_by_id(schema_id) - .expect("Cannot get NodeIndex"), + .expect("Unable to get NodeIndex"), EdgeWeight::new(change_set, EdgeWeightKind::Use) .expect("Unable to create EdgeWeight"), schema_variant_index, @@ -1366,19 +1573,18 @@ mod test { .expect("Unable to add schema -> schema variant edge"); graph .add_edge( - change_set, graph .get_node_index_by_id(component_id) - .expect("Cannot get NodeIndex"), + .expect("Unable to get 
NodeIndex"), EdgeWeight::new(change_set, EdgeWeightKind::Use) .expect("Unable to create EdgeWeight"), graph .get_node_index_by_id(schema_variant_id) - .expect("Cannot get NodeIndex"), + .expect("Unable to get NodeIndex"), ) .expect("Unable to add component -> schema variant edge"); - let func_id = change_set.generate_ulid().expect("Cannot generate Ulid"); + let func_id = change_set.generate_ulid().expect("Unable to generate Ulid"); let func_index = graph .add_node( NodeWeight::new_content( @@ -1391,7 +1597,7 @@ mod test { .expect("Unable to create NodeWeight"), ) .expect("Unable to add func"); - let prop_id = change_set.generate_ulid().expect("Cannot generate Ulid"); + let prop_id = change_set.generate_ulid().expect("Unable to generate Ulid"); let prop_index = graph .add_node( NodeWeight::new_content( @@ -1407,7 +1613,6 @@ mod test { graph .add_edge( - change_set, graph.root_index, EdgeWeight::new(change_set, EdgeWeightKind::Use) .expect("Unable to create EdgeWeight"), @@ -1416,10 +1621,9 @@ mod test { .expect("Unable to add root -> func edge"); graph .add_edge( - change_set, graph .get_node_index_by_id(schema_variant_id) - .expect("Cannot get NodeIndex"), + .expect("Unable to get NodeIndex"), EdgeWeight::new(change_set, EdgeWeightKind::Use) .expect("Unable to create EdgeWeight"), prop_index, @@ -1427,15 +1631,14 @@ mod test { .expect("Unable to add schema variant -> prop edge"); graph .add_edge( - change_set, graph .get_node_index_by_id(prop_id) - .expect("Cannot get NodeIndex"), + .expect("Unable to get NodeIndex"), EdgeWeight::new(change_set, EdgeWeightKind::Use) .expect("Unable to create EdgeWeight"), graph .get_node_index_by_id(func_id) - .expect("Cannot get NodeIndex"), + .expect("Unable to get NodeIndex"), ) .expect("Unable to add prop -> func edge"); @@ -1449,7 +1652,7 @@ mod test { let mut graph = WorkspaceSnapshotGraph::new(change_set) .expect("Unable to create WorkspaceSnapshotGraph"); - let schema_id = change_set.generate_ulid().expect("Cannot generate 
Ulid"); + let schema_id = change_set.generate_ulid().expect("Unable to generate Ulid"); let initial_schema_node_index = graph .add_node( NodeWeight::new_content( @@ -1462,7 +1665,7 @@ mod test { .expect("Unable to create NodeWeight"), ) .expect("Unable to add schema"); - let schema_variant_id = change_set.generate_ulid().expect("Cannot generate Ulid"); + let schema_variant_id = change_set.generate_ulid().expect("Unable to generate Ulid"); let initial_schema_variant_node_index = graph .add_node( NodeWeight::new_content( @@ -1475,7 +1678,7 @@ mod test { .expect("Unable to create NodeWeight"), ) .expect("Unable to add schema variant"); - let component_id = change_set.generate_ulid().expect("Cannot generate Ulid"); + let component_id = change_set.generate_ulid().expect("Unable to generate Ulid"); let initial_component_node_index = graph .add_node( NodeWeight::new_content( @@ -1491,7 +1694,6 @@ mod test { graph .add_edge( - change_set, graph.root_index, EdgeWeight::new(change_set, EdgeWeightKind::Use) .expect("Unable to create EdgeWeight"), @@ -1500,7 +1702,6 @@ mod test { .expect("Unable to add root -> component edge"); graph .add_edge( - change_set, graph.root_index, EdgeWeight::new(change_set, EdgeWeightKind::Use) .expect("Unable to create EdgeWeight"), @@ -1509,10 +1710,9 @@ mod test { .expect("Unable to add root -> schema edge"); graph .add_edge( - change_set, graph .get_node_index_by_id(schema_id) - .expect("Cannot find NodeIndex"), + .expect("Unable to find NodeIndex"), EdgeWeight::new(change_set, EdgeWeightKind::Use) .expect("Unable to create EdgeWeight"), initial_schema_variant_node_index, @@ -1520,15 +1720,14 @@ mod test { .expect("Unable to add schema -> schema variant edge"); graph .add_edge( - change_set, graph .get_node_index_by_id(component_id) - .expect("Cannot find NodeIndex"), + .expect("Unable to find NodeIndex"), EdgeWeight::new(change_set, EdgeWeightKind::Use) .expect("Unable to create EdgeWeight"), graph .get_node_index_by_id(schema_variant_id) - 
.expect("Cannot find NodeIndex"), + .expect("Unable to find NodeIndex"), ) .expect("Unable to add component -> schema variant edge"); @@ -1537,15 +1736,14 @@ mod test { // This should cause a cycle. graph .add_edge( - change_set, graph .get_node_index_by_id(schema_variant_id) - .expect("Cannot find NodeIndex"), + .expect("Unable to find NodeIndex"), EdgeWeight::new(change_set, EdgeWeightKind::Use) .expect("Unable to create EdgeWeight"), graph .get_node_index_by_id(component_id) - .expect("Cannot find NodeIndex"), + .expect("Unable to find NodeIndex"), ) .expect_err("Created a cycle"); @@ -1559,7 +1757,7 @@ mod test { let mut graph = WorkspaceSnapshotGraph::new(change_set) .expect("Unable to create WorkspaceSnapshotGraph"); - let schema_id = change_set.generate_ulid().expect("Cannot generate Ulid"); + let schema_id = change_set.generate_ulid().expect("Unable to generate Ulid"); let schema_index = graph .add_node( NodeWeight::new_content( @@ -1570,7 +1768,7 @@ mod test { .expect("Unable to create NodeWeight"), ) .expect("Unable to add schema"); - let schema_variant_id = change_set.generate_ulid().expect("Cannot generate Ulid"); + let schema_variant_id = change_set.generate_ulid().expect("Unable to generate Ulid"); let schema_variant_index = graph .add_node( NodeWeight::new_content( @@ -1583,7 +1781,7 @@ mod test { .expect("Unable to create NodeWeight"), ) .expect("Unable to add schema variant"); - let component_id = change_set.generate_ulid().expect("Cannot generate Ulid"); + let component_id = change_set.generate_ulid().expect("Unable to generate Ulid"); let component_index = graph .add_node( NodeWeight::new_content( @@ -1597,7 +1795,6 @@ mod test { graph .add_edge( - change_set, graph.root_index, EdgeWeight::new(change_set, EdgeWeightKind::Use) .expect("Unable to create EdgeWeight"), @@ -1606,7 +1803,6 @@ mod test { .expect("Unable to add root -> component edge"); graph .add_edge( - change_set, graph.root_index, EdgeWeight::new(change_set, EdgeWeightKind::Use) 
.expect("Unable to create EdgeWeight"), @@ -1615,10 +1811,9 @@ mod test { .expect("Unable to add root -> schema edge"); graph .add_edge( - change_set, graph .get_node_index_by_id(schema_id) - .expect("Cannot get NodeIndex"), + .expect("Unable to get NodeIndex"), EdgeWeight::new(change_set, EdgeWeightKind::Use) .expect("Unable to create EdgeWeight"), schema_variant_index, @@ -1626,15 +1821,14 @@ mod test { .expect("Unable to add schema -> schema variant edge"); graph .add_edge( - change_set, graph .get_node_index_by_id(component_id) - .expect("Cannot get NodeIndex"), + .expect("Unable to get NodeIndex"), EdgeWeight::new(change_set, EdgeWeightKind::Use) .expect("Unable to create EdgeWeight"), graph .get_node_index_by_id(schema_variant_id) - .expect("Cannot get NodeIndex"), + .expect("Unable to get NodeIndex"), ) .expect("Unable to add component -> schema variant edge"); @@ -1722,7 +1916,7 @@ mod test { let schema_id = initial_change_set .generate_ulid() - .expect("Cannot generate Ulid"); + .expect("Unable to generate Ulid"); let schema_index = initial_graph .add_node( NodeWeight::new_content( @@ -1735,7 +1929,7 @@ mod test { .expect("Unable to add Schema A"); let schema_variant_id = initial_change_set .generate_ulid() - .expect("Cannot generate Ulid"); + .expect("Unable to generate Ulid"); let schema_variant_index = initial_graph .add_node( NodeWeight::new_content( @@ -1749,7 +1943,6 @@ mod test { initial_graph .add_edge( - initial_change_set, initial_graph.root_index, EdgeWeight::new(initial_change_set, EdgeWeightKind::Use) .expect("Unable to create EdgeWeight"), @@ -1758,10 +1951,9 @@ mod test { .expect("Unable to add root -> schema edge"); initial_graph .add_edge( - initial_change_set, initial_graph .get_node_index_by_id(schema_id) - .expect("Cannot get NodeIndex"), + .expect("Unable to get NodeIndex"), EdgeWeight::new(initial_change_set, EdgeWeightKind::Use) .expect("Unable to create EdgeWeight"), schema_variant_index, @@ -1776,7 +1968,7 @@ mod test { let 
component_id = new_change_set .generate_ulid() - .expect("Cannot generate Ulid"); + .expect("Unable to generate Ulid"); let component_index = new_graph .add_node( NodeWeight::new_content( @@ -1789,7 +1981,6 @@ mod test { .expect("Unable to add Component A"); new_graph .add_edge( - new_change_set, new_graph.root_index, EdgeWeight::new(new_change_set, EdgeWeightKind::Use) .expect("Unable to create EdgeWeight"), @@ -1798,15 +1989,14 @@ mod test { .expect("Unable to add root -> component edge"); new_graph .add_edge( - new_change_set, new_graph .get_node_index_by_id(component_id) - .expect("Cannot get NodeIndex"), + .expect("Unable to get NodeIndex"), EdgeWeight::new(new_change_set, EdgeWeightKind::Use) .expect("Unable to create EdgeWeight"), new_graph .get_node_index_by_id(schema_variant_id) - .expect("Cannot get NodeIndex"), + .expect("Unable to get NodeIndex"), ) .expect("Unable to add component -> schema variant edge"); @@ -1829,7 +2019,7 @@ mod test { let schema_id = base_change_set .generate_ulid() - .expect("Cannot generate Ulid"); + .expect("Unable to generate Ulid"); let schema_index = base_graph .add_node( NodeWeight::new_content( @@ -1842,7 +2032,7 @@ mod test { .expect("Unable to add Schema A"); let schema_variant_id = base_change_set .generate_ulid() - .expect("Cannot generate Ulid"); + .expect("Unable to generate Ulid"); let schema_variant_index = base_graph .add_node( NodeWeight::new_content( @@ -1856,7 +2046,6 @@ mod test { base_graph .add_edge( - base_change_set, base_graph.root_index, EdgeWeight::new(base_change_set, EdgeWeightKind::Use) .expect("Unable to create EdgeWeight"), @@ -1865,10 +2054,9 @@ mod test { .expect("Unable to add root -> schema edge"); base_graph .add_edge( - base_change_set, base_graph .get_node_index_by_id(schema_id) - .expect("Cannot get NodeIndex"), + .expect("Unable to get NodeIndex"), EdgeWeight::new(base_change_set, EdgeWeightKind::Use) .expect("Unable to create EdgeWeight"), schema_variant_index, @@ -1884,7 +2072,7 @@ mod 
test { let new_onto_component_id = base_change_set .generate_ulid() - .expect("Cannot generate Ulid"); + .expect("Unable to generate Ulid"); let new_onto_component_index = base_graph .add_node( NodeWeight::new_content( @@ -1897,7 +2085,6 @@ mod test { .expect("Unable to add Component B"); let new_onto_root_component_edge_index = base_graph .add_edge( - base_change_set, base_graph.root_index, EdgeWeight::new(base_change_set, EdgeWeightKind::Use) .expect("Unable to create EdgeWeight"), @@ -1906,7 +2093,6 @@ mod test { .expect("Unable to add root -> component edge"); base_graph .add_edge( - base_change_set, base_graph .get_node_index_by_id(new_onto_component_id) .expect("Unable to get NodeIndex"), @@ -1938,7 +2124,7 @@ mod test { }] => { assert_eq!(new_graph.root_index, *source); assert_eq!(new_onto_component_index, *destination); - assert_eq!(EdgeWeightKind::Use, edge_weight.kind()); + assert_eq!(&EdgeWeightKind::Use, edge_weight.kind()); } other => panic!("Unexpected updates: {:?}", other), } @@ -1953,7 +2139,7 @@ mod test { let schema_id = base_change_set .generate_ulid() - .expect("Cannot generate Ulid"); + .expect("Unable to generate Ulid"); let schema_index = base_graph .add_node( NodeWeight::new_content( @@ -1966,7 +2152,7 @@ mod test { .expect("Unable to add Schema A"); let schema_variant_id = base_change_set .generate_ulid() - .expect("Cannot generate Ulid"); + .expect("Unable to generate Ulid"); let schema_variant_index = base_graph .add_node( NodeWeight::new_content( @@ -1980,7 +2166,6 @@ mod test { base_graph .add_edge( - base_change_set, base_graph.root_index, EdgeWeight::new(base_change_set, EdgeWeightKind::Use) .expect("Unable to create EdgeWeight"), @@ -1989,10 +2174,9 @@ mod test { .expect("Unable to add root -> schema edge"); base_graph .add_edge( - base_change_set, base_graph .get_node_index_by_id(schema_id) - .expect("Cannot get NodeIndex"), + .expect("Unable to get NodeIndex"), EdgeWeight::new(base_change_set, EdgeWeightKind::Use) .expect("Unable 
to create EdgeWeight"), schema_variant_index, @@ -2008,7 +2192,7 @@ mod test { let component_id = new_change_set .generate_ulid() - .expect("Cannot generate Ulid"); + .expect("Unable to generate Ulid"); let component_index = new_graph .add_node( NodeWeight::new_content( @@ -2021,7 +2205,6 @@ mod test { .expect("Unable to add Component A"); new_graph .add_edge( - new_change_set, new_graph.root_index, EdgeWeight::new(new_change_set, EdgeWeightKind::Use) .expect("Unable to create EdgeWeight"), @@ -2030,15 +2213,14 @@ mod test { .expect("Unable to add root -> component edge"); new_graph .add_edge( - new_change_set, new_graph .get_node_index_by_id(component_id) - .expect("Cannot get NodeIndex"), + .expect("Unable to get NodeIndex"), EdgeWeight::new(new_change_set, EdgeWeightKind::Use) .expect("Unable to create EdgeWeight"), new_graph .get_node_index_by_id(schema_variant_id) - .expect("Cannot get NodeIndex"), + .expect("Unable to get NodeIndex"), ) .expect("Unable to add component -> schema variant edge"); @@ -2047,7 +2229,7 @@ mod test { let new_onto_component_id = base_change_set .generate_ulid() - .expect("Cannot generate Ulid"); + .expect("Unable to generate Ulid"); let new_onto_component_index = base_graph .add_node( NodeWeight::new_content( @@ -2060,7 +2242,6 @@ mod test { .expect("Unable to add Component B"); base_graph .add_edge( - base_change_set, base_graph.root_index, EdgeWeight::new(base_change_set, EdgeWeightKind::Use) .expect("Unable to create EdgeWeight"), @@ -2069,7 +2250,6 @@ mod test { .expect("Unable to add root -> component edge"); base_graph .add_edge( - base_change_set, base_graph .get_node_index_by_id(new_onto_component_id) .expect("Unable to get NodeIndex"), @@ -2101,7 +2281,7 @@ mod test { }] => { assert_eq!(new_graph.root_index, *source); assert_eq!(new_onto_component_index, *destination); - assert_eq!(EdgeWeightKind::Use, edge_weight.kind()); + assert_eq!(&EdgeWeightKind::Use, edge_weight.kind()); } other => panic!("Unexpected updates: {:?}", 
other), } @@ -2116,7 +2296,7 @@ mod test { let schema_id = base_change_set .generate_ulid() - .expect("Cannot generate Ulid"); + .expect("Unable to generate Ulid"); let schema_index = base_graph .add_node( NodeWeight::new_content( @@ -2129,7 +2309,7 @@ mod test { .expect("Unable to add Schema A"); let schema_variant_id = base_change_set .generate_ulid() - .expect("Cannot generate Ulid"); + .expect("Unable to generate Ulid"); let schema_variant_index = base_graph .add_node( NodeWeight::new_content( @@ -2143,7 +2323,6 @@ mod test { base_graph .add_edge( - base_change_set, base_graph.root_index, EdgeWeight::new(base_change_set, EdgeWeightKind::Use) .expect("Unable to create EdgeWeight"), @@ -2152,10 +2331,9 @@ mod test { .expect("Unable to add root -> schema edge"); base_graph .add_edge( - base_change_set, base_graph .get_node_index_by_id(schema_id) - .expect("Cannot get NodeIndex"), + .expect("Unable to get NodeIndex"), EdgeWeight::new(base_change_set, EdgeWeightKind::Use) .expect("Unable to create EdgeWeight"), schema_variant_index, @@ -2164,7 +2342,7 @@ mod test { let component_id = base_change_set .generate_ulid() - .expect("Cannot generate Ulid"); + .expect("Unable to generate Ulid"); let component_index = base_graph .add_node( NodeWeight::new_content( @@ -2177,7 +2355,6 @@ mod test { .expect("Unable to add Component A"); base_graph .add_edge( - base_change_set, base_graph.root_index, EdgeWeight::new(base_change_set, EdgeWeightKind::Use) .expect("Unable to create EdgeWeight"), @@ -2186,15 +2363,14 @@ mod test { .expect("Unable to add root -> component edge"); base_graph .add_edge( - base_change_set, base_graph .get_node_index_by_id(component_id) - .expect("Cannot get NodeIndex"), + .expect("Unable to get NodeIndex"), EdgeWeight::new(base_change_set, EdgeWeightKind::Use) .expect("Unable to create EdgeWeight"), base_graph .get_node_index_by_id(schema_variant_id) - .expect("Cannot get NodeIndex"), + .expect("Unable to get NodeIndex"), ) .expect("Unable to add 
component -> schema variant edge"); @@ -2257,7 +2433,7 @@ mod test { let schema_id = base_change_set .generate_ulid() - .expect("Cannot generate Ulid"); + .expect("Unable to generate Ulid"); let schema_index = base_graph .add_node( NodeWeight::new_content( @@ -2270,7 +2446,7 @@ mod test { .expect("Unable to add Schema A"); let schema_variant_id = base_change_set .generate_ulid() - .expect("Cannot generate Ulid"); + .expect("Unable to generate Ulid"); let schema_variant_index = base_graph .add_node( NodeWeight::new_content( @@ -2284,7 +2460,6 @@ mod test { base_graph .add_edge( - base_change_set, base_graph.root_index, EdgeWeight::new(base_change_set, EdgeWeightKind::Use) .expect("Unable to create EdgeWeight"), @@ -2293,10 +2468,9 @@ mod test { .expect("Unable to add root -> schema edge"); base_graph .add_edge( - base_change_set, base_graph .get_node_index_by_id(schema_id) - .expect("Cannot get NodeIndex"), + .expect("Unable to get NodeIndex"), EdgeWeight::new(base_change_set, EdgeWeightKind::Use) .expect("Unable to create EdgeWeight"), schema_variant_index, @@ -2305,7 +2479,7 @@ mod test { let component_id = base_change_set .generate_ulid() - .expect("Cannot generate Ulid"); + .expect("Unable to generate Ulid"); let component_index = base_graph .add_node( NodeWeight::new_content( @@ -2318,7 +2492,6 @@ mod test { .expect("Unable to add Component A"); base_graph .add_edge( - base_change_set, base_graph.root_index, EdgeWeight::new(base_change_set, EdgeWeightKind::Use) .expect("Unable to create EdgeWeight"), @@ -2327,15 +2500,14 @@ mod test { .expect("Unable to add root -> component edge"); base_graph .add_edge( - base_change_set, base_graph .get_node_index_by_id(component_id) - .expect("Cannot get NodeIndex"), + .expect("Unable to get NodeIndex"), EdgeWeight::new(base_change_set, EdgeWeightKind::Use) .expect("Unable to create EdgeWeight"), base_graph .get_node_index_by_id(schema_variant_id) - .expect("Cannot get NodeIndex"), + .expect("Unable to get NodeIndex"), ) 
.expect("Unable to add component -> schema variant edge"); @@ -2399,7 +2571,7 @@ mod test { // Docker Image Schema let docker_image_schema_id = base_change_set .generate_ulid() - .expect("Cannot generate Ulid"); + .expect("Unable to generate Ulid"); let docker_image_schema_index = base_graph .add_node( NodeWeight::new_content( @@ -2412,7 +2584,6 @@ mod test { .expect("Unable to add Schema A"); base_graph .add_edge( - base_change_set, base_graph.root_index, EdgeWeight::new(base_change_set, EdgeWeightKind::Use) .expect("Unable to create EdgeWeight"), @@ -2423,7 +2594,7 @@ mod test { // Docker Image Schema Variant let docker_image_schema_variant_id = base_change_set .generate_ulid() - .expect("Cannot generate Ulid"); + .expect("Unable to generate Ulid"); let docker_image_schema_variant_index = base_graph .add_node( NodeWeight::new_content( @@ -2436,10 +2607,9 @@ mod test { .expect("Unable to add Schema Variant A"); base_graph .add_edge( - base_change_set, base_graph .get_node_index_by_id(docker_image_schema_id) - .expect("Cannot get NodeIndex"), + .expect("Unable to get NodeIndex"), EdgeWeight::new(base_change_set, EdgeWeightKind::Use) .expect("Unable to create EdgeWeight"), docker_image_schema_variant_index, @@ -2449,7 +2619,7 @@ mod test { // Nginx Docker Image Component let nginx_docker_image_component_id = base_change_set .generate_ulid() - .expect("Cannot generate Ulid"); + .expect("Unable to generate Ulid"); let nginx_docker_image_component_index = base_graph .add_node( NodeWeight::new_content( @@ -2462,7 +2632,6 @@ mod test { .expect("Unable to add Component A"); base_graph .add_edge( - base_change_set, base_graph.root_index, EdgeWeight::new(base_change_set, EdgeWeightKind::Use) .expect("Unable to create EdgeWeight"), @@ -2471,22 +2640,21 @@ mod test { .expect("Unable to add root -> component edge"); base_graph .add_edge( - base_change_set, base_graph .get_node_index_by_id(nginx_docker_image_component_id) - .expect("Cannot get NodeIndex"), + .expect("Unable to 
get NodeIndex"), EdgeWeight::new(base_change_set, EdgeWeightKind::Use) .expect("Unable to create EdgeWeight"), base_graph .get_node_index_by_id(docker_image_schema_variant_id) - .expect("Cannot get NodeIndex"), + .expect("Unable to get NodeIndex"), ) .expect("Unable to add component -> schema variant edge"); // Alpine Component let alpine_component_id = base_change_set .generate_ulid() - .expect("Cannot generate Ulid"); + .expect("Unable to generate Ulid"); let alpine_component_index = base_graph .add_node( NodeWeight::new_content( @@ -2499,7 +2667,6 @@ mod test { .expect("Unable to add Component A"); base_graph .add_edge( - base_change_set, base_graph.root_index, EdgeWeight::new(base_change_set, EdgeWeightKind::Use) .expect("Unable to create EdgeWeight"), @@ -2508,22 +2675,21 @@ mod test { .expect("Unable to add root -> component edge"); base_graph .add_edge( - base_change_set, base_graph .get_node_index_by_id(alpine_component_id) - .expect("Cannot get NodeIndex"), + .expect("Unable to get NodeIndex"), EdgeWeight::new(base_change_set, EdgeWeightKind::Use) .expect("Unable to create EdgeWeight"), base_graph .get_node_index_by_id(docker_image_schema_variant_id) - .expect("Cannot get NodeIndex"), + .expect("Unable to get NodeIndex"), ) .expect("Unable to add component -> schema variant edge"); // Butane Schema let butane_schema_id = base_change_set .generate_ulid() - .expect("Cannot generate Ulid"); + .expect("Unable to generate Ulid"); let butane_schema_index = base_graph .add_node( NodeWeight::new_content( @@ -2536,7 +2702,6 @@ mod test { .expect("Unable to add Schema A"); base_graph .add_edge( - base_change_set, base_graph.root_index, EdgeWeight::new(base_change_set, EdgeWeightKind::Use) .expect("Unable to create EdgeWeight"), @@ -2547,7 +2712,7 @@ mod test { // Butane Schema Variant let butane_schema_variant_id = base_change_set .generate_ulid() - .expect("Cannot generate Ulid"); + .expect("Unable to generate Ulid"); let butane_schema_variant_index = base_graph 
.add_node( NodeWeight::new_content( @@ -2560,10 +2725,9 @@ mod test { .expect("Unable to add Schema Variant A"); base_graph .add_edge( - base_change_set, base_graph .get_node_index_by_id(butane_schema_id) - .expect("Cannot get NodeIndex"), + .expect("Unable to get NodeIndex"), EdgeWeight::new(base_change_set, EdgeWeightKind::Use) .expect("Unable to create EdgeWeight"), butane_schema_variant_index, @@ -2573,7 +2737,7 @@ mod test { // Nginx Butane Component let nginx_butane_component_id = base_change_set .generate_ulid() - .expect("Cannot generate Ulid"); + .expect("Unable to generate Ulid"); let nginx_butane_node_index = base_graph .add_node( NodeWeight::new_content( @@ -2586,7 +2750,6 @@ mod test { .expect("Unable to add Schema Variant A"); base_graph .add_edge( - base_change_set, base_graph.root_index, EdgeWeight::new(base_change_set, EdgeWeightKind::Use) .expect("Unable to create EdgeWeight"), @@ -2595,15 +2758,14 @@ mod test { .expect("Unable to add root -> component edge"); base_graph .add_edge( - base_change_set, base_graph .get_node_index_by_id(nginx_butane_component_id) - .expect("Cannot get NodeIndex"), + .expect("Unable to get NodeIndex"), EdgeWeight::new(base_change_set, EdgeWeightKind::Use) .expect("Unable to create EdgeWeight"), base_graph .get_node_index_by_id(butane_schema_variant_id) - .expect("Cannot get NodeIndex"), + .expect("Unable to get NodeIndex"), ) .expect("Unable to add component -> schema variant edge"); @@ -2709,7 +2871,7 @@ mod test { let mut graph = WorkspaceSnapshotGraph::new(change_set) .expect("Unable to create WorkspaceSnapshotGraph"); - let schema_id = change_set.generate_ulid().expect("Cannot generate Ulid"); + let schema_id = change_set.generate_ulid().expect("Unable to generate Ulid"); let schema_index = graph .add_node( NodeWeight::new_content( @@ -2722,7 +2884,7 @@ mod test { .expect("Unable to create NodeWeight"), ) .expect("Unable to add schema"); - let schema_variant_id = change_set.generate_ulid().expect("Cannot generate 
Ulid"); + let schema_variant_id = change_set.generate_ulid().expect("Unable to generate Ulid"); let schema_variant_index = graph .add_node( NodeWeight::new_content( @@ -2738,7 +2900,6 @@ mod test { graph .add_edge( - change_set, graph.root_index, EdgeWeight::new(change_set, EdgeWeightKind::Use) .expect("Unable to create EdgeWeight"), @@ -2747,17 +2908,16 @@ mod test { .expect("Unable to add root -> schema edge"); graph .add_edge( - change_set, graph .get_node_index_by_id(schema_id) - .expect("Cannot get NodeIndex"), + .expect("Unable to get NodeIndex"), EdgeWeight::new(change_set, EdgeWeightKind::Use) .expect("Unable to create EdgeWeight"), schema_variant_index, ) .expect("Unable to add schema -> schema variant edge"); - let func_id = change_set.generate_ulid().expect("Cannot generate Ulid"); + let func_id = change_set.generate_ulid().expect("Unable to generate Ulid"); let func_index = graph .add_node( NodeWeight::new_content( @@ -2772,7 +2932,6 @@ mod test { .expect("Unable to add func"); graph .add_edge( - change_set, graph.root_index, EdgeWeight::new(change_set, EdgeWeightKind::Use) .expect("Unable to create EdgeWeight"), @@ -2780,7 +2939,7 @@ mod test { ) .expect("Unable to add root -> func edge"); - let prop_id = change_set.generate_ulid().expect("Cannot generate Ulid"); + let prop_id = change_set.generate_ulid().expect("Unable to generate Ulid"); let prop_index = graph .add_ordered_node( change_set, @@ -2796,25 +2955,24 @@ mod test { .expect("Unable to add prop"); graph .add_edge( - change_set, graph .get_node_index_by_id(schema_variant_id) - .expect("Cannot get NodeIndex"), + .expect("Unable to get NodeIndex"), EdgeWeight::new(change_set, EdgeWeightKind::Use) .expect("Unable to create EdgeWeight"), prop_index, ) .expect("Unable to add schema variant -> prop edge"); graph - .add_unordered_edge( + .add_edge( graph .get_node_index_by_id(prop_id) - .expect("Cannot get NodeIndex"), + .expect("Unable to get NodeIndex"), EdgeWeight::new(change_set, 
EdgeWeightKind::Use) .expect("Unable to create EdgeWeight"), graph .get_node_index_by_id(func_id) - .expect("Cannot get NodeIndex"), + .expect("Unable to get NodeIndex"), ) .expect("Unable to add prop -> func edge"); graph.cleanup(); @@ -2822,8 +2980,7 @@ mod test { let ordered_prop_1_id = change_set.generate_ulid().expect("Unable to generate Ulid"); let ordered_prop_1_index = graph - .add_ordered_node( - change_set, + .add_node( NodeWeight::new_content( change_set, ordered_prop_1_id, @@ -2835,7 +2992,7 @@ mod test { ) .expect("Unable to add ordered prop"); graph - .add_edge( + .add_ordered_edge( change_set, graph .get_node_index_by_id(prop_id) @@ -2848,8 +3005,7 @@ mod test { let ordered_prop_2_id = change_set.generate_ulid().expect("Unable to generate Ulid"); let ordered_prop_2_index = graph - .add_ordered_node( - change_set, + .add_node( NodeWeight::new_content( change_set, ordered_prop_2_id, @@ -2861,7 +3017,7 @@ mod test { ) .expect("Unable to add ordered prop"); graph - .add_edge( + .add_ordered_edge( change_set, graph .get_node_index_by_id(prop_id) @@ -2874,8 +3030,7 @@ mod test { let ordered_prop_3_id = change_set.generate_ulid().expect("Unable to generate Ulid"); let ordered_prop_3_index = graph - .add_ordered_node( - change_set, + .add_node( NodeWeight::new_content( change_set, ordered_prop_3_id, @@ -2887,7 +3042,7 @@ mod test { ) .expect("Unable to add ordered prop"); graph - .add_edge( + .add_ordered_edge( change_set, graph .get_node_index_by_id(prop_id) @@ -2912,7 +3067,8 @@ mod test { .get_node_index_by_id(prop_id) .expect("Unable to get prop NodeIndex") ) - .expect("Unable to find ordered cchildren for node") + .expect("Unable to find ordered children for node") + .expect("Node is not an ordered node") ); } @@ -2923,7 +3079,7 @@ mod test { let mut graph = WorkspaceSnapshotGraph::new(change_set) .expect("Unable to create WorkspaceSnapshotGraph"); - let schema_id = change_set.generate_ulid().expect("Cannot generate Ulid"); + let schema_id = 
change_set.generate_ulid().expect("Unable to generate Ulid"); let schema_index = graph .add_node( NodeWeight::new_content( @@ -2936,7 +3092,7 @@ mod test { .expect("Unable to create NodeWeight"), ) .expect("Unable to add schema"); - let schema_variant_id = change_set.generate_ulid().expect("Cannot generate Ulid"); + let schema_variant_id = change_set.generate_ulid().expect("Unable to generate Ulid"); let schema_variant_index = graph .add_node( NodeWeight::new_content( @@ -2952,7 +3108,6 @@ mod test { graph .add_edge( - change_set, graph.root_index, EdgeWeight::new(change_set, EdgeWeightKind::Use) .expect("Unable to create EdgeWeight"), @@ -2961,17 +3116,16 @@ mod test { .expect("Unable to add root -> schema edge"); graph .add_edge( - change_set, graph .get_node_index_by_id(schema_id) - .expect("Cannot get NodeIndex"), + .expect("Unable to get NodeIndex"), EdgeWeight::new(change_set, EdgeWeightKind::Use) .expect("Unable to create EdgeWeight"), schema_variant_index, ) .expect("Unable to add schema -> schema variant edge"); - let func_id = change_set.generate_ulid().expect("Cannot generate Ulid"); + let func_id = change_set.generate_ulid().expect("Unable to generate Ulid"); let func_index = graph .add_node( NodeWeight::new_content( @@ -2986,7 +3140,6 @@ mod test { .expect("Unable to add func"); graph .add_edge( - change_set, graph.root_index, EdgeWeight::new(change_set, EdgeWeightKind::Use) .expect("Unable to create EdgeWeight"), @@ -2994,7 +3147,7 @@ mod test { ) .expect("Unable to add root -> func edge"); - let prop_id = change_set.generate_ulid().expect("Cannot generate Ulid"); + let prop_id = change_set.generate_ulid().expect("Unable to generate Ulid"); let prop_index = graph .add_ordered_node( change_set, @@ -3010,25 +3163,24 @@ mod test { .expect("Unable to add prop"); graph .add_edge( - change_set, graph .get_node_index_by_id(schema_variant_id) - .expect("Cannot get NodeIndex"), + .expect("Unable to get NodeIndex"), EdgeWeight::new(change_set, 
EdgeWeightKind::Use) .expect("Unable to create EdgeWeight"), prop_index, ) .expect("Unable to add schema variant -> prop edge"); graph - .add_unordered_edge( + .add_edge( graph .get_node_index_by_id(prop_id) - .expect("Cannot get NodeIndex"), + .expect("Unable to get NodeIndex"), EdgeWeight::new(change_set, EdgeWeightKind::Use) .expect("Unable to create EdgeWeight"), graph .get_node_index_by_id(func_id) - .expect("Cannot get NodeIndex"), + .expect("Unable to get NodeIndex"), ) .expect("Unable to add prop -> func edge"); graph.cleanup(); @@ -3036,8 +3188,7 @@ mod test { let ordered_prop_1_id = change_set.generate_ulid().expect("Unable to generate Ulid"); let ordered_prop_1_index = graph - .add_ordered_node( - change_set, + .add_node( NodeWeight::new_content( change_set, ordered_prop_1_id, @@ -3049,7 +3200,7 @@ mod test { ) .expect("Unable to add ordered prop"); graph - .add_edge( + .add_ordered_edge( change_set, graph .get_node_index_by_id(prop_id) @@ -3062,8 +3213,7 @@ mod test { let ordered_prop_2_id = change_set.generate_ulid().expect("Unable to generate Ulid"); let ordered_prop_2_index = graph - .add_ordered_node( - change_set, + .add_node( NodeWeight::new_content( change_set, ordered_prop_2_id, @@ -3075,7 +3225,7 @@ mod test { ) .expect("Unable to add ordered prop"); graph - .add_edge( + .add_ordered_edge( change_set, graph .get_node_index_by_id(prop_id) @@ -3088,8 +3238,7 @@ mod test { let ordered_prop_3_id = change_set.generate_ulid().expect("Unable to generate Ulid"); let ordered_prop_3_index = graph - .add_ordered_node( - change_set, + .add_node( NodeWeight::new_content( change_set, ordered_prop_3_id, @@ -3101,7 +3250,7 @@ mod test { ) .expect("Unable to add ordered prop"); graph - .add_edge( + .add_ordered_edge( change_set, graph .get_node_index_by_id(prop_id) @@ -3114,8 +3263,7 @@ mod test { let ordered_prop_4_id = change_set.generate_ulid().expect("Unable to generate Ulid"); let ordered_prop_4_index = graph - .add_ordered_node( - change_set, + .add_node( 
NodeWeight::new_content( change_set, ordered_prop_4_id, @@ -3127,7 +3275,7 @@ mod test { ) .expect("Unable to add ordered prop"); graph - .add_edge( + .add_ordered_edge( change_set, graph .get_node_index_by_id(prop_id) @@ -3155,6 +3303,7 @@ mod test { .expect("Unable to get prop NodeIndex") ) .expect("Unable to find ordered children for node") + .expect("Node is not an ordered node") ); let new_order = vec![ @@ -3182,6 +3331,7 @@ mod test { .expect("Unable to get prop NodeIndex") ) .expect("Unable to find ordered children for node") + .expect("Node is not an ordered node") ); } @@ -3192,7 +3342,7 @@ mod test { let mut graph = WorkspaceSnapshotGraph::new(change_set) .expect("Unable to create WorkspaceSnapshotGraph"); - let schema_id = change_set.generate_ulid().expect("Cannot generate Ulid"); + let schema_id = change_set.generate_ulid().expect("Unable to generate Ulid"); let schema_index = graph .add_node( NodeWeight::new_content( @@ -3205,7 +3355,7 @@ mod test { .expect("Unable to create NodeWeight"), ) .expect("Unable to add schema"); - let schema_variant_id = change_set.generate_ulid().expect("Cannot generate Ulid"); + let schema_variant_id = change_set.generate_ulid().expect("Unable to generate Ulid"); let schema_variant_index = graph .add_node( NodeWeight::new_content( @@ -3221,7 +3371,6 @@ mod test { graph .add_edge( - change_set, graph.root_index, EdgeWeight::new(change_set, EdgeWeightKind::Use) .expect("Unable to create EdgeWeight"), @@ -3230,17 +3379,16 @@ mod test { .expect("Unable to add root -> schema edge"); graph .add_edge( - change_set, graph .get_node_index_by_id(schema_id) - .expect("Cannot get NodeIndex"), + .expect("Unable to get NodeIndex"), EdgeWeight::new(change_set, EdgeWeightKind::Use) .expect("Unable to create EdgeWeight"), schema_variant_index, ) .expect("Unable to add schema -> schema variant edge"); - let func_id = change_set.generate_ulid().expect("Cannot generate Ulid"); + let func_id = change_set.generate_ulid().expect("Unable to 
generate Ulid"); let func_index = graph .add_node( NodeWeight::new_content( @@ -3255,7 +3403,6 @@ mod test { .expect("Unable to add func"); graph .add_edge( - change_set, graph.root_index, EdgeWeight::new(change_set, EdgeWeightKind::Use) .expect("Unable to create EdgeWeight"), @@ -3263,7 +3410,7 @@ mod test { ) .expect("Unable to add root -> func edge"); - let prop_id = change_set.generate_ulid().expect("Cannot generate Ulid"); + let prop_id = change_set.generate_ulid().expect("Unable to generate Ulid"); let prop_index = graph .add_ordered_node( change_set, @@ -3279,25 +3426,24 @@ mod test { .expect("Unable to add prop"); graph .add_edge( - change_set, graph .get_node_index_by_id(schema_variant_id) - .expect("Cannot get NodeIndex"), + .expect("Unable to get NodeIndex"), EdgeWeight::new(change_set, EdgeWeightKind::Use) .expect("Unable to create EdgeWeight"), prop_index, ) .expect("Unable to add schema variant -> prop edge"); graph - .add_unordered_edge( + .add_edge( graph .get_node_index_by_id(prop_id) - .expect("Cannot get NodeIndex"), + .expect("Unable to get NodeIndex"), EdgeWeight::new(change_set, EdgeWeightKind::Use) .expect("Unable to create EdgeWeight"), graph .get_node_index_by_id(func_id) - .expect("Cannot get NodeIndex"), + .expect("Unable to get NodeIndex"), ) .expect("Unable to add prop -> func edge"); graph.cleanup(); @@ -3305,8 +3451,7 @@ mod test { let ordered_prop_1_id = change_set.generate_ulid().expect("Unable to generate Ulid"); let ordered_prop_1_index = graph - .add_ordered_node( - change_set, + .add_node( NodeWeight::new_content( change_set, ordered_prop_1_id, @@ -3318,7 +3463,7 @@ mod test { ) .expect("Unable to add ordered prop"); graph - .add_edge( + .add_ordered_edge( change_set, graph .get_node_index_by_id(prop_id) @@ -3331,8 +3476,7 @@ mod test { let ordered_prop_2_id = change_set.generate_ulid().expect("Unable to generate Ulid"); let ordered_prop_2_index = graph - .add_ordered_node( - change_set, + .add_node( NodeWeight::new_content( 
change_set, ordered_prop_2_id, @@ -3344,7 +3488,7 @@ mod test { ) .expect("Unable to add ordered prop"); graph - .add_edge( + .add_ordered_edge( change_set, graph .get_node_index_by_id(prop_id) @@ -3357,8 +3501,7 @@ mod test { let ordered_prop_3_id = change_set.generate_ulid().expect("Unable to generate Ulid"); let ordered_prop_3_index = graph - .add_ordered_node( - change_set, + .add_node( NodeWeight::new_content( change_set, ordered_prop_3_id, @@ -3370,7 +3513,7 @@ mod test { ) .expect("Unable to add ordered prop"); graph - .add_edge( + .add_ordered_edge( change_set, graph .get_node_index_by_id(prop_id) @@ -3383,8 +3526,7 @@ mod test { let ordered_prop_4_id = change_set.generate_ulid().expect("Unable to generate Ulid"); let ordered_prop_4_index = graph - .add_ordered_node( - change_set, + .add_node( NodeWeight::new_content( change_set, ordered_prop_4_id, @@ -3396,7 +3538,7 @@ mod test { ) .expect("Unable to add ordered prop"); graph - .add_edge( + .add_ordered_edge( change_set, graph .get_node_index_by_id(prop_id) @@ -3424,6 +3566,7 @@ mod test { .expect("Unable to get prop NodeIndex") ) .expect("Unable to find ordered children for node") + .expect("Node is not an ordered node") ); graph @@ -3450,6 +3593,7 @@ mod test { .expect("Unable to get prop NodeIndex") ) .expect("Unable to find ordered children for node") + .expect("Node is not an ordered node") ); if let NodeWeight::Ordering(ordering_weight) = graph .get_node_weight( @@ -3482,7 +3626,7 @@ mod test { let schema_id = initial_change_set .generate_ulid() - .expect("Cannot generate Ulid"); + .expect("Unable to generate Ulid"); let schema_index = initial_graph .add_node( NodeWeight::new_content( @@ -3495,7 +3639,7 @@ mod test { .expect("Unable to add Schema A"); let schema_variant_id = initial_change_set .generate_ulid() - .expect("Cannot generate Ulid"); + .expect("Unable to generate Ulid"); let schema_variant_index = initial_graph .add_node( NodeWeight::new_content( @@ -3509,7 +3653,6 @@ mod test { 
initial_graph .add_edge( - initial_change_set, initial_graph.root_index, EdgeWeight::new(initial_change_set, EdgeWeightKind::Use) .expect("Unable to create EdgeWeight"), @@ -3518,10 +3661,9 @@ mod test { .expect("Unable to add root -> schema edge"); initial_graph .add_edge( - initial_change_set, initial_graph .get_node_index_by_id(schema_id) - .expect("Cannot get NodeIndex"), + .expect("Unable to get NodeIndex"), EdgeWeight::new(initial_change_set, EdgeWeightKind::Use) .expect("Unable to create EdgeWeight"), schema_variant_index, @@ -3546,7 +3688,6 @@ mod test { .expect("Unable to add container prop"); initial_graph .add_edge( - initial_change_set, initial_graph .get_node_index_by_id(schema_variant_id) .expect("Unable to get NodeIndex"), @@ -3572,7 +3713,7 @@ mod test { ) .expect("Unable to add ordered prop 1"); initial_graph - .add_edge( + .add_ordered_edge( initial_change_set, initial_graph .get_node_index_by_id(container_prop_id) @@ -3599,7 +3740,7 @@ mod test { ) .expect("Unable to add ordered prop 2"); initial_graph - .add_edge( + .add_ordered_edge( initial_change_set, initial_graph .get_node_index_by_id(container_prop_id) @@ -3626,7 +3767,7 @@ mod test { ) .expect("Unable to add ordered prop 3"); initial_graph - .add_edge( + .add_ordered_edge( initial_change_set, initial_graph .get_node_index_by_id(container_prop_id) @@ -3653,7 +3794,7 @@ mod test { ) .expect("Unable to add ordered prop 4"); initial_graph - .add_edge( + .add_ordered_edge( initial_change_set, initial_graph .get_node_index_by_id(container_prop_id) @@ -3687,7 +3828,7 @@ mod test { ) .expect("Unable to add ordered prop 5"); new_graph - .add_edge( + .add_ordered_edge( new_change_set, new_graph .get_node_index_by_id(container_prop_id) @@ -3718,7 +3859,7 @@ mod test { let schema_id = initial_change_set .generate_ulid() - .expect("Cannot generate Ulid"); + .expect("Unable to generate Ulid"); let schema_index = initial_graph .add_node( NodeWeight::new_content( @@ -3731,7 +3872,7 @@ mod test { 
.expect("Unable to add Schema A"); let schema_variant_id = initial_change_set .generate_ulid() - .expect("Cannot generate Ulid"); + .expect("Unable to generate Ulid"); let schema_variant_index = initial_graph .add_node( NodeWeight::new_content( @@ -3745,7 +3886,6 @@ mod test { initial_graph .add_edge( - initial_change_set, initial_graph.root_index, EdgeWeight::new(initial_change_set, EdgeWeightKind::Use) .expect("Unable to create EdgeWeight"), @@ -3754,10 +3894,9 @@ mod test { .expect("Unable to add root -> schema edge"); initial_graph .add_edge( - initial_change_set, initial_graph .get_node_index_by_id(schema_id) - .expect("Cannot get NodeIndex"), + .expect("Unable to get NodeIndex"), EdgeWeight::new(initial_change_set, EdgeWeightKind::Use) .expect("Unable to create EdgeWeight"), schema_variant_index, @@ -3782,7 +3921,6 @@ mod test { .expect("Unable to add container prop"); initial_graph .add_edge( - initial_change_set, initial_graph .get_node_index_by_id(schema_variant_id) .expect("Unable to get NodeIndex"), @@ -3808,7 +3946,7 @@ mod test { ) .expect("Unable to add ordered prop 1"); initial_graph - .add_edge( + .add_ordered_edge( initial_change_set, initial_graph .get_node_index_by_id(container_prop_id) @@ -3835,7 +3973,7 @@ mod test { ) .expect("Unable to add ordered prop 2"); initial_graph - .add_edge( + .add_ordered_edge( initial_change_set, initial_graph .get_node_index_by_id(container_prop_id) @@ -3862,7 +4000,7 @@ mod test { ) .expect("Unable to add ordered prop 3"); initial_graph - .add_edge( + .add_ordered_edge( initial_change_set, initial_graph .get_node_index_by_id(container_prop_id) @@ -3889,7 +4027,7 @@ mod test { ) .expect("Unable to add ordered prop 4"); initial_graph - .add_edge( + .add_ordered_edge( initial_change_set, initial_graph .get_node_index_by_id(container_prop_id) @@ -3924,7 +4062,7 @@ mod test { let new_edge_weight = EdgeWeight::new(initial_change_set, EdgeWeightKind::Use) .expect("Unable to create EdgeWeight"); initial_graph - 
.add_edge( + .add_ordered_edge( initial_change_set, initial_graph .get_node_index_by_id(container_prop_id) @@ -3984,7 +4122,7 @@ mod test { let schema_id = initial_change_set .generate_ulid() - .expect("Cannot generate Ulid"); + .expect("Unable to generate Ulid"); let schema_index = initial_graph .add_node( NodeWeight::new_content( @@ -3997,7 +4135,7 @@ mod test { .expect("Unable to add Schema A"); let schema_variant_id = initial_change_set .generate_ulid() - .expect("Cannot generate Ulid"); + .expect("Unable to generate Ulid"); let schema_variant_index = initial_graph .add_node( NodeWeight::new_content( @@ -4011,7 +4149,6 @@ mod test { initial_graph .add_edge( - initial_change_set, initial_graph.root_index, EdgeWeight::new(initial_change_set, EdgeWeightKind::Use) .expect("Unable to create EdgeWeight"), @@ -4020,10 +4157,9 @@ mod test { .expect("Unable to add root -> schema edge"); initial_graph .add_edge( - initial_change_set, initial_graph .get_node_index_by_id(schema_id) - .expect("Cannot get NodeIndex"), + .expect("Unable to get NodeIndex"), EdgeWeight::new(initial_change_set, EdgeWeightKind::Use) .expect("Unable to create EdgeWeight"), schema_variant_index, @@ -4048,7 +4184,6 @@ mod test { .expect("Unable to add container prop"); initial_graph .add_edge( - initial_change_set, initial_graph .get_node_index_by_id(schema_variant_id) .expect("Unable to get NodeIndex"), @@ -4074,7 +4209,7 @@ mod test { ) .expect("Unable to add ordered prop 1"); initial_graph - .add_edge( + .add_ordered_edge( initial_change_set, initial_graph .get_node_index_by_id(container_prop_id) @@ -4101,7 +4236,7 @@ mod test { ) .expect("Unable to add ordered prop 2"); initial_graph - .add_edge( + .add_ordered_edge( initial_change_set, initial_graph .get_node_index_by_id(container_prop_id) @@ -4128,7 +4263,7 @@ mod test { ) .expect("Unable to add ordered prop 3"); initial_graph - .add_edge( + .add_ordered_edge( initial_change_set, initial_graph .get_node_index_by_id(container_prop_id) @@ 
-4155,7 +4290,7 @@ mod test { ) .expect("Unable to add ordered prop 4"); initial_graph - .add_edge( + .add_ordered_edge( initial_change_set, initial_graph .get_node_index_by_id(container_prop_id) @@ -4200,7 +4335,7 @@ mod test { let new_edge_weight = EdgeWeight::new(initial_change_set, EdgeWeightKind::Use) .expect("Unable to create EdgeWeight"); initial_graph - .add_edge( + .add_ordered_edge( initial_change_set, initial_graph .get_node_index_by_id(container_prop_id) @@ -4261,7 +4396,7 @@ mod test { let schema_id = initial_change_set .generate_ulid() - .expect("Cannot generate Ulid"); + .expect("Unable to generate Ulid"); let schema_index = initial_graph .add_node( NodeWeight::new_content( @@ -4274,7 +4409,7 @@ mod test { .expect("Unable to add Schema A"); let schema_variant_id = initial_change_set .generate_ulid() - .expect("Cannot generate Ulid"); + .expect("Unable to generate Ulid"); let schema_variant_index = initial_graph .add_node( NodeWeight::new_content( @@ -4288,7 +4423,6 @@ mod test { initial_graph .add_edge( - initial_change_set, initial_graph.root_index, EdgeWeight::new(initial_change_set, EdgeWeightKind::Use) .expect("Unable to create EdgeWeight"), @@ -4297,10 +4431,9 @@ mod test { .expect("Unable to add root -> schema edge"); initial_graph .add_edge( - initial_change_set, initial_graph .get_node_index_by_id(schema_id) - .expect("Cannot get NodeIndex"), + .expect("Unable to get NodeIndex"), EdgeWeight::new(initial_change_set, EdgeWeightKind::Use) .expect("Unable to create EdgeWeight"), schema_variant_index, @@ -4325,7 +4458,6 @@ mod test { .expect("Unable to add container prop"); initial_graph .add_edge( - initial_change_set, initial_graph .get_node_index_by_id(schema_variant_id) .expect("Unable to get NodeIndex"), @@ -4351,7 +4483,7 @@ mod test { ) .expect("Unable to add ordered prop 1"); initial_graph - .add_edge( + .add_ordered_edge( initial_change_set, initial_graph .get_node_index_by_id(container_prop_id) @@ -4378,7 +4510,7 @@ mod test { ) 
.expect("Unable to add ordered prop 2"); initial_graph - .add_edge( + .add_ordered_edge( initial_change_set, initial_graph .get_node_index_by_id(container_prop_id) @@ -4405,7 +4537,7 @@ mod test { ) .expect("Unable to add ordered prop 3"); initial_graph - .add_edge( + .add_ordered_edge( initial_change_set, initial_graph .get_node_index_by_id(container_prop_id) @@ -4432,7 +4564,7 @@ mod test { ) .expect("Unable to add ordered prop 4"); initial_graph - .add_edge( + .add_ordered_edge( initial_change_set, initial_graph .get_node_index_by_id(container_prop_id) @@ -4483,7 +4615,7 @@ mod test { let new_edge_weight = EdgeWeight::new(initial_change_set, EdgeWeightKind::Use) .expect("Unable to create EdgeWeight"); initial_graph - .add_edge( + .add_ordered_edge( initial_change_set, initial_graph .get_node_index_by_id(container_prop_id) @@ -4517,4 +4649,1159 @@ mod test { updates ); } + + #[test] + fn attribute_value_build_view() { + let change_set = ChangeSetPointer::new_local().expect("Unable to create ChangeSet"); + let change_set = &change_set; + let mut graph = WorkspaceSnapshotGraph::new(change_set) + .expect("Unable to create WorkspaceSnapshotGraph"); + let mut content_store = crate::content::Store::new(); + + let schema_id = change_set.generate_ulid().expect("Unable to generate Ulid"); + let (schema_content_hash, _) = content_store + .add(serde_json::json!("Schema A")) + .expect("Unable to add to content store"); + let schema_node_index = graph + .add_node( + NodeWeight::new_content( + change_set, + schema_id, + ContentAddress::Schema(schema_content_hash), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add schema"); + graph + .add_edge( + graph.root_index, + EdgeWeight::new(change_set, EdgeWeightKind::Use) + .expect("Unable to create EdgeWeight"), + schema_node_index, + ) + .expect("Unable to add root -> schema edge"); + + let schema_variant_id = change_set.generate_ulid().expect("Unable to generate Ulid"); + let (schema_variant_content_hash, _) 
= content_store + .add(serde_json::json!("Schema Variant A")) + .expect("Unable to add to content store"); + let schema_variant_node_index = graph + .add_node( + NodeWeight::new_content( + change_set, + schema_variant_id, + ContentAddress::SchemaVariant(schema_variant_content_hash), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add schema variant"); + graph + .add_edge( + graph + .get_node_index_by_id(schema_id) + .expect("Unable to get NodeIndex"), + EdgeWeight::new(change_set, EdgeWeightKind::Use) + .expect("Unable to create EdgeWeight"), + schema_variant_node_index, + ) + .expect("Unable to add schema -> schema variant edge"); + + let root_prop_id = change_set.generate_ulid().expect("Unable to generate Ulid"); + let (root_prop_content_hash, _) = content_store + .add(serde_json::json!("Root prop")) + .expect("Unable to add to content store"); + let root_prop_node_index = graph + .add_node( + NodeWeight::new_prop( + change_set, + root_prop_id, + PropKind::Object, + "root", + root_prop_content_hash, + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add root prop"); + graph + .add_edge( + graph + .get_node_index_by_id(schema_variant_id) + .expect("Unable to get NodeIndex"), + EdgeWeight::new(change_set, EdgeWeightKind::Use) + .expect("Unable to create EdgeWeight"), + root_prop_node_index, + ) + .expect("Unable to add schema variant -> root prop edge"); + + let si_prop_id = change_set.generate_ulid().expect("Unable to generate Ulid"); + let (si_prop_content_hash, _) = content_store + .add(serde_json::json!("SI Prop Content")) + .expect("Unable to add to content store"); + let si_prop_node_index = graph + .add_node( + NodeWeight::new_prop( + change_set, + si_prop_id, + PropKind::Object, + "si", + si_prop_content_hash, + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add si prop"); + graph + .add_edge( + graph + .get_node_index_by_id(root_prop_id) + .expect("Unable to get NodeIndex"), + 
EdgeWeight::new(change_set, EdgeWeightKind::Use) + .expect("Unable to create EdgeWeight"), + si_prop_node_index, + ) + .expect("Unable to add root prop -> si prop edge"); + + let name_prop_id = change_set.generate_ulid().expect("Unable to generate Ulid"); + let (name_prop_content_hash, _) = content_store + .add(serde_json::json!("Name Prop Content")) + .expect("Unable to add to content store"); + let name_prop_node_index = graph + .add_node( + NodeWeight::new_prop( + change_set, + name_prop_id, + PropKind::Object, + "name", + name_prop_content_hash, + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add name prop"); + graph + .add_edge( + graph + .get_node_index_by_id(si_prop_id) + .expect("Unable to get NodeIndex"), + EdgeWeight::new(change_set, EdgeWeightKind::Use) + .expect("Unable to create EdgeWeight"), + name_prop_node_index, + ) + .expect("Unable to add si prop -> name prop edge"); + + let component_id = change_set.generate_ulid().expect("Unable to generate Ulid"); + let (component_content_hash, _) = content_store + .add(serde_json::json!("Component Content")) + .expect("Unable to add to content store"); + let component_node_index = graph + .add_node( + NodeWeight::new_content( + change_set, + component_id, + ContentAddress::Component(component_content_hash), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add component"); + graph + .add_edge( + graph.root_index, + EdgeWeight::new(change_set, EdgeWeightKind::Use) + .expect("Unable to create EdgeWeight"), + component_node_index, + ) + .expect("Unable to add root -> component edge"); + graph + .add_edge( + graph + .get_node_index_by_id(component_id) + .expect("Unable to get NodeIndex"), + EdgeWeight::new(change_set, EdgeWeightKind::Use) + .expect("Unable to create EdgeWeight"), + graph + .get_node_index_by_id(schema_variant_id) + .expect("Unable to get NodeIndex"), + ) + .expect("Unable to add component -> schema variant edge"); + + let root_av_id = 
change_set.generate_ulid().expect("Unable to generate Ulid"); + let (root_av_content_hash, _) = content_store + .add(serde_json::json!({})) + .expect("Unable to add to content store"); + let root_av_node_index = graph + .add_node( + NodeWeight::new_content( + change_set, + root_av_id, + ContentAddress::AttributeValue(root_av_content_hash), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add root av"); + graph + .add_edge( + graph + .get_node_index_by_id(component_id) + .expect("Unable to get NodeIndex"), + EdgeWeight::new(change_set, EdgeWeightKind::Use) + .expect("Unable to create EdgeWeight"), + root_av_node_index, + ) + .expect("Unable to add component -> root av edge"); + graph + .add_edge( + graph + .get_node_index_by_id(root_av_id) + .expect("Unable to get NodeIndex"), + EdgeWeight::new(change_set, EdgeWeightKind::Prop) + .expect("Unable to create EdgeWeight"), + graph + .get_node_index_by_id(root_prop_id) + .expect("Unable to get NodeIndex"), + ) + .expect("Unable to add root av -> root prop edge"); + + let si_av_id = change_set.generate_ulid().expect("Unable to generate Ulid"); + let (si_av_content_hash, _) = content_store + .add(serde_json::json!({})) + .expect("Unable to add to content store"); + let si_av_node_index = graph + .add_node( + NodeWeight::new_content( + change_set, + si_av_id, + ContentAddress::AttributeValue(si_av_content_hash), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add si av"); + graph + .add_edge( + graph + .get_node_index_by_id(root_av_id) + .expect("Unable to get NodeIndex"), + EdgeWeight::new(change_set, EdgeWeightKind::Contain(None)) + .expect("Unable to create EdgeWeight"), + si_av_node_index, + ) + .expect("Unable to add root av -> si av edge"); + graph + .add_edge( + graph + .get_node_index_by_id(si_av_id) + .expect("Unable to get NodeIndex"), + EdgeWeight::new(change_set, EdgeWeightKind::Prop) + .expect("Unable to create EdgeWeight"), + graph + 
.get_node_index_by_id(si_prop_id) + .expect("Unable to get NodeIndex"), + ) + .expect("Unable to add si av -> si prop edge"); + + let name_av_id = change_set.generate_ulid().expect("Unable to generate Ulid"); + let (name_av_content_hash, _) = content_store + .add(serde_json::json!("component name")) + .expect("Unable to add to content store"); + let name_av_node_index = graph + .add_node( + NodeWeight::new_content( + change_set, + name_av_id, + ContentAddress::AttributeValue(name_av_content_hash), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add name av"); + graph + .add_edge( + graph + .get_node_index_by_id(si_av_id) + .expect("Unable to get NodeWeight"), + EdgeWeight::new(change_set, EdgeWeightKind::Contain(None)) + .expect("Unable to create EdgeWeight"), + name_av_node_index, + ) + .expect("Unable to add si av -> name av edge"); + graph + .add_edge( + graph + .get_node_index_by_id(name_av_id) + .expect("Unable to get NodeIndex"), + EdgeWeight::new(change_set, EdgeWeightKind::Prop) + .expect("Unable to create EdgeWeight"), + graph + .get_node_index_by_id(name_prop_id) + .expect("Unable to get NodeIndex"), + ) + .expect("Unable to create name av -> name prop edge"); + + graph.cleanup(); + graph.dot(); + + assert_eq!( + serde_json::json![{"si": {"name": "component name"}}], + graph + .attribute_value_view( + &content_store, + graph + .get_node_index_by_id(root_av_id) + .expect("Unable to get NodeIndex") + ) + .expect("Unable to generate attribute value view"), + ); + } + + #[test] + fn attribute_value_build_view_unordered_object() { + let change_set = ChangeSetPointer::new_local().expect("Unable to create ChangeSet"); + let change_set = &change_set; + let mut graph = WorkspaceSnapshotGraph::new(change_set) + .expect("Unable to create WorkspaceSnapshotGraph"); + let mut content_store = crate::content::Store::new(); + + let schema_id = change_set.generate_ulid().expect("Unable to generate Ulid"); + let (schema_content_hash, _) = 
content_store + .add(serde_json::json!("Schema A")) + .expect("Unable to add to content store"); + let schema_node_index = graph + .add_node( + NodeWeight::new_content( + change_set, + schema_id, + ContentAddress::Schema(schema_content_hash), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add schema"); + graph + .add_edge( + graph.root_index, + EdgeWeight::new(change_set, EdgeWeightKind::Use) + .expect("Unable to create EdgeWeight"), + schema_node_index, + ) + .expect("Unable to add root -> schema edge"); + + let schema_variant_id = change_set.generate_ulid().expect("Unable to generate Ulid"); + let (schema_variant_content_hash, _) = content_store + .add(serde_json::json!("Schema Variant A")) + .expect("Unable to add to content store"); + let schema_variant_node_index = graph + .add_node( + NodeWeight::new_content( + change_set, + schema_variant_id, + ContentAddress::SchemaVariant(schema_variant_content_hash), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add schema variant"); + graph + .add_edge( + graph + .get_node_index_by_id(schema_id) + .expect("Unable to get NodeIndex"), + EdgeWeight::new(change_set, EdgeWeightKind::Use) + .expect("Unable to create EdgeWeight"), + schema_variant_node_index, + ) + .expect("Unable to add schema -> schema variant edge"); + + let root_prop_id = change_set.generate_ulid().expect("Unable to generate Ulid"); + let (root_prop_content_hash, _) = content_store + .add(serde_json::json!("Root prop")) + .expect("Unable to add to content store"); + let root_prop_node_index = graph + .add_node( + NodeWeight::new_prop( + change_set, + root_prop_id, + PropKind::Object, + "root", + root_prop_content_hash, + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add root prop"); + graph + .add_edge( + graph + .get_node_index_by_id(schema_variant_id) + .expect("Unable to get NodeIndex"), + EdgeWeight::new(change_set, EdgeWeightKind::Use) + .expect("Unable to create EdgeWeight"), + 
root_prop_node_index, + ) + .expect("Unable to add schema variant -> root prop edge"); + + let si_prop_id = change_set.generate_ulid().expect("Unable to generate Ulid"); + let (si_prop_content_hash, _) = content_store + .add(serde_json::json!("SI Prop Content")) + .expect("Unable to add to content store"); + let si_prop_node_index = graph + .add_node( + NodeWeight::new_prop( + change_set, + si_prop_id, + PropKind::Object, + "si", + si_prop_content_hash, + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add si prop"); + graph + .add_edge( + graph + .get_node_index_by_id(root_prop_id) + .expect("Unable to get NodeIndex"), + EdgeWeight::new(change_set, EdgeWeightKind::Use) + .expect("Unable to create EdgeWeight"), + si_prop_node_index, + ) + .expect("Unable to add root prop -> si prop edge"); + + let name_prop_id = change_set.generate_ulid().expect("Unable to generate Ulid"); + let (name_prop_content_hash, _) = content_store + .add(serde_json::json!("Name Prop Content")) + .expect("Unable to add to content store"); + let name_prop_node_index = graph + .add_node( + NodeWeight::new_prop( + change_set, + name_prop_id, + PropKind::Object, + "name", + name_prop_content_hash, + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add name prop"); + graph + .add_edge( + graph + .get_node_index_by_id(si_prop_id) + .expect("Unable to get NodeIndex"), + EdgeWeight::new(change_set, EdgeWeightKind::Use) + .expect("Unable to create EdgeWeight"), + name_prop_node_index, + ) + .expect("Unable to add si prop -> name prop edge"); + + let description_prop_id = change_set.generate_ulid().expect("Unable to generate Ulid"); + let (description_prop_content_hash, _) = content_store + .add(serde_json::json!("Description Prop Content")) + .expect("Unable to add to content store"); + let description_prop_node_index = graph + .add_node( + NodeWeight::new_prop( + change_set, + description_prop_id, + PropKind::String, + "description", + 
description_prop_content_hash, + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add description prop"); + graph + .add_edge( + graph + .get_node_index_by_id(si_prop_id) + .expect("Unable to get NodeIndex"), + EdgeWeight::new(change_set, EdgeWeightKind::Use) + .expect("Unable to create EdgeWeight"), + description_prop_node_index, + ) + .expect("Unable to add si prop -> description prop edge"); + + let component_id = change_set.generate_ulid().expect("Unable to generate Ulid"); + let (component_content_hash, _) = content_store + .add(serde_json::json!("Component Content")) + .expect("Unable to add to content store"); + let component_node_index = graph + .add_node( + NodeWeight::new_content( + change_set, + component_id, + ContentAddress::Component(component_content_hash), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add component"); + graph + .add_edge( + graph.root_index, + EdgeWeight::new(change_set, EdgeWeightKind::Use) + .expect("Unable to create EdgeWeight"), + component_node_index, + ) + .expect("Unable to add root -> component edge"); + graph + .add_edge( + graph + .get_node_index_by_id(component_id) + .expect("Unable to get NodeIndex"), + EdgeWeight::new(change_set, EdgeWeightKind::Use) + .expect("Unable to create EdgeWeight"), + graph + .get_node_index_by_id(schema_variant_id) + .expect("Unable to get NodeIndex"), + ) + .expect("Unable to add component -> schema variant edge"); + + let root_av_id = change_set.generate_ulid().expect("Unable to generate Ulid"); + let (root_av_content_hash, _) = content_store + .add(serde_json::json!({})) + .expect("Unable to add to content store"); + let root_av_node_index = graph + .add_node( + NodeWeight::new_content( + change_set, + root_av_id, + ContentAddress::AttributeValue(root_av_content_hash), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add root av"); + graph + .add_edge( + graph + .get_node_index_by_id(component_id) + .expect("Unable to get 
NodeIndex"), + EdgeWeight::new(change_set, EdgeWeightKind::Use) + .expect("Unable to create EdgeWeight"), + root_av_node_index, + ) + .expect("Unable to add component -> root av edge"); + graph + .add_edge( + graph + .get_node_index_by_id(root_av_id) + .expect("Unable to get NodeIndex"), + EdgeWeight::new(change_set, EdgeWeightKind::Prop) + .expect("Unable to create EdgeWeight"), + graph + .get_node_index_by_id(root_prop_id) + .expect("Unable to get NodeIndex"), + ) + .expect("Unable to add root av -> root prop edge"); + + let si_av_id = change_set.generate_ulid().expect("Unable to generate Ulid"); + let (si_av_content_hash, _) = content_store + .add(serde_json::json!({})) + .expect("Unable to add to content store"); + let si_av_node_index = graph + .add_node( + NodeWeight::new_content( + change_set, + si_av_id, + ContentAddress::AttributeValue(si_av_content_hash), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add si av"); + graph + .add_edge( + graph + .get_node_index_by_id(root_av_id) + .expect("Unable to get NodeIndex"), + EdgeWeight::new(change_set, EdgeWeightKind::Contain(None)) + .expect("Unable to create EdgeWeight"), + si_av_node_index, + ) + .expect("Unable to add root av -> si av edge"); + graph + .add_edge( + graph + .get_node_index_by_id(si_av_id) + .expect("Unable to get NodeIndex"), + EdgeWeight::new(change_set, EdgeWeightKind::Prop) + .expect("Unable to create EdgeWeight"), + graph + .get_node_index_by_id(si_prop_id) + .expect("Unable to get NodeIndex"), + ) + .expect("Unable to add si av -> si prop edge"); + + let name_av_id = change_set.generate_ulid().expect("Unable to generate Ulid"); + let (name_av_content_hash, _) = content_store + .add(serde_json::json!("component name")) + .expect("Unable to add to content store"); + let name_av_node_index = graph + .add_node( + NodeWeight::new_content( + change_set, + name_av_id, + ContentAddress::AttributeValue(name_av_content_hash), + ) + .expect("Unable to create NodeWeight"), + ) 
+ .expect("Unable to add name av"); + graph + .add_edge( + graph + .get_node_index_by_id(si_av_id) + .expect("Unable to get NodeIndex"), + EdgeWeight::new(change_set, EdgeWeightKind::Contain(None)) + .expect("Unable to create EdgeWeight"), + name_av_node_index, + ) + .expect("Unable to add si av -> name av edge"); + graph + .add_edge( + graph + .get_node_index_by_id(name_av_id) + .expect("Unable to get NodeIndex"), + EdgeWeight::new(change_set, EdgeWeightKind::Prop) + .expect("Unable to create EdgeWeight"), + graph + .get_node_index_by_id(name_prop_id) + .expect("Unable to get NodeIndex"), + ) + .expect("Unable to create name av -> name prop edge"); + + let description_av_id = change_set.generate_ulid().expect("Unable to generate Ulid"); + let (description_av_content_hash, _) = content_store + .add(serde_json::json!("Component description")) + .expect("Unable to add to content store"); + let description_av_node_index = graph + .add_node( + NodeWeight::new_content( + change_set, + description_av_id, + ContentAddress::AttributeValue(description_av_content_hash), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add description av"); + graph + .add_edge( + graph + .get_node_index_by_id(si_av_id) + .expect("Unable to get NodeIndex"), + EdgeWeight::new(change_set, EdgeWeightKind::Contain(None)) + .expect("Unable to create EdgeWeight"), + description_av_node_index, + ) + .expect("Unable to add si av -> description av edge"); + graph + .add_edge( + graph + .get_node_index_by_id(description_av_id) + .expect("Unable to get NodeIndex"), + EdgeWeight::new(change_set, EdgeWeightKind::Prop) + .expect("Unable to create EdgeWeight"), + graph + .get_node_index_by_id(description_prop_id) + .expect("Unable to get NodeIndex"), + ) + .expect("Unable to add description av -> description prop edge"); + + graph.cleanup(); + graph.dot(); + + assert_eq!( + serde_json::json![{ + "si": { + "description": "Component description", + "name": "component name", + } + }], + 
graph + .attribute_value_view( + &content_store, + graph + .get_node_index_by_id(root_av_id) + .expect("Unable to get NodeIndex") + ) + .expect("Unable to generate attribute value view"), + ); + } + + #[test] + fn attribute_value_build_view_ordered_array() { + let change_set = ChangeSetPointer::new_local().expect("Unable to create ChangeSet"); + let change_set = &change_set; + let mut graph = WorkspaceSnapshotGraph::new(change_set) + .expect("Unable to create WorkspaceSnapshotGraph"); + let mut content_store = crate::content::Store::new(); + + let schema_id = change_set.generate_ulid().expect("Unable to generate Ulid"); + let (schema_content_hash, _) = content_store + .add(serde_json::json!("Schema A")) + .expect("Unable to add to content store"); + let schema_node_index = graph + .add_node( + NodeWeight::new_content( + change_set, + schema_id, + ContentAddress::Schema(schema_content_hash), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add schema"); + graph + .add_edge( + graph.root_index, + EdgeWeight::new(change_set, EdgeWeightKind::Use) + .expect("Unable to create EdgeWeight"), + schema_node_index, + ) + .expect("Unable to add root -> schema edge"); + + let schema_variant_id = change_set.generate_ulid().expect("Unable to generate Ulid"); + let (schema_variant_content_hash, _) = content_store + .add(serde_json::json!("Schema Variant A")) + .expect("Unable to add to content store"); + let schema_variant_node_index = graph + .add_node( + NodeWeight::new_content( + change_set, + schema_variant_id, + ContentAddress::SchemaVariant(schema_variant_content_hash), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add schema variant"); + graph + .add_edge( + graph + .get_node_index_by_id(schema_id) + .expect("Unable to get NodeIndex"), + EdgeWeight::new(change_set, EdgeWeightKind::Use) + .expect("Unable to create EdgeWeight"), + schema_variant_node_index, + ) + .expect("Unable to add schema -> schema variant edge"); + + let 
root_prop_id = change_set.generate_ulid().expect("Unable to generate Ulid"); + let (root_prop_content_hash, _) = content_store + .add(serde_json::json!("Root prop")) + .expect("Unable to add to content store"); + let root_prop_node_index = graph + .add_node( + NodeWeight::new_prop( + change_set, + root_prop_id, + PropKind::Object, + "root", + root_prop_content_hash, + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add root prop"); + graph + .add_edge( + graph + .get_node_index_by_id(schema_variant_id) + .expect("Unable to get NodeIndex"), + EdgeWeight::new(change_set, EdgeWeightKind::Use) + .expect("Unable to create EdgeWeight"), + root_prop_node_index, + ) + .expect("Unable to add schema variant -> root prop edge"); + + let domain_prop_id = change_set.generate_ulid().expect("Unable to generate Ulid"); + let (domain_prop_content_hash, _) = content_store + .add(serde_json::json!("domain Prop Content")) + .expect("Unable to add to content store"); + let domain_prop_node_index = graph + .add_node( + NodeWeight::new_prop( + change_set, + domain_prop_id, + PropKind::Object, + "domain", + domain_prop_content_hash, + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add domain prop"); + graph + .add_edge( + graph + .get_node_index_by_id(root_prop_id) + .expect("Unable to get NodeIndex"), + EdgeWeight::new(change_set, EdgeWeightKind::Use) + .expect("Unable to create EdgeWeight"), + domain_prop_node_index, + ) + .expect("Unable to add root prop -> domain prop edge"); + + let ports_prop_id = change_set.generate_ulid().expect("Unable to generate Ulid"); + let (ports_prop_content_hash, _) = content_store + .add(serde_json::json!("ports Prop Content")) + .expect("Unable to add to content store"); + let ports_prop_node_index = graph + .add_node( + NodeWeight::new_prop( + change_set, + ports_prop_id, + PropKind::Array, + "ports", + ports_prop_content_hash, + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add ports 
prop"); + graph + .add_edge( + graph + .get_node_index_by_id(domain_prop_id) + .expect("Unable to get NodeIndex"), + EdgeWeight::new(change_set, EdgeWeightKind::Use) + .expect("Unable to create EdgeWeight"), + ports_prop_node_index, + ) + .expect("Unable to add domain prop -> ports prop edge"); + + let port_prop_id = change_set.generate_ulid().expect("Unable to generate Ulid"); + let (port_prop_content_hash, _) = content_store + .add(serde_json::json!("port Prop Content")) + .expect("Unable to add to content store"); + let port_prop_node_index = graph + .add_node( + NodeWeight::new_prop( + change_set, + port_prop_id, + PropKind::String, + "port", + port_prop_content_hash, + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add port prop"); + graph + .add_edge( + graph + .get_node_index_by_id(ports_prop_id) + .expect("Unable to get NodeIndex"), + EdgeWeight::new(change_set, EdgeWeightKind::Use) + .expect("Unable to create EdgeWeight"), + port_prop_node_index, + ) + .expect("Unable to add ports prop -> port prop edge"); + + let component_id = change_set.generate_ulid().expect("Unable to generate Ulid"); + let (component_content_hash, _) = content_store + .add(serde_json::json!("Component Content")) + .expect("Unable to add to content store"); + let component_node_index = graph + .add_node( + NodeWeight::new_content( + change_set, + component_id, + ContentAddress::Component(component_content_hash), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add component"); + graph + .add_edge( + graph.root_index, + EdgeWeight::new(change_set, EdgeWeightKind::Use) + .expect("Unable to create EdgeWeight"), + component_node_index, + ) + .expect("Unable to add root -> component edge"); + graph + .add_edge( + graph + .get_node_index_by_id(component_id) + .expect("Unable to get NodeIndex"), + EdgeWeight::new(change_set, EdgeWeightKind::Use) + .expect("Unable to create EdgeWeight"), + graph + .get_node_index_by_id(schema_variant_id) + 
.expect("Unable to get NodeIndex"), + ) + .expect("Unable to add component -> schema variant edge"); + + let root_av_id = change_set.generate_ulid().expect("Unable to generate Ulid"); + let (root_av_content_hash, _) = content_store + .add(serde_json::json!({})) + .expect("Unable to add to content store"); + let root_av_node_index = graph + .add_node( + NodeWeight::new_content( + change_set, + root_av_id, + ContentAddress::AttributeValue(root_av_content_hash), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add root av"); + graph + .add_edge( + graph + .get_node_index_by_id(component_id) + .expect("Unable to get NodeIndex"), + EdgeWeight::new(change_set, EdgeWeightKind::Use) + .expect("Unable to create EdgeWeight"), + root_av_node_index, + ) + .expect("Unable to add component -> root av edge"); + graph + .add_edge( + graph + .get_node_index_by_id(root_av_id) + .expect("Unable to get NodeIndex"), + EdgeWeight::new(change_set, EdgeWeightKind::Prop) + .expect("Unable to create EdgeWeight"), + graph + .get_node_index_by_id(root_prop_id) + .expect("Unable to get NodeIndex"), + ) + .expect("Unable to add root av -> root prop edge"); + + let domain_av_id = change_set.generate_ulid().expect("Unable to generate Ulid"); + let (domain_av_content_hash, _) = content_store + .add(serde_json::json!({})) + .expect("Unable to add to content store"); + let domain_av_node_index = graph + .add_node( + NodeWeight::new_content( + change_set, + domain_av_id, + ContentAddress::AttributeValue(domain_av_content_hash), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add domain av"); + graph + .add_edge( + graph + .get_node_index_by_id(root_av_id) + .expect("Unable to get NodeIndex"), + EdgeWeight::new(change_set, EdgeWeightKind::Contain(None)) + .expect("Unable to create EdgeWeight"), + domain_av_node_index, + ) + .expect("Unable to add root av -> domain av edge"); + graph + .add_edge( + graph + .get_node_index_by_id(domain_av_id) + 
.expect("Unable to get NodeIndex"), + EdgeWeight::new(change_set, EdgeWeightKind::Prop) + .expect("Unable to create EdgeWeight"), + graph + .get_node_index_by_id(domain_prop_id) + .expect("Unable to get NodeIndex"), + ) + .expect("Unable to add domain av -> domain prop edge"); + + let ports_av_id = change_set.generate_ulid().expect("Unable to generate Ulid"); + let (ports_av_content_hash, _) = content_store + .add(serde_json::json!([])) + .expect("Unable to add to content store"); + let ports_av_node_index = graph + .add_ordered_node( + change_set, + NodeWeight::new_content( + change_set, + ports_av_id, + ContentAddress::AttributeValue(ports_av_content_hash), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add ports av"); + graph + .add_edge( + graph + .get_node_index_by_id(domain_av_id) + .expect("Unable to get NodeIndex"), + EdgeWeight::new(change_set, EdgeWeightKind::Contain(None)) + .expect("Unable to create EdgeWeight"), + ports_av_node_index, + ) + .expect("Unable to add domain av -> ports av edge"); + graph + .add_edge( + graph + .get_node_index_by_id(ports_av_id) + .expect("Unable to get NodeIndex"), + EdgeWeight::new(change_set, EdgeWeightKind::Prop) + .expect("Unable to create EdgeWeight"), + graph + .get_node_index_by_id(ports_prop_id) + .expect("Unable to get NodeIndex"), + ) + .expect("Unable to create ports av -> ports prop edge"); + + let port1_av_id = change_set.generate_ulid().expect("Unable to generate Ulid"); + let (port1_av_content_hash, _) = content_store + .add(serde_json::json!("Port 1")) + .expect("Unable to add to content store"); + let port1_av_node_index = graph + .add_node( + NodeWeight::new_content( + change_set, + port1_av_id, + ContentAddress::AttributeValue(port1_av_content_hash), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add port 1 av"); + graph + .add_ordered_edge( + change_set, + graph + .get_node_index_by_id(ports_av_id) + .expect("Unable to get NodeIndex"), + 
EdgeWeight::new(change_set, EdgeWeightKind::Contain(None)) + .expect("Unable to create EdgeWeight"), + port1_av_node_index, + ) + .expect("Unable to add ports av -> port 1 av edge"); + graph + .add_edge( + graph + .get_node_index_by_id(port1_av_id) + .expect("Unable to get NodeIndex"), + EdgeWeight::new(change_set, EdgeWeightKind::Prop) + .expect("Unable to create EdgeWeight"), + graph + .get_node_index_by_id(port_prop_id) + .expect("Unable to get NodeIndex"), + ) + .expect("Unable to add port 1 av -> port prop edge"); + + let port2_av_id = change_set.generate_ulid().expect("Unable to generate Ulid"); + let (port2_av_content_hash, _) = content_store + .add(serde_json::json!("Port 2")) + .expect("Unable to add to content store"); + let port2_av_node_index = graph + .add_node( + NodeWeight::new_content( + change_set, + port2_av_id, + ContentAddress::AttributeValue(port2_av_content_hash), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add port 2 av"); + graph + .add_ordered_edge( + change_set, + graph + .get_node_index_by_id(ports_av_id) + .expect("Unable to get NodeIndex"), + EdgeWeight::new(change_set, EdgeWeightKind::Contain(None)) + .expect("Unable to create EdgeWeight"), + port2_av_node_index, + ) + .expect("Unable to add ports av -> port 2 av edge"); + graph + .add_edge( + graph + .get_node_index_by_id(port2_av_id) + .expect("Unable to get NodeIndex"), + EdgeWeight::new(change_set, EdgeWeightKind::Prop) + .expect("Unable to create EdgeWeight"), + graph + .get_node_index_by_id(port_prop_id) + .expect("Unable to get NodeIndex"), + ) + .expect("Unable to add port 2 av -> port prop edge"); + + let port3_av_id = change_set.generate_ulid().expect("Unable to generate Ulid"); + let (port3_av_content_hash, _) = content_store + .add(serde_json::json!("Port 3")) + .expect("Unable to add to content store"); + let port3_av_node_index = graph + .add_node( + NodeWeight::new_content( + change_set, + port3_av_id, + 
ContentAddress::AttributeValue(port3_av_content_hash), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add port 3 av"); + graph + .add_ordered_edge( + change_set, + graph + .get_node_index_by_id(ports_av_id) + .expect("Unable to get NodeIndex"), + EdgeWeight::new(change_set, EdgeWeightKind::Contain(None)) + .expect("Unable to create EdgeWeight"), + port3_av_node_index, + ) + .expect("Unable to add ports av -> port 3 av edge"); + graph + .add_edge( + graph + .get_node_index_by_id(port3_av_id) + .expect("Unable to get NodeIndex"), + EdgeWeight::new(change_set, EdgeWeightKind::Prop) + .expect("Unable to create EdgeWeight"), + graph + .get_node_index_by_id(port_prop_id) + .expect("Unable to get NodeIndex"), + ) + .expect("Unable to add port 3 av -> port prop edge"); + + let port4_av_id = change_set.generate_ulid().expect("Unable to generate Ulid"); + let (port4_av_content_hash, _) = content_store + .add(serde_json::json!("Port 4")) + .expect("Unable to add to content store"); + let port4_av_node_index = graph + .add_node( + NodeWeight::new_content( + change_set, + port4_av_id, + ContentAddress::AttributeValue(port4_av_content_hash), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add port 4 av"); + graph + .add_ordered_edge( + change_set, + graph + .get_node_index_by_id(ports_av_id) + .expect("Unable to get NodeIndex"), + EdgeWeight::new(change_set, EdgeWeightKind::Contain(None)) + .expect("Unable to create EdgeWeight"), + port4_av_node_index, + ) + .expect("Unable to add ports av -> port 4 av edge"); + graph + .add_edge( + graph + .get_node_index_by_id(port4_av_id) + .expect("Unable to get NodeIndex"), + EdgeWeight::new(change_set, EdgeWeightKind::Prop) + .expect("Unable to create EdgeWeight"), + graph + .get_node_index_by_id(port_prop_id) + .expect("Unable to get NodeIndex"), + ) + .expect("Unable to add port 4 av -> port prop edge"); + + graph.cleanup(); + graph.dot(); + + assert_eq!( + serde_json::json![{ + "domain": { 
+ "ports": [ + "Port 1", + "Port 2", + "Port 3", + "Port 4", + ], + } + }], + graph + .attribute_value_view( + &content_store, + graph + .get_node_index_by_id(root_av_id) + .expect("Unable to get NodeIndex") + ) + .expect("Unable to generate attribute value view"), + ); + } } diff --git a/lib/dal/src/workspace_snapshot/node_weight.rs b/lib/dal/src/workspace_snapshot/node_weight.rs index 12485138f2..b863e8d24e 100644 --- a/lib/dal/src/workspace_snapshot/node_weight.rs +++ b/lib/dal/src/workspace_snapshot/node_weight.rs @@ -5,16 +5,20 @@ use ulid::Ulid; use crate::{ change_set_pointer::{ChangeSetPointer, ChangeSetPointerError}, - workspace_snapshot::vector_clock::{VectorClock, VectorClockError}, - ContentHash, + workspace_snapshot::{ + content_address::ContentAddress, + vector_clock::{VectorClock, VectorClockError}, + }, + ContentHash, PropKind, }; -pub use crate::workspace_snapshot::node_weight::content_node_weight::ContentAddress; pub use content_node_weight::ContentNodeWeight; pub use ordering_node_weight::OrderingNodeWeight; +pub use prop_node_weight::PropNodeWeight; pub mod content_node_weight; pub mod ordering_node_weight; +pub mod prop_node_weight; #[derive(Debug, Error)] pub enum NodeWeightError { @@ -28,6 +32,8 @@ pub enum NodeWeightError { ChangeSet(#[from] ChangeSetPointerError), #[error("Incompatible node weights")] IncompatibleNodeWeightVariants, + #[error("Invalid ContentAddress variant ({0}) for NodeWeight variant ({1})")] + InvalidContentAddressForWeightKind(String, String), #[error("Vector Clock error: {0}")] VectorClock(#[from] VectorClockError), } @@ -38,6 +44,7 @@ pub type NodeWeightResult = Result; pub enum NodeWeight { Content(ContentNodeWeight), Ordering(OrderingNodeWeight), + Prop(PropNodeWeight), } impl NodeWeight { @@ -45,6 +52,7 @@ impl NodeWeight { match self { NodeWeight::Content(content_weight) => content_weight.content_hash(), NodeWeight::Ordering(ordering_weight) => ordering_weight.content_hash(), + NodeWeight::Prop(prop_weight) => 
prop_weight.content_hash(), } } @@ -52,6 +60,7 @@ impl NodeWeight { match self { NodeWeight::Content(content_weight) => content_weight.id(), NodeWeight::Ordering(ordering_weight) => ordering_weight.id(), + NodeWeight::Prop(prop_weight) => prop_weight.id(), } } @@ -66,6 +75,7 @@ impl NodeWeight { NodeWeight::Ordering(ordering_weight) => { ordering_weight.increment_vector_clock(change_set) } + NodeWeight::Prop(prop_weight) => prop_weight.increment_vector_clock(change_set), } } @@ -73,6 +83,7 @@ impl NodeWeight { match self { NodeWeight::Content(content_weight) => content_weight.lineage_id(), NodeWeight::Ordering(ordering_weight) => ordering_weight.lineage_id(), + NodeWeight::Prop(prop_weight) => prop_weight.lineage_id(), } } @@ -82,6 +93,7 @@ impl NodeWeight { NodeWeight::Ordering(ordering_weight) => { ordering_weight.mark_seen_at(change_set, seen_at) } + NodeWeight::Prop(prop_weight) => prop_weight.mark_seen_at(change_set, seen_at), } } @@ -99,6 +111,9 @@ impl NodeWeight { NodeWeight::Ordering(self_ordering_weight), NodeWeight::Ordering(other_ordering_weight), ) => self_ordering_weight.merge_clocks(change_set, other_ordering_weight), + (NodeWeight::Prop(self_prop_weight), NodeWeight::Prop(other_prop_weight)) => { + self_prop_weight.merge_clocks(change_set, other_prop_weight) + } _ => Err(NodeWeightError::IncompatibleNodeWeightVariants), } } @@ -107,6 +122,7 @@ impl NodeWeight { match self { NodeWeight::Content(content_weight) => content_weight.merkle_tree_hash(), NodeWeight::Ordering(ordering_weight) => ordering_weight.merkle_tree_hash(), + NodeWeight::Prop(prop_weight) => prop_weight.merkle_tree_hash(), } } @@ -124,9 +140,26 @@ impl NodeWeight { match self { NodeWeight::Content(content_weight) => content_weight.new_content_hash(content_hash), NodeWeight::Ordering(_) => Err(NodeWeightError::CannotSetContentHashOnKind), + NodeWeight::Prop(prop_weight) => prop_weight.new_content_hash(content_hash), } } + pub fn new_prop( + change_set: &ChangeSetPointer, + prop_id: 
Ulid, + prop_kind: PropKind, + name: impl AsRef, + content_hash: ContentHash, + ) -> NodeWeightResult { + Ok(NodeWeight::Prop(PropNodeWeight::new( + change_set, + prop_id, + ContentAddress::Prop(content_hash), + prop_kind, + name.as_ref().to_string(), + )?)) + } + pub fn new_with_incremented_vector_clock( &self, change_set: &ChangeSetPointer, @@ -138,15 +171,27 @@ impl NodeWeight { NodeWeight::Ordering(ordering_weight) => { NodeWeight::Ordering(ordering_weight.new_with_incremented_vector_clock(change_set)?) } + NodeWeight::Prop(prop_weight) => { + NodeWeight::Prop(prop_weight.new_with_incremented_vector_clock(change_set)?) + } }; Ok(new_weight) } + pub fn node_hash(&self) -> ContentHash { + match self { + NodeWeight::Content(content_weight) => content_weight.node_hash(), + NodeWeight::Ordering(ordering_weight) => ordering_weight.node_hash(), + NodeWeight::Prop(prop_weight) => prop_weight.node_hash(), + } + } + pub fn set_merkle_tree_hash(&mut self, new_hash: ContentHash) { match self { NodeWeight::Content(content_weight) => content_weight.set_merkle_tree_hash(new_hash), NodeWeight::Ordering(ordering_weight) => ordering_weight.set_merkle_tree_hash(new_hash), + NodeWeight::Prop(prop_weight) => prop_weight.set_merkle_tree_hash(new_hash), } } @@ -158,6 +203,7 @@ impl NodeWeight { match self { NodeWeight::Content(_) => Err(NodeWeightError::CannotSetOrderOnKind), NodeWeight::Ordering(ordering_weight) => ordering_weight.set_order(change_set, order), + NodeWeight::Prop(_) => Err(NodeWeightError::CannotSetOrderOnKind), } } @@ -173,6 +219,9 @@ impl NodeWeight { NodeWeight::Ordering(ordering_weight) => { ordering_weight.set_vector_clock_recently_seen_to(change_set, new_val) } + NodeWeight::Prop(prop_weight) => { + prop_weight.set_vector_clock_recently_seen_to(change_set, new_val) + } } } @@ -180,6 +229,7 @@ impl NodeWeight { match self { NodeWeight::Content(content_weight) => content_weight.vector_clock_first_seen(), NodeWeight::Ordering(ordering_weight) => 
ordering_weight.vector_clock_first_seen(), + NodeWeight::Prop(prop_weight) => prop_weight.vector_clock_first_seen(), } } @@ -187,6 +237,7 @@ impl NodeWeight { match self { NodeWeight::Content(content_weight) => content_weight.vector_clock_recently_seen(), NodeWeight::Ordering(ordering_weight) => ordering_weight.vector_clock_recently_seen(), + NodeWeight::Prop(prop_weight) => prop_weight.vector_clock_recently_seen(), } } @@ -194,6 +245,7 @@ impl NodeWeight { match self { NodeWeight::Content(content_weight) => content_weight.vector_clock_write(), NodeWeight::Ordering(ordering_weight) => ordering_weight.vector_clock_write(), + NodeWeight::Prop(prop_weight) => prop_weight.vector_clock_write(), } } } diff --git a/lib/dal/src/workspace_snapshot/node_weight/content_node_weight.rs b/lib/dal/src/workspace_snapshot/node_weight/content_node_weight.rs index 15328c627f..162dc447db 100644 --- a/lib/dal/src/workspace_snapshot/node_weight/content_node_weight.rs +++ b/lib/dal/src/workspace_snapshot/node_weight/content_node_weight.rs @@ -5,44 +5,15 @@ use ulid::Ulid; use crate::{ change_set_pointer::ChangeSetPointer, workspace_snapshot::{ + content_address::ContentAddress, + graph::LineageId, node_weight::{NodeWeightError, NodeWeightResult}, vector_clock::VectorClock, }, ContentHash, }; -pub type LineageId = Ulid; - -#[remain::sorted] -#[derive(Debug, Serialize, Deserialize, Clone, Copy, PartialEq, Eq)] -/// The type of the object, and the content-addressable-storage address (content hash) -/// of the object itself. 
-pub enum ContentAddress { - Component(ContentHash), - Func(ContentHash), - FuncArg(ContentHash), - Prop(ContentHash), - Root, - Schema(ContentHash), - SchemaVariant(ContentHash), -} - -impl ContentAddress { - fn content_hash(&self) -> ContentHash { - match self { - ContentAddress::Component(id) => Some(*id), - ContentAddress::Func(id) => Some(*id), - ContentAddress::FuncArg(id) => Some(*id), - ContentAddress::Prop(id) => Some(*id), - ContentAddress::Root => None, - ContentAddress::Schema(id) => Some(*id), - ContentAddress::SchemaVariant(id) => Some(*id), - } - .unwrap_or_default() - } -} - -#[derive(Serialize, Deserialize, Clone)] +#[derive(Clone, Serialize, Deserialize)] pub struct ContentNodeWeight { /// The stable local ID of the object in question. Mainly used by external things like /// the UI to be able to say "do X to _this_ thing" since the `NodeIndex` is an @@ -122,7 +93,7 @@ impl ContentNodeWeight { pub fn merge_clocks( &mut self, change_set: &ChangeSetPointer, - other: &ContentNodeWeight, + other: &Self, ) -> NodeWeightResult<()> { self.vector_clock_write .merge(change_set, &other.vector_clock_write)?; @@ -140,10 +111,21 @@ impl ContentNodeWeight { pub fn new_content_hash(&mut self, content_hash: ContentHash) -> NodeWeightResult<()> { let new_address = match &self.content_address { + ContentAddress::AttributePrototype(_) => { + ContentAddress::AttributePrototype(content_hash) + } + ContentAddress::AttributeValue(_) => ContentAddress::AttributeValue(content_hash), ContentAddress::Component(_) => ContentAddress::Component(content_hash), - ContentAddress::Func(_) => ContentAddress::Func(content_hash), + ContentAddress::ExternalProvider(_) => ContentAddress::ExternalProvider(content_hash), ContentAddress::FuncArg(_) => ContentAddress::FuncArg(content_hash), - ContentAddress::Prop(_) => ContentAddress::Prop(content_hash), + ContentAddress::Func(_) => ContentAddress::Func(content_hash), + ContentAddress::InternalProvider(_) => 
ContentAddress::InternalProvider(content_hash), + ContentAddress::Prop(_) => { + return Err(NodeWeightError::InvalidContentAddressForWeightKind( + "Prop".to_string(), + "Content".to_string(), + )) + } ContentAddress::Root => return Err(NodeWeightError::CannotUpdateRootNodeContentHash), ContentAddress::Schema(_) => ContentAddress::Schema(content_hash), ContentAddress::SchemaVariant(_) => ContentAddress::SchemaVariant(content_hash), @@ -164,6 +146,10 @@ impl ContentNodeWeight { Ok(new_node_weight) } + pub fn node_hash(&self) -> ContentHash { + self.content_hash() + } + pub fn set_merkle_tree_hash(&mut self, new_hash: ContentHash) { self.merkle_tree_hash = new_hash; } diff --git a/lib/dal/src/workspace_snapshot/node_weight/ordering_node_weight.rs b/lib/dal/src/workspace_snapshot/node_weight/ordering_node_weight.rs index e02171c637..fd0efe7a2e 100644 --- a/lib/dal/src/workspace_snapshot/node_weight/ordering_node_weight.rs +++ b/lib/dal/src/workspace_snapshot/node_weight/ordering_node_weight.rs @@ -95,6 +95,10 @@ impl OrderingNodeWeight { Ok(new_ordering_weight) } + pub fn node_hash(&self) -> ContentHash { + self.content_hash() + } + pub fn order(&self) -> &Vec { &self.order } @@ -152,10 +156,17 @@ impl OrderingNodeWeight { impl std::fmt::Debug for OrderingNodeWeight { fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { - f.debug_struct("NodeWeight") + f.debug_struct("OrderingNodeWeight") .field("id", &self.id.to_string()) .field("lineage_id", &self.lineage_id.to_string()) - .field("order", &self.order) + .field( + "order", + &self + .order + .iter() + .map(|id| id.to_string()) + .collect::>(), + ) .field("content_hash", &self.content_hash) .field("merkle_tree_hash", &self.merkle_tree_hash) .field("vector_clock_first_seen", &self.vector_clock_first_seen) diff --git a/lib/dal/src/workspace_snapshot/node_weight/prop_node_weight.rs b/lib/dal/src/workspace_snapshot/node_weight/prop_node_weight.rs new file mode 100644 index 0000000000..d88f419f6b --- /dev/null 
+++ b/lib/dal/src/workspace_snapshot/node_weight/prop_node_weight.rs @@ -0,0 +1,238 @@ +use chrono::{DateTime, Utc}; +use serde::{Deserialize, Serialize}; +use ulid::Ulid; + +use crate::{ + change_set_pointer::ChangeSetPointer, + content::hash::ContentHash, + workspace_snapshot::{ + content_address::ContentAddress, + graph::LineageId, + node_weight::{NodeWeightError, NodeWeightResult}, + vector_clock::VectorClock, + }, + PropKind, +}; + +#[derive(Clone, Serialize, Deserialize)] +pub struct PropNodeWeight { + id: Ulid, + lineage_id: LineageId, + content_address: ContentAddress, + merkle_tree_hash: ContentHash, + kind: PropKind, + name: String, + vector_clock_first_seen: VectorClock, + vector_clock_recently_seen: VectorClock, + vector_clock_write: VectorClock, +} + +impl PropNodeWeight { + pub fn new( + change_set: &ChangeSetPointer, + id: Ulid, + content_address: ContentAddress, + kind: PropKind, + name: String, + ) -> NodeWeightResult { + Ok(Self { + id, + lineage_id: change_set.generate_ulid()?, + content_address, + merkle_tree_hash: ContentHash::default(), + kind, + name, + vector_clock_first_seen: VectorClock::new(change_set)?, + vector_clock_recently_seen: VectorClock::new(change_set)?, + vector_clock_write: VectorClock::new(change_set)?, + }) + } + + pub fn content_address(&self) -> ContentAddress { + self.content_address + } + + pub fn content_hash(&self) -> ContentHash { + self.content_address.content_hash() + } + + pub fn id(&self) -> Ulid { + self.id + } + + pub fn increment_vector_clock( + &mut self, + change_set: &ChangeSetPointer, + ) -> NodeWeightResult<()> { + self.vector_clock_write.inc(change_set)?; + self.vector_clock_recently_seen.inc(change_set)?; + + Ok(()) + } + + pub fn lineage_id(&self) -> Ulid { + self.lineage_id + } + + pub fn mark_seen_at(&mut self, change_set: &ChangeSetPointer, seen_at: DateTime) { + self.vector_clock_recently_seen + .inc_to(change_set, seen_at.clone()); + if self.vector_clock_first_seen.entry_for(change_set).is_none() { 
+ self.vector_clock_first_seen.inc_to(change_set, seen_at); + } + } + + pub fn merge_clocks( + &mut self, + change_set: &ChangeSetPointer, + other: &Self, + ) -> NodeWeightResult<()> { + self.vector_clock_write + .merge(change_set, &other.vector_clock_write)?; + self.vector_clock_first_seen + .merge(change_set, &other.vector_clock_first_seen)?; + self.vector_clock_recently_seen + .merge(change_set, &other.vector_clock_recently_seen)?; + + Ok(()) + } + + pub fn merkle_tree_hash(&self) -> ContentHash { + self.merkle_tree_hash + } + + pub fn name(&self) -> &str { + &self.name + } + + pub fn new_content_hash(&mut self, content_hash: ContentHash) -> NodeWeightResult<()> { + let new_address = match &self.content_address { + ContentAddress::AttributePrototype(_) => { + return Err(NodeWeightError::InvalidContentAddressForWeightKind( + "AttributePrototype".to_string(), + "Prop".to_string(), + )) + } + ContentAddress::AttributeValue(_) => { + return Err(NodeWeightError::InvalidContentAddressForWeightKind( + "AttributeValue".to_string(), + "Prop".to_string(), + )) + } + ContentAddress::Component(_) => { + return Err(NodeWeightError::InvalidContentAddressForWeightKind( + "Component".to_string(), + "Prop".to_string(), + )) + } + ContentAddress::ExternalProvider(_) => { + return Err(NodeWeightError::InvalidContentAddressForWeightKind( + "ExternalProvider".to_string(), + "Prop".to_string(), + )) + } + ContentAddress::Func(_) => { + return Err(NodeWeightError::InvalidContentAddressForWeightKind( + "Func".to_string(), + "Prop".to_string(), + )) + } + ContentAddress::FuncArg(_) => { + return Err(NodeWeightError::InvalidContentAddressForWeightKind( + "FuncArc".to_string(), + "Prop".to_string(), + )) + } + ContentAddress::InternalProvider(_) => { + return Err(NodeWeightError::InvalidContentAddressForWeightKind( + "InternalProvider".to_string(), + "Prop".to_string(), + )) + } + ContentAddress::Prop(_) => ContentAddress::Prop(content_hash), + ContentAddress::Root => { + return 
Err(NodeWeightError::InvalidContentAddressForWeightKind( + "Root".to_string(), + "Prop".to_string(), + )) + } + ContentAddress::Schema(_) => { + return Err(NodeWeightError::InvalidContentAddressForWeightKind( + "Schema".to_string(), + "Prop".to_string(), + )) + } + ContentAddress::SchemaVariant(_) => { + return Err(NodeWeightError::InvalidContentAddressForWeightKind( + "SchemaVariant".to_string(), + "Prop".to_string(), + )) + } + }; + + self.content_address = new_address; + + Ok(()) + } + + pub fn new_with_incremented_vector_clock( + &self, + change_set: &ChangeSetPointer, + ) -> NodeWeightResult { + let mut new_node_weight = self.clone(); + new_node_weight.increment_vector_clock(change_set)?; + + Ok(new_node_weight) + } + + pub fn node_hash(&self) -> ContentHash { + ContentHash::from(&serde_json::json![{ + "content_address": self.content_address, + "kind": self.kind, + "name": self.name, + }]) + } + + pub fn set_merkle_tree_hash(&mut self, new_hash: ContentHash) { + self.merkle_tree_hash = new_hash; + } + + pub fn set_vector_clock_recently_seen_to( + &mut self, + change_set: &ChangeSetPointer, + new_val: DateTime, + ) { + self.vector_clock_recently_seen.inc_to(change_set, new_val); + } + + pub fn vector_clock_first_seen(&self) -> &VectorClock { + &self.vector_clock_first_seen + } + + pub fn vector_clock_recently_seen(&self) -> &VectorClock { + &self.vector_clock_recently_seen + } + + pub fn vector_clock_write(&self) -> &VectorClock { + &self.vector_clock_write + } +} + +impl std::fmt::Debug for PropNodeWeight { + fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { + f.debug_struct("PropNodeWeight") + .field("id", &self.id().to_string()) + .field("lineage_id", &self.lineage_id.to_string()) + .field("kind", &self.kind) + .field("name", &self.name) + .field("content_hash", &self.content_hash()) + .field("merkle_tree_hash", &self.merkle_tree_hash) + .field("vector_clock_first_seen", &self.vector_clock_first_seen) + .field( + "vector_clock_recently_seen", 
+ &self.vector_clock_recently_seen, + ) + .field("vector_clock_write", &self.vector_clock_write) + .finish() + } +} From ec0ecfa66159ddb7a29a7e6d47187867d429cf1f Mon Sep 17 00:00:00 2001 From: Jacob Helwig Date: Tue, 19 Sep 2023 11:23:05 -0700 Subject: [PATCH 25/92] Expand testing of ordered containers with workspace snapshot attribute views --- lib/dal/src/workspace_snapshot/graph.rs | 85 +++++++++++++++++++++++++ 1 file changed, 85 insertions(+) diff --git a/lib/dal/src/workspace_snapshot/graph.rs b/lib/dal/src/workspace_snapshot/graph.rs index cd19a48adc..6211c151af 100644 --- a/lib/dal/src/workspace_snapshot/graph.rs +++ b/lib/dal/src/workspace_snapshot/graph.rs @@ -5803,5 +5803,90 @@ mod test { ) .expect("Unable to generate attribute value view"), ); + + let new_order = vec![port3_av_id, port1_av_id, port4_av_id, port2_av_id]; + graph + .update_order(change_set, ports_av_id, new_order) + .expect("Unable to update order of ports attribute value's children"); + assert_eq!( + serde_json::json![{ + "domain": { + "ports": [ + "Port 3", + "Port 1", + "Port 4", + "Port 2", + ] + } + }], + graph + .attribute_value_view( + &content_store, + graph + .get_node_index_by_id(root_av_id) + .expect("Unable to get NodeIndex") + ) + .expect("Unable to generate attribute value view"), + ); + + let port5_av_id = change_set.generate_ulid().expect("Unable to generate Ulid"); + let (port5_av_content_hash, _) = content_store + .add(serde_json::json!("Port 5")) + .expect("Unable to add to content store"); + let port5_av_node_index = graph + .add_node( + NodeWeight::new_content( + change_set, + port5_av_id, + ContentAddress::AttributeValue(port5_av_content_hash), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add port 5 av"); + graph + .add_ordered_edge( + change_set, + graph + .get_node_index_by_id(ports_av_id) + .expect("Unable to get NodeIndex"), + EdgeWeight::new(change_set, EdgeWeightKind::Contain(None)) + .expect("Unable to create EdgeWeight"), + 
port5_av_node_index, + ) + .expect("Unable to add ports av -> port 5 av edge"); + graph + .add_edge( + graph + .get_node_index_by_id(port5_av_id) + .expect("Unable to get NodeIndex"), + EdgeWeight::new(change_set, EdgeWeightKind::Prop) + .expect("Unable to create EdgeWeight"), + graph + .get_node_index_by_id(port_prop_id) + .expect("Unable to get NodeIndex"), + ) + .expect("Unable to add port 5 av -> port prop edge"); + + assert_eq!( + serde_json::json![{ + "domain": { + "ports": [ + "Port 3", + "Port 1", + "Port 4", + "Port 2", + "Port 5", + ] + } + }], + graph + .attribute_value_view( + &content_store, + graph + .get_node_index_by_id(root_av_id) + .expect("Unable to get NodeIndex") + ) + .expect("Unable to generate attribute value view"), + ); } } From 0962a025e4ba6078cb9e137baa8d75ae02992fca Mon Sep 17 00:00:00 2001 From: Jacob Helwig Date: Thu, 21 Sep 2023 12:56:11 -0700 Subject: [PATCH 26/92] Include information encoded in EdgeWeights in WorkspaceSnapshotGraph merkle tree hashes --- lib/dal/src/workspace_snapshot/graph.rs | 105 +++++++++++++++++------- 1 file changed, 75 insertions(+), 30 deletions(-) diff --git a/lib/dal/src/workspace_snapshot/graph.rs b/lib/dal/src/workspace_snapshot/graph.rs index 6211c151af..b4e55f338d 100644 --- a/lib/dal/src/workspace_snapshot/graph.rs +++ b/lib/dal/src/workspace_snapshot/graph.rs @@ -49,7 +49,7 @@ pub enum WorkspaceSnapshotGraphError { NodeWeight(#[from] NodeWeightError), #[error("node weight not found")] NodeWeightNotFound, - #[error("Node with ID {0} not found")] + #[error("Node with ID {} not found", .0.to_string())] NodeWithIdNotFound(Ulid), #[error("No Prop found for NodeIndex {0:?}")] NoPropFound(NodeIndex), @@ -1177,9 +1177,11 @@ impl WorkspaceSnapshotGraph { node_index_by_id.insert(neighbor_weight.id(), neighbor_index); } for ordered_id in ordering_weight.order() { - ordered_child_indexes.push(*node_index_by_id.get(ordered_id).ok_or_else( - || WorkspaceSnapshotGraphError::NodeWithIdNotFound(*ordered_id), - )?); 
+ ordered_child_indexes.push( + *node_index_by_id + .get(ordered_id) + .ok_or(WorkspaceSnapshotGraphError::NodeWithIdNotFound(*ordered_id))?, + ); } } } else { @@ -1245,7 +1247,6 @@ impl WorkspaceSnapshotGraph { for edge_to_remove in edges_to_remove { self.graph.remove_edge(edge_to_remove); } - self.update_merkle_tree_hash(new_source_node_index)?; if let Some(previous_container_ordering_node_index) = self.ordering_node_index_for_container(new_source_node_index)? @@ -1280,6 +1281,13 @@ impl WorkspaceSnapshotGraph { } } + self.update_merkle_tree_hash( + // If we updated the ordering node, that means we've invalidated the container's + // NodeIndex (new_source_node_index), so we need to find the new NodeIndex to be able + // to update the container's merkle tree hash. + self.get_node_index_by_id(self.get_node_weight(new_source_node_index)?.id())?, + )?; + Ok(()) } @@ -1308,18 +1316,11 @@ impl WorkspaceSnapshotGraph { } }; - // Find all outgoing edges. From those outgoing edges and find their destinations. - // If they do not have destinations, then there is no work to do (i.e. stale edge - // reference, which should only happen if an edge was removed after we got the - // edge ref, but before we asked about the edge's endpoints). + // Find all outgoing edges weights and find the edge targets. 
let mut edges_to_create: Vec<(EdgeWeight, NodeIndex)> = Vec::new(); for edge_reference in self.graph.edges_directed(old_node_index, Outgoing) { - let edge_weight = edge_reference.weight(); - if let Some((_, destination_node_index)) = - self.graph.edge_endpoints(edge_reference.id()) - { - edges_to_create.push((edge_weight.clone(), destination_node_index)); - } + edges_to_create + .push((edge_reference.weight().clone(), edge_reference.target())); } // Make copies of these edges where the source is the new node index and the @@ -1344,14 +1345,14 @@ impl WorkspaceSnapshotGraph { } self.update_merkle_tree_hash(new_node_index)?; - - // Use the new version of the old root node as our root node. - if let Some(new_root_node_index) = old_to_new_node_indices.get(&self.root_index) { - self.root_index = *new_root_node_index; - } } } + // Use the new version of the old root node as our root node. + if let Some(new_root_node_index) = old_to_new_node_indices.get(&self.root_index) { + self.root_index = *new_root_node_index; + } + Ok(()) } @@ -1398,22 +1399,42 @@ impl WorkspaceSnapshotGraph { .as_bytes(), ); - // Need to make sure the neighbors are added to the hash in a stable order to ensure the - // merkle tree hash is identical for identical trees. - let mut ordered_neighbors = Vec::new(); + // Need to make sure that ordered containers have their ordered children in the + // order specified by the ordering graph node. + let explicitly_ordered_children = self + .ordered_children_for_node(node_index_to_update)? + .unwrap_or_else(Vec::new); + + // Need to make sure the unordered neighbors are added to the hash in a stable order to + // ensure the merkle tree hash is identical for identical trees. + let mut unordered_neighbors = Vec::new(); for neighbor_node in self .graph .neighbors_directed(node_index_to_update, Outgoing) { - ordered_neighbors.push(neighbor_node); + // Only add the neighbor if it's not one of the ones with an explicit ordering. 
+ if !explicitly_ordered_children.contains(&neighbor_node) { + let neighbor_id = self.get_node_weight(neighbor_node)?.id(); + unordered_neighbors.push((neighbor_id, neighbor_node)); + } } - ordered_neighbors.sort(); + // We'll sort the neighbors by the ID in the NodeWeight, as that will result in more stable + // results than if we sorted by the NodeIndex itself. + unordered_neighbors.sort_by_cached_key(|(id, _index)| *id); + // It's not important whether the explicitly ordered children are first or last, as long as + // they are always in that position, and are always in the sequence specified by the + // container's Ordering node. + let mut ordered_neighbors = + Vec::with_capacity(explicitly_ordered_children.len() + unordered_neighbors.len()); + ordered_neighbors.extend(explicitly_ordered_children); + ordered_neighbors.extend::>( + unordered_neighbors + .iter() + .map(|(_id, index)| *index) + .collect(), + ); for neighbor_node in ordered_neighbors { - // TODO: This needs to take into account the edge weight(s) between the node being - // updated, and the neighboring node, as important information may be encoded in - // the edge weight itself such as the name of the function argument, or the key - // of an entry in a map. hasher.update( self.graph .node_weight(neighbor_node) @@ -1422,6 +1443,30 @@ impl WorkspaceSnapshotGraph { .to_string() .as_bytes(), ); + + // The edge(s) between `node_index_to_update`, and `neighbor_node` potentially encode + // important information related to the "identity" of `node_index_to_update`. + for connecting_edgeref in self + .graph + .edges_connecting(node_index_to_update, neighbor_node) + { + match connecting_edgeref.weight().kind() { + // This is the name of the argument to the function. + EdgeWeightKind::Argument(arg_name) => hasher.update(arg_name.as_bytes()), + // This is the key for an entry in a map. 
+ EdgeWeightKind::Contain(Some(key)) => hasher.update(key.as_bytes()), + + // Nothing to do, as these EdgeWeightKind do not encode extra information + // in the edge itself. + EdgeWeightKind::Contain(None) + | EdgeWeightKind::DataProvider + | EdgeWeightKind::Ordering + | EdgeWeightKind::Prop + | EdgeWeightKind::Prototype + | EdgeWeightKind::Proxy + | EdgeWeightKind::Use => {} + } + } } let new_node_weight = self @@ -3578,7 +3623,7 @@ mod test { ordered_prop_2_index, EdgeWeightKind::Use, ) - .expect("Unable to update order of prop's children"); + .expect("Unable to remove prop -> ordered_prop_2 edge"); assert_eq!( vec![ From 83461f6299f5a2d1eb35e53023d596637e779508 Mon Sep 17 00:00:00 2001 From: Jacob Helwig Date: Thu, 21 Sep 2023 14:20:54 -0700 Subject: [PATCH 27/92] Test view generation for maps in WorkspaceSnapshotGraph --- lib/dal/src/workspace_snapshot/graph.rs | 593 ++++++++++++++++++++++++ 1 file changed, 593 insertions(+) diff --git a/lib/dal/src/workspace_snapshot/graph.rs b/lib/dal/src/workspace_snapshot/graph.rs index b4e55f338d..3498b18c0a 100644 --- a/lib/dal/src/workspace_snapshot/graph.rs +++ b/lib/dal/src/workspace_snapshot/graph.rs @@ -5934,4 +5934,597 @@ mod test { .expect("Unable to generate attribute value view"), ); } + + #[test] + fn attribute_value_build_view_ordered_map() { + let change_set = ChangeSetPointer::new_local().expect("Unable to create ChangeSet"); + let change_set = &change_set; + let mut graph = WorkspaceSnapshotGraph::new(change_set) + .expect("Unable to create WorkspaceSnapshotGraph"); + let mut content_store = crate::content::Store::new(); + + let schema_id = change_set.generate_ulid().expect("Unable to generate Ulid"); + let (schema_content_hash, _) = content_store + .add(serde_json::json!("Schema A")) + .expect("Unable to add to content store"); + let schema_node_index = graph + .add_node( + NodeWeight::new_content( + change_set, + schema_id, + ContentAddress::Schema(schema_content_hash), + ) + .expect("Unable to create 
NodeWeight"), + ) + .expect("Unable to add schema"); + graph + .add_edge( + graph.root_index, + EdgeWeight::new(change_set, EdgeWeightKind::Use) + .expect("Unable to create EdgeWeight"), + schema_node_index, + ) + .expect("Unable to add root -> schema edge"); + + let schema_variant_id = change_set.generate_ulid().expect("Unable to generate Ulid"); + let (schema_variant_content_hash, _) = content_store + .add(serde_json::json!("Schema Variant A")) + .expect("Unable to add to content store"); + let schema_variant_node_index = graph + .add_node( + NodeWeight::new_content( + change_set, + schema_variant_id, + ContentAddress::SchemaVariant(schema_variant_content_hash), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add schema variant"); + graph + .add_edge( + graph + .get_node_index_by_id(schema_id) + .expect("Unable to get NodeIndex"), + EdgeWeight::new(change_set, EdgeWeightKind::Use) + .expect("Unable to create EdgeWeight"), + schema_variant_node_index, + ) + .expect("Unable to add schema -> schema variant edge"); + + let root_prop_id = change_set.generate_ulid().expect("Unable to generate Ulid"); + let (root_prop_content_hash, _) = content_store + .add(serde_json::json!("Root prop")) + .expect("Unable to add to content store"); + let root_prop_node_index = graph + .add_node( + NodeWeight::new_prop( + change_set, + root_prop_id, + PropKind::Object, + "root", + root_prop_content_hash, + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add root prop"); + graph + .add_edge( + graph + .get_node_index_by_id(schema_variant_id) + .expect("Unable to get NodeIndex"), + EdgeWeight::new(change_set, EdgeWeightKind::Use) + .expect("Unable to create EdgeWeight"), + root_prop_node_index, + ) + .expect("Unable to add schema variant -> root prop edge"); + + let domain_prop_id = change_set.generate_ulid().expect("Unable to generate Ulid"); + let (domain_prop_content_hash, _) = content_store + .add(serde_json::json!("domain Prop Content")) + 
.expect("Unable to add to content store"); + let domain_prop_node_index = graph + .add_node( + NodeWeight::new_prop( + change_set, + domain_prop_id, + PropKind::Object, + "domain", + domain_prop_content_hash, + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add domain prop"); + graph + .add_edge( + graph + .get_node_index_by_id(root_prop_id) + .expect("Unable to get NodeIndex"), + EdgeWeight::new(change_set, EdgeWeightKind::Use) + .expect("Unable to create EdgeWeight"), + domain_prop_node_index, + ) + .expect("Unable to add root prop -> domain prop edge"); + + let environment_prop_id = change_set.generate_ulid().expect("Unable to generate Ulid"); + let (environment_prop_content_hash, _) = content_store + .add(serde_json::json!("environment Prop Content")) + .expect("Unable to add to content store"); + let environment_prop_node_index = graph + .add_node( + NodeWeight::new_prop( + change_set, + environment_prop_id, + PropKind::Array, + "environment", + environment_prop_content_hash, + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add environment prop"); + graph + .add_edge( + graph + .get_node_index_by_id(domain_prop_id) + .expect("Unable to get NodeIndex"), + EdgeWeight::new(change_set, EdgeWeightKind::Use) + .expect("Unable to create EdgeWeight"), + environment_prop_node_index, + ) + .expect("Unable to add domain prop -> environment prop edge"); + + let env_var_prop_id = change_set.generate_ulid().expect("Unable to generate Ulid"); + let (env_var_prop_content_hash, _) = content_store + .add(serde_json::json!("port Prop Content")) + .expect("Unable to add to content store"); + let env_var_prop_node_index = graph + .add_node( + NodeWeight::new_prop( + change_set, + env_var_prop_id, + PropKind::String, + "port", + env_var_prop_content_hash, + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add env var prop"); + graph + .add_edge( + graph + .get_node_index_by_id(environment_prop_id) + .expect("Unable to 
get NodeIndex"), + EdgeWeight::new(change_set, EdgeWeightKind::Use) + .expect("Unable to create EdgeWeight"), + env_var_prop_node_index, + ) + .expect("Unable to add environment prop -> env var prop edge"); + + let component_id = change_set.generate_ulid().expect("Unable to generate Ulid"); + let (component_content_hash, _) = content_store + .add(serde_json::json!("Component Content")) + .expect("Unable to add to content store"); + let component_node_index = graph + .add_node( + NodeWeight::new_content( + change_set, + component_id, + ContentAddress::Component(component_content_hash), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add component"); + graph + .add_edge( + graph.root_index, + EdgeWeight::new(change_set, EdgeWeightKind::Use) + .expect("Unable to create EdgeWeight"), + component_node_index, + ) + .expect("Unable to add root -> component edge"); + graph + .add_edge( + graph + .get_node_index_by_id(component_id) + .expect("Unable to get NodeIndex"), + EdgeWeight::new(change_set, EdgeWeightKind::Use) + .expect("Unable to create EdgeWeight"), + graph + .get_node_index_by_id(schema_variant_id) + .expect("Unable to get NodeIndex"), + ) + .expect("Unable to add component -> schema variant edge"); + + let root_av_id = change_set.generate_ulid().expect("Unable to generate Ulid"); + let (root_av_content_hash, _) = content_store + .add(serde_json::json!({})) + .expect("Unable to add to content store"); + let root_av_node_index = graph + .add_node( + NodeWeight::new_content( + change_set, + root_av_id, + ContentAddress::AttributeValue(root_av_content_hash), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add root av"); + graph + .add_edge( + graph + .get_node_index_by_id(component_id) + .expect("Unable to get NodeIndex"), + EdgeWeight::new(change_set, EdgeWeightKind::Use) + .expect("Unable to create EdgeWeight"), + root_av_node_index, + ) + .expect("Unable to add component -> root av edge"); + graph + .add_edge( + 
graph + .get_node_index_by_id(root_av_id) + .expect("Unable to get NodeIndex"), + EdgeWeight::new(change_set, EdgeWeightKind::Prop) + .expect("Unable to create EdgeWeight"), + graph + .get_node_index_by_id(root_prop_id) + .expect("Unable to get NodeIndex"), + ) + .expect("Unable to add root av -> root prop edge"); + + let domain_av_id = change_set.generate_ulid().expect("Unable to generate Ulid"); + let (domain_av_content_hash, _) = content_store + .add(serde_json::json!({})) + .expect("Unable to add to content store"); + let domain_av_node_index = graph + .add_node( + NodeWeight::new_content( + change_set, + domain_av_id, + ContentAddress::AttributeValue(domain_av_content_hash), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add domain av"); + graph + .add_edge( + graph + .get_node_index_by_id(root_av_id) + .expect("Unable to get NodeIndex"), + EdgeWeight::new(change_set, EdgeWeightKind::Contain(None)) + .expect("Unable to create EdgeWeight"), + domain_av_node_index, + ) + .expect("Unable to add root av -> domain av edge"); + graph + .add_edge( + graph + .get_node_index_by_id(domain_av_id) + .expect("Unable to get NodeIndex"), + EdgeWeight::new(change_set, EdgeWeightKind::Prop) + .expect("Unable to create EdgeWeight"), + graph + .get_node_index_by_id(domain_prop_id) + .expect("Unable to get NodeIndex"), + ) + .expect("Unable to add domain av -> domain prop edge"); + + let envrionment_av_id = change_set.generate_ulid().expect("Unable to generate Ulid"); + let (ports_av_content_hash, _) = content_store + .add(serde_json::json!({})) + .expect("Unable to add to content store"); + let environment_av_node_index = graph + .add_ordered_node( + change_set, + NodeWeight::new_content( + change_set, + envrionment_av_id, + ContentAddress::AttributeValue(ports_av_content_hash), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add environment av"); + graph + .add_edge( + graph + .get_node_index_by_id(domain_av_id) + .expect("Unable 
to get NodeIndex"), + EdgeWeight::new(change_set, EdgeWeightKind::Contain(None)) + .expect("Unable to create EdgeWeight"), + environment_av_node_index, + ) + .expect("Unable to add domain av -> environment av edge"); + graph + .add_edge( + graph + .get_node_index_by_id(envrionment_av_id) + .expect("Unable to get NodeIndex"), + EdgeWeight::new(change_set, EdgeWeightKind::Prop) + .expect("Unable to create EdgeWeight"), + graph + .get_node_index_by_id(environment_prop_id) + .expect("Unable to get NodeIndex"), + ) + .expect("Unable to create environment av -> environment prop edge"); + + let env_var1_av_id = change_set.generate_ulid().expect("Unable to generate Ulid"); + let (env_var1_av_content_hash, _) = content_store + .add(serde_json::json!("1111")) + .expect("Unable to add to content store"); + let port1_av_node_index = graph + .add_node( + NodeWeight::new_content( + change_set, + env_var1_av_id, + ContentAddress::AttributeValue(env_var1_av_content_hash), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add env_var 1 av"); + graph + .add_ordered_edge( + change_set, + graph + .get_node_index_by_id(envrionment_av_id) + .expect("Unable to get NodeIndex"), + EdgeWeight::new( + change_set, + EdgeWeightKind::Contain(Some("PORT_1".to_string())), + ) + .expect("Unable to create EdgeWeight"), + port1_av_node_index, + ) + .expect("Unable to add environment av -> env var 1 av edge"); + graph + .add_edge( + graph + .get_node_index_by_id(env_var1_av_id) + .expect("Unable to get NodeIndex"), + EdgeWeight::new(change_set, EdgeWeightKind::Prop) + .expect("Unable to create EdgeWeight"), + graph + .get_node_index_by_id(env_var_prop_id) + .expect("Unable to get NodeIndex"), + ) + .expect("Unable to add env var 1 av -> env var prop edge"); + + let env_var2_av_id = change_set.generate_ulid().expect("Unable to generate Ulid"); + let (env_var2_av_content_hash, _) = content_store + .add(serde_json::json!("2222")) + .expect("Unable to add to content store"); + let 
env_var2_av_node_index = graph + .add_node( + NodeWeight::new_content( + change_set, + env_var2_av_id, + ContentAddress::AttributeValue(env_var2_av_content_hash), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add env var 2 av"); + graph + .add_ordered_edge( + change_set, + graph + .get_node_index_by_id(envrionment_av_id) + .expect("Unable to get NodeIndex"), + EdgeWeight::new( + change_set, + EdgeWeightKind::Contain(Some("PORT_2".to_string())), + ) + .expect("Unable to create EdgeWeight"), + env_var2_av_node_index, + ) + .expect("Unable to add environment av -> env var 2 av edge"); + graph + .add_edge( + graph + .get_node_index_by_id(env_var2_av_id) + .expect("Unable to get NodeIndex"), + EdgeWeight::new(change_set, EdgeWeightKind::Prop) + .expect("Unable to create EdgeWeight"), + graph + .get_node_index_by_id(env_var_prop_id) + .expect("Unable to get NodeIndex"), + ) + .expect("Unable to add env var 2 av -> env var prop edge"); + + let env_var3_av_id = change_set.generate_ulid().expect("Unable to generate Ulid"); + let (env_var3_av_content_hash, _) = content_store + .add(serde_json::json!("3333")) + .expect("Unable to add to content store"); + let port3_av_node_index = graph + .add_node( + NodeWeight::new_content( + change_set, + env_var3_av_id, + ContentAddress::AttributeValue(env_var3_av_content_hash), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add env var 3 av"); + graph + .add_ordered_edge( + change_set, + graph + .get_node_index_by_id(envrionment_av_id) + .expect("Unable to get NodeIndex"), + EdgeWeight::new( + change_set, + EdgeWeightKind::Contain(Some("PORT_3".to_string())), + ) + .expect("Unable to create EdgeWeight"), + port3_av_node_index, + ) + .expect("Unable to add environment av -> env var 3 av edge"); + graph + .add_edge( + graph + .get_node_index_by_id(env_var3_av_id) + .expect("Unable to get NodeIndex"), + EdgeWeight::new(change_set, EdgeWeightKind::Prop) + .expect("Unable to create 
EdgeWeight"), + graph + .get_node_index_by_id(env_var_prop_id) + .expect("Unable to get NodeIndex"), + ) + .expect("Unable to add env var 3 av -> env var prop edge"); + + let env_var4_av_id = change_set.generate_ulid().expect("Unable to generate Ulid"); + let (env_var4_av_content_hash, _) = content_store + .add(serde_json::json!("4444")) + .expect("Unable to add to content store"); + let env_var4_av_node_index = graph + .add_node( + NodeWeight::new_content( + change_set, + env_var4_av_id, + ContentAddress::AttributeValue(env_var4_av_content_hash), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add env var 4 av"); + graph + .add_ordered_edge( + change_set, + graph + .get_node_index_by_id(envrionment_av_id) + .expect("Unable to get NodeIndex"), + EdgeWeight::new( + change_set, + EdgeWeightKind::Contain(Some("PORT_4".to_string())), + ) + .expect("Unable to create EdgeWeight"), + env_var4_av_node_index, + ) + .expect("Unable to add environment av -> env var 4 av edge"); + graph + .add_edge( + graph + .get_node_index_by_id(env_var4_av_id) + .expect("Unable to get NodeIndex"), + EdgeWeight::new(change_set, EdgeWeightKind::Prop) + .expect("Unable to create EdgeWeight"), + graph + .get_node_index_by_id(env_var_prop_id) + .expect("Unable to get NodeIndex"), + ) + .expect("Unable to add env var 4 av -> env var prop edge"); + + graph.cleanup(); + graph.dot(); + + assert_eq!( + serde_json::json![{ + "domain": { + "environment": { + "PORT_1": "1111", + "PORT_2": "2222", + "PORT_3": "3333", + "PORT_4": "4444", + }, + } + }], + graph + .attribute_value_view( + &content_store, + graph + .get_node_index_by_id(root_av_id) + .expect("Unable to get NodeIndex") + ) + .expect("Unable to generate attribute value view"), + ); + + let new_order = vec![ + env_var3_av_id, + env_var1_av_id, + env_var4_av_id, + env_var2_av_id, + ]; + graph + .update_order(change_set, envrionment_av_id, new_order) + .expect("Unable to update order of environment attribute value's 
children"); + assert_eq!( + serde_json::json![{ + "domain": { + "environment": { + "PORT_3": "3333", + "PORT_1": "1111", + "PORT_4": "4444", + "PORT_2": "2222", + }, + } + }], + graph + .attribute_value_view( + &content_store, + graph + .get_node_index_by_id(root_av_id) + .expect("Unable to get NodeIndex") + ) + .expect("Unable to generate attribute value view"), + ); + + let env_var5_av_id = change_set.generate_ulid().expect("Unable to generate Ulid"); + let (env_var5_av_content_hash, _) = content_store + .add(serde_json::json!("5555")) + .expect("Unable to add to content store"); + let env_var5_av_node_index = graph + .add_node( + NodeWeight::new_content( + change_set, + env_var5_av_id, + ContentAddress::AttributeValue(env_var5_av_content_hash), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add env var 5 av"); + graph + .add_ordered_edge( + change_set, + graph + .get_node_index_by_id(envrionment_av_id) + .expect("Unable to get NodeIndex"), + EdgeWeight::new( + change_set, + EdgeWeightKind::Contain(Some("PORT_5".to_string())), + ) + .expect("Unable to create EdgeWeight"), + env_var5_av_node_index, + ) + .expect("Unable to add environment av -> env var 5 av edge"); + graph + .add_edge( + graph + .get_node_index_by_id(env_var5_av_id) + .expect("Unable to get NodeIndex"), + EdgeWeight::new(change_set, EdgeWeightKind::Prop) + .expect("Unable to create EdgeWeight"), + graph + .get_node_index_by_id(env_var_prop_id) + .expect("Unable to get NodeIndex"), + ) + .expect("Unable to add env var 5 av -> env var prop edge"); + + assert_eq!( + serde_json::json![{ + "domain": { + "environment": { + "PORT_3": "3333", + "PORT_1": "1111", + "PORT_4": "4444", + "PORT_2": "2222", + "PORT_5": "5555", + }, + } + }], + graph + .attribute_value_view( + &content_store, + graph + .get_node_index_by_id(root_av_id) + .expect("Unable to get NodeIndex") + ) + .expect("Unable to generate attribute value view"), + ); + } } From a3fb9107697f04c45f386ad610c795234104ccc9 Mon 
Sep 17 00:00:00 2001 From: Nick Gerace Date: Tue, 26 Sep 2023 17:59:43 -0400 Subject: [PATCH 28/92] Fix compilation error in rebaser integration test Fix compilation error in rebaser integration test where the "ContentAddress" export location changed. Signed-off-by: Nick Gerace --- lib/dal/tests/integration_test/internal/rebaser.rs | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/lib/dal/tests/integration_test/internal/rebaser.rs b/lib/dal/tests/integration_test/internal/rebaser.rs index e00559fe02..2c7bd2e117 100644 --- a/lib/dal/tests/integration_test/internal/rebaser.rs +++ b/lib/dal/tests/integration_test/internal/rebaser.rs @@ -2,7 +2,8 @@ use dal::change_set_pointer::ChangeSetPointer; use dal::content::hash::ContentHash; -use dal::workspace_snapshot::node_weight::{ContentAddress, NodeWeight}; +use dal::workspace_snapshot::content_address::ContentAddress; +use dal::workspace_snapshot::node_weight::NodeWeight; use dal::{DalContext, Tenancy, Visibility, WorkspacePk, WorkspaceSnapshot}; use dal_test::test; use rebaser_client::Client; From 67eed49d905113ea2d42fd1efa85bb02c37b087c Mon Sep 17 00:00:00 2001 From: Nick Gerace Date: Tue, 26 Sep 2023 18:04:05 -0400 Subject: [PATCH 29/92] Add content-store and content-store-test crates This commit adds the content-store and content-store-test crates, which primarily revolve around the "Store" trait. They also provide the ability to perform migrations and database setup for "PgStore", a user of the aforementioned crate. Why two crates? The test database setup and migration logic should never be used in production, but it should remain portable. Thus, the content-store-test crate exists. The "PgStore" can have three different backing databases: "si_content_store", "si_test_content_store", and "si_test_content_store_". The first is the database used for production. The second is the global database used for integration tests and the final copies its contents on a test-by-test basis. 
The new content-store crate brings over the "content" module contents from the dal. That includes the "ContentPair" concept as well as the primitives migration. Currently, there are two users of the "Store" trait: - "LocalStore": exists entirely in-memory and can be used everywhere (currently used for unit tests, but may have other use cases in the future) - "PgStore": leverages pull-through caching, offers persistent writes to Postgres and can be used everywhere so long as _someone_ creates the database and performs migrations Let's talk about integration tests then since they are the sole users of the "PgStore" currently. The dal-test macro now includes setup for the global "si_test_content_store" database. It mirrors the "si_test_dal" database setup path, but is provided via a single client from the content-store-test crate. There's a catch: the "DalTestPgStore" wrapper should be used in dal integration tests because dal-test does not provide a "PgStore" for every single test yet. Rather than performing surgery in dal-test and si-test-macros' expand module, we'll create "PgStores" with accompanying test-specific databases on a case-by-case basis for now. 
This commit contains secondary changes encountered during refactoring as well: - Remove "si_test_rebaser" and rebaser integration tests since the rebaser mainly uses dal logic and few meaningful tests can currently be conducted with the rebaser on its own - Reduce log level for the rebaser skip message during test setup - ChangeSetPointer and WorkspaceSnapshot queries, structs and migrations have slightly changed to adapt to the new changes - Where relevant, we implement "TryFrom" instead of using json serialization for speed - Co-locate all integration tests for the "Mostly Everything is a Node or an Edge" work Signed-off-by: Nick Gerace --- .ci/docker-compose.test-integration.yml | 2 +- Cargo.lock | 38 ++- Cargo.toml | 10 +- component/postgres/BUCK | 2 +- dev/docker-compose.platform.yml | 2 +- lib/content-store-test/BUCK | 13 + lib/content-store-test/Cargo.toml | 13 + lib/content-store-test/src/lib.rs | 170 +++++++++++ lib/content-store/BUCK | 26 ++ lib/content-store/Cargo.toml | 20 ++ lib/content-store/build.rs | 13 + .../src/content => content-store/src}/hash.rs | 3 + lib/content-store/src/lib.rs | 34 +++ lib/content-store/src/pair.rs | 81 ++++++ lib/content-store/src/store.rs | 53 ++++ lib/content-store/src/store/local.rs | 46 +++ lib/content-store/src/store/pg.rs | 91 ++++++ lib/content-store/src/store/pg/migrate.rs | 44 +++ .../pg/migrations/U0001__content_pairs.sql | 6 + lib/dal-test/BUCK | 1 + lib/dal-test/Cargo.toml | 24 +- lib/dal-test/src/lib.rs | 33 ++- lib/dal/BUCK | 5 + lib/dal/Cargo.toml | 3 + lib/dal/src/change_set_pointer.rs | 53 ++-- lib/dal/src/content.rs | 8 - lib/dal/src/content/pair.rs | 77 ----- lib/dal/src/content/store.rs | 194 ------------ lib/dal/src/lib.rs | 2 - .../migrations/U3000__workspace_snapshots.sql | 29 +- .../migrations/U3001__change_set_pointers.sql | 10 + .../src/migrations/U3001__content_pairs.sql | 19 -- .../migrations/U3002__change_set_pointers.sql | 19 -- .../src/queries/change_set_pointers/find.sql | 3 - 
.../src/queries/workspace_snapshot/find.sql | 3 - .../find_for_change_set.sql | 3 +- lib/dal/src/workspace_snapshot.rs | 82 +++--- .../src/workspace_snapshot/content_address.rs | 3 +- lib/dal/src/workspace_snapshot/graph.rs | 275 +++++++++--------- .../src/workspace_snapshot/lamport_clock.rs | 3 +- lib/dal/src/workspace_snapshot/node_weight.rs | 3 +- .../node_weight/content_node_weight.rs | 2 +- .../node_weight/ordering_node_weight.rs | 6 +- .../node_weight/prop_node_weight.rs | 2 +- .../src/workspace_snapshot/vector_clock.rs | 1 - .../tests/integration_test/internal/mod.rs | 2 +- .../mostly_everything_is_a_node_or_an_edge.rs | 7 + .../content_store.rs | 21 ++ .../rebaser.rs | 16 +- lib/rebaser-client/Cargo.toml | 1 - lib/rebaser-client/src/lib.rs | 1 - lib/rebaser-core/src/lib.rs | 1 - lib/rebaser-server/BUCK | 44 +-- lib/rebaser-server/Cargo.toml | 20 +- lib/rebaser-server/src/lib.rs | 1 - lib/rebaser-server/tests/integration.rs | 9 - .../tests/integration_test/client.rs | 46 --- .../tests/integration_test/connection.rs | 17 -- .../tests/integration_test/mod.rs | 2 - lib/si-rabbitmq/src/lib.rs | 1 - lib/si-test-macros/src/lib.rs | 10 - 61 files changed, 1010 insertions(+), 719 deletions(-) create mode 100644 lib/content-store-test/BUCK create mode 100644 lib/content-store-test/Cargo.toml create mode 100644 lib/content-store-test/src/lib.rs create mode 100644 lib/content-store/BUCK create mode 100644 lib/content-store/Cargo.toml create mode 100644 lib/content-store/build.rs rename lib/{dal/src/content => content-store/src}/hash.rs (93%) create mode 100644 lib/content-store/src/lib.rs create mode 100644 lib/content-store/src/pair.rs create mode 100644 lib/content-store/src/store.rs create mode 100644 lib/content-store/src/store/local.rs create mode 100644 lib/content-store/src/store/pg.rs create mode 100644 lib/content-store/src/store/pg/migrate.rs create mode 100644 lib/content-store/src/store/pg/migrations/U0001__content_pairs.sql delete mode 100644 
lib/dal/src/content.rs delete mode 100644 lib/dal/src/content/pair.rs delete mode 100644 lib/dal/src/content/store.rs create mode 100644 lib/dal/src/migrations/U3001__change_set_pointers.sql delete mode 100644 lib/dal/src/migrations/U3001__content_pairs.sql delete mode 100644 lib/dal/src/migrations/U3002__change_set_pointers.sql delete mode 100644 lib/dal/src/queries/change_set_pointers/find.sql delete mode 100644 lib/dal/src/queries/workspace_snapshot/find.sql create mode 100644 lib/dal/tests/integration_test/internal/mostly_everything_is_a_node_or_an_edge.rs create mode 100644 lib/dal/tests/integration_test/internal/mostly_everything_is_a_node_or_an_edge/content_store.rs rename lib/dal/tests/integration_test/internal/{ => mostly_everything_is_a_node_or_an_edge}/rebaser.rs (88%) delete mode 100644 lib/rebaser-server/tests/integration.rs delete mode 100644 lib/rebaser-server/tests/integration_test/client.rs delete mode 100644 lib/rebaser-server/tests/integration_test/connection.rs delete mode 100644 lib/rebaser-server/tests/integration_test/mod.rs diff --git a/.ci/docker-compose.test-integration.yml b/.ci/docker-compose.test-integration.yml index 4197b04ede..bb6a9562dd 100644 --- a/.ci/docker-compose.test-integration.yml +++ b/.ci/docker-compose.test-integration.yml @@ -20,7 +20,7 @@ services: - "PGPASSWORD=bugbear" - "POSTGRES_USER=si" - "POSTGRES_DB=si" - - "POSTGRES_MULTIPLE_DBS=si_auth,si_module_index,si_test,si_test_dal,si_test_sdf_server,si_test_rebaser" + - "POSTGRES_MULTIPLE_DBS=si_content_store,si_auth,si_module_index,si_test,si_test_content_store,si_test_dal,si_test_sdf_server" nats: image: systeminit/nats:stable diff --git a/Cargo.lock b/Cargo.lock index 07db076e44..444e117968 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -936,6 +936,35 @@ dependencies = [ "url", ] +[[package]] +name = "content-store" +version = "0.1.0" +dependencies = [ + "async-trait", + "blake3", + "chrono", + "color-eyre", + "refinery", + "remain", + "serde", + "serde_json", + 
"si-data-pg", + "telemetry", + "thiserror", + "uuid", +] + +[[package]] +name = "content-store-test" +version = "0.1.0" +dependencies = [ + "color-eyre", + "content-store", + "si-data-pg", + "telemetry", + "uuid", +] + [[package]] name = "convert_case" version = "0.4.0" @@ -1270,6 +1299,8 @@ dependencies = [ "buck2-resources", "chrono", "ciborium", + "content-store", + "content-store-test", "convert_case 0.6.0", "council-server", "dal-test", @@ -1291,6 +1322,7 @@ dependencies = [ "pretty_assertions_sorted", "rand 0.8.5", "rebaser-client", + "rebaser-core", "rebaser-server", "refinery", "regex", @@ -1322,6 +1354,7 @@ version = "0.1.0" dependencies = [ "buck2-resources", "color-eyre", + "content-store-test", "council-server", "dal", "derive_builder", @@ -4115,12 +4148,8 @@ version = "0.1.0" dependencies = [ "buck2-resources", "dal", - "dal-test", "derive_builder", - "futures", "nats-subscriber", - "pretty_assertions_sorted", - "rebaser-client", "rebaser-core", "remain", "serde", @@ -4136,7 +4165,6 @@ dependencies = [ "telemetry", "thiserror", "tokio", - "tokio-stream", "ulid", "veritech-client", ] diff --git a/Cargo.toml b/Cargo.toml index a02dc0e3ef..f90c941dfe 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -6,9 +6,9 @@ resolver = "2" members = [ "bin/council", "bin/cyclone", - "bin/rebaser", "bin/module-index", "bin/pinga", + "bin/rebaser", "bin/sdf", "bin/si", "bin/veritech", @@ -16,6 +16,8 @@ members = [ "lib/buck2-resources", "lib/bytes-lines-codec", "lib/config-file", + "lib/content-store", + "lib/content-store-test", "lib/council-server", "lib/cyclone-client", "lib/cyclone-core", @@ -23,14 +25,14 @@ members = [ "lib/dal", "lib/dal-test", "lib/deadpool-cyclone", - "lib/rebaser-client", - "lib/rebaser-core", - "lib/rebaser-server", "lib/module-index-client", "lib/module-index-server", "lib/nats-subscriber", "lib/object-tree", "lib/pinga-server", + "lib/rebaser-client", + "lib/rebaser-core", + "lib/rebaser-server", "lib/sdf-server", "lib/si-crypto", 
"lib/si-data-nats", diff --git a/component/postgres/BUCK b/component/postgres/BUCK index 9feb0256ce..e667ec53be 100644 --- a/component/postgres/BUCK +++ b/component/postgres/BUCK @@ -28,7 +28,7 @@ docker_image( "--env", "POSTGRES_DB=si", "--env", - "POSTGRES_MULTIPLE_DBS=si_auth,si_test,si_test_dal,si_test_sdf_server,si_test_rebaser", + "POSTGRES_MULTIPLE_DBS=si_content_store,si_auth,si_test,si_test_content_store,si_test_dal,si_test_sdf_server", "--publish", "5432:5432", ], diff --git a/dev/docker-compose.platform.yml b/dev/docker-compose.platform.yml index bd2b9894d0..862be53da1 100644 --- a/dev/docker-compose.platform.yml +++ b/dev/docker-compose.platform.yml @@ -9,7 +9,7 @@ services: - "PGPASSWORD=bugbear" - "POSTGRES_USER=si" - "POSTGRES_DB=si" - - "POSTGRES_MULTIPLE_DBS=si_auth,si_module_index,si_test,si_test_dal,si_test_sdf_server,si_test_rebaser" + - "POSTGRES_MULTIPLE_DBS=si_content_store,si_auth,si_module_index,si_test,si_test_content_store,si_test_dal,si_test_sdf_server" ports: - "5432:5432" diff --git a/lib/content-store-test/BUCK b/lib/content-store-test/BUCK new file mode 100644 index 0000000000..4d481a65ec --- /dev/null +++ b/lib/content-store-test/BUCK @@ -0,0 +1,13 @@ +load("@prelude-si//:macros.bzl", "rust_library") + +rust_library( + name = "content-store-test", + deps = [ + "//lib/content-store:content-store", + "//lib/si-data-pg:si-data-pg", + "//lib/telemetry-rs:telemetry", + "//third-party/rust:color-eyre", + "//third-party/rust:uuid", + ], + srcs = glob(["src/**/*.rs"]), +) \ No newline at end of file diff --git a/lib/content-store-test/Cargo.toml b/lib/content-store-test/Cargo.toml new file mode 100644 index 0000000000..ee7e178087 --- /dev/null +++ b/lib/content-store-test/Cargo.toml @@ -0,0 +1,13 @@ +[package] +name = "content-store-test" +version = "0.1.0" +edition = "2021" +publish = false + +[dependencies] +content-store = { path = "../../lib/content-store" } +si-data-pg = { path = "../../lib/si-data-pg" } +telemetry = { path = 
"../../lib/telemetry-rs" } + +color-eyre = { workspace = true } +uuid = { workspace = true } \ No newline at end of file diff --git a/lib/content-store-test/src/lib.rs b/lib/content-store-test/src/lib.rs new file mode 100644 index 0000000000..98c35a3bb6 --- /dev/null +++ b/lib/content-store-test/src/lib.rs @@ -0,0 +1,170 @@ +//! This crate provides tools for using the content store in integration tests. + +#![warn( + missing_debug_implementations, + missing_docs, + unreachable_pub, + bad_style, + dead_code, + improper_ctypes, + non_shorthand_field_patterns, + no_mangle_generic_items, + overflowing_literals, + path_statements, + patterns_in_fns_without_body, + unconditional_recursion, + unused, + unused_allocation, + unused_comparisons, + unused_parens, + while_true, + clippy::missing_panics_doc +)] + +use color_eyre::eyre::{Result, WrapErr}; +use content_store::{PgMigrationHelpers, PgStore}; +use si_data_pg::{PgPool, PgPoolConfig}; +use telemetry::prelude::*; +use uuid::Uuid; + +const TEST_DBNAME: &str = "si_test_content_store"; +const TEST_APPLICATION_NAME: &str = "si-test-content-store"; + +/// A client for preparing the global content store test database. +#[derive(Debug)] +pub struct PgTestMigrationClient { + pg_pool: PgPool, +} + +impl PgTestMigrationClient { + /// Create a new [`test migration client`](Self). + pub async fn new() -> Result { + let pg_pool_config = PgPoolConfig { + dbname: TEST_DBNAME.to_string(), + application_name: TEST_APPLICATION_NAME.to_string(), + ..Default::default() + }; + let pg_pool = PgPool::new(&pg_pool_config).await?; + Ok(Self { pg_pool }) + } + + /// Test the connection to the global content store test database. + pub async fn test_connection(&self) -> Result<()> { + Ok(self.pg_pool.test_connection().await?) + } + + /// Drop old test databases using the global content store test database as the prefix. 
+ pub async fn drop_old_test_databases(&self) -> Result<()> { + let name_prefix = format!("{}_%", &self.pg_pool.db_name()); + let pg_conn = self.pg_pool.get().await?; + + let rows = pg_conn + .query( + "SELECT datname FROM pg_database WHERE datname LIKE $1", + &[&name_prefix.as_str()], + ) + .await?; + + for row in rows { + let dbname: String = row.try_get("datname")?; + debug!(db_name = %dbname, "dropping database"); + pg_conn + .execute(&format!("DROP DATABASE IF EXISTS {dbname}"), &[]) + .await?; + } + + Ok(()) + } + + /// Drop and create the public schema for the global content store test database. + pub async fn drop_and_create_public_schema(&self) -> Result<()> { + Ok(self.pg_pool.drop_and_create_public_schema().await?) + } + + /// Perform migrations for the global content store test database. + pub async fn migrate(&self) -> Result<()> { + Ok(PgMigrationHelpers::migrate(&self.pg_pool).await?) + } +} + +/// This unit struct provides method(s) for creating [`PgStores`](PgStore) in `dal` integration +/// tests. +#[allow(missing_debug_implementations)] +pub struct DalTestPgStore; + +impl DalTestPgStore { + /// Creates a test-specific database using the global content store test database. Then, a + /// [`PgPool`] is created for the new database. Finally, a [`PgStore`] is created from that + /// pool. + /// + /// This should be used over [`PgStore::new`] for `dal` integration tests until `dal-test` is + /// able to perform this functionality on its own. 
+ #[allow(clippy::new_ret_no_self)] + pub async fn new() -> Result { + let global_test_dbname = TEST_DBNAME.to_string(); + let global_application_name = TEST_APPLICATION_NAME.to_string(); + + // Connect to the 'postgres' database so we can copy our migrated template test database + let pg = PgPoolConfig { + dbname: global_test_dbname, + application_name: global_application_name, + ..Default::default() + }; + let mut new_pg_pool_config = pg.clone(); + new_pg_pool_config.dbname = "postgres".to_string(); + let new_pg_pool = PgPool::new(&new_pg_pool_config) + .await + .wrap_err("failed to create PgPool to db 'postgres'")?; + let db_conn = new_pg_pool + .get() + .await + .wrap_err("failed to connect to db 'postgres'")?; + + // Create new database from template + let db_name_suffix = Uuid::new_v4().as_simple().to_string(); + let dbname = format!("{}_{}", pg.dbname, db_name_suffix); + let query = format!( + "CREATE DATABASE {dbname} WITH TEMPLATE {} OWNER {};", + pg.dbname, pg.user, + ); + let db_exists_check = db_conn + .query_opt( + "SELECT datname FROM pg_database WHERE datname = $1", + &[&dbname], + ) + .await?; + if db_exists_check.is_none() { + info!(dbname = %dbname, "creating test-specific database"); + db_conn + .execute(&query, &[]) + .instrument(debug_span!("creating test database from template")) + .await + .wrap_err("failed to create test specific database")?; + } else { + info!(dbname = %dbname, "test-specific database already exists"); + } + // This is ugly, but we pretty much always want to know which test DB is used for + // any given test when it fails, and the logging/tracing macros aren't captured + // (or displayed) during tests, while `println!(...)` will be captured the same as + // "normal" test output, meaning it respects --nocapture and being displayed for + // failing tests. + println!("Content store test database: {}", &dbname); + + // Create the pg pool for the new database. 
+ new_pg_pool_config.dbname = dbname; + let test_specific_pg_pool = PgPool::new(&new_pg_pool_config) + .await + .wrap_err("failed to create PgPool to db 'postgres'")?; + + // Before returning the new store, test the connection. + test_specific_pg_pool + .test_connection() + .await + .wrap_err("failed to connect to the database")?; + + // Return the pg store using the new pool. + PgStore::new(test_specific_pg_pool) + .await + .wrap_err("failed to create PgStore for new, test-specific database") + } +} diff --git a/lib/content-store/BUCK b/lib/content-store/BUCK new file mode 100644 index 0000000000..26f8e270c3 --- /dev/null +++ b/lib/content-store/BUCK @@ -0,0 +1,26 @@ +load("@prelude-si//:macros.bzl", "rust_library") + +rust_library( + name = "content-store", + deps = [ + "//lib/si-data-pg:si-data-pg", + "//lib/telemetry-rs:telemetry", + "//third-party/rust:async-trait", + "//third-party/rust:blake3", + "//third-party/rust:chrono", + "//third-party/rust:color-eyre", + "//third-party/rust:refinery", + "//third-party/rust:remain", + "//third-party/rust:serde", + "//third-party/rust:serde_json", + "//third-party/rust:thiserror", + "//third-party/rust:uuid", + ], + srcs = glob([ + "src/**/*.rs", + "src/store/pg/migrations/**/*.sql", + ]), + env = { + "CARGO_MANIFEST_DIR": ".", + }, +) \ No newline at end of file diff --git a/lib/content-store/Cargo.toml b/lib/content-store/Cargo.toml new file mode 100644 index 0000000000..099021134c --- /dev/null +++ b/lib/content-store/Cargo.toml @@ -0,0 +1,20 @@ +[package] +name = "content-store" +version = "0.1.0" +edition = "2021" +publish = false + +[dependencies] +si-data-pg = { path = "../../lib/si-data-pg" } +telemetry = { path = "../../lib/telemetry-rs" } + +async-trait = { workspace = true } +blake3 = { workspace = true } +chrono = { workspace = true } +color-eyre = { workspace = true } +refinery = { workspace = true } +remain = { workspace = true } +serde = { workspace = true } +serde_json = { workspace = true } +thiserror = 
{ workspace = true } +uuid = { workspace = true } \ No newline at end of file diff --git a/lib/content-store/build.rs b/lib/content-store/build.rs new file mode 100644 index 0000000000..d2e79ef4d0 --- /dev/null +++ b/lib/content-store/build.rs @@ -0,0 +1,13 @@ +use std::fs; + +fn main() -> Result<(), Box> { + println!("cargo:rerun-if-changed=src/store/pg/migrations"); + for entry in fs::read_dir("./src/store/pg/migrations")? { + let entry = entry?; + let path = entry.path(); + if path.is_file() { + println!("cargo:rerun-if-changed={}", path.display()); + } + } + Ok(()) +} diff --git a/lib/dal/src/content/hash.rs b/lib/content-store/src/hash.rs similarity index 93% rename from lib/dal/src/content/hash.rs rename to lib/content-store/src/hash.rs index 6f58d17fbd..6ff6da9273 100644 --- a/lib/dal/src/content/hash.rs +++ b/lib/content-store/src/hash.rs @@ -7,15 +7,18 @@ use serde::{ use serde_json::Value; use thiserror::Error; +/// The [`blake3::Hash`] of a given set of contents. #[derive(Clone, Copy, Eq, Hash, PartialEq)] pub struct ContentHash(blake3::Hash); impl ContentHash { + /// Create a new [`ContentHash`] from a byte array. #[must_use] pub fn new(input: &[u8]) -> Self { Self(blake3::hash(input)) } + /// Provide a [`hasher`](ContentHasher) to create [`hashes`](ContentHash). pub fn hasher() -> ContentHasher { ContentHasher::new() } diff --git a/lib/content-store/src/lib.rs b/lib/content-store/src/lib.rs new file mode 100644 index 0000000000..600e76d9e0 --- /dev/null +++ b/lib/content-store/src/lib.rs @@ -0,0 +1,34 @@ +//! This crate provides the ability to interface with content stores of varying kinds as well as +//! the ability to generate hashes for hashable content blobs. 
+ +#![warn( + missing_debug_implementations, + missing_docs, + unreachable_pub, + bad_style, + dead_code, + improper_ctypes, + non_shorthand_field_patterns, + no_mangle_generic_items, + overflowing_literals, + path_statements, + patterns_in_fns_without_body, + unconditional_recursion, + unused, + unused_allocation, + unused_comparisons, + unused_parens, + while_true, + clippy::missing_panics_doc +)] + +mod hash; +mod pair; +mod store; + +pub use hash::ContentHash; +pub use store::local::LocalStore; +pub use store::pg::migrate::PgMigrationHelpers; +pub use store::pg::PgStore; +pub use store::Store; +pub use store::StoreError; diff --git a/lib/content-store/src/pair.rs b/lib/content-store/src/pair.rs new file mode 100644 index 0000000000..b0f984c361 --- /dev/null +++ b/lib/content-store/src/pair.rs @@ -0,0 +1,81 @@ +use crate::hash::ContentHash; +use chrono::{DateTime, Utc}; +use serde::{Deserialize, Serialize}; +use si_data_pg::{PgError, PgPool, PgPoolError, PgRow}; +use thiserror::Error; + +#[remain::sorted] +#[derive(Error, Debug)] +pub enum ContentPairError { + #[error("pg error: {0}")] + Pg(#[from] PgError), + #[error("pg pool error: {0}")] + PgPool(#[from] PgPoolError), + #[error("serde json error: {0}")] + SerdeJson(#[from] serde_json::Error), +} + +pub(crate) type ContentPairResult = Result; + +#[derive(Debug, Serialize, Deserialize)] +pub(crate) struct ContentPair { + key: String, + created_at: DateTime, + value: Vec, +} + +impl TryFrom for ContentPair { + type Error = ContentPairError; + + fn try_from(row: PgRow) -> Result { + Ok(Self { + key: row.try_get("key")?, + created_at: row.try_get("created_at")?, + value: row.try_get("value")?, + }) + } +} + +impl ContentPair { + pub(crate) fn value(&self) -> &[u8] { + &self.value + } + + pub(crate) async fn find_or_create( + pg_pool: &PgPool, + key: ContentHash, + value: Vec, + ) -> ContentPairResult { + let content_pair = match Self::find(pg_pool, &key).await? 
{ + Some(found_content_pair) => found_content_pair, + None => { + let client = pg_pool.get().await?; + let row = client + .query_one( + "INSERT INTO content_pairs (key, value) VALUES ($1, $2) RETURNING *", + &[&key.to_string(), &value], + ) + .await?; + Self::try_from(row)? + } + }; + Ok(content_pair) + } + + pub(crate) async fn find( + pg_pool: &PgPool, + key: &ContentHash, + ) -> ContentPairResult> { + let client = pg_pool.get().await?; + let maybe_row = client + .query_opt( + "SELECT * FROM content_pairs WHERE key = $1", + &[&key.to_string()], + ) + .await?; + match maybe_row { + Some(row) => Ok(Some(Self::try_from(row)?)), + None => Ok(None), + } + } +} diff --git a/lib/content-store/src/store.rs b/lib/content-store/src/store.rs new file mode 100644 index 0000000000..177fd04b69 --- /dev/null +++ b/lib/content-store/src/store.rs @@ -0,0 +1,53 @@ +pub(crate) mod local; +pub(crate) mod pg; + +use serde::de::DeserializeOwned; +use serde::Serialize; +use si_data_pg::{PgError, PgPoolError}; + +use thiserror::Error; + +use crate::hash::ContentHash; +use crate::pair::ContentPairError; + +#[allow(missing_docs)] +#[remain::sorted] +#[derive(Error, Debug)] +pub enum StoreError { + #[error("content pair error: {0}")] + ContentPair(#[from] ContentPairError), + #[error("pg error: {0}")] + Pg(#[from] PgError), + #[error("pg pool error: {0}")] + PgPool(#[from] PgPoolError), + #[error("serde json error: {0}")] + SerdeJson(#[from] serde_json::Error), +} + +type StoreResult = Result; + +/// This trait provides the minimum methods needed to create a content store. +#[async_trait::async_trait] +pub trait Store { + /// Indicates whether or not the store is empty. + fn is_empty(&self) -> bool; + + /// Indicates the number of keys in the store. + fn len(&self) -> usize; + + /// Adds an item to the store. + fn add(&mut self, object: &T) -> StoreResult + where + T: Serialize + ?Sized; + + /// Gets an item from the store. 
+ /// + /// Implementers of this trait may want to consider a "pull-through cache" implementation for + /// this method. + async fn get(&mut self, key: &ContentHash) -> StoreResult> + where + T: DeserializeOwned; + + /// Writes out content in the store to a persistent storage layer, if applicable. + async fn write(&mut self) -> StoreResult<()>; +} diff --git a/lib/content-store/src/store/local.rs b/lib/content-store/src/store/local.rs new file mode 100644 index 0000000000..7f091aef2d --- /dev/null +++ b/lib/content-store/src/store/local.rs @@ -0,0 +1,46 @@ +use crate::hash::ContentHash; +use crate::store::{Store, StoreResult}; +use serde::de::DeserializeOwned; +use serde::Serialize; +use std::collections::HashMap; + +/// A kind of content store that operates entirely in memory. +#[derive(Default, Debug)] +pub struct LocalStore(HashMap>); + +#[async_trait::async_trait] +impl Store for LocalStore { + fn is_empty(&self) -> bool { + self.0.is_empty() + } + + fn len(&self) -> usize { + self.0.len() + } + + fn add(&mut self, object: &T) -> StoreResult + where + T: Serialize + ?Sized, + { + let value = serde_json::to_vec(object)?; + let key = ContentHash::new(&value); + self.0.insert(key, value); + Ok(key) + } + + async fn get(&mut self, key: &ContentHash) -> StoreResult> + where + T: DeserializeOwned, + { + let maybe_object = match self.0.get(key) { + Some(value) => Some(serde_json::from_slice(value)?), + None => None, + }; + Ok(maybe_object) + } + + /// This a "no-op" for the [`LocalStore`] since everything is handled in memory. 
+ async fn write(&mut self) -> StoreResult<()> { + Ok(()) + } +} diff --git a/lib/content-store/src/store/pg.rs b/lib/content-store/src/store/pg.rs new file mode 100644 index 0000000000..003b9d8d6b --- /dev/null +++ b/lib/content-store/src/store/pg.rs @@ -0,0 +1,91 @@ +use crate::hash::ContentHash; +use crate::pair::ContentPair; +use crate::store::{Store, StoreResult}; +use serde::de::DeserializeOwned; +use serde::Serialize; +use si_data_pg::PgPool; +use std::collections::HashMap; + +pub(crate) mod migrate; + +/// A content store backed by Postgres. +#[derive(Debug)] +pub struct PgStore { + inner: HashMap, + pg_pool: PgPool, +} + +#[derive(Default, Debug, Clone, Eq, PartialEq)] +struct PgStoreItem { + value: Vec, + written: bool, +} + +impl PgStoreItem { + fn new(value: Vec) -> Self { + Self { + value, + ..Default::default() + } + } +} + +impl PgStore { + /// Create a new [`PgStore`] from a given [`PgPool`]. + pub async fn new(pg_pool: PgPool) -> StoreResult { + Ok(Self { + inner: Default::default(), + pg_pool, + }) + } +} + +#[async_trait::async_trait] +impl Store for PgStore { + fn is_empty(&self) -> bool { + self.inner.is_empty() + } + + fn len(&self) -> usize { + self.inner.len() + } + + fn add(&mut self, object: &T) -> StoreResult + where + T: Serialize + ?Sized, + { + let value = serde_json::to_vec(object)?; + let key = ContentHash::new(&value); + self.inner.insert(key, PgStoreItem::new(value)); + Ok(key) + } + + async fn get(&mut self, key: &ContentHash) -> StoreResult> + where + T: DeserializeOwned, + { + let object = match self.inner.get(key) { + Some(item) => serde_json::from_slice(&item.value)?, + None => match ContentPair::find(&self.pg_pool, key).await? { + Some(content_pair) => { + let bytes = content_pair.value(); + self.add(bytes)?; + serde_json::from_slice(bytes)? 
+ } + None => return Ok(None), + }, + }; + Ok(Some(object)) + } + + async fn write(&mut self) -> StoreResult<()> { + for (key, item) in self.inner.iter_mut() { + if !item.written { + ContentPair::find_or_create(&self.pg_pool, key.to_owned(), item.value.clone()) + .await?; + item.written = true; + } + } + Ok(()) + } +} diff --git a/lib/content-store/src/store/pg/migrate.rs b/lib/content-store/src/store/pg/migrate.rs new file mode 100644 index 0000000000..dc67148951 --- /dev/null +++ b/lib/content-store/src/store/pg/migrate.rs @@ -0,0 +1,44 @@ +use si_data_pg::{PgPool, PgPoolConfig, PgPoolError}; +use telemetry::prelude::*; +use thiserror::Error; + +mod embedded { + use refinery::embed_migrations; + + embed_migrations!("./src/store/pg/migrations"); +} + +#[remain::sorted] +#[derive(Error, Debug)] +pub enum PgMigrationHelpersError { + #[error("pg pool error: {0}")] + PgPool(#[from] PgPoolError), +} + +pub(crate) type PgMigrationHelpersResult = Result; + +const DBNAME: &str = "si_content_store"; +const APPLICATION_NAME: &str = "si_test_content_store"; + +/// A unit struct that provides helpers for performing [`PgStore`] migrations. +#[allow(missing_debug_implementations)] +pub struct PgMigrationHelpers; + +impl PgMigrationHelpers { + /// Create a new [`PgPool`] for a production [`PgStore`]. + pub async fn new_production_pg_pool() -> PgMigrationHelpersResult { + let pg_pool_config = PgPoolConfig { + dbname: DBNAME.to_string(), + application_name: APPLICATION_NAME.to_string(), + ..Default::default() + }; + let pg_pool = PgPool::new(&pg_pool_config).await?; + Ok(pg_pool) + } + + /// Perform migrations for the database. + #[instrument(skip_all)] + pub async fn migrate(pg_pool: &PgPool) -> PgMigrationHelpersResult<()> { + Ok(pg_pool.migrate(embedded::migrations::runner()).await?) 
+ } +} diff --git a/lib/content-store/src/store/pg/migrations/U0001__content_pairs.sql b/lib/content-store/src/store/pg/migrations/U0001__content_pairs.sql new file mode 100644 index 0000000000..110abd758f --- /dev/null +++ b/lib/content-store/src/store/pg/migrations/U0001__content_pairs.sql @@ -0,0 +1,6 @@ +CREATE TABLE content_pairs +( + key text primary key NOT NULL, + created_at timestamp with time zone NOT NULL DEFAULT CLOCK_TIMESTAMP(), + value bytea NOT NULL +); \ No newline at end of file diff --git a/lib/dal-test/BUCK b/lib/dal-test/BUCK index 8267764a6b..fa07a50b78 100644 --- a/lib/dal-test/BUCK +++ b/lib/dal-test/BUCK @@ -4,6 +4,7 @@ rust_library( name = "dal-test", deps = [ "//lib/buck2-resources:buck2-resources", + "//lib/content-store-test:content-store-test", "//lib/council-server:council-server", "//lib/dal:dal", "//lib/module-index-client:module-index-client", diff --git a/lib/dal-test/Cargo.toml b/lib/dal-test/Cargo.toml index 6fd7749ca0..8e0ff18d63 100644 --- a/lib/dal-test/Cargo.toml +++ b/lib/dal-test/Cargo.toml @@ -7,30 +7,32 @@ publish = false [dependencies] buck2-resources = { path = "../../lib/buck2-resources" } -color-eyre = { workspace = true } +content-store-test = { path = "../../lib/content-store-test" } council-server = { path = "../../lib/council-server" } dal = { path = "../../lib/dal" } -derive_builder = { workspace = true } -jwt-simple = { workspace = true } -lazy_static = { workspace = true } module-index-client = { path = "../../lib/module-index-client" } -names = { workspace = true } pinga-server = { path = "../../lib/pinga-server" } rebaser-server = { path = "../../lib/rebaser-server" } -remain = { workspace = true } -serde = { workspace = true } -serde_json = { workspace = true } si-crypto = { path = "../../lib/si-crypto" } si-data-nats = { path = "../../lib/si-data-nats" } si-data-pg = { path = "../../lib/si-data-pg" } si-std = { path = "../../lib/si-std" } si-test-macros = { path = "../../lib/si-test-macros" } -sodiumoxide 
= { workspace = true } telemetry = { path = "../../lib/telemetry-rs" } +veritech-client = { path = "../../lib/veritech-client" } +veritech-server = { path = "../../lib/veritech-server" } + +color-eyre = { workspace = true } +derive_builder = { workspace = true } +jwt-simple = { workspace = true } +lazy_static = { workspace = true } +names = { workspace = true } +remain = { workspace = true } +serde = { workspace = true } +serde_json = { workspace = true } +sodiumoxide = { workspace = true } tempfile = { workspace = true } thiserror = { workspace = true } tokio = { workspace = true } tracing-subscriber = { workspace = true } uuid = { workspace = true } -veritech-client = { path = "../../lib/veritech-client" } -veritech-server = { path = "../../lib/veritech-server" } diff --git a/lib/dal-test/src/lib.rs b/lib/dal-test/src/lib.rs index 4b4842f709..dce9c0a205 100644 --- a/lib/dal-test/src/lib.rs +++ b/lib/dal-test/src/lib.rs @@ -9,6 +9,7 @@ use std::{ }; use buck2_resources::Buck2Resources; +use content_store_test::PgTestMigrationClient; use dal::{ builtins::SelectedTestBuiltinSchemas, job::processor::{JobQueueProcessor, NatsProcessor}, @@ -294,6 +295,9 @@ impl TestContextBuilder { async fn build_for_test(&self) -> Result { let pg_pool = self.create_test_specific_db_with_pg_pool().await?; + // TODO(nick): create the test-specific content store db with a pg store upon request. Until + // this is resolved, use "TestPgStore::new" instead of "PgStore::new" for integration tests. + self.build_inner(pg_pool).await } @@ -526,7 +530,7 @@ async fn global_setup(test_context_builer: TestContextBuilder) -> Result<()> { tokio::spawn(pinga_server.run()); // Do not start up the Rebaser server since we do not need it for initial migrations. 
- info!("skipping Rebaser server startup and shutdown for initial migrations"); + debug!("skipping Rebaser server startup and shutdown for initial migrations"); // Start up a Veritech server as a task exclusively to allow the migrations to run info!("starting Veritech server for initial migrations"); @@ -534,6 +538,9 @@ async fn global_setup(test_context_builer: TestContextBuilder) -> Result<()> { let veritech_server_handle = veritech_server.shutdown_handle(); tokio::spawn(veritech_server.run()); + info!("creating client with pg pool for global Content Store test database"); + let content_store_pg_test_migration_client = PgTestMigrationClient::new().await?; + info!("testing database connection"); services_ctx .pg_pool() @@ -541,11 +548,23 @@ async fn global_setup(test_context_builer: TestContextBuilder) -> Result<()> { .await .wrap_err("failed to connect to database, is it running and available?")?; + info!("testing global Content Store database connection"); + content_store_pg_test_migration_client + .test_connection() + .await + .wrap_err("failed to connect to database, is it running and available?")?; + info!("dropping old test-specific databases"); drop_old_test_databases(services_ctx.pg_pool()) .await .wrap_err("failed to drop old databases")?; + info!("dropping old test-specific Content Store databases"); + content_store_pg_test_migration_client + .drop_old_test_databases() + .await + .wrap_err("failed to drop old databases")?; + // Ensure the database is totally clean, then run all migrations info!("dropping and re-creating the database schema"); services_ctx @@ -558,6 +577,18 @@ async fn global_setup(test_context_builer: TestContextBuilder) -> Result<()> { .await .wrap_err("failed to migrate database")?; + // Ensure the Content Store database is totally clean, then run all migrations + info!("dropping and re-creating the Content Store database schema"); + content_store_pg_test_migration_client + .drop_and_create_public_schema() + .await + .wrap_err("failed 
to drop and create the database")?; + info!("running Content Store database migrations"); + content_store_pg_test_migration_client + .migrate() + .await + .wrap_err("failed to migrate database")?; + // Check if the user would like to skip migrating schemas. This is helpful for boosting // performance when running integration tests that do not rely on builtin schemas. let selected_test_builtin_schemas = determine_selected_test_builtin_schemas(); diff --git a/lib/dal/BUCK b/lib/dal/BUCK index b506a351f9..101a38b9bd 100644 --- a/lib/dal/BUCK +++ b/lib/dal/BUCK @@ -7,6 +7,7 @@ load( rust_library( name = "dal", deps = [ + "//lib/content-store:content-store", "//lib/council-server:council-server", "//lib/nats-subscriber:nats-subscriber", "//lib/object-tree:object-tree", @@ -75,8 +76,12 @@ rust_library( rust_test( name = "test-integration", deps = [ + "//lib/content-store:content-store", + "//lib/content-store-test:content-store-test", "//lib/dal-test:dal-test", "//lib/rebaser-client:rebaser-client", + "//lib/rebaser-core:rebaser-core", + "//lib/rebaser-server:rebaser-server", "//lib/si-pkg:si-pkg", "//lib/veritech-client:veritech-client", "//third-party/rust:base64", diff --git a/lib/dal/Cargo.toml b/lib/dal/Cargo.toml index fb192e1bc0..daf3f5a941 100644 --- a/lib/dal/Cargo.toml +++ b/lib/dal/Cargo.toml @@ -13,6 +13,7 @@ base64 = { workspace = true } blake3 = { workspace = true } chrono = { workspace = true } ciborium = { workspace = true } +content-store = { path = "../../lib/content-store" } convert_case = { workspace = true } council-server = { path = "../../lib/council-server" } derive_more = { workspace = true } @@ -55,9 +56,11 @@ veritech-client = { path = "../../lib/veritech-client" } [dev-dependencies] buck2-resources = { path = "../../lib/buck2-resources" } +content-store-test = { path = "../../lib/content-store-test" } dal-test = { path = "../../lib/dal-test" } itertools = { workspace = true } pretty_assertions_sorted = { workspace = true } rebaser-client = { 
path = "../../lib/rebaser-client" } +rebaser-core = { path = "../../lib/rebaser-core" } rebaser-server = { path = "../../lib/rebaser-server" } tempfile = { workspace = true } diff --git a/lib/dal/src/change_set_pointer.rs b/lib/dal/src/change_set_pointer.rs index c1c10a4877..da0f66048b 100644 --- a/lib/dal/src/change_set_pointer.rs +++ b/lib/dal/src/change_set_pointer.rs @@ -2,17 +2,15 @@ use std::sync::{Arc, Mutex}; +use chrono::{DateTime, Utc}; use serde::{Deserialize, Serialize}; -use serde_json::Value; -use si_data_pg::PgError; +use si_data_pg::{PgError, PgRow}; use telemetry::prelude::*; use thiserror::Error; use ulid::{Generator, Ulid}; use crate::workspace_snapshot::WorkspaceSnapshotId; -use crate::{pk, standard_model, DalContext, StandardModelError, Timestamp, TransactionsError}; - -const FIND: &str = include_str!("queries/change_set_pointers/find.sql"); +use crate::{pk, DalContext, TransactionsError}; #[remain::sorted] #[derive(Debug, Error)] @@ -25,8 +23,6 @@ pub enum ChangeSetPointerError { Pg(#[from] PgError), #[error("serde json error: {0}")] SerdeJson(#[from] serde_json::Error), - #[error("standard model error: {0}")] - StandardModel(#[from] StandardModelError), #[error("transactions error: {0}")] Transactions(#[from] TransactionsError), } @@ -38,12 +34,29 @@ pk!(ChangeSetPointerId); #[derive(Clone, Serialize, Deserialize)] pub struct ChangeSetPointer { pub id: ChangeSetPointerId, - #[serde(flatten)] - pub timestamp: Timestamp, + pub created_at: DateTime, + pub updated_at: DateTime, + + pub name: String, + pub workspace_snapshot_id: Option, + #[serde(skip)] pub generator: Arc>, - pub workspace_snapshot_id: Option, - pub name: String, +} + +impl TryFrom for ChangeSetPointer { + type Error = ChangeSetPointerError; + + fn try_from(value: PgRow) -> Result { + Ok(Self { + id: value.try_get("id")?, + created_at: value.try_get("created_at")?, + updated_at: value.try_get("updated_at")?, + name: value.try_get("name")?, + workspace_snapshot_id: 
value.try_get("workspace_snapshot_id")?, + generator: Arc::new(Mutex::new(Default::default())), + }) + } } impl ChangeSetPointer { @@ -53,7 +66,8 @@ impl ChangeSetPointer { Ok(Self { id: id.into(), - timestamp: Timestamp::now(), + created_at: Utc::now(), + updated_at: Utc::now(), generator: Arc::new(Mutex::new(generator)), workspace_snapshot_id: None, name: "".to_string(), @@ -67,13 +81,11 @@ impl ChangeSetPointer { .await? .pg() .query_one( - "SELECT change_set_pointer_create_v1($1) AS object", + "INSERT INTO change_set_pointers (name) VALUES ($1) RETURNING *", &[&name], ) .await?; - let json: Value = row.try_get("object")?; - let object: Self = serde_json::from_value(json)?; - Ok(object) + Ok(Self::try_from(row)?) } pub fn generate_ulid(&self) -> ChangeSetPointerResult { @@ -93,7 +105,7 @@ impl ChangeSetPointer { .await? .pg() .query_none( - "UPDATE change_set_pointers AS object SET workspace_snapshot_id = $2 WHERE id = $1", + "UPDATE change_set_pointers SET workspace_snapshot_id = $2 WHERE id = $1", &[&self.id, &workspace_snapshot_id], ) .await?; @@ -110,9 +122,12 @@ impl ChangeSetPointer { .txns() .await? .pg() - .query_one(FIND, &[&change_set_pointer_id]) + .query_one( + "SELECT * FROM change_set_pointers WHERE id = $1", + &[&change_set_pointer_id], + ) .await?; - Ok(standard_model::object_from_row(row)?) + Ok(Self::try_from(row)?) } } diff --git a/lib/dal/src/content.rs b/lib/dal/src/content.rs deleted file mode 100644 index 77f62186fe..0000000000 --- a/lib/dal/src/content.rs +++ /dev/null @@ -1,8 +0,0 @@ -//! This module contains all domain logic related to working with content hashes and the storage -//! of them and their corresponding values. 
- -pub mod hash; -pub mod pair; -pub mod store; - -pub use store::Store; diff --git a/lib/dal/src/content/pair.rs b/lib/dal/src/content/pair.rs deleted file mode 100644 index 5b33008b89..0000000000 --- a/lib/dal/src/content/pair.rs +++ /dev/null @@ -1,77 +0,0 @@ -use serde::{Deserialize, Serialize}; -use serde_json::Value; -use si_data_pg::PgError; -use thiserror::Error; - -use crate::content::hash::ContentHash; -use crate::{DalContext, StandardModelError, Timestamp, TransactionsError}; - -#[remain::sorted] -#[derive(Error, Debug)] -pub enum ContentPairError { - #[error("si_data_pg error: {0}")] - Pg(#[from] PgError), - #[error("serde json error: {0}")] - SerdeJson(#[from] serde_json::Error), - #[error("standard model error: {0}")] - StandardModel(#[from] StandardModelError), - #[error("transactions error: {0}")] - Transactions(#[from] TransactionsError), -} - -pub type ContentPairResult = Result; - -#[derive(Debug, Serialize, Deserialize)] -pub struct ContentPair { - #[serde(flatten)] - timestamp: Timestamp, - key: String, - value: Value, -} - -impl ContentPair { - pub async fn find_or_create( - ctx: &DalContext, - key: ContentHash, - value: Value, - ) -> ContentPairResult<(Self, bool)> { - let (pair, created): (Self, bool) = match Self::find(ctx, &key).await? { - Some(found) => (found, false), - None => { - let row = ctx - .txns() - .await? - .pg() - .query_one( - "SELECT content_pair_create_v1($1) AS object", - &[&key.to_string(), &value], - ) - .await?; - let json: Value = row.try_get("object")?; - (serde_json::from_value(json)?, true) - } - }; - Ok((pair, created)) - } - - pub async fn find(ctx: &DalContext, key: &ContentHash) -> ContentPairResult> { - let maybe_row = ctx - .txns() - .await? 
- .pg() - .query_opt( - "SELECT * FROM content_pairs WHERE key = $1 AS object", - &[&key.to_string()], - ) - .await?; - let result = match maybe_row { - Some(found_row) => { - let json: Value = found_row.try_get("object")?; - let object: Self = serde_json::from_value(json)?; - Some(object) - } - None => None, - }; - Ok(result) - } -} diff --git a/lib/dal/src/content/store.rs b/lib/dal/src/content/store.rs deleted file mode 100644 index b48f7c41f8..0000000000 --- a/lib/dal/src/content/store.rs +++ /dev/null @@ -1,194 +0,0 @@ -use serde::de::DeserializeOwned; -use serde::{Deserialize, Serialize}; -use serde_json::Value; -use std::collections::HashMap; -use thiserror::Error; - -use crate::content::hash::ContentHash; - -#[remain::sorted] -#[derive(Error, Debug)] -pub enum StoreError { - #[error("serde json error: {0}")] - SerdeJson(#[from] serde_json::Error), -} - -pub type StoreResult = Result; - -#[derive(Debug, Clone, Serialize, Deserialize)] -struct StoreItem { - value: Value, - processed: bool, -} - -#[derive(Default, Debug, Serialize, Deserialize)] -pub struct Store(HashMap); - -impl Store { - pub fn new() -> Self { - Self::default() - } - - pub fn is_empty(&self) -> bool { - self.0.is_empty() - } - - pub fn len(&self) -> usize { - self.0.len() - } - - // NOTE(nick): use local, pull through or return None. - pub fn get(&self, key: &ContentHash) -> StoreResult> - where - T: DeserializeOwned, - { - let maybe_item: Option = self.0.get(key).cloned(); - let value = match maybe_item { - Some(found_item) => Some(serde_json::from_value(found_item.value)?), - None => { - // TODO(nick): either populate from database ("pull-through caching") or return None. - None - } - }; - Ok(value) - } - - // NOTE(nick): existing entries must remain immutable. 
- pub fn add(&mut self, value: T) -> StoreResult<(ContentHash, bool)> - where - T: Serialize + ToOwned, - { - let value = serde_json::to_value(value)?; - let hash = ContentHash::from(&value); - let already_in_store = self.0.contains_key(&hash); - if !already_in_store { - // NOTE(nick): we DO NOT check that it is in the database because it does not matter. - // We wait until write time to talk to the database. - self.0.insert( - hash, - StoreItem { - value, - processed: false, - }, - ); - } - Ok((hash, already_in_store)) - } - - // TODO(nick): actually do stuff with the database. - pub fn write(&mut self) -> StoreResult<()> { - for item in self.0.values_mut() { - if !item.processed { - // TODO(nick): perform find or create in the database. Either way, we need to - // set "processed" to true for the next time we perform a batch write. - item.processed = true; - } - } - Ok(()) - } -} - -#[cfg(test)] -mod tests { - use super::*; - - #[test] - fn add() { - let mut store = Store::new(); - - // Add an item. - let sirens_value = "SIRENS".to_string(); - let (sirens_hash, already_in_store) = store.add(&sirens_value).expect("could not add item"); - assert!(!already_in_store); - - // Grab the value from the store and perform the assertion. - let found_sirens_value: String = store - .get(&sirens_hash) - .expect("could not get item") - .expect("no item found"); - assert_eq!( - sirens_value, // expected - found_sirens_value // actual - ); - assert_eq!( - 1, // expected - store.len() // actual - ); - - // Add another item. - let meltdown_value = "MELTDOWN".to_string(); - let (meltdown_hash, _) = store.add(&meltdown_value).expect("could not add item"); - assert!(!already_in_store); - - // Check both entries to ensure that nothing has drifted. 
- let found_meltdown_value: String = store - .get(&meltdown_hash) - .expect("could not get item") - .expect("no item found"); - assert_eq!( - meltdown_value, // expected - found_meltdown_value // actual - ); - let found_sirens_value: String = store - .get(&sirens_hash) - .expect("could not get item") - .expect("no item found"); - assert_eq!( - sirens_value, // expected - found_sirens_value // actual - ); - assert_eq!( - 2, // expected - store.len() // actual - ); - - // Try to add one of the items again and check if it already exists. - let (second_meltdown_hash, already_in_store) = - store.add(&meltdown_value).expect("could not add item"); - assert!(already_in_store); - assert_eq!( - meltdown_hash, // expected - second_meltdown_hash, // actual - ) - } - - #[test] - fn write() { - let mut store = Store::new(); - - // Populate the store and then write. - for value in ["PARASAIL", "TELEKINESIS"] { - let (_, already_in_store) = store.add(value).expect("could not add item"); - assert!(!already_in_store); - } - - // Since purely "adding" does not involve the database, none of our entries known if they - // were processed. - for item in store.0.values() { - assert!(!item.processed); - } - - // FIXME(nick): once write actually talks to the database, this will need to move to an - // integration test. Check that all items have been processed. - store.write().expect("could not write"); - for item in store.0.values() { - assert!(item.processed); - } - - // Add another item. - let (utopia_hash, already_in_store) = store.add("UTOPIA").expect("could not add item"); - assert!(!already_in_store); - - // Check that only the new item has not been processed and that all other items have been - // processed. - for (hash, item) in &store.0 { - assert_eq!(hash != &utopia_hash, item.processed); - } - - // Write again and assert all items have been processed. 
- store.write().expect("could not write"); - for item in store.0.values() { - assert!(item.processed); - } - } -} diff --git a/lib/dal/src/lib.rs b/lib/dal/src/lib.rs index f409b197eb..802ce14447 100644 --- a/lib/dal/src/lib.rs +++ b/lib/dal/src/lib.rs @@ -128,14 +128,12 @@ pub mod action; pub mod action_prototype; pub mod actor_view; pub mod attribute; -pub mod authentication_prototype; pub mod builtins; pub mod change_set; pub mod change_set_pointer; pub mod change_status; pub mod code_view; pub mod component; -pub mod content; pub mod context; pub mod diagram; pub mod edge; diff --git a/lib/dal/src/migrations/U3000__workspace_snapshots.sql b/lib/dal/src/migrations/U3000__workspace_snapshots.sql index f6b3df958b..e35f915d15 100644 --- a/lib/dal/src/migrations/U3000__workspace_snapshots.sql +++ b/lib/dal/src/migrations/U3000__workspace_snapshots.sql @@ -1,18 +1,19 @@ CREATE TABLE workspace_snapshots ( - id ident NOT NULL DEFAULT ident_create_v1(), - created_at timestamp with time zone NOT NULL DEFAULT CLOCK_TIMESTAMP(), - updated_at timestamp with time zone NOT NULL DEFAULT CLOCK_TIMESTAMP(), - snapshot jsonb NOT NULL + id ident primary key NOT NULL DEFAULT ident_create_v1(), + created_at timestamp with time zone NOT NULL DEFAULT CLOCK_TIMESTAMP(), + snapshot jsonb NOT NULL + -- TODO(nick): add once workspaces are added + -- workspace_id ident REFERENCES workspaces_v2 (id) NOT NULL, + -- TODO(nick): replace the existing primary key with this once workspaces are added + -- primary key (id, workspace_id) ); -CREATE UNIQUE INDEX unique_workspace_snapshots ON workspace_snapshots (id); - -CREATE OR REPLACE FUNCTION workspace_snapshot_create_v1( - this_snapshot jsonb -) RETURNS jsonb AS -$$ - INSERT INTO workspace_snapshots (snapshot) - VALUES (this_snapshot) - RETURNING row_to_json(workspace_snapshots) AS object; -$$ LANGUAGE SQL VOLATILE; +-- TODO(nick): add the new workspaces to their own migration. 
+-- CREATE TABLE workspaces_v2 +-- ( +-- id ident primary key NOT NULL DEFAULT ident_create_v1(), +-- created_at timestamp with time zone NOT NULL DEFAULT CLOCK_TIMESTAMP(), +-- updated_at timestamp with time zone NOT NULL DEFAULT CLOCK_TIMESTAMP(), +-- base_change_set_id ident REFERENCES change_set_pointers (id) +-- ); \ No newline at end of file diff --git a/lib/dal/src/migrations/U3001__change_set_pointers.sql b/lib/dal/src/migrations/U3001__change_set_pointers.sql new file mode 100644 index 0000000000..24aa92fff6 --- /dev/null +++ b/lib/dal/src/migrations/U3001__change_set_pointers.sql @@ -0,0 +1,10 @@ +CREATE TABLE change_set_pointers +( + id ident primary key NOT NULL DEFAULT ident_create_v1(), + created_at timestamp with time zone NOT NULL DEFAULT CLOCK_TIMESTAMP(), + updated_at timestamp with time zone NOT NULL DEFAULT CLOCK_TIMESTAMP(), + name text NOT NULL, + -- TODO(nick): add once workspaces are added + -- workspace_id ident REFERENCES workspaces_v2 (id) NOT NULL, + workspace_snapshot_id ident REFERENCES workspace_snapshots (id) +); diff --git a/lib/dal/src/migrations/U3001__content_pairs.sql b/lib/dal/src/migrations/U3001__content_pairs.sql deleted file mode 100644 index 71a0259459..0000000000 --- a/lib/dal/src/migrations/U3001__content_pairs.sql +++ /dev/null @@ -1,19 +0,0 @@ -CREATE TABLE content_pairs -( - created_at timestamp with time zone NOT NULL DEFAULT CLOCK_TIMESTAMP(), - updated_at timestamp with time zone NOT NULL DEFAULT CLOCK_TIMESTAMP(), - key text NOT NULL, - value jsonb NOT NULL -); - -CREATE UNIQUE INDEX unique_content_pairs ON content_pairs (key, value); - -CREATE OR REPLACE FUNCTION content_pair_create_v1( - this_key text, - this_value jsonb -) RETURNS jsonb AS -$$ - INSERT INTO content_pairs (key, value) - VALUES (this_key, this_value) - RETURNING row_to_json(content_pairs) AS object; -$$ LANGUAGE SQL VOLATILE; diff --git a/lib/dal/src/migrations/U3002__change_set_pointers.sql b/lib/dal/src/migrations/U3002__change_set_pointers.sql 
deleted file mode 100644 index d5e2bae19d..0000000000 --- a/lib/dal/src/migrations/U3002__change_set_pointers.sql +++ /dev/null @@ -1,19 +0,0 @@ -CREATE TABLE change_set_pointers -( - id ident NOT NULL DEFAULT ident_create_v1(), - created_at timestamp with time zone NOT NULL DEFAULT CLOCK_TIMESTAMP(), - updated_at timestamp with time zone NOT NULL DEFAULT CLOCK_TIMESTAMP(), - name text NOT NULL, - workspace_snapshot_id ident -); - -CREATE UNIQUE INDEX unique_change_set_pointers ON change_set_pointers (id); - -CREATE OR REPLACE FUNCTION change_set_pointer_create_v1( - this_name text -) RETURNS jsonb AS -$$ -INSERT INTO change_set_pointers (name) -VALUES (this_name) -RETURNING row_to_json(change_set_pointers) AS object; -$$ LANGUAGE SQL VOLATILE; diff --git a/lib/dal/src/queries/change_set_pointers/find.sql b/lib/dal/src/queries/change_set_pointers/find.sql deleted file mode 100644 index d660730134..0000000000 --- a/lib/dal/src/queries/change_set_pointers/find.sql +++ /dev/null @@ -1,3 +0,0 @@ -SELECT row_to_json(change_set_pointers.*) AS object -FROM change_set_pointers -WHERE change_set_pointers.id = $1 diff --git a/lib/dal/src/queries/workspace_snapshot/find.sql b/lib/dal/src/queries/workspace_snapshot/find.sql deleted file mode 100644 index 6b0e2840ed..0000000000 --- a/lib/dal/src/queries/workspace_snapshot/find.sql +++ /dev/null @@ -1,3 +0,0 @@ -SELECT row_to_json(workspace_snapshots.*) AS object -FROM workspace_snapshots - WHERE workspace_snapshots.id = $1 diff --git a/lib/dal/src/queries/workspace_snapshot/find_for_change_set.sql b/lib/dal/src/queries/workspace_snapshot/find_for_change_set.sql index 260a220772..1427d7b87b 100644 --- a/lib/dal/src/queries/workspace_snapshot/find_for_change_set.sql +++ b/lib/dal/src/queries/workspace_snapshot/find_for_change_set.sql @@ -1,5 +1,4 @@ -SELECT row_to_json(workspace_snapshots.*) AS object -FROM workspace_snapshots +SELECT * FROM workspace_snapshots JOIN change_set_pointers ON change_set_pointers.id = $1 AND 
change_set_pointers.workspace_snapshot_id = workspace_snapshots.id \ No newline at end of file diff --git a/lib/dal/src/workspace_snapshot.rs b/lib/dal/src/workspace_snapshot.rs index 818f0e5bb7..2415770329 100644 --- a/lib/dal/src/workspace_snapshot.rs +++ b/lib/dal/src/workspace_snapshot.rs @@ -12,7 +12,6 @@ // overflowing_literals, // path_statements, // patterns_in_fns_without_body, -// private_in_public, // unconditional_recursion, // unused, // unused_allocation, @@ -31,13 +30,13 @@ pub mod node_weight; pub mod update; pub mod vector_clock; +use chrono::{DateTime, Utc}; use petgraph::prelude::*; use serde::{Deserialize, Serialize}; use serde_json::Value; use si_data_pg::{PgError, PgRow}; use telemetry::prelude::*; use thiserror::Error; -use ulid::Ulid; use crate::change_set_pointer::{ChangeSetPointer, ChangeSetPointerError, ChangeSetPointerId}; use crate::workspace_snapshot::conflict::Conflict; @@ -45,12 +44,11 @@ use crate::workspace_snapshot::edge_weight::EdgeWeight; use crate::workspace_snapshot::node_weight::NodeWeight; use crate::workspace_snapshot::update::Update; use crate::{ - pk, standard_model, + pk, workspace_snapshot::{graph::WorkspaceSnapshotGraphError, node_weight::NodeWeightError}, - DalContext, StandardModelError, Timestamp, TransactionsError, WorkspaceSnapshotGraph, + DalContext, TransactionsError, WorkspaceSnapshotGraph, }; -const FIND: &str = include_str!("queries/workspace_snapshot/find.sql"); const FIND_FOR_CHANGE_SET: &str = include_str!("queries/workspace_snapshot/find_for_change_set.sql"); @@ -67,8 +65,6 @@ pub enum WorkspaceSnapshotError { Poison(String), #[error("serde json error: {0}")] SerdeJson(#[from] serde_json::Error), - #[error("standard model error: {0}")] - StandardModel(#[from] StandardModelError), #[error("transactions error: {0}")] Transactions(#[from] TransactionsError), #[error("WorkspaceSnapshotGraph error: {0}")] @@ -83,58 +79,68 @@ pk!(WorkspaceSnapshotId); #[derive(Debug, Serialize, Deserialize)] pub struct 
WorkspaceSnapshot { - pub id: WorkspaceSnapshotId, - #[serde(flatten)] - timestamp: Timestamp, + id: WorkspaceSnapshotId, + created_at: DateTime, snapshot: Value, #[serde(skip_serializing)] working_copy: Option, } +impl TryFrom for WorkspaceSnapshot { + type Error = WorkspaceSnapshotError; + + fn try_from(row: PgRow) -> Result { + Ok(Self { + id: row.try_get("id")?, + created_at: row.try_get("created_at")?, + snapshot: row.try_get("snapshot")?, + working_copy: None, + }) + } +} + impl WorkspaceSnapshot { pub async fn initial( ctx: &DalContext, change_set: &ChangeSetPointer, ) -> WorkspaceSnapshotResult { let snapshot = WorkspaceSnapshotGraph::new(change_set)?; - let serialized_snapshot = serde_json::to_value(&snapshot)?; - - let row = ctx - .txns() - .await? - .pg() - .query_one( - "SELECT workspace_snapshot_create_v1($1) AS object", - &[&serialized_snapshot], - ) - .await?; - let json: Value = row.try_get("object")?; - let object: WorkspaceSnapshot = serde_json::from_value(json)?; - Ok(object) + Ok(Self::new_inner(ctx, snapshot).await?) } pub async fn write(&mut self, ctx: &DalContext) -> WorkspaceSnapshotResult<()> { let working_copy = self.working_copy()?; working_copy.cleanup(); - let serialized_snapshot = serde_json::to_value(working_copy.clone())?; + let object = Self::new_inner(ctx, working_copy.clone()).await?; + + self.id = object.id; + self.created_at = object.created_at; + self.snapshot = object.snapshot; + Ok(()) + } + + /// This _private_ method crates a new, immutable [`WorkspaceSnapshot`] from a + /// [`WorkspaceSnapshotGraph`]. + async fn new_inner( + ctx: &DalContext, + graph: WorkspaceSnapshotGraph, + ) -> WorkspaceSnapshotResult { + let serialized_snapshot = serde_json::to_value(graph)?; let row = ctx .txns() .await? .pg() .query_one( - "SELECT workspace_snapshot_create_v1($1) AS object", + "INSERT INTO workspace_snapshots (snapshot) VALUES ($1) RETURNING *", &[&serialized_snapshot], ) .await?; + Ok(Self::try_from(row)?) 
+ } - let json: Value = row.try_get("object")?; - let object: WorkspaceSnapshot = serde_json::from_value(json)?; - self.id = object.id; - self.timestamp = object.timestamp; - self.snapshot = object.snapshot; - - Ok(()) + pub fn id(&self) -> WorkspaceSnapshotId { + self.id } fn working_copy(&mut self) -> WorkspaceSnapshotResult<&mut WorkspaceSnapshotGraph> { @@ -156,7 +162,6 @@ impl WorkspaceSnapshot { pub fn add_edge( &mut self, - change_set: &ChangeSetPointer, from_node_index: NodeIndex, edge_weight: EdgeWeight, to_node_index: NodeIndex, @@ -189,9 +194,12 @@ impl WorkspaceSnapshot { .txns() .await? .pg() - .query_one(FIND, &[&workspace_snapshot_id]) + .query_one( + "SELECT * FROM workspace_snapshots WHERE id = $1", + &[&workspace_snapshot_id], + ) .await?; - Ok(standard_model::object_from_row(row)?) + Ok(Self::try_from(row)?) } #[instrument(skip_all)] @@ -205,6 +213,6 @@ impl WorkspaceSnapshot { .pg() .query_one(FIND_FOR_CHANGE_SET, &[&change_set_pointer_id]) .await?; - Ok(standard_model::object_from_row(row)?) + Ok(Self::try_from(row)?) 
} } diff --git a/lib/dal/src/workspace_snapshot/content_address.rs b/lib/dal/src/workspace_snapshot/content_address.rs index 9644808bd0..4c47f6dbbc 100644 --- a/lib/dal/src/workspace_snapshot/content_address.rs +++ b/lib/dal/src/workspace_snapshot/content_address.rs @@ -1,7 +1,6 @@ +use content_store::ContentHash; use serde::{Deserialize, Serialize}; -use crate::content::hash::ContentHash; - #[remain::sorted] #[derive(Debug, Serialize, Deserialize, Copy, Clone, PartialEq, Eq)] /// The type of the object, and the content-addressable-storage address (content hash) diff --git a/lib/dal/src/workspace_snapshot/graph.rs b/lib/dal/src/workspace_snapshot/graph.rs index 3498b18c0a..fe510a6107 100644 --- a/lib/dal/src/workspace_snapshot/graph.rs +++ b/lib/dal/src/workspace_snapshot/graph.rs @@ -1,4 +1,5 @@ use chrono::Utc; +use content_store::{ContentHash, Store, StoreError}; use petgraph::{algo, prelude::*, visit::DfsEvent}; use serde::{Deserialize, Serialize}; use std::collections::{HashMap, HashSet, VecDeque}; @@ -7,16 +8,12 @@ use thiserror::Error; use ulid::Ulid; use crate::change_set_pointer::{ChangeSetPointer, ChangeSetPointerError}; -use crate::{ - content::{self, store::StoreError}, - workspace_snapshot::{ - conflict::Conflict, - content_address::ContentAddress, - edge_weight::{EdgeWeight, EdgeWeightError, EdgeWeightKind}, - node_weight::{NodeWeight, NodeWeightError, OrderingNodeWeight}, - update::Update, - }, - ContentHash, +use crate::workspace_snapshot::{ + conflict::Conflict, + content_address::ContentAddress, + edge_weight::{EdgeWeight, EdgeWeightError, EdgeWeightKind}, + node_weight::{NodeWeight, NodeWeightError, OrderingNodeWeight}, + update::Update, }; pub type LineageId = Ulid; @@ -211,9 +208,9 @@ impl WorkspaceSnapshotGraph { Err(WorkspaceSnapshotGraphError::UnableToAddNode) } - pub fn attribute_value_view( + pub async fn attribute_value_view( &self, - content_store: &content::Store, + content_store: &mut impl Store, root_index: NodeIndex, ) -> 
WorkspaceSnapshotGraphResult { let mut view = serde_json::json![{}]; @@ -222,7 +219,8 @@ impl WorkspaceSnapshotGraph { while let Some((current_node_index, write_location)) = nodes_to_add.pop_front() { let current_node_weight = self.get_node_weight(current_node_index)?; let current_node_content: serde_json::Value = content_store - .get(¤t_node_weight.content_hash())? + .get(¤t_node_weight.content_hash()) + .await? .ok_or(WorkspaceSnapshotGraphError::ContentMissingForContentHash)?; // We don't need to care what kind the prop is, since assigning a value via // `pointer_mut` completely overwrites the existing value, regardless of any @@ -1525,7 +1523,8 @@ fn prop_node_indexes_for_node_index( #[cfg(test)] mod test { use super::*; - use crate::{ComponentId, ContentHash, FuncId, PropId, PropKind, SchemaId, SchemaVariantId}; + use crate::{ComponentId, FuncId, PropId, PropKind, SchemaId, SchemaVariantId}; + use content_store::ContentHash; use pretty_assertions_sorted::assert_eq; #[derive(Debug, PartialEq)] @@ -4695,17 +4694,17 @@ mod test { ); } - #[test] - fn attribute_value_build_view() { + #[tokio::test] + async fn attribute_value_build_view() { let change_set = ChangeSetPointer::new_local().expect("Unable to create ChangeSet"); let change_set = &change_set; let mut graph = WorkspaceSnapshotGraph::new(change_set) .expect("Unable to create WorkspaceSnapshotGraph"); - let mut content_store = crate::content::Store::new(); + let mut content_store = content_store::LocalStore::default(); let schema_id = change_set.generate_ulid().expect("Unable to generate Ulid"); - let (schema_content_hash, _) = content_store - .add(serde_json::json!("Schema A")) + let schema_content_hash = content_store + .add(&serde_json::json!("Schema A")) .expect("Unable to add to content store"); let schema_node_index = graph .add_node( @@ -4727,8 +4726,8 @@ mod test { .expect("Unable to add root -> schema edge"); let schema_variant_id = change_set.generate_ulid().expect("Unable to generate Ulid"); - let 
(schema_variant_content_hash, _) = content_store - .add(serde_json::json!("Schema Variant A")) + let schema_variant_content_hash = content_store + .add(&serde_json::json!("Schema Variant A")) .expect("Unable to add to content store"); let schema_variant_node_index = graph .add_node( @@ -4752,8 +4751,8 @@ mod test { .expect("Unable to add schema -> schema variant edge"); let root_prop_id = change_set.generate_ulid().expect("Unable to generate Ulid"); - let (root_prop_content_hash, _) = content_store - .add(serde_json::json!("Root prop")) + let root_prop_content_hash = content_store + .add(&serde_json::json!("Root prop")) .expect("Unable to add to content store"); let root_prop_node_index = graph .add_node( @@ -4779,8 +4778,8 @@ mod test { .expect("Unable to add schema variant -> root prop edge"); let si_prop_id = change_set.generate_ulid().expect("Unable to generate Ulid"); - let (si_prop_content_hash, _) = content_store - .add(serde_json::json!("SI Prop Content")) + let si_prop_content_hash = content_store + .add(&serde_json::json!("SI Prop Content")) .expect("Unable to add to content store"); let si_prop_node_index = graph .add_node( @@ -4806,8 +4805,8 @@ mod test { .expect("Unable to add root prop -> si prop edge"); let name_prop_id = change_set.generate_ulid().expect("Unable to generate Ulid"); - let (name_prop_content_hash, _) = content_store - .add(serde_json::json!("Name Prop Content")) + let name_prop_content_hash = content_store + .add(&serde_json::json!("Name Prop Content")) .expect("Unable to add to content store"); let name_prop_node_index = graph .add_node( @@ -4833,8 +4832,8 @@ mod test { .expect("Unable to add si prop -> name prop edge"); let component_id = change_set.generate_ulid().expect("Unable to generate Ulid"); - let (component_content_hash, _) = content_store - .add(serde_json::json!("Component Content")) + let component_content_hash = content_store + .add(&serde_json::json!("Component Content")) .expect("Unable to add to content store"); let 
component_node_index = graph .add_node( @@ -4868,8 +4867,8 @@ mod test { .expect("Unable to add component -> schema variant edge"); let root_av_id = change_set.generate_ulid().expect("Unable to generate Ulid"); - let (root_av_content_hash, _) = content_store - .add(serde_json::json!({})) + let root_av_content_hash = content_store + .add(&serde_json::json!({})) .expect("Unable to add to content store"); let root_av_node_index = graph .add_node( @@ -4905,8 +4904,8 @@ mod test { .expect("Unable to add root av -> root prop edge"); let si_av_id = change_set.generate_ulid().expect("Unable to generate Ulid"); - let (si_av_content_hash, _) = content_store - .add(serde_json::json!({})) + let si_av_content_hash = content_store + .add(&serde_json::json!({})) .expect("Unable to add to content store"); let si_av_node_index = graph .add_node( @@ -4942,8 +4941,8 @@ mod test { .expect("Unable to add si av -> si prop edge"); let name_av_id = change_set.generate_ulid().expect("Unable to generate Ulid"); - let (name_av_content_hash, _) = content_store - .add(serde_json::json!("component name")) + let name_av_content_hash = content_store + .add(&serde_json::json!("component name")) .expect("Unable to add to content store"); let name_av_node_index = graph .add_node( @@ -4985,26 +4984,27 @@ mod test { serde_json::json![{"si": {"name": "component name"}}], graph .attribute_value_view( - &content_store, + &mut content_store, graph .get_node_index_by_id(root_av_id) .expect("Unable to get NodeIndex") ) + .await .expect("Unable to generate attribute value view"), ); } - #[test] - fn attribute_value_build_view_unordered_object() { + #[tokio::test] + async fn attribute_value_build_view_unordered_object() { let change_set = ChangeSetPointer::new_local().expect("Unable to create ChangeSet"); let change_set = &change_set; let mut graph = WorkspaceSnapshotGraph::new(change_set) .expect("Unable to create WorkspaceSnapshotGraph"); - let mut content_store = crate::content::Store::new(); + let mut 
content_store = content_store::LocalStore::default(); let schema_id = change_set.generate_ulid().expect("Unable to generate Ulid"); - let (schema_content_hash, _) = content_store - .add(serde_json::json!("Schema A")) + let schema_content_hash = content_store + .add(&serde_json::json!("Schema A")) .expect("Unable to add to content store"); let schema_node_index = graph .add_node( @@ -5026,8 +5026,8 @@ mod test { .expect("Unable to add root -> schema edge"); let schema_variant_id = change_set.generate_ulid().expect("Unable to generate Ulid"); - let (schema_variant_content_hash, _) = content_store - .add(serde_json::json!("Schema Variant A")) + let schema_variant_content_hash = content_store + .add(&serde_json::json!("Schema Variant A")) .expect("Unable to add to content store"); let schema_variant_node_index = graph .add_node( @@ -5051,8 +5051,8 @@ mod test { .expect("Unable to add schema -> schema variant edge"); let root_prop_id = change_set.generate_ulid().expect("Unable to generate Ulid"); - let (root_prop_content_hash, _) = content_store - .add(serde_json::json!("Root prop")) + let root_prop_content_hash = content_store + .add(&serde_json::json!("Root prop")) .expect("Unable to add to content store"); let root_prop_node_index = graph .add_node( @@ -5078,8 +5078,8 @@ mod test { .expect("Unable to add schema variant -> root prop edge"); let si_prop_id = change_set.generate_ulid().expect("Unable to generate Ulid"); - let (si_prop_content_hash, _) = content_store - .add(serde_json::json!("SI Prop Content")) + let si_prop_content_hash = content_store + .add(&serde_json::json!("SI Prop Content")) .expect("Unable to add to content store"); let si_prop_node_index = graph .add_node( @@ -5105,8 +5105,8 @@ mod test { .expect("Unable to add root prop -> si prop edge"); let name_prop_id = change_set.generate_ulid().expect("Unable to generate Ulid"); - let (name_prop_content_hash, _) = content_store - .add(serde_json::json!("Name Prop Content")) + let name_prop_content_hash = 
content_store + .add(&serde_json::json!("Name Prop Content")) .expect("Unable to add to content store"); let name_prop_node_index = graph .add_node( @@ -5132,8 +5132,8 @@ mod test { .expect("Unable to add si prop -> name prop edge"); let description_prop_id = change_set.generate_ulid().expect("Unable to generate Ulid"); - let (description_prop_content_hash, _) = content_store - .add(serde_json::json!("Description Prop Content")) + let description_prop_content_hash = content_store + .add(&serde_json::json!("Description Prop Content")) .expect("Unable to add to content store"); let description_prop_node_index = graph .add_node( @@ -5159,8 +5159,8 @@ mod test { .expect("Unable to add si prop -> description prop edge"); let component_id = change_set.generate_ulid().expect("Unable to generate Ulid"); - let (component_content_hash, _) = content_store - .add(serde_json::json!("Component Content")) + let component_content_hash = content_store + .add(&serde_json::json!("Component Content")) .expect("Unable to add to content store"); let component_node_index = graph .add_node( @@ -5194,8 +5194,8 @@ mod test { .expect("Unable to add component -> schema variant edge"); let root_av_id = change_set.generate_ulid().expect("Unable to generate Ulid"); - let (root_av_content_hash, _) = content_store - .add(serde_json::json!({})) + let root_av_content_hash = content_store + .add(&serde_json::json!({})) .expect("Unable to add to content store"); let root_av_node_index = graph .add_node( @@ -5231,8 +5231,8 @@ mod test { .expect("Unable to add root av -> root prop edge"); let si_av_id = change_set.generate_ulid().expect("Unable to generate Ulid"); - let (si_av_content_hash, _) = content_store - .add(serde_json::json!({})) + let si_av_content_hash = content_store + .add(&serde_json::json!({})) .expect("Unable to add to content store"); let si_av_node_index = graph .add_node( @@ -5268,8 +5268,8 @@ mod test { .expect("Unable to add si av -> si prop edge"); let name_av_id = 
change_set.generate_ulid().expect("Unable to generate Ulid"); - let (name_av_content_hash, _) = content_store - .add(serde_json::json!("component name")) + let name_av_content_hash = content_store + .add(&serde_json::json!("component name")) .expect("Unable to add to content store"); let name_av_node_index = graph .add_node( @@ -5305,8 +5305,8 @@ mod test { .expect("Unable to create name av -> name prop edge"); let description_av_id = change_set.generate_ulid().expect("Unable to generate Ulid"); - let (description_av_content_hash, _) = content_store - .add(serde_json::json!("Component description")) + let description_av_content_hash = content_store + .add(&serde_json::json!("Component description")) .expect("Unable to add to content store"); let description_av_node_index = graph .add_node( @@ -5353,26 +5353,27 @@ mod test { }], graph .attribute_value_view( - &content_store, + &mut content_store, graph .get_node_index_by_id(root_av_id) .expect("Unable to get NodeIndex") ) + .await .expect("Unable to generate attribute value view"), ); } - #[test] - fn attribute_value_build_view_ordered_array() { + #[tokio::test] + async fn attribute_value_build_view_ordered_array() { let change_set = ChangeSetPointer::new_local().expect("Unable to create ChangeSet"); let change_set = &change_set; let mut graph = WorkspaceSnapshotGraph::new(change_set) .expect("Unable to create WorkspaceSnapshotGraph"); - let mut content_store = crate::content::Store::new(); + let mut content_store = content_store::LocalStore::default(); let schema_id = change_set.generate_ulid().expect("Unable to generate Ulid"); - let (schema_content_hash, _) = content_store - .add(serde_json::json!("Schema A")) + let schema_content_hash = content_store + .add(&serde_json::json!("Schema A")) .expect("Unable to add to content store"); let schema_node_index = graph .add_node( @@ -5394,8 +5395,8 @@ mod test { .expect("Unable to add root -> schema edge"); let schema_variant_id = 
change_set.generate_ulid().expect("Unable to generate Ulid"); - let (schema_variant_content_hash, _) = content_store - .add(serde_json::json!("Schema Variant A")) + let schema_variant_content_hash = content_store + .add(&serde_json::json!("Schema Variant A")) .expect("Unable to add to content store"); let schema_variant_node_index = graph .add_node( @@ -5419,8 +5420,8 @@ mod test { .expect("Unable to add schema -> schema variant edge"); let root_prop_id = change_set.generate_ulid().expect("Unable to generate Ulid"); - let (root_prop_content_hash, _) = content_store - .add(serde_json::json!("Root prop")) + let root_prop_content_hash = content_store + .add(&serde_json::json!("Root prop")) .expect("Unable to add to content store"); let root_prop_node_index = graph .add_node( @@ -5446,8 +5447,8 @@ mod test { .expect("Unable to add schema variant -> root prop edge"); let domain_prop_id = change_set.generate_ulid().expect("Unable to generate Ulid"); - let (domain_prop_content_hash, _) = content_store - .add(serde_json::json!("domain Prop Content")) + let domain_prop_content_hash = content_store + .add(&serde_json::json!("domain Prop Content")) .expect("Unable to add to content store"); let domain_prop_node_index = graph .add_node( @@ -5473,8 +5474,8 @@ mod test { .expect("Unable to add root prop -> domain prop edge"); let ports_prop_id = change_set.generate_ulid().expect("Unable to generate Ulid"); - let (ports_prop_content_hash, _) = content_store - .add(serde_json::json!("ports Prop Content")) + let ports_prop_content_hash = content_store + .add(&serde_json::json!("ports Prop Content")) .expect("Unable to add to content store"); let ports_prop_node_index = graph .add_node( @@ -5500,8 +5501,8 @@ mod test { .expect("Unable to add domain prop -> ports prop edge"); let port_prop_id = change_set.generate_ulid().expect("Unable to generate Ulid"); - let (port_prop_content_hash, _) = content_store - .add(serde_json::json!("port Prop Content")) + let port_prop_content_hash = 
content_store + .add(&serde_json::json!("port Prop Content")) .expect("Unable to add to content store"); let port_prop_node_index = graph .add_node( @@ -5527,8 +5528,8 @@ mod test { .expect("Unable to add ports prop -> port prop edge"); let component_id = change_set.generate_ulid().expect("Unable to generate Ulid"); - let (component_content_hash, _) = content_store - .add(serde_json::json!("Component Content")) + let component_content_hash = content_store + .add(&serde_json::json!("Component Content")) .expect("Unable to add to content store"); let component_node_index = graph .add_node( @@ -5562,8 +5563,8 @@ mod test { .expect("Unable to add component -> schema variant edge"); let root_av_id = change_set.generate_ulid().expect("Unable to generate Ulid"); - let (root_av_content_hash, _) = content_store - .add(serde_json::json!({})) + let root_av_content_hash = content_store + .add(&serde_json::json!({})) .expect("Unable to add to content store"); let root_av_node_index = graph .add_node( @@ -5599,8 +5600,8 @@ mod test { .expect("Unable to add root av -> root prop edge"); let domain_av_id = change_set.generate_ulid().expect("Unable to generate Ulid"); - let (domain_av_content_hash, _) = content_store - .add(serde_json::json!({})) + let domain_av_content_hash = content_store + .add(&serde_json::json!({})) .expect("Unable to add to content store"); let domain_av_node_index = graph .add_node( @@ -5636,8 +5637,8 @@ mod test { .expect("Unable to add domain av -> domain prop edge"); let ports_av_id = change_set.generate_ulid().expect("Unable to generate Ulid"); - let (ports_av_content_hash, _) = content_store - .add(serde_json::json!([])) + let ports_av_content_hash = content_store + .add(&serde_json::json!([])) .expect("Unable to add to content store"); let ports_av_node_index = graph .add_ordered_node( @@ -5674,8 +5675,8 @@ mod test { .expect("Unable to create ports av -> ports prop edge"); let port1_av_id = change_set.generate_ulid().expect("Unable to generate Ulid"); 
- let (port1_av_content_hash, _) = content_store - .add(serde_json::json!("Port 1")) + let port1_av_content_hash = content_store + .add(&serde_json::json!("Port 1")) .expect("Unable to add to content store"); let port1_av_node_index = graph .add_node( @@ -5712,8 +5713,8 @@ mod test { .expect("Unable to add port 1 av -> port prop edge"); let port2_av_id = change_set.generate_ulid().expect("Unable to generate Ulid"); - let (port2_av_content_hash, _) = content_store - .add(serde_json::json!("Port 2")) + let port2_av_content_hash = content_store + .add(&serde_json::json!("Port 2")) .expect("Unable to add to content store"); let port2_av_node_index = graph .add_node( @@ -5750,8 +5751,8 @@ mod test { .expect("Unable to add port 2 av -> port prop edge"); let port3_av_id = change_set.generate_ulid().expect("Unable to generate Ulid"); - let (port3_av_content_hash, _) = content_store - .add(serde_json::json!("Port 3")) + let port3_av_content_hash = content_store + .add(&serde_json::json!("Port 3")) .expect("Unable to add to content store"); let port3_av_node_index = graph .add_node( @@ -5788,8 +5789,8 @@ mod test { .expect("Unable to add port 3 av -> port prop edge"); let port4_av_id = change_set.generate_ulid().expect("Unable to generate Ulid"); - let (port4_av_content_hash, _) = content_store - .add(serde_json::json!("Port 4")) + let port4_av_content_hash = content_store + .add(&serde_json::json!("Port 4")) .expect("Unable to add to content store"); let port4_av_node_index = graph .add_node( @@ -5841,11 +5842,12 @@ mod test { }], graph .attribute_value_view( - &content_store, + &mut content_store, graph .get_node_index_by_id(root_av_id) .expect("Unable to get NodeIndex") ) + .await .expect("Unable to generate attribute value view"), ); @@ -5866,17 +5868,18 @@ mod test { }], graph .attribute_value_view( - &content_store, + &mut content_store, graph .get_node_index_by_id(root_av_id) .expect("Unable to get NodeIndex") ) + .await .expect("Unable to generate attribute value 
view"), ); let port5_av_id = change_set.generate_ulid().expect("Unable to generate Ulid"); - let (port5_av_content_hash, _) = content_store - .add(serde_json::json!("Port 5")) + let port5_av_content_hash = content_store + .add(&serde_json::json!("Port 5")) .expect("Unable to add to content store"); let port5_av_node_index = graph .add_node( @@ -5926,26 +5929,27 @@ mod test { }], graph .attribute_value_view( - &content_store, + &mut content_store, graph .get_node_index_by_id(root_av_id) .expect("Unable to get NodeIndex") ) + .await .expect("Unable to generate attribute value view"), ); } - #[test] - fn attribute_value_build_view_ordered_map() { + #[tokio::test] + async fn attribute_value_build_view_ordered_map() { let change_set = ChangeSetPointer::new_local().expect("Unable to create ChangeSet"); let change_set = &change_set; let mut graph = WorkspaceSnapshotGraph::new(change_set) .expect("Unable to create WorkspaceSnapshotGraph"); - let mut content_store = crate::content::Store::new(); + let mut content_store = content_store::LocalStore::default(); let schema_id = change_set.generate_ulid().expect("Unable to generate Ulid"); - let (schema_content_hash, _) = content_store - .add(serde_json::json!("Schema A")) + let schema_content_hash = content_store + .add(&serde_json::json!("Schema A")) .expect("Unable to add to content store"); let schema_node_index = graph .add_node( @@ -5967,8 +5971,8 @@ mod test { .expect("Unable to add root -> schema edge"); let schema_variant_id = change_set.generate_ulid().expect("Unable to generate Ulid"); - let (schema_variant_content_hash, _) = content_store - .add(serde_json::json!("Schema Variant A")) + let schema_variant_content_hash = content_store + .add(&serde_json::json!("Schema Variant A")) .expect("Unable to add to content store"); let schema_variant_node_index = graph .add_node( @@ -5992,8 +5996,8 @@ mod test { .expect("Unable to add schema -> schema variant edge"); let root_prop_id = change_set.generate_ulid().expect("Unable 
to generate Ulid"); - let (root_prop_content_hash, _) = content_store - .add(serde_json::json!("Root prop")) + let root_prop_content_hash = content_store + .add(&serde_json::json!("Root prop")) .expect("Unable to add to content store"); let root_prop_node_index = graph .add_node( @@ -6019,8 +6023,8 @@ mod test { .expect("Unable to add schema variant -> root prop edge"); let domain_prop_id = change_set.generate_ulid().expect("Unable to generate Ulid"); - let (domain_prop_content_hash, _) = content_store - .add(serde_json::json!("domain Prop Content")) + let domain_prop_content_hash = content_store + .add(&serde_json::json!("domain Prop Content")) .expect("Unable to add to content store"); let domain_prop_node_index = graph .add_node( @@ -6046,8 +6050,8 @@ mod test { .expect("Unable to add root prop -> domain prop edge"); let environment_prop_id = change_set.generate_ulid().expect("Unable to generate Ulid"); - let (environment_prop_content_hash, _) = content_store - .add(serde_json::json!("environment Prop Content")) + let environment_prop_content_hash = content_store + .add(&serde_json::json!("environment Prop Content")) .expect("Unable to add to content store"); let environment_prop_node_index = graph .add_node( @@ -6073,8 +6077,8 @@ mod test { .expect("Unable to add domain prop -> environment prop edge"); let env_var_prop_id = change_set.generate_ulid().expect("Unable to generate Ulid"); - let (env_var_prop_content_hash, _) = content_store - .add(serde_json::json!("port Prop Content")) + let env_var_prop_content_hash = content_store + .add(&serde_json::json!("port Prop Content")) .expect("Unable to add to content store"); let env_var_prop_node_index = graph .add_node( @@ -6100,8 +6104,8 @@ mod test { .expect("Unable to add environment prop -> env var prop edge"); let component_id = change_set.generate_ulid().expect("Unable to generate Ulid"); - let (component_content_hash, _) = content_store - .add(serde_json::json!("Component Content")) + let 
component_content_hash = content_store + .add(&serde_json::json!("Component Content")) .expect("Unable to add to content store"); let component_node_index = graph .add_node( @@ -6135,8 +6139,8 @@ mod test { .expect("Unable to add component -> schema variant edge"); let root_av_id = change_set.generate_ulid().expect("Unable to generate Ulid"); - let (root_av_content_hash, _) = content_store - .add(serde_json::json!({})) + let root_av_content_hash = content_store + .add(&serde_json::json!({})) .expect("Unable to add to content store"); let root_av_node_index = graph .add_node( @@ -6172,8 +6176,8 @@ mod test { .expect("Unable to add root av -> root prop edge"); let domain_av_id = change_set.generate_ulid().expect("Unable to generate Ulid"); - let (domain_av_content_hash, _) = content_store - .add(serde_json::json!({})) + let domain_av_content_hash = content_store + .add(&serde_json::json!({})) .expect("Unable to add to content store"); let domain_av_node_index = graph .add_node( @@ -6209,8 +6213,8 @@ mod test { .expect("Unable to add domain av -> domain prop edge"); let envrionment_av_id = change_set.generate_ulid().expect("Unable to generate Ulid"); - let (ports_av_content_hash, _) = content_store - .add(serde_json::json!({})) + let ports_av_content_hash = content_store + .add(&serde_json::json!({})) .expect("Unable to add to content store"); let environment_av_node_index = graph .add_ordered_node( @@ -6247,8 +6251,8 @@ mod test { .expect("Unable to create environment av -> environment prop edge"); let env_var1_av_id = change_set.generate_ulid().expect("Unable to generate Ulid"); - let (env_var1_av_content_hash, _) = content_store - .add(serde_json::json!("1111")) + let env_var1_av_content_hash = content_store + .add(&serde_json::json!("1111")) .expect("Unable to add to content store"); let port1_av_node_index = graph .add_node( @@ -6288,8 +6292,8 @@ mod test { .expect("Unable to add env var 1 av -> env var prop edge"); let env_var2_av_id = 
change_set.generate_ulid().expect("Unable to generate Ulid"); - let (env_var2_av_content_hash, _) = content_store - .add(serde_json::json!("2222")) + let env_var2_av_content_hash = content_store + .add(&serde_json::json!("2222")) .expect("Unable to add to content store"); let env_var2_av_node_index = graph .add_node( @@ -6329,8 +6333,8 @@ mod test { .expect("Unable to add env var 2 av -> env var prop edge"); let env_var3_av_id = change_set.generate_ulid().expect("Unable to generate Ulid"); - let (env_var3_av_content_hash, _) = content_store - .add(serde_json::json!("3333")) + let env_var3_av_content_hash = content_store + .add(&serde_json::json!("3333")) .expect("Unable to add to content store"); let port3_av_node_index = graph .add_node( @@ -6370,8 +6374,8 @@ mod test { .expect("Unable to add env var 3 av -> env var prop edge"); let env_var4_av_id = change_set.generate_ulid().expect("Unable to generate Ulid"); - let (env_var4_av_content_hash, _) = content_store - .add(serde_json::json!("4444")) + let env_var4_av_content_hash = content_store + .add(&serde_json::json!("4444")) .expect("Unable to add to content store"); let env_var4_av_node_index = graph .add_node( @@ -6426,11 +6430,12 @@ mod test { }], graph .attribute_value_view( - &content_store, + &mut content_store, graph .get_node_index_by_id(root_av_id) .expect("Unable to get NodeIndex") ) + .await .expect("Unable to generate attribute value view"), ); @@ -6456,17 +6461,18 @@ mod test { }], graph .attribute_value_view( - &content_store, + &mut content_store, graph .get_node_index_by_id(root_av_id) .expect("Unable to get NodeIndex") ) + .await .expect("Unable to generate attribute value view"), ); let env_var5_av_id = change_set.generate_ulid().expect("Unable to generate Ulid"); - let (env_var5_av_content_hash, _) = content_store - .add(serde_json::json!("5555")) + let env_var5_av_content_hash = content_store + .add(&serde_json::json!("5555")) .expect("Unable to add to content store"); let 
env_var5_av_node_index = graph .add_node( @@ -6519,11 +6525,12 @@ mod test { }], graph .attribute_value_view( - &content_store, + &mut content_store, graph .get_node_index_by_id(root_av_id) .expect("Unable to get NodeIndex") ) + .await .expect("Unable to generate attribute value view"), ); } diff --git a/lib/dal/src/workspace_snapshot/lamport_clock.rs b/lib/dal/src/workspace_snapshot/lamport_clock.rs index 125301fd60..4b77144236 100644 --- a/lib/dal/src/workspace_snapshot/lamport_clock.rs +++ b/lib/dal/src/workspace_snapshot/lamport_clock.rs @@ -3,9 +3,8 @@ use chrono::{DateTime, Utc}; use serde::{Deserialize, Serialize}; use thiserror::Error; -use ulid::Ulid; -use crate::workspace_snapshot::{ChangeSetPointer, ChangeSetPointerError}; +use crate::workspace_snapshot::ChangeSetPointerError; #[derive(Debug, Error)] pub enum LamportClockError { diff --git a/lib/dal/src/workspace_snapshot/node_weight.rs b/lib/dal/src/workspace_snapshot/node_weight.rs index b863e8d24e..4b3242e078 100644 --- a/lib/dal/src/workspace_snapshot/node_weight.rs +++ b/lib/dal/src/workspace_snapshot/node_weight.rs @@ -1,4 +1,5 @@ use chrono::{DateTime, Utc}; +use content_store::ContentHash; use serde::{Deserialize, Serialize}; use thiserror::Error; use ulid::Ulid; @@ -9,7 +10,7 @@ use crate::{ content_address::ContentAddress, vector_clock::{VectorClock, VectorClockError}, }, - ContentHash, PropKind, + PropKind, }; pub use content_node_weight::ContentNodeWeight; diff --git a/lib/dal/src/workspace_snapshot/node_weight/content_node_weight.rs b/lib/dal/src/workspace_snapshot/node_weight/content_node_weight.rs index 162dc447db..a28d0cda16 100644 --- a/lib/dal/src/workspace_snapshot/node_weight/content_node_weight.rs +++ b/lib/dal/src/workspace_snapshot/node_weight/content_node_weight.rs @@ -1,4 +1,5 @@ use chrono::{DateTime, Utc}; +use content_store::ContentHash; use serde::{Deserialize, Serialize}; use ulid::Ulid; @@ -10,7 +11,6 @@ use crate::{ node_weight::{NodeWeightError, NodeWeightResult}, 
vector_clock::VectorClock, }, - ContentHash, }; #[derive(Clone, Serialize, Deserialize)] diff --git a/lib/dal/src/workspace_snapshot/node_weight/ordering_node_weight.rs b/lib/dal/src/workspace_snapshot/node_weight/ordering_node_weight.rs index fd0efe7a2e..90342d327b 100644 --- a/lib/dal/src/workspace_snapshot/node_weight/ordering_node_weight.rs +++ b/lib/dal/src/workspace_snapshot/node_weight/ordering_node_weight.rs @@ -1,12 +1,10 @@ use chrono::{DateTime, Utc}; +use content_store::ContentHash; use serde::{Deserialize, Serialize}; use ulid::Ulid; use crate::change_set_pointer::ChangeSetPointer; -use crate::{ - workspace_snapshot::{node_weight::NodeWeightResult, vector_clock::VectorClock}, - ContentHash, -}; +use crate::workspace_snapshot::{node_weight::NodeWeightResult, vector_clock::VectorClock}; #[derive(Clone, Serialize, Deserialize, Default)] pub struct OrderingNodeWeight { diff --git a/lib/dal/src/workspace_snapshot/node_weight/prop_node_weight.rs b/lib/dal/src/workspace_snapshot/node_weight/prop_node_weight.rs index d88f419f6b..ccc3129166 100644 --- a/lib/dal/src/workspace_snapshot/node_weight/prop_node_weight.rs +++ b/lib/dal/src/workspace_snapshot/node_weight/prop_node_weight.rs @@ -1,10 +1,10 @@ use chrono::{DateTime, Utc}; +use content_store::ContentHash; use serde::{Deserialize, Serialize}; use ulid::Ulid; use crate::{ change_set_pointer::ChangeSetPointer, - content::hash::ContentHash, workspace_snapshot::{ content_address::ContentAddress, graph::LineageId, diff --git a/lib/dal/src/workspace_snapshot/vector_clock.rs b/lib/dal/src/workspace_snapshot/vector_clock.rs index 98755e9f11..53ef41ee3a 100644 --- a/lib/dal/src/workspace_snapshot/vector_clock.rs +++ b/lib/dal/src/workspace_snapshot/vector_clock.rs @@ -5,7 +5,6 @@ use std::collections::HashMap; use chrono::{DateTime, Utc}; use serde::{Deserialize, Serialize}; use thiserror::Error; -use ulid::Ulid; use crate::workspace_snapshot::{ lamport_clock::{LamportClock, LamportClockError}, diff --git 
a/lib/dal/tests/integration_test/internal/mod.rs b/lib/dal/tests/integration_test/internal/mod.rs index db3a310050..47d1c18820 100644 --- a/lib/dal/tests/integration_test/internal/mod.rs +++ b/lib/dal/tests/integration_test/internal/mod.rs @@ -9,6 +9,7 @@ mod func_execution; mod graph; mod history_event; mod key_pair; +mod mostly_everything_is_a_node_or_an_edge; mod node; mod node_menu; mod pkg; @@ -16,7 +17,6 @@ mod prop; mod prop_tree; mod property_editor; mod provider; -mod rebaser; mod schema; mod secret; mod socket; diff --git a/lib/dal/tests/integration_test/internal/mostly_everything_is_a_node_or_an_edge.rs b/lib/dal/tests/integration_test/internal/mostly_everything_is_a_node_or_an_edge.rs new file mode 100644 index 0000000000..1f279e71bd --- /dev/null +++ b/lib/dal/tests/integration_test/internal/mostly_everything_is_a_node_or_an_edge.rs @@ -0,0 +1,7 @@ +//! This is a temporary module to co-locate all tests for the new engine layer. Once everything is +//! working, this module will go away and the tests will be moved. +//! +//! For all tests in this module, provide "SI_TEST_BUILTIN_SCHEMAS=none" as an environment variable. + +mod content_store; +mod rebaser; diff --git a/lib/dal/tests/integration_test/internal/mostly_everything_is_a_node_or_an_edge/content_store.rs b/lib/dal/tests/integration_test/internal/mostly_everything_is_a_node_or_an_edge/content_store.rs new file mode 100644 index 0000000000..d6188d0055 --- /dev/null +++ b/lib/dal/tests/integration_test/internal/mostly_everything_is_a_node_or_an_edge/content_store.rs @@ -0,0 +1,21 @@ +//! For all tests in this file, provide "SI_TEST_BUILTIN_SCHEMAS=none" as an environment variable. 
+ +use content_store::Store; +use content_store_test::DalTestPgStore; +use dal::component::ComponentKind; +use dal::{DalContext, Schema}; +use dal_test::test; + +#[test] +async fn new(ctx: &DalContext) { + let mut store = DalTestPgStore::new().await.expect("could not create store"); + + // TODO(nick): replace this with something more useful. We're just trying to make sure we can + // use both the DalContext and the store at the same time to talk to PG. + let schema = Schema::new(ctx, "cumbersome", &ComponentKind::Standard) + .await + .expect("could not create schema"); + + store.add(schema.name()).expect("could not add"); + store.write().await.expect("could not write"); +} diff --git a/lib/dal/tests/integration_test/internal/rebaser.rs b/lib/dal/tests/integration_test/internal/mostly_everything_is_a_node_or_an_edge/rebaser.rs similarity index 88% rename from lib/dal/tests/integration_test/internal/rebaser.rs rename to lib/dal/tests/integration_test/internal/mostly_everything_is_a_node_or_an_edge/rebaser.rs index 2c7bd2e117..4a0215f4f4 100644 --- a/lib/dal/tests/integration_test/internal/rebaser.rs +++ b/lib/dal/tests/integration_test/internal/mostly_everything_is_a_node_or_an_edge/rebaser.rs @@ -1,12 +1,13 @@ //! For all tests in this module, provide "SI_TEST_BUILTIN_SCHEMAS=none" as an environment variable. 
+use content_store::ContentHash; use dal::change_set_pointer::ChangeSetPointer; -use dal::content::hash::ContentHash; use dal::workspace_snapshot::content_address::ContentAddress; use dal::workspace_snapshot::node_weight::NodeWeight; use dal::{DalContext, Tenancy, Visibility, WorkspacePk, WorkspaceSnapshot}; use dal_test::test; use rebaser_client::Client; +use rebaser_core::ChangeSetReplyMessage; #[test] async fn simple_rebase(ctx: &mut DalContext) { @@ -40,7 +41,7 @@ async fn simple_rebase(ctx: &mut DalContext) { snapshot.write(ctx).await.expect("could not write snapshot"); base_change_set - .update_pointer(ctx, snapshot.id) + .update_pointer(ctx, snapshot.id()) .await .expect("could not update pointer"); @@ -63,7 +64,7 @@ async fn simple_rebase(ctx: &mut DalContext) { .expect("could not add node"); snapshot.write(ctx).await.expect("could not write snapshot"); forked_change_set - .update_pointer(ctx, snapshot.id) + .update_pointer(ctx, snapshot.id()) .await .expect("could not update pointer"); @@ -81,14 +82,19 @@ async fn simple_rebase(ctx: &mut DalContext) { let response = client .send_with_reply( base_change_set.id.into(), - snapshot.id.into(), + snapshot.id().into(), forked_change_set.id.into(), ) .await .expect("could not send"); // TODO(nick): do something useful with this. - dbg!(response); + match response { + ChangeSetReplyMessage::Success { results } => { + dbg!(results); + } + ChangeSetReplyMessage::Failure { error } => panic!("{}", error), + } // TODO(nick): move cleanup to the test harness. 
let _ = client diff --git a/lib/rebaser-client/Cargo.toml b/lib/rebaser-client/Cargo.toml index 24674495e2..7d27807626 100644 --- a/lib/rebaser-client/Cargo.toml +++ b/lib/rebaser-client/Cargo.toml @@ -2,7 +2,6 @@ name = "rebaser-client" version = "0.1.0" edition = "2021" -rust-version = "1.64" publish = false [dependencies] diff --git a/lib/rebaser-client/src/lib.rs b/lib/rebaser-client/src/lib.rs index 8797cea599..8b5be47345 100644 --- a/lib/rebaser-client/src/lib.rs +++ b/lib/rebaser-client/src/lib.rs @@ -13,7 +13,6 @@ overflowing_literals, path_statements, patterns_in_fns_without_body, - private_in_public, unconditional_recursion, unused, unused_allocation, diff --git a/lib/rebaser-core/src/lib.rs b/lib/rebaser-core/src/lib.rs index 8a6236b66d..64e2bfcaca 100644 --- a/lib/rebaser-core/src/lib.rs +++ b/lib/rebaser-core/src/lib.rs @@ -16,7 +16,6 @@ overflowing_literals, path_statements, patterns_in_fns_without_body, - private_in_public, unconditional_recursion, unused, unused_allocation, diff --git a/lib/rebaser-server/BUCK b/lib/rebaser-server/BUCK index 4f857d629e..3b9d87ad55 100644 --- a/lib/rebaser-server/BUCK +++ b/lib/rebaser-server/BUCK @@ -17,56 +17,14 @@ rust_library( "//lib/telemetry-rs:telemetry", "//lib/veritech-client:veritech-client", "//third-party/rust:derive_builder", - "//third-party/rust:futures", "//third-party/rust:remain", "//third-party/rust:serde", "//third-party/rust:serde_json", - "//third-party/rust:stream-cancel", "//third-party/rust:thiserror", "//third-party/rust:tokio", - "//third-party/rust:tokio-stream", "//third-party/rust:ulid", ], srcs = glob([ "src/**/*.rs", ]), -) - -rust_test( - name = "test-integration", - deps = [ - "//lib/dal:dal", - "//lib/dal-test:dal-test", - "//lib/rebaser-client:rebaser-client", - "//lib/rebaser-server:rebaser-server", - "//lib/si-pkg:si-pkg", - "//lib/si-rabbitmq:si-rabbitmq", - "//lib/si-test-macros:si-test-macros", - "//lib/veritech-client:veritech-client", - "//third-party/rust:base64", - 
"//third-party/rust:itertools", - "//third-party/rust:pretty_assertions_sorted", - "//third-party/rust:serde_json", - "//third-party/rust:sodiumoxide", - "//third-party/rust:strum", - "//third-party/rust:tempfile", - "//third-party/rust:tokio", - "//third-party/rust:ulid", - ], - crate_root = "tests/integration.rs", - srcs = glob(["tests/**/*.rs"]), - env = { - "CARGO_PKG_NAME": "integration", - }, - resources = { - "cyclone": "//bin/cyclone:cyclone", - "dev.decryption.key": "//lib/cyclone-server:dev.decryption.key", - "dev.encryption.key": "//lib/cyclone-server:dev.encryption.key", - "dev.jwt_signing_private_key.pem": "//config/keys:dev.jwt_signing_private_key.pem", - "dev.jwt_signing_public_key.pem": "//config/keys:dev.jwt_signing_public_key.pem", - "lang-js": "//bin/lang-js:bin", - "pkgs_path": "//pkgs:pkgs", - "prod.jwt_signing_public_key.pem": "//config/keys:prod.jwt_signing_public_key.pem", - }, -) - +) \ No newline at end of file diff --git a/lib/rebaser-server/Cargo.toml b/lib/rebaser-server/Cargo.toml index 1fad512995..7c204be83f 100644 --- a/lib/rebaser-server/Cargo.toml +++ b/lib/rebaser-server/Cargo.toml @@ -2,20 +2,13 @@ name = "rebaser-server" version = "0.1.0" edition = "2021" -rust-version = "1.64" publish = false -# TODO(nick): validate all these dependencies. 
[dependencies] buck2-resources = { path = "../../lib/buck2-resources" } dal = { path = "../../lib/dal" } -derive_builder = { workspace = true } -futures = { workspace = true } nats-subscriber = { path = "../../lib/nats-subscriber" } rebaser-core = { path = "../../lib/rebaser-core" } -remain = { workspace = true } -serde = { workspace = true } -serde_json = { workspace = true } si-crypto = { path = "../../lib/si-crypto" } si-data-nats = { path = "../../lib/si-data-nats" } si-data-pg = { path = "../../lib/si-data-pg" } @@ -25,13 +18,12 @@ si-std = { path = "../../lib/si-std" } si-test-macros = { path = "../../lib/si-test-macros" } stream-cancel = { workspace = true } telemetry = { path = "../../lib/telemetry-rs" } +veritech-client = { path = "../../lib/veritech-client" } + +derive_builder = { workspace = true } +remain = { workspace = true } +serde = { workspace = true } +serde_json = { workspace = true } thiserror = { workspace = true } tokio = { workspace = true } -tokio-stream = { workspace = true } ulid = { workspace = true } -veritech-client = { path = "../../lib/veritech-client" } - -[dev-dependencies] -dal-test = { path = "../../lib/dal-test" } -pretty_assertions_sorted = { workspace = true } -rebaser-client = { path = "../../lib/rebaser-client" } diff --git a/lib/rebaser-server/src/lib.rs b/lib/rebaser-server/src/lib.rs index eff74462f3..6a4c6fc901 100644 --- a/lib/rebaser-server/src/lib.rs +++ b/lib/rebaser-server/src/lib.rs @@ -12,7 +12,6 @@ overflowing_literals, path_statements, patterns_in_fns_without_body, - private_in_public, unconditional_recursion, unused, unused_allocation, diff --git a/lib/rebaser-server/tests/integration.rs b/lib/rebaser-server/tests/integration.rs deleted file mode 100644 index 1bfe9c6abf..0000000000 --- a/lib/rebaser-server/tests/integration.rs +++ /dev/null @@ -1,9 +0,0 @@ -//! All tests should be ran with the following environment variable: -//! -//! ```shell -//! SI_TEST_BUILTIN_SCHEMAS=none -//! 
``` - -const TEST_PG_DBNAME: &str = "si_test_rebaser"; - -mod integration_test; diff --git a/lib/rebaser-server/tests/integration_test/client.rs b/lib/rebaser-server/tests/integration_test/client.rs deleted file mode 100644 index 0b845789f5..0000000000 --- a/lib/rebaser-server/tests/integration_test/client.rs +++ /dev/null @@ -1,46 +0,0 @@ -use tokio::test; -use ulid::Ulid; - -use rebaser_client::Client; -use rebaser_server::{ConfigBuilder, Server}; - -#[test] -async fn connect() { - let client = test_setup().await; - client.close().await; -} - -#[test] -async fn management() { - let mut client = test_setup().await; - - let change_set_id = Ulid::new(); - let _new_stream_to_produce_to = client - .send_management_open_change_set(change_set_id) - .await - .expect("could not create new rebaser loop for change set"); - - client - .send_management_close_change_set(change_set_id) - .await - .expect("could not close the rebaser loop for change set"); - - client.close().await; -} - -async fn test_setup() -> Client { - let config = ConfigBuilder::default() - .cyclone_encryption_key_path( - "../../lib/cyclone-server/src/dev.encryption.key" - .try_into() - .expect("could not convert"), - ) - .build() - .expect("could not build config"); - let server = Server::from_config(config) - .await - .expect("could not build server"); - tokio::spawn(server.run()); - - Client::new().await.expect("could not build client") -} diff --git a/lib/rebaser-server/tests/integration_test/connection.rs b/lib/rebaser-server/tests/integration_test/connection.rs deleted file mode 100644 index 06fa534fd7..0000000000 --- a/lib/rebaser-server/tests/integration_test/connection.rs +++ /dev/null @@ -1,17 +0,0 @@ -use dal::change_set_pointer::ChangeSetPointer; -use dal::{DalContext, WorkspaceSnapshot}; -use si_rabbitmq::Environment; -use si_test_macros::rebaser_test as test; - -#[test] -async fn connect_to_database(ctx: &DalContext) { - let change_set = ChangeSetPointer::new_local().expect("could not create 
change set"); - let _snapshot = WorkspaceSnapshot::initial(ctx, &change_set) - .await - .expect("could not create snapshot"); -} - -#[test] -async fn connect_to_queue(_ctx: &DalContext) { - let _environment = Environment::new().await.expect("could not connect"); -} diff --git a/lib/rebaser-server/tests/integration_test/mod.rs b/lib/rebaser-server/tests/integration_test/mod.rs deleted file mode 100644 index ce013a0ed6..0000000000 --- a/lib/rebaser-server/tests/integration_test/mod.rs +++ /dev/null @@ -1,2 +0,0 @@ -mod client; -mod connection; diff --git a/lib/si-rabbitmq/src/lib.rs b/lib/si-rabbitmq/src/lib.rs index 709352fd84..ae60fc75cc 100644 --- a/lib/si-rabbitmq/src/lib.rs +++ b/lib/si-rabbitmq/src/lib.rs @@ -13,7 +13,6 @@ overflowing_literals, path_statements, patterns_in_fns_without_body, - private_in_public, unconditional_recursion, unused, unused_allocation, diff --git a/lib/si-test-macros/src/lib.rs b/lib/si-test-macros/src/lib.rs index 25c637a645..79f2535d25 100644 --- a/lib/si-test-macros/src/lib.rs +++ b/lib/si-test-macros/src/lib.rs @@ -394,13 +394,3 @@ pub fn sdf_test(attr: TokenStream, input: TokenStream) -> TokenStream { let item = parse_macro_input!(input as ItemFn); sdf_test::expand(item, args).into() } - -/// A procedural macro which helps to streamline, setup, and manage rebaser-related tests. -/// -/// Currently, this macro is equivalent to [`dal_test`](dal_test()). 
-#[proc_macro_attribute] -pub fn rebaser_test(attr: TokenStream, input: TokenStream) -> TokenStream { - let args = parse_macro_input!(attr as Args); - let item = parse_macro_input!(input as ItemFn); - dal_test::expand(item, args).into() -} From 50c4271cd6b3370656e2e9990bd8e38b2a84f867 Mon Sep 17 00:00:00 2001 From: Nick Gerace Date: Mon, 2 Oct 2023 18:07:15 -0400 Subject: [PATCH 30/92] Add content store to DalContext Primary: - Add content store to DalContext as PgStore instead of a generic that implements Store - This is something we should consider once "Mostly Everything is a Node or an Edge" is done since we do not want to add generic trait bounds to DalContext with everything else that's being refactored - Use the new content store off the DalContext in the content store integration test - Use the content store from the test context in the dal test macro expansion - For now, we keep the dal context builder immutable, but in the future, we should get rid of the new "build_default_with_content_store" method and allow the builder to be mutable (which would allow us to use a setter method and THEN use "build_default") - Provide a content store for every integration test rather than on a case-by-case basis Secondary: - Rename PgMigrationHelpers to PgStoreTools and ensure the module's name reflects the change - Make PgStore cloneable to satisfy DalContext, but don't change the Store trait or LocalStore to do the same Signed-off-by: Nick Gerace --- Cargo.lock | 1 + lib/content-store-test/src/lib.rs | 27 +++++--- lib/content-store/src/lib.rs | 2 +- lib/content-store/src/store.rs | 7 +- lib/content-store/src/store/pg.rs | 21 ++++-- .../src/store/pg/{migrate.rs => tools.rs} | 20 ++---- lib/dal-test/BUCK | 1 + lib/dal-test/Cargo.toml | 1 + lib/dal-test/src/lib.rs | 25 ++++++-- lib/dal/src/context.rs | 64 ++++++++++++++++++- .../content_store.rs | 18 ++++-- lib/si-test-macros/src/expand.rs | 30 +++++++-- 12 files changed, 162 insertions(+), 55 deletions(-) rename 
lib/content-store/src/store/pg/{migrate.rs => tools.rs} (60%) diff --git a/Cargo.lock b/Cargo.lock index 444e117968..3a55ee5800 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1354,6 +1354,7 @@ version = "0.1.0" dependencies = [ "buck2-resources", "color-eyre", + "content-store", "content-store-test", "council-server", "dal", diff --git a/lib/content-store-test/src/lib.rs b/lib/content-store-test/src/lib.rs index 98c35a3bb6..a9f8195114 100644 --- a/lib/content-store-test/src/lib.rs +++ b/lib/content-store-test/src/lib.rs @@ -22,7 +22,7 @@ )] use color_eyre::eyre::{Result, WrapErr}; -use content_store::{PgMigrationHelpers, PgStore}; +use content_store::{PgStore, PgStoreTools}; use si_data_pg::{PgPool, PgPoolConfig}; use telemetry::prelude::*; use uuid::Uuid; @@ -83,24 +83,35 @@ impl PgTestMigrationClient { /// Perform migrations for the global content store test database. pub async fn migrate(&self) -> Result<()> { - Ok(PgMigrationHelpers::migrate(&self.pg_pool).await?) + Ok(PgStoreTools::migrate(&self.pg_pool).await?) } } -/// This unit struct provides method(s) for creating [`PgStores`](PgStore) in `dal` integration -/// tests. +/// This unit struct provides method(s) for creating [`PgStores`](PgStore) in integration tests. #[allow(missing_debug_implementations)] -pub struct DalTestPgStore; +pub struct PgStoreFactory; + +impl PgStoreFactory { + /// Creates a [`PgStore`] for the global test database. + pub async fn global() -> Result { + let pg_pool_config = PgPoolConfig { + dbname: TEST_DBNAME.to_string(), + application_name: TEST_APPLICATION_NAME.to_string(), + ..Default::default() + }; + let pg_pool = PgPool::new(&pg_pool_config).await?; + PgStore::new(pg_pool) + .await + .wrap_err("failed to create PgStore for global") + } -impl DalTestPgStore { /// Creates a test-specific database using the global content store test database. Then, a /// [`PgPool`] is created for the new database. Finally, a [`PgStore`] is created from that /// pool. 
/// /// This should be used over [`PgStore::new`] for `dal` integration tests until `dal-test` is /// able to perform this functionality on its own. - #[allow(clippy::new_ret_no_self)] - pub async fn new() -> Result { + pub async fn test_specific() -> Result { let global_test_dbname = TEST_DBNAME.to_string(); let global_application_name = TEST_APPLICATION_NAME.to_string(); diff --git a/lib/content-store/src/lib.rs b/lib/content-store/src/lib.rs index 600e76d9e0..10ba8558e1 100644 --- a/lib/content-store/src/lib.rs +++ b/lib/content-store/src/lib.rs @@ -28,7 +28,7 @@ mod store; pub use hash::ContentHash; pub use store::local::LocalStore; -pub use store::pg::migrate::PgMigrationHelpers; +pub use store::pg::tools::PgStoreTools; pub use store::pg::PgStore; pub use store::Store; pub use store::StoreError; diff --git a/lib/content-store/src/store.rs b/lib/content-store/src/store.rs index 177fd04b69..d75b0d3b8c 100644 --- a/lib/content-store/src/store.rs +++ b/lib/content-store/src/store.rs @@ -1,15 +1,14 @@ -pub(crate) mod local; -pub(crate) mod pg; - use serde::de::DeserializeOwned; use serde::Serialize; use si_data_pg::{PgError, PgPoolError}; - use thiserror::Error; use crate::hash::ContentHash; use crate::pair::ContentPairError; +pub(crate) mod local; +pub(crate) mod pg; + #[allow(missing_docs)] #[remain::sorted] #[derive(Error, Debug)] diff --git a/lib/content-store/src/store/pg.rs b/lib/content-store/src/store/pg.rs index 003b9d8d6b..0d11722db4 100644 --- a/lib/content-store/src/store/pg.rs +++ b/lib/content-store/src/store/pg.rs @@ -1,15 +1,17 @@ -use crate::hash::ContentHash; -use crate::pair::ContentPair; -use crate::store::{Store, StoreResult}; use serde::de::DeserializeOwned; use serde::Serialize; use si_data_pg::PgPool; use std::collections::HashMap; -pub(crate) mod migrate; +use crate::hash::ContentHash; +use crate::pair::ContentPair; +use crate::store::{Store, StoreResult}; +use crate::PgStoreTools; + +pub(crate) mod tools; /// A content store backed by 
Postgres. -#[derive(Debug)] +#[derive(Debug, Clone)] pub struct PgStore { inner: HashMap, pg_pool: PgPool, @@ -38,6 +40,15 @@ impl PgStore { pg_pool, }) } + + /// Create a new [`PgStore`] from a given [`PgPool`]. + pub async fn new_production() -> StoreResult { + let pg_pool = PgStoreTools::new_production_pg_pool().await?; + Ok(Self { + inner: Default::default(), + pg_pool, + }) + } } #[async_trait::async_trait] diff --git a/lib/content-store/src/store/pg/migrate.rs b/lib/content-store/src/store/pg/tools.rs similarity index 60% rename from lib/content-store/src/store/pg/migrate.rs rename to lib/content-store/src/store/pg/tools.rs index dc67148951..d01a50ced2 100644 --- a/lib/content-store/src/store/pg/migrate.rs +++ b/lib/content-store/src/store/pg/tools.rs @@ -1,6 +1,5 @@ use si_data_pg::{PgPool, PgPoolConfig, PgPoolError}; use telemetry::prelude::*; -use thiserror::Error; mod embedded { use refinery::embed_migrations; @@ -8,25 +7,16 @@ mod embedded { embed_migrations!("./src/store/pg/migrations"); } -#[remain::sorted] -#[derive(Error, Debug)] -pub enum PgMigrationHelpersError { - #[error("pg pool error: {0}")] - PgPool(#[from] PgPoolError), -} - -pub(crate) type PgMigrationHelpersResult = Result; - const DBNAME: &str = "si_content_store"; const APPLICATION_NAME: &str = "si_test_content_store"; /// A unit struct that provides helpers for performing [`PgStore`] migrations. #[allow(missing_debug_implementations)] -pub struct PgMigrationHelpers; +pub struct PgStoreTools; -impl PgMigrationHelpers { +impl PgStoreTools { /// Create a new [`PgPool`] for a production [`PgStore`]. - pub async fn new_production_pg_pool() -> PgMigrationHelpersResult { + pub async fn new_production_pg_pool() -> Result { let pg_pool_config = PgPoolConfig { dbname: DBNAME.to_string(), application_name: APPLICATION_NAME.to_string(), @@ -38,7 +28,7 @@ impl PgMigrationHelpers { /// Perform migrations for the database. 
#[instrument(skip_all)] - pub async fn migrate(pg_pool: &PgPool) -> PgMigrationHelpersResult<()> { - Ok(pg_pool.migrate(embedded::migrations::runner()).await?) + pub async fn migrate(pg_pool: &PgPool) -> Result<(), PgPoolError> { + pg_pool.migrate(embedded::migrations::runner()).await } } diff --git a/lib/dal-test/BUCK b/lib/dal-test/BUCK index fa07a50b78..08e87ae561 100644 --- a/lib/dal-test/BUCK +++ b/lib/dal-test/BUCK @@ -4,6 +4,7 @@ rust_library( name = "dal-test", deps = [ "//lib/buck2-resources:buck2-resources", + "//lib/content-store:content-store", "//lib/content-store-test:content-store-test", "//lib/council-server:council-server", "//lib/dal:dal", diff --git a/lib/dal-test/Cargo.toml b/lib/dal-test/Cargo.toml index 8e0ff18d63..73bd622b70 100644 --- a/lib/dal-test/Cargo.toml +++ b/lib/dal-test/Cargo.toml @@ -7,6 +7,7 @@ publish = false [dependencies] buck2-resources = { path = "../../lib/buck2-resources" } +content-store = { path = "../../lib/content-store" } content-store-test = { path = "../../lib/content-store-test" } council-server = { path = "../../lib/council-server" } dal = { path = "../../lib/dal" } diff --git a/lib/dal-test/src/lib.rs b/lib/dal-test/src/lib.rs index dce9c0a205..91efab8b24 100644 --- a/lib/dal-test/src/lib.rs +++ b/lib/dal-test/src/lib.rs @@ -9,7 +9,8 @@ use std::{ }; use buck2_resources::Buck2Resources; -use content_store_test::PgTestMigrationClient; +use content_store::PgStore; +use content_store_test::{PgStoreFactory, PgTestMigrationClient}; use dal::{ builtins::SelectedTestBuiltinSchemas, job::processor::{JobQueueProcessor, NatsProcessor}, @@ -170,6 +171,11 @@ pub struct TestContext { encryption_key: Arc, /// A service that can encrypt values based on the loaded donkeys symmetric_crypto_service: SymmetricCryptoService, + /// The content-addressable [`store`](content_store::Store) used by the "dal". 
+ /// + /// This should be configurable in the future, but for now, the only kind of store used is the + /// [`PgStore`](content_store::PgStore). + content_store: PgStore, } impl TestContext { @@ -251,6 +257,11 @@ impl TestContext { pub fn nats_config(&self) -> &NatsConfig { &self.config.nats } + + /// Gets a reference to the content store. + pub fn content_store(&self) -> &PgStore { + &self.content_store + } } /// A builder for a [`TestContext`]. @@ -287,21 +298,20 @@ impl TestContextBuilder { let pg_pool = PgPool::new(&self.config.pg) .await .wrap_err("failed to create global setup PgPool")?; + let content_store = PgStoreFactory::global().await?; - self.build_inner(pg_pool).await + self.build_inner(pg_pool, content_store).await } /// Builds and returns a new [`TestContext`] with its own connection pooling for each test. async fn build_for_test(&self) -> Result { let pg_pool = self.create_test_specific_db_with_pg_pool().await?; + let content_store = PgStoreFactory::test_specific().await?; - // TODO(nick): create the test-specific content store db with a pg store upon request. Until - // this is resolved, use "TestPgStore::new" instead of "PgStore::new" for integration tests. - - self.build_inner(pg_pool).await + self.build_inner(pg_pool, content_store).await } - async fn build_inner(&self, pg_pool: PgPool) -> Result { + async fn build_inner(&self, pg_pool: PgPool, content_store: PgStore) -> Result { // Need to make a new NatsConfig so that we can add the test-specific subject prefix // without leaking it to other tests. 
let mut nats_config = self.config.nats.clone(); @@ -327,6 +337,7 @@ impl TestContextBuilder { job_processor, encryption_key: self.encryption_key.clone(), symmetric_crypto_service, + content_store, }) } diff --git a/lib/dal/src/context.rs b/lib/dal/src/context.rs index bec521242a..f0404e1ca3 100644 --- a/lib/dal/src/context.rs +++ b/lib/dal/src/context.rs @@ -1,5 +1,6 @@ use std::{mem, path::PathBuf, sync::Arc}; +use content_store::{PgStore, StoreError}; use futures::Future; use serde::{Deserialize, Serialize}; use si_crypto::SymmetricCryptoService; @@ -73,6 +74,7 @@ impl ServicesContext { services_context: self, blocking, no_dependent_values: false, + content_store: None, } } @@ -222,6 +224,11 @@ pub struct DalContext { /// Determines if we should not enqueue dependent value update jobs for attribute updates in /// this context no_dependent_values: bool, + /// The content-addressable [`store`](content_store::Store) used by the "dal". + /// + /// This should be configurable in the future, but for now, the only kind of store used is the + /// [`PgStore`](content_store::PgStore). + content_store: Arc>, } impl DalContext { @@ -232,6 +239,7 @@ impl DalContext { services_context, blocking, no_dependent_values: false, + content_store: None, } } @@ -487,6 +495,11 @@ impl DalContext { self.services_context.module_index_url.as_deref() } + /// Gets a reference to the content store. + pub fn content_store(&self) -> &Arc> { + &self.content_store + } + /// Determines if a standard model object matches the tenancy of the current context and /// is in the same visibility. pub async fn check_tenancy( @@ -598,12 +611,22 @@ pub struct DalContextBuilder { /// Determines if we should not enqueue dependent value update jobs for attribute value /// changes. no_dependent_values: bool, + /// The content store, which defaults to the production [`PgStore`], if empty. 
+ /// + /// In the future, this should use the [`Store`](content_store::Store) trait instead of the + /// [`PgStore`] directly. + content_store: Option, } impl DalContextBuilder { - /// Contructs and returns a new [`DalContext`] using a default [`RequestContext`]. + /// Constructs and returns a new [`DalContext`] using a default [`RequestContext`]. pub async fn build_default(&self) -> Result { let conns = self.connections().await?; + let raw_content_store = match &self.content_store { + Some(found_content_store) => found_content_store.clone(), + None => PgStore::new_production().await?, + }; + Ok(DalContext { services_context: self.services_context.clone(), blocking: self.blocking, @@ -611,16 +634,41 @@ impl DalContextBuilder { tenancy: Tenancy::new_empty(), visibility: Visibility::new_head(false), history_actor: HistoryActor::SystemInit, + content_store: Arc::new(Mutex::new(raw_content_store)), no_dependent_values: self.no_dependent_values, }) } - /// Contructs and returns a new [`DalContext`] using a [`RequestContext`]. + /// Constructs and returns a new [`DalContext`] using a default [`RequestContext`] and a + /// provided content store. + pub async fn build_default_with_content_store( + &self, + content_store: PgStore, + ) -> Result { + let conns = self.connections().await?; + Ok(DalContext { + services_context: self.services_context.clone(), + blocking: self.blocking, + conns_state: Arc::new(Mutex::new(ConnectionState::new_from_conns(conns))), + tenancy: Tenancy::new_empty(), + visibility: Visibility::new_head(false), + history_actor: HistoryActor::SystemInit, + no_dependent_values: self.no_dependent_values, + content_store: Arc::new(Mutex::new(content_store)), + }) + } + + /// Constructs and returns a new [`DalContext`] using a [`RequestContext`]. 
pub async fn build_head( &self, access_builder: AccessBuilder, ) -> Result { let conns = self.connections().await?; + let raw_content_store = match &self.content_store { + Some(found_content_store) => found_content_store.clone(), + None => PgStore::new_production().await?, + }; + Ok(DalContext { services_context: self.services_context.clone(), blocking: self.blocking, @@ -629,15 +677,21 @@ impl DalContextBuilder { history_actor: access_builder.history_actor, visibility: Visibility::new_head(false), no_dependent_values: self.no_dependent_values, + content_store: Arc::new(Mutex::new(raw_content_store)), }) } - /// Contructs and returns a new [`DalContext`] using a [`RequestContext`]. + /// Constructs and returns a new [`DalContext`] using a [`RequestContext`]. pub async fn build( &self, request_context: RequestContext, ) -> Result { let conns = self.connections().await?; + let raw_content_store = match &self.content_store { + Some(found_content_store) => found_content_store.clone(), + None => PgStore::new_production().await?, + }; + Ok(DalContext { services_context: self.services_context.clone(), blocking: self.blocking, @@ -646,6 +700,7 @@ impl DalContextBuilder { visibility: request_context.visibility, history_actor: request_context.history_actor, no_dependent_values: self.no_dependent_values, + content_store: Arc::new(Mutex::new(raw_content_store)), }) } @@ -659,6 +714,7 @@ impl DalContextBuilder { &self.services_context.nats_conn } + /// Gets a clone of the job queue processor. 
pub fn job_processor(&self) -> Box { self.services_context.job_processor.clone() } @@ -701,6 +757,8 @@ pub enum TransactionsError { PgPool(#[from] PgPoolError), #[error(transparent)] SerdeJson(#[from] serde_json::Error), + #[error("store error: {0}")] + Store(#[from] StoreError), #[error(transparent)] Tenancy(#[from] TenancyError), #[error("cannot commit transactions on invalid connections state")] diff --git a/lib/dal/tests/integration_test/internal/mostly_everything_is_a_node_or_an_edge/content_store.rs b/lib/dal/tests/integration_test/internal/mostly_everything_is_a_node_or_an_edge/content_store.rs index d6188d0055..cd910ac3a2 100644 --- a/lib/dal/tests/integration_test/internal/mostly_everything_is_a_node_or_an_edge/content_store.rs +++ b/lib/dal/tests/integration_test/internal/mostly_everything_is_a_node_or_an_edge/content_store.rs @@ -1,21 +1,27 @@ //! For all tests in this file, provide "SI_TEST_BUILTIN_SCHEMAS=none" as an environment variable. use content_store::Store; -use content_store_test::DalTestPgStore; use dal::component::ComponentKind; use dal::{DalContext, Schema}; use dal_test::test; #[test] -async fn new(ctx: &DalContext) { - let mut store = DalTestPgStore::new().await.expect("could not create store"); - +async fn new(ctx: &mut DalContext) { // TODO(nick): replace this with something more useful. We're just trying to make sure we can // use both the DalContext and the store at the same time to talk to PG. 
let schema = Schema::new(ctx, "cumbersome", &ComponentKind::Standard) .await .expect("could not create schema"); - store.add(schema.name()).expect("could not add"); - store.write().await.expect("could not write"); + ctx.content_store() + .lock() + .await + .add(schema.name()) + .expect("could not add"); + ctx.content_store() + .lock() + .await + .write() + .await + .expect("could not write"); } diff --git a/lib/si-test-macros/src/expand.rs b/lib/si-test-macros/src/expand.rs index 2e51584eab..cfc865871d 100644 --- a/lib/si-test-macros/src/expand.rs +++ b/lib/si-test-macros/src/expand.rs @@ -528,12 +528,15 @@ pub(crate) trait FnSetupExpander { let dal_context_builder = self.setup_dal_context_builder(); let dal_context_builder = dal_context_builder.as_ref(); + let test_context = self.setup_test_context(); + let test_context = test_context.as_ref(); + let var_nw = Ident::new("nw", Span::call_site()); let var_auth_token = Ident::new("auth_token", Span::call_site()); self.code_extend(quote! { let (#var_nw, #var_auth_token) = { let ctx = #dal_context_builder - .build_default() + .build_default_with_content_store(#test_context.content_store().clone()) .await .wrap_err("failed to build default dal ctx for workspace_signup")?; let r = ::dal_test::helpers::workspace_signup(&ctx).await?; @@ -576,11 +579,14 @@ pub(crate) trait FnSetupExpander { let bas = self.setup_workspace_signup(); let nw = bas.0.as_ref(); + let test_context = self.setup_test_context(); + let test_context = test_context.as_ref(); + let var = Ident::new("default_dal_context", Span::call_site()); self.code_extend(quote! 
{ let #var = { let mut ctx = #dal_context_builder - .build_default() + .build_default_with_content_store(#test_context.content_store().clone()) .await .wrap_err("failed to build default dal ctx for dal_context_default")?; ctx.update_tenancy(::dal::Tenancy::new(*#nw.workspace.pk())); @@ -607,11 +613,14 @@ pub(crate) trait FnSetupExpander { let bas = self.setup_workspace_signup(); let nw = bas.0.as_ref(); + let test_context = self.setup_test_context(); + let test_context = test_context.as_ref(); + let var = Ident::new("dal_context_default_mut", Span::call_site()); self.code_extend(quote! { let mut #var = { let mut ctx = #dal_context_builder - .build_default() + .build_default_with_content_store(#test_context.content_store().clone()) .await .wrap_err("failed to build default dal ctx for dal_context_default_mut")?; ctx.update_tenancy(::dal::Tenancy::new(*#nw.workspace.pk())); @@ -638,11 +647,14 @@ pub(crate) trait FnSetupExpander { let bas = self.setup_workspace_signup(); let nw = bas.0.as_ref(); + let test_context = self.setup_test_context(); + let test_context = test_context.as_ref(); + let var = Ident::new("dal_context_head", Span::call_site()); self.code_extend(quote! { let #var = { let mut ctx = #dal_context_builder - .build_default() + .build_default_with_content_store(#test_context.content_store().clone()) .await .wrap_err("failed to build default dal ctx for dal_context_head")?; ctx.update_tenancy(::dal::Tenancy::new(*#nw.workspace.pk())); @@ -665,11 +677,14 @@ pub(crate) trait FnSetupExpander { let bas = self.setup_workspace_signup(); let nw = bas.0.as_ref(); + let test_context = self.setup_test_context(); + let test_context = test_context.as_ref(); + let var = Ident::new("dal_context_head_ref", Span::call_site()); self.code_extend(quote! 
{ let _dchr = { let mut ctx = #dal_context_builder - .build_default() + .build_default_with_content_store(#test_context.content_store().clone()) .await .wrap_err("failed to build default dal ctx for dal_context_head_ref")?; ctx.update_tenancy(::dal::Tenancy::new(*#nw.workspace.pk())); @@ -693,11 +708,14 @@ pub(crate) trait FnSetupExpander { let bas = self.setup_workspace_signup(); let nw = bas.0.as_ref(); + let test_context = self.setup_test_context(); + let test_context = test_context.as_ref(); + let var = Ident::new("dal_context_head_mut_ref", Span::call_site()); self.code_extend(quote! { let mut _dchmr = { let mut ctx = #dal_context_builder - .build_default() + .build_default_with_content_store(#test_context.content_store().clone()) .await .wrap_err("failed to build default dal ctx for dal_context_head_mut_ref")?; ctx.update_tenancy(::dal::Tenancy::new(*#nw.workspace.pk())); From 6b5f09350693073bbdf3263cc92e42713d84a0e0 Mon Sep 17 00:00:00 2001 From: Nick Gerace Date: Fri, 6 Oct 2023 13:47:20 -0400 Subject: [PATCH 31/92] Introduce VectorClockId and use it in rebaser-server Introduce VectorClockId to separate ChangeSetPointers from VectorClocks. This will make the behavior of the rebaser-server easier to understand because, as it stands today, VectorClockIds are just ChangeSetPointerIds. They share the same ULID. That might not always be the case either, so this change has domain-driven benefits too. Use the new VectorClockId in rebaser-server and change "updates and conflicts" detection to actually perform as intended. As a result, detection no longer requires ChangeSetPointers directly and only requires VectorClockIds. Ensure the ChangeSetMessage reflects the new VectorClockId changes. The fields now use "to_rebase" and "onto" prefixes to help imply how to prepare the payload. Add ChangeSetLoopError to differentiate errors in the rebaser-server outer loop to the inner loop(s). 
Signed-off-by: Nick Gerace --- lib/dal/src/change_set_pointer.rs | 7 ++ lib/dal/src/workspace_snapshot.rs | 18 +-- lib/dal/src/workspace_snapshot/edge_weight.rs | 15 ++- lib/dal/src/workspace_snapshot/graph.rs | 105 ++++++++++++------ .../node_weight/content_node_weight.rs | 35 +++--- .../node_weight/ordering_node_weight.rs | 33 ++++-- .../node_weight/prop_node_weight.rs | 35 +++--- .../src/workspace_snapshot/vector_clock.rs | 60 +++++----- lib/rebaser-client/src/client.rs | 7 +- lib/rebaser-core/src/lib.rs | 15 +-- .../src/server/change_set_loop.rs | 69 ++++++++---- 11 files changed, 253 insertions(+), 146 deletions(-) diff --git a/lib/dal/src/change_set_pointer.rs b/lib/dal/src/change_set_pointer.rs index da0f66048b..7676388161 100644 --- a/lib/dal/src/change_set_pointer.rs +++ b/lib/dal/src/change_set_pointer.rs @@ -9,6 +9,7 @@ use telemetry::prelude::*; use thiserror::Error; use ulid::{Generator, Ulid}; +use crate::workspace_snapshot::vector_clock::VectorClockId; use crate::workspace_snapshot::WorkspaceSnapshotId; use crate::{pk, DalContext, TransactionsError}; @@ -88,6 +89,12 @@ impl ChangeSetPointer { Ok(Self::try_from(row)?) } + /// Create a [`VectorClockId`] from the [`ChangeSetPointer`]. 
+ pub fn vector_clock_id(&self) -> VectorClockId { + let ulid: Ulid = self.id.into(); + VectorClockId::from(ulid) + } + pub fn generate_ulid(&self) -> ChangeSetPointerResult { self.generator .lock() diff --git a/lib/dal/src/workspace_snapshot.rs b/lib/dal/src/workspace_snapshot.rs index 2415770329..b67f47545c 100644 --- a/lib/dal/src/workspace_snapshot.rs +++ b/lib/dal/src/workspace_snapshot.rs @@ -43,6 +43,7 @@ use crate::workspace_snapshot::conflict::Conflict; use crate::workspace_snapshot::edge_weight::EdgeWeight; use crate::workspace_snapshot::node_weight::NodeWeight; use crate::workspace_snapshot::update::Update; +use crate::workspace_snapshot::vector_clock::VectorClockId; use crate::{ pk, workspace_snapshot::{graph::WorkspaceSnapshotGraphError, node_weight::NodeWeightError}, @@ -172,16 +173,15 @@ impl WorkspaceSnapshot { } pub async fn detect_conflicts_and_updates( - &self, - ctx: &DalContext, - to_rebase_change_set: &ChangeSetPointer, - onto_change_set: &ChangeSetPointer, + &mut self, + to_rebase_vector_clock_id: VectorClockId, + onto_workspace_snapshot: &WorkspaceSnapshot, + onto_vector_clock_id: VectorClockId, ) -> WorkspaceSnapshotResult<(Vec, Vec)> { - let onto: WorkspaceSnapshot = Self::find_for_change_set(ctx, onto_change_set.id).await?; - Ok(self.snapshot()?.detect_conflicts_and_updates( - to_rebase_change_set, - &onto.snapshot()?, - onto_change_set, + Ok(self.working_copy()?.detect_conflicts_and_updates( + to_rebase_vector_clock_id, + &onto_workspace_snapshot.snapshot()?, + onto_vector_clock_id, )?) 
} diff --git a/lib/dal/src/workspace_snapshot/edge_weight.rs b/lib/dal/src/workspace_snapshot/edge_weight.rs index b1c94aaf86..0f25d78f3f 100644 --- a/lib/dal/src/workspace_snapshot/edge_weight.rs +++ b/lib/dal/src/workspace_snapshot/edge_weight.rs @@ -52,7 +52,7 @@ impl EdgeWeight { &mut self, change_set: &ChangeSetPointer, ) -> EdgeWeightResult<()> { - self.vector_clock_write.inc(change_set)?; + self.vector_clock_write.inc(change_set.vector_clock_id())?; Ok(()) } @@ -62,16 +62,21 @@ impl EdgeWeight { } pub fn mark_seen_at(&mut self, change_set: &ChangeSetPointer, seen_at: DateTime) { - if self.vector_clock_first_seen.entry_for(change_set).is_none() { - self.vector_clock_first_seen.inc_to(change_set, seen_at); + if self + .vector_clock_first_seen + .entry_for(change_set.vector_clock_id()) + .is_none() + { + self.vector_clock_first_seen + .inc_to(change_set.vector_clock_id(), seen_at); } } pub fn new(change_set: &ChangeSetPointer, kind: EdgeWeightKind) -> EdgeWeightResult { Ok(Self { kind, - vector_clock_first_seen: VectorClock::new(change_set)?, - vector_clock_write: VectorClock::new(change_set)?, + vector_clock_first_seen: VectorClock::new(change_set.vector_clock_id())?, + vector_clock_write: VectorClock::new(change_set.vector_clock_id())?, }) } diff --git a/lib/dal/src/workspace_snapshot/graph.rs b/lib/dal/src/workspace_snapshot/graph.rs index fe510a6107..f0310d861c 100644 --- a/lib/dal/src/workspace_snapshot/graph.rs +++ b/lib/dal/src/workspace_snapshot/graph.rs @@ -8,6 +8,7 @@ use thiserror::Error; use ulid::Ulid; use crate::change_set_pointer::{ChangeSetPointer, ChangeSetPointerError}; +use crate::workspace_snapshot::vector_clock::VectorClockId; use crate::workspace_snapshot::{ conflict::Conflict, content_address::ContentAddress, @@ -389,18 +390,18 @@ impl WorkspaceSnapshotGraph { pub fn detect_conflicts_and_updates( &self, - to_rebase_change_set: &ChangeSetPointer, + to_rebase_vector_clock_id: VectorClockId, onto: &WorkspaceSnapshotGraph, - onto_change_set: 
&ChangeSetPointer, + onto_vector_clock_id: VectorClockId, ) -> WorkspaceSnapshotGraphResult<(Vec, Vec)> { let mut conflicts: Vec = Vec::new(); let mut updates: Vec = Vec::new(); if let Err(traversal_error) = petgraph::visit::depth_first_search(&onto.graph, Some(onto.root_index), |event| { self.detect_conflicts_and_updates_process_dfs_event( - to_rebase_change_set, + to_rebase_vector_clock_id, onto, - onto_change_set, + onto_vector_clock_id, event, &mut conflicts, &mut updates, @@ -415,9 +416,9 @@ impl WorkspaceSnapshotGraph { fn detect_conflicts_and_updates_process_dfs_event( &self, - to_rebase_change_set: &ChangeSetPointer, + to_rebase_vector_clock_id: VectorClockId, onto: &WorkspaceSnapshotGraph, - onto_change_set: &ChangeSetPointer, + onto_vector_clock_id: VectorClockId, event: DfsEvent, conflicts: &mut Vec, updates: &mut Vec, @@ -555,10 +556,10 @@ impl WorkspaceSnapshotGraph { let (container_conflicts, container_updates) = self .find_unordered_container_membership_conflicts_and_updates( - to_rebase_change_set, + to_rebase_vector_clock_id, to_rebase_node_index, onto, - onto_change_set, + onto_vector_clock_id, onto_node_index, ) .map_err(|err| { @@ -586,11 +587,11 @@ impl WorkspaceSnapshotGraph { ); let (container_conflicts, container_updates) = self .find_ordered_container_membership_conflicts_and_updates( - to_rebase_change_set, + to_rebase_vector_clock_id, to_rebase_node_index, to_rebase_ordering_node_index, onto, - onto_change_set, + onto_vector_clock_id, onto_node_index, onto_ordering_node_index, ) @@ -641,11 +642,11 @@ impl WorkspaceSnapshotGraph { fn find_ordered_container_membership_conflicts_and_updates( &self, - to_rebase_change_set: &ChangeSetPointer, + to_rebase_vector_clock_id: VectorClockId, to_rebase_container_index: NodeIndex, to_rebase_ordering_index: NodeIndex, onto: &WorkspaceSnapshotGraph, - onto_change_set: &ChangeSetPointer, + onto_vector_clock_id: VectorClockId, onto_container_index: NodeIndex, onto_ordering_index: NodeIndex, ) -> 
WorkspaceSnapshotGraphResult<(Vec, Vec)> { @@ -800,14 +801,14 @@ impl WorkspaceSnapshotGraph { if to_rebase_edgeref .weight() .vector_clock_first_seen() - .entry_for(onto_change_set) + .entry_for(onto_vector_clock_id) .is_none() { // `only_to_rebase_item` is new: Edge in `to_rebase` does not have a "First Seen" for `onto`. } else if self .get_node_weight(only_to_rebase_item_index)? .vector_clock_write() - .entry_for(to_rebase_change_set) + .entry_for(to_rebase_vector_clock_id) .is_some() { // Entry was deleted in `onto`. If we have also modified the entry, then @@ -832,7 +833,7 @@ impl WorkspaceSnapshotGraph { let onto_root_seen_as_of = self .get_node_weight(self.root_index)? .vector_clock_recently_seen() - .entry_for(onto_change_set); + .entry_for(onto_vector_clock_id); for only_onto_item in only_onto_items { let only_onto_item_index = *only_onto_item_indexes.get(&only_onto_item).ok_or( WorkspaceSnapshotGraphError::NodeWithIdNotFound(only_onto_item), @@ -847,7 +848,7 @@ impl WorkspaceSnapshotGraph { if let Some(onto_first_seen) = onto_edgeref .weight() .vector_clock_first_seen() - .entry_for(onto_change_set) + .entry_for(onto_vector_clock_id) { if let Some(root_seen_as_of) = onto_root_seen_as_of { if onto_first_seen > root_seen_as_of { @@ -890,10 +891,10 @@ impl WorkspaceSnapshotGraph { fn find_unordered_container_membership_conflicts_and_updates( &self, - to_rebase_change_set: &ChangeSetPointer, + to_rebase_vector_clock_id: VectorClockId, to_rebase_container_index: NodeIndex, onto: &WorkspaceSnapshotGraph, - onto_change_set: &ChangeSetPointer, + onto_vector_clock_id: VectorClockId, onto_container_index: NodeIndex, ) -> WorkspaceSnapshotGraphResult<(Vec, Vec)> { #[derive(Debug, Clone, Hash, PartialEq, Eq)] @@ -962,7 +963,7 @@ impl WorkspaceSnapshotGraph { let root_seen_as_of_onto = self .get_node_weight(self.root_index)? 
.vector_clock_recently_seen() - .entry_for(onto_change_set); + .entry_for(onto_vector_clock_id); for only_to_rebase_edge_info in only_to_rebase_edges.values() { let to_rebase_edge_weight = self .graph @@ -975,12 +976,12 @@ impl WorkspaceSnapshotGraph { // no updates. if to_rebase_edge_weight .vector_clock_first_seen() - .entry_for(onto_change_set) + .entry_for(onto_vector_clock_id) .is_some() { if to_rebase_item_weight .vector_clock_write() - .entry_for(to_rebase_change_set) + .entry_for(to_rebase_vector_clock_id) > root_seen_as_of_onto { // Edge has been modified in `onto` (`onto` item write vector clock > "seen as @@ -1005,7 +1006,7 @@ impl WorkspaceSnapshotGraph { if let Some(onto_first_seen) = onto_edge_weight .vector_clock_first_seen() - .entry_for(onto_change_set) + .entry_for(onto_vector_clock_id) { if let Some(root_seen_as_of) = root_seen_as_of_onto { if onto_first_seen > root_seen_as_of { @@ -2047,7 +2048,11 @@ mod test { new_graph.dot(); let (conflicts, updates) = new_graph - .detect_conflicts_and_updates(new_change_set, &initial_graph, initial_change_set) + .detect_conflicts_and_updates( + new_change_set.vector_clock_id(), + &initial_graph, + initial_change_set.vector_clock_id(), + ) .expect("Unable to detect conflicts and updates"); assert_eq!(Vec::::new(), conflicts); @@ -2152,7 +2157,11 @@ mod test { base_graph.dot(); let (conflicts, updates) = new_graph - .detect_conflicts_and_updates(new_change_set, &base_graph, base_change_set) + .detect_conflicts_and_updates( + new_change_set.vector_clock_id(), + &base_graph, + base_change_set.vector_clock_id(), + ) .expect("Unable to detect conflicts and updates"); assert_eq!(Vec::::new(), conflicts); @@ -2309,7 +2318,11 @@ mod test { base_graph.dot(); let (conflicts, updates) = new_graph - .detect_conflicts_and_updates(new_change_set, &base_graph, base_change_set) + .detect_conflicts_and_updates( + new_change_set.vector_clock_id(), + &base_graph, + base_change_set.vector_clock_id(), + ) .expect("Unable to detect 
conflicts and updates"); assert_eq!(Vec::::new(), conflicts); @@ -2451,7 +2464,11 @@ mod test { base_graph.dot(); let (conflicts, updates) = new_graph - .detect_conflicts_and_updates(new_change_set, &base_graph, base_change_set) + .detect_conflicts_and_updates( + new_change_set.vector_clock_id(), + &base_graph, + base_change_set.vector_clock_id(), + ) .expect("Unable to detect conflicts and updates"); assert_eq!( @@ -2591,7 +2608,11 @@ mod test { new_graph.dot(); let (conflicts, updates) = new_graph - .detect_conflicts_and_updates(new_change_set, &base_graph, base_change_set) + .detect_conflicts_and_updates( + new_change_set.vector_clock_id(), + &base_graph, + base_change_set.vector_clock_id(), + ) .expect("Unable to detect conflicts and updates"); assert_eq!( @@ -2867,7 +2888,11 @@ mod test { .expect("Unable to update the schema"); let (conflicts, updates) = new_graph - .detect_conflicts_and_updates(new_change_set, &base_graph, base_change_set) + .detect_conflicts_and_updates( + new_change_set.vector_clock_id(), + &base_graph, + base_change_set.vector_clock_id(), + ) .expect("Unable to detect conflicts and updates"); println!("base graph current root: {:?}", base_graph.root_index); @@ -3887,7 +3912,11 @@ mod test { new_graph.dot(); let (conflicts, updates) = new_graph - .detect_conflicts_and_updates(new_change_set, &initial_graph, initial_change_set) + .detect_conflicts_and_updates( + new_change_set.vector_clock_id(), + &initial_graph, + initial_change_set.vector_clock_id(), + ) .expect("Unable to detect conflicts and updates"); assert_eq!(Vec::::new(), conflicts); @@ -4119,7 +4148,11 @@ mod test { new_graph.dot(); let (conflicts, updates) = new_graph - .detect_conflicts_and_updates(new_change_set, &initial_graph, initial_change_set) + .detect_conflicts_and_updates( + new_change_set.vector_clock_id(), + &initial_graph, + initial_change_set.vector_clock_id(), + ) .expect("Unable to detect conflicts and updates"); assert_eq!(Vec::::new(), conflicts); @@ -4392,7 
+4425,11 @@ mod test { new_graph.dot(); let (conflicts, updates) = new_graph - .detect_conflicts_and_updates(new_change_set, &initial_graph, initial_change_set) + .detect_conflicts_and_updates( + new_change_set.vector_clock_id(), + &initial_graph, + initial_change_set.vector_clock_id(), + ) .expect("Unable to detect conflicts and updates"); assert_eq!( @@ -4676,7 +4713,11 @@ mod test { new_graph.dot(); let (conflicts, updates) = new_graph - .detect_conflicts_and_updates(new_change_set, &initial_graph, initial_change_set) + .detect_conflicts_and_updates( + new_change_set.vector_clock_id(), + &initial_graph, + initial_change_set.vector_clock_id(), + ) .expect("Unable to detect conflicts and updates"); assert_eq!(Vec::::new(), conflicts); diff --git a/lib/dal/src/workspace_snapshot/node_weight/content_node_weight.rs b/lib/dal/src/workspace_snapshot/node_weight/content_node_weight.rs index a28d0cda16..8cf139efea 100644 --- a/lib/dal/src/workspace_snapshot/node_weight/content_node_weight.rs +++ b/lib/dal/src/workspace_snapshot/node_weight/content_node_weight.rs @@ -50,9 +50,9 @@ impl ContentNodeWeight { lineage_id: change_set.generate_ulid()?, content_address, merkle_tree_hash: ContentHash::default(), - vector_clock_first_seen: VectorClock::new(change_set)?, - vector_clock_recently_seen: VectorClock::new(change_set)?, - vector_clock_write: VectorClock::new(change_set)?, + vector_clock_first_seen: VectorClock::new(change_set.vector_clock_id())?, + vector_clock_recently_seen: VectorClock::new(change_set.vector_clock_id())?, + vector_clock_write: VectorClock::new(change_set.vector_clock_id())?, }) } @@ -72,8 +72,9 @@ impl ContentNodeWeight { &mut self, change_set: &ChangeSetPointer, ) -> NodeWeightResult<()> { - self.vector_clock_write.inc(change_set)?; - self.vector_clock_recently_seen.inc(change_set)?; + self.vector_clock_write.inc(change_set.vector_clock_id())?; + self.vector_clock_recently_seen + .inc(change_set.vector_clock_id())?; Ok(()) } @@ -84,9 +85,14 @@ impl 
ContentNodeWeight { pub fn mark_seen_at(&mut self, change_set: &ChangeSetPointer, seen_at: DateTime) { self.vector_clock_recently_seen - .inc_to(change_set, seen_at.clone()); - if self.vector_clock_first_seen.entry_for(change_set).is_none() { - self.vector_clock_first_seen.inc_to(change_set, seen_at); + .inc_to(change_set.vector_clock_id(), seen_at.clone()); + if self + .vector_clock_first_seen + .entry_for(change_set.vector_clock_id()) + .is_none() + { + self.vector_clock_first_seen + .inc_to(change_set.vector_clock_id(), seen_at); } } @@ -96,11 +102,13 @@ impl ContentNodeWeight { other: &Self, ) -> NodeWeightResult<()> { self.vector_clock_write - .merge(change_set, &other.vector_clock_write)?; + .merge(change_set.vector_clock_id(), &other.vector_clock_write)?; self.vector_clock_first_seen - .merge(change_set, &other.vector_clock_first_seen)?; - self.vector_clock_recently_seen - .merge(change_set, &other.vector_clock_recently_seen)?; + .merge(change_set.vector_clock_id(), &other.vector_clock_first_seen)?; + self.vector_clock_recently_seen.merge( + change_set.vector_clock_id(), + &other.vector_clock_recently_seen, + )?; Ok(()) } @@ -159,7 +167,8 @@ impl ContentNodeWeight { change_set: &ChangeSetPointer, new_val: DateTime, ) { - self.vector_clock_recently_seen.inc_to(change_set, new_val); + self.vector_clock_recently_seen + .inc_to(change_set.vector_clock_id(), new_val); } pub fn vector_clock_first_seen(&self) -> &VectorClock { diff --git a/lib/dal/src/workspace_snapshot/node_weight/ordering_node_weight.rs b/lib/dal/src/workspace_snapshot/node_weight/ordering_node_weight.rs index 90342d327b..a57f04859b 100644 --- a/lib/dal/src/workspace_snapshot/node_weight/ordering_node_weight.rs +++ b/lib/dal/src/workspace_snapshot/node_weight/ordering_node_weight.rs @@ -32,7 +32,8 @@ impl OrderingNodeWeight { &mut self, change_set: &ChangeSetPointer, ) -> NodeWeightResult<()> { - self.vector_clock_first_seen.inc(change_set)?; + self.vector_clock_first_seen + 
.inc(change_set.vector_clock_id())?; Ok(()) } @@ -41,7 +42,9 @@ impl OrderingNodeWeight { &mut self, change_set: &ChangeSetPointer, ) -> NodeWeightResult<()> { - self.vector_clock_write.inc(change_set).map_err(Into::into) + self.vector_clock_write + .inc(change_set.vector_clock_id()) + .map_err(Into::into) } pub fn lineage_id(&self) -> Ulid { @@ -50,9 +53,14 @@ impl OrderingNodeWeight { pub fn mark_seen_at(&mut self, change_set: &ChangeSetPointer, seen_at: DateTime) { self.vector_clock_recently_seen - .inc_to(change_set, seen_at.clone()); - if self.vector_clock_first_seen.entry_for(change_set).is_none() { - self.vector_clock_first_seen.inc_to(change_set, seen_at); + .inc_to(change_set.vector_clock_id(), seen_at.clone()); + if self + .vector_clock_first_seen + .entry_for(change_set.vector_clock_id()) + .is_none() + { + self.vector_clock_first_seen + .inc_to(change_set.vector_clock_id(), seen_at); } } @@ -62,9 +70,11 @@ impl OrderingNodeWeight { other: &OrderingNodeWeight, ) -> NodeWeightResult<()> { self.vector_clock_write - .merge(change_set, other.vector_clock_write())?; - self.vector_clock_first_seen - .merge(change_set, other.vector_clock_first_seen())?; + .merge(change_set.vector_clock_id(), other.vector_clock_write())?; + self.vector_clock_first_seen.merge( + change_set.vector_clock_id(), + other.vector_clock_first_seen(), + )?; Ok(()) } @@ -77,8 +87,8 @@ impl OrderingNodeWeight { Ok(Self { id: change_set.generate_ulid()?, lineage_id: change_set.generate_ulid()?, - vector_clock_write: VectorClock::new(change_set)?, - vector_clock_first_seen: VectorClock::new(change_set)?, + vector_clock_write: VectorClock::new(change_set.vector_clock_id())?, + vector_clock_first_seen: VectorClock::new(change_set.vector_clock_id())?, ..Default::default() }) } @@ -122,7 +132,8 @@ impl OrderingNodeWeight { change_set: &ChangeSetPointer, new_val: DateTime, ) { - self.vector_clock_recently_seen.inc_to(change_set, new_val); + self.vector_clock_recently_seen + 
.inc_to(change_set.vector_clock_id(), new_val); } fn update_content_hash(&mut self) { diff --git a/lib/dal/src/workspace_snapshot/node_weight/prop_node_weight.rs b/lib/dal/src/workspace_snapshot/node_weight/prop_node_weight.rs index ccc3129166..7dbf46219f 100644 --- a/lib/dal/src/workspace_snapshot/node_weight/prop_node_weight.rs +++ b/lib/dal/src/workspace_snapshot/node_weight/prop_node_weight.rs @@ -42,9 +42,9 @@ impl PropNodeWeight { merkle_tree_hash: ContentHash::default(), kind, name, - vector_clock_first_seen: VectorClock::new(change_set)?, - vector_clock_recently_seen: VectorClock::new(change_set)?, - vector_clock_write: VectorClock::new(change_set)?, + vector_clock_first_seen: VectorClock::new(change_set.vector_clock_id())?, + vector_clock_recently_seen: VectorClock::new(change_set.vector_clock_id())?, + vector_clock_write: VectorClock::new(change_set.vector_clock_id())?, }) } @@ -64,8 +64,9 @@ impl PropNodeWeight { &mut self, change_set: &ChangeSetPointer, ) -> NodeWeightResult<()> { - self.vector_clock_write.inc(change_set)?; - self.vector_clock_recently_seen.inc(change_set)?; + self.vector_clock_write.inc(change_set.vector_clock_id())?; + self.vector_clock_recently_seen + .inc(change_set.vector_clock_id())?; Ok(()) } @@ -76,9 +77,14 @@ impl PropNodeWeight { pub fn mark_seen_at(&mut self, change_set: &ChangeSetPointer, seen_at: DateTime) { self.vector_clock_recently_seen - .inc_to(change_set, seen_at.clone()); - if self.vector_clock_first_seen.entry_for(change_set).is_none() { - self.vector_clock_first_seen.inc_to(change_set, seen_at); + .inc_to(change_set.vector_clock_id(), seen_at.clone()); + if self + .vector_clock_first_seen + .entry_for(change_set.vector_clock_id()) + .is_none() + { + self.vector_clock_first_seen + .inc_to(change_set.vector_clock_id(), seen_at); } } @@ -88,11 +94,13 @@ impl PropNodeWeight { other: &Self, ) -> NodeWeightResult<()> { self.vector_clock_write - .merge(change_set, &other.vector_clock_write)?; + 
.merge(change_set.vector_clock_id(), &other.vector_clock_write)?; self.vector_clock_first_seen - .merge(change_set, &other.vector_clock_first_seen)?; - self.vector_clock_recently_seen - .merge(change_set, &other.vector_clock_recently_seen)?; + .merge(change_set.vector_clock_id(), &other.vector_clock_first_seen)?; + self.vector_clock_recently_seen.merge( + change_set.vector_clock_id(), + &other.vector_clock_recently_seen, + )?; Ok(()) } @@ -202,7 +210,8 @@ impl PropNodeWeight { change_set: &ChangeSetPointer, new_val: DateTime, ) { - self.vector_clock_recently_seen.inc_to(change_set, new_val); + self.vector_clock_recently_seen + .inc_to(change_set.vector_clock_id(), new_val); } pub fn vector_clock_first_seen(&self) -> &VectorClock { diff --git a/lib/dal/src/workspace_snapshot/vector_clock.rs b/lib/dal/src/workspace_snapshot/vector_clock.rs index 53ef41ee3a..b5793f9444 100644 --- a/lib/dal/src/workspace_snapshot/vector_clock.rs +++ b/lib/dal/src/workspace_snapshot/vector_clock.rs @@ -6,10 +6,8 @@ use chrono::{DateTime, Utc}; use serde::{Deserialize, Serialize}; use thiserror::Error; -use crate::workspace_snapshot::{ - lamport_clock::{LamportClock, LamportClockError}, - {ChangeSetPointer, ChangeSetPointerId}, -}; +use crate::pk; +use crate::workspace_snapshot::lamport_clock::{LamportClock, LamportClockError}; #[derive(Debug, Error)] pub enum VectorClockError { @@ -19,74 +17,78 @@ pub enum VectorClockError { pub type VectorClockResult = Result; +pk!(VectorClockId); + #[derive(Default, Serialize, Deserialize, PartialEq, Eq, Clone)] pub struct VectorClock { - entries: HashMap, + entries: HashMap, } impl VectorClock { - /// Create a new [`VectorClock`] with an entry for [`ChangeSetPointer`]. - pub fn new(change_set: &ChangeSetPointer) -> VectorClockResult { + /// Create a new [`VectorClock`] with an entry for [`VectorClockId`]. 
+ pub fn new(vector_clock_id: VectorClockId) -> VectorClockResult { let lamport_clock = LamportClock::new()?; let mut entries = HashMap::new(); - entries.insert(change_set.id, lamport_clock); + entries.insert(vector_clock_id, lamport_clock); Ok(VectorClock { entries }) } - pub fn entry_for(&self, change_set: &ChangeSetPointer) -> Option { - self.entries.get(&change_set.id).copied() + pub fn entry_for(&self, vector_clock_id: VectorClockId) -> Option { + self.entries.get(&vector_clock_id).copied() } pub fn has_entries_newer_than(&self, clock_stamp: LamportClock) -> bool { self.entries.values().any(|v| *v > clock_stamp) } - pub fn inc_to(&mut self, change_set: &ChangeSetPointer, new_clock_value: DateTime) { - if let Some(lamport_clock) = self.entries.get_mut(&change_set.id) { + pub fn inc_to(&mut self, vector_clock_id: VectorClockId, new_clock_value: DateTime) { + if let Some(lamport_clock) = self.entries.get_mut(&vector_clock_id) { lamport_clock.inc_to(new_clock_value); } else { - self.entries - .insert(change_set.id, LamportClock::new_with_value(new_clock_value)); + self.entries.insert( + vector_clock_id, + LamportClock::new_with_value(new_clock_value), + ); } } - /// Increment the entry for [`ChangeSetPointer`], adding one if there wasn't one already. - pub fn inc(&mut self, change_set: &ChangeSetPointer) -> VectorClockResult<()> { - if let Some(lamport_clock) = self.entries.get_mut(&change_set.id) { + /// Increment the entry for [`VectorClockId`], adding one if there wasn't one already. 
+ pub fn inc(&mut self, vector_clock_id: VectorClockId) -> VectorClockResult<()> { + if let Some(lamport_clock) = self.entries.get_mut(&vector_clock_id) { lamport_clock.inc()?; } else { - self.entries.insert(change_set.id, LamportClock::new()?); + self.entries.insert(vector_clock_id, LamportClock::new()?); } Ok(()) } /// Add all entries in `other` to `self`, taking the most recent value if the entry already - /// exists in `self`, then increment the entry for [`ChangeSetPointer`] (adding one if it is not + /// exists in `self`, then increment the entry for [`VectorClockId`] (adding one if it is not /// already there). pub fn merge( &mut self, - change_set: &ChangeSetPointer, + vector_clock_id: VectorClockId, other: &VectorClock, ) -> VectorClockResult<()> { - for (other_change_set_id, other_lamport_clock) in other.entries.iter() { - if let Some(lamport_clock) = self.entries.get_mut(other_change_set_id) { + for (other_vector_clock_id, other_lamport_clock) in other.entries.iter() { + if let Some(lamport_clock) = self.entries.get_mut(other_vector_clock_id) { lamport_clock.merge(other_lamport_clock); } else { self.entries - .insert(*other_change_set_id, *other_lamport_clock); + .insert(*other_vector_clock_id, *other_lamport_clock); } } - self.inc(change_set)?; + self.inc(vector_clock_id)?; Ok(()) } - /// Return a new [`VectorClock`] with the entry for [`ChangeSetPointer`] incremented. - pub fn fork(&self, change_set: &ChangeSetPointer) -> VectorClockResult { + /// Return a new [`VectorClock`] with the entry for [`VectorClockId`] incremented. + pub fn fork(&self, vector_clock_id: VectorClockId) -> VectorClockResult { let mut forked = self.clone(); - forked.inc(change_set)?; + forked.inc(vector_clock_id)?; Ok(forked) } @@ -95,8 +97,8 @@ impl VectorClock { /// `self`, meaning that `self` has already seen/incorporated all of the information /// in `other`. 
pub fn is_newer_than(&self, other: &VectorClock) -> bool { - for (other_change_set_id, other_lamport_clock) in &other.entries { - if let Some(my_clock) = self.entries.get(other_change_set_id) { + for (other_vector_clock_id, other_lamport_clock) in &other.entries { + if let Some(my_clock) = self.entries.get(other_vector_clock_id) { if other_lamport_clock > my_clock { return false; } diff --git a/lib/rebaser-client/src/client.rs b/lib/rebaser-client/src/client.rs index 450b4c4576..9f7ec87d23 100644 --- a/lib/rebaser-client/src/client.rs +++ b/lib/rebaser-client/src/client.rs @@ -81,9 +81,10 @@ impl Client { .producer .send_single( ChangeSetMessage { - change_set_to_update, - workspace_snapshot_to_rebase_on_top_of_current_snapshot_being_pointed_at, - change_set_that_dictates_changes, + to_rebase_vector_clock_id: change_set_to_update, + to_rebase_workspace_snapshot_id: + workspace_snapshot_to_rebase_on_top_of_current_snapshot_being_pointed_at, + onto_change_set_id: change_set_that_dictates_changes, }, Some(stream.reply_stream.clone()), ) diff --git a/lib/rebaser-core/src/lib.rs b/lib/rebaser-core/src/lib.rs index 64e2bfcaca..79af401850 100644 --- a/lib/rebaser-core/src/lib.rs +++ b/lib/rebaser-core/src/lib.rs @@ -55,13 +55,14 @@ pub struct ManagementMessage { #[derive(Debug, Serialize, Deserialize)] pub struct ChangeSetMessage { /// Corresponds to the change set whose pointer is to be updated. - pub change_set_to_update: Ulid, - /// Corresponds to the workspace snapshot that will be rebased on top of the snapshot that the - /// change set is currently pointing at. - pub workspace_snapshot_to_rebase_on_top_of_current_snapshot_being_pointed_at: Ulid, - /// Corresponds to the change set that's either the base change set, the last change set before - /// edits were made, or the change set that you are trying to “merge” into the base. 
- pub change_set_that_dictates_changes: Ulid, + pub onto_change_set_id: Ulid, + /// Corresponds to the workspace snapshot that will be rebased on top of the workspace snapshot + /// that the change set is currently pointing at. + pub to_rebase_workspace_snapshot_id: Ulid, + /// Derived from the ephemeral or persisted change set that's either the base change set, the + /// last change set before edits were made, or the change set that you are trying to rebase + /// onto base. + pub to_rebase_vector_clock_id: Ulid, } /// The message shape that the rebaser change set loop will use for replying to the client. diff --git a/lib/rebaser-server/src/server/change_set_loop.rs b/lib/rebaser-server/src/server/change_set_loop.rs index 3892afc5d4..781f7a302b 100644 --- a/lib/rebaser-server/src/server/change_set_loop.rs +++ b/lib/rebaser-server/src/server/change_set_loop.rs @@ -1,10 +1,35 @@ -use dal::change_set_pointer::ChangeSetPointer; -use dal::{DalContext, DalContextBuilder, Tenancy, Visibility, WorkspacePk, WorkspaceSnapshot}; +use dal::change_set_pointer::{ChangeSetPointer, ChangeSetPointerError, ChangeSetPointerId}; +use dal::workspace_snapshot::WorkspaceSnapshotError; +use dal::{ + DalContext, DalContextBuilder, Tenancy, TransactionsError, Visibility, WorkspacePk, + WorkspaceSnapshot, +}; use rebaser_core::{ChangeSetMessage, ChangeSetReplyMessage}; -use si_rabbitmq::{Consumer, Delivery, Environment, Producer}; +use si_rabbitmq::{Consumer, Delivery, Environment, Producer, RabbitError}; use telemetry::prelude::*; +use thiserror::Error; -use crate::server::{ServerError, ServerResult}; +#[allow(missing_docs)] +#[remain::sorted] +#[derive(Debug, Error)] +enum ChangeSetLoopError { + #[error("workspace snapshot error: {0}")] + ChangeSetPointer(#[from] ChangeSetPointerError), + #[error("missing change set message \"reply_to\" field")] + MissingChangeSetMessageReplyTo, + #[error("missing workspace snapshot for change set ({0}) (the change set likely isn't pointing at a workspace 
snapshot)")] + MissingWorkspaceSnapshotForChangeSet(ChangeSetPointerId), + #[error("rabbit error: {0}")] + Rabbit(#[from] RabbitError), + #[error("serde json error: {0}")] + SerdeJson(#[from] serde_json::Error), + #[error("transactions error: {0}")] + Transactions(#[from] TransactionsError), + #[error("workspace snapshot error: {0}")] + WorkspaceSnapshot(#[from] WorkspaceSnapshotError), +} + +type ChangeSetLoopResult = Result; pub(crate) async fn change_set_loop_infallible_wrapper( ctx_builder: DalContextBuilder, @@ -18,7 +43,7 @@ pub(crate) async fn change_set_loop_infallible_wrapper( async fn change_set_loop( ctx_builder: DalContextBuilder, mut consumer: Consumer, -) -> ServerResult> { +) -> ChangeSetLoopResult> { let mut ctx = ctx_builder.build_default().await?; ctx.update_visibility(Visibility::new_head(false)); ctx.update_tenancy(Tenancy::new(WorkspacePk::NONE)); @@ -80,37 +105,33 @@ async fn process_delivery_infallible_wrapper( } } -// TODO(nick): use real errors in this function. async fn process_delivery( ctx: &mut DalContext, environment: &Environment, inbound_stream: impl AsRef, delivery: &Delivery, reply_to: impl AsRef, -) -> ServerResult<()> { +) -> ChangeSetLoopResult<()> { let raw_message = match &delivery.message_contents { Some(found_raw_message) => found_raw_message, - None => return Err(ServerError::MissingManagementMessageReplyTo), + None => return Err(ChangeSetLoopError::MissingChangeSetMessageReplyTo), }; let message: ChangeSetMessage = serde_json::from_value(raw_message.clone())?; - // ------------------------------------ - // NOTE(nick): the "work" begins below! 
- // ------------------------------------ - - let to_rebase: WorkspaceSnapshot = WorkspaceSnapshot::find( - ctx, - message - .workspace_snapshot_to_rebase_on_top_of_current_snapshot_being_pointed_at - .into(), - ) - .await?; - let to_rebase_change_set = - ChangeSetPointer::find(ctx, message.change_set_that_dictates_changes.into()).await?; - let onto_change_set = ChangeSetPointer::find(ctx, message.change_set_to_update.into()).await?; + let mut to_rebase_workspace_snapshot: WorkspaceSnapshot = + WorkspaceSnapshot::find(ctx, message.to_rebase_workspace_snapshot_id.into()).await?; + let onto_change_set = ChangeSetPointer::find(ctx, message.onto_change_set_id.into()).await?; + let onto_workspace_snapshot_id = onto_change_set.workspace_snapshot_id.ok_or( + ChangeSetLoopError::MissingWorkspaceSnapshotForChangeSet(onto_change_set.id), + )?; + let onto_workspace_snapshot = WorkspaceSnapshot::find(ctx, onto_workspace_snapshot_id).await?; - let (conflicts, updates) = to_rebase - .detect_conflicts_and_updates(ctx, &to_rebase_change_set, &onto_change_set) + let (conflicts, updates) = to_rebase_workspace_snapshot + .detect_conflicts_and_updates( + message.to_rebase_vector_clock_id.into(), + &onto_workspace_snapshot, + onto_change_set.vector_clock_id(), + ) .await?; // TODO(nick): for now, just send back the conflicts and updates. We'll need to do something From 3cbbcb277cb864f8dd9693f962a3356a0d86b20f Mon Sep 17 00:00:00 2001 From: Nick Gerace Date: Fri, 13 Oct 2023 17:27:29 -0400 Subject: [PATCH 32/92] Add si-cbor and use for WorkspaceSnapshot and ContentPair Add a new library, si-cbor, for quickly encoding and decoding CBOR serialized objects. Use si-cbor for both the serialized snapshots in WorkspaceSnapshot and for the value in ContentPair. This should help with space savings and performance in the long term. 
Signed-off-by: Nick Gerace --- Cargo.lock | 12 +++ Cargo.toml | 1 + lib/content-store/BUCK | 3 +- lib/content-store/Cargo.toml | 1 + lib/content-store/src/pair.rs | 1 + lib/content-store/src/store.rs | 3 + lib/content-store/src/store/pg.rs | 8 +- lib/dal/BUCK | 1 + lib/dal/Cargo.toml | 1 + .../migrations/U3000__workspace_snapshots.sql | 2 +- lib/dal/src/workspace_snapshot.rs | 20 ++--- .../src/server/change_set_loop.rs | 5 +- lib/si-cbor/BUCK | 12 +++ lib/si-cbor/Cargo.toml | 11 +++ lib/si-cbor/src/lib.rs | 74 +++++++++++++++++++ 15 files changed, 135 insertions(+), 20 deletions(-) create mode 100644 lib/si-cbor/BUCK create mode 100644 lib/si-cbor/Cargo.toml create mode 100644 lib/si-cbor/src/lib.rs diff --git a/Cargo.lock b/Cargo.lock index 3a55ee5800..da6c40998e 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -948,6 +948,7 @@ dependencies = [ "remain", "serde", "serde_json", + "si-cbor", "si-data-pg", "telemetry", "thiserror", @@ -1331,6 +1332,7 @@ dependencies = [ "serde-aux", "serde_json", "serde_with 3.4.0", + "si-cbor", "si-crypto", "si-data-nats", "si-data-pg", @@ -5093,6 +5095,16 @@ dependencies = [ "tokio", ] +[[package]] +name = "si-cbor" +version = "0.1.0" +dependencies = [ + "ciborium", + "remain", + "serde", + "thiserror", +] + [[package]] name = "si-cli" version = "0.1.0" diff --git a/Cargo.toml b/Cargo.toml index f90c941dfe..591070a14e 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -34,6 +34,7 @@ members = [ "lib/rebaser-core", "lib/rebaser-server", "lib/sdf-server", + "lib/si-cbor", "lib/si-crypto", "lib/si-data-nats", "lib/si-data-pg", diff --git a/lib/content-store/BUCK b/lib/content-store/BUCK index 26f8e270c3..3476c9cf52 100644 --- a/lib/content-store/BUCK +++ b/lib/content-store/BUCK @@ -3,6 +3,7 @@ load("@prelude-si//:macros.bzl", "rust_library") rust_library( name = "content-store", deps = [ + "//lib/si-cbor:si-cbor", "//lib/si-data-pg:si-data-pg", "//lib/telemetry-rs:telemetry", "//third-party/rust:async-trait", @@ -23,4 +24,4 @@ rust_library( env 
= { "CARGO_MANIFEST_DIR": ".", }, -) \ No newline at end of file +) diff --git a/lib/content-store/Cargo.toml b/lib/content-store/Cargo.toml index 099021134c..17998b0ad0 100644 --- a/lib/content-store/Cargo.toml +++ b/lib/content-store/Cargo.toml @@ -5,6 +5,7 @@ edition = "2021" publish = false [dependencies] +si-cbor = { path = "../../lib/si-cbor" } si-data-pg = { path = "../../lib/si-data-pg" } telemetry = { path = "../../lib/telemetry-rs" } diff --git a/lib/content-store/src/pair.rs b/lib/content-store/src/pair.rs index b0f984c361..fc1b8bbab5 100644 --- a/lib/content-store/src/pair.rs +++ b/lib/content-store/src/pair.rs @@ -21,6 +21,7 @@ pub(crate) type ContentPairResult = Result; pub(crate) struct ContentPair { key: String, created_at: DateTime, + /// Serialized CBOR bytes. value: Vec, } diff --git a/lib/content-store/src/store.rs b/lib/content-store/src/store.rs index d75b0d3b8c..2eed650da3 100644 --- a/lib/content-store/src/store.rs +++ b/lib/content-store/src/store.rs @@ -1,5 +1,6 @@ use serde::de::DeserializeOwned; use serde::Serialize; +use si_cbor::CborError; use si_data_pg::{PgError, PgPoolError}; use thiserror::Error; @@ -13,6 +14,8 @@ pub(crate) mod pg; #[remain::sorted] #[derive(Error, Debug)] pub enum StoreError { + #[error("cbor error: {0}")] + Cbor(#[from] CborError), #[error("content pair error: {0}")] ContentPair(#[from] ContentPairError), #[error("pg error: {0}")] diff --git a/lib/content-store/src/store/pg.rs b/lib/content-store/src/store/pg.rs index 0d11722db4..741ae748ef 100644 --- a/lib/content-store/src/store/pg.rs +++ b/lib/content-store/src/store/pg.rs @@ -65,7 +65,7 @@ impl Store for PgStore { where T: Serialize + ?Sized, { - let value = serde_json::to_vec(object)?; + let value = si_cbor::encode(object)?; let key = ContentHash::new(&value); self.inner.insert(key, PgStoreItem::new(value)); Ok(key) @@ -79,9 +79,9 @@ impl Store for PgStore { Some(item) => serde_json::from_slice(&item.value)?, None => match ContentPair::find(&self.pg_pool, 
key).await? { Some(content_pair) => { - let bytes = content_pair.value(); - self.add(bytes)?; - serde_json::from_slice(bytes)? + let encoded = content_pair.value(); + self.add(encoded)?; + si_cbor::decode(encoded)? } None => return Ok(None), }, diff --git a/lib/dal/BUCK b/lib/dal/BUCK index 101a38b9bd..d4242fc8b0 100644 --- a/lib/dal/BUCK +++ b/lib/dal/BUCK @@ -7,6 +7,7 @@ load( rust_library( name = "dal", deps = [ + "//lib/si-cbor:si-cbor", "//lib/content-store:content-store", "//lib/council-server:council-server", "//lib/nats-subscriber:nats-subscriber", diff --git a/lib/dal/Cargo.toml b/lib/dal/Cargo.toml index daf3f5a941..de80fd3c7d 100644 --- a/lib/dal/Cargo.toml +++ b/lib/dal/Cargo.toml @@ -39,6 +39,7 @@ serde = { workspace = true } serde-aux = { workspace = true } serde_json = { workspace = true } serde_with = { workspace = true } +si-cbor = { path = "../../lib/si-cbor" } si-crypto = { path = "../../lib/si-crypto" } si-data-nats = { path = "../../lib/si-data-nats" } si-data-pg = { path = "../../lib/si-data-pg" } diff --git a/lib/dal/src/migrations/U3000__workspace_snapshots.sql b/lib/dal/src/migrations/U3000__workspace_snapshots.sql index e35f915d15..e11dce2f7e 100644 --- a/lib/dal/src/migrations/U3000__workspace_snapshots.sql +++ b/lib/dal/src/migrations/U3000__workspace_snapshots.sql @@ -2,7 +2,7 @@ CREATE TABLE workspace_snapshots ( id ident primary key NOT NULL DEFAULT ident_create_v1(), created_at timestamp with time zone NOT NULL DEFAULT CLOCK_TIMESTAMP(), - snapshot jsonb NOT NULL + snapshot bytea NOT NULL -- TODO(nick): add once workspaces are added -- workspace_id ident REFERENCES workspaces_v2 (id) NOT NULL, -- TODO(nick): replace the existing primary key with this once workspaces are added diff --git a/lib/dal/src/workspace_snapshot.rs b/lib/dal/src/workspace_snapshot.rs index b67f47545c..cd6c626d2c 100644 --- a/lib/dal/src/workspace_snapshot.rs +++ b/lib/dal/src/workspace_snapshot.rs @@ -33,7 +33,7 @@ pub mod vector_clock; use chrono::{DateTime, 
Utc}; use petgraph::prelude::*; use serde::{Deserialize, Serialize}; -use serde_json::Value; +use si_cbor::CborError; use si_data_pg::{PgError, PgRow}; use telemetry::prelude::*; use thiserror::Error; @@ -56,6 +56,8 @@ const FIND_FOR_CHANGE_SET: &str = #[remain::sorted] #[derive(Error, Debug)] pub enum WorkspaceSnapshotError { + #[error("cbor error: {0}")] + Cbor(#[from] CborError), #[error("monotonic error: {0}")] Monotonic(#[from] ulid::MonotonicError), #[error("NodeWeight error: {0}")] @@ -64,8 +66,6 @@ pub enum WorkspaceSnapshotError { Pg(#[from] PgError), #[error("poison error: {0}")] Poison(String), - #[error("serde json error: {0}")] - SerdeJson(#[from] serde_json::Error), #[error("transactions error: {0}")] Transactions(#[from] TransactionsError), #[error("WorkspaceSnapshotGraph error: {0}")] @@ -82,7 +82,7 @@ pk!(WorkspaceSnapshotId); pub struct WorkspaceSnapshot { id: WorkspaceSnapshotId, created_at: DateTime, - snapshot: Value, + snapshot: Vec, #[serde(skip_serializing)] working_copy: Option, } @@ -127,7 +127,7 @@ impl WorkspaceSnapshot { ctx: &DalContext, graph: WorkspaceSnapshotGraph, ) -> WorkspaceSnapshotResult { - let serialized_snapshot = serde_json::to_value(graph)?; + let serialized_snapshot = si_cbor::encode(&graph)?; let row = ctx .txns() .await? @@ -146,17 +146,13 @@ impl WorkspaceSnapshot { fn working_copy(&mut self) -> WorkspaceSnapshotResult<&mut WorkspaceSnapshotGraph> { if self.working_copy.is_none() { - self.working_copy = Some(serde_json::from_value(self.snapshot.clone())?); + self.working_copy = Some(si_cbor::decode(&self.snapshot)?); } self.working_copy .as_mut() .ok_or(WorkspaceSnapshotError::WorkspaceSnapshotGraphMissing) } - fn snapshot(&self) -> WorkspaceSnapshotResult { - Ok(serde_json::from_value(self.snapshot.clone())?) - } - pub fn add_node(&mut self, node: NodeWeight) -> WorkspaceSnapshotResult { Ok(self.working_copy()?.add_node(node)?) 
} @@ -175,12 +171,12 @@ impl WorkspaceSnapshot { pub async fn detect_conflicts_and_updates( &mut self, to_rebase_vector_clock_id: VectorClockId, - onto_workspace_snapshot: &WorkspaceSnapshot, + onto_workspace_snapshot: &mut WorkspaceSnapshot, onto_vector_clock_id: VectorClockId, ) -> WorkspaceSnapshotResult<(Vec, Vec)> { Ok(self.working_copy()?.detect_conflicts_and_updates( to_rebase_vector_clock_id, - &onto_workspace_snapshot.snapshot()?, + onto_workspace_snapshot.working_copy()?, onto_vector_clock_id, )?) } diff --git a/lib/rebaser-server/src/server/change_set_loop.rs b/lib/rebaser-server/src/server/change_set_loop.rs index 781f7a302b..eeb42c5c3f 100644 --- a/lib/rebaser-server/src/server/change_set_loop.rs +++ b/lib/rebaser-server/src/server/change_set_loop.rs @@ -124,12 +124,13 @@ async fn process_delivery( let onto_workspace_snapshot_id = onto_change_set.workspace_snapshot_id.ok_or( ChangeSetLoopError::MissingWorkspaceSnapshotForChangeSet(onto_change_set.id), )?; - let onto_workspace_snapshot = WorkspaceSnapshot::find(ctx, onto_workspace_snapshot_id).await?; + let mut onto_workspace_snapshot = + WorkspaceSnapshot::find(ctx, onto_workspace_snapshot_id).await?; let (conflicts, updates) = to_rebase_workspace_snapshot .detect_conflicts_and_updates( message.to_rebase_vector_clock_id.into(), - &onto_workspace_snapshot, + &mut onto_workspace_snapshot, onto_change_set.vector_clock_id(), ) .await?; diff --git a/lib/si-cbor/BUCK b/lib/si-cbor/BUCK new file mode 100644 index 0000000000..f6574e8e2e --- /dev/null +++ b/lib/si-cbor/BUCK @@ -0,0 +1,12 @@ +load("@prelude-si//:macros.bzl", "rust_library") + +rust_library( + name = "si-cbor", + deps = [ + "//third-party/rust:ciborium", + "//third-party/rust:remain", + "//third-party/rust:serde", + "//third-party/rust:thiserror", + ], + srcs = glob(["src/**/*.rs"]), +) diff --git a/lib/si-cbor/Cargo.toml b/lib/si-cbor/Cargo.toml new file mode 100644 index 0000000000..f98842bd81 --- /dev/null +++ b/lib/si-cbor/Cargo.toml @@ -0,0 
+1,11 @@ +[package] +name = "si-cbor" +version = "0.1.0" +edition = "2021" +publish = false + +[dependencies] +ciborium = { workspace = true } +remain = { workspace = true } +serde = { workspace = true } +thiserror = { workspace = true } \ No newline at end of file diff --git a/lib/si-cbor/src/lib.rs b/lib/si-cbor/src/lib.rs new file mode 100644 index 0000000000..84657cd14e --- /dev/null +++ b/lib/si-cbor/src/lib.rs @@ -0,0 +1,74 @@ +//! This library provides the ability to encode (serialize) and decode (deserialize) +//! [CBOR](https://en.wikipedia.org/wiki/CBOR) objects. + +#![warn( + missing_debug_implementations, + missing_docs, + unreachable_pub, + bad_style, + dead_code, + improper_ctypes, + non_shorthand_field_patterns, + no_mangle_generic_items, + overflowing_literals, + path_statements, + patterns_in_fns_without_body, + unconditional_recursion, + unused, + unused_allocation, + unused_comparisons, + unused_parens, + while_true, + clippy::missing_panics_doc +)] + +use serde::de::DeserializeOwned; +use serde::Serialize; +use std::io::BufReader; +use thiserror::Error; + +#[allow(missing_docs)] +#[remain::sorted] +#[derive(Error, Debug)] +pub enum CborError { + #[error("ciborium deserialization error: {0}")] + CiboriumDeserialization(#[from] ciborium::de::Error), + #[error("ciborium serialization error: {0}")] + CiboriumSerialization(#[from] ciborium::ser::Error), +} + +type CborResult = Result; + +/// Serialize the given value to CBOR. +pub fn encode(value: &T) -> CborResult> +where + T: Serialize + ?Sized, +{ + let mut encoded = Vec::new(); + ciborium::into_writer(value, &mut encoded)?; + Ok(encoded) +} + +/// Deserialize from CBOR to a provided type. +pub fn decode(value: &[u8]) -> CborResult +where + T: DeserializeOwned, +{ + let reader = BufReader::new(value); + Ok(ciborium::from_reader(reader)?) 
+} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn string() { + let original = "mybrainhurts"; + + let bytes = encode(original).expect("could not encode"); + let round_trip: String = decode(&bytes).expect("could not decode"); + + assert_eq!(original, round_trip.as_str()); + } +} From 9012a9bf277f10b415a7de7fe304b8e2f8c9dd84 Mon Sep 17 00:00:00 2001 From: Nick Gerace Date: Fri, 6 Oct 2023 15:45:26 -0400 Subject: [PATCH 33/92] Bail on conflicts in rebaser-server Bail on conflicts in rebaser-server by returning a new variant of the ChangeSetReplyMessage. The new variant, ConflictsFound, indicates that while no functions failed, there was at least one conflict found. Therefore, no updates could be performed. Signed-off-by: Nick Gerace --- Cargo.lock | 1 + .../rebaser.rs | 7 +---- lib/rebaser-core/BUCK | 1 + lib/rebaser-core/Cargo.toml | 1 + lib/rebaser-core/src/lib.rs | 21 +++++++------ .../src/server/change_set_loop.rs | 30 +++++++++++-------- 6 files changed, 34 insertions(+), 27 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index da6c40998e..9304737065 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -4142,6 +4142,7 @@ name = "rebaser-core" version = "0.1.0" dependencies = [ "serde", + "serde_json", "ulid", ] diff --git a/lib/dal/tests/integration_test/internal/mostly_everything_is_a_node_or_an_edge/rebaser.rs b/lib/dal/tests/integration_test/internal/mostly_everything_is_a_node_or_an_edge/rebaser.rs index 4a0215f4f4..a3df79554e 100644 --- a/lib/dal/tests/integration_test/internal/mostly_everything_is_a_node_or_an_edge/rebaser.rs +++ b/lib/dal/tests/integration_test/internal/mostly_everything_is_a_node_or_an_edge/rebaser.rs @@ -89,12 +89,7 @@ async fn simple_rebase(ctx: &mut DalContext) { .expect("could not send"); // TODO(nick): do something useful with this. 
- match response { - ChangeSetReplyMessage::Success { results } => { - dbg!(results); - } - ChangeSetReplyMessage::Failure { error } => panic!("{}", error), - } + dbg!(response); // TODO(nick): move cleanup to the test harness. let _ = client diff --git a/lib/rebaser-core/BUCK b/lib/rebaser-core/BUCK index a0bb494027..3ff014b557 100644 --- a/lib/rebaser-core/BUCK +++ b/lib/rebaser-core/BUCK @@ -4,6 +4,7 @@ rust_library( name = "rebaser-core", deps = [ "//third-party/rust:serde", + "//third-party/rust:serde_json", "//third-party/rust:ulid", ], srcs = glob([ diff --git a/lib/rebaser-core/Cargo.toml b/lib/rebaser-core/Cargo.toml index 2733580632..4a4cbf5dd2 100644 --- a/lib/rebaser-core/Cargo.toml +++ b/lib/rebaser-core/Cargo.toml @@ -6,4 +6,5 @@ publish = false [dependencies] serde = { workspace = true } +serde_json = { workspace = true } ulid = { workspace = true } diff --git a/lib/rebaser-core/src/lib.rs b/lib/rebaser-core/src/lib.rs index 79af401850..799262cadf 100644 --- a/lib/rebaser-core/src/lib.rs +++ b/lib/rebaser-core/src/lib.rs @@ -27,6 +27,7 @@ use serde::Deserialize; use serde::Serialize; +use serde_json::Value; use ulid::Ulid; /// Stream to manage rebaser consumer loops. @@ -68,14 +69,16 @@ pub struct ChangeSetMessage { /// The message shape that the rebaser change set loop will use for replying to the client. #[derive(Debug, Serialize, Deserialize)] pub enum ChangeSetReplyMessage { - /// Processing the delivery was a success. - Success { - /// The results of processing the delivery. - results: String, - }, - /// Processing the delivery was a failure. - Failure { - /// The error encountered when processing the delivery. - error: String, + /// Updates performed when processing the delivery. + Success(Value), + /// Conflicts found when processing the delivery. + ConflictsFound { + /// A serialized list of the conflicts found during detection. 
+ conflicts_found: Value, + /// A serialized list of the updates found during detection and skipped because at least + /// once conflict was found. + updates_found_and_skipped: Value, }, + /// Error encountered when processing the delivery. + Error(String), } diff --git a/lib/rebaser-server/src/server/change_set_loop.rs b/lib/rebaser-server/src/server/change_set_loop.rs index eeb42c5c3f..6a96c5efad 100644 --- a/lib/rebaser-server/src/server/change_set_loop.rs +++ b/lib/rebaser-server/src/server/change_set_loop.rs @@ -80,12 +80,7 @@ async fn process_delivery_infallible_wrapper( match Producer::for_reply(&environment, inbound_stream, reply_to).await { Ok(mut producer) => { if let Err(err) = producer - .send_single( - ChangeSetReplyMessage::Failure { - error: format!("{err}"), - }, - None, - ) + .send_single(ChangeSetReplyMessage::Error(err.to_string()), None) .await { error!(error = ?err, "sending reply failed"); @@ -118,6 +113,7 @@ async fn process_delivery( }; let message: ChangeSetMessage = serde_json::from_value(raw_message.clone())?; + // Gather everything we need to detect conflicts and updates from the inbound message. let mut to_rebase_workspace_snapshot: WorkspaceSnapshot = WorkspaceSnapshot::find(ctx, message.to_rebase_workspace_snapshot_id.into()).await?; let onto_change_set = ChangeSetPointer::find(ctx, message.onto_change_set_id.into()).await?; @@ -127,6 +123,7 @@ async fn process_delivery( let mut onto_workspace_snapshot = WorkspaceSnapshot::find(ctx, onto_workspace_snapshot_id).await?; + // Perform the conflicts and updates detection. let (conflicts, updates) = to_rebase_workspace_snapshot .detect_conflicts_and_updates( message.to_rebase_vector_clock_id.into(), @@ -135,13 +132,22 @@ async fn process_delivery( ) .await?; - // TODO(nick): for now, just send back the conflicts and updates. We'll need to do something - // with those updates later. 
- let serialized = serde_json::to_value(ChangeSetReplyMessage::Success { - results: format!("{:?} {:?}", conflicts, updates), - })?; + // If there are conflicts, immediately assemble a reply message that conflicts were found. + // Otherwise, we can perform updates and assemble a "success" reply message. + let message: ChangeSetReplyMessage = if conflicts.is_empty() { + ChangeSetReplyMessage::ConflictsFound { + conflicts_found: serde_json::to_value(conflicts)?, + updates_found_and_skipped: serde_json::to_value(updates)?, + } + } else { + // TODO(nick): actually perform updates. + ChangeSetReplyMessage::Success(serde_json::to_value(updates)?) + }; + let mut producer = Producer::for_reply(&environment, inbound_stream, reply_to).await?; - producer.send_single(serialized, None).await?; + producer + .send_single(serde_json::to_value(message)?, None) + .await?; producer.close().await?; Ok(()) From cf1bc2563dec6040c190047d68fbce82ac663a73 Mon Sep 17 00:00:00 2001 From: Nick Gerace Date: Fri, 6 Oct 2023 16:00:09 -0400 Subject: [PATCH 34/92] Perform updates in the rebaser This commit adds the ability for the rebaser to perform the updates it finds. Two new tests have been added: one showcasing a "fork" rebasing flow including a simple conflict. The other test is contained within WorkspaceSnapshotGraph and helped fix a bug found while writing the former test: when "to_rebase" has never seen anything from the "onto" change set, all items are new. Before this commit, those updates were not detected. 
Primary dal changes: - Add ability to update content, import subgraph, remove edge by EdgeIndex (only when performing updates), replace references, get node weight, find equivalent node, print "dot" output to stdout, get node index by id, and add edge from root for WorkspaceSnapshot - It is likely that many of these methods will not last the test of time and exist mainly for the commit's integration test - Replace ChangeSetPointer function parameters with VectorClockId where appropriate (continuation of prior work with VectorClockId) - Re-export NodeIndex for use outside of WorkspaceSnapshotGraph - This should be rarely used and we may be able to abstract it away in the future - Add ability to get the root NodeIndex, find an equivalent node, and remove edge by EdgeIndex for WorkspaceSnapshotGraph Primary rebaser changes: - Renamed client methods to closer match intent - Centralize stream name generation into core - Add debug logging to both the client and server to aid in debugging timeouts - Expand ChangeSetReplyMessage options to aid in testing and debugging in production - Move DalContext building into the listener loop rather than re-using the same DalContext for every delivery - Convert Producer from "Dedup" to "Nodedup" since a Producer can be re-created for the same stream and process - Without this change, you could produce a message to a stream, see that it would not appear as "ready" in the RabbitMQ Dashboard and observe that no error or failure occurred (read: this was a pain in the ass) - From this change, the number of constructors have been reduced from two to one Signed-off-by: Nick Gerace Co-authored-by: Jacob Helwig --- Cargo.lock | 1 + lib/dal/src/change_set_pointer.rs | 7 +- lib/dal/src/workspace_snapshot.rs | 126 +++++++++- lib/dal/src/workspace_snapshot/conflict.rs | 1 - lib/dal/src/workspace_snapshot/edge_weight.rs | 8 +- lib/dal/src/workspace_snapshot/graph.rs | 223 +++++++++++++++--- lib/dal/src/workspace_snapshot/node_weight.rs | 11 +- 
.../node_weight/content_node_weight.rs | 11 +- .../node_weight/ordering_node_weight.rs | 9 +- .../node_weight/prop_node_weight.rs | 29 +-- lib/dal/src/workspace_snapshot/update.rs | 14 +- .../mostly_everything_is_a_node_or_an_edge.rs | 1 + .../change_set.rs | 13 + .../content_store.rs | 2 - .../rebaser.rs | 201 +++++++++++++--- lib/rebaser-client/Cargo.toml | 1 + lib/rebaser-client/src/client.rs | 71 +++--- lib/rebaser-client/src/lib.rs | 5 +- lib/rebaser-core/src/lib.rs | 55 ++++- .../src/server/change_set_loop.rs | 168 +++++++++++-- .../src/server/management_loop.rs | 14 +- lib/si-rabbitmq/src/lib.rs | 2 +- lib/si-rabbitmq/src/producer.rs | 27 +-- 23 files changed, 774 insertions(+), 226 deletions(-) create mode 100644 lib/dal/tests/integration_test/internal/mostly_everything_is_a_node_or_an_edge/change_set.rs diff --git a/Cargo.lock b/Cargo.lock index 9304737065..097df33512 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -4126,6 +4126,7 @@ dependencies = [ name = "rebaser-client" version = "0.1.0" dependencies = [ + "log", "rebaser-core", "remain", "serde", diff --git a/lib/dal/src/change_set_pointer.rs b/lib/dal/src/change_set_pointer.rs index 7676388161..737827b3c9 100644 --- a/lib/dal/src/change_set_pointer.rs +++ b/lib/dal/src/change_set_pointer.rs @@ -86,13 +86,12 @@ impl ChangeSetPointer { &[&name], ) .await?; - Ok(Self::try_from(row)?) + Self::try_from(row) } /// Create a [`VectorClockId`] from the [`ChangeSetPointer`]. pub fn vector_clock_id(&self) -> VectorClockId { - let ulid: Ulid = self.id.into(); - VectorClockId::from(ulid) + VectorClockId::from(Ulid::from(self.id)) } pub fn generate_ulid(&self) -> ChangeSetPointerResult { @@ -134,7 +133,7 @@ impl ChangeSetPointer { &[&change_set_pointer_id], ) .await?; - Ok(Self::try_from(row)?) 
+ Self::try_from(row) } } diff --git a/lib/dal/src/workspace_snapshot.rs b/lib/dal/src/workspace_snapshot.rs index cd6c626d2c..ce655d7ea6 100644 --- a/lib/dal/src/workspace_snapshot.rs +++ b/lib/dal/src/workspace_snapshot.rs @@ -31,16 +31,19 @@ pub mod update; pub mod vector_clock; use chrono::{DateTime, Utc}; +use content_store::ContentHash; use petgraph::prelude::*; use serde::{Deserialize, Serialize}; use si_cbor::CborError; use si_data_pg::{PgError, PgRow}; +use std::collections::HashMap; use telemetry::prelude::*; use thiserror::Error; +use ulid::Ulid; use crate::change_set_pointer::{ChangeSetPointer, ChangeSetPointerError, ChangeSetPointerId}; use crate::workspace_snapshot::conflict::Conflict; -use crate::workspace_snapshot::edge_weight::EdgeWeight; +use crate::workspace_snapshot::edge_weight::{EdgeWeight, EdgeWeightError, EdgeWeightKind}; use crate::workspace_snapshot::node_weight::NodeWeight; use crate::workspace_snapshot::update::Update; use crate::workspace_snapshot::vector_clock::VectorClockId; @@ -58,6 +61,10 @@ const FIND_FOR_CHANGE_SET: &str = pub enum WorkspaceSnapshotError { #[error("cbor error: {0}")] Cbor(#[from] CborError), + #[error("change set pointer error: {0}")] + ChangeSetPointer(#[from] ChangeSetPointerError), + #[error("edge weight error: {0}")] + EdgeWeight(#[from] EdgeWeightError), #[error("monotonic error: {0}")] Monotonic(#[from] ulid::MonotonicError), #[error("NodeWeight error: {0}")] @@ -106,18 +113,29 @@ impl WorkspaceSnapshot { change_set: &ChangeSetPointer, ) -> WorkspaceSnapshotResult { let snapshot = WorkspaceSnapshotGraph::new(change_set)?; - Ok(Self::new_inner(ctx, snapshot).await?) + Self::new_inner(ctx, snapshot).await } - pub async fn write(&mut self, ctx: &DalContext) -> WorkspaceSnapshotResult<()> { + pub async fn write( + &mut self, + ctx: &DalContext, + vector_clock_id: VectorClockId, + ) -> WorkspaceSnapshotResult<()> { + // Pull out the working copy and clean it up. 
let working_copy = self.working_copy()?; working_copy.cleanup(); + // Mark everything left as seen. + working_copy.mark_graph_seen(vector_clock_id)?; + + // Stamp the new workspace snapshot. let object = Self::new_inner(ctx, working_copy.clone()).await?; + // Reset relevant fields on self. self.id = object.id; self.created_at = object.created_at; self.snapshot = object.snapshot; + self.working_copy = None; Ok(()) } @@ -153,8 +171,26 @@ impl WorkspaceSnapshot { .ok_or(WorkspaceSnapshotError::WorkspaceSnapshotGraphMissing) } + pub fn root(&mut self) -> WorkspaceSnapshotResult { + Ok(self.working_copy()?.root()) + } + + // TODO(nick): replace this with the api. pub fn add_node(&mut self, node: NodeWeight) -> WorkspaceSnapshotResult { - Ok(self.working_copy()?.add_node(node)?) + let new_node_index = self.working_copy()?.add_node(node)?; + Ok(new_node_index) + } + + // TODO(nick): replace this with the api. + pub fn update_content( + &mut self, + change_set: &ChangeSetPointer, + id: Ulid, + new_content_hash: ContentHash, + ) -> WorkspaceSnapshotResult<()> { + Ok(self + .working_copy()? + .update_content(change_set, id, new_content_hash)?) } pub fn add_edge( @@ -181,6 +217,84 @@ impl WorkspaceSnapshot { )?) } + pub fn remove_edge_for_update_stableish( + &mut self, + edge_index: EdgeIndex, + ) -> WorkspaceSnapshotResult<()> { + Ok(self + .working_copy()? + .remove_edge_for_update_stableish(edge_index)?) + } + + pub fn get_edge_by_index_stableish( + &mut self, + edge_index: EdgeIndex, + ) -> WorkspaceSnapshotResult { + Ok(self + .working_copy()? + .get_edge_by_index_stableish(edge_index)?) + } + + pub fn import_subgraph( + &mut self, + other: &mut Self, + root_index: NodeIndex, + ) -> WorkspaceSnapshotResult> { + let updated_indices = self + .working_copy()? 
+ .import_subgraph(other.working_copy()?, root_index)?; + Ok(updated_indices) + } + + pub fn replace_references( + &mut self, + original_node_index: NodeIndex, + new_node_index: NodeIndex, + ) -> WorkspaceSnapshotResult<()> { + Ok(self + .working_copy()? + .replace_references(original_node_index, new_node_index)?) + } + + pub fn get_node_weight( + &mut self, + node_index: NodeIndex, + ) -> WorkspaceSnapshotResult<&NodeWeight> { + Ok(self.working_copy()?.get_node_weight(node_index)?) + } + + pub fn find_equivalent_node( + &mut self, + id: Ulid, + lineage_id: Ulid, + ) -> WorkspaceSnapshotResult> { + Ok(self.working_copy()?.find_equivalent_node(id, lineage_id)?) + } + + pub fn dot(&mut self) { + self.working_copy() + .expect("failed on accessing or creating a working copy") + .dot(); + } + + pub fn get_node_index_by_id(&mut self, id: Ulid) -> WorkspaceSnapshotResult { + Ok(self.working_copy()?.get_node_index_by_id(id)?) + } + + pub fn add_edge_from_root( + &mut self, + change_set: &ChangeSetPointer, + destination: NodeIndex, + ) -> WorkspaceSnapshotResult { + let root = self.working_copy()?.root(); + let new_edge = self.working_copy()?.add_edge( + root, + EdgeWeight::new(change_set, EdgeWeightKind::Use)?, + destination, + )?; + Ok(new_edge) + } + #[instrument(skip_all)] pub async fn find( ctx: &DalContext, @@ -195,7 +309,7 @@ impl WorkspaceSnapshot { &[&workspace_snapshot_id], ) .await?; - Ok(Self::try_from(row)?) + Self::try_from(row) } #[instrument(skip_all)] @@ -209,6 +323,6 @@ impl WorkspaceSnapshot { .pg() .query_one(FIND_FOR_CHANGE_SET, &[&change_set_pointer_id]) .await?; - Ok(Self::try_from(row)?) 
+ Self::try_from(row) } } diff --git a/lib/dal/src/workspace_snapshot/conflict.rs b/lib/dal/src/workspace_snapshot/conflict.rs index dbf71b52fa..659175f76e 100644 --- a/lib/dal/src/workspace_snapshot/conflict.rs +++ b/lib/dal/src/workspace_snapshot/conflict.rs @@ -7,7 +7,6 @@ use serde::Serialize; #[remain::sorted] #[derive(Debug, Copy, Clone, PartialEq, Eq, Deserialize, Serialize)] pub enum Conflict { - // TODO(nick,jacob): this variant will not be possible until ordering is in place. ChildOrder { onto: NodeIndex, to_rebase: NodeIndex, diff --git a/lib/dal/src/workspace_snapshot/edge_weight.rs b/lib/dal/src/workspace_snapshot/edge_weight.rs index 0f25d78f3f..f052a8054b 100644 --- a/lib/dal/src/workspace_snapshot/edge_weight.rs +++ b/lib/dal/src/workspace_snapshot/edge_weight.rs @@ -5,7 +5,7 @@ use serde::{Deserialize, Serialize}; use thiserror::Error; use crate::change_set_pointer::ChangeSetPointer; -use crate::workspace_snapshot::vector_clock::{VectorClock, VectorClockError}; +use crate::workspace_snapshot::vector_clock::{VectorClock, VectorClockError, VectorClockId}; #[derive(Debug, Error)] pub enum EdgeWeightError { @@ -61,14 +61,14 @@ impl EdgeWeight { &self.kind } - pub fn mark_seen_at(&mut self, change_set: &ChangeSetPointer, seen_at: DateTime) { + pub fn mark_seen_at(&mut self, vector_clock_id: VectorClockId, seen_at: DateTime) { if self .vector_clock_first_seen - .entry_for(change_set.vector_clock_id()) + .entry_for(vector_clock_id) .is_none() { self.vector_clock_first_seen - .inc_to(change_set.vector_clock_id(), seen_at); + .inc_to(vector_clock_id, seen_at); } } diff --git a/lib/dal/src/workspace_snapshot/graph.rs b/lib/dal/src/workspace_snapshot/graph.rs index f0310d861c..ce06b2d393 100644 --- a/lib/dal/src/workspace_snapshot/graph.rs +++ b/lib/dal/src/workspace_snapshot/graph.rs @@ -17,6 +17,9 @@ use crate::workspace_snapshot::{ update::Update, }; +/// Ensure [`NodeIndex`] is usable by external crates. 
+pub use petgraph::graph::NodeIndex; + pub type LineageId = Ulid; #[allow(clippy::large_enum_variant)] @@ -33,6 +36,8 @@ pub enum WorkspaceSnapshotGraphError { ContentStore(#[from] StoreError), #[error("Action would create a graph cycle")] CreateGraphCycle, + #[error("Edge does not exist for EdgeIndex: {0:?}")] + EdgeDoesNotExist(EdgeIndex), #[error("EdgeWeight error: {0}")] EdgeWeight(#[from] EdgeWeightError), #[error("EdgeWeight not found")] @@ -47,6 +52,8 @@ pub enum WorkspaceSnapshotGraphError { NodeWeight(#[from] NodeWeightError), #[error("node weight not found")] NodeWeightNotFound, + #[error("node weight not found by node index: {0:?}")] + NodeWeightNotFoundByNodeIndex(NodeIndex), #[error("Node with ID {} not found", .0.to_string())] NodeWithIdNotFound(Ulid), #[error("No Prop found for NodeIndex {0:?}")] @@ -92,10 +99,14 @@ impl WorkspaceSnapshotGraph { Ok(Self { root_index, graph }) } + pub fn root(&self) -> NodeIndex { + self.root_index + } + pub fn add_edge( &mut self, from_node_index: NodeIndex, - mut edge_weight: EdgeWeight, + edge_weight: EdgeWeight, to_node_index: NodeIndex, ) -> WorkspaceSnapshotGraphResult { // Temporarily add the edge to the existing tree to see if it would create a cycle. 
@@ -180,6 +191,7 @@ impl WorkspaceSnapshotGraph { Ok(new_edge_index) } + #[allow(dead_code)] fn add_ordered_node( &mut self, change_set: &ChangeSetPointer, @@ -378,6 +390,24 @@ impl WorkspaceSnapshotGraph { }); } + pub fn find_equivalent_node( + &self, + id: Ulid, + lineage_id: Ulid, + ) -> WorkspaceSnapshotGraphResult> { + // This looks a bit clunky and could be improved with the following issue resolved: + // https://github.com/petgraph/petgraph/issues/577 + for node_index in self.graph.node_indices() { + let node_weight = self.graph.node_weight(node_index).ok_or( + WorkspaceSnapshotGraphError::NodeWeightNotFoundByNodeIndex(node_index), + )?; + if id == node_weight.id() && lineage_id == node_weight.lineage_id() { + return Ok(Some(node_index)); + } + } + Ok(None) + } + fn copy_node_index( &mut self, node_index_to_copy: NodeIndex, @@ -509,8 +539,8 @@ impl WorkspaceSnapshotGraph { // `to_rebase`. There is no conflict, and we should update to use the // `onto` node. updates.push(Update::ReplaceSubgraph { - new: onto_node_index, - old: to_rebase_node_index, + onto: onto_node_index, + to_rebase: to_rebase_node_index, }); } else { // There are changes on both sides that have not been seen by the other @@ -627,7 +657,8 @@ impl WorkspaceSnapshotGraph { } } - fn dot(&self) { + #[allow(dead_code)] + pub fn dot(&self) { // NOTE(nick): copy the output and execute this on macOS. It will create a file in the // process and open a new tab in your browser. // ``` @@ -726,8 +757,8 @@ impl WorkspaceSnapshotGraph { // Use the ordering from `other` in `to_rebase`. 
updates.push(Update::ReplaceSubgraph { - new: onto_ordering_index, - old: to_rebase_ordering_index, + onto: onto_ordering_index, + to_rebase: to_rebase_ordering_index, }); } else if to_rebase_ordering .vector_clock_write() @@ -1008,10 +1039,13 @@ impl WorkspaceSnapshotGraph { .vector_clock_first_seen() .entry_for(onto_vector_clock_id) { - if let Some(root_seen_as_of) = root_seen_as_of_onto { - if onto_first_seen > root_seen_as_of { + match root_seen_as_of_onto { + Some(root_seen_as_of) if onto_first_seen <= root_seen_as_of => {} + _ => { // Edge first seen by `onto` > "seen as of" on `to_rebase` graph for `onto`'s entry on // root node: Item is new. + // Other case where item is new: the `to_rebase` has never seen anything from + // the `onto` change set. All the items are new. updates.push(Update::NewEdge { source: to_rebase_container_index, destination: only_onto_edge_info.target_node_index, @@ -1039,7 +1073,7 @@ impl WorkspaceSnapshotGraph { Ok((conflicts, updates)) } - fn get_node_index_by_id(&self, id: Ulid) -> WorkspaceSnapshotGraphResult { + pub(crate) fn get_node_index_by_id(&self, id: Ulid) -> WorkspaceSnapshotGraphResult { for node_index in self.graph.node_indices() { // It's possible that there are multiple nodes in the petgraph that have the // same ID as the one we're interested in, as we may not yet have cleaned up @@ -1074,7 +1108,10 @@ impl WorkspaceSnapshotGraph { Ok(results) } - fn get_node_weight(&self, node_index: NodeIndex) -> WorkspaceSnapshotGraphResult<&NodeWeight> { + pub fn get_node_weight( + &self, + node_index: NodeIndex, + ) -> WorkspaceSnapshotGraphResult<&NodeWeight> { self.graph .node_weight(node_index) .ok_or(WorkspaceSnapshotGraphError::NodeWeightNotFound) @@ -1093,11 +1130,11 @@ impl WorkspaceSnapshotGraph { algo::has_path_connecting(&self.graph, self.root_index, node, None) } - fn import_subgraph( + pub fn import_subgraph( &mut self, other: &WorkspaceSnapshotGraph, root_index: NodeIndex, - ) -> WorkspaceSnapshotGraphResult { + ) 
-> WorkspaceSnapshotGraphResult> { let mut new_node_indexes = HashMap::new(); let mut dfs = petgraph::visit::DfsPostOrder::new(&other.graph, root_index); while let Some(node_index_to_copy) = dfs.next(&other.graph) { @@ -1116,11 +1153,7 @@ impl WorkspaceSnapshotGraph { ); } } - - new_node_indexes - .get(&root_index) - .copied() - .ok_or(WorkspaceSnapshotGraphError::NodeWeightNotFound) + Ok(new_node_indexes) } fn is_acyclic_directed(&self) -> bool { @@ -1135,14 +1168,14 @@ impl WorkspaceSnapshotGraph { pub fn mark_graph_seen( &mut self, - change_set: &ChangeSetPointer, + vector_clock_id: VectorClockId, ) -> WorkspaceSnapshotGraphResult<()> { let seen_at = Utc::now(); for edge in self.graph.edge_weights_mut() { - edge.mark_seen_at(change_set, seen_at.clone()); + edge.mark_seen_at(vector_clock_id, seen_at.clone()); } for node in self.graph.node_weights_mut() { - node.mark_seen_at(change_set, seen_at.clone()); + node.mark_seen_at(vector_clock_id, seen_at); } Ok(()) @@ -1224,7 +1257,8 @@ impl WorkspaceSnapshotGraph { /// are **NO** guarantees around the stability of [`EdgeIndex`] across removals. If /// [`Self::cleanup()`] has been called, then any [`EdgeIndex`] found before /// [`Self::cleanup()`] has run should be considered invalid. 
- fn remove_edge( + #[allow(dead_code)] + pub(crate) fn remove_edge( &mut self, change_set: &ChangeSetPointer, source_node_index: NodeIndex, @@ -1290,7 +1324,28 @@ impl WorkspaceSnapshotGraph { Ok(()) } - fn replace_references( + pub(crate) fn remove_edge_for_update_stableish( + &mut self, + edge_index: EdgeIndex, + ) -> WorkspaceSnapshotGraphResult<()> { + let _ = self + .graph + .remove_edge(edge_index) + .ok_or(WorkspaceSnapshotGraphError::EdgeDoesNotExist(edge_index))?; + Ok(()) + } + + pub(crate) fn get_edge_by_index_stableish( + &mut self, + edge_index: EdgeIndex, + ) -> WorkspaceSnapshotGraphResult { + self.graph + .edge_weight(edge_index) + .cloned() + .ok_or(WorkspaceSnapshotGraphError::EdgeDoesNotExist(edge_index)) + } + + pub fn replace_references( &mut self, original_node_index: NodeIndex, new_node_index: NodeIndex, @@ -2183,6 +2238,104 @@ mod test { } } + #[test] + fn detect_conflicts_and_updates_with_purely_new_content_in_new_graph() { + let initial_change_set = ChangeSetPointer::new_local().expect("Unable to create ChangeSet"); + let base_change_set = &initial_change_set; + let mut base_graph = WorkspaceSnapshotGraph::new(base_change_set) + .expect("Unable to create WorkspaceSnapshotGraph"); + + let component_id = base_change_set + .generate_ulid() + .expect("Unable to generate Ulid"); + let component_index = base_graph + .add_node( + NodeWeight::new_content( + base_change_set, + component_id, + ContentAddress::Component(ContentHash::from("Component A")), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add Schema A"); + base_graph + .add_edge( + base_graph.root_index, + EdgeWeight::new(base_change_set, EdgeWeightKind::Use) + .expect("Unable to create EdgeWeight"), + component_index, + ) + .expect("Unable to add root -> component edge"); + + base_graph.cleanup(); + println!("Initial base graph (Root {:?}):", base_graph.root_index); + base_graph.dot(); + + let new_change_set = ChangeSetPointer::new_local().expect("Unable to 
create ChangeSet"); + let new_change_set = &new_change_set; + let mut new_graph = base_graph.clone(); + + let new_component_id = new_change_set + .generate_ulid() + .expect("Unable to generate Ulid"); + let new_component_index = new_graph + .add_node( + NodeWeight::new_content( + new_change_set, + new_component_id, + ContentAddress::Component(ContentHash::from("Component B")), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add Component B"); + new_graph + .add_edge( + new_graph.root_index, + EdgeWeight::new(new_change_set, EdgeWeightKind::Use) + .expect("Unable to create EdgeWeight"), + new_component_index, + ) + .expect("Unable to add root -> component edge"); + + new_graph.cleanup(); + println!("Updated new graph (Root: {:?}):", new_graph.root_index); + new_graph.dot(); + + let (conflicts, updates) = new_graph + .detect_conflicts_and_updates( + new_change_set.vector_clock_id(), + &base_graph, + base_change_set.vector_clock_id(), + ) + .expect("Unable to detect conflicts and updates"); + + assert!(updates.is_empty()); + assert!(conflicts.is_empty()); + + let (conflicts, updates) = base_graph + .detect_conflicts_and_updates( + base_change_set.vector_clock_id(), + &new_graph, + new_change_set.vector_clock_id(), + ) + .expect("Unable to detect conflicts and updates"); + + assert!(conflicts.is_empty()); + + match updates.as_slice() { + [Update::NewEdge { + source, + destination, + edge_weight, + }] => { + assert_eq!(base_graph.root_index, *source); + assert_eq!(new_component_index, *destination); + assert_eq!(&EdgeWeightKind::Use, edge_weight.kind()); + } + other => panic!("Unexpected updates: {:?}", other), + } + } + #[test] fn detect_conflicts_and_updates_simple_no_conflicts_with_updates_on_both_sides() { let initial_change_set = ChangeSetPointer::new_local().expect("Unable to create ChangeSet"); @@ -2916,10 +3069,10 @@ mod test { }, ]; let expected_updates = vec![Update::ReplaceSubgraph { - new: base_graph + onto: base_graph 
.get_node_index_by_id(docker_image_schema_id) .expect("Unable to get NodeIndex"), - old: new_graph + to_rebase: new_graph .get_node_index_by_id(docker_image_schema_id) .expect("Unable to get NodeIndex"), }]; @@ -4168,7 +4321,7 @@ mod test { edge_weight: new_edge_weight, }, Update::ReplaceSubgraph { - new: initial_graph + onto: initial_graph .ordering_node_index_for_container( initial_graph .get_node_index_by_id(container_prop_id) @@ -4176,7 +4329,7 @@ mod test { ) .expect("Unable to get new ordering NodeIndex") .expect("Ordering NodeIndex not found"), - old: new_graph + to_rebase: new_graph .ordering_node_index_for_container( new_graph .get_node_index_by_id(container_prop_id) @@ -4658,7 +4811,7 @@ mod test { initial_graph.cleanup(); initial_graph - .mark_graph_seen(initial_change_set) + .mark_graph_seen(initial_change_set.vector_clock_id()) .expect("Unable to update recently seen information"); // initial_graph.dot(); @@ -5028,7 +5181,7 @@ mod test { &mut content_store, graph .get_node_index_by_id(root_av_id) - .expect("Unable to get NodeIndex") + .expect("Unable to get NodeIndex"), ) .await .expect("Unable to generate attribute value view"), @@ -5397,7 +5550,7 @@ mod test { &mut content_store, graph .get_node_index_by_id(root_av_id) - .expect("Unable to get NodeIndex") + .expect("Unable to get NodeIndex"), ) .await .expect("Unable to generate attribute value view"), @@ -5886,7 +6039,7 @@ mod test { &mut content_store, graph .get_node_index_by_id(root_av_id) - .expect("Unable to get NodeIndex") + .expect("Unable to get NodeIndex"), ) .await .expect("Unable to generate attribute value view"), @@ -5912,7 +6065,7 @@ mod test { &mut content_store, graph .get_node_index_by_id(root_av_id) - .expect("Unable to get NodeIndex") + .expect("Unable to get NodeIndex"), ) .await .expect("Unable to generate attribute value view"), @@ -5973,7 +6126,7 @@ mod test { &mut content_store, graph .get_node_index_by_id(root_av_id) - .expect("Unable to get NodeIndex") + .expect("Unable to 
get NodeIndex"), ) .await .expect("Unable to generate attribute value view"), @@ -6474,7 +6627,7 @@ mod test { &mut content_store, graph .get_node_index_by_id(root_av_id) - .expect("Unable to get NodeIndex") + .expect("Unable to get NodeIndex"), ) .await .expect("Unable to generate attribute value view"), @@ -6505,7 +6658,7 @@ mod test { &mut content_store, graph .get_node_index_by_id(root_av_id) - .expect("Unable to get NodeIndex") + .expect("Unable to get NodeIndex"), ) .await .expect("Unable to generate attribute value view"), @@ -6569,7 +6722,7 @@ mod test { &mut content_store, graph .get_node_index_by_id(root_av_id) - .expect("Unable to get NodeIndex") + .expect("Unable to get NodeIndex"), ) .await .expect("Unable to generate attribute value view"), diff --git a/lib/dal/src/workspace_snapshot/node_weight.rs b/lib/dal/src/workspace_snapshot/node_weight.rs index 4b3242e078..97d53528b8 100644 --- a/lib/dal/src/workspace_snapshot/node_weight.rs +++ b/lib/dal/src/workspace_snapshot/node_weight.rs @@ -4,6 +4,7 @@ use serde::{Deserialize, Serialize}; use thiserror::Error; use ulid::Ulid; +use crate::workspace_snapshot::vector_clock::VectorClockId; use crate::{ change_set_pointer::{ChangeSetPointer, ChangeSetPointerError}, workspace_snapshot::{ @@ -88,13 +89,15 @@ impl NodeWeight { } } - pub fn mark_seen_at(&mut self, change_set: &ChangeSetPointer, seen_at: DateTime) { + pub fn mark_seen_at(&mut self, vector_clock_id: VectorClockId, seen_at: DateTime) { match self { - NodeWeight::Content(content_weight) => content_weight.mark_seen_at(change_set, seen_at), + NodeWeight::Content(content_weight) => { + content_weight.mark_seen_at(vector_clock_id, seen_at) + } NodeWeight::Ordering(ordering_weight) => { - ordering_weight.mark_seen_at(change_set, seen_at) + ordering_weight.mark_seen_at(vector_clock_id, seen_at) } - NodeWeight::Prop(prop_weight) => prop_weight.mark_seen_at(change_set, seen_at), + NodeWeight::Prop(prop_weight) => prop_weight.mark_seen_at(vector_clock_id, 
seen_at), } } diff --git a/lib/dal/src/workspace_snapshot/node_weight/content_node_weight.rs b/lib/dal/src/workspace_snapshot/node_weight/content_node_weight.rs index 8cf139efea..61a22bda24 100644 --- a/lib/dal/src/workspace_snapshot/node_weight/content_node_weight.rs +++ b/lib/dal/src/workspace_snapshot/node_weight/content_node_weight.rs @@ -3,6 +3,7 @@ use content_store::ContentHash; use serde::{Deserialize, Serialize}; use ulid::Ulid; +use crate::workspace_snapshot::vector_clock::VectorClockId; use crate::{ change_set_pointer::ChangeSetPointer, workspace_snapshot::{ @@ -83,16 +84,16 @@ impl ContentNodeWeight { self.lineage_id } - pub fn mark_seen_at(&mut self, change_set: &ChangeSetPointer, seen_at: DateTime) { + pub fn mark_seen_at(&mut self, vector_clock_id: VectorClockId, seen_at: DateTime) { self.vector_clock_recently_seen - .inc_to(change_set.vector_clock_id(), seen_at.clone()); + .inc_to(vector_clock_id, seen_at); if self .vector_clock_first_seen - .entry_for(change_set.vector_clock_id()) + .entry_for(vector_clock_id) .is_none() { self.vector_clock_first_seen - .inc_to(change_set.vector_clock_id(), seen_at); + .inc_to(vector_clock_id, seen_at); } } @@ -132,7 +133,7 @@ impl ContentNodeWeight { return Err(NodeWeightError::InvalidContentAddressForWeightKind( "Prop".to_string(), "Content".to_string(), - )) + )); } ContentAddress::Root => return Err(NodeWeightError::CannotUpdateRootNodeContentHash), ContentAddress::Schema(_) => ContentAddress::Schema(content_hash), diff --git a/lib/dal/src/workspace_snapshot/node_weight/ordering_node_weight.rs b/lib/dal/src/workspace_snapshot/node_weight/ordering_node_weight.rs index a57f04859b..c6e5dee80d 100644 --- a/lib/dal/src/workspace_snapshot/node_weight/ordering_node_weight.rs +++ b/lib/dal/src/workspace_snapshot/node_weight/ordering_node_weight.rs @@ -4,6 +4,7 @@ use serde::{Deserialize, Serialize}; use ulid::Ulid; use crate::change_set_pointer::ChangeSetPointer; +use 
crate::workspace_snapshot::vector_clock::VectorClockId; use crate::workspace_snapshot::{node_weight::NodeWeightResult, vector_clock::VectorClock}; #[derive(Clone, Serialize, Deserialize, Default)] @@ -51,16 +52,16 @@ impl OrderingNodeWeight { self.lineage_id } - pub fn mark_seen_at(&mut self, change_set: &ChangeSetPointer, seen_at: DateTime) { + pub fn mark_seen_at(&mut self, vector_clock_id: VectorClockId, seen_at: DateTime) { self.vector_clock_recently_seen - .inc_to(change_set.vector_clock_id(), seen_at.clone()); + .inc_to(vector_clock_id, seen_at); if self .vector_clock_first_seen - .entry_for(change_set.vector_clock_id()) + .entry_for(vector_clock_id) .is_none() { self.vector_clock_first_seen - .inc_to(change_set.vector_clock_id(), seen_at); + .inc_to(vector_clock_id, seen_at); } } diff --git a/lib/dal/src/workspace_snapshot/node_weight/prop_node_weight.rs b/lib/dal/src/workspace_snapshot/node_weight/prop_node_weight.rs index 7dbf46219f..665ed5e164 100644 --- a/lib/dal/src/workspace_snapshot/node_weight/prop_node_weight.rs +++ b/lib/dal/src/workspace_snapshot/node_weight/prop_node_weight.rs @@ -3,6 +3,7 @@ use content_store::ContentHash; use serde::{Deserialize, Serialize}; use ulid::Ulid; +use crate::workspace_snapshot::vector_clock::VectorClockId; use crate::{ change_set_pointer::ChangeSetPointer, workspace_snapshot::{ @@ -75,16 +76,16 @@ impl PropNodeWeight { self.lineage_id } - pub fn mark_seen_at(&mut self, change_set: &ChangeSetPointer, seen_at: DateTime) { + pub fn mark_seen_at(&mut self, vector_clock_id: VectorClockId, seen_at: DateTime) { self.vector_clock_recently_seen - .inc_to(change_set.vector_clock_id(), seen_at.clone()); + .inc_to(vector_clock_id, seen_at); if self .vector_clock_first_seen - .entry_for(change_set.vector_clock_id()) + .entry_for(vector_clock_id) .is_none() { self.vector_clock_first_seen - .inc_to(change_set.vector_clock_id(), seen_at); + .inc_to(vector_clock_id, seen_at); } } @@ -119,62 +120,62 @@ impl PropNodeWeight { return 
Err(NodeWeightError::InvalidContentAddressForWeightKind( "AttributePrototype".to_string(), "Prop".to_string(), - )) + )); } ContentAddress::AttributeValue(_) => { return Err(NodeWeightError::InvalidContentAddressForWeightKind( "AttributeValue".to_string(), "Prop".to_string(), - )) + )); } ContentAddress::Component(_) => { return Err(NodeWeightError::InvalidContentAddressForWeightKind( "Component".to_string(), "Prop".to_string(), - )) + )); } ContentAddress::ExternalProvider(_) => { return Err(NodeWeightError::InvalidContentAddressForWeightKind( "ExternalProvider".to_string(), "Prop".to_string(), - )) + )); } ContentAddress::Func(_) => { return Err(NodeWeightError::InvalidContentAddressForWeightKind( "Func".to_string(), "Prop".to_string(), - )) + )); } ContentAddress::FuncArg(_) => { return Err(NodeWeightError::InvalidContentAddressForWeightKind( "FuncArc".to_string(), "Prop".to_string(), - )) + )); } ContentAddress::InternalProvider(_) => { return Err(NodeWeightError::InvalidContentAddressForWeightKind( "InternalProvider".to_string(), "Prop".to_string(), - )) + )); } ContentAddress::Prop(_) => ContentAddress::Prop(content_hash), ContentAddress::Root => { return Err(NodeWeightError::InvalidContentAddressForWeightKind( "Root".to_string(), "Prop".to_string(), - )) + )); } ContentAddress::Schema(_) => { return Err(NodeWeightError::InvalidContentAddressForWeightKind( "Schema".to_string(), "Prop".to_string(), - )) + )); } ContentAddress::SchemaVariant(_) => { return Err(NodeWeightError::InvalidContentAddressForWeightKind( "SchemaVariant".to_string(), "Prop".to_string(), - )) + )); } }; diff --git a/lib/dal/src/workspace_snapshot/update.rs b/lib/dal/src/workspace_snapshot/update.rs index 73a73a5f5c..9f18592cb2 100644 --- a/lib/dal/src/workspace_snapshot/update.rs +++ b/lib/dal/src/workspace_snapshot/update.rs @@ -8,17 +8,17 @@ use serde::{Deserialize, Serialize}; pub enum Update { NewEdge { source: NodeIndex, + // Check if already exists in "onto" (source). 
Grab node weight from "to_rebase" + // (destination) and see if there is an equivalent node (id and lineage) in "onto". + // If not, use "import_subgraph". destination: NodeIndex, edge_weight: EdgeWeight, }, - NewSubgraph { - source: NodeIndex, - }, RemoveEdge(EdgeIndex), ReplaceSubgraph { - // "onto" - new: NodeIndex, - // "to_rebase" - old: NodeIndex, + onto: NodeIndex, + // Check if already exists in "onto". Grab node weight from "to_rebase" and see if there is + // an equivalent node (id and lineage) in "onto". If not, use "import_subgraph". + to_rebase: NodeIndex, }, } diff --git a/lib/dal/tests/integration_test/internal/mostly_everything_is_a_node_or_an_edge.rs b/lib/dal/tests/integration_test/internal/mostly_everything_is_a_node_or_an_edge.rs index 1f279e71bd..e984e92b8f 100644 --- a/lib/dal/tests/integration_test/internal/mostly_everything_is_a_node_or_an_edge.rs +++ b/lib/dal/tests/integration_test/internal/mostly_everything_is_a_node_or_an_edge.rs @@ -3,5 +3,6 @@ //! //! For all tests in this module, provide "SI_TEST_BUILTIN_SCHEMAS=none" as an environment variable. 
+mod change_set; mod content_store; mod rebaser; diff --git a/lib/dal/tests/integration_test/internal/mostly_everything_is_a_node_or_an_edge/change_set.rs b/lib/dal/tests/integration_test/internal/mostly_everything_is_a_node_or_an_edge/change_set.rs new file mode 100644 index 0000000000..442f8d0127 --- /dev/null +++ b/lib/dal/tests/integration_test/internal/mostly_everything_is_a_node_or_an_edge/change_set.rs @@ -0,0 +1,13 @@ +use dal::change_set_pointer::ChangeSetPointer; +use dal::DalContext; +use dal_test::test; +use ulid::Ulid; + +#[test] +async fn vector_clock_id(ctx: &mut DalContext) { + let change_set = ChangeSetPointer::new(ctx, "main") + .await + .expect("could not create change set"); + let vector_clock_id_as_ulid: Ulid = change_set.vector_clock_id().into(); + assert_eq!(change_set.id, vector_clock_id_as_ulid.into()); +} diff --git a/lib/dal/tests/integration_test/internal/mostly_everything_is_a_node_or_an_edge/content_store.rs b/lib/dal/tests/integration_test/internal/mostly_everything_is_a_node_or_an_edge/content_store.rs index cd910ac3a2..17f8e29888 100644 --- a/lib/dal/tests/integration_test/internal/mostly_everything_is_a_node_or_an_edge/content_store.rs +++ b/lib/dal/tests/integration_test/internal/mostly_everything_is_a_node_or_an_edge/content_store.rs @@ -1,5 +1,3 @@ -//! For all tests in this file, provide "SI_TEST_BUILTIN_SCHEMAS=none" as an environment variable. - use content_store::Store; use dal::component::ComponentKind; use dal::{DalContext, Schema}; diff --git a/lib/dal/tests/integration_test/internal/mostly_everything_is_a_node_or_an_edge/rebaser.rs b/lib/dal/tests/integration_test/internal/mostly_everything_is_a_node_or_an_edge/rebaser.rs index a3df79554e..0eb9ab68a1 100644 --- a/lib/dal/tests/integration_test/internal/mostly_everything_is_a_node_or_an_edge/rebaser.rs +++ b/lib/dal/tests/integration_test/internal/mostly_everything_is_a_node_or_an_edge/rebaser.rs @@ -1,22 +1,21 @@ -//! 
For all tests in this module, provide "SI_TEST_BUILTIN_SCHEMAS=none" as an environment variable. - use content_store::ContentHash; use dal::change_set_pointer::ChangeSetPointer; +use dal::workspace_snapshot::conflict::Conflict; use dal::workspace_snapshot::content_address::ContentAddress; use dal::workspace_snapshot::node_weight::NodeWeight; +use dal::workspace_snapshot::update::Update; use dal::{DalContext, Tenancy, Visibility, WorkspacePk, WorkspaceSnapshot}; use dal_test::test; +use pretty_assertions_sorted::assert_eq; use rebaser_client::Client; use rebaser_core::ChangeSetReplyMessage; #[test] -async fn simple_rebase(ctx: &mut DalContext) { +async fn pure_update_and_single_conflict(ctx: &mut DalContext) { ctx.update_visibility(Visibility::new_head(false)); ctx.update_tenancy(Tenancy::new(WorkspacePk::NONE)); - let ctx = &ctx; - - let mut client = Client::new().await.expect("could not build client"); + // Start with the base change set and the initial snapshot. let mut base_change_set = ChangeSetPointer::new(ctx, "main") .await .expect("could not create change set"); @@ -25,74 +24,204 @@ async fn simple_rebase(ctx: &mut DalContext) { .await .expect("could not create workspace snapshot"); - // Add a new node. - snapshot + // Add a new node, write and update the pointer. 
+ let olivia_rodrigo_id = base_change_set + .generate_ulid() + .expect("could not generate id"); + let olivia_rodrigo_node_index = snapshot .add_node( NodeWeight::new_content( base_change_set, - base_change_set - .generate_ulid() - .expect("cannot generate ulid"), - ContentAddress::Schema(ContentHash::from("lacy - olivia rodrigo")), + olivia_rodrigo_id, + ContentAddress::Component(ContentHash::from("lacy - olivia rodrigo")), ) .expect("could not create node weight"), ) .expect("could not add node"); - - snapshot.write(ctx).await.expect("could not write snapshot"); + snapshot + .add_edge_from_root(base_change_set, olivia_rodrigo_node_index) + .expect("could not add edge"); + snapshot + .write(ctx, base_change_set.vector_clock_id()) + .await + .expect("could not write snapshot"); base_change_set .update_pointer(ctx, snapshot.id()) .await - .expect("could not update pointer"); + .expect("could not update change set"); - // Create another change set and update. + // Create another change set and update the snapshot. 
let mut forked_change_set = ChangeSetPointer::new(ctx, "fork") .await .expect("could not create change set"); let forked_change_set = &mut forked_change_set; - snapshot + let mut forked_snapshot = WorkspaceSnapshot::find_for_change_set(ctx, base_change_set.id) + .await + .expect("could not find snapshot"); + let victoria_monet_id = forked_change_set + .generate_ulid() + .expect("could not generate id"); + let victoria_monet_node_index = forked_snapshot .add_node( NodeWeight::new_content( forked_change_set, - forked_change_set - .generate_ulid() - .expect("cannot generate ulid"), - ContentAddress::Schema(ContentHash::from("i'm the one - victoria monét")), + victoria_monet_id, + ContentAddress::Component(ContentHash::from("i'm the one - victoria monét")), ) .expect("could not create node weight"), ) .expect("could not add node"); - snapshot.write(ctx).await.expect("could not write snapshot"); + let victoria_monet_edge_index = forked_snapshot + .add_edge_from_root(forked_change_set, victoria_monet_node_index) + .expect("could not add edge"); + forked_snapshot + .write(ctx, forked_change_set.vector_clock_id()) + .await + .expect("could not write snapshot"); forked_change_set - .update_pointer(ctx, snapshot.id()) + .update_pointer(ctx, forked_snapshot.id()) .await - .expect("could not update pointer"); + .expect("could not update change set"); - // Rebase! - let response = client - .send_management_open_change_set(base_change_set.id.into()) + // Commit all changes made so that the rebaser can access them. + ctx.blocking_commit().await.expect("could not commit"); + + // Create a rebaser client and open a change set loop. + let mut client = Client::new().await.expect("could not build client"); + let _ = client + .open_stream_for_change_set(base_change_set.id.into()) .await .expect("could not send management"); - // TODO(nick): do something useful with this. - dbg!(response); + // Cache expected updates and then perform a rebase. 
+ let expected_updates = [Update::NewEdge { + source: snapshot.root().expect("could not get root"), + destination: forked_snapshot + .get_node_index_by_id(victoria_monet_id) + .expect("could not get node index"), + edge_weight: forked_snapshot + .get_edge_by_index_stableish(victoria_monet_edge_index) + .expect("could not find edge by index"), + }]; + let response = client + .request_rebase( + base_change_set.id.into(), + forked_snapshot.id().into(), + forked_change_set.vector_clock_id().into(), + ) + .await + .expect("could not send"); + + // Ensure the rebase was successful and no updates needed to be performed. + match response { + ChangeSetReplyMessage::Success { updates_performed } => { + let actual_updates: Vec = + serde_json::from_value(updates_performed).expect("could not deserialize"); + assert_eq!( + &expected_updates, // expected + actual_updates.as_slice() // actual + ); + } + ChangeSetReplyMessage::ConflictsFound { + conflicts_found, + updates_found_and_skipped: _, + } => { + let conflicts: Vec = + serde_json::from_value(conflicts_found).expect("could not deserialize"); + panic!("unexpected conflicts: {conflicts:?}"); + } + ChangeSetReplyMessage::Error { message } => { + panic!("unexpected error: {message}"); + } + } + + // Now, create a conflict. 
+ let mut snapshot = WorkspaceSnapshot::find_for_change_set(ctx, base_change_set.id) + .await + .expect("could not find snapshot"); + snapshot + .update_content( + base_change_set, + olivia_rodrigo_id, + ContentHash::from("onto updated"), + ) + .expect("could not update content"); + snapshot + .write(ctx, base_change_set.vector_clock_id()) + .await + .expect("could not write snapshot"); + base_change_set + .update_pointer(ctx, snapshot.id()) + .await + .expect("could not update change set"); + let mut forked_snapshot = WorkspaceSnapshot::find_for_change_set(ctx, forked_change_set.id) + .await + .expect("could not find snapshot"); + forked_snapshot + .update_content( + forked_change_set, + olivia_rodrigo_id, + ContentHash::from("to rebase updated"), + ) + .expect("could not update content"); + forked_snapshot + .write(ctx, forked_change_set.vector_clock_id()) + .await + .expect("could not write snapshot"); + forked_change_set + .update_pointer(ctx, forked_snapshot.id()) + .await + .expect("could not update change set"); - ctx.blocking_commit().await.expect("could not do this"); + // Commit all changes made so that the rebaser can access them. + ctx.blocking_commit().await.expect("could not commit"); + // Cache the expected conflict and perform the rebase with the conflict. + let expected_conflicts = [Conflict::NodeContent { + onto: forked_snapshot + .get_node_index_by_id(olivia_rodrigo_id) + .expect("could not get node index by id"), + to_rebase: snapshot + .get_node_index_by_id(olivia_rodrigo_id) + .expect("could not get node index by id"), + }]; let response = client - .send_with_reply( + .request_rebase( base_change_set.id.into(), - snapshot.id().into(), - forked_change_set.id.into(), + forked_snapshot.id().into(), + forked_change_set.vector_clock_id().into(), ) .await .expect("could not send"); - // TODO(nick): do something useful with this. - dbg!(response); + // Ensure we see the conflict. 
+ match response { + ChangeSetReplyMessage::Success { updates_performed } => { + let updates_performed: Vec = + serde_json::from_value(updates_performed).expect("could not deserialize"); + panic!("unexpected success: {updates_performed:?}") + } + ChangeSetReplyMessage::ConflictsFound { + conflicts_found, + updates_found_and_skipped, + } => { + let actual_conflicts: Vec = + serde_json::from_value(conflicts_found).expect("could not deserialize"); + assert_eq!( + &expected_conflicts, // expected + actual_conflicts.as_slice() // actual + ); + let updates_found_and_skipped: Vec = + serde_json::from_value(updates_found_and_skipped).expect("could not deserialize"); + assert!(updates_found_and_skipped.is_empty()); + } + ChangeSetReplyMessage::Error { message } => { + panic!("unexpected error: {message}"); + } + } // TODO(nick): move cleanup to the test harness. let _ = client - .send_management_close_change_set(base_change_set.id.into()) + .close_stream_for_change_set(base_change_set.id.into()) .await; } diff --git a/lib/rebaser-client/Cargo.toml b/lib/rebaser-client/Cargo.toml index 7d27807626..a703107a7f 100644 --- a/lib/rebaser-client/Cargo.toml +++ b/lib/rebaser-client/Cargo.toml @@ -14,3 +14,4 @@ telemetry = { path = "../../lib/telemetry-rs" } thiserror = { workspace = true } tokio = { workspace = true } ulid = { workspace = true } +log = "0.4.20" diff --git a/lib/rebaser-client/src/client.rs b/lib/rebaser-client/src/client.rs index 9f7ec87d23..8de750e551 100644 --- a/lib/rebaser-client/src/client.rs +++ b/lib/rebaser-client/src/client.rs @@ -3,23 +3,22 @@ use rebaser_core::{ ChangeSetMessage, ChangeSetReplyMessage, ManagementMessage, ManagementMessageAction, - REBASER_MANAGEMENT_STREAM, + StreamNameGenerator, }; use si_rabbitmq::{Consumer, ConsumerOffsetSpecification, Environment, Producer}; use std::collections::HashMap; use std::time::Duration; - use telemetry::prelude::*; use ulid::Ulid; use crate::{ClientError, ClientResult}; -const REBASER_REPLY_STREAM_PREFIX: 
&str = "rebaser-reply"; const REPLY_TIMEOUT_SECONDS: u64 = 10; /// A client for communicating with a running rebaser [`Server`](rebaser_server::Server). #[allow(missing_debug_implementations)] pub struct Client { + id: Ulid, management_stream: Stream, streams: HashMap, reply_timeout: Duration, @@ -38,10 +37,10 @@ impl Client { pub async fn new() -> ClientResult { let environment = Environment::new().await?; - // First, create the reply stream. We do not check if it already exists since the reply - // stream name is ULID-based. It's unlikely that there will be a collision. - let unique_identifier = Ulid::new().to_string(); - let management_reply_stream = format!("rebaser-management-reply-{unique_identifier}"); + let id = Ulid::new(); + let management_stream = StreamNameGenerator::management(); + let management_reply_stream = StreamNameGenerator::management_reply(id); + environment.create_stream(&management_reply_stream).await?; let management_reply_consumer = Consumer::new( &environment, @@ -52,10 +51,10 @@ impl Client { // Name the producer using the reply stream, but produce to the primary rebaser stream. This // may... will... uh... potentially?... be useful for tracing. - let management_producer = - Producer::new(&environment, unique_identifier, REBASER_MANAGEMENT_STREAM).await?; + let management_producer = Producer::new(&environment, management_stream).await?; Ok(Self { + id, management_stream: Stream { producer: management_producer, reply_stream: management_reply_stream, @@ -67,33 +66,42 @@ impl Client { } /// Send a message to a rebaser stream for a change set and block for a reply. 
- pub async fn send_with_reply( + pub async fn request_rebase( &mut self, - change_set_to_update: Ulid, - workspace_snapshot_to_rebase_on_top_of_current_snapshot_being_pointed_at: Ulid, - change_set_that_dictates_changes: Ulid, + to_rebase_change_set_id: Ulid, + onto_workspace_snapshot_id: Ulid, + onto_vector_clock_id: Ulid, ) -> ClientResult { let stream = self .streams - .get_mut(&change_set_to_update) + .get_mut(&to_rebase_change_set_id) .ok_or(ClientError::RebaserStreamForChangeSetNotFound)?; stream .producer .send_single( ChangeSetMessage { - to_rebase_vector_clock_id: change_set_to_update, - to_rebase_workspace_snapshot_id: - workspace_snapshot_to_rebase_on_top_of_current_snapshot_being_pointed_at, - onto_change_set_id: change_set_that_dictates_changes, + to_rebase_change_set_id, + onto_workspace_snapshot_id, + onto_vector_clock_id, }, Some(stream.reply_stream.clone()), ) .await?; - let maybe_delivery = - match tokio::time::timeout(self.reply_timeout, stream.reply_consumer.next()).await { - Ok(result) => result?, - Err(e) => return Err(ClientError::ReplyTimeout(e)), - }; + let maybe_delivery = match tokio::time::timeout( + self.reply_timeout, + stream.reply_consumer.next(), + ) + .await + { + Ok(result) => result?, + Err(_elapsed) => { + debug!( + "hit timeout for consuming on the reply stream (\"{}\") from the rebaser server", + stream.reply_consumer.stream() + ); + return Err(ClientError::ReplyTimeout); + } + }; let delivery = maybe_delivery.ok_or(ClientError::EmptyDelivery( stream.reply_consumer.stream().to_string(), @@ -107,7 +115,7 @@ impl Client { } /// Send a message to the management stream to open a rebaser loop and block for a reply. 
- pub async fn send_management_open_change_set( + pub async fn open_stream_for_change_set( &mut self, change_set_id: Ulid, ) -> ClientResult { @@ -122,6 +130,9 @@ impl Client { ) .await?; + // FIXME(nick): we should probably not await a reply and assume that it is working OR we + // should await a reply, but only to see if it was successful. This is because we should + // know the name already and not have to get it from the route. let maybe_delivery = match tokio::time::timeout( self.reply_timeout, self.management_stream.reply_consumer.next(), @@ -129,7 +140,7 @@ impl Client { .await { Ok(result) => result?, - Err(e) => return Err(ClientError::ReplyTimeout(e)), + Err(_elapsed) => return Err(ClientError::ReplyTimeout), }; let delivery = maybe_delivery.ok_or(ClientError::EmptyDelivery( @@ -142,12 +153,13 @@ impl Client { let change_set_stream: String = serde_json::from_value(contents)?; + // TODO(nick): move stream generation to a common crate. let environment = Environment::new().await?; - let reply_stream = format!("{REBASER_REPLY_STREAM_PREFIX}-{change_set_id}"); + let reply_stream = StreamNameGenerator::change_set_reply(change_set_id, self.id); environment.create_stream(&reply_stream).await?; // FIXME(nick): name the producer properly. - let producer = Producer::new(&environment, "producer", &change_set_stream).await?; + let producer = Producer::new(&environment, &change_set_stream).await?; let reply_consumer = Consumer::new( &environment, &reply_stream, @@ -167,10 +179,7 @@ impl Client { } /// Send a message to the management stream to close a rebaser loop and do not wait for a reply. 
- pub async fn send_management_close_change_set( - &mut self, - change_set_id: Ulid, - ) -> ClientResult<()> { + pub async fn close_stream_for_change_set(&mut self, change_set_id: Ulid) -> ClientResult<()> { self.management_stream .producer .send_single( diff --git a/lib/rebaser-client/src/lib.rs b/lib/rebaser-client/src/lib.rs index 8b5be47345..46533629be 100644 --- a/lib/rebaser-client/src/lib.rs +++ b/lib/rebaser-client/src/lib.rs @@ -29,7 +29,6 @@ pub use client::Client; use si_rabbitmq::{Delivery, RabbitError}; use telemetry::prelude::error; use thiserror::Error; -use tokio::time::error::Elapsed; #[allow(missing_docs)] #[remain::sorted] @@ -43,8 +42,8 @@ pub enum ClientError { Rabbit(#[from] RabbitError), #[error("rebaser stream for change set not found")] RebaserStreamForChangeSetNotFound, - #[error("hit timeout while waiting for message on reply stream: {0}")] - ReplyTimeout(Elapsed), + #[error("hit timeout while waiting for message on reply stream")] + ReplyTimeout, #[error("serde json error: {0}")] SerdeJson(#[from] serde_json::Error), } diff --git a/lib/rebaser-core/src/lib.rs b/lib/rebaser-core/src/lib.rs index 799262cadf..407c494ce3 100644 --- a/lib/rebaser-core/src/lib.rs +++ b/lib/rebaser-core/src/lib.rs @@ -30,9 +30,6 @@ use serde::Serialize; use serde_json::Value; use ulid::Ulid; -/// Stream to manage rebaser consumer loops. -pub const REBASER_MANAGEMENT_STREAM: &str = "rebaser-management"; - /// The action for the rebaser management loop. #[derive(Debug, Serialize, Deserialize)] pub enum ManagementMessageAction { @@ -52,25 +49,28 @@ pub struct ManagementMessage { pub action: ManagementMessageAction, } -/// The message that the rebaser change set consumer expects in the server. +/// The message that the server's listener loop uses to perform a rebase. #[derive(Debug, Serialize, Deserialize)] pub struct ChangeSetMessage { /// Corresponds to the change set whose pointer is to be updated. 
- pub onto_change_set_id: Ulid, - /// Corresponds to the workspace snapshot that will be rebased on top of the workspace snapshot - /// that the change set is currently pointing at. - pub to_rebase_workspace_snapshot_id: Ulid, + pub to_rebase_change_set_id: Ulid, + /// Corresponds to the workspace snapshot that will be the "onto" workspace snapshot when + /// rebasing the "to rebase" workspace snapshot. + pub onto_workspace_snapshot_id: Ulid, /// Derived from the ephemeral or persisted change set that's either the base change set, the /// last change set before edits were made, or the change set that you are trying to rebase /// onto base. - pub to_rebase_vector_clock_id: Ulid, + pub onto_vector_clock_id: Ulid, } /// The message shape that the rebaser change set loop will use for replying to the client. #[derive(Debug, Serialize, Deserialize)] pub enum ChangeSetReplyMessage { - /// Updates performed when processing the delivery. - Success(Value), + /// Processing the delivery and performing updates was successful. + Success { + /// The serialized updates performed when rebasing. + updates_performed: Value, + }, /// Conflicts found when processing the delivery. ConflictsFound { /// A serialized list of the conflicts found during detection. @@ -80,5 +80,36 @@ pub enum ChangeSetReplyMessage { updates_found_and_skipped: Value, }, /// Error encountered when processing the delivery. - Error(String), + Error { + /// The error message. + message: String, + }, +} + +/// A generator that provides stream names in a centralized location. +#[allow(missing_debug_implementations)] +pub struct StreamNameGenerator; + +impl StreamNameGenerator { + /// Returns the name of the management stream. + pub fn management() -> &'static str { + "rebaser-management" + } + + /// Returns the name of the stream that the rebaser will reply to for messages sent to the + /// management stream from a specific client. 
+ pub fn management_reply(client_id: Ulid) -> String { + format!("rebaser-management-reply-{client_id}") + } + + /// Returns the name of a stream for a given change set. + pub fn change_set(change_set_id: Ulid) -> String { + format!("rebaser-{change_set_id}") + } + + /// Returns the name of the stream that the rebaser will reply to for messages sent to a change + /// set stream from a specific client. + pub fn change_set_reply(change_set_id: Ulid, client_id: Ulid) -> String { + format!("rebaser-{change_set_id}-reply-{client_id}") + } } diff --git a/lib/rebaser-server/src/server/change_set_loop.rs b/lib/rebaser-server/src/server/change_set_loop.rs index 6a96c5efad..dc74b3c564 100644 --- a/lib/rebaser-server/src/server/change_set_loop.rs +++ b/lib/rebaser-server/src/server/change_set_loop.rs @@ -1,4 +1,7 @@ use dal::change_set_pointer::{ChangeSetPointer, ChangeSetPointerError, ChangeSetPointerId}; +use dal::workspace_snapshot::graph::NodeIndex; +use dal::workspace_snapshot::update::Update; +use dal::workspace_snapshot::vector_clock::VectorClockId; use dal::workspace_snapshot::WorkspaceSnapshotError; use dal::{ DalContext, DalContextBuilder, Tenancy, TransactionsError, Visibility, WorkspacePk, @@ -6,6 +9,7 @@ use dal::{ }; use rebaser_core::{ChangeSetMessage, ChangeSetReplyMessage}; use si_rabbitmq::{Consumer, Delivery, Environment, Producer, RabbitError}; +use std::collections::HashMap; use telemetry::prelude::*; use thiserror::Error; @@ -15,6 +19,8 @@ use thiserror::Error; enum ChangeSetLoopError { #[error("workspace snapshot error: {0}")] ChangeSetPointer(#[from] ChangeSetPointerError), + #[error("when performing updates, could not find the newly imported subgraph (may god have mercy on your soul)")] + DestinationNotUpdatedWhenImportingSubgraph, #[error("missing change set message \"reply_to\" field")] MissingChangeSetMessageReplyTo, #[error("missing workspace snapshot for change set ({0}) (the change set likely isn't pointing at a workspace snapshot)")] @@ -44,19 
+50,13 @@ async fn change_set_loop( ctx_builder: DalContextBuilder, mut consumer: Consumer, ) -> ChangeSetLoopResult> { - let mut ctx = ctx_builder.build_default().await?; - ctx.update_visibility(Visibility::new_head(false)); - ctx.update_tenancy(Tenancy::new(WorkspacePk::NONE)); - // Create an environment for reply streams. let environment = Environment::new().await?; while let Some(delivery) = consumer.next().await? { - // TODO(nick): first detect conflicts and updates, second perform the updates. - // If conflicts appears, do not perform updates if they exist, and report conflicts back. - // In other words... - // 1) succeed everywhere - // 2) store offset with changeset - // 3) update requester stream w/out waiting for reply + let mut ctx = ctx_builder.build_default().await?; + ctx.update_visibility(Visibility::new_head(false)); + ctx.update_tenancy(Tenancy::new(WorkspacePk::NONE)); + process_delivery_infallible_wrapper(&mut ctx, &environment, consumer.stream(), &delivery) .await; } @@ -77,10 +77,15 @@ async fn process_delivery_infallible_wrapper( process_delivery(ctx, environment, inbound_stream, delivery, reply_to).await { error!(error = ?err, "processing delivery failed, attempting to reply"); - match Producer::for_reply(&environment, inbound_stream, reply_to).await { + match Producer::new(&environment, reply_to).await { Ok(mut producer) => { if let Err(err) = producer - .send_single(ChangeSetReplyMessage::Error(err.to_string()), None) + .send_single( + ChangeSetReplyMessage::Error { + message: err.to_string(), + }, + None, + ) .await { error!(error = ?err, "sending reply failed"); @@ -105,7 +110,7 @@ async fn process_delivery( environment: &Environment, inbound_stream: impl AsRef, delivery: &Delivery, - reply_to: impl AsRef, + reply_to_stream: impl AsRef, ) -> ChangeSetLoopResult<()> { let raw_message = match &delivery.message_contents { Some(found_raw_message) => found_raw_message, @@ -114,41 +119,152 @@ async fn process_delivery( let message: 
ChangeSetMessage = serde_json::from_value(raw_message.clone())?; // Gather everything we need to detect conflicts and updates from the inbound message. - let mut to_rebase_workspace_snapshot: WorkspaceSnapshot = - WorkspaceSnapshot::find(ctx, message.to_rebase_workspace_snapshot_id.into()).await?; - let onto_change_set = ChangeSetPointer::find(ctx, message.onto_change_set_id.into()).await?; - let onto_workspace_snapshot_id = onto_change_set.workspace_snapshot_id.ok_or( - ChangeSetLoopError::MissingWorkspaceSnapshotForChangeSet(onto_change_set.id), + let mut to_rebase_change_set = + ChangeSetPointer::find(ctx, message.to_rebase_change_set_id.into()).await?; + let to_rebase_workspace_snapshot_id = to_rebase_change_set.workspace_snapshot_id.ok_or( + ChangeSetLoopError::MissingWorkspaceSnapshotForChangeSet(to_rebase_change_set.id), )?; - let mut onto_workspace_snapshot = - WorkspaceSnapshot::find(ctx, onto_workspace_snapshot_id).await?; + let mut to_rebase_workspace_snapshot = + WorkspaceSnapshot::find(ctx, to_rebase_workspace_snapshot_id).await?; + let mut onto_workspace_snapshot: WorkspaceSnapshot = + WorkspaceSnapshot::find(ctx, message.onto_workspace_snapshot_id.into()).await?; // Perform the conflicts and updates detection. + let onto_vector_clock_id: VectorClockId = message.onto_vector_clock_id.into(); let (conflicts, updates) = to_rebase_workspace_snapshot .detect_conflicts_and_updates( - message.to_rebase_vector_clock_id.into(), + to_rebase_change_set.vector_clock_id(), &mut onto_workspace_snapshot, - onto_change_set.vector_clock_id(), + onto_vector_clock_id, ) .await?; + debug!("conflicts and updates detected: {conflicts:?} {updates:?}"); // If there are conflicts, immediately assemble a reply message that conflicts were found. // Otherwise, we can perform updates and assemble a "success" reply message. let message: ChangeSetReplyMessage = if conflicts.is_empty() { + // TODO(nick): store the offset with the change set. 
+ perform_updates_and_write_out_and_update_pointer( + ctx, + &mut to_rebase_workspace_snapshot, + &mut to_rebase_change_set, + &mut onto_workspace_snapshot, + &updates, + ) + .await?; + ChangeSetReplyMessage::Success { + updates_performed: serde_json::to_value(updates)?, + } + } else { ChangeSetReplyMessage::ConflictsFound { conflicts_found: serde_json::to_value(conflicts)?, updates_found_and_skipped: serde_json::to_value(updates)?, } - } else { - // TODO(nick): actually perform updates. - ChangeSetReplyMessage::Success(serde_json::to_value(updates)?) }; - let mut producer = Producer::for_reply(&environment, inbound_stream, reply_to).await?; + // Before replying to the requester, we must commit. + ctx.blocking_commit().await?; + + // Send reply to the "reply to stream" for the specific client. + let inbound_stream = inbound_stream.as_ref(); + let reply_to_stream = reply_to_stream.as_ref(); + debug!( + "processed delivery from \"{inbound_stream}\", committed transaction and sending reply to \"{reply_to_stream}\"", + ); + let mut producer = Producer::new(&environment, reply_to_stream).await?; producer .send_single(serde_json::to_value(message)?, None) .await?; + + // Close the producer _after_ logging, but do not make it an infallible close. We do that + // because the function managing the change set loop is infallible and will log the error. 
+ debug!("sent reply to \"{reply_to_stream}\""); producer.close().await?; Ok(()) } + +async fn perform_updates_and_write_out_and_update_pointer( + ctx: &DalContext, + to_rebase_workspace_snapshot: &mut WorkspaceSnapshot, + to_rebase_change_set: &mut ChangeSetPointer, + onto_workspace_snapshot: &mut WorkspaceSnapshot, + updates: &Vec, +) -> ChangeSetLoopResult<()> { + let mut updated = HashMap::new(); + for update in updates { + match update { + Update::NewEdge { + source, + destination, + edge_weight, + } => { + let source = *updated.get(source).unwrap_or(source); + let destination = find_in_to_rebase_or_create_using_onto( + *destination, + &mut updated, + onto_workspace_snapshot, + to_rebase_workspace_snapshot, + )?; + to_rebase_workspace_snapshot.add_edge(source, edge_weight.clone(), destination)?; + } + Update::RemoveEdge(edge) => { + to_rebase_workspace_snapshot.remove_edge_for_update_stableish(*edge)?; + } + Update::ReplaceSubgraph { onto, to_rebase } => { + let to_rebase = *updated.get(to_rebase).unwrap_or(to_rebase); + let new_subgraph_root = find_in_to_rebase_or_create_using_onto( + *onto, + &mut updated, + onto_workspace_snapshot, + to_rebase_workspace_snapshot, + )?; + to_rebase_workspace_snapshot.replace_references(to_rebase, new_subgraph_root)?; + } + } + } + + // Once all updates have been performed, we can write out, mark everything as recently seen + // and update the pointer. 
+ to_rebase_workspace_snapshot + .write(ctx, to_rebase_change_set.vector_clock_id()) + .await?; + to_rebase_change_set + .update_pointer(ctx, to_rebase_workspace_snapshot.id()) + .await?; + + Ok(()) +} + +fn find_in_to_rebase_or_create_using_onto( + unchecked: NodeIndex, + updated: &mut HashMap, + onto_workspace_snapshot: &mut WorkspaceSnapshot, + to_rebase_workspace_snapshot: &mut WorkspaceSnapshot, +) -> ChangeSetLoopResult { + let found_or_created = match updated.get(&unchecked) { + Some(found) => *found, + None => { + let unchecked_node_weight = onto_workspace_snapshot.get_node_weight(unchecked)?; + match to_rebase_workspace_snapshot.find_equivalent_node( + unchecked_node_weight.id(), + unchecked_node_weight.lineage_id(), + )? { + Some(found_equivalent_node) => { + updated.insert(unchecked, found_equivalent_node); + found_equivalent_node + } + None => { + updated.extend( + to_rebase_workspace_snapshot + .import_subgraph(onto_workspace_snapshot, unchecked)?, + ); + *updated + .get(&unchecked) + .ok_or(ChangeSetLoopError::DestinationNotUpdatedWhenImportingSubgraph)? + } + } + } + }; + Ok(found_or_created) +} diff --git a/lib/rebaser-server/src/server/management_loop.rs b/lib/rebaser-server/src/server/management_loop.rs index 63e25bedb6..0154d3d99d 100644 --- a/lib/rebaser-server/src/server/management_loop.rs +++ b/lib/rebaser-server/src/server/management_loop.rs @@ -1,6 +1,6 @@ use dal::{DalContext, JobQueueProcessor, ServicesContext}; -use rebaser_core::{ManagementMessage, ManagementMessageAction, REBASER_MANAGEMENT_STREAM}; +use rebaser_core::{ManagementMessage, ManagementMessageAction, StreamNameGenerator}; use si_data_nats::NatsClient; use si_data_pg::PgPool; use si_rabbitmq::{Consumer, ConsumerHandle, ConsumerOffsetSpecification, Environment, Producer}; @@ -72,15 +72,16 @@ async fn management_loop( // NOTE: QUERY DB FOR OFFSET NUMBER OR GO TO FIRST SPECIFICATION // Prepare the environment and management stream. 
+ let management_stream = StreamNameGenerator::management(); let environment = Environment::new().await?; if recreate_management_stream { - environment.delete_stream(REBASER_MANAGEMENT_STREAM).await?; + environment.delete_stream(management_stream).await?; } - environment.create_stream(REBASER_MANAGEMENT_STREAM).await?; + environment.create_stream(management_stream).await?; let mut management_consumer = Consumer::new( &environment, - REBASER_MANAGEMENT_STREAM, + management_stream, ConsumerOffsetSpecification::Next, ) .await?; @@ -114,8 +115,7 @@ async fn management_loop( } } ManagementMessageAction::OpenChangeSet => { - // TODO(nick): move stream naming to a centralized system, perhaps behind a unit struct. - let new_stream = format!("rebaser-{}", mm.change_set_id); + let new_stream = StreamNameGenerator::change_set(mm.change_set_id); let stream_already_exists = environment.create_stream(&new_stream).await?; // Only create the new stream and loop if the stream does not already exist. @@ -134,7 +134,7 @@ async fn management_loop( } // Return the requested stream and then close the producer. 
- let mut producer = Producer::for_reply(&environment, &new_stream, reply_to).await?; + let mut producer = Producer::new(&environment, reply_to).await?; producer.send_single(new_stream, None).await?; producer.close().await?; } diff --git a/lib/si-rabbitmq/src/lib.rs b/lib/si-rabbitmq/src/lib.rs index ae60fc75cc..958d72ec05 100644 --- a/lib/si-rabbitmq/src/lib.rs +++ b/lib/si-rabbitmq/src/lib.rs @@ -60,7 +60,7 @@ mod tests { .await .expect("could not create stream"); - let mut producer = Producer::new(&environment, "producer", stream) + let mut producer = Producer::new(&environment, stream) .await .expect("could not create producer"); diff --git a/lib/si-rabbitmq/src/producer.rs b/lib/si-rabbitmq/src/producer.rs index 44baf38874..c95976c995 100644 --- a/lib/si-rabbitmq/src/producer.rs +++ b/lib/si-rabbitmq/src/producer.rs @@ -1,5 +1,5 @@ use rabbitmq_stream_client::types::Message; -use rabbitmq_stream_client::{Dedup, Producer as UpstreamProducer}; +use rabbitmq_stream_client::{NoDedup, Producer as UpstreamProducer}; use serde::Serialize; use crate::environment::Environment; @@ -8,21 +8,16 @@ use crate::{RabbitError, RabbitResult}; /// An interface for producing and sending RabbitMQ stream messages. #[allow(missing_debug_implementations)] pub struct Producer { - inner: UpstreamProducer, + inner: UpstreamProducer, closed: bool, } impl Producer { /// Creates a new [`Producer`] for producing and sending RabbitMQ stream messages. - pub async fn new( - environment: &Environment, - name: impl AsRef, - stream: impl AsRef, - ) -> RabbitResult { + pub async fn new(environment: &Environment, stream: impl AsRef) -> RabbitResult { let inner = environment .inner() .producer() - .name(name.as_ref()) .build(stream.as_ref()) .await?; Ok(Self { @@ -31,22 +26,6 @@ impl Producer { }) } - /// Creates a new [`Producer`] for replying to the sender from an inbound stream. 
- pub async fn for_reply( - environment: &Environment, - inbound_stream: impl AsRef, - reply_to_stream: impl AsRef, - ) -> RabbitResult { - let inbound_stream = inbound_stream.as_ref(); - let reply_to_stream = reply_to_stream.as_ref(); - Self::new( - &environment, - format!("{inbound_stream}-reply-{reply_to_stream}"), - reply_to_stream, - ) - .await - } - /// Sends a single message to a stream. pub async fn send_single( &mut self, From 0ee30906f7edf9102e7fa011a410354264b50ce4 Mon Sep 17 00:00:00 2001 From: Nick Gerace Date: Tue, 17 Oct 2023 13:38:45 -0400 Subject: [PATCH 35/92] Fix rebaser-server after si-crypto and si-std refactor Signed-off-by: Nick Gerace --- bin/rebaser/BUCK | 1 + lib/dal-test/src/lib.rs | 1 + lib/dal/src/lib.rs | 3 +- lib/dal/src/workspace_snapshot/graph.rs | 4 +- lib/rebaser-server/BUCK | 1 + lib/rebaser-server/src/config.rs | 41 +++- lib/rebaser-server/src/server.rs | 201 ++---------------- .../src/server/management_loop.rs | 5 + 8 files changed, 70 insertions(+), 187 deletions(-) diff --git a/bin/rebaser/BUCK b/bin/rebaser/BUCK index 3264ee3b83..4b02234ae4 100644 --- a/bin/rebaser/BUCK +++ b/bin/rebaser/BUCK @@ -16,6 +16,7 @@ rust_binary( srcs = glob(["src/**/*.rs"]), resources = { "dev.encryption.key": "//lib/cyclone-server:dev.encryption.key", + "dev.donkey.key": "//lib/dal:dev.donkey.key", }, ) diff --git a/lib/dal-test/src/lib.rs b/lib/dal-test/src/lib.rs index 91efab8b24..beead10c9d 100644 --- a/lib/dal-test/src/lib.rs +++ b/lib/dal-test/src/lib.rs @@ -481,6 +481,7 @@ pub fn rebaser_server(services_context: &ServicesContext) -> Result bool { self.recreate_management_stream } + + /// Gets a reference to the symmetric crypto service. + pub fn symmetric_crypto_service(&self) -> &SymmetricCryptoServiceConfig { + &self.symmetric_crypto_service + } } /// The configuration file for creating a [`Server`]. 
@@ -95,6 +102,8 @@ pub struct ConfigFile { cyclone_encryption_key_path: String, #[serde(default = "default_recreate_management_stream")] recreate_management_stream: bool, + #[serde(default = "default_symmetric_crypto_config")] + symmetric_crypto_service: SymmetricCryptoServiceConfigFile, } impl Default for ConfigFile { @@ -104,6 +113,7 @@ impl Default for ConfigFile { nats: Default::default(), cyclone_encryption_key_path: default_cyclone_encryption_key_path(), recreate_management_stream: false, + symmetric_crypto_service: default_symmetric_crypto_config(), } } } @@ -123,6 +133,7 @@ impl TryFrom for Config { config.nats(value.nats); config.cyclone_encryption_key_path(value.cyclone_encryption_key_path.try_into()?); config.recreate_management_stream(value.recreate_management_stream); + config.symmetric_crypto_service(value.symmetric_crypto_service.try_into()?); config.build().map_err(Into::into) } } @@ -135,6 +146,13 @@ fn default_recreate_management_stream() -> bool { false } +fn default_symmetric_crypto_config() -> SymmetricCryptoServiceConfigFile { + SymmetricCryptoServiceConfigFile { + active_key: "/run/rebaser/donkey.key".into(), + extra_keys: vec![], + } +} + /// This function is used to determine the development environment and update the [`ConfigFile`] /// accordingly. #[allow(clippy::disallowed_methods)] @@ -156,13 +174,23 @@ fn buck2_development(config: &mut ConfigFile) -> Result<()> { .map_err(ConfigError::development)? .to_string_lossy() .to_string(); + let symmetric_crypto_service_key = resources + .get_ends_with("dev.donkey.key") + .map_err(ConfigError::development)? 
+ .to_string_lossy() + .to_string(); warn!( cyclone_encryption_key_path = cyclone_encryption_key_path.as_str(), + symmetric_crypto_service_key = symmetric_crypto_service_key.as_str(), "detected development run", ); config.cyclone_encryption_key_path = cyclone_encryption_key_path; + config.symmetric_crypto_service = SymmetricCryptoServiceConfigFile { + active_key: symmetric_crypto_service_key, + extra_keys: vec![], + }; Ok(()) } @@ -172,13 +200,22 @@ fn cargo_development(dir: String, config: &mut ConfigFile) -> Result<()> { .join("../../lib/cyclone-server/src/dev.encryption.key") .to_string_lossy() .to_string(); + let symmetric_crypto_service_key = Path::new(&dir) + .join("../../lib/dal/dev.donkey.key") + .to_string_lossy() + .to_string(); warn!( cyclone_encryption_key_path = cyclone_encryption_key_path.as_str(), + symmetric_crypto_service_key = symmetric_crypto_service_key.as_str(), "detected development run", ); config.cyclone_encryption_key_path = cyclone_encryption_key_path; + config.symmetric_crypto_service = SymmetricCryptoServiceConfigFile { + active_key: symmetric_crypto_service_key, + extra_keys: vec![], + }; Ok(()) } diff --git a/lib/rebaser-server/src/server.rs b/lib/rebaser-server/src/server.rs index 43669e1b33..4f4dc6d940 100644 --- a/lib/rebaser-server/src/server.rs +++ b/lib/rebaser-server/src/server.rs @@ -4,13 +4,12 @@ use dal::{ job::consumer::JobConsumerError, InitializationError, JobFailureError, JobQueueProcessor, NatsProcessor, TransactionsError, }; -use futures::{FutureExt, Stream, StreamExt}; use nats_subscriber::SubscriberError; - +use si_crypto::SymmetricCryptoServiceConfig; +use si_crypto::{SymmetricCryptoError, SymmetricCryptoService}; use si_data_nats::{NatsClient, NatsConfig, NatsError}; use si_data_pg::{PgPool, PgPoolConfig, PgPoolError}; use si_rabbitmq::RabbitError; - use std::{io, path::Path, sync::Arc}; use telemetry::prelude::*; use thiserror::Error; @@ -21,14 +20,8 @@ use tokio::{ oneshot, watch, }, }; -<<<<<<< HEAD -use 
tokio_stream::wrappers::UnboundedReceiverStream; use ulid::Ulid; use veritech_client::{Client as VeritechClient, CycloneEncryptionKey, CycloneEncryptionKeyError}; -======= - -use veritech_client::{Client as VeritechClient, EncryptionKey, EncryptionKeyError}; ->>>>>>> cdb8726f3 (Initial round trip loop of rebaser using graph logic) use crate::Config; @@ -65,6 +58,8 @@ pub enum ServerError { Signal(#[source] io::Error), #[error(transparent)] Subscriber(#[from] SubscriberError), + #[error("symmetric crypto error: {0}")] + SymmetricCrypto(#[from] SymmetricCryptoError), #[error(transparent)] Transactions(#[from] Box), #[error("workspace snapshot error: {0}")] @@ -99,6 +94,7 @@ pub struct Server { pg_pool: PgPool, veritech: VeritechClient, job_processor: Box, + symmetric_crypto_service: SymmetricCryptoService, /// An internal shutdown watch receiver handle which can be provided to internal tasks which /// want to be notified when a shutdown event is in progress. shutdown_watch_rx: watch::Receiver<()>, @@ -125,6 +121,8 @@ impl Server { let pg_pool = Self::create_pg_pool(config.pg_pool()).await?; let veritech = Self::create_veritech_client(nats.clone()); let job_processor = Self::create_job_processor(nats.clone()); + let symmetric_crypto_service = + Self::create_symmetric_crypto_service(config.symmetric_crypto_service()).await?; Self::from_services( encryption_key, @@ -132,6 +130,7 @@ impl Server { pg_pool, veritech, job_processor, + symmetric_crypto_service, config.recreate_management_stream(), ) } @@ -144,6 +143,7 @@ impl Server { pg_pool: PgPool, veritech: VeritechClient, job_processor: Box, + symmetric_crypto_service: SymmetricCryptoService, recreate_management_stream: bool, ) -> ServerResult { // An mpsc channel which can be used to externally shut down the server. 
@@ -165,6 +165,7 @@ impl Server { veritech, encryption_key, job_processor, + symmetric_crypto_service, shutdown_watch_rx, external_shutdown_tx, graceful_shutdown_rx, @@ -180,6 +181,7 @@ impl Server { self.nats, self.veritech, self.job_processor, + self.symmetric_crypto_service, self.encryption_key, self.shutdown_watch_rx, ) @@ -229,6 +231,15 @@ impl Server { fn create_job_processor(nats: NatsClient) -> Box { Box::new(NatsProcessor::new(nats)) as Box } + + #[instrument(name = "pinga.init.create_symmetric_crypto_service", skip_all)] + async fn create_symmetric_crypto_service( + config: &SymmetricCryptoServiceConfig, + ) -> ServerResult { + SymmetricCryptoService::from_config(config) + .await + .map_err(Into::into) + } } #[allow(missing_docs, missing_debug_implementations)] @@ -257,178 +268,6 @@ impl Default for ShutdownSource { } } -<<<<<<< HEAD -#[allow(clippy::too_many_arguments)] -async fn consume_stream_task( - recreate_management_stream: bool, - pg_pool: PgPool, - nats: NatsClient, - veritech: veritech_client::Client, - job_processor: Box, - encryption_key: Arc, - shutdown_watch_rx: watch::Receiver<()>, -) { - if let Err(err) = consume_stream( - recreate_management_stream, - pg_pool, - nats, - veritech, - job_processor, - encryption_key, - shutdown_watch_rx, - ) - .await - { - info!(error = ?err, "consuming stream failed"); - } -} - -#[allow(clippy::too_many_arguments)] -async fn consume_stream( - recreate_management_stream: bool, - pg_pool: PgPool, - nats: NatsClient, - veritech: veritech_client::Client, - job_processor: Box, - encryption_key: Arc, - mut shutdown_watch_rx: watch::Receiver<()>, -) -> ServerResult<()> { - let services_context = ServicesContext::new( - pg_pool, - nats.clone(), - job_processor, - veritech.clone(), - encryption_key, - None, - None, - (), - ); - let _ctx_builder = DalContext::builder(services_context, false); - - // Meta: we can only have one rebaser instance right now due to 
https://github.com/rabbitmq/rabbitmq-stream-rust-client/issues/130 - // - // 1) subscribe to "next" for changeset close/create events --> stream for ChangeSetClose or ChangeSetOpen - // --> "rebaser-management" - // 2) query db for all named, open changesets - // 3) start a subscription for each result for step 2 - // --> "rebaser-" - // 1:N --> "rebaser--reply--" - // (e.g. "rebaser--reply-sdf-") - // note: requester deletes stream upon reply - // - // NOTE: QUERY DB FOR OFFSET NUMBER OR GO TO FIRST SPECIFICATION - - // Prepare the environment and management stream. - let environment = Environment::new().await?; - if recreate_management_stream { - environment.delete_stream(REBASER_MANAGEMENT_STREAM).await?; - } - environment.create_stream(REBASER_MANAGEMENT_STREAM).await?; - - let mut management_consumer = Consumer::new( - &environment, - REBASER_MANAGEMENT_STREAM, - ConsumerOffsetSpecification::Next, - ) - .await?; - let management_handle = management_consumer.handle(); - let mut rebaser_handles: HashMap = HashMap::new(); - - while let Some(management_delivery) = management_consumer.next().await? 
{ - let contents = management_delivery - .message_contents - .ok_or(ServerError::MissingManagementMessageContents)?; - let reply_to = management_delivery - .reply_to - .ok_or(ServerError::MissingManagementMessageReplyTo)?; - let mm: ManagementMessage = serde_json::from_value(contents)?; - - match mm.action { - ManagementMessageAction::Close => match rebaser_handles.remove(&mm.change_set_id) { - Some((stream, handle)) => { - if let Err(e) = handle.close().await { - error!("{e}"); - } - if let Err(e) = environment.delete_stream(stream).await { - error!("{e}"); - } - } - None => debug!( - "did not find handle for change set id: {}", - mm.change_set_id - ), - }, - ManagementMessageAction::Open => { - let new_stream = format!("{REBASER_STREAM_PREFIX}-{}", mm.change_set_id); - let stream_already_exists = environment.create_stream(&new_stream).await?; - - // Only create the new stream if it does not already exist. - if !stream_already_exists { - let consumer = - Consumer::new(&environment, &new_stream, ConsumerOffsetSpecification::Next) - .await?; - let handle = consumer.handle(); - rebaser_handles.insert(mm.change_set_id, (new_stream.clone(), handle)); - - tokio::spawn(rebaser_loop_infallible_wrapper(consumer)); - } - - // Return the requested stream and then close the producer. 
- let mut producer = Producer::for_reply(&environment, &new_stream, reply_to).await?; - producer.send_single(new_stream, None).await?; - producer.close().await?; - } - } - } - - for (_, (stream, handle)) in rebaser_handles.drain() { - if let Err(e) = handle.close().await { - error!("{e}"); - } - if let Err(e) = environment.delete_stream(stream).await { - error!("{e}") - } - } - if let Err(e) = management_handle.close().await { - error!("{e}"); - } - Ok(()) -} - -async fn rebaser_loop_infallible_wrapper(consumer: Consumer) { - if let Err(e) = rebaser_loop(consumer).await { - dbg!(e); - } -} - -async fn rebaser_loop(mut consumer: Consumer) -> ServerResult<()> { - // Create an environment for reply streams. - let environment = Environment::new().await?; - while let Some(delivery) = consumer.next().await? { - if let Some(reply_to) = delivery.reply_to { - let mut producer = - Producer::for_reply(&environment, consumer.stream(), reply_to).await?; - - // ----------------------------------------- - // TODO(nick): this is where the fun begins. - // 1) succeed everywhere - // 2) store offset with changeset - // 3) update requester stream w/out waiting for reply - // ----------------------------------------- - - // TODO(nick): for now, just send back the message. Unwrapping is fine because we know - // that it must have content. 
- producer - .send_single(delivery.message_contents.unwrap(), None) - .await?; - producer.close().await?; - } - } - Ok(()) -} - -======= ->>>>>>> cdb8726f3 (Initial round trip loop of rebaser using graph logic) fn prepare_graceful_shutdown( mut external_shutdown_rx: mpsc::Receiver, shutdown_watch_tx: watch::Sender<()>, diff --git a/lib/rebaser-server/src/server/management_loop.rs b/lib/rebaser-server/src/server/management_loop.rs index 0154d3d99d..c756fff21d 100644 --- a/lib/rebaser-server/src/server/management_loop.rs +++ b/lib/rebaser-server/src/server/management_loop.rs @@ -9,6 +9,7 @@ use std::collections::HashMap; use std::sync::Arc; use telemetry::prelude::*; +use si_crypto::SymmetricCryptoService; use tokio::sync::watch; use ulid::Ulid; @@ -20,6 +21,7 @@ pub(crate) async fn management_loop_infallible_wrapper( nats: NatsClient, veritech: veritech_client::Client, job_processor: Box, + symmetric_crypto_service: SymmetricCryptoService, encryption_key: Arc, shutdown_watch_rx: watch::Receiver<()>, ) { @@ -29,6 +31,7 @@ pub(crate) async fn management_loop_infallible_wrapper( nats, veritech, job_processor, + symmetric_crypto_service, encryption_key, shutdown_watch_rx, ) @@ -44,6 +47,7 @@ async fn management_loop( nats: NatsClient, veritech: veritech_client::Client, job_processor: Box, + symmetric_crypto_service: SymmetricCryptoService, encryption_key: Arc, _shutdown_watch_rx: watch::Receiver<()>, ) -> ServerResult<()> { @@ -55,6 +59,7 @@ async fn management_loop( encryption_key, None, None, + symmetric_crypto_service, ); // let ctx_builder = DalContext::builder(services_context, false); From 727a80a55f68c3f995fc9117dfefe6036105f5dd Mon Sep 17 00:00:00 2001 From: Nick Gerace Date: Tue, 17 Oct 2023 14:32:30 -0400 Subject: [PATCH 36/92] Handle signup via a "universal" change set and snapshot This commit adds the "universal" change set that points to an "initial" workspace snapshot. 
Those two objects are created when the builtin workspace does not yet exist and needs to be created. We will need to handle the "universal" use case with the new graph work so that users have functions like "si:setObject" and "si:identity" ready to go. This commit does not handle what goes into the "initial" workspace snapshot. It solely creates an empty-ish snapshot. In the future, the "initial" workspace snapshot will be filled out to give synonymous functionality to what "import_builtins_v1" does today. Some good news: "signup" is handled indirectly because the primary workspace constructor (Workspace::new) accounts for the existence of the builtin Workspace with its corresponding "universal" change set and "initial" workspace snapshot. Subsequent workspaces start with their base change set who points to the "initial" workspace snapshot. In other words, users of new workspaces will have all the builtin funcs and schemas they need to get rolling. Signed-off-by: Nick Gerace --- lib/dal-test/src/test_harness.rs | 9 +- lib/dal/src/context.rs | 8 +- .../src/migrations/U0004__standard_model.sql | 7 +- lib/dal/src/migrations/U0030__workspaces.sql | 37 ++---- .../migrations/U3000__workspace_snapshots.sql | 11 +- lib/dal/src/timestamp.rs | 7 ++ lib/dal/src/workspace.rs | 112 ++++++++++++++---- .../integration_test/internal/key_pair.rs | 15 +-- 8 files changed, 120 insertions(+), 86 deletions(-) diff --git a/lib/dal-test/src/test_harness.rs b/lib/dal-test/src/test_harness.rs index 89a4845084..2a9e374955 100644 --- a/lib/dal-test/src/test_harness.rs +++ b/lib/dal-test/src/test_harness.rs @@ -7,7 +7,7 @@ use dal::{ socket::{Socket, SocketArity, SocketEdgeKind, SocketKind}, ChangeSet, ChangeSetPk, Component, DalContext, DiagramKind, EncryptedSecret, Func, FuncBackendKind, FuncBackendResponseType, KeyPair, Node, Schema, SchemaId, SchemaVariantId, - Secret, StandardModel, User, UserPk, Visibility, Workspace, WorkspacePk, + Secret, StandardModel, User, UserPk, Visibility, }; 
use names::{Generator, Name}; @@ -30,13 +30,6 @@ pub fn create_visibility_head() -> Visibility { Visibility::new(ChangeSetPk::NONE, None) } -pub async fn create_workspace(ctx: &mut DalContext) -> Workspace { - let name = generate_fake_name(); - Workspace::new(ctx, WorkspacePk::generate(), &name) - .await - .expect("cannot create workspace") -} - pub async fn create_key_pair(ctx: &DalContext) -> KeyPair { let name = generate_fake_name(); KeyPair::new(ctx, &name) diff --git a/lib/dal/src/context.rs b/lib/dal/src/context.rs index f0404e1ca3..d880e33c7d 100644 --- a/lib/dal/src/context.rs +++ b/lib/dal/src/context.rs @@ -16,7 +16,7 @@ use crate::{ processor::{JobQueueProcessor, JobQueueProcessorError}, producer::{BlockingJobError, BlockingJobResult, JobProducer}, }, - HistoryActor, StandardModel, Tenancy, TenancyError, Visibility, + HistoryActor, StandardModel, Tenancy, TenancyError, Visibility, WorkspacePk, }; /// A context type which contains handles to common core service dependencies. @@ -518,10 +518,14 @@ impl DalContext { /// Needed to remove universal tenancy while packages aren't a thing #[instrument(skip_all)] pub async fn import_builtins(&self) -> Result<(), TransactionsError> { + let source_workspace_pk = WorkspacePk::NONE; self.txns() .await? 
.pg() - .execute("SELECT import_builtins_v1($1)", &[self.tenancy()]) + .execute( + "SELECT import_builtins_v1($1, $2)", + &[self.tenancy(), &source_workspace_pk], + ) .await?; Ok(()) } diff --git a/lib/dal/src/migrations/U0004__standard_model.sql b/lib/dal/src/migrations/U0004__standard_model.sql index 556301a7f2..43baa7f9fb 100644 --- a/lib/dal/src/migrations/U0004__standard_model.sql +++ b/lib/dal/src/migrations/U0004__standard_model.sql @@ -1181,7 +1181,7 @@ BEGIN END; $$ LANGUAGE plpgsql VOLATILE; -CREATE OR REPLACE FUNCTION import_builtins_v1(destination_tenancy jsonb) +CREATE OR REPLACE FUNCTION import_builtins_v1(destination_tenancy jsonb, source_workspace_pk ident) RETURNS VOID AS $$ DECLARE @@ -1189,7 +1189,6 @@ DECLARE destination_tenancy_record tenancy_record_v1; this_table_name regclass; insert_column_names text; - source_workspace_pk ident; BEGIN destination_tenancy_record = tenancy_json_to_columns_v1(destination_tenancy); FOR standard_model IN SELECT * FROM standard_models @@ -1203,10 +1202,6 @@ BEGIN AND information_schema.columns.is_generated = 'NEVER' INTO insert_column_names; - SELECT (object ->> 'pk')::ident - INTO source_workspace_pk - FROM workspace_find_or_create_builtin_v1(); - -- No history events for this update EXECUTE format('INSERT INTO %1$I (tenancy_workspace_pk, visibility_change_set_pk, diff --git a/lib/dal/src/migrations/U0030__workspaces.sql b/lib/dal/src/migrations/U0030__workspaces.sql index 62618ae395..4218afece0 100644 --- a/lib/dal/src/migrations/U0030__workspaces.sql +++ b/lib/dal/src/migrations/U0030__workspaces.sql @@ -1,35 +1,12 @@ CREATE TABLE workspaces ( - pk ident primary key default ident_create_v1(), - visibility_deleted_at timestamp with time zone, - created_at timestamp with time zone NOT NULL DEFAULT CLOCK_TIMESTAMP(), - updated_at timestamp with time zone NOT NULL DEFAULT CLOCK_TIMESTAMP(), - name text NOT NULL + pk ident primary key DEFAULT ident_create_v1(), + visibility_deleted_at timestamp with time zone, + 
created_at timestamp with time zone NOT NULL DEFAULT CLOCK_TIMESTAMP(), + updated_at timestamp with time zone NOT NULL DEFAULT CLOCK_TIMESTAMP(), + name text NOT NULL, + base_change_set_id ident NOT NULL + -- TODO(nick): add "REFERENCES change_set_pointers (id)" to column type ); CREATE UNIQUE INDEX ON workspaces (pk); CREATE INDEX ON workspaces (visibility_deleted_at NULLS FIRST); - -CREATE OR REPLACE FUNCTION workspace_create_v1( - this_pk ident, - this_name text, - OUT object json) AS -$$ -DECLARE - this_new_row workspaces%ROWTYPE; -BEGIN - - INSERT INTO workspaces (pk, name) - VALUES (this_pk, this_name) - RETURNING * INTO this_new_row; - - object := row_to_json(this_new_row); -END; -$$ LANGUAGE PLPGSQL VOLATILE; - -CREATE OR REPLACE FUNCTION workspace_find_or_create_builtin_v1(OUT object json) AS -$$ -BEGIN - INSERT INTO workspaces (pk, name) VALUES (ident_nil_v1(), 'builtin') ON CONFLICT (pk) DO NOTHING; - SELECT row_to_json(workspaces.*) INTO STRICT object FROM workspaces WHERE pk = ident_nil_v1(); -END; -$$ LANGUAGE PLPGSQL VOLATILE; diff --git a/lib/dal/src/migrations/U3000__workspace_snapshots.sql b/lib/dal/src/migrations/U3000__workspace_snapshots.sql index e11dce2f7e..33a89c2a4b 100644 --- a/lib/dal/src/migrations/U3000__workspace_snapshots.sql +++ b/lib/dal/src/migrations/U3000__workspace_snapshots.sql @@ -7,13 +7,4 @@ CREATE TABLE workspace_snapshots -- workspace_id ident REFERENCES workspaces_v2 (id) NOT NULL, -- TODO(nick): replace the existing primary key with this once workspaces are added -- primary key (id, workspace_id) -); - --- TODO(nick): add the new workspaces to their own migration. 
--- CREATE TABLE workspaces_v2 --- ( --- id ident primary key NOT NULL DEFAULT ident_create_v1(), --- created_at timestamp with time zone NOT NULL DEFAULT CLOCK_TIMESTAMP(), --- updated_at timestamp with time zone NOT NULL DEFAULT CLOCK_TIMESTAMP(), --- base_change_set_id ident REFERENCES change_set_pointers (id) --- ); \ No newline at end of file +); \ No newline at end of file diff --git a/lib/dal/src/timestamp.rs b/lib/dal/src/timestamp.rs index 7580f14fec..2e5f756c7e 100644 --- a/lib/dal/src/timestamp.rs +++ b/lib/dal/src/timestamp.rs @@ -22,4 +22,11 @@ impl Timestamp { updated_at: now, } } + + pub fn assemble(created_at: DateTime, updated_at: DateTime) -> Self { + Self { + created_at, + updated_at, + } + } } diff --git a/lib/dal/src/workspace.rs b/lib/dal/src/workspace.rs index 173fc0158b..485edd1acf 100644 --- a/lib/dal/src/workspace.rs +++ b/lib/dal/src/workspace.rs @@ -1,13 +1,16 @@ +use chrono::{DateTime, Utc}; use serde::{Deserialize, Serialize}; use si_data_nats::NatsError; -use si_data_pg::PgError; +use si_data_pg::{PgError, PgRow}; use telemetry::prelude::*; use thiserror::Error; +use crate::change_set_pointer::{ChangeSetPointer, ChangeSetPointerError, ChangeSetPointerId}; +use crate::workspace_snapshot::WorkspaceSnapshotError; use crate::{ - pk, standard_model, standard_model_accessor_ro, DalContext, HistoryActor, HistoryEvent, - HistoryEventError, KeyPair, KeyPairError, StandardModelError, Tenancy, Timestamp, - TransactionsError, User, UserError, UserPk, + pk, standard_model_accessor_ro, DalContext, HistoryActor, HistoryEvent, HistoryEventError, + KeyPair, KeyPairError, StandardModelError, Tenancy, Timestamp, TransactionsError, User, + UserError, UserPk, WorkspaceSnapshot, }; const WORKSPACE_GET_BY_PK: &str = include_str!("queries/workspace/get_by_pk.sql"); @@ -17,6 +20,8 @@ const WORKSPACE_LIST_FOR_USER: &str = include_str!("queries/workspace/list_for_u #[remain::sorted] #[derive(Error, Debug)] pub enum WorkspaceError { + #[error("change set pointer 
error: {0}")] + ChangeSetPointer(#[from] ChangeSetPointerError), #[error(transparent)] HistoryEvent(#[from] HistoryEventError), #[error(transparent)] @@ -35,6 +40,8 @@ pub enum WorkspaceError { Transactions(#[from] TransactionsError), #[error(transparent)] User(#[from] UserError), + #[error("workspace snapshot error: {0}")] + WorkspaceSnapshot(#[from] WorkspaceSnapshotError), } pub type WorkspaceResult = Result; @@ -53,29 +60,75 @@ pub struct WorkspaceSignup { pub struct Workspace { pk: WorkspacePk, name: String, + base_change_set_id: ChangeSetPointerId, #[serde(flatten)] timestamp: Timestamp, } +impl TryFrom for Workspace { + type Error = WorkspaceError; + + fn try_from(row: PgRow) -> Result { + let created_at: DateTime = row.try_get("created_at")?; + let updated_at: DateTime = row.try_get("updated_at")?; + Ok(Self { + pk: row.try_get("pk")?, + name: row.try_get("name")?, + base_change_set_id: row.try_get("base_change_set_id")?, + timestamp: Timestamp::assemble(created_at, updated_at), + }) + } +} + impl Workspace { pub fn pk(&self) -> &WorkspacePk { &self.pk } + /// Find or create the builtin [`Workspace`]. #[instrument(skip_all)] pub async fn builtin(ctx: &DalContext) -> WorkspaceResult { + // Check if the builtin already exists. + if let Some(found_builtin) = Self::find_builtin(ctx).await? { + return Ok(found_builtin); + } + + // If not, create the builtin workspace with a corresponding base change set and initial + // workspace snapshot. + let mut change_set = ChangeSetPointer::new(ctx, "HEAD").await?; + let workspace_snapshot = WorkspaceSnapshot::initial(ctx, &change_set).await?; + change_set + .update_pointer(ctx, workspace_snapshot.id()) + .await?; + let head_pk = WorkspaceId::NONE; + let name = "builtin"; let row = ctx .txns() .await? 
.pg() .query_one( - "SELECT object FROM workspace_find_or_create_builtin_v1()", - &[], + "INSERT INTO workspaces (pk, name, base_change_set_id) VALUES ($1, $2, $3) RETURNING *", + &[&head_pk, &name, &change_set.id], ) .await?; + Self::try_from(row) + } - let object = standard_model::object_from_row(row)?; - Ok(object) + /// This private method attempts to find the builtin [`Workspace`]. + #[instrument(skip_all)] + async fn find_builtin(ctx: &DalContext) -> WorkspaceResult> { + let head_pk = WorkspaceId::NONE; + let maybe_row = ctx + .txns() + .await? + .pg() + .query_opt("SELECT * FROM workspaces WHERE pk = $1", &[&head_pk]) + .await?; + let maybe_builtin = match maybe_row { + Some(found) => Some(Self::try_from(found)?), + None => None, + }; + Ok(maybe_builtin) } pub async fn list_for_user(ctx: &DalContext) -> WorkspaceResult> { @@ -94,11 +147,14 @@ impl Workspace { } pub async fn find_first_user_workspace(ctx: &DalContext) -> WorkspaceResult> { - let row = ctx.txns().await?.pg().query_opt( + let maybe_row = ctx.txns().await?.pg().query_opt( "SELECT row_to_json(w.*) AS object FROM workspaces AS w WHERE pk != $1 ORDER BY created_at ASC LIMIT 1", &[&WorkspacePk::NONE], ).await?; - - Ok(standard_model::option_object_from_row(row)?) + let maybe_workspace = match maybe_row { + Some(found) => Some(Self::try_from(found)?), + None => None, + }; + Ok(maybe_workspace) } #[instrument(skip_all)] @@ -107,23 +163,30 @@ impl Workspace { pk: WorkspacePk, name: impl AsRef, ) -> WorkspaceResult { + // Get the snapshot that the builtin workspace's base change set is pointing at. + let builtin = Self::builtin(ctx).await?; + let workspace_snapshot = + WorkspaceSnapshot::find_for_change_set(ctx, builtin.base_change_set_id).await?; + + // Create a new change set and point to the aforementioned snapshot. 
+ let mut change_set = ChangeSetPointer::new(ctx, "HEAD").await?; + change_set + .update_pointer(ctx, workspace_snapshot.id()) + .await?; + let name = name.as_ref(); let row = ctx .txns() .await? .pg() .query_one( - "SELECT object FROM workspace_create_v1($1, $2)", - &[&pk, &name], + "INSERT INTO workspaces (pk, name, base_change_set_id) VALUES ($1, $2, $3) RETURNING *", + &[&pk, &name, &change_set.id], ) .await?; + let new_workspace = Self::try_from(row)?; - // Inlined `finish_create_from_row` - - let json: serde_json::Value = row.try_get("object")?; - let object: Self = serde_json::from_value(json)?; - - ctx.update_tenancy(Tenancy::new(object.pk)); + ctx.update_tenancy(Tenancy::new(new_workspace.pk)); let _history_event = HistoryEvent::new( ctx, @@ -132,7 +195,7 @@ impl Workspace { &serde_json::json![{ "visibility": ctx.visibility() }], ) .await?; - Ok(object) + Ok(new_workspace) } pub async fn clear(&self, ctx: &DalContext) -> WorkspaceResult<()> { @@ -196,14 +259,17 @@ impl Workspace { } pub async fn find_by_name(ctx: &DalContext, name: &str) -> WorkspaceResult> { - let row = ctx + let maybe_row = ctx .txns() .await? 
.pg() .query_opt(WORKSPACE_FIND_BY_NAME, &[&name]) .await?; - let result = standard_model::option_object_from_row(row)?; - Ok(result) + let maybe_workspace = match maybe_row { + Some(found) => Some(Self::try_from(found)?), + None => None, + }; + Ok(maybe_workspace) } pub async fn get_by_pk( diff --git a/lib/dal/tests/integration_test/internal/key_pair.rs b/lib/dal/tests/integration_test/internal/key_pair.rs index a5e4c8b0e8..c9d22a0312 100644 --- a/lib/dal/tests/integration_test/internal/key_pair.rs +++ b/lib/dal/tests/integration_test/internal/key_pair.rs @@ -1,8 +1,5 @@ -use dal::{key_pair::PublicKey, DalContext, KeyPair, Tenancy}; -use dal_test::{ - test, - test_harness::{create_key_pair, create_workspace}, -}; +use dal::{key_pair::PublicKey, DalContext, KeyPair, Tenancy, Workspace, WorkspacePk}; +use dal_test::{test, test_harness::create_key_pair}; #[test] async fn new(ctx: &DalContext) { @@ -13,7 +10,9 @@ async fn new(ctx: &DalContext) { #[test] async fn belongs_to(ctx: &mut DalContext) { - let workspace = create_workspace(ctx).await; + let workspace = Workspace::new(ctx, WorkspacePk::generate(), "new") + .await + .expect("cannot create workspace"); ctx.update_tenancy(Tenancy::new(*workspace.pk())); let key_pair = create_key_pair(ctx).await; @@ -26,7 +25,9 @@ async fn belongs_to(ctx: &mut DalContext) { #[test] async fn public_key_get_current(ctx: &mut DalContext) { - let workspace = create_workspace(ctx).await; + let workspace = Workspace::new(ctx, WorkspacePk::generate(), "new") + .await + .expect("cannot create workspace"); ctx.update_tenancy(Tenancy::new(*workspace.pk())); let first_key_pair = create_key_pair(ctx).await; From 2aad323ffed43b386561b18ca61a6e632f12a9dd Mon Sep 17 00:00:00 2001 From: Nick Gerace Date: Tue, 17 Oct 2023 18:55:29 -0400 Subject: [PATCH 37/92] feat(*): begin switchover to workspace snapshot and content store - Start with Schemas and create them fully - Move to SchemaVariants and create them up until the root prop - Move to Props and 
create them somewhat - Determine that we probably do not need the path of the prop on the prop since the O(1) lookup is not possible in the same way anymore (i.e. you have to start from the root prop anyway) Signed-off-by: Nick Gerace Co-authored-by: Jacob Helwig Round two - Add creation for attribute prototype - Add creation for external provider - Add creation for func - Add creation for socket - Add new node weight for categorizing common items off root (e.g. Components, Schemas and Funcs) Next steps: - Use the new node weight for components, schemas and funcs - Double check that WorkspaceSnapshot::initial looks good - Look up "si:identity" for usage in ExternalProvider creation Signed-off-by: Nick Gerace Co-authored-by: Jacob Helwig Round three Essentially, we continued down the SchemaVariant::new rabbit hole with this session. - Finish ExternalProvider creation - Add FuncNodeWeight to access name - Create both Frame providers for new SchemaVariants - Continue work on RootProp and go as far down as "/root/si/" creation Next time: - Start with "create_validation" inside the "/root/si/" creation - Continue creating the "/root" prop and its descendants - Finish up the SchemaVariant::new API equivalent Signed-off-by: Nick Gerace Co-authored-by: Jacob Helwig Round four - Add validation prototypes - Continue down root prop creation including the resource and resource value props (stopped at default prototypes and values) - Caught Zack up to speed (as much as one can be) - Drop reconciliation funcs for now as they are not used on "main" and will likely be refactored anyway (this was seen in the resource value prop creation func) Next Steps: - Start with attribute value creation - Refactor attribute prototype creation (i.e. attribute prototypes only having incoming edges from attribute values...
at least that is what we think at the time of writing) - Continue down default prototypes and values creation - Finish root prop creation - Finish SchemaVariant::new switchover Signed-off-by: Nick Gerace Co-authored-by: Jacob Helwig Co-authored-by: Zachary Hamm Round five - Finished creating default prototypes and values underneath and including the root prop - Everything else in this commit supported doing that - For now, we will NOT be creating edges within AttributeValue creation (there are too many side effects and ways to do it) Next steps: - Start with creating implicit internal providers - Continue in the "resource_value" prop creation - Continue with root prop tree creation and scaffolding - Continue with SchemaVariant::new Observers: Brit, Paul, Keeb Signed-off-by: Nick Gerace Co-authored-by: Jacob Helwig Co-authored-by: Zachary Hamm Round six - Finish create implicit internal providers - Finish root prop creation - Finish SchemaVariant::new equivalent - Switch existing API methods to use "_" syntax Next steps: - Explore Actions as a potential next step - Potentially comment out code that prevents compilation - Potentially create an integration test that tracks API progress thus far Observers: Wendy Signed-off-by: Nick Gerace Co-authored-by: Zachary Hamm Co-authored-by: Jacob Helwig Round seven - Add edge for ActionPrototypes from SchemaVariant to Function. 
Edge weight contains ActionKind - Get existing code closer to compiling Signed-off-by: Zachary Hamm Co-authored-by: Nick Gerace Co-authored-by: Jacob Helwig Round eight - Add Node (visual for the frontend) API object with content address - Add Component::new equivalent - Add SchemaVariant::finalize equivalent - Ensure Component edges are created by default - Component Category --> Component - Component --> SchemaVariant - Ensure Node (visual for the frontend) edge is created by default - Component --> Node - Comment out and deleted now unused code Discussion - Frames are likely going to be skipped for now and we will likely try to port them in a similar manner to how they work today - ComponentStatus is likely going to be skipped for now and may be for the foreseeable future (perhaps we get things working and _then_ reintroduce the StatusUpdater's use of this object) - Deletion logic is going to be dropped because deletion itself is a consequence of comparing two graphs (although, soft deletion may still exist (e.g. "needs_destroy" on Components)) Signed-off-by: Nick Gerace Co-authored-by: Zachary Hamm Co-authored-by: Jacob Helwig Round nine - Implement attribute_value_set_value which handles the portion of attribute_value_update that sets the value and unprocessed value in the graph by executing a func binding. - Implement helpers to get funcs and attribute values from the content store. - Remove func id from attribute prototype node and turn it into an edge. Next steps: implement the equivalent of parent value vivification for attribute_value_update Signed-off-by: Zachary Hamm Co-authored-by: Nick Gerace Co-authored-by: Jacob Helwig Round Ten - Implement parent vivification for attribute_value_update - Implement create nested values for attribute value update Next steps: equivalent to insert_for_context in graphland.
Handle a version of the system where schema variants only have prototypes and no attribute values, with the prototype having a const args field to handle default values. Signed-off-by: Zachary Hamm Co-authored-by: Nick Gerace Co-authored-by: Jacob Helwig Round eleven - Move function execution outside of updating attribute values - Cache the unset func id for the create nested value helper Next steps: - Add insert for context with initializing descending values to "unset" until you hit the next map, array or scalar - Evaluate removing attribute values from the schema variant (only having them for components) Signed-off-by: Nick Gerace Co-authored-by: Jacob Helwig Co-authored-by: Zachary Hamm Round twelve - Add insert for context equivalent for attribute values - Extend populated nested values functionality to work for both update and insert API methods for attribute values - When populating nested values, only descend for maps and arrays if the value is "some" - We should always descend on objects even if the value is "none" Next steps: - Consider removing attribute values from schema variants (i.e. created for components only) - Attempt to get rudimentary SDF and frontend behavior working (e.g. listing schema variants) Signed-off-by: Nick Gerace Co-authored-by: Zachary Hamm Co-authored-by: Jacob Helwig Round thirteen - Remove attribute values from schema variants - Props and providers on schema variants no longer have attribute values (i.e. 
they have attribute prototypes directly) - Components will solely have attribute values Next steps: - Try to get a rudimentary SDF route working in the frontend (likely "list schema variants") Observer: Anna Signed-off-by: Nick Gerace Co-authored-by: Jacob Helwig Co-authored-by: Zachary Hamm Round fourteen part one - Gut the dal to get it to compile - Comment out everything that we do not want to use to get a rudimentary SDF route or new integration test working Next steps: - Get at least "bin/sdf" compiling outside of the dal Signed-off-by: Nick Gerace Co-authored-by: Jacob Helwig Co-authored-by: Zachary Hamm Round fourteen part two - Make the rebaser-server and pinga-server happy - Restore the ability to delete edges by index Next steps: - Get lib/dal-test and bin/sdf to compile Signed-off-by: Nick Gerace Co-authored-by: Jacob Helwig Co-authored-by: Zachary Hamm Round fourteen part three - Get everything compilable... mostly by removing code Next steps: - Use the compilable codebase to either make a rudimentary SDF route work or write a new dal integration test Signed-off-by: Nick Gerace Co-authored-by: Jacob Helwig Co-authored-by: Zachary Hamm Round fifteen - Get the UI working up until schema variants are listed - Work towards listing schema variants - Restore PG migrations back to last working point (they are unimportant) - Disable builtins - Refactor workspaces and change set pointers to handle default and base change set pointer ids Next Steps: - Get SDF route working for listing schema variants - Create new integration test(s) using what we have builtin thus far - Discuss API shapes and access patterns Observers: Adam, Wendy Signed-off-by: Nick Gerace Co-authored-by: Zachary Hamm Co-authored-by: Jacob Helwig Round sixteen - Shift to getting functions imported. - Narrow import to just the function behavior - Begin to implement import functionality with the workspace snapshot methods.
Next steps: - Finish func import - Bring back sdf-service/list_funcs route. - Add func arguments - Bring back get_func sans "associations" Round 17 - Fix content store update (use cbor instead of serde) - Get intrinsics importing - Attempt importing all the functions from the builtins modules (this fails!) Next steps: - Fix importing all functions - Add func arguments - Fix sdf list_funcs route --- Cargo.lock | 1688 ++-- Cargo.toml | 72 +- bin/sdf/src/main.rs | 24 +- lib/content-store/src/store/pg.rs | 2 +- lib/dal-test/src/lib.rs | 4 +- lib/dal/examples/dal-pkg-export/main.rs | 278 +- lib/dal/examples/dal-pkg-import/main.rs | 258 +- lib/dal/src/action.rs | 502 +- lib/dal/src/action_prototype.rs | 662 +- lib/dal/src/attribute/context.rs | 1102 +-- lib/dal/src/attribute/context/read.rs | 474 +- lib/dal/src/attribute/prototype.rs | 1763 ++--- lib/dal/src/attribute/prototype/argument.rs | 898 +-- lib/dal/src/attribute/value.rs | 2288 +++--- lib/dal/src/attribute/value/view.rs | 507 +- lib/dal/src/authentication_prototype.rs | 13 +- lib/dal/src/builtins.rs | 81 +- lib/dal/src/builtins/func.rs | 284 +- lib/dal/src/builtins/schema.rs | 4 +- lib/dal/src/change_set.rs | 106 +- lib/dal/src/change_set_pointer.rs | 38 +- lib/dal/src/component.rs | 1701 ++-- lib/dal/src/component/code.rs | 6 +- lib/dal/src/component/resource.rs | 7 +- lib/dal/src/component/status.rs | 1 - lib/dal/src/component/view/debug.rs | 14 +- lib/dal/src/context.rs | 26 +- lib/dal/src/diagram.rs | 35 +- lib/dal/src/diagram/node.rs | 659 +- lib/dal/src/edge.rs | 9 +- lib/dal/src/func.rs | 476 +- lib/dal/src/func/argument.rs | 6 +- lib/dal/src/func/backend.rs | 373 +- lib/dal/src/func/backend/validation.rs | 255 +- lib/dal/src/func/binding.rs | 4 +- lib/dal/src/func/identity.rs | 79 +- lib/dal/src/func/intrinsics.rs | 2 +- lib/dal/src/installed_pkg/asset.rs | 451 +- lib/dal/src/job.rs | 2 +- lib/dal/src/job/consumer.rs | 46 +- .../job/definition/dependent_values_update.rs | 4 +- lib/dal/src/lib.rs | 187 +- 
lib/dal/src/migrations/U0030__workspaces.sql | 2 +- .../migrations/U0072__action_prototype.sql | 2 +- .../migrations/U3001__change_set_pointers.sql | 1 + lib/dal/src/node.rs | 431 +- lib/dal/src/node_menu.rs | 2 +- lib/dal/src/pkg.rs | 502 +- lib/dal/src/pkg/import.rs | 6374 +++++++-------- lib/dal/src/prop.rs | 1043 +-- lib/dal/src/property_editor.rs | 20 +- lib/dal/src/property_editor/schema.rs | 195 +- lib/dal/src/provider/external.rs | 543 +- lib/dal/src/provider/internal.rs | 1051 ++- lib/dal/src/schema.rs | 375 +- lib/dal/src/schema/ui_menu.rs | 127 +- lib/dal/src/schema/variant.rs | 1471 ++-- lib/dal/src/schema/variant/definition.rs | 7 +- lib/dal/src/schema/variant/leaves.rs | 303 +- lib/dal/src/schema/variant/root_prop.rs | 936 ++- lib/dal/src/socket.rs | 518 +- lib/dal/src/status.rs | 5 +- lib/dal/src/validation.rs | 67 +- lib/dal/src/validation/prototype.rs | 338 +- lib/dal/src/workspace.rs | 72 +- lib/dal/src/workspace_snapshot.rs | 82 +- lib/dal/src/workspace_snapshot/api.rs | 107 + .../src/workspace_snapshot/api/attribute.rs | 2 + .../api/attribute/prototype.rs | 103 + .../workspace_snapshot/api/attribute/value.rs | 810 ++ .../src/workspace_snapshot/api/component.rs | 69 + lib/dal/src/workspace_snapshot/api/func.rs | 168 + lib/dal/src/workspace_snapshot/api/node.rs | 111 + lib/dal/src/workspace_snapshot/api/prop.rs | 158 + .../src/workspace_snapshot/api/provider.rs | 2 + .../api/provider/external.rs | 86 + .../api/provider/internal.rs | 125 + lib/dal/src/workspace_snapshot/api/schema.rs | 81 + .../workspace_snapshot/api/schema/variant.rs | 325 + .../api/schema/variant/root_prop.rs | 591 ++ lib/dal/src/workspace_snapshot/api/socket.rs | 77 + .../src/workspace_snapshot/api/validation.rs | 1 + .../api/validation/prototype.rs | 71 + .../src/workspace_snapshot/content_address.rs | 31 +- lib/dal/src/workspace_snapshot/edge_weight.rs | 14 +- lib/dal/src/workspace_snapshot/graph.rs | 142 +- lib/dal/src/workspace_snapshot/node_weight.rs | 255 +- 
.../node_weight/category_node_weight.rs | 165 + .../node_weight/content_node_weight.rs | 6 + .../node_weight/func_node_weight.rs | 266 + .../node_weight/prop_node_weight.rs | 28 + lib/dal/src/ws_event.rs | 85 +- lib/dal/tests/integration.rs | 2 +- lib/pinga-server/src/server.rs | 46 +- .../src/server/change_set_loop.rs | 11 +- .../src/server/management_loop.rs | 4 +- lib/sdf-server/src/server/routes.rs | 56 +- lib/sdf-server/src/server/server.rs | 54 +- lib/sdf-server/src/server/service.rs | 26 +- .../src/server/service/change_set.rs | 123 +- .../change_set/list_open_change_sets.rs | 56 +- lib/sdf-server/src/server/service/diagram.rs | 153 +- .../service/diagram/list_schema_variants.rs | 160 +- lib/sdf-server/src/server/service/signup.rs | 64 - .../server/service/signup/create_account.rs | 53 - lib/sdf-server/tests/api.rs | 2 +- third-party/rust/BUCK | 6940 +++++++---------- third-party/rust/Cargo.lock | 1714 ++-- third-party/rust/Cargo.toml | 51 +- .../rust/fixups/crossbeam-epoch/fixups.toml | 2 + third-party/rust/fixups/heapless/fixups.toml | 1 + third-party/rust/fixups/ring/fixups.toml | 3 + 112 files changed, 22143 insertions(+), 21115 deletions(-) create mode 100644 lib/dal/src/workspace_snapshot/api.rs create mode 100644 lib/dal/src/workspace_snapshot/api/attribute.rs create mode 100644 lib/dal/src/workspace_snapshot/api/attribute/prototype.rs create mode 100644 lib/dal/src/workspace_snapshot/api/attribute/value.rs create mode 100644 lib/dal/src/workspace_snapshot/api/component.rs create mode 100644 lib/dal/src/workspace_snapshot/api/func.rs create mode 100644 lib/dal/src/workspace_snapshot/api/node.rs create mode 100644 lib/dal/src/workspace_snapshot/api/prop.rs create mode 100644 lib/dal/src/workspace_snapshot/api/provider.rs create mode 100644 lib/dal/src/workspace_snapshot/api/provider/external.rs create mode 100644 lib/dal/src/workspace_snapshot/api/provider/internal.rs create mode 100644 lib/dal/src/workspace_snapshot/api/schema.rs create mode 100644 
lib/dal/src/workspace_snapshot/api/schema/variant.rs create mode 100644 lib/dal/src/workspace_snapshot/api/schema/variant/root_prop.rs create mode 100644 lib/dal/src/workspace_snapshot/api/socket.rs create mode 100644 lib/dal/src/workspace_snapshot/api/validation.rs create mode 100644 lib/dal/src/workspace_snapshot/api/validation/prototype.rs create mode 100644 lib/dal/src/workspace_snapshot/node_weight/category_node_weight.rs create mode 100644 lib/dal/src/workspace_snapshot/node_weight/func_node_weight.rs delete mode 100644 lib/sdf-server/src/server/service/signup.rs delete mode 100644 lib/sdf-server/src/server/service/signup/create_account.rs create mode 100644 third-party/rust/fixups/heapless/fixups.toml diff --git a/Cargo.lock b/Cargo.lock index 097df33512..40c56baa6f 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -25,32 +25,31 @@ checksum = "f26201604c87b1e01bd3d98f8d5d9a8fcbb815e8cedb41ffccbeb4bf593a35fe" [[package]] name = "ahash" -version = "0.7.7" +version = "0.7.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5a824f2aa7e75a0c98c5a504fceb80649e9c35265d44525b5f94de4771a395cd" +checksum = "fcb51a0695d8f838b1ee009b3fbf66bda078cd64590202a864a8f3e8c4315c47" dependencies = [ - "getrandom 0.2.11", + "getrandom 0.2.10", "once_cell", "version_check", ] [[package]] name = "ahash" -version = "0.8.6" +version = "0.8.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "91429305e9f0a25f6205c5b8e0d2db09e0708a7a6df0f42212bb56c32c8ac97a" +checksum = "2c99f64d1e06488f620f932677e24bc6e2897582980441ae90a671415bd7ec2f" dependencies = [ "cfg-if", "once_cell", "version_check", - "zerocopy", ] [[package]] name = "aho-corasick" -version = "1.1.2" +version = "1.0.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b2969dcb958b36655471fc61f7e416fa76033bdd4bfed0678d8fee1e2d07a1f0" +checksum = "6748e8def348ed4d14996fa801f4122cd763fff530258cdc03f64b25f89d3a5a" dependencies = [ "memchr", ] @@ -84,50 
+83,51 @@ dependencies = [ [[package]] name = "anstream" -version = "0.6.5" +version = "0.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d664a92ecae85fd0a7392615844904654d1d5f5514837f471ddef4a057aba1b6" +checksum = "0ca84f3628370c59db74ee214b3263d58f9aadd9b4fe7e711fd87dc452b7f163" dependencies = [ "anstyle", "anstyle-parse", "anstyle-query", "anstyle-wincon", "colorchoice", + "is-terminal", "utf8parse", ] [[package]] name = "anstyle" -version = "1.0.4" +version = "1.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7079075b41f533b8c61d2a4d073c4676e1f8b249ff94a393b0595db304e0dd87" +checksum = "15c4c2c83f81532e5845a733998b6971faca23490340a418e9b72a3ec9de12ea" [[package]] name = "anstyle-parse" -version = "0.2.3" +version = "0.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c75ac65da39e5fe5ab759307499ddad880d724eed2f6ce5b5e8a26f4f387928c" +checksum = "938874ff5980b03a87c5524b3ae5b59cf99b1d6bc836848df7bc5ada9643c333" dependencies = [ "utf8parse", ] [[package]] name = "anstyle-query" -version = "1.0.2" +version = "1.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e28923312444cdd728e4738b3f9c9cac739500909bb3d3c94b43551b16517648" +checksum = "5ca11d4be1bab0c8bc8734a9aa7bf4ee8316d462a08c6ac5052f888fef5b494b" dependencies = [ - "windows-sys 0.52.0", + "windows-sys 0.48.0", ] [[package]] name = "anstyle-wincon" -version = "3.0.2" +version = "1.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1cd54b81ec8d6180e24654d0b371ad22fc3dd083b6ff8ba325b72e00c87660a7" +checksum = "c677ab05e09154296dd37acecd46420c17b9713e8366facafa8fc0885167cf4c" dependencies = [ "anstyle", - "windows-sys 0.52.0", + "windows-sys 0.48.0", ] [[package]] @@ -136,6 +136,12 @@ version = "1.0.75" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a4668cab20f66d8d020e1fbc0ebe47217433c1b6c8f2040faf858554e394ace6" 
+[[package]] +name = "array-init" +version = "2.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3d62b7694a562cdf5a74227903507c56ab2cc8bdd1f781ed5cb4cf9c9f810bfc" + [[package]] name = "arrayref" version = "0.3.7" @@ -154,18 +160,18 @@ version = "0.31.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8257238e2a3629ee5618502a75d1b91f8017c24638c75349fc8d2d80cf1f7c4c" dependencies = [ - "base64 0.21.5", - "bytes 1.5.0", + "base64 0.21.2", + "bytes 1.4.0", "futures", "http", "itoa", "memchr", - "nkeys 0.3.2", + "nkeys 0.3.1", "nuid", "once_cell", "rand 0.8.5", "regex", - "ring 0.16.20", + "ring", "rustls-native-certs", "rustls-pemfile", "rustls-webpki", @@ -174,7 +180,7 @@ dependencies = [ "serde_nanos", "serde_repr", "thiserror", - "time", + "time 0.3.27", "tokio", "tokio-retry", "tokio-rustls 0.24.1", @@ -184,13 +190,13 @@ dependencies = [ [[package]] name = "async-recursion" -version = "1.0.5" +version = "1.0.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5fd55a5ba1179988837d24ab4c7cc8ed6efdeff578ede0416b4225a5fca35bd0" +checksum = "0e97ce7de6cf12de5d7226c73f5ba9811622f4db3a5b91b55c53e987e5f91cba" dependencies = [ "proc-macro2", "quote", - "syn 2.0.40", + "syn 2.0.29", ] [[package]] @@ -212,18 +218,18 @@ checksum = "16e62a023e7c117e27523144c5d2459f4397fcc3cab0085af8e2224f643a0193" dependencies = [ "proc-macro2", "quote", - "syn 2.0.40", + "syn 2.0.29", ] [[package]] name = "async-trait" -version = "0.1.74" +version = "0.1.73" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a66537f1bb974b254c98ed142ff995236e81b9d0fe4db0575f46612cb15eb0f9" +checksum = "bc00ceb34980c03614e35a3a4e218276a0a824e911d07651cd0d858a51e8c0f0" dependencies = [ "proc-macro2", "quote", - "syn 2.0.40", + "syn 2.0.29", ] [[package]] @@ -232,7 +238,7 @@ version = "0.6.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"4057f2c32adbb2fc158e22fb38433c8e9bbf76b75a4732c7c0cbaf695fb65568" dependencies = [ - "bytes 1.5.0", + "bytes 1.4.0", "futures-sink", "futures-util", "memchr", @@ -262,7 +268,7 @@ checksum = "1fcf00bc6d5abb29b5f97e3c61a90b6d3caa12f3faf897d4a3e3607c050a35a7" dependencies = [ "http", "log", - "rustls 0.20.9", + "rustls 0.20.8", "serde", "serde_json", "url", @@ -305,15 +311,15 @@ dependencies = [ "rust-ini", "serde", "thiserror", - "time", + "time 0.3.27", "url", ] [[package]] name = "aws-region" -version = "0.25.4" +version = "0.25.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "42fed2b9fca70f2908268d057a607f2a906f47edbf856ea8587de9038d264e22" +checksum = "056557a61427d0e5ba29dd931031c8ffed4ee7a550e7cd55692a9d8deb0a9dba" dependencies = [ "thiserror", ] @@ -327,9 +333,9 @@ dependencies = [ "async-trait", "axum-core", "axum-macros", - "base64 0.21.5", + "base64 0.21.2", "bitflags 1.3.2", - "bytes 1.5.0", + "bytes 1.4.0", "futures-util", "http", "http-body", @@ -349,7 +355,7 @@ dependencies = [ "sha1", "sync_wrapper", "tokio", - "tokio-tungstenite 0.20.1", + "tokio-tungstenite 0.20.0", "tower", "tower-layer", "tower-service", @@ -362,7 +368,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "759fa577a247914fd3f7f76d62972792636412fbfd634cd452f6a385a74d2d2c" dependencies = [ "async-trait", - "bytes 1.5.0", + "bytes 1.4.0", "futures-util", "http", "http-body", @@ -381,7 +387,7 @@ dependencies = [ "heck 0.4.1", "proc-macro2", "quote", - "syn 2.0.40", + "syn 2.0.29", ] [[package]] @@ -426,9 +432,9 @@ checksum = "9e1b586273c5702936fe7b7d6896644d8be71e6314cfe09d3167c95f712589e8" [[package]] name = "base64" -version = "0.21.5" +version = "0.21.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "35636a1494ede3b646cc98f74f8e62c773a38a659ebc777a2cf26b9b74171df9" +checksum = "604178f6c5c21f02dc555784810edfb88d34ac2c73b2eae109655649ee73ce3d" [[package]] name = "base64ct" @@ -461,9 +467,9 @@ 
checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a" [[package]] name = "bitflags" -version = "2.4.1" +version = "2.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "327762f6e5a765692301e5bb513e0d9fef63be86bbc14528052b1cd3e6f03e07" +checksum = "b4682ae6287fcf752ecaabbfcc7b6f9b72aa33933dc23a554d853aea8eea8635" [[package]] name = "bitvec" @@ -479,15 +485,16 @@ dependencies = [ [[package]] name = "blake3" -version = "1.5.0" +version = "1.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0231f06152bf547e9c2b5194f247cd97aacf6dcd8b15d8e5ec0663f64580da87" +checksum = "199c42ab6972d92c9f8995f086273d25c42fc0f7b2a1fcefba465c1352d25ba5" dependencies = [ "arrayref", "arrayvec", "cc", "cfg-if", "constant_time_eq", + "digest 0.10.7", ] [[package]] @@ -514,9 +521,9 @@ version = "0.15.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f03db470b3c0213c47e978da93200259a1eb4dae2e5512cba9955e2b540a6fc6" dependencies = [ - "base64 0.21.5", + "base64 0.21.2", "bollard-stubs", - "bytes 1.5.0", + "bytes 1.4.0", "futures-core", "futures-util", "hex", @@ -545,38 +552,59 @@ checksum = "b58071e8fd9ec1e930efd28e3a90c1251015872a2ce49f81f36421b86466932e" dependencies = [ "serde", "serde_repr", - "serde_with 3.4.0", + "serde_with 3.3.0", ] [[package]] name = "borsh" -version = "1.2.1" +version = "0.10.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9897ef0f1bd2362169de6d7e436ea2237dc1085d7d1e4db75f4be34d86f309d1" +checksum = "4114279215a005bc675e386011e594e1d9b800918cea18fcadadcce864a2046b" dependencies = [ "borsh-derive", - "cfg_aliases", + "hashbrown 0.13.2", ] [[package]] name = "borsh-derive" -version = "1.2.1" +version = "0.10.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "478b41ff04256c5c8330f3dfdaaae2a5cc976a8e75088bafa4625b0d0208de8c" +checksum = 
"0754613691538d51f329cce9af41d7b7ca150bc973056f1156611489475f54f7" +dependencies = [ + "borsh-derive-internal", + "borsh-schema-derive-internal", + "proc-macro-crate 0.1.5", + "proc-macro2", + "syn 1.0.109", +] + +[[package]] +name = "borsh-derive-internal" +version = "0.10.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "afb438156919598d2c7bad7e1c0adf3d26ed3840dbc010db1a882a65583ca2fb" dependencies = [ - "once_cell", - "proc-macro-crate 2.0.0", "proc-macro2", "quote", - "syn 2.0.40", - "syn_derive", + "syn 1.0.109", +] + +[[package]] +name = "borsh-schema-derive-internal" +version = "0.10.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "634205cc43f74a1b9046ef87c4540ebda95696ec0f315024860cad7c5b0f5ccd" +dependencies = [ + "proc-macro2", + "quote", + "syn 1.0.109", ] [[package]] name = "bstr" -version = "1.8.0" +version = "1.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "542f33a8835a0884b006a0c3df3dadd99c0c3f296ed26c2fdc8028e01ad6230c" +checksum = "6798148dccfbff0fae41c7574d2fa8f1ef3492fba0face179de5d8d447d67b05" dependencies = [ "memchr", "serde", @@ -592,9 +620,9 @@ dependencies = [ [[package]] name = "bumpalo" -version = "3.14.0" +version = "3.13.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7f30e7476521f6f8af1a1c4c0b8cc94f0bee37d91763d0ca2665f299b6cd8aec" +checksum = "a3e2c3daef883ecc1b5d58c15adae93470a91d425f3532ba1695849656af3fc1" [[package]] name = "bytecheck" @@ -620,9 +648,9 @@ dependencies = [ [[package]] name = "byteorder" -version = "1.5.0" +version = "1.4.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1fd0f2584146f6f2ef48085050886acf353beff7305ebd1ae69500e27c67f64b" +checksum = "14c189c53d098945499cdfa7ecc63567cf3886b3332b312a5b4585d8d3a6a610" [[package]] name = "bytes" @@ -632,9 +660,9 @@ checksum = "0e4cec68f03f32e44924783795810fa50a7035d8c8ebe78580ad7e6c703fba38" [[package]] name = 
"bytes" -version = "1.5.0" +version = "1.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a2bd12c1caf447e69cd4528f47f94d203fd2582878ecb9e9465484c4148a8223" +checksum = "89b2fd2a0dcf38d7971e2194b6b6eebab45ae01067456a7fd93d5547a61b70be" dependencies = [ "serde", ] @@ -643,7 +671,7 @@ dependencies = [ name = "bytes-lines-codec" version = "0.1.0" dependencies = [ - "bytes 1.5.0", + "bytes 1.4.0", "futures", "serde", "tokio", @@ -667,25 +695,20 @@ version = "1.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" -[[package]] -name = "cfg_aliases" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fd16c4719339c4530435d38e511904438d07cce7950afa3718a84ac36c10e89e" - [[package]] name = "chrono" -version = "0.4.31" +version = "0.4.26" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7f2c685bad3eb3d45a01354cedb7d5faa66194d1d58ba6e267a8de788f79db38" +checksum = "ec837a71355b28f6556dbd569b37b3f363091c0bd4b2e735674521b4c5fd9bc5" dependencies = [ "android-tzdata", "iana-time-zone", "js-sys", "num-traits", "serde", + "time 0.1.45", "wasm-bindgen", - "windows-targets 0.48.5", + "winapi", ] [[package]] @@ -717,19 +740,20 @@ dependencies = [ [[package]] name = "clap" -version = "4.4.11" +version = "4.3.24" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bfaff671f6b22ca62406885ece523383b9b64022e341e53e009a62ebc47a45f2" +checksum = "fb690e81c7840c0d7aade59f242ea3b41b9bc27bcd5997890e7702ae4b32e487" dependencies = [ "clap_builder", "clap_derive", + "once_cell", ] [[package]] name = "clap_builder" -version = "4.4.11" +version = "4.3.24" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a216b506622bb1d316cd51328dce24e07bdff4a6128a47c7e7fad11878d5adbb" +checksum = "5ed2e96bc16d8d740f6f48d663eddf4b8a0983e79210fd55479b7bcd0a69860e" 
dependencies = [ "anstream", "anstyle", @@ -740,27 +764,27 @@ dependencies = [ [[package]] name = "clap_derive" -version = "4.4.7" +version = "4.3.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cf9804afaaf59a91e75b022a30fb7229a7901f60c755489cc61c9b423b836442" +checksum = "54a9bb5758fc5dfe728d1019941681eccaf0cf8a4189b692a0ee2f2ecf90a050" dependencies = [ "heck 0.4.1", "proc-macro2", "quote", - "syn 2.0.40", + "syn 2.0.29", ] [[package]] name = "clap_lex" -version = "0.6.0" +version = "0.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "702fc72eb24e5a1e48ce58027a675bc24edd52096d5397d4aea7c6dd9eca0bd1" +checksum = "2da6da31387c7e4ef160ffab6d5e7f00c42626fe39aea70a7b0f1773f7dd6c1b" [[package]] name = "coarsetime" -version = "0.1.33" +version = "0.1.23" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "71367d3385c716342014ad17e3d19f7788ae514885a1f4c24f500260fb365e1a" +checksum = "a90d114103adbc625300f346d4d09dfb4ab1c4a8df6868435dd903392ecf4354" dependencies = [ "libc", "once_cell", @@ -785,9 +809,9 @@ dependencies = [ [[package]] name = "color-spantrace" -version = "0.2.1" +version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cd6be1b2a7e382e2b98b43b2adcca6bb0e465af0bdd38123873ae61eb17a72c2" +checksum = "1ba75b3d9449ecdccb27ecbc479fdc0b87fa2dd43d2f8298f9bf0e59aacc8dce" dependencies = [ "once_cell", "owo-colors", @@ -803,32 +827,33 @@ checksum = "acbf1af155f9b9ef647e42cdc158db4b64a1b61f743629225fde6f3e0be2a7c7" [[package]] name = "colored" -version = "2.1.0" +version = "2.0.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cbf2150cce219b664a8a70df7a1f933836724b503f8a413af9365b4dcc4d90b8" +checksum = "2674ec482fbc38012cf31e6c42ba0177b431a0cb6f15fe40efa5aab1bda516f6" dependencies = [ + "is-terminal", "lazy_static", "windows-sys 0.48.0", ] [[package]] name = "comfy-table" -version = "7.1.0" +version = "7.0.1" 
source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7c64043d6c7b7a4c58e39e7efccfdea7b93d885a795d0c054a69dbbf4dd52686" +checksum = "9ab77dbd8adecaf3f0db40581631b995f312a8a5ae3aa9993188bb8f23d83a5b" dependencies = [ "console", - "crossterm 0.27.0", - "strum 0.25.0", - "strum_macros 0.25.3", + "crossterm 0.26.1", + "strum", + "strum_macros", "unicode-width", ] [[package]] name = "config" -version = "0.13.4" +version = "0.13.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "23738e11972c7643e4ec947840fc463b6a571afcd3e735bdfce7d03c7a784aca" +checksum = "d379af7f68bfc21714c6c7dea883544201741d2ce8274bb12fa54f89507f52a7" dependencies = [ "async-trait", "lazy_static", @@ -853,7 +878,7 @@ dependencies = [ "serde_yaml", "thiserror", "tokio", - "toml 0.7.8", + "toml 0.7.6", "tracing", ] @@ -983,9 +1008,9 @@ dependencies = [ [[package]] name = "core-foundation" -version = "0.9.4" +version = "0.9.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "91e195e091a93c46f7102ec7818a2aa394e1e1771c3ab4825963fa03e45afb8f" +checksum = "194a7a9e6de53fa55116934067c844d9d749312f75c6f6d0980e8c252f8c2146" dependencies = [ "core-foundation-sys", "libc", @@ -993,9 +1018,9 @@ dependencies = [ [[package]] name = "core-foundation-sys" -version = "0.8.6" +version = "0.8.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "06ea2b9bc92be3c2baa9334a323ebca2d6f074ff852cd1d7b11064035cd3868f" +checksum = "e496a50fda8aacccc86d7529e2c1e0892dbd0f898a6b5645b5561b89c3210efa" [[package]] name = "council" @@ -1027,9 +1052,9 @@ dependencies = [ [[package]] name = "cpufeatures" -version = "0.2.11" +version = "0.2.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ce420fe07aecd3e67c5f910618fe65e94158f6dcc0adf44e00d69ce2bdfe0fd0" +checksum = "a17b76ff3a4162b0b27f354a0c87015ddad39d35f9c0c36607a3bdd175dde1f1" dependencies = [ "libc", ] @@ -1053,30 +1078,6 @@ dependencies = [ 
"crossbeam-utils", ] -[[package]] -name = "crossbeam-deque" -version = "0.8.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ce6fd6f855243022dcecf8702fef0c297d4338e226845fe067f6341ad9fa0cef" -dependencies = [ - "cfg-if", - "crossbeam-epoch", - "crossbeam-utils", -] - -[[package]] -name = "crossbeam-epoch" -version = "0.9.15" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ae211234986c545741a7dc064309f67ee1e5ad243d0e48335adc0484d960bcc7" -dependencies = [ - "autocfg", - "cfg-if", - "crossbeam-utils", - "memoffset 0.9.0", - "scopeguard", -] - [[package]] name = "crossbeam-queue" version = "0.3.8" @@ -1114,14 +1115,17 @@ dependencies = [ [[package]] name = "crossterm" -version = "0.27.0" +version = "0.26.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f476fe445d41c9e991fd07515a6f463074b782242ccf4a5b7b1d1012e70824df" +checksum = "a84cda67535339806297f1b331d6dd6320470d2a0fe65381e79ee9e156dd3d13" dependencies = [ - "bitflags 2.4.1", + "bitflags 1.3.2", "crossterm_winapi", "libc", + "mio", "parking_lot 0.12.1", + "signal-hook", + "signal-hook-mio", "winapi", ] @@ -1136,9 +1140,9 @@ dependencies = [ [[package]] name = "crypto-bigint" -version = "0.5.5" +version = "0.5.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0dc92fb57ca44df6db8059111ab3af99a63d5d0f8375d9972e319a379c6bab76" +checksum = "cf4c2f4e1afd912bc40bfd6fed5d9dc1f288e0ba01bfcc835cc5bc3eb13efe15" dependencies = [ "generic-array", "rand_core 0.6.4", @@ -1175,33 +1179,6 @@ dependencies = [ "zeroize", ] -[[package]] -name = "curve25519-dalek" -version = "4.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e89b8c6a2e4b1f45971ad09761aafb85514a84744b67a95e32c3cc1352d1f65c" -dependencies = [ - "cfg-if", - "cpufeatures", - "curve25519-dalek-derive", - "digest 0.10.7", - "fiat-crypto", - "platforms", - "rustc_version", - "subtle", -] - -[[package]] -name = 
"curve25519-dalek-derive" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f46882e17999c6cc590af592290432be3bce0428cb0d5f8b6715e4dc7b383eb3" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.40", -] - [[package]] name = "cyclone" version = "0.1.0" @@ -1218,7 +1195,7 @@ name = "cyclone-client" version = "0.1.0" dependencies = [ "async-trait", - "base64 0.21.5", + "base64 0.21.2", "buck2-resources", "cyclone-core", "cyclone-server", @@ -1245,8 +1222,8 @@ dependencies = [ name = "cyclone-core" version = "0.1.0" dependencies = [ - "base64 0.21.5", - "nix 0.26.4", + "base64 0.21.2", + "nix 0.26.2", "remain", "serde", "serde_json", @@ -1265,7 +1242,7 @@ version = "0.1.0" dependencies = [ "async-trait", "axum", - "base64 0.21.5", + "base64 0.21.2", "bytes-lines-codec", "chrono", "cyclone-core", @@ -1295,7 +1272,7 @@ version = "0.1.0" dependencies = [ "async-recursion", "async-trait", - "base64 0.21.5", + "base64 0.21.2", "blake3", "buck2-resources", "chrono", @@ -1311,7 +1288,7 @@ dependencies = [ "futures", "hex", "iftree", - "itertools 0.10.5", + "itertools", "jwt-simple", "lazy_static", "nats-subscriber", @@ -1331,7 +1308,7 @@ dependencies = [ "serde", "serde-aux", "serde_json", - "serde_with 3.4.0", + "serde_with 3.3.0", "si-cbor", "si-crypto", "si-data-nats", @@ -1339,7 +1316,7 @@ dependencies = [ "si-hash", "si-pkg", "sodiumoxide", - "strum 0.24.1", + "strum", "telemetry", "tempfile", "thiserror", @@ -1431,7 +1408,7 @@ dependencies = [ "proc-macro2", "quote", "strsim", - "syn 2.0.40", + "syn 2.0.29", ] [[package]] @@ -1453,7 +1430,7 @@ checksum = "836a9bbc7ad63342d6d6e7b815ccab164bc77a2d95d84bc3117a8c0d5c98e2d5" dependencies = [ "darling_core 0.20.3", "quote", - "syn 2.0.40", + "syn 2.0.29", ] [[package]] @@ -1465,27 +1442,27 @@ dependencies = [ "anyhow", "html-escape", "nom", - "ordered-float 2.10.1", + "ordered-float 2.10.0", ] [[package]] name = "dashmap" -version = "5.5.3" +version = "5.5.1" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "978747c1d849a7d2ee5e8adc0159961c48fb7e5db2f06af6723b80123bb53856" +checksum = "edd72493923899c6f10c641bdbdeddc7183d6396641d99c1a0d1597f37f92e28" dependencies = [ "cfg-if", - "hashbrown 0.14.3", + "hashbrown 0.14.0", "lock_api", "once_cell", - "parking_lot_core 0.9.9", + "parking_lot_core 0.9.8", ] [[package]] name = "data-encoding" -version = "2.5.0" +version = "2.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7e962a19be5cfc3f3bf6dd8f61eb50107f356ad6270fbb3ed41476571db78be5" +checksum = "c2e66c9d817f1720209181c316d28635c050fa304f9c79e47a520882661b7308" [[package]] name = "deadpool" @@ -1512,7 +1489,7 @@ dependencies = [ "deadpool", "derive_builder", "futures", - "nix 0.26.4", + "nix 0.26.2", "rand 0.8.5", "remain", "serde", @@ -1541,9 +1518,9 @@ dependencies = [ [[package]] name = "deadpool-runtime" -version = "0.1.3" +version = "0.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "63dfa964fe2a66f3fde91fc70b267fe193d822c7e603e2a675a49a7f46ad3f49" +checksum = "eaa37046cc0f6c3cc6090fbdbf73ef0b8ef4cfcc37f6befc0020f63e8cf121e1" dependencies = [ "tokio", ] @@ -1581,11 +1558,10 @@ dependencies = [ [[package]] name = "deranged" -version = "0.3.10" +version = "0.3.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8eb30d70a07a3b04884d2677f06bec33509dc67ca60d92949e5535352d3191dc" +checksum = "f2696e8a945f658fd14dc3b87242e6b80cd0f36ff04ea560fa39082368847946" dependencies = [ - "powerfmt", "serde", ] @@ -1715,7 +1691,7 @@ dependencies = [ "asynchronous-codec", "base64 0.13.1", "byteorder", - "bytes 1.5.0", + "bytes 1.4.0", "chrono", "containers-api 0.9.0", "docker-api-stubs", @@ -1750,22 +1726,22 @@ checksum = "1aaf95b3e5c8f23aa320147307562d361db0ae0d51242340f558153b4eb2439b" [[package]] name = "dyn-clone" -version = "1.0.16" +version = "1.0.13" source = "registry+https://github.com/rust-lang/crates.io-index" 
-checksum = "545b22097d44f8a9581187cdf93de7a71e4722bf51200cfaba810865b49a495d" +checksum = "bbfc4744c1b8f2a09adc0e55242f60b1af195d88596bd8700be74418c056c555" [[package]] name = "ecdsa" -version = "0.16.9" +version = "0.16.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ee27f32b5c5292967d2d4a9d7f1e0b0aed2c15daded5a60300e4abb9d8020bca" +checksum = "a4b1e0c257a9e9f25f90ff76d7a68360ed497ee519c8e428d1825ef0000799d4" dependencies = [ "der 0.7.8", "digest 0.10.7", "elliptic-curve", "rfc6979", - "signature 2.2.0", - "spki 0.7.3", + "signature 2.1.0", + "spki 0.7.2", ] [[package]] @@ -1777,15 +1753,6 @@ dependencies = [ "signature 1.6.4", ] -[[package]] -name = "ed25519" -version = "2.2.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "115531babc129696a58c64a4fef0a8bf9e9698629fb97e9e40767d235cfbcd53" -dependencies = [ - "signature 2.2.0", -] - [[package]] name = "ed25519-compact" version = "2.0.4" @@ -1793,7 +1760,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6a3d382e8464107391c8706b4c14b087808ecb909f6c15c34114bc42e53a9e4c" dependencies = [ "ct-codecs", - "getrandom 0.2.11", + "getrandom 0.2.10", ] [[package]] @@ -1802,30 +1769,17 @@ version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c762bae6dcaf24c4c84667b8579785430908723d5c889f469d76a41d59cc7a9d" dependencies = [ - "curve25519-dalek 3.2.0", - "ed25519 1.5.3", + "curve25519-dalek", + "ed25519", "sha2 0.9.9", "zeroize", ] -[[package]] -name = "ed25519-dalek" -version = "2.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1f628eaec48bfd21b865dc2950cfa014450c01d2fa2b69a86c2fd5844ec523c0" -dependencies = [ - "curve25519-dalek 4.1.1", - "ed25519 2.2.3", - "sha2 0.10.8", - "signature 2.2.0", - "subtle", -] - [[package]] name = "educe" -version = "0.4.23" +version = "0.4.22" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"0f0042ff8246a363dbe77d2ceedb073339e85a804b9a47636c6e016a9a32c05f" +checksum = "079044df30bb07de7d846d41a184c4b00e66ebdac93ee459253474f3a47e50ae" dependencies = [ "enum-ordinalize", "proc-macro2", @@ -1841,9 +1795,9 @@ checksum = "a26ae43d7bcc3b814de94796a5e736d4029efb0ee900c12e2d54c993ad1a1e07" [[package]] name = "elliptic-curve" -version = "0.13.8" +version = "0.13.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b5e6043086bf7973472e0c7dff2142ea0b680d30e18d9cc40f267efbf222bd47" +checksum = "968405c8fdc9b3bf4df0a6638858cc0b52462836ab6b1c87377785dd09cf1c0b" dependencies = [ "base16ct", "crypto-bigint", @@ -1877,15 +1831,15 @@ dependencies = [ [[package]] name = "enum-ordinalize" -version = "3.1.15" +version = "3.1.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1bf1fa3f06bbff1ea5b1a9c7b14aa992a39657db60a2759457328d7e058f49ee" +checksum = "e4f76552f53cefc9a7f64987c3701b99d982f7690606fd67de1d09712fbf52f1" dependencies = [ "num-bigint", "num-traits", "proc-macro2", "quote", - "syn 2.0.40", + "syn 2.0.29", ] [[package]] @@ -1896,12 +1850,23 @@ checksum = "5443807d6dff69373d433ab9ef5378ad8df50ca6298caf15de6e52e24aaf54d5" [[package]] name = "errno" -version = "0.3.8" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6b30f669a7961ef1631673d2766cc92f52d64f7ef354d4fe0ddfd30ed52f0f4f" +dependencies = [ + "errno-dragonfly", + "libc", + "windows-sys 0.48.0", +] + +[[package]] +name = "errno-dragonfly" +version = "0.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a258e46cdc063eb8519c00b9fc845fc47bcfca4130e2f08e88665ceda8474245" +checksum = "aa68f1b12764fab894d2755d2518754e71b4fd80ecfb822714a1206c2aab39bf" dependencies = [ + "cc", "libc", - "windows-sys 0.52.0", ] [[package]] @@ -1912,9 +1877,9 @@ checksum = "0206175f82b8d6bf6652ff7d71a1e27fd2e4efde587fd368662814d6ec1d9ce0" [[package]] name = "eyre" -version = "0.6.10" +version = 
"0.6.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8bbb8258be8305fb0237d7b295f47bb24ff1b136a535f473baf40e70468515aa" +checksum = "4c2b6b5a29c02cdc822728b7d7b8ae1bab3e3b05d44522770ddd49722eeac7eb" dependencies = [ "indenter", "once_cell", @@ -1937,9 +1902,9 @@ dependencies = [ [[package]] name = "fastrand" -version = "2.0.1" +version = "2.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "25cbce373ec4653f1a01a31e8a5e5ec0c622dc27ff9c4e6606eefef5cbbed4a5" +checksum = "6999dc1837253364c2ebb0704ba97994bd874e8f195d665c50b7548f6ea92764" [[package]] name = "ff" @@ -1951,30 +1916,18 @@ dependencies = [ "subtle", ] -[[package]] -name = "fiat-crypto" -version = "0.2.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "27573eac26f4dd11e2b1916c3fe1baa56407c83c71a773a8ba17ec0bca03b6b7" - [[package]] name = "filetime" -version = "0.2.23" +version = "0.2.22" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1ee447700ac8aa0b2f2bd7bc4462ad686ba06baa6727ac149a2d6277f0d240fd" +checksum = "d4029edd3e734da6fe05b6cd7bd2960760a616bd2ddd0d59a0124746d6272af0" dependencies = [ "cfg-if", "libc", - "redox_syscall 0.4.1", - "windows-sys 0.52.0", + "redox_syscall 0.3.5", + "windows-sys 0.48.0", ] -[[package]] -name = "finl_unicode" -version = "1.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8fcfdc7a0362c9f4444381a9e697c79d435fe65b52a37466fc2c1184cee9edc6" - [[package]] name = "fixedbitset" version = "0.4.2" @@ -1983,9 +1936,9 @@ checksum = "0ce7134b9999ecaf8bcd65542e436736ef32ddca1b3e06094cb6ec5755203b80" [[package]] name = "flate2" -version = "1.0.28" +version = "1.0.27" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "46303f565772937ffe1d394a4fac6f411c6013172fadde9dcdb1e147a086940e" +checksum = "c6c98ee8095e9d1dcbf2fcc6d95acccb90d1c81db1e44725c6a984b1dbdfb010" dependencies = [ "crc32fast", "miniz_oxide", 
@@ -1999,9 +1952,9 @@ checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1" [[package]] name = "form_urlencoded" -version = "1.2.1" +version = "1.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e13624c2627564efccf4934284bdd98cbaa14e79b0b5a141218e507b3a823456" +checksum = "a62bc1cf6f830c2ec14a513a9fb124d0a213a629668a4186f329db21fe045652" dependencies = [ "percent-encoding", ] @@ -2014,9 +1967,9 @@ checksum = "e6d5a32815ae3f33302d95fdcb2ce17862f8c65363dcfd29360480ba1001fc9c" [[package]] name = "futures" -version = "0.3.29" +version = "0.3.28" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "da0290714b38af9b4a7b094b8a37086d1b4e61f2df9122c3cad2577669145335" +checksum = "23342abe12aba583913b2e62f22225ff9c950774065e4bfb61a19cd9770fec40" dependencies = [ "futures-channel", "futures-core", @@ -2045,9 +1998,9 @@ checksum = "eb1d22c66e66d9d72e1758f0bd7d4fd0bee04cad842ee34587d68c07e45d088c" [[package]] name = "futures-executor" -version = "0.3.29" +version = "0.3.28" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0f4fb8693db0cf099eadcca0efe2a5a22e4550f98ed16aba6c48700da29597bc" +checksum = "ccecee823288125bd88b4d7f565c9e58e41858e47ab72e8ea2d64e93624386e0" dependencies = [ "futures-core", "futures-task", @@ -2094,7 +2047,7 @@ checksum = "53b153fd91e4b0147f4aced87be237c98248656bb01050b96bf3ee89220a8ddb" dependencies = [ "proc-macro2", "quote", - "syn 2.0.40", + "syn 2.0.29", ] [[package]] @@ -2165,9 +2118,9 @@ dependencies = [ [[package]] name = "getrandom" -version = "0.2.11" +version = "0.2.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fe9006bed769170c11f845cf00c7c1e9092aeb3f268e007c3e760ac68008070f" +checksum = "be4136b2a15dd319360be1c07d9933517ccf0be8f16bf62a3bee4f0d618df427" dependencies = [ "cfg-if", "libc", @@ -2176,21 +2129,21 @@ dependencies = [ [[package]] name = "gimli" -version = "0.28.1" +version = "0.28.0" 
source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4271d37baee1b8c7e4b708028c57d816cf9d2434acb33a549475f78c181f6253" +checksum = "6fb8d784f27acf97159b40fc4db5ecd8aa23b9ad5ef69cdd136d3bc80665f0c0" [[package]] name = "globset" -version = "0.4.14" +version = "0.4.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "57da3b9b5b85bd66f31093f8c408b90a74431672542466497dcbdfdc02034be1" +checksum = "759c97c1e17c55525b57192c06a267cda0ac5210b222d6b82189a2338fa1c13d" dependencies = [ "aho-corasick", "bstr", + "fnv", "log", - "regex-automata 0.4.3", - "regex-syntax 0.8.2", + "regex", ] [[package]] @@ -2206,17 +2159,17 @@ dependencies = [ [[package]] name = "h2" -version = "0.3.22" +version = "0.3.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4d6250322ef6e60f93f9a2162799302cd6f68f79f6e5d85c8c16f14d1d958178" +checksum = "91fc23aa11be92976ef4729127f1a74adf36d8436f7816b185d18df956790833" dependencies = [ - "bytes 1.5.0", + "bytes 1.4.0", "fnv", "futures-core", "futures-sink", "futures-util", "http", - "indexmap 2.1.0", + "indexmap 1.9.3", "slab", "tokio", "tokio-util", @@ -2235,26 +2188,35 @@ version = "0.12.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8a9ee70c43aaf417c914396645a0fa852624801b24ebb7ae78fe8272889ac888" dependencies = [ - "ahash 0.7.7", + "ahash 0.7.6", ] [[package]] name = "hashbrown" -version = "0.14.3" +version = "0.13.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "43a3c133739dddd0d2990f9a4bdf8eb4b21ef50e4851ca85ab661199821d510e" +dependencies = [ + "ahash 0.8.3", +] + +[[package]] +name = "hashbrown" +version = "0.14.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "290f1a1d9242c78d09ce40a5e87e7554ee637af1351968159f4952f028f75604" +checksum = "2c6201b9ff9fd90a5a3bac2e56a830d0caa509576f0e503818ee82c181b3437a" dependencies = [ - "ahash 0.8.6", + "ahash 0.8.3", "allocator-api2", 
] [[package]] name = "hashlink" -version = "0.8.4" +version = "0.8.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e8094feaf31ff591f651a2664fb9cfd92bba7a60ce3197265e9482ebe753c8f7" +checksum = "312f66718a2d7789ffef4f4b7b213138ed9f1eb3aa1d0d82fc99f88fb3ffd26f" dependencies = [ - "hashbrown 0.14.3", + "hashbrown 0.14.0", ] [[package]] @@ -2277,9 +2239,9 @@ dependencies = [ [[package]] name = "hermit-abi" -version = "0.3.3" +version = "0.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d77f7ec81a6d05a3abb01ab6eb7590f6083d08449fe5a1c8b1e620283546ccb7" +checksum = "443144c8cdadd93ebf52ddb4056d257f5b52c04d3c804e657d19eb73fc33668b" [[package]] name = "hex" @@ -2329,15 +2291,6 @@ dependencies = [ "digest 0.10.7", ] -[[package]] -name = "home" -version = "0.5.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5444c27eef6923071f7ebcc33e3444508466a76f7a2b93da00ed6e19f30c1ddb" -dependencies = [ - "windows-sys 0.48.0", -] - [[package]] name = "html-escape" version = "0.2.13" @@ -2349,22 +2302,22 @@ dependencies = [ [[package]] name = "http" -version = "0.2.11" +version = "0.2.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8947b1a6fad4393052c7ba1f4cd97bed3e953a95c79c92ad9b051a04611d9fbb" +checksum = "bd6effc99afb63425aff9b05836f029929e345a6148a14b7ecd5ab67af944482" dependencies = [ - "bytes 1.5.0", + "bytes 1.4.0", "fnv", "itoa", ] [[package]] name = "http-body" -version = "0.4.6" +version = "0.4.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7ceab25649e9960c0311ea418d17bee82c0dcec1bd053b5f9a66e265a693bed2" +checksum = "d5f38f16d184e36f2408a55281cd658ecbd3ca05cce6d6510a176eca393e26d1" dependencies = [ - "bytes 1.5.0", + "bytes 1.4.0", "http", "pin-project-lite", ] @@ -2393,7 +2346,7 @@ version = "0.14.27" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"ffb1cfd654a8219eaef89881fdb3bb3b1cdc5fa75ded05d6933b2b382e395468" dependencies = [ - "bytes 1.5.0", + "bytes 1.4.0", "futures-channel", "futures-core", "futures-util", @@ -2404,7 +2357,7 @@ dependencies = [ "httpdate", "itoa", "pin-project-lite", - "socket2 0.4.10", + "socket2 0.4.9", "tokio", "tower-service", "tracing", @@ -2413,14 +2366,14 @@ dependencies = [ [[package]] name = "hyper-rustls" -version = "0.24.2" +version = "0.24.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ec3efd23720e2049821a693cbc7e65ea87c72f1c58ff2f9522ff332b1491e590" +checksum = "8d78e1e73ec14cf7375674f74d7dde185c8206fd9dea6fb6295e8a98098aaa97" dependencies = [ "futures-util", "http", "hyper", - "rustls 0.21.10", + "rustls 0.21.6", "tokio", "tokio-rustls 0.24.1", ] @@ -2451,16 +2404,16 @@ dependencies = [ [[package]] name = "iana-time-zone" -version = "0.1.58" +version = "0.1.57" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8326b86b6cff230b97d0d312a6c40a60726df3332e721f72a1b035f451663b20" +checksum = "2fad5b825842d2b38bd206f3e81d6957625fd7f0a361e345c30e01a0ae2dd613" dependencies = [ "android_system_properties", "core-foundation-sys", "iana-time-zone-haiku", "js-sys", "wasm-bindgen", - "windows-core", + "windows", ] [[package]] @@ -2480,9 +2433,9 @@ checksum = "b9e0384b61958566e926dc50660321d12159025e767c18e043daf26b70104c39" [[package]] name = "idna" -version = "0.5.0" +version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "634d9b1461af396cad843f47fdba5597a4f9e6ddd4bfb6ff5d85028c25cb12f6" +checksum = "7d20d6b07bfbc108882d88ed8e37d39636dcc260e15e30c45e6ba089610b917c" dependencies = [ "unicode-bidi", "unicode-normalization", @@ -2499,22 +2452,23 @@ dependencies = [ "quote", "serde", "syn 1.0.109", - "toml 0.7.8", + "toml 0.7.6", "unicode-xid", ] [[package]] name = "ignore" -version = "0.4.21" +version = "0.4.20" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum 
= "747ad1b4ae841a78e8aba0d63adbfbeaea26b517b63705d47856b73015d27060" +checksum = "dbe7873dab538a9a44ad79ede1faf5f30d49f9a5c883ddbab48bce81b64b7492" dependencies = [ - "crossbeam-deque", "globset", + "lazy_static", "log", "memchr", - "regex-automata 0.4.3", + "regex", "same-file", + "thread_local", "walkdir", "winapi-util", ] @@ -2538,20 +2492,20 @@ dependencies = [ [[package]] name = "indexmap" -version = "2.1.0" +version = "2.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d530e1a18b1cb4c484e6e34556a0d948706958449fca0cab753d649f2bce3d1f" +checksum = "d5477fe2230a79769d8dc68e0eabf5437907c0457a5614a9e8dddb67f65eb65d" dependencies = [ "equivalent", - "hashbrown 0.14.3", + "hashbrown 0.14.0", "serde", ] [[package]] name = "indicatif" -version = "0.17.7" +version = "0.17.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fb28741c9db9a713d93deb3bb9515c20788cef5815265bee4980e87bde7e0f25" +checksum = "0b297dc40733f23a0e52728a58fa9489a5b7638a324932de16b41adc3ef80730" dependencies = [ "console", "instant", @@ -2562,9 +2516,9 @@ dependencies = [ [[package]] name = "indoc" -version = "2.0.4" +version = "2.0.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1e186cfbae8084e513daff4240b4797e342f988cecda4fb6c939150f96315fd8" +checksum = "2c785eefb63ebd0e33416dfcb8d6da0bf27ce752843a45632a67bf10d4d4b5c4" [[package]] name = "inquire" @@ -2591,11 +2545,22 @@ dependencies = [ "cfg-if", ] +[[package]] +name = "io-lifetimes" +version = "1.0.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "eae7b9aee968036d54dce06cebaefd919e4472e753296daccd6d344e3e2df0c2" +dependencies = [ + "hermit-abi", + "libc", + "windows-sys 0.48.0", +] + [[package]] name = "ipnet" -version = "2.9.0" +version = "2.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8f518f335dce6725a761382244631d86cf0ccb2863413590b31338feb467f9c3" +checksum = 
"28b29a3cd74f0f4598934efe3aeba42bae0eb4680554128851ebbecb02af14e6" [[package]] name = "is-docker" @@ -2606,6 +2571,17 @@ dependencies = [ "once_cell", ] +[[package]] +name = "is-terminal" +version = "0.4.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cb0889898416213fab133e1d33a0e5858a48177452750691bde3666d0fdbaf8b" +dependencies = [ + "hermit-abi", + "rustix 0.38.8", + "windows-sys 0.48.0", +] + [[package]] name = "is-wsl" version = "0.4.0" @@ -2625,35 +2601,26 @@ dependencies = [ "either", ] -[[package]] -name = "itertools" -version = "0.12.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "25db6b064527c5d482d0423354fcd07a89a2dfe07b67892e62411946db7f07b0" -dependencies = [ - "either", -] - [[package]] name = "itoa" -version = "1.0.10" +version = "1.0.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b1a46d1a171d865aa5f83f92695765caa047a9b4cbae2cbf37dbd613a793fd4c" +checksum = "af150ab688ff2122fcef229be89cb50dd66af9e01a4ff320cc137eecc9bacc38" [[package]] name = "js-sys" -version = "0.3.66" +version = "0.3.64" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cee9c64da59eae3b50095c18d3e74f8b73c0b86d2792824ff01bbce68ba229ca" +checksum = "c5f195fe497f702db0f318b07fdd68edb16955aed830df8363d837542f8f935a" dependencies = [ "wasm-bindgen", ] [[package]] name = "jwt-simple" -version = "0.11.9" +version = "0.11.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "357892bb32159d763abdea50733fadcb9a8e1c319a9aa77592db8555d05af83e" +checksum = "733741e7bcd1532b56c9ba6c698c069f274f3782ad956f0d2c7f31650cedaa1b" dependencies = [ "anyhow", "binstring", @@ -2677,16 +2644,16 @@ dependencies = [ [[package]] name = "k256" -version = "0.13.2" +version = "0.13.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3f01b677d82ef7a676aa37e099defd83a28e15687112cafdd112d60236b6115b" +checksum = 
"cadb76004ed8e97623117f3df85b17aaa6626ab0b0831e6573f104df16cd1bcc" dependencies = [ "cfg-if", "ecdsa", "elliptic-curve", "once_cell", - "sha2 0.10.8", - "signature 2.2.0", + "sha2 0.10.7", + "signature 2.1.0", ] [[package]] @@ -2709,26 +2676,15 @@ dependencies = [ [[package]] name = "libc" -version = "0.2.151" +version = "0.2.147" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "302d7ab3130588088d277783b1e2d2e10c9e9e4a16dd9050e6ec93fb3e7048f4" +checksum = "b4668fb0ea861c1df094127ac5f1da3409a82116a4ba74fca2e58ef927159bb3" [[package]] name = "libm" -version = "0.2.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4ec2a862134d2a7d32d7983ddcdd1c4923530833c9f2ea1a44fc5fa473989058" - -[[package]] -name = "libredox" -version = "0.0.1" +version = "0.2.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "85c833ca1e66078851dba29046874e38f08b2c883700aa29a03ddd3b23814ee8" -dependencies = [ - "bitflags 2.4.1", - "libc", - "redox_syscall 0.4.1", -] +checksum = "f7012b1bbb0719e1097c47611d3898568c546d597c2e74d66f6087edd5233ff4" [[package]] name = "libsodium-sys" @@ -2750,15 +2706,21 @@ checksum = "0717cef1bc8b636c6e1c1bbdefc09e6322da8a9321966e8928ef80d20f7f770f" [[package]] name = "linux-raw-sys" -version = "0.4.12" +version = "0.3.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ef53942eb7bf7ff43a617b3e2c1c4a5ecf5944a7c1bc12d7ee39bbb15e5c1519" + +[[package]] +name = "linux-raw-sys" +version = "0.4.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c4cd1a83af159aa67994778be9070f0ae1bd732942279cabb14f86f986a21456" +checksum = "57bcfdad1b858c2db7c38303a6d2ad4dfaf5eb53dfeb0910128b2c26d6158503" [[package]] name = "lock_api" -version = "0.4.11" +version = "0.4.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3c168f8615b12bc01f9c17e2eb0cc07dcae1940121185446edc3744920e8ef45" +checksum = 
"c1cc9717a20b1bb222f333e6a92fd32f7d8a18ddc5a3191a11af45dcbf4dcd16" dependencies = [ "autocfg", "scopeguard", @@ -2781,9 +2743,9 @@ dependencies = [ [[package]] name = "matchit" -version = "0.7.3" +version = "0.7.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0e7465ac9959cc2b1404e8e2367b43684a6d13790fe23056cc8c6c5a6b7bcb94" +checksum = "ed1202b2a6f884ae56f04cff409ab315c5ce26b5e58d7412e484f01fd52f52ef" [[package]] name = "maybe-async" @@ -2798,11 +2760,10 @@ dependencies = [ [[package]] name = "md-5" -version = "0.10.6" +version = "0.10.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d89e7ee0cfbedfc4da3340218492196241d89eefb6dab27de5df917a6d2e78cf" +checksum = "6365506850d44bff6e2fbcb5176cf63650e48bd45ef2fe2665ae1570e0f4b9ca" dependencies = [ - "cfg-if", "digest 0.10.7", ] @@ -2814,9 +2775,9 @@ checksum = "490cc448043f947bae3cbee9c203358d62dbee0db12107a74be5c30ccfd09771" [[package]] name = "memchr" -version = "2.6.4" +version = "2.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f665ee40bc4a3c5590afb1e9677db74a508659dfd71e126420da8274909a0167" +checksum = "2dffe52ecf27772e601905b7522cb4ef790d2cc203488bbd0e2fe85fcb74566d" [[package]] name = "memmap2" @@ -2846,15 +2807,6 @@ dependencies = [ "autocfg", ] -[[package]] -name = "memoffset" -version = "0.9.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5a634b1c61a95585bd15607c6ab0c4e5b226e695ff2800ba0cdccddf208c406c" -dependencies = [ - "autocfg", -] - [[package]] name = "mime" version = "0.3.17" @@ -2888,9 +2840,9 @@ dependencies = [ [[package]] name = "mio" -version = "0.8.10" +version = "0.8.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8f3d0b296e374a4e6f3c7b0a1f5a51d748a0d34c85e7dc48fc3fa9a87657fe09" +checksum = "927a765cd3fc26206e66b296465fa9d3e5ab003e651c1b3c060e7956d96b19d2" dependencies = [ "libc", "log", @@ -2932,7 +2884,7 @@ version = "0.1.0" 
dependencies = [ "auth-api-client", "axum", - "base64 0.21.5", + "base64 0.21.2", "buck2-resources", "chrono", "derive_builder", @@ -2969,7 +2921,7 @@ version = "2.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "01acbdc23469fd8fe07ab135923371d5f5a422fbf9c522158677c8eb15bc51c2" dependencies = [ - "bytes 1.5.0", + "bytes 1.4.0", "encoding_rs", "futures-util", "http", @@ -3035,15 +2987,16 @@ dependencies = [ [[package]] name = "nix" -version = "0.26.4" +version = "0.26.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "598beaf3cc6fdd9a5dfb1630c2800c7acd31df7aaf0f565796fba2b53ca1af1b" +checksum = "bfdda3d196821d6af13126e40375cdf7da646a96114af134d5f417a9a1dc8e1a" dependencies = [ "bitflags 1.3.2", "cfg-if", "libc", "memoffset 0.7.1", "pin-utils", + "static_assertions", ] [[package]] @@ -3054,27 +3007,27 @@ checksum = "0e66a7cd1358277b2a6f77078e70aea7315ff2f20db969cc61153103ec162594" dependencies = [ "byteorder", "data-encoding", - "ed25519-dalek 1.0.1", - "getrandom 0.2.11", + "ed25519-dalek", + "getrandom 0.2.10", "log", "rand 0.8.5", - "signatory 0.23.2", + "signatory", ] [[package]] name = "nkeys" -version = "0.3.2" +version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "aad178aad32087b19042ee36dfd450b73f5f934fbfb058b59b198684dfec4c47" +checksum = "3e9261eb915c785ea65708bc45ef43507ea46914e1a73f1412d1a38aba967c8e" dependencies = [ "byteorder", "data-encoding", - "ed25519 2.2.3", - "ed25519-dalek 2.1.0", - "getrandom 0.2.11", + "ed25519", + "ed25519-dalek", + "getrandom 0.2.10", "log", "rand 0.8.5", - "signatory 0.27.1", + "signatory", ] [[package]] @@ -3158,9 +3111,9 @@ dependencies = [ [[package]] name = "num-traits" -version = "0.2.17" +version = "0.2.16" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "39e3200413f237f41ab11ad6d161bc7239c84dcb631773ccd7de3dfe4b5c267c" +checksum = 
"f30b0abd723be7e2ffca1272140fac1a2f084c77ec3e123c192b66af1ee9e6c2" dependencies = [ "autocfg", "libm", @@ -3194,7 +3147,7 @@ dependencies = [ "proc-macro-crate 1.3.1", "proc-macro2", "quote", - "syn 2.0.40", + "syn 2.0.29", ] [[package]] @@ -3205,9 +3158,9 @@ checksum = "830b246a0e5f20af87141b25c173cd1b609bd7779a4617d6ec582abaf90870f3" [[package]] name = "object" -version = "0.32.1" +version = "0.32.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9cf5f9dd3933bd50a9e1f149ec995f39ae2c496d31fd772c1fd45ebc27e902b0" +checksum = "77ac5bbd07aea88c60a577a1ce218075ffd59208b2d7ca97adf9bfc5aeb21ebe" dependencies = [ "memchr", ] @@ -3220,7 +3173,7 @@ dependencies = [ "remain", "serde", "si-hash", - "strum 0.24.1", + "strum", "tar", "tempfile", "thiserror", @@ -3231,9 +3184,9 @@ dependencies = [ [[package]] name = "once_cell" -version = "1.19.0" +version = "1.18.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3fdb12b2476b595f9358c5161aa467c2438859caa136dec86c26fdd2efe17b92" +checksum = "dd8b5dd2ae5ed71462c540258bedcb51965123ad7e7ccf4b9a8cafaa4a63576d" [[package]] name = "opaque-debug" @@ -3243,9 +3196,9 @@ checksum = "624a8340c38c1b80fd549087862da4ba43e08858af025b236e509b6649fc13d5" [[package]] name = "open" -version = "5.0.1" +version = "5.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "90878fb664448b54c4e592455ad02831e23a3f7e157374a8b95654731aac7349" +checksum = "cfabf1927dce4d6fdf563d63328a0a506101ced3ec780ca2135747336c98cef8" dependencies = [ "is-wsl", "libc", @@ -3355,9 +3308,9 @@ checksum = "04744f49eae99ab78e0d5c0b603ab218f515ea8cfe5a456d7629ad883a3b6e7d" [[package]] name = "ordered-float" -version = "2.10.1" +version = "2.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "68f19d67e5a2795c94e73e0bb1cc1a7edeb2e28efd39e2e1c9b7a40c1108b11c" +checksum = "7940cf2ca942593318d07fcf2596cdca60a85c9e7fab408a5e21a4f9dcd40d87" dependencies = [ 
"num-traits", ] @@ -3425,7 +3378,7 @@ dependencies = [ "ecdsa", "elliptic-curve", "primeorder", - "sha2 0.10.8", + "sha2 0.10.7", ] [[package]] @@ -3437,14 +3390,14 @@ dependencies = [ "ecdsa", "elliptic-curve", "primeorder", - "sha2 0.10.8", + "sha2 0.10.7", ] [[package]] name = "parking" -version = "2.2.0" +version = "2.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bb813b8af86854136c6922af0598d719255ecb2179515e6e7730d468f05c9cae" +checksum = "14f2252c834a40ed9bb5422029649578e63aa341ac401f74e719dd1afda8394e" [[package]] name = "parking_lot" @@ -3464,7 +3417,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3742b2c103b9f06bc9fff0a37ff4912935851bee6d36f3c02bcc755bcfec228f" dependencies = [ "lock_api", - "parking_lot_core 0.9.9", + "parking_lot_core 0.9.8", ] [[package]] @@ -3483,13 +3436,13 @@ dependencies = [ [[package]] name = "parking_lot_core" -version = "0.9.9" +version = "0.9.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4c42a9226546d68acdd9c0a280d17ce19bfe27a46bf68784e4066115788d008e" +checksum = "93f00c865fe7cabf650081affecd3871070f26767e7b2070a3ffae14c654b447" dependencies = [ "cfg-if", "libc", - "redox_syscall 0.4.1", + "redox_syscall 0.3.5", "smallvec", "windows-targets 0.48.5", ] @@ -3535,9 +3488,9 @@ dependencies = [ [[package]] name = "percent-encoding" -version = "2.3.1" +version = "2.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e3148f5046208a5d56bcfc03053e3ca6334e51da8dfb19b6cdc8b306fae3283e" +checksum = "9b2a4787296e9989611394c33f193f676704af1686e70b8f8033ab5ba9a35a94" [[package]] name = "petgraph" @@ -3546,7 +3499,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e1d3afd2628e69da2be385eb6f2fd57c8ac7977ceeff6dc166ff1657b0e386a9" dependencies = [ "fixedbitset", - "indexmap 2.1.0", + "indexmap 2.0.0", "serde", "serde_derive", ] @@ -3566,7 +3519,7 @@ version = "0.11.2" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "90fcb95eef784c2ac79119d1dd819e162b5da872ce6f3c3abe1e8ca1c082f72b" dependencies = [ - "siphasher 0.3.11", + "siphasher", ] [[package]] @@ -3606,14 +3559,14 @@ checksum = "4359fd9c9171ec6e8c62926d6faaf553a8dc3f64e1507e76da7911b4f6a04405" dependencies = [ "proc-macro2", "quote", - "syn 2.0.40", + "syn 2.0.29", ] [[package]] name = "pin-project-lite" -version = "0.2.13" +version = "0.2.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8afb450f006bf6385ca15ef45d71d2288452bc3683ce2e2cacc0d18e4be60b58" +checksum = "12cc1b0bf1727a77a54b6654e7b5f1af8604923edc8b81885f8ec92f9e3f0a05" [[package]] name = "pin-utils" @@ -3699,7 +3652,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f950b2377845cebe5cf8b5165cb3cc1a5e0fa5cfa3e1f7f55707d8fd82e0a7b7" dependencies = [ "der 0.7.8", - "spki 0.7.3", + "spki 0.7.2", ] [[package]] @@ -3708,12 +3661,6 @@ version = "0.3.27" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "26072860ba924cbfa98ea39c8c19b4dd6a4a25423dbdf219c1eca91aa0cf6964" -[[package]] -name = "platforms" -version = "3.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "14e6ab3f592e6fb464fc9712d8d6e6912de6473954635fd76a589d832cffcbb0" - [[package]] name = "podman-api" version = "0.10.0" @@ -3722,7 +3669,7 @@ checksum = "4d0ade207138f12695cb4be3b590283f1cf764c5c4909f39966c4b4b0dba7c1e" dependencies = [ "base64 0.13.1", "byteorder", - "bytes 1.5.0", + "bytes 1.4.0", "chrono", "containers-api 0.8.0", "flate2", @@ -3752,9 +3699,9 @@ dependencies = [ [[package]] name = "portable-atomic" -version = "1.6.0" +version = "1.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7170ef9988bc169ba16dd36a7fa041e5c4cbeb6a35b76d4c03daded371eae7c0" +checksum = "f32154ba0af3a075eefa1eda8bb414ee928f62303a54ea85b8d6638ff1a6ee9e" [[package]] name = "postgres-derive" @@ -3765,7 
+3712,7 @@ dependencies = [ "heck 0.4.1", "proc-macro2", "quote", - "syn 2.0.40", + "syn 2.0.29", ] [[package]] @@ -3774,15 +3721,15 @@ version = "0.6.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "49b6c5ef183cd3ab4ba005f1ca64c21e8bd97ce4699cfea9e8d9a2c4958ca520" dependencies = [ - "base64 0.21.5", + "base64 0.21.2", "byteorder", - "bytes 1.5.0", + "bytes 1.4.0", "fallible-iterator", "hmac", "md-5", "memchr", "rand 0.8.5", - "sha2 0.10.8", + "sha2 0.10.7", "stringprep", ] @@ -3792,7 +3739,8 @@ version = "0.2.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8d2234cdee9408b523530a9b6d2d6b373d1db34f6a8e51dc03ded1828d7fb67c" dependencies = [ - "bytes 1.5.0", + "array-init", + "bytes 1.4.0", "chrono", "fallible-iterator", "postgres-derive", @@ -3802,14 +3750,8 @@ dependencies = [ ] [[package]] -name = "powerfmt" -version = "0.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "439ee305def115ba05938db6eb1644ff94165c5ab5e9420d1c1bcedbba909391" - -[[package]] -name = "ppv-lite86" -version = "0.2.17" +name = "ppv-lite86" +version = "0.2.17" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5b40af805b3121feab8a3c29f04d8ad262fa8e0561883e7653e024ae4479e6de" @@ -3845,30 +3787,30 @@ dependencies = [ [[package]] name = "primeorder" -version = "0.13.6" +version = "0.13.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "353e1ca18966c16d9deb1c69278edbc5f194139612772bd9537af60ac231e1e6" +checksum = "3c2fcef82c0ec6eefcc179b978446c399b3cdf73c392c35604e399eee6df1ee3" dependencies = [ "elliptic-curve", ] [[package]] name = "proc-macro-crate" -version = "1.3.1" +version = "0.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7f4c021e1093a56626774e81216a4ce732a735e5bad4868a03f3ed65ca0c3919" +checksum = "1d6ea3c4595b96363c13943497db34af4460fb474a95c43f4446ad341b8c9785" dependencies = [ - "once_cell", - 
"toml_edit 0.19.15", + "toml 0.5.11", ] [[package]] name = "proc-macro-crate" -version = "2.0.0" +version = "1.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7e8366a6159044a37876a2b9817124296703c586a5c92e2c53751fa06d8d43e8" +checksum = "7f4c021e1093a56626774e81216a4ce732a735e5bad4868a03f3ed65ca0c3919" dependencies = [ - "toml_edit 0.20.7", + "once_cell", + "toml_edit", ] [[package]] @@ -3897,9 +3839,9 @@ dependencies = [ [[package]] name = "proc-macro2" -version = "1.0.70" +version = "1.0.66" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "39278fbbf5fb4f646ce651690877f89d1c5811a3d4acb27700c1cb3cdb78fd3b" +checksum = "18fb31db3f9bddb2ea821cde30a9f70117e3f119938b5ee630b7403aa6e2ead9" dependencies = [ "unicode-ident", ] @@ -3910,7 +3852,7 @@ version = "0.11.9" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0b82eaa1d779e9a4bc1c3217db8ffbeabaae1dca241bf70183242128d48681cd" dependencies = [ - "bytes 1.5.0", + "bytes 1.4.0", "prost-derive", ] @@ -3920,9 +3862,9 @@ version = "0.11.9" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "119533552c9a7ffacc21e099c24a0ac8bb19c2a2a3f363de84cd9b844feab270" dependencies = [ - "bytes 1.5.0", + "bytes 1.4.0", "heck 0.4.1", - "itertools 0.10.5", + "itertools", "lazy_static", "log", "multimap", @@ -3943,7 +3885,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e5d2d8d10f3c6ded6da8b05b5fb3b8a5082514344d56c9f871412d29b4e075b4" dependencies = [ "anyhow", - "itertools 0.10.5", + "itertools", "proc-macro2", "quote", "syn 1.0.109", @@ -4004,7 +3946,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "735d6cecec44dc27382b962309c05de47d40727ff9898a501ec8fadec76be9a8" dependencies = [ "async-trait", - "bytes 1.5.0", + "bytes 1.4.0", "dashmap", "futures", "pin-project 1.1.3", @@ -4099,7 +4041,7 @@ version = "0.6.4" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c" dependencies = [ - "getrandom 0.2.11", + "getrandom 0.2.10", ] [[package]] @@ -4185,29 +4127,29 @@ dependencies = [ [[package]] name = "redox_syscall" -version = "0.4.1" +version = "0.3.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4722d768eff46b75989dd134e5c353f0d6296e5aaa3132e776cbdb56be7731aa" +checksum = "567664f262709473930a4bf9e51bf2ebf3348f2e748ccc50dea20646858f8f29" dependencies = [ "bitflags 1.3.2", ] [[package]] name = "redox_users" -version = "0.4.4" +version = "0.4.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a18479200779601e498ada4e8c1e1f50e3ee19deb0259c25825a98b5603b2cb4" +checksum = "b033d837a7cf162d7993aded9304e30a83213c648b6e389db233191f891e5c2b" dependencies = [ - "getrandom 0.2.11", - "libredox", + "getrandom 0.2.10", + "redox_syscall 0.2.16", "thiserror", ] [[package]] name = "refinery" -version = "0.8.11" +version = "0.8.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "529664dbccc0a296947615c997a857912d72d1c44be1fafb7bae54ecfa7a8c24" +checksum = "cdb0436d0dd7bd8d4fce1e828751fa79742b08e35f27cfea7546f8a322b5ef24" dependencies = [ "refinery-core", "refinery-macros", @@ -4215,9 +4157,9 @@ dependencies = [ [[package]] name = "refinery-core" -version = "0.8.11" +version = "0.8.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e895cb870cf06e92318cbbeb701f274d022d5ca87a16fa8244e291cd035ef954" +checksum = "19206547cd047e8f4dfa6b20c30d3ecaf24be05841b6aa0aa926a47a3d0662bb" dependencies = [ "async-trait", "cfg-if", @@ -4225,39 +4167,39 @@ dependencies = [ "log", "regex", "serde", - "siphasher 1.0.0", + "siphasher", "thiserror", - "time", + "time 0.3.27", "tokio", "tokio-postgres", - "toml 0.7.8", + "toml 0.7.6", "url", "walkdir", ] [[package]] name = "refinery-macros" -version = "0.8.11" 
+version = "0.8.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "123e8b80f8010c3ae38330c81e76938fc7adf6cdbfbaad20295bb8c22718b4f1" +checksum = "d94d4b9241859ba19eaa5c04c86e782eb3aa0aae2c5868e0cfa90c856e58a174" dependencies = [ "proc-macro2", "quote", "refinery-core", "regex", - "syn 2.0.40", + "syn 2.0.29", ] [[package]] name = "regex" -version = "1.10.2" +version = "1.9.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "380b951a9c5e80ddfd6136919eef32310721aa4aacd4889a8d39124b026ab343" +checksum = "81bc1d4caf89fac26a70747fe603c130093b53c773888797a6329091246d651a" dependencies = [ "aho-corasick", "memchr", - "regex-automata 0.4.3", - "regex-syntax 0.8.2", + "regex-automata 0.3.6", + "regex-syntax 0.7.4", ] [[package]] @@ -4271,13 +4213,13 @@ dependencies = [ [[package]] name = "regex-automata" -version = "0.4.3" +version = "0.3.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5f804c7828047e88b2d32e2d7fe5a105da8ee3264f01902f796c8e067dc2483f" +checksum = "fed1ceff11a1dddaee50c9dc8e4938bd106e9d89ae372f192311e7da498e3b69" dependencies = [ "aho-corasick", "memchr", - "regex-syntax 0.8.2", + "regex-syntax 0.7.4", ] [[package]] @@ -4288,9 +4230,9 @@ checksum = "f162c6dd7b008981e4d40210aca20b4bd0f9b60ca9271061b07f78537722f2e1" [[package]] name = "regex-syntax" -version = "0.8.2" +version = "0.7.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c08c74e62047bb2de4ff487b251e4a92e24f48745648451635cec7d591162d9f" +checksum = "e5ea92a5b6195c6ef2a0295ea818b312502c6fc94dde986c5553242e18fd4ce2" [[package]] name = "remain" @@ -4300,26 +4242,26 @@ checksum = "bce3a7139d2ee67d07538ee5dba997364fbc243e7e7143e96eb830c74bfaa082" dependencies = [ "proc-macro2", "quote", - "syn 2.0.40", + "syn 2.0.29", ] [[package]] name = "rend" -version = "0.4.1" +version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"a2571463863a6bd50c32f94402933f03457a3fbaf697a707c5be741e459f08fd" +checksum = "581008d2099240d37fb08d77ad713bcaec2c4d89d50b5b21a8bb1996bbab68ab" dependencies = [ "bytecheck", ] [[package]] name = "reqwest" -version = "0.11.22" +version = "0.11.20" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "046cd98826c46c2ac8ddecae268eb5c2e58628688a5fc7a2643704a73faba95b" +checksum = "3e9ad3fe7488d7e34558a2033d45a0c90b72d97b4f80705666fea71472e2e6a1" dependencies = [ - "base64 0.21.5", - "bytes 1.5.0", + "base64 0.21.2", + "bytes 1.4.0", "encoding_rs", "futures-core", "futures-util", @@ -4336,12 +4278,11 @@ dependencies = [ "once_cell", "percent-encoding", "pin-project-lite", - "rustls 0.21.10", + "rustls 0.21.6", "rustls-pemfile", "serde", "serde_json", "serde_urlencoded", - "system-configuration", "tokio", "tokio-rustls 0.24.1", "tokio-util", @@ -4351,7 +4292,7 @@ dependencies = [ "wasm-bindgen-futures", "wasm-streams", "web-sys", - "webpki-roots 0.25.3", + "webpki-roots 0.25.2", "winreg", ] @@ -4381,25 +4322,11 @@ dependencies = [ "libc", "once_cell", "spin 0.5.2", - "untrusted 0.7.1", + "untrusted", "web-sys", "winapi", ] -[[package]] -name = "ring" -version = "0.17.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "688c63d65483050968b2a8937f7995f443e27041a0f7700aa59b0822aedebb74" -dependencies = [ - "cc", - "getrandom 0.2.11", - "libc", - "spin 0.9.8", - "untrusted 0.9.0", - "windows-sys 0.48.0", -] - [[package]] name = "rkyv" version = "0.7.42" @@ -4469,7 +4396,7 @@ dependencies = [ "aws-creds", "aws-region", "base64 0.13.1", - "bytes 1.5.0", + "bytes 1.4.0", "cfg-if", "futures", "hex", @@ -4483,9 +4410,9 @@ dependencies = [ "reqwest", "serde", "serde_derive", - "sha2 0.10.8", + "sha2 0.10.7", "thiserror", - "time", + "time 0.3.27", "tokio", "tokio-stream", "url", @@ -4493,13 +4420,13 @@ dependencies = [ [[package]] name = "rust_decimal" -version = "1.33.1" +version = "1.32.0" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "06676aec5ccb8fc1da723cc8c0f9a46549f21ebb8753d3915c6c41db1e7f1dc4" +checksum = "a4c4216490d5a413bc6d10fa4742bd7d4955941d062c0ef873141d6b0e7b30fd" dependencies = [ "arrayvec", "borsh", - "bytes 1.5.0", + "bytes 1.4.0", "num-traits", "rand 0.8.5", "rkyv", @@ -4524,37 +4451,51 @@ dependencies = [ [[package]] name = "rustix" -version = "0.38.28" +version = "0.37.23" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4d69718bf81c6127a49dc64e44a742e8bb9213c0ff8869a22c308f84c1d4ab06" +dependencies = [ + "bitflags 1.3.2", + "errno", + "io-lifetimes", + "libc", + "linux-raw-sys 0.3.8", + "windows-sys 0.48.0", +] + +[[package]] +name = "rustix" +version = "0.38.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "72e572a5e8ca657d7366229cdde4bd14c4eb5499a9573d4d366fe1b599daa316" +checksum = "19ed4fa021d81c8392ce04db050a3da9a60299050b7ae1cf482d862b54a7218f" dependencies = [ - "bitflags 2.4.1", + "bitflags 2.4.0", "errno", "libc", - "linux-raw-sys", - "windows-sys 0.52.0", + "linux-raw-sys 0.4.5", + "windows-sys 0.48.0", ] [[package]] name = "rustls" -version = "0.20.9" +version = "0.20.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1b80e3dec595989ea8510028f30c408a4630db12c9cbb8de34203b89d6577e99" +checksum = "fff78fc74d175294f4e83b28343315ffcfb114b156f0185e9741cb5570f50e2f" dependencies = [ "log", - "ring 0.16.20", + "ring", "sct", "webpki", ] [[package]] name = "rustls" -version = "0.21.10" +version = "0.21.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f9d5a6813c0759e4609cd494e8e725babae6a2ca7b62a5536a13daaec6fcb7ba" +checksum = "1d1feddffcfcc0b33f5c6ce9a29e341e4cd59c3f78e7ee45f4a40c038b1d6cbb" dependencies = [ "log", - "ring 0.17.7", + "ring", "rustls-webpki", "sct", ] @@ -4573,21 +4514,21 @@ dependencies = [ [[package]] name = "rustls-pemfile" -version = "1.0.4" +version = "1.0.3" 
source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1c74cae0a4cf6ccbbf5f359f08efdf8ee7e1dc532573bf0db71968cb56b1448c" +checksum = "2d3987094b1d07b653b7dfdc3f70ce9a1da9c51ac18c1b06b662e4f9a0e9f4b2" dependencies = [ - "base64 0.21.5", + "base64 0.21.2", ] [[package]] name = "rustls-webpki" -version = "0.101.7" +version = "0.101.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8b6275d1ee7a1cd780b64aca7726599a1dbc893b1e64144529e55c3c2f745765" +checksum = "7d93931baf2d282fff8d3a532bbfd7653f734643161b87e3e01e59a04439bf0d" dependencies = [ - "ring 0.17.7", - "untrusted 0.9.0", + "ring", + "untrusted", ] [[package]] @@ -4598,9 +4539,9 @@ checksum = "7ffc183a10b4478d04cbbbfc96d0873219d962dd5accaff2ffbd4ceb7df837f4" [[package]] name = "ryu" -version = "1.0.16" +version = "1.0.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f98d2aa92eebf49b69786be48e4477826b256916e84a57ff2a4f21923b48eb4c" +checksum = "1ad4cc8da4ef723ed60bced201181d83791ad433213d8c24efffda1eec85d741" [[package]] name = "same-file" @@ -4628,12 +4569,12 @@ checksum = "94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49" [[package]] name = "sct" -version = "0.7.1" +version = "0.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "da046153aa2352493d6cb7da4b6e5c0c057d8a1d0a9aa8560baffdd945acd414" +checksum = "d53dcdb7c9f8158937a7981b48accfd39a43af418591a5d008c7b22b5e1b7ca4" dependencies = [ - "ring 0.17.7", - "untrusted 0.9.0", + "ring", + "untrusted", ] [[package]] @@ -4653,7 +4594,7 @@ version = "0.1.0" dependencies = [ "async-trait", "axum", - "base64 0.21.5", + "base64 0.21.2", "buck2-resources", "chrono", "convert_case 0.6.0", @@ -4665,7 +4606,7 @@ dependencies = [ "hyper", "module-index-client", "names", - "nix 0.26.4", + "nix 0.26.2", "once_cell", "pathdiff", "pretty_assertions_sorted", @@ -4675,7 +4616,7 @@ dependencies = [ "serde", "serde_json", "serde_url_params", - 
"serde_with 3.4.0", + "serde_with 3.3.0", "si-crypto", "si-data-nats", "si-data-pg", @@ -4684,7 +4625,7 @@ dependencies = [ "si-settings", "si-std", "sodiumoxide", - "strum 0.24.1", + "strum", "telemetry", "thiserror", "tokio", @@ -4695,7 +4636,7 @@ dependencies = [ "url", "veritech-client", "y-sync", - "yrs 0.16.10", + "yrs", ] [[package]] @@ -4720,7 +4661,7 @@ dependencies = [ "serde_json", "sqlx", "thiserror", - "time", + "time 0.3.27", "tracing", "url", "uuid", @@ -4750,7 +4691,7 @@ dependencies = [ "rust_decimal", "sea-query-derive", "serde_json", - "time", + "time 0.3.27", "uuid", ] @@ -4766,7 +4707,7 @@ dependencies = [ "sea-query", "serde_json", "sqlx", - "time", + "time 0.3.27", "uuid", ] @@ -4860,24 +4801,24 @@ dependencies = [ [[package]] name = "semver" -version = "1.0.20" +version = "1.0.18" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "836fa6a3e1e547f9a2c4040802ec865b5d85f4014efe00555d7090a3dcaa1090" +checksum = "b0293b4b29daaf487284529cc2f5675b8e57c61f70167ba415a463651fd6a918" [[package]] name = "serde" -version = "1.0.193" +version = "1.0.186" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "25dd9975e68d0cb5aa1120c288333fc98731bd1dd12f561e468ea4728c042b89" +checksum = "9f5db24220c009de9bd45e69fb2938f4b6d2df856aa9304ce377b3180f83b7c1" dependencies = [ "serde_derive", ] [[package]] name = "serde-aux" -version = "4.3.1" +version = "4.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "184eba62ebddb71658697c8b08822edee89970bf318c5362189f0de27f85b498" +checksum = "c3dfe1b7eb6f9dcf011bd6fad169cdeaae75eda0d61b1a99a3f015b41b0cae39" dependencies = [ "chrono", "serde", @@ -4886,22 +4827,22 @@ dependencies = [ [[package]] name = "serde_derive" -version = "1.0.193" +version = "1.0.186" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "43576ca501357b9b071ac53cdc7da8ef0cbd9493d8df094cd821777ea6e894d3" +checksum = 
"5ad697f7e0b65af4983a4ce8f56ed5b357e8d3c36651bf6a7e13639c17b8e670" dependencies = [ "proc-macro2", "quote", - "syn 2.0.40", + "syn 2.0.29", ] [[package]] name = "serde_json" -version = "1.0.108" +version = "1.0.105" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3d1c7e3eac408d115102c4c24ad393e0821bb3a5df4d506a80f85f7a742a526b" +checksum = "693151e1ac27563d6dbcec9dee9fbd5da8539b20fa14ad3752b2e6d363ace360" dependencies = [ - "indexmap 2.1.0", + "indexmap 2.0.0", "itoa", "ryu", "serde", @@ -4928,20 +4869,20 @@ dependencies = [ [[package]] name = "serde_repr" -version = "0.1.17" +version = "0.1.16" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3081f5ffbb02284dda55132aa26daecedd7372a42417bbbab6f14ab7d6bb9145" +checksum = "8725e1dfadb3a50f7e5ce0b1a540466f6ed3fe7a0fca2ac2b8b831d31316bd00" dependencies = [ "proc-macro2", "quote", - "syn 2.0.40", + "syn 2.0.29", ] [[package]] name = "serde_spanned" -version = "0.6.4" +version = "0.6.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "12022b835073e5b11e90a14f86838ceb1c8fb0325b72416845c487ac0fa95e80" +checksum = "96426c9936fd7a0124915f9185ea1d20aa9445cc9821142f0a73bc9207a2e186" dependencies = [ "serde", ] @@ -4981,24 +4922,24 @@ dependencies = [ "serde", "serde_json", "serde_with_macros 2.3.3", - "time", + "time 0.3.27", ] [[package]] name = "serde_with" -version = "3.4.0" +version = "3.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "64cd236ccc1b7a29e7e2739f27c0b2dd199804abc4290e32f59f3b68d6405c23" +checksum = "1ca3b16a3d82c4088f343b7480a93550b3eabe1a358569c2dfe38bbcead07237" dependencies = [ - "base64 0.21.5", + "base64 0.21.2", "chrono", "hex", "indexmap 1.9.3", - "indexmap 2.1.0", + "indexmap 2.0.0", "serde", "serde_json", - "serde_with_macros 3.4.0", - "time", + "serde_with_macros 3.3.0", + "time 0.3.27", ] [[package]] @@ -5010,28 +4951,28 @@ dependencies = [ "darling 0.20.3", "proc-macro2", 
"quote", - "syn 2.0.40", + "syn 2.0.29", ] [[package]] name = "serde_with_macros" -version = "3.4.0" +version = "3.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "93634eb5f75a2323b16de4748022ac4297f9e76b6dced2be287a099f41b5e788" +checksum = "2e6be15c453eb305019bfa438b1593c731f36a289a7853f7707ee29e870b3b3c" dependencies = [ "darling 0.20.3", "proc-macro2", "quote", - "syn 2.0.40", + "syn 2.0.29", ] [[package]] name = "serde_yaml" -version = "0.9.27" +version = "0.9.25" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3cc7a1570e38322cfe4154732e5110f887ea57e22b76f4bfd32b5bdd3368666c" +checksum = "1a49e178e4452f45cb61d0cd8cebc1b0fafd3e41929e996cef79aa3aca91f574" dependencies = [ - "indexmap 2.1.0", + "indexmap 2.0.0", "itoa", "ryu", "serde", @@ -5040,9 +4981,9 @@ dependencies = [ [[package]] name = "sha1" -version = "0.10.6" +version = "0.10.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e3bf829a2d51ab4a5ddf1352d8470c140cadc8301b2ae1789db023f01cedd6ba" +checksum = "f04293dc80c3993519f2d7f6f511707ee7094fe0c6d3406feb330cdb3540eba3" dependencies = [ "cfg-if", "cpufeatures", @@ -5064,9 +5005,9 @@ dependencies = [ [[package]] name = "sha2" -version = "0.10.8" +version = "0.10.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "793db75ad2bcafc3ffa7c68b215fee268f537982cd901d132f89c6343f3a3dc8" +checksum = "479fb9d862239e610720565ca91403019f2f00410f1864c5aa7479b950a76ed8" dependencies = [ "cfg-if", "cpufeatures", @@ -5075,9 +5016,9 @@ dependencies = [ [[package]] name = "sharded-slab" -version = "0.1.7" +version = "0.1.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f40ca3c46823713e0d4209592e8d6e826aa57e928f09752619fc696c499637f6" +checksum = "900fba806f70c630b0a382d0d825e17a0f19fcd059a2ade1ff237bcddf446b31" dependencies = [ "lazy_static", ] @@ -5092,7 +5033,7 @@ dependencies = [ "serde_json", "si-cli", "si-posthog", - 
"strum 0.24.1", + "strum", "telemetry-application", "tokio", ] @@ -5113,7 +5054,7 @@ version = "0.1.0" dependencies = [ "async-trait", "axum", - "base64 0.21.5", + "base64 0.21.2", "color-eyre", "colored", "comfy-table", @@ -5140,14 +5081,14 @@ dependencies = [ "tempfile", "thiserror", "tokio", - "toml 0.7.8", + "toml 0.7.6", ] [[package]] name = "si-crypto" version = "0.1.0" dependencies = [ - "base64 0.21.5", + "base64 0.21.2", "ciborium", "remain", "serde", @@ -5182,7 +5123,7 @@ dependencies = [ name = "si-data-pg" version = "0.1.0" dependencies = [ - "bytes 1.5.0", + "bytes 1.4.0", "deadpool", "deadpool-postgres", "futures", @@ -5211,7 +5152,7 @@ dependencies = [ name = "si-pkg" version = "0.1.0" dependencies = [ - "base64 0.21.5", + "base64 0.21.2", "chrono", "derive_builder", "object-tree", @@ -5220,7 +5161,7 @@ dependencies = [ "serde", "serde_json", "si-hash", - "strum 0.24.1", + "strum", "tempfile", "thiserror", "tokio", @@ -5237,7 +5178,7 @@ dependencies = [ "reqwest", "serde", "serde_json", - "strum 0.24.1", + "strum", "telemetry", "thiserror", "tokio", @@ -5274,7 +5215,7 @@ version = "0.1.0" dependencies = [ "remain", "serde", - "serde_with 3.4.0", + "serde_with 3.3.0", "thiserror", ] @@ -5284,7 +5225,7 @@ version = "0.1.0" dependencies = [ "proc-macro2", "quote", - "syn 2.0.40", + "syn 2.0.29", ] [[package]] @@ -5329,18 +5270,6 @@ dependencies = [ "zeroize", ] -[[package]] -name = "signatory" -version = "0.27.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c1e303f8205714074f6068773f0e29527e0453937fe837c9717d066635b65f31" -dependencies = [ - "pkcs8 0.10.2", - "rand_core 0.6.4", - "signature 2.2.0", - "zeroize", -] - [[package]] name = "signature" version = "1.6.4" @@ -5353,9 +5282,9 @@ dependencies = [ [[package]] name = "signature" -version = "2.2.0" +version = "2.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "77549399552de45a898a580c1b41d445bf730df867cc44e6c0233bbc4b8329de" 
+checksum = "5e1788eed21689f9cf370582dfc467ef36ed9c707f073528ddafa8d83e3b8500" dependencies = [ "digest 0.10.7", "rand_core 0.6.4", @@ -5373,12 +5302,6 @@ version = "0.3.11" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "38b58827f4464d87d377d175e90bf58eb00fd8716ff0a62f80356b5e61555d0d" -[[package]] -name = "siphasher" -version = "1.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "54ac45299ccbd390721be55b412d41931911f654fa99e2cb8bfb57184b2061fe" - [[package]] name = "slab" version = "0.4.9" @@ -5397,26 +5320,17 @@ dependencies = [ "smallvec", ] -[[package]] -name = "smallstr" -version = "0.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "63b1aefdf380735ff8ded0b15f31aab05daf1f70216c01c02a12926badd1df9d" -dependencies = [ - "smallvec", -] - [[package]] name = "smallvec" -version = "1.11.2" +version = "1.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4dccd0940a2dcdf68d092b8cbab7dc0ad8fa938bf95787e1b916b0e3d0e8e970" +checksum = "62bb4feee49fdd9f707ef802e22365a35de4b7b299de4763d44bfea899442ff9" [[package]] name = "socket2" -version = "0.4.10" +version = "0.4.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9f7916fc008ca5542385b89a3d3ce689953c143e9304a9bf8beec1de48994c0d" +checksum = "64a4a911eed85daf18834cfaa86a79b7d266ff93ff5ba14005426219480ed662" dependencies = [ "libc", "winapi", @@ -5424,9 +5338,9 @@ dependencies = [ [[package]] name = "socket2" -version = "0.5.5" +version = "0.5.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7b5fac59a5cb5dd637972e5fca70daf0523c9067fcdc4842f053dae04a18f8e9" +checksum = "2538b18701741680e0322a2302176d3253a35388e2e62f172f64f4f16605f877" dependencies = [ "libc", "windows-sys 0.48.0", @@ -5438,7 +5352,7 @@ version = "0.2.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"e26be3acb6c2d9a7aac28482586a7856436af4cfe7100031d219de2d2ecb0028" dependencies = [ - "ed25519 1.5.3", + "ed25519", "libc", "libsodium-sys", "serde", @@ -5477,9 +5391,9 @@ dependencies = [ [[package]] name = "spki" -version = "0.7.3" +version = "0.7.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d91ed6c858b01f942cd56b37a94b3e0a1798290327d1236e4d9cf4eaca44d29d" +checksum = "9d1e996ef02c474957d681f1b05213dfb0abab947b446a62d37770b23500184a" dependencies = [ "base64ct", "der 0.7.8", @@ -5487,11 +5401,11 @@ dependencies = [ [[package]] name = "sqlformat" -version = "0.2.3" +version = "0.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ce81b7bd7c4493975347ef60d8c7e8b742d4694f4c49f93e0a12ea263938176c" +checksum = "0c12bc9199d1db8234678b7051747c07f517cdcf019262d1847b94ec8b1aee3e" dependencies = [ - "itertools 0.12.0", + "itertools", "nom", "unicode_categories", ] @@ -5512,13 +5426,13 @@ version = "0.6.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fa8241483a83a3f33aa5fff7e7d9def398ff9990b2752b6c6112b83c6d246029" dependencies = [ - "ahash 0.7.7", + "ahash 0.7.6", "atoi", "base64 0.13.1", "bigdecimal", "bitflags 1.3.2", "byteorder", - "bytes 1.5.0", + "bytes 1.4.0", "chrono", "crossbeam-queue", "dirs", @@ -5545,18 +5459,18 @@ dependencies = [ "percent-encoding", "rand 0.8.5", "rust_decimal", - "rustls 0.20.9", + "rustls 0.20.8", "rustls-pemfile", "serde", "serde_json", "sha1", - "sha2 0.10.8", + "sha2 0.10.7", "smallvec", "sqlformat", "sqlx-rt", "stringprep", "thiserror", - "time", + "time 0.3.27", "tokio-stream", "url", "uuid", @@ -5600,6 +5514,12 @@ version = "1.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a8f112729512f8e442d81f95a8a7ddf2b7c6b8a1a6f509a95864142b30cab2d3" +[[package]] +name = "static_assertions" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"a2eb9349b6444b326872e140eb1cf5e7c522154d69e7a0ffb0fb81c06b37543f" + [[package]] name = "stream-cancel" version = "0.8.1" @@ -5613,11 +5533,10 @@ dependencies = [ [[package]] name = "stringprep" -version = "0.1.4" +version = "0.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bb41d74e231a107a1b4ee36bd1214b11285b77768d2e3824aedafa988fd36ee6" +checksum = "db3737bde7edce97102e0e2b15365bf7a20bfdb5f60f4f9e8d7004258a51a8da" dependencies = [ - "finl_unicode", "unicode-bidi", "unicode-normalization", ] @@ -5634,15 +5553,9 @@ version = "0.24.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "063e6045c0e62079840579a7e47a355ae92f60eb74daaf156fb1e84ba164e63f" dependencies = [ - "strum_macros 0.24.3", + "strum_macros", ] -[[package]] -name = "strum" -version = "0.25.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "290d54ea6f91c969195bdbcd7442c8c2a2ba87da8bf60a7ee86a235d4bc1e125" - [[package]] name = "strum_macros" version = "0.24.3" @@ -5656,19 +5569,6 @@ dependencies = [ "syn 1.0.109", ] -[[package]] -name = "strum_macros" -version = "0.25.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "23dc1fa9ac9c169a78ba62f0b841814b7abae11bdd047b9c58f893439e309ea0" -dependencies = [ - "heck 0.4.1", - "proc-macro2", - "quote", - "rustversion", - "syn 2.0.40", -] - [[package]] name = "subtle" version = "2.5.0" @@ -5688,54 +5588,21 @@ dependencies = [ [[package]] name = "syn" -version = "2.0.40" +version = "2.0.29" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "13fa70a4ee923979ffb522cacce59d34421ebdea5625e1073c4326ef9d2dd42e" +checksum = "c324c494eba9d92503e6f1ef2e6df781e78f6a7705a0202d9801b198807d518a" dependencies = [ "proc-macro2", "quote", "unicode-ident", ] -[[package]] -name = "syn_derive" -version = "0.1.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"1329189c02ff984e9736652b1631330da25eaa6bc639089ed4915d25446cbe7b" -dependencies = [ - "proc-macro-error", - "proc-macro2", - "quote", - "syn 2.0.40", -] - [[package]] name = "sync_wrapper" version = "0.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2047c6ded9c721764247e62cd3b03c09ffc529b2ba5b10ec482ae507a4a70160" -[[package]] -name = "system-configuration" -version = "0.5.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ba3a3adc5c275d719af8cb4272ea1c4a6d668a777f37e115f6d11ddbc1c8e0e7" -dependencies = [ - "bitflags 1.3.2", - "core-foundation", - "system-configuration-sys", -] - -[[package]] -name = "system-configuration-sys" -version = "0.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a75fb188eb626b924683e3b95e3a48e63551fcfb51949de2f06a9d91dbee93c9" -dependencies = [ - "core-foundation-sys", - "libc", -] - [[package]] name = "tap" version = "1.0.1" @@ -5791,66 +5658,56 @@ dependencies = [ [[package]] name = "tempfile" -version = "3.8.1" +version = "3.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7ef1adac450ad7f4b3c28589471ade84f25f731a7a0fe30d71dfa9f60fd808e5" +checksum = "cb94d2f3cc536af71caac6b6fcebf65860b347e7ce0cc9ebe8f70d3e521054ef" dependencies = [ "cfg-if", - "fastrand 2.0.1", - "redox_syscall 0.4.1", - "rustix", + "fastrand 2.0.0", + "redox_syscall 0.3.5", + "rustix 0.38.8", "windows-sys 0.48.0", ] [[package]] name = "terminal_size" -version = "0.3.0" +version = "0.2.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "21bebf2b7c9e0a515f6e0f8c51dc0f8e4696391e6f1ff30379559f8365fb0df7" +checksum = "8e6bf6f19e9f8ed8d4048dc22981458ebcf406d67e94cd422e5ecd73d63b3237" dependencies = [ - "rustix", + "rustix 0.37.23", "windows-sys 0.48.0", ] [[package]] name = "test-log" -version = "0.2.14" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"6159ab4116165c99fc88cce31f99fa2c9dbe08d3691cb38da02fc3b45f357d2b" -dependencies = [ - "test-log-macros", - "tracing-subscriber", -] - -[[package]] -name = "test-log-macros" -version = "0.2.14" +version = "0.2.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7ba277e77219e9eea169e8508942db1bf5d8a41ff2db9b20aab5a5aadc9fa25d" +checksum = "d9601d162c1d77e62c1ea0bc8116cd1caf143ce3af947536c3c9052a1677fe0c" dependencies = [ "proc-macro2", "quote", - "syn 2.0.40", + "syn 1.0.109", ] [[package]] name = "thiserror" -version = "1.0.50" +version = "1.0.47" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f9a7210f5c9a7156bb50aa36aed4c95afb51df0df00713949448cf9e97d382d2" +checksum = "97a802ec30afc17eee47b2855fc72e0c4cd62be9b4efe6591edde0ec5bd68d8f" dependencies = [ "thiserror-impl", ] [[package]] name = "thiserror-impl" -version = "1.0.50" +version = "1.0.47" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "266b2e40bc00e5a6c09c3584011e08b06f123c00362c92b975ba9843aaaa14b8" +checksum = "6bb623b56e39ab7dcd4b1b98bb6c8f8d907ed255b18de254088016b27a8ee19b" dependencies = [ "proc-macro2", "quote", - "syn 2.0.40", + "syn 2.0.29", ] [[package]] @@ -5865,13 +5722,23 @@ dependencies = [ [[package]] name = "time" -version = "0.3.30" +version = "0.1.45" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1b797afad3f312d1c66a56d11d0316f916356d11bd158fbc6ca6389ff6bf805a" +dependencies = [ + "libc", + "wasi 0.10.0+wasi-snapshot-preview1", + "winapi", +] + +[[package]] +name = "time" +version = "0.3.27" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c4a34ab300f2dee6e562c10a046fc05e358b29f9bf92277f30c3c8d82275f6f5" +checksum = "0bb39ee79a6d8de55f48f2293a830e040392f1c5f16e336bdd1788cd0aadce07" dependencies = [ "deranged", "itoa", - "powerfmt", "serde", "time-core", "time-macros", @@ -5879,15 +5746,15 @@ dependencies = [ [[package]] name = "time-core" 
-version = "0.1.2" +version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ef927ca75afb808a4d64dd374f00a2adf8d0fcff8e7b184af886c3c87ec4a3f3" +checksum = "7300fbefb4dadc1af235a9cef3737cea692a9d97e1b9cbcd4ebdae6f8868e6fb" [[package]] name = "time-macros" -version = "0.2.15" +version = "0.2.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4ad70d68dba9e1f8aceda7aa6711965dfec1cac869f311a51bd08b3a2ccbce20" +checksum = "733d258752e9303d392b94b75230d07b0b9c489350c69b851fc6c065fde3e8f9" dependencies = [ "time-core", ] @@ -5909,19 +5776,19 @@ checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20" [[package]] name = "tokio" -version = "1.35.0" +version = "1.32.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "841d45b238a16291a4e1584e61820b8ae57d696cc5015c459c229ccc6990cc1c" +checksum = "17ed6077ed6cd6c74735e21f37eb16dc3935f96878b1fe961074089cc80893f9" dependencies = [ "backtrace", - "bytes 1.5.0", + "bytes 1.4.0", "libc", "mio", "num_cpus", "parking_lot 0.12.1", "pin-project-lite", "signal-hook-registry", - "socket2 0.5.5", + "socket2 0.5.3", "tokio-macros", "windows-sys 0.48.0", ] @@ -5938,24 +5805,24 @@ dependencies = [ [[package]] name = "tokio-macros" -version = "2.2.0" +version = "2.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5b8a1e28f2deaa14e508979454cb3a223b10b938b45af148bc0986de36f1923b" +checksum = "630bdcf245f78637c13ec01ffae6187cca34625e8c63150d424b59e55af2675e" dependencies = [ "proc-macro2", "quote", - "syn 2.0.40", + "syn 2.0.29", ] [[package]] name = "tokio-postgres" -version = "0.7.10" +version = "0.7.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d340244b32d920260ae7448cb72b6e238bddc3d4f7603394e7dd46ed8e48f5b8" +checksum = "000387915083ea6406ee44b50ca74813aba799fe682a7689e382bf9e13b74ce9" dependencies = [ "async-trait", "byteorder", - "bytes 1.5.0", + "bytes 
1.4.0", "fallible-iterator", "futures-channel", "futures-util", @@ -5967,7 +5834,7 @@ dependencies = [ "postgres-protocol", "postgres-types", "rand 0.8.5", - "socket2 0.5.5", + "socket2 0.5.3", "tokio", "tokio-util", "whoami", @@ -5990,7 +5857,7 @@ version = "0.23.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c43ee83903113e03984cb9e5cebe6c04a5116269e900e3ddba8f068a62adda59" dependencies = [ - "rustls 0.20.9", + "rustls 0.20.8", "tokio", "webpki", ] @@ -6001,7 +5868,7 @@ version = "0.24.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c28327cf380ac148141087fbfb9de9d7bd4e84ab5d2c28fbc911d753de8a7081" dependencies = [ - "rustls 0.21.10", + "rustls 0.21.6", "tokio", ] @@ -6011,7 +5878,7 @@ version = "0.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "911a61637386b789af998ee23f50aa30d5fd7edcec8d6d3dedae5e5815205466" dependencies = [ - "bytes 1.5.0", + "bytes 1.4.0", "educe", "futures-core", "futures-sink", @@ -6038,7 +5905,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e89b3cbabd3ae862100094ae433e1def582cf86451b4e9bf83aa7ac1d8a7d719" dependencies = [ "async-stream", - "bytes 1.5.0", + "bytes 1.4.0", "futures-core", "tokio", "tokio-stream", @@ -6058,23 +5925,23 @@ dependencies = [ [[package]] name = "tokio-tungstenite" -version = "0.20.1" +version = "0.20.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "212d5dcb2a1ce06d81107c3d0ffa3121fe974b73f068c8282cb1c32328113b6c" +checksum = "2b2dbec703c26b00d74844519606ef15d09a7d6857860f84ad223dec002ddea2" dependencies = [ "futures-util", "log", "tokio", - "tungstenite 0.20.1", + "tungstenite 0.20.0", ] [[package]] name = "tokio-util" -version = "0.7.10" +version = "0.7.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5419f34732d9eb6ee4c3578b7989078579b7f039cbbb9ca2c4da015749371e15" +checksum = 
"806fe8c2c87eccc8b3267cbae29ed3ab2d0bd37fca70ab622e46aaa9375ddb7d" dependencies = [ - "bytes 1.5.0", + "bytes 1.4.0", "futures-core", "futures-sink", "pin-project-lite", @@ -6088,7 +5955,7 @@ version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "52a15c15b1bc91f90902347eff163b5b682643aff0c8e972912cca79bd9208dd" dependencies = [ - "bytes 1.5.0", + "bytes 1.4.0", "futures", "libc", "tokio", @@ -6106,49 +5973,38 @@ dependencies = [ [[package]] name = "toml" -version = "0.7.8" +version = "0.7.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dd79e69d3b627db300ff956027cc6c3798cef26d22526befdfcd12feeb6d2257" +checksum = "c17e963a819c331dcacd7ab957d80bc2b9a9c1e71c804826d2f283dd65306542" dependencies = [ "serde", "serde_spanned", "toml_datetime", - "toml_edit 0.19.15", + "toml_edit", ] [[package]] name = "toml_datetime" -version = "0.6.5" +version = "0.6.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3550f4e9685620ac18a50ed434eb3aec30db8ba93b0287467bca5826ea25baf1" +checksum = "7cda73e2f1397b1262d6dfdcef8aafae14d1de7748d66822d3bfeeb6d03e5e4b" dependencies = [ "serde", ] [[package]] name = "toml_edit" -version = "0.19.15" +version = "0.19.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1b5bb770da30e5cbfde35a2d7b9b8a2c4b8ef89548a7a6aeab5c9a576e3e7421" +checksum = "f8123f27e969974a3dfba720fdb560be359f57b44302d280ba72e76a74480e8a" dependencies = [ - "indexmap 2.1.0", + "indexmap 2.0.0", "serde", "serde_spanned", "toml_datetime", "winnow", ] -[[package]] -name = "toml_edit" -version = "0.20.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "70f427fce4d84c72b5b732388bf4a9f4531b53f74e2887e3ecb2481f68f66d81" -dependencies = [ - "indexmap 2.1.0", - "toml_datetime", - "winnow", -] - [[package]] name = "tonic" version = "0.8.3" @@ -6159,7 +6015,7 @@ dependencies = [ "async-trait", "axum", "base64 0.13.1", - "bytes 
1.5.0", + "bytes 1.4.0", "futures-core", "futures-util", "h2", @@ -6216,12 +6072,12 @@ dependencies = [ [[package]] name = "tower-http" -version = "0.4.4" +version = "0.4.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "61c5bb1d698276a2443e5ecfabc1008bf15a36c12e6a7176e7bf089ea9131140" +checksum = "55ae70283aba8d2a8b411c695c437fe25b8b5e44e23e780662002fc72fb47a82" dependencies = [ - "bitflags 2.4.1", - "bytes 1.5.0", + "bitflags 2.4.0", + "bytes 1.4.0", "futures-core", "futures-util", "http", @@ -6247,10 +6103,11 @@ checksum = "b6bc1c9ce2b5135ac7f93c72918fc37feb872bdc6a5533a8b85eb4b86bfdae52" [[package]] name = "tracing" -version = "0.1.40" +version = "0.1.37" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c3523ab5a71916ccf420eebdf5521fcef02141234bbc0b8a49f2fdc4544364ef" +checksum = "8ce8c33a8d48bd45d624a6e523445fd21ec13d3653cd51f681abf67418f54eb8" dependencies = [ + "cfg-if", "log", "pin-project-lite", "tracing-attributes", @@ -6259,20 +6116,20 @@ dependencies = [ [[package]] name = "tracing-attributes" -version = "0.1.27" +version = "0.1.26" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "34704c8d6ebcbc939824180af020566b01a7c01f80641264eba0999f6c2b6be7" +checksum = "5f4f31f56159e98206da9efd823404b79b6ef3143b4a7ab76e67b1751b25a4ab" dependencies = [ "proc-macro2", "quote", - "syn 2.0.40", + "syn 2.0.29", ] [[package]] name = "tracing-core" -version = "0.1.32" +version = "0.1.31" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c06d3da6113f116aaee68e4d601191614c9053067f9ab7f6edbcb161237daa54" +checksum = "0955b8137a1df6f1a2e9a37d8a6656291ff0297c1a97c24e0d8425fe2312f79a" dependencies = [ "once_cell", "valuable", @@ -6300,23 +6157,12 @@ dependencies = [ [[package]] name = "tracing-log" -version = "0.1.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f751112709b4e791d8ce53e32c4ed2d353565a795ce84da2285393f41557bdf2" 
-dependencies = [ - "log", - "once_cell", - "tracing-core", -] - -[[package]] -name = "tracing-log" -version = "0.2.0" +version = "0.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ee855f1f400bd0e5c02d150ae5de3840039a3f54b025156404e34c23c03f47c3" +checksum = "78ddad33d2d10b1ed7eb9d1f518a5674713876e97e5bb9b7345a7984fbb4f922" dependencies = [ + "lazy_static", "log", - "once_cell", "tracing-core", ] @@ -6330,15 +6176,15 @@ dependencies = [ "opentelemetry", "tracing", "tracing-core", - "tracing-log 0.1.4", + "tracing-log", "tracing-subscriber", ] [[package]] name = "tracing-subscriber" -version = "0.3.18" +version = "0.3.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ad0f048c97dbd9faa9b7df56362b8ebcaa52adb06b498c050d2f4e32f90a7a8b" +checksum = "30a651bc37f915e81f087d86e62a18eec5f79550c7faff886f7090b4ea757c77" dependencies = [ "matchers", "nu-ansi-term", @@ -6349,14 +6195,14 @@ dependencies = [ "thread_local", "tracing", "tracing-core", - "tracing-log 0.2.0", + "tracing-log", ] [[package]] name = "try-lock" -version = "0.2.5" +version = "0.2.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e421abadd41a4225275504ea4d6566923418b7f05506fbc9c0fe86ba7396114b" +checksum = "3528ecfd12c466c6f163363caf2d02a71161dd5e1cc6ae7b34207ea2d42d81ed" [[package]] name = "tungstenite" @@ -6366,7 +6212,7 @@ checksum = "30ee6ab729cd4cf0fd55218530c4522ed30b7b6081752839b68fcec8d0960788" dependencies = [ "base64 0.13.1", "byteorder", - "bytes 1.5.0", + "bytes 1.4.0", "http", "httparse", "log", @@ -6379,12 +6225,12 @@ dependencies = [ [[package]] name = "tungstenite" -version = "0.20.1" +version = "0.20.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9e3dac10fd62eaf6617d3a904ae222845979aec67c615d1c842b4002c7666fb9" +checksum = "e862a1c4128df0112ab625f55cd5c934bcb4312ba80b39ae4b4835a3fd58e649" dependencies = [ "byteorder", - "bytes 1.5.0", + "bytes 1.4.0", 
"data-encoding", "http", "httparse", @@ -6398,15 +6244,15 @@ dependencies = [ [[package]] name = "typenum" -version = "1.17.0" +version = "1.16.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "42ff0bf0c66b8238c6f3b578df37d0b7848e55df8577b3f74f92a69acceeb825" +checksum = "497961ef93d974e23eb6f433eb5fe1b7930b659f06d12dec6fc44a8f554c0bba" [[package]] name = "ulid" -version = "1.1.0" +version = "1.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7e37c4b6cbcc59a8dcd09a6429fbc7890286bcbb79215cea7b38a3c4c0921d93" +checksum = "13a3aaa69b04e5b66cc27309710a569ea23593612387d67daaf102e73aa974fd" dependencies = [ "rand 0.8.5", "serde", @@ -6423,15 +6269,15 @@ dependencies = [ [[package]] name = "unicode-bidi" -version = "0.3.14" +version = "0.3.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6f2528f27a9eb2b21e69c95319b30bd0efd85d09c379741b0f78ea1d86be2416" +checksum = "92888ba5573ff080736b3648696b70cafad7d250551175acbaa4e0385b3e1460" [[package]] name = "unicode-ident" -version = "1.0.12" +version = "1.0.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3354b9ac3fae1ff6755cb6db53683adb661634f67557942dea4facebec0fee4b" +checksum = "301abaae475aa91687eb82514b328ab47a211a533026cb25fc3e519b86adfc3c" [[package]] name = "unicode-normalization" @@ -6450,9 +6296,9 @@ checksum = "1dd624098567895118886609431a7c3b8f516e41d30e0643f03d94592a147e36" [[package]] name = "unicode-width" -version = "0.1.11" +version = "0.1.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e51733f11c9c4f72aa0c160008246859e340b00807569a0da0e7a1079b27ba85" +checksum = "c0edd1e5b14653f783770bce4a4dabb4a5108a5370a5f5d8cfe8710c361f6c8b" [[package]] name = "unicode-xid" @@ -6478,17 +6324,11 @@ version = "0.7.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a156c684c91ea7d62626509bce3cb4e1d9ed5c4d978f7b4352658f96a4c26b4a" 
-[[package]] -name = "untrusted" -version = "0.9.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8ecb6da28b8a351d773b68d5825ac39017e680750f980f3a1a85cd8dd28a47c1" - [[package]] name = "url" -version = "2.5.0" +version = "2.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "31e6302e3bb753d46e83516cae55ae196fc0c309407cf11ab35cc51a4c2a4633" +checksum = "50bff7831e19200a85b17131d085c25d7811bc4e186efdaf54bbd132994a88cb" dependencies = [ "form_urlencoded", "idna", @@ -6504,9 +6344,9 @@ checksum = "09cc8ee72d2a9becf2f2febe0205bbed8fc6615b7cb429ad062dc7b7ddd036a9" [[package]] name = "utf8-width" -version = "0.1.7" +version = "0.1.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "86bd8d4e895da8537e5315b8254664e6b769c4ff3db18321b297a1e7004392e3" +checksum = "5190c9442dcdaf0ddd50f37420417d219ae5261bbf5db120d0f9bab996c9cba1" [[package]] name = "utf8parse" @@ -6516,11 +6356,11 @@ checksum = "711b9620af191e0cdc7468a8d14e709c3dcdb115b36f838e601583af800a370a" [[package]] name = "uuid" -version = "1.6.1" +version = "1.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5e395fcf16a7a3d8127ec99782007af141946b4795001f876d54fb0d55978560" +checksum = "79daa5ed5740825c40b389c5e50312b9c86df53fccd33f281df655642b43869d" dependencies = [ - "getrandom 0.2.11", + "getrandom 0.2.10", "serde", ] @@ -6545,7 +6385,7 @@ dependencies = [ name = "veritech-client" version = "0.1.0" dependencies = [ - "base64 0.21.5", + "base64 0.21.2", "cyclone-core", "futures", "indoc", @@ -6627,15 +6467,15 @@ dependencies = [ [[package]] name = "waker-fn" -version = "1.1.1" +version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f3c4517f54858c779bbcbf228f4fca63d121bf85fbecb2dc578cdf4a39395690" +checksum = "9d5b2c62b4012a3e1eca5a7e077d13b3bf498c4073e33ccd58626607748ceeca" [[package]] name = "walkdir" -version = "2.4.0" +version = "2.3.3" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "d71d857dc86794ca4c280d616f7da00d2dbfd8cd788846559a6813e6aa4b54ee" +checksum = "36df944cda56c7d8d8b7496af378e6b16de9284591917d307c9b4d313c44e698" dependencies = [ "same-file", "winapi-util", @@ -6656,6 +6496,12 @@ version = "0.9.0+wasi-snapshot-preview1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "cccddf32554fecc6acb585f82a32a72e28b48f8c4c1883ddfeeeaa96f7d8e519" +[[package]] +name = "wasi" +version = "0.10.0+wasi-snapshot-preview1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1a143597ca7c7793eff794def352d41792a93c481eb1042423ff7ff72ba2c31f" + [[package]] name = "wasi" version = "0.11.0+wasi-snapshot-preview1" @@ -6664,9 +6510,9 @@ checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423" [[package]] name = "wasm-bindgen" -version = "0.2.89" +version = "0.2.87" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0ed0d4f68a3015cc185aff4db9506a015f4b96f95303897bfa23f846db54064e" +checksum = "7706a72ab36d8cb1f80ffbf0e071533974a60d0a308d01a5d0375bf60499a342" dependencies = [ "cfg-if", "wasm-bindgen-macro", @@ -6674,24 +6520,24 @@ dependencies = [ [[package]] name = "wasm-bindgen-backend" -version = "0.2.89" +version = "0.2.87" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1b56f625e64f3a1084ded111c4d5f477df9f8c92df113852fa5a374dbda78826" +checksum = "5ef2b6d3c510e9625e5fe6f509ab07d66a760f0885d858736483c32ed7809abd" dependencies = [ "bumpalo", "log", "once_cell", "proc-macro2", "quote", - "syn 2.0.40", + "syn 2.0.29", "wasm-bindgen-shared", ] [[package]] name = "wasm-bindgen-futures" -version = "0.4.39" +version = "0.4.37" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ac36a15a220124ac510204aec1c3e5db8a22ab06fd6706d881dc6149f8ed9a12" +checksum = "c02dbc21516f9f1f04f187958890d7e6026df8d16540b7ad9492bc34a67cea03" dependencies = [ 
"cfg-if", "js-sys", @@ -6701,9 +6547,9 @@ dependencies = [ [[package]] name = "wasm-bindgen-macro" -version = "0.2.89" +version = "0.2.87" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0162dbf37223cd2afce98f3d0785506dcb8d266223983e4b5b525859e6e182b2" +checksum = "dee495e55982a3bd48105a7b947fd2a9b4a8ae3010041b9e0faab3f9cd028f1d" dependencies = [ "quote", "wasm-bindgen-macro-support", @@ -6711,22 +6557,22 @@ dependencies = [ [[package]] name = "wasm-bindgen-macro-support" -version = "0.2.89" +version = "0.2.87" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f0eb82fcb7930ae6219a7ecfd55b217f5f0893484b7a13022ebb2b2bf20b5283" +checksum = "54681b18a46765f095758388f2d0cf16eb8d4169b639ab575a8f5693af210c7b" dependencies = [ "proc-macro2", "quote", - "syn 2.0.40", + "syn 2.0.29", "wasm-bindgen-backend", "wasm-bindgen-shared", ] [[package]] name = "wasm-bindgen-shared" -version = "0.2.89" +version = "0.2.87" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7ab9b36309365056cd639da3134bf87fa8f3d86008abf99e612384a6eecd459f" +checksum = "ca6ad05a4870b2bf5fe995117d3728437bd27d7cd5f06f13c17443ef369775a1" [[package]] name = "wasm-streams" @@ -6743,9 +6589,9 @@ dependencies = [ [[package]] name = "web-sys" -version = "0.3.66" +version = "0.3.64" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "50c24a44ec86bb68fbecd1b3efed7e85ea5621b39b35ef2766b66cd984f8010f" +checksum = "9b85cbef8c220a6abc02aefd892dfc0fc23afb1c6a426316ec33253a3877249b" dependencies = [ "js-sys", "wasm-bindgen", @@ -6753,12 +6599,12 @@ dependencies = [ [[package]] name = "webpki" -version = "0.22.4" +version = "0.22.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ed63aea5ce73d0ff405984102c42de94fc55a6b75765d621c65262469b3c9b53" +checksum = "f095d78192e208183081cc07bc5515ef55216397af48b873e5edcd72637fa1bd" dependencies = [ - "ring 0.17.7", - "untrusted 0.9.0", + 
"ring", + "untrusted", ] [[package]] @@ -6772,20 +6618,19 @@ dependencies = [ [[package]] name = "webpki-roots" -version = "0.25.3" +version = "0.25.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1778a42e8b3b90bff8d0f5032bf22250792889a5cdc752aa0020c84abe3aaf10" +checksum = "14247bb57be4f377dfb94c72830b8ce8fc6beac03cf4bf7b9732eadd414123fc" [[package]] name = "which" -version = "4.4.2" +version = "4.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "87ba24419a2078cd2b0f2ede2691b6c66d8e47836da3b6db8265ebad47afbfc7" +checksum = "2441c784c52b289a054b7201fc93253e288f094e2f4be9058343127c4226a269" dependencies = [ "either", - "home", + "libc", "once_cell", - "rustix", ] [[package]] @@ -6816,9 +6661,9 @@ checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6" [[package]] name = "winapi-util" -version = "0.1.6" +version = "0.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f29e6f9198ba0d26b4c9f07dbe6f9ed633e1f3d5b8b414090084349e46a52596" +checksum = "70ec6ce85bb158151cae5e5c87f95a8e97d2c0c4b001223f33a334e3ce5de178" dependencies = [ "winapi", ] @@ -6830,10 +6675,10 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" [[package]] -name = "windows-core" -version = "0.51.1" +name = "windows" +version = "0.48.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f1f8cf84f35d2db49a46868f947758c7a1138116f7fac3bc844f43ade1292e64" +checksum = "e686886bc078bc1b0b600cac0147aadb815089b6e4da64016cbd754b6342700f" dependencies = [ "windows-targets 0.48.5", ] @@ -6856,15 +6701,6 @@ dependencies = [ "windows-targets 0.48.5", ] -[[package]] -name = "windows-sys" -version = "0.52.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "282be5f36a8ce781fad8c8ae18fa3f9beff57ec1b52cb3de0789201425d9a33d" -dependencies = [ - 
"windows-targets 0.52.0", -] - [[package]] name = "windows-targets" version = "0.42.2" @@ -6895,21 +6731,6 @@ dependencies = [ "windows_x86_64_msvc 0.48.5", ] -[[package]] -name = "windows-targets" -version = "0.52.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8a18201040b24831fbb9e4eb208f8892e1f50a37feb53cc7ff887feb8f50e7cd" -dependencies = [ - "windows_aarch64_gnullvm 0.52.0", - "windows_aarch64_msvc 0.52.0", - "windows_i686_gnu 0.52.0", - "windows_i686_msvc 0.52.0", - "windows_x86_64_gnu 0.52.0", - "windows_x86_64_gnullvm 0.52.0", - "windows_x86_64_msvc 0.52.0", -] - [[package]] name = "windows_aarch64_gnullvm" version = "0.42.2" @@ -6922,12 +6743,6 @@ version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2b38e32f0abccf9987a4e3079dfb67dcd799fb61361e53e2882c3cbaf0d905d8" -[[package]] -name = "windows_aarch64_gnullvm" -version = "0.52.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cb7764e35d4db8a7921e09562a0304bf2f93e0a51bfccee0bd0bb0b666b015ea" - [[package]] name = "windows_aarch64_msvc" version = "0.42.2" @@ -6940,12 +6755,6 @@ version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "dc35310971f3b2dbbf3f0690a219f40e2d9afcf64f9ab7cc1be722937c26b4bc" -[[package]] -name = "windows_aarch64_msvc" -version = "0.52.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bbaa0368d4f1d2aaefc55b6fcfee13f41544ddf36801e793edbbfd7d7df075ef" - [[package]] name = "windows_i686_gnu" version = "0.42.2" @@ -6958,12 +6767,6 @@ version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a75915e7def60c94dcef72200b9a8e58e5091744960da64ec734a6c6e9b3743e" -[[package]] -name = "windows_i686_gnu" -version = "0.52.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a28637cb1fa3560a16915793afb20081aba2c92ee8af57b4d5f28e4b3e7df313" - [[package]] name = 
"windows_i686_msvc" version = "0.42.2" @@ -6976,12 +6779,6 @@ version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8f55c233f70c4b27f66c523580f78f1004e8b5a8b659e05a4eb49d4166cca406" -[[package]] -name = "windows_i686_msvc" -version = "0.52.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ffe5e8e31046ce6230cc7215707b816e339ff4d4d67c65dffa206fd0f7aa7b9a" - [[package]] name = "windows_x86_64_gnu" version = "0.42.2" @@ -6994,12 +6791,6 @@ version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "53d40abd2583d23e4718fddf1ebec84dbff8381c07cae67ff7768bbf19c6718e" -[[package]] -name = "windows_x86_64_gnu" -version = "0.52.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3d6fa32db2bc4a2f5abeacf2b69f7992cd09dca97498da74a151a3132c26befd" - [[package]] name = "windows_x86_64_gnullvm" version = "0.42.2" @@ -7012,12 +6803,6 @@ version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0b7b52767868a23d5bab768e390dc5f5c55825b6d30b86c844ff2dc7414044cc" -[[package]] -name = "windows_x86_64_gnullvm" -version = "0.52.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1a657e1e9d3f514745a572a6846d3c7aa7dbe1658c056ed9c3344c4109a6949e" - [[package]] name = "windows_x86_64_msvc" version = "0.42.2" @@ -7030,17 +6815,11 @@ version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ed94fce61571a4006852b7389a063ab983c02eb1bb37b47f8272ce92d06d9538" -[[package]] -name = "windows_x86_64_msvc" -version = "0.52.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dff9641d1cd4be8d1a070daf9e3773c5f67e78b4d9d42263020c057706765c04" - [[package]] name = "winnow" -version = "0.5.28" +version = "0.5.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"6c830786f7720c2fd27a1a0e27a709dbd3c4d009b56d098fc742d4f4eab91fe2" +checksum = "7c2e3184b9c4e92ad5167ca73039d0c42476302ab603e2fec4487511f38ccefc" dependencies = [ "memchr", ] @@ -7066,13 +6845,11 @@ dependencies = [ [[package]] name = "xattr" -version = "1.1.2" +version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d367426ae76bdfce3d8eaea6e94422afd6def7d46f9c89e2980309115b3c2c41" +checksum = "f4686009f71ff3e5c4dbcf1a282d0a44db3f021ba69350cd42086b3e5f1c6985" dependencies = [ "libc", - "linux-raw-sys", - "rustix", ] [[package]] @@ -7085,7 +6862,7 @@ dependencies = [ "lib0", "thiserror", "tokio", - "yrs 0.17.2", + "yrs", ] [[package]] @@ -7112,51 +6889,16 @@ dependencies = [ "atomic_refcell", "lib0", "rand 0.7.3", - "smallstr 0.2.0", + "smallstr", "smallvec", "thiserror", ] -[[package]] -name = "yrs" -version = "0.17.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "68aea14c6c33f2edd8a5ff9415360cfa5b98d90cce30c5ee3be59a8419fb15a9" -dependencies = [ - "atomic_refcell", - "rand 0.7.3", - "serde", - "serde_json", - "smallstr 0.3.0", - "smallvec", - "thiserror", -] - -[[package]] -name = "zerocopy" -version = "0.7.30" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "306dca4455518f1f31635ec308b6b3e4eb1b11758cefafc782827d0aa7acb5c7" -dependencies = [ - "zerocopy-derive", -] - -[[package]] -name = "zerocopy-derive" -version = "0.7.30" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "be912bf68235a88fbefd1b73415cb218405958d1655b2ece9035a19920bdf6ba" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.40", -] - [[package]] name = "zeroize" -version = "1.7.0" +version = "1.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "525b4ec142c6b68a2d10f01f7bbf6755599ca3f81ea53b8431b7dd348f5fdb2d" +checksum = "2a0956f1ba7c7909bfb66c2e9e4124ab6f6482560f6628b5aaeba39207c9aad9" dependencies = [ "zeroize_derive", ] @@ 
-7169,5 +6911,5 @@ checksum = "ce36e65b0d2999d2aafac989fb249189a141aee1f53c612c1f37d72631959f69" dependencies = [ "proc-macro2", "quote", - "syn 2.0.40", + "syn 2.0.29", ] diff --git a/Cargo.toml b/Cargo.toml index 591070a14e..6c1da3820e 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -4,11 +4,19 @@ debug = true [workspace] resolver = "2" members = [ + "bin/rebaser", + "lib/content-store", + "lib/content-store-test", + "lib/rebaser-client", + "lib/rebaser-core", + "lib/rebaser-server", + "lib/si-cbor", + "lib/si-posthog-rs", + "lib/si-rabbitmq", "bin/council", "bin/cyclone", "bin/module-index", "bin/pinga", - "bin/rebaser", "bin/sdf", "bin/si", "bin/veritech", @@ -16,12 +24,10 @@ members = [ "lib/buck2-resources", "lib/bytes-lines-codec", "lib/config-file", - "lib/content-store", - "lib/content-store-test", - "lib/council-server", "lib/cyclone-client", "lib/cyclone-core", "lib/cyclone-server", + "lib/council-server", "lib/dal", "lib/dal-test", "lib/deadpool-cyclone", @@ -30,21 +36,16 @@ members = [ "lib/nats-subscriber", "lib/object-tree", "lib/pinga-server", - "lib/rebaser-client", - "lib/rebaser-core", - "lib/rebaser-server", "lib/sdf-server", - "lib/si-cbor", "lib/si-crypto", "lib/si-data-nats", "lib/si-data-pg", "lib/si-hash", "lib/si-pkg", - "lib/si-posthog-rs", - "lib/si-rabbitmq", "lib/si-settings", "lib/si-std", "lib/si-test-macros", + "lib/si-posthog-rs", "lib/telemetry-application-rs", "lib/telemetry-rs", "lib/veritech-client", @@ -66,11 +67,7 @@ ciborium = "0.2.1" clap = { version = "4.2.7", features = ["derive", "color", "env", "wrap_help"] } color-eyre = "0.6.2" colored = "2.0.4" -comfy-table = { version = "7.0.1", features = [ - "crossterm", - "tty", - "custom_styling", -] } +comfy-table = { version = "7.0.1", features = ["crossterm", "tty", "custom_styling"] } config = { version = "0.13.3", default-features = false, features = ["toml"] } console = "0.15.7" convert_case = "0.6.0" @@ -88,15 +85,8 @@ futures = "0.3.28" futures-lite = "1.13.0" hex = "0.4.3" 
http = "0.2.9" -hyper = { version = "0.14.26", features = [ - "client", - "http1", - "runtime", - "server", -] } -hyperlocal = { version = "0.8.0", default-features = false, features = [ - "client", -] } +hyper = { version = "0.14.26", features = ["client", "http1", "runtime", "server"] } +hyperlocal = { version = "0.8.0", default-features = false, features = ["client"] } iftree = "1.0.4" indicatif = "0.17.5" indoc = "2.0.1" @@ -110,10 +100,7 @@ nkeys = "0.2.0" num_cpus = "1.15.0" once_cell = "1.17.1" open = "5.0.0" -opentelemetry = { version = "~0.18.0", features = [ - "rt-tokio", - "trace", -] } # pinned, pending new release of tracing-opentelemetry, 0.18 +opentelemetry = { version = "~0.18.0", features = ["rt-tokio", "trace"] } # pinned, pending new release of tracing-opentelemetry, 0.18 opentelemetry-otlp = "~0.11.0" # pinned, pending new release of tracing-opentelemetry, post 0.18 opentelemetry-semantic-conventions = "~0.10.0" # pinned, pending new release of tracing-opentelemetry, post 0.18 ouroboros = "0.15.6" @@ -122,6 +109,7 @@ pathdiff = "0.2.1" petgraph = { version = "0.6.3", features = ["serde-1"] } pin-project-lite = "0.2.9" podman-api = "0.10" +postcard = { version = "1.0.8", features = ["use-std"] } postgres-types = { version = "0.2.5", features = ["derive"] } pretty_assertions_sorted = "1.2.1" proc-macro2 = "1.0.56" @@ -131,22 +119,10 @@ rand = "0.8.5" refinery = { version = "0.8.9", features = ["tokio-postgres"] } regex = "1.8.1" remain = "0.2.8" -reqwest = { version = "0.11.17", default-features = false, features = [ - "rustls-tls", - "json", - "multipart", -] } -rust-s3 = { version = "0.33.0", default-features = false, features = [ - "tokio-rustls-tls", -] } +reqwest = { version = "0.11.17", default-features = false, features = ["rustls-tls", "json", "multipart"] } +rust-s3 = { version = "0.33.0", default-features = false, features = ["tokio-rustls-tls"] } rustls = "0.21.6" # pinned, pending update from tokio-rustls for async-nats -sea-orm = { 
version = "0.11", features = [ - "sqlx-postgres", - "runtime-tokio-rustls", - "macros", - "with-chrono", - "debug-print", -] } +sea-orm = { version = "0.11", features = ["sqlx-postgres", "runtime-tokio-rustls", "macros", "with-chrono", "debug-print"] } self-replace = "1.3.5" serde = { version = "1.0.160", features = ["derive", "rc"] } serde-aux = "4.2.0" @@ -160,16 +136,10 @@ strum = { version = "0.24.1", features = ["derive"] } syn = { version = "2.0.15", features = ["full", "extra-traits"] } tar = "0.4.38" tempfile = "3.5.0" -test-log = { version = "0.2.11", default-features = false, features = [ - "trace", -] } +test-log = { version = "0.2.11", default-features = false, features = ["trace"] } thiserror = "1.0.40" tokio = { version = "1.28.0", features = ["full"] } -tokio-postgres = { version = "0.7.8", features = [ - "runtime", - "with-chrono-0_4", - "with-serde_json-1", -] } +tokio-postgres = { version = "0.7.8", features = ["runtime", "with-chrono-0_4", "with-serde_json-1", "array-impls"] } tokio-serde = { version = "0.8.0", features = ["json"] } tokio-stream = "0.1.14" tokio-test = "0.4.2" diff --git a/bin/sdf/src/main.rs b/bin/sdf/src/main.rs index 0fd2370940..ec24c4fd01 100644 --- a/bin/sdf/src/main.rs +++ b/bin/sdf/src/main.rs @@ -130,13 +130,13 @@ async fn run(args: args::Args, mut telemetry: ApplicationTelemetryClient) -> Res )?; let second_shutdown_broadcast_rx = initial_shutdown_broadcast_rx.resubscribe(); - Server::start_resource_refresh_scheduler( - services_context.clone(), - initial_shutdown_broadcast_rx, - ) - .await; + // Server::start_resource_refresh_scheduler( + // services_context.clone(), + // initial_shutdown_broadcast_rx, + // ) + // .await; - Server::start_status_updater(services_context, second_shutdown_broadcast_rx).await?; + // Server::start_status_updater(services_context, second_shutdown_broadcast_rx).await?; server.run().await?; } @@ -150,13 +150,13 @@ async fn run(args: args::Args, mut telemetry: ApplicationTelemetryClient) -> Res 
.await?; let second_shutdown_broadcast_rx = initial_shutdown_broadcast_rx.resubscribe(); - Server::start_resource_refresh_scheduler( - services_context.clone(), - initial_shutdown_broadcast_rx, - ) - .await; + // Server::start_resource_refresh_scheduler( + // services_context.clone(), + // initial_shutdown_broadcast_rx, + // ) + // .await; - Server::start_status_updater(services_context, second_shutdown_broadcast_rx).await?; + // Server::start_status_updater(services_context, second_shutdown_broadcast_rx).await?; server.run().await?; } diff --git a/lib/content-store/src/store/pg.rs b/lib/content-store/src/store/pg.rs index 741ae748ef..e25382465e 100644 --- a/lib/content-store/src/store/pg.rs +++ b/lib/content-store/src/store/pg.rs @@ -76,7 +76,7 @@ impl Store for PgStore { T: DeserializeOwned, { let object = match self.inner.get(key) { - Some(item) => serde_json::from_slice(&item.value)?, + Some(item) => si_cbor::decode(&item.value)?, None => match ContentPair::find(&self.pg_pool, key).await? 
{ Some(content_pair) => { let encoded = content_pair.value(); diff --git a/lib/dal-test/src/lib.rs b/lib/dal-test/src/lib.rs index beead10c9d..992af762f1 100644 --- a/lib/dal-test/src/lib.rs +++ b/lib/dal-test/src/lib.rs @@ -39,8 +39,8 @@ pub use si_test_macros::{dal_test as test, sdf_test}; pub use telemetry; pub use tracing_subscriber; -pub mod helpers; -pub mod test_harness; +// pub mod helpers; +// pub mod test_harness; const ENV_VAR_NATS_URL: &str = "SI_TEST_NATS_URL"; const ENV_VAR_MODULE_INDEX_URL: &str = "SI_TEST_MODULE_INDEX_URL"; diff --git a/lib/dal/examples/dal-pkg-export/main.rs b/lib/dal/examples/dal-pkg-export/main.rs index 15ed4c2e92..2956dac9df 100644 --- a/lib/dal/examples/dal-pkg-export/main.rs +++ b/lib/dal/examples/dal-pkg-export/main.rs @@ -1,148 +1,148 @@ -use buck2_resources::Buck2Resources; -use std::{env, path::Path, str::FromStr, sync::Arc}; -use tokio::fs; - -use dal::{ - pkg::PkgExporter, ChangeSet, ChangeSetPk, DalContext, JobQueueProcessor, NatsProcessor, Schema, - ServicesContext, StandardModel, Tenancy, Workspace, -}; -use si_crypto::{SymmetricCryptoService, SymmetricCryptoServiceConfigFile}; -use si_data_nats::{NatsClient, NatsConfig}; -use si_data_pg::{PgPool, PgPoolConfig}; -use veritech_client::{Client as VeritechClient, CycloneEncryptionKey}; +// use buck2_resources::Buck2Resources; +// use std::{env, path::Path, str::FromStr, sync::Arc}; +// use tokio::fs; + +// use dal::{ +// pkg::PkgExporter, ChangeSet, ChangeSetPk, DalContext, JobQueueProcessor, NatsProcessor, Schema, +// ServicesContext, StandardModel, Tenancy, Workspace, +// }; +// use si_crypto::{SymmetricCryptoService, SymmetricCryptoServiceConfigFile}; +// use si_data_nats::{NatsClient, NatsConfig}; +// use si_data_pg::{PgPool, PgPoolConfig}; +// use veritech_client::{Client as VeritechClient, CycloneEncryptionKey}; type Result = std::result::Result>; -const USAGE: &str = - "usage: program "; +// const USAGE: &str = +// "usage: program "; #[tokio::main] async fn main() 
-> Result<()> { - let mut args = env::args(); - let change_set_pk = ChangeSetPk::from_str(args.nth(1).expect(USAGE).as_str())?; - let tar_file = args.next().expect(USAGE); - let name = args.next().expect(USAGE); - let version = args.next().expect(USAGE); - let created_by = args.next().expect(USAGE); - let schema_names = args.next().expect(USAGE); - let schema_names = schema_names.split(','); - - let description = format!("{name} package, created by {created_by}."); - - let mut ctx = ctx().await?; - - let workspace = match Workspace::find_first_user_workspace(&ctx).await? { - Some(workspace) => workspace, - None => Workspace::builtin(&ctx).await?, - }; - - ctx.update_tenancy(Tenancy::new(*workspace.pk())); - let change_set = ChangeSet::get_by_pk(&ctx, &change_set_pk) - .await? - .expect("That change set could not be found"); - ctx.update_visibility(ctx.visibility().to_change_set(change_set.pk)); - - let mut schema_ids = Vec::new(); - for schema_name in schema_names { - schema_ids.push(*Schema::find_by_name(&ctx, schema_name.trim()).await?.id()); - } - - println!( - "--- Exporting pkg: {tar_file} from head change set in workspace \"{}\"", - workspace.name() - ); - let mut exporter = - PkgExporter::new_module_exporter(name, version, Some(description), created_by, schema_ids); - - fs::write(&tar_file, exporter.export_as_bytes(&ctx).await?).await?; - - println!("--- Committing database transaction"); - ctx.commit().await?; - println!(" - Committed."); - - println!("--- Export complete."); + // let mut args = env::args(); + // let change_set_pk = ChangeSetPk::from_str(args.nth(1).expect(USAGE).as_str())?; + // let tar_file = args.next().expect(USAGE); + // let name = args.next().expect(USAGE); + // let version = args.next().expect(USAGE); + // let created_by = args.next().expect(USAGE); + // let schema_names = args.next().expect(USAGE); + // let schema_names = schema_names.split(','); + + // let description = format!("{name} package, created by {created_by}."); + + // 
let mut ctx = ctx().await?; + + // let workspace = match Workspace::find_first_user_workspace(&ctx).await? { + // Some(workspace) => workspace, + // None => Workspace::builtin(&ctx).await?, + // }; + + // ctx.update_tenancy(Tenancy::new(*workspace.pk())); + // let change_set = ChangeSet::get_by_pk(&ctx, &change_set_pk) + // .await? + // .expect("That change set could not be found"); + // ctx.update_visibility(ctx.visibility().to_change_set(change_set.pk)); + + // let mut schema_ids = Vec::new(); + // for schema_name in schema_names { + // schema_ids.push(*Schema::find_by_name(&ctx, schema_name.trim()).await?.id()); + // } + + // println!( + // "--- Exporting pkg: {tar_file} from head change set in workspace \"{}\"", + // workspace.name() + // ); + // let mut exporter = + // PkgExporter::new_module_exporter(name, version, Some(description), created_by, schema_ids); + + // fs::write(&tar_file, exporter.export_as_bytes(&ctx).await?).await?; + + // println!("--- Committing database transaction"); + // ctx.commit().await?; + // println!(" - Committed."); + + // println!("--- Export complete."); Ok(()) } -async fn ctx() -> Result { - let encryption_key = Arc::new(load_encryption_key().await?); - let pg_pool = create_pg_pool().await?; - let nats_conn = connect_to_nats().await?; - let veritech = create_veritech_client(nats_conn.clone()); - let symmetric_crypto_service = create_symmetric_crypto_service().await?; - - let job_processor = connect_processor(nats_conn.clone()).await?; - - let services_context = ServicesContext::new( - pg_pool, - nats_conn, - job_processor, - veritech, - encryption_key, - None, - None, - symmetric_crypto_service, - ); - - Ok(DalContext::builder(services_context, false) - .build_default() - .await?) 
-} - -async fn create_pg_pool() -> Result { - PgPool::new(&PgPoolConfig::default()) - .await - .map_err(Into::into) -} - -async fn connect_to_nats() -> Result { - NatsClient::new(&NatsConfig::default()) - .await - .map_err(Into::into) -} - -fn create_veritech_client(nats: NatsClient) -> VeritechClient { - VeritechClient::new(nats) -} - -#[allow(clippy::disallowed_methods)] // Used to determine if running in development -async fn load_encryption_key() -> Result { - let path = if env::var("BUCK_RUN_BUILD_ID").is_ok() || env::var("BUCK_BUILD_ID").is_ok() { - Buck2Resources::read()?.get_ends_with("dev.encryption.key")? - } else if let Ok(dir) = env::var("CARGO_MANIFEST_DIR") { - Path::new(&dir).join("../../lib/cyclone-server/src/dev.encryption.key") - } else { - unimplemented!("not running with Buck2 or Cargo, unsupported") - }; - - CycloneEncryptionKey::load(path).await.map_err(Into::into) -} - -#[allow(clippy::disallowed_methods)] // Used to determine if running in development -async fn create_symmetric_crypto_service() -> Result { - let active_key = if env::var("BUCK_RUN_BUILD_ID").is_ok() || env::var("BUCK_BUILD_ID").is_ok() { - Buck2Resources::read()?.get_ends_with("dev.donkey.key")? 
- } else if let Ok(dir) = env::var("CARGO_MANIFEST_DIR") { - Path::new(&dir).join("../../lib/dal/dev.donkey.key") - } else { - unimplemented!("not running with Buck2 or Cargo, unsupported") - }; - - SymmetricCryptoService::from_config( - &SymmetricCryptoServiceConfigFile { - active_key: active_key.to_string_lossy().into_owned(), - extra_keys: Default::default(), - } - .try_into()?, - ) - .await - .map_err(Into::into) -} - -async fn connect_processor( - job_client: NatsClient, -) -> Result> { - let job_processor = - Box::new(NatsProcessor::new(job_client)) as Box; - Ok(job_processor) -} +// async fn ctx() -> Result { +// let encryption_key = Arc::new(load_encryption_key().await?); +// let pg_pool = create_pg_pool().await?; +// let nats_conn = connect_to_nats().await?; +// let veritech = create_veritech_client(nats_conn.clone()); +// let symmetric_crypto_service = create_symmetric_crypto_service().await?; + +// let job_processor = connect_processor(nats_conn.clone()).await?; + +// let services_context = ServicesContext::new( +// pg_pool, +// nats_conn, +// job_processor, +// veritech, +// encryption_key, +// None, +// None, +// symmetric_crypto_service, +// ); + +// Ok(DalContext::builder(services_context, false) +// .build_default() +// .await?) +// } + +// async fn create_pg_pool() -> Result { +// PgPool::new(&PgPoolConfig::default()) +// .await +// .map_err(Into::into) +// } + +// async fn connect_to_nats() -> Result { +// NatsClient::new(&NatsConfig::default()) +// .await +// .map_err(Into::into) +// } + +// fn create_veritech_client(nats: NatsClient) -> VeritechClient { +// VeritechClient::new(nats) +// } + +// #[allow(clippy::disallowed_methods)] // Used to determine if running in development +// async fn load_encryption_key() -> Result { +// let path = if env::var("BUCK_RUN_BUILD_ID").is_ok() || env::var("BUCK_BUILD_ID").is_ok() { +// Buck2Resources::read()?.get_ends_with("dev.encryption.key")? 
+// } else if let Ok(dir) = env::var("CARGO_MANIFEST_DIR") { +// Path::new(&dir).join("../../lib/cyclone-server/src/dev.encryption.key") +// } else { +// unimplemented!("not running with Buck2 or Cargo, unsupported") +// }; + +// CycloneEncryptionKey::load(path).await.map_err(Into::into) +// } + +// #[allow(clippy::disallowed_methods)] // Used to determine if running in development +// async fn create_symmetric_crypto_service() -> Result { +// let active_key = if env::var("BUCK_RUN_BUILD_ID").is_ok() || env::var("BUCK_BUILD_ID").is_ok() { +// Buck2Resources::read()?.get_ends_with("dev.donkey.key")? +// } else if let Ok(dir) = env::var("CARGO_MANIFEST_DIR") { +// Path::new(&dir).join("../../lib/dal/dev.donkey.key") +// } else { +// unimplemented!("not running with Buck2 or Cargo, unsupported") +// }; + +// SymmetricCryptoService::from_config( +// &SymmetricCryptoServiceConfigFile { +// active_key: active_key.to_string_lossy().into_owned(), +// extra_keys: Default::default(), +// } +// .try_into()?, +// ) +// .await +// .map_err(Into::into) +// } + +// async fn connect_processor( +// job_client: NatsClient, +// ) -> Result> { +// let job_processor = +// Box::new(NatsProcessor::new(job_client)) as Box; +// Ok(job_processor) +// } diff --git a/lib/dal/examples/dal-pkg-import/main.rs b/lib/dal/examples/dal-pkg-import/main.rs index 2b3b2bb9e8..cd95a9c659 100644 --- a/lib/dal/examples/dal-pkg-import/main.rs +++ b/lib/dal/examples/dal-pkg-import/main.rs @@ -1,137 +1,137 @@ -use std::{env, path::Path, sync::Arc}; - -use buck2_resources::Buck2Resources; -use dal::generate_unique_id; -use dal::{ - pkg::import_pkg_from_pkg, ChangeSet, DalContext, JobQueueProcessor, NatsProcessor, - ServicesContext, Tenancy, Workspace, -}; -use si_crypto::{SymmetricCryptoService, SymmetricCryptoServiceConfigFile}; -use si_data_nats::{NatsClient, NatsConfig}; -use si_data_pg::{PgPool, PgPoolConfig}; -use si_pkg::SiPkg; -use veritech_client::{Client as VeritechClient, CycloneEncryptionKey}; +// 
use std::{env, path::Path, sync::Arc}; + +// use buck2_resources::Buck2Resources; +// use dal::generate_unique_id; +// use dal::{ +// pkg::import_pkg_from_pkg, ChangeSet, DalContext, JobQueueProcessor, NatsProcessor, +// ServicesContext, Tenancy, Workspace, +// }; +// use si_crypto::{SymmetricCryptoService, SymmetricCryptoServiceConfigFile}; +// use si_data_nats::{NatsClient, NatsConfig}; +// use si_data_pg::{PgPool, PgPoolConfig}; +// use si_pkg::SiPkg; +// use veritech_client::{Client as VeritechClient, CycloneEncryptionKey}; type Result = std::result::Result>; #[tokio::main] async fn main() -> Result<()> { - let mut args = env::args(); - let tar_file = args.nth(1).expect("usage: program "); - - let mut ctx = ctx().await?; - - let workspace = match Workspace::find_first_user_workspace(&ctx).await? { - Some(workspace) => workspace, - None => Workspace::builtin(&ctx).await?, - }; - - ctx.update_tenancy(Tenancy::new(*workspace.pk())); - - let pkg = SiPkg::load_from_file(Path::new(&tar_file)).await?; - let metadata = pkg.metadata()?; - let change_set_name = format!( - "pkg - {} ({}) {}", - metadata.name(), - metadata.version(), - generate_unique_id(4) - ); - let change_set = ChangeSet::new(&ctx, &change_set_name, None).await?; - let ctx = ctx.clone_with_new_visibility(ctx.visibility().to_change_set(change_set.pk)); - - println!( - "--- Importing pkg: {tar_file} into change set \"{change_set_name}\" in workspace \"{}\"", - workspace.name() - ); - import_pkg_from_pkg(&ctx, &pkg, None).await?; - - println!("--- Committing database transaction"); - ctx.commit().await?; - println!(" - Committed."); - - println!("--- Import complete."); + // let mut args = env::args(); + // let tar_file = args.nth(1).expect("usage: program "); + + // let mut ctx = ctx().await?; + + // let workspace = match Workspace::find_first_user_workspace(&ctx).await? 
{ + // Some(workspace) => workspace, + // None => Workspace::builtin(&ctx).await?, + // }; + + // ctx.update_tenancy(Tenancy::new(*workspace.pk())); + + // let pkg = SiPkg::load_from_file(Path::new(&tar_file)).await?; + // let metadata = pkg.metadata()?; + // let change_set_name = format!( + // "pkg - {} ({}) {}", + // metadata.name(), + // metadata.version(), + // generate_unique_id(4) + // ); + // let change_set = ChangeSet::new(&ctx, &change_set_name, None).await?; + // let ctx = ctx.clone_with_new_visibility(ctx.visibility().to_change_set(change_set.pk)); + + // println!( + // "--- Importing pkg: {tar_file} into change set \"{change_set_name}\" in workspace \"{}\"", + // workspace.name() + // ); + // import_pkg_from_pkg(&ctx, &pkg, None).await?; + + // println!("--- Committing database transaction"); + // ctx.commit().await?; + // println!(" - Committed."); + + // println!("--- Import complete."); Ok(()) } -async fn ctx() -> Result { - let encryption_key = Arc::new(load_encryption_key().await?); - let pg_pool = create_pg_pool().await?; - let nats_conn = connect_to_nats().await?; - let veritech = create_veritech_client(nats_conn.clone()); - let symmetric_crypto_service = create_symmetric_crypto_service().await?; - - let job_processor = connect_processor(nats_conn.clone()).await?; - - let services_context = ServicesContext::new( - pg_pool, - nats_conn, - job_processor, - veritech, - encryption_key, - None, - None, - symmetric_crypto_service, - ); - - Ok(DalContext::builder(services_context, false) - .build_default() - .await?) 
-} - -async fn create_pg_pool() -> Result { - PgPool::new(&PgPoolConfig::default()) - .await - .map_err(Into::into) -} - -async fn connect_to_nats() -> Result { - NatsClient::new(&NatsConfig::default()) - .await - .map_err(Into::into) -} - -fn create_veritech_client(nats: NatsClient) -> VeritechClient { - VeritechClient::new(nats) -} - -#[allow(clippy::disallowed_methods)] // Used to determine if running in development -async fn load_encryption_key() -> Result { - let path = if env::var("BUCK_RUN_BUILD_ID").is_ok() || env::var("BUCK_BUILD_ID").is_ok() { - Buck2Resources::read()?.get_ends_with("dev.encryption.key")? - } else if let Ok(dir) = env::var("CARGO_MANIFEST_DIR") { - Path::new(&dir).join("../../lib/cyclone-server/src/dev.encryption.key") - } else { - unimplemented!("not running with Buck2 or Cargo, unsupported") - }; - - CycloneEncryptionKey::load(path).await.map_err(Into::into) -} - -#[allow(clippy::disallowed_methods)] // Used to determine if running in development -async fn create_symmetric_crypto_service() -> Result { - let active_key = if env::var("BUCK_RUN_BUILD_ID").is_ok() || env::var("BUCK_BUILD_ID").is_ok() { - Buck2Resources::read()?.get_ends_with("dev.donkey.key")? 
- } else if let Ok(dir) = env::var("CARGO_MANIFEST_DIR") { - Path::new(&dir).join("../../lib/dal/dev.donkey.key") - } else { - unimplemented!("not running with Buck2 or Cargo, unsupported") - }; - - SymmetricCryptoService::from_config( - &SymmetricCryptoServiceConfigFile { - active_key: active_key.to_string_lossy().into_owned(), - extra_keys: Default::default(), - } - .try_into()?, - ) - .await - .map_err(Into::into) -} - -async fn connect_processor( - job_client: NatsClient, -) -> Result> { - let job_processor = - Box::new(NatsProcessor::new(job_client)) as Box; - Ok(job_processor) -} +// async fn ctx() -> Result { +// let encryption_key = Arc::new(load_encryption_key().await?); +// let pg_pool = create_pg_pool().await?; +// let nats_conn = connect_to_nats().await?; +// let veritech = create_veritech_client(nats_conn.clone()); +// let symmetric_crypto_service = create_symmetric_crypto_service().await?; + +// let job_processor = connect_processor(nats_conn.clone()).await?; + +// let services_context = ServicesContext::new( +// pg_pool, +// nats_conn, +// job_processor, +// veritech, +// encryption_key, +// None, +// None, +// symmetric_crypto_service, +// ); + +// Ok(DalContext::builder(services_context, false) +// .build_default() +// .await?) +// } + +// async fn create_pg_pool() -> Result { +// PgPool::new(&PgPoolConfig::default()) +// .await +// .map_err(Into::into) +// } + +// async fn connect_to_nats() -> Result { +// NatsClient::new(&NatsConfig::default()) +// .await +// .map_err(Into::into) +// } + +// fn create_veritech_client(nats: NatsClient) -> VeritechClient { +// VeritechClient::new(nats) +// } + +// #[allow(clippy::disallowed_methods)] // Used to determine if running in development +// async fn load_encryption_key() -> Result { +// let path = if env::var("BUCK_RUN_BUILD_ID").is_ok() || env::var("BUCK_BUILD_ID").is_ok() { +// Buck2Resources::read()?.get_ends_with("dev.encryption.key")? 
+// } else if let Ok(dir) = env::var("CARGO_MANIFEST_DIR") { +// Path::new(&dir).join("../../lib/cyclone-server/src/dev.encryption.key") +// } else { +// unimplemented!("not running with Buck2 or Cargo, unsupported") +// }; + +// CycloneEncryptionKey::load(path).await.map_err(Into::into) +// } + +// #[allow(clippy::disallowed_methods)] // Used to determine if running in development +// async fn create_symmetric_crypto_service() -> Result { +// let active_key = if env::var("BUCK_RUN_BUILD_ID").is_ok() || env::var("BUCK_BUILD_ID").is_ok() { +// Buck2Resources::read()?.get_ends_with("dev.donkey.key")? +// } else if let Ok(dir) = env::var("CARGO_MANIFEST_DIR") { +// Path::new(&dir).join("../../lib/dal/dev.donkey.key") +// } else { +// unimplemented!("not running with Buck2 or Cargo, unsupported") +// }; + +// SymmetricCryptoService::from_config( +// &SymmetricCryptoServiceConfigFile { +// active_key: active_key.to_string_lossy().into_owned(), +// extra_keys: Default::default(), +// } +// .try_into()?, +// ) +// .await +// .map_err(Into::into) +// } + +// async fn connect_processor( +// job_client: NatsClient, +// ) -> Result> { +// let job_processor = +// Box::new(NatsProcessor::new(job_client)) as Box; +// Ok(job_processor) +// } diff --git a/lib/dal/src/action.rs b/lib/dal/src/action.rs index ee94ec1bd1..624312e865 100644 --- a/lib/dal/src/action.rs +++ b/lib/dal/src/action.rs @@ -1,253 +1,249 @@ -use serde::{Deserialize, Serialize}; -use std::collections::HashMap; -use thiserror::Error; - -use si_data_pg::PgError; -use telemetry::prelude::*; - -use crate::{ - impl_standard_model, pk, standard_model, standard_model_accessor, standard_model_accessor_ro, - ActionKind, ActionPrototype, ActionPrototypeError, ActionPrototypeId, ChangeSetPk, Component, - ComponentError, ComponentId, DalContext, HistoryActor, HistoryEventError, Node, NodeError, - StandardModel, StandardModelError, Tenancy, Timestamp, TransactionsError, UserPk, Visibility, - WsEvent, WsEventError, -}; - 
-const FIND_FOR_CHANGE_SET: &str = include_str!("./queries/action/find_for_change_set.sql"); - -#[remain::sorted] -#[derive(Error, Debug)] -pub enum ActionError { - #[error("action prototype error: {0}")] - ActionPrototype(#[from] ActionPrototypeError), - #[error(transparent)] - Component(#[from] ComponentError), - #[error("component not found: {0}")] - ComponentNotFound(ComponentId), - #[error("history event: {0}")] - HistoryEvent(#[from] HistoryEventError), - #[error("in head")] - InHead, - #[error(transparent)] - Node(#[from] NodeError), - #[error("pg error: {0}")] - Pg(#[from] PgError), - #[error("action prototype not found: {0}")] - PrototypeNotFound(ActionPrototypeId), - #[error("standard model error: {0}")] - StandardModelError(#[from] StandardModelError), - #[error("transactions error: {0}")] - Transactions(#[from] TransactionsError), - #[error(transparent)] - WsEvent(#[from] WsEventError), -} - -pub type ActionResult = Result; - -pk!(ActionPk); -pk!(ActionId); - -// An Action joins an `ActionPrototype` to a `ComponentId` in a `ChangeSetPk` -#[derive(Deserialize, Serialize, Debug, Clone, PartialEq, Eq)] -pub struct Action { - pk: ActionPk, - id: ActionId, - action_prototype_id: ActionPrototypeId, - // Change set is a field so head doesn't get cluttered with actions to it and the original - // change set pk is lost on apply - change_set_pk: ChangeSetPk, - component_id: ComponentId, - creation_user_id: Option, - index: i16, - #[serde(flatten)] - tenancy: Tenancy, - #[serde(flatten)] - timestamp: Timestamp, - #[serde(flatten)] - visibility: Visibility, -} - -impl_standard_model! 
{ - model: Action, - pk: ActionPk, - id: ActionId, - table_name: "actions", - history_event_label_base: "action", - history_event_message_name: "Action Prototype" -} - -impl Action { - #[allow(clippy::too_many_arguments)] - #[instrument(skip_all)] - pub async fn new( - ctx: &DalContext, - prototype_id: ActionPrototypeId, - component_id: ComponentId, - ) -> ActionResult { - if ctx.visibility().change_set_pk.is_none() { - return Err(ActionError::InHead); - } - - let actor_user_pk = match ctx.history_actor() { - HistoryActor::User(user_pk) => Some(*user_pk), - _ => None, - }; - - let row = ctx - .txns() - .await? - .pg() - .query_one( - "SELECT object FROM action_create_v1($1, $2, $3, $4, $5)", - &[ - ctx.tenancy(), - ctx.visibility(), - &prototype_id, - &component_id, - &actor_user_pk, - ], - ) - .await?; - let object = standard_model::finish_create_from_row(ctx, row).await?; - - WsEvent::change_set_written(ctx) - .await? - .publish_on_commit(ctx) - .await?; - - Ok(object) - } - - pub async fn find_for_change_set(ctx: &DalContext) -> ActionResult> { - let rows = ctx - .txns() - .await? - .pg() - .query( - FIND_FOR_CHANGE_SET, - &[ - ctx.tenancy(), - ctx.visibility(), - &ctx.visibility().change_set_pk, - ], - ) - .await?; - - Ok(standard_model::objects_from_rows(rows)?) - } - - pub async fn prototype(&self, ctx: &DalContext) -> ActionResult { - ActionPrototype::get_by_id(ctx, self.action_prototype_id()) - .await? - .ok_or(ActionError::PrototypeNotFound(*self.action_prototype_id())) - } - - pub async fn component(&self, ctx: &DalContext) -> ActionResult { - Component::get_by_id(ctx, self.component_id()) - .await? 
- .ok_or(ActionError::ComponentNotFound(*self.component_id())) - } - - pub async fn sort_of_change_set(ctx: &DalContext) -> ActionResult<()> { - let actions = Self::find_for_change_set(ctx).await?; - - let mut actions_by_component: HashMap> = HashMap::new(); - for action in actions { - actions_by_component - .entry(*action.component_id()) - .or_default() - .push(action); - } - - let mut initial_deletions = Vec::new(); - let mut initial_others = Vec::new(); - let mut creations = Vec::new(); - let mut final_others = Vec::new(); - let mut final_deletions = Vec::new(); - - let sorted_node_ids = - Node::list_topologically_sorted_configuration_nodes_with_stable_ordering(ctx, false) - .await?; - - let ctx_with_deleted = &ctx.clone_with_delete_visibility(); - for sorted_node_id in sorted_node_ids { - let sorted_node = Node::get_by_id(ctx_with_deleted, &sorted_node_id) - .await? - .ok_or(NodeError::NotFound(sorted_node_id))?; - let component = sorted_node - .component(ctx_with_deleted) - .await? 
- .ok_or(NodeError::ComponentIsNone)?; - - if component.is_destroyed() { - continue; - } - - let mut actions = - if let Some(actions) = actions_by_component.get(component.id()).cloned() { - actions - } else { - continue; - }; - - // Make them stable - actions.sort_by_key(|a| *a.action_prototype_id()); - - for action in actions { - let prototype = action.prototype(ctx).await?; - match prototype.kind() { - ActionKind::Create => { - creations.push(action); - } - ActionKind::Delete => { - if component.resource(ctx).await?.payload.is_some() { - initial_deletions.push(action); - } else { - final_deletions.push(action); - } - } - ActionKind::Refresh | ActionKind::Other => { - if component.resource(ctx).await?.payload.is_some() { - initial_others.push(action); - } else { - final_others.push(action); - } - } - } - } - } - - initial_deletions.reverse(); - final_deletions.reverse(); - - let mut actions = Vec::with_capacity( - initial_deletions.len() - + creations.len() - + initial_others.len() - + final_others.len() - + final_deletions.len(), - ); - actions.extend(initial_deletions); - actions.extend(initial_others); - actions.extend(creations); - actions.extend(final_others); - actions.extend(final_deletions); - - for (index, mut action) in actions.into_iter().enumerate() { - action.set_index(ctx, index as i16).await?; - } - - WsEvent::change_set_written(ctx) - .await? 
- .publish_on_commit(ctx) - .await?; - - Ok(()) - } - - standard_model_accessor!(index, i16, ActionResult); - standard_model_accessor_ro!(action_prototype_id, ActionPrototypeId); - standard_model_accessor_ro!(change_set_pk, ChangeSetPk); - standard_model_accessor_ro!(component_id, ComponentId); - standard_model_accessor_ro!(creation_user_id, Option); -} +// use serde::{Deserialize, Serialize}; +// use std::collections::HashMap; +// use thiserror::Error; + +// use si_data_pg::PgError; +// use telemetry::prelude::*; + +// use crate::{ +// impl_standard_model, pk, standard_model, standard_model_accessor, standard_model_accessor_ro, +// ActionKind, ActionPrototype, ActionPrototypeError, ActionPrototypeId, ChangeSetPk, Component, +// ComponentError, ComponentId, DalContext, HistoryActor, HistoryEventError, Node, NodeError, +// StandardModel, StandardModelError, Tenancy, Timestamp, TransactionsError, UserPk, Visibility, +// WsEvent, WsEventError, +// }; +// +// const FIND_FOR_CHANGE_SET: &str = include_str!("./queries/action/find_for_change_set.sql"); + +// #[remain::sorted] +// #[derive(Error, Debug)] +// pub enum ActionError { +// #[error("action prototype error: {0}")] +// ActionPrototype(#[from] ActionPrototypeError), +// #[error("component not found: {0}")] +// ComponentNotFound(ComponentId), +// #[error("history event: {0}")] +// HistoryEvent(#[from] HistoryEventError), +// #[error("in head")] +// InHead, +// #[error("pg error: {0}")] +// Pg(#[from] PgError), +// #[error("action prototype not found: {0}")] +// PrototypeNotFound(ActionPrototypeId), +// #[error("standard model error: {0}")] +// StandardModelError(#[from] StandardModelError), +// #[error("transactions error: {0}")] +// Transactions(#[from] TransactionsError), +// #[error(transparent)] +// WsEvent(#[from] WsEventError), +// } + +// pub type ActionResult = Result; + +// pk!(ActionPk); +// pk!(ActionId); +// +// // An Action joins an `ActionPrototype` to a `ComponentId` in a `ChangeSetPk` +// 
#[derive(Deserialize, Serialize, Debug, Clone, PartialEq, Eq)] +// pub struct Action { +// pk: ActionPk, +// id: ActionId, +// action_prototype_id: ActionPrototypeId, +// // Change set is a field so head doesn't get cluttered with actions to it and the original +// // change set pk is lost on apply +// change_set_pk: ChangeSetPk, +// component_id: ComponentId, +// creation_user_id: Option, +// index: i16, +// #[serde(flatten)] +// tenancy: Tenancy, +// #[serde(flatten)] +// timestamp: Timestamp, +// #[serde(flatten)] +// visibility: Visibility, +// } +// +// impl_standard_model! { +// model: Action, +// pk: ActionPk, +// id: ActionId, +// table_name: "actions", +// history_event_label_base: "action", +// history_event_message_name: "Action Prototype" +// } +// +// impl Action { +// #[allow(clippy::too_many_arguments)] +// #[instrument(skip_all)] +// pub async fn new( +// ctx: &DalContext, +// prototype_id: ActionPrototypeId, +// component_id: ComponentId, +// ) -> ActionResult { +// if ctx.visibility().change_set_pk.is_none() { +// return Err(ActionError::InHead); +// } +// +// let actor_user_pk = match ctx.history_actor() { +// HistoryActor::User(user_pk) => Some(*user_pk), +// _ => None, +// }; +// +// let row = ctx +// .txns() +// .await? +// .pg() +// .query_one( +// "SELECT object FROM action_create_v1($1, $2, $3, $4, $5)", +// &[ +// ctx.tenancy(), +// ctx.visibility(), +// &prototype_id, +// &component_id, +// &actor_user_pk, +// ], +// ) +// .await?; +// let object = standard_model::finish_create_from_row(ctx, row).await?; +// +// WsEvent::change_set_written(ctx) +// .await? +// .publish_on_commit(ctx) +// .await?; + +// Ok(object) +// } + +// pub async fn find_for_change_set(ctx: &DalContext) -> ActionResult> { +// let rows = ctx +// .txns() +// .await? 
+// .pg() +// .query( +// FIND_FOR_CHANGE_SET, +// &[ +// ctx.tenancy(), +// ctx.visibility(), +// &ctx.visibility().change_set_pk, +// ], +// ) +// .await?; + +// Ok(standard_model::objects_from_rows(rows)?) +// } + +// pub async fn prototype(&self, ctx: &DalContext) -> ActionResult { +// ActionPrototype::get_by_id(ctx, self.action_prototype_id()) +// .await? +// .ok_or(ActionError::PrototypeNotFound(*self.action_prototype_id())) +// } + +// pub async fn component(&self, ctx: &DalContext) -> ActionResult { +// Component::get_by_id(ctx, self.component_id()) +// .await? +// .ok_or(ActionError::ComponentNotFound(*self.component_id())) +// } + +// pub async fn sort_of_change_set(ctx: &DalContext) -> ActionResult<()> { +// let actions = Self::find_for_change_set(ctx).await?; + +// let mut actions_by_component: HashMap> = HashMap::new(); +// for action in actions { +// actions_by_component +// .entry(*action.component_id()) +// .or_default() +// .push(action); +// } + +// let mut initial_deletions = Vec::new(); +// let mut initial_others = Vec::new(); +// let mut creations = Vec::new(); +// let mut final_others = Vec::new(); +// let mut final_deletions = Vec::new(); + +// let sorted_node_ids = +// Node::list_topologically_sorted_configuration_nodes_with_stable_ordering(ctx, false) +// .await?; + +// let ctx_with_deleted = &ctx.clone_with_delete_visibility(); +// for sorted_node_id in sorted_node_ids { +// let sorted_node = Node::get_by_id(ctx_with_deleted, &sorted_node_id) +// .await? +// .ok_or(NodeError::NotFound(sorted_node_id))?; +// let component = sorted_node +// .component(ctx_with_deleted) +// .await? 
+// .ok_or(NodeError::ComponentIsNone)?; + +// if component.is_destroyed() { +// continue; +// } + +// let mut actions = +// if let Some(actions) = actions_by_component.get(component.id()).cloned() { +// actions +// } else { +// continue; +// }; + +// // Make them stable +// actions.sort_by_key(|a| *a.action_prototype_id()); + +// for action in actions { +// let prototype = action.prototype(ctx).await?; +// match prototype.kind() { +// ActionKind::Create => { +// creations.push(action); +// } +// ActionKind::Delete => { +// if component.resource(ctx).await?.payload.is_some() { +// initial_deletions.push(action); +// } else { +// final_deletions.push(action); +// } +// } +// ActionKind::Refresh | ActionKind::Other => { +// if component.resource(ctx).await?.payload.is_some() { +// initial_others.push(action); +// } else { +// final_others.push(action); +// } +// } +// } +// } +// } + +// initial_deletions.reverse(); +// final_deletions.reverse(); + +// let mut actions = Vec::with_capacity( +// initial_deletions.len() +// + creations.len() +// + initial_others.len() +// + final_others.len() +// + final_deletions.len(), +// ); +// actions.extend(initial_deletions); +// actions.extend(initial_others); +// actions.extend(creations); +// actions.extend(final_others); +// actions.extend(final_deletions); + +// for (index, mut action) in actions.into_iter().enumerate() { +// action.set_index(ctx, index as i16).await?; +// } + +// WsEvent::change_set_written(ctx) +// .await? 
+// .publish_on_commit(ctx) +// .await?; + +// Ok(()) +// } + +// standard_model_accessor!(index, i16, ActionResult); +// standard_model_accessor_ro!(action_prototype_id, ActionPrototypeId); +// standard_model_accessor_ro!(change_set_pk, ChangeSetPk); +// standard_model_accessor_ro!(component_id, ComponentId); +// standard_model_accessor_ro!(creation_user_id, Option); +// } diff --git a/lib/dal/src/action_prototype.rs b/lib/dal/src/action_prototype.rs index b92d5af6dc..67b306fa48 100644 --- a/lib/dal/src/action_prototype.rs +++ b/lib/dal/src/action_prototype.rs @@ -1,106 +1,93 @@ -use std::default::Default; - +use content_store::ContentHash; use serde::{Deserialize, Serialize}; -use strum::{AsRefStr, Display}; -use thiserror::Error; - -use si_data_nats::NatsError; -use si_data_pg::PgError; use si_pkg::ActionFuncSpecKind; -use telemetry::prelude::*; - -use crate::func::before::before_funcs_for_component; -use crate::{ - component::view::ComponentViewError, func::backend::js_action::ActionRunResult, - impl_standard_model, pk, standard_model, standard_model_accessor, Component, ComponentId, - ComponentView, DalContext, Func, FuncBinding, FuncBindingError, FuncBindingReturnValueError, - FuncError, FuncId, HistoryEventError, SchemaVariantId, StandardModel, StandardModelError, - Tenancy, Timestamp, TransactionsError, Visibility, WsEvent, WsEventError, -}; - -const FIND_FOR_CONTEXT: &str = include_str!("./queries/action_prototype/find_for_context.sql"); -const FIND_FOR_CONTEXT_AND_KIND: &str = - include_str!("./queries/action_prototype/find_for_context_and_kind.sql"); -const FIND_FOR_FUNC: &str = include_str!("./queries/action_prototype/find_for_func.sql"); -const FIND_FOR_CONTEXT_AND_FUNC: &str = - include_str!("./queries/action_prototype/find_for_context_and_func.sql"); - -#[derive(Deserialize, Serialize, Debug, Clone, PartialEq, Eq)] -#[serde(rename_all = "camelCase")] -pub struct ActionPrototypeView { - id: ActionPrototypeId, - name: String, - display_name: Option, -} 
- -impl ActionPrototypeView { - pub async fn new( - ctx: &DalContext, - prototype: ActionPrototype, - ) -> ActionPrototypeResult { - let mut display_name = None; - let func_details = Func::get_by_id(ctx, &prototype.func_id).await?; - if let Some(func) = func_details { - display_name = func.display_name().map(|dname| dname.to_string()) - }; - Ok(Self { - id: prototype.id, - name: prototype.name().map_or_else( - || match prototype.kind() { - ActionKind::Create => "create".to_owned(), - ActionKind::Delete => "delete".to_owned(), - ActionKind::Other => "other".to_owned(), - ActionKind::Refresh => "refresh".to_owned(), - }, - ToOwned::to_owned, - ), - display_name, - }) - } -} - -#[remain::sorted] -#[derive(Error, Debug)] -pub enum ActionPrototypeError { - #[error("component error: {0}")] - Component(String), - #[error("component not found: {0}")] - ComponentNotFound(ComponentId), - #[error(transparent)] - ComponentView(#[from] ComponentViewError), - #[error("func error: {0}")] - Func(#[from] FuncError), - #[error(transparent)] - FuncBinding(#[from] FuncBindingError), - #[error(transparent)] - FuncBindingReturnValue(#[from] FuncBindingReturnValueError), - #[error("action Func {0} not found for ActionPrototype {1}")] - FuncNotFound(FuncId, ActionPrototypeId), - #[error("history event error: {0}")] - HistoryEvent(#[from] HistoryEventError), - #[error("this asset already has an action of this kind")] - MultipleOfSameKind, - #[error("nats txn error: {0}")] - Nats(#[from] NatsError), - #[error("not found with kind {0} for context {1:?}")] - NotFoundByKindAndContext(ActionKind, ActionPrototypeContext), - #[error("pg error: {0}")] - Pg(#[from] PgError), - #[error("schema not found")] - SchemaNotFound, - #[error("schema variant not found")] - SchemaVariantNotFound, - #[error("error serializing/deserializing json: {0}")] - SerdeJson(#[from] serde_json::Error), - #[error("standard model error: {0}")] - StandardModelError(#[from] StandardModelError), - #[error("transactions error: 
{0}")] - Transactions(#[from] TransactionsError), - #[error(transparent)] - WsEvent(#[from] WsEventError), -} - -pub type ActionPrototypeResult = Result; +use std::default::Default; +use strum::{AsRefStr, Display, EnumDiscriminants}; + +use crate::workspace_snapshot::content_address::ContentAddress; +use crate::{pk, SchemaVariantId, Timestamp}; + +// const FIND_FOR_CONTEXT: &str = include_str!("./queries/action_prototype/find_for_context.sql"); +// const FIND_FOR_CONTEXT_AND_KIND: &str = +// include_str!("./queries/action_prototype/find_for_context_and_kind.sql"); +// const FIND_FOR_FUNC: &str = include_str!("./queries/action_prototype/find_for_func.sql"); +// const FIND_FOR_CONTEXT_AND_FUNC: &str = +// include_str!("./queries/action_prototype/find_for_context_and_func.sql"); + +// #[derive(Deserialize, Serialize, Debug, Clone, PartialEq, Eq)] +// #[serde(rename_all = "camelCase")] +// pub struct ActionPrototypeView { +// id: ActionPrototypeId, +// name: String, +// display_name: Option, +// } + +// impl ActionPrototypeView { +// pub async fn new( +// ctx: &DalContext, +// prototype: ActionPrototype, +// ) -> ActionPrototypeResult { +// // let mut display_name = None; +// // let func_details = Func::get_by_id(ctx, &prototype.func_id).await?; +// // if let Some(func) = func_details { +// // display_name = func.display_name().map(|dname| dname.to_string()) +// // }; +// Ok(Self { +// id: prototype.id, +// name: prototype.name().map_or_else( +// || match prototype.kind() { +// ActionKind::Create => "create".to_owned(), +// ActionKind::Delete => "delete".to_owned(), +// ActionKind::Other => "other".to_owned(), +// ActionKind::Refresh => "refresh".to_owned(), +// }, +// ToOwned::to_owned, +// ), +// display_name: Some("delete me".to_string()), +// }) +// } +// } + +// #[remain::sorted] +// #[derive(Error, Debug)] +// pub enum ActionPrototypeError { +// #[error("component error: {0}")] +// Component(String), +// #[error("component not found: {0}")] +// 
ComponentNotFound(ComponentId), +// #[error(transparent)] +// ComponentView(#[from] ComponentViewError), +// #[error(transparent)] +// FuncBinding(#[from] FuncBindingError), +// #[error(transparent)] +// FuncBindingReturnValue(#[from] FuncBindingReturnValueError), +// #[error("action Func {0} not found for ActionPrototype {1}")] +// FuncNotFound(FuncId, ActionPrototypeId), +// #[error("history event error: {0}")] +// HistoryEvent(#[from] HistoryEventError), +// #[error("this asset already has an action of this kind")] +// MultipleOfSameKind, +// #[error("nats txn error: {0}")] +// Nats(#[from] NatsError), +// #[error("not found with kind {0} for context {1:?}")] +// NotFoundByKindAndContext(ActionKind, ActionPrototypeContext), +// #[error("pg error: {0}")] +// Pg(#[from] PgError), +// #[error("schema not found")] +// SchemaNotFound, +// #[error("schema variant not found")] +// SchemaVariantNotFound, +// #[error("error serializing/deserializing json: {0}")] +// SerdeJson(#[from] serde_json::Error), +// #[error("standard model error: {0}")] +// StandardModelError(#[from] StandardModelError), +// #[error("transactions error: {0}")] +// Transactions(#[from] TransactionsError), +// #[error(transparent)] +// WsEvent(#[from] WsEventError), +// } + +// pub type ActionPrototypeResult = Result; #[derive(Deserialize, Serialize, Debug, Clone, PartialEq, Eq, Copy)] pub struct ActionPrototypeContext { @@ -145,290 +132,231 @@ impl From<&ActionKind> for ActionFuncSpecKind { } } -// Hrm - is this a universal resolver context? 
-- Adam -impl Default for ActionPrototypeContext { - fn default() -> Self { - Self::new() - } -} - -impl ActionPrototypeContext { - pub fn new() -> Self { - Self { - schema_variant_id: SchemaVariantId::NONE, - } - } - - pub fn new_for_context_field(context_field: ActionPrototypeContextField) -> Self { - match context_field { - ActionPrototypeContextField::SchemaVariant(schema_variant_id) => { - ActionPrototypeContext { schema_variant_id } - } - } - } - - pub fn schema_variant_id(&self) -> SchemaVariantId { - self.schema_variant_id - } - - pub fn set_schema_variant_id(&mut self, schema_variant_id: SchemaVariantId) { - self.schema_variant_id = schema_variant_id; - } -} - -pk!(ActionPrototypePk); pk!(ActionPrototypeId); // An ActionPrototype joins a `FuncId` to a `SchemaVariantId` with a `ActionKind` and `name` #[derive(Deserialize, Serialize, Debug, Clone, PartialEq, Eq)] pub struct ActionPrototype { - pk: ActionPrototypePk, id: ActionPrototypeId, - func_id: FuncId, kind: ActionKind, name: Option, - schema_variant_id: SchemaVariantId, - #[serde(flatten)] - tenancy: Tenancy, #[serde(flatten)] timestamp: Timestamp, - #[serde(flatten)] - visibility: Visibility, } -#[remain::sorted] -#[derive(Debug, Copy, Clone, PartialEq, Eq)] -pub enum ActionPrototypeContextField { - SchemaVariant(SchemaVariantId), +#[derive(Debug, PartialEq)] +pub struct ActionPrototypeGraphNode { + id: ActionPrototypeId, + content_address: ContentAddress, + content: ActionPrototypeContentV1, } -impl From for ActionPrototypeContextField { - fn from(schema_variant_id: SchemaVariantId) -> Self { - ActionPrototypeContextField::SchemaVariant(schema_variant_id) - } +#[derive(EnumDiscriminants, Serialize, Deserialize, PartialEq)] +#[serde(tag = "version")] +pub enum ActionPrototypeContent { + V1(ActionPrototypeContentV1), } -impl_standard_model! 
{ - model: ActionPrototype, - pk: ActionPrototypePk, - id: ActionPrototypeId, - table_name: "action_prototypes", - history_event_label_base: "action_prototype", - history_event_message_name: "Action Prototype" +#[derive(Debug, Clone, Deserialize, Serialize, PartialEq)] +pub struct ActionPrototypeContentV1 { + kind: ActionKind, + name: Option, + #[serde(flatten)] + timestamp: Timestamp, } -impl ActionPrototype { - #[allow(clippy::too_many_arguments)] - #[instrument(skip_all)] - pub async fn new( - ctx: &DalContext, - func_id: FuncId, - kind: ActionKind, - context: ActionPrototypeContext, - ) -> ActionPrototypeResult { - let action_prototypes = Self::find_for_context(ctx, context).await?; - for prototype in action_prototypes { - if *prototype.kind() == kind && kind != ActionKind::Other { - return Err(ActionPrototypeError::MultipleOfSameKind); - } - } - - let row = ctx - .txns() - .await? - .pg() - .query_one( - "SELECT object FROM action_prototype_create_v1($1, $2, $3, $4, $5)", - &[ - ctx.tenancy(), - ctx.visibility(), - &func_id, - &kind.as_ref(), - &context.schema_variant_id(), - ], - ) - .await?; - let object = standard_model::finish_create_from_row(ctx, row).await?; - Ok(object) - } - - pub async fn find_for_context( - ctx: &DalContext, - context: ActionPrototypeContext, - ) -> ActionPrototypeResult> { - let rows = ctx - .txns() - .await? - .pg() - .query( - FIND_FOR_CONTEXT, - &[ - ctx.tenancy(), - ctx.visibility(), - &context.schema_variant_id(), - ], - ) - .await?; - - Ok(standard_model::objects_from_rows(rows)?) - } - - pub async fn find_for_context_and_kind( - ctx: &DalContext, - kind: ActionKind, - context: ActionPrototypeContext, - ) -> ActionPrototypeResult> { - let rows = ctx - .txns() - .await? - .pg() - .query( - FIND_FOR_CONTEXT_AND_KIND, - &[ - ctx.tenancy(), - ctx.visibility(), - &kind.as_ref(), - &context.schema_variant_id(), - ], - ) - .await?; - - Ok(standard_model::objects_from_rows(rows)?) 
- } - - pub async fn find_for_func( - ctx: &DalContext, - func_id: FuncId, - ) -> ActionPrototypeResult> { - let rows = ctx - .txns() - .await? - .pg() - .query(FIND_FOR_FUNC, &[ctx.tenancy(), ctx.visibility(), &func_id]) - .await?; - - Ok(standard_model::objects_from_rows(rows)?) - } - - pub async fn find_for_context_and_func( - ctx: &DalContext, - context: ActionPrototypeContext, - func_id: FuncId, - ) -> ActionPrototypeResult> { - let rows = ctx - .txns() - .await? - .pg() - .query( - FIND_FOR_CONTEXT_AND_FUNC, - &[ - ctx.tenancy(), - ctx.visibility(), - &context.schema_variant_id(), - &func_id, - ], - ) - .await?; - - Ok(standard_model::objects_from_rows(rows)?) - } - - standard_model_accessor!( - schema_variant_id, - Pk(SchemaVariantId), - ActionPrototypeResult - ); - standard_model_accessor!(name, Option, ActionPrototypeResult); - standard_model_accessor!(func_id, Pk(FuncId), ActionPrototypeResult); - standard_model_accessor!(kind, Enum(ActionKind), ActionPrototypeResult); - - pub async fn set_kind_checked( - &mut self, - ctx: &DalContext, - kind: ActionKind, - ) -> ActionPrototypeResult<()> { - let action_prototypes = Self::find_for_context( - ctx, - ActionPrototypeContext { - schema_variant_id: self.schema_variant_id(), - }, - ) - .await?; - for prototype in action_prototypes { - if *prototype.kind() == kind && kind != ActionKind::Other && prototype.id() != self.id() - { - return Err(ActionPrototypeError::MultipleOfSameKind); - } - } - self.set_kind(ctx, kind).await - } - - pub fn context(&self) -> ActionPrototypeContext { - let mut context = ActionPrototypeContext::new(); - context.set_schema_variant_id(self.schema_variant_id); - - context - } - - pub async fn run( - &self, - ctx: &DalContext, - component_id: ComponentId, - ) -> ActionPrototypeResult> { - let component_view = ComponentView::new(ctx, component_id).await?; - - let before = before_funcs_for_component(ctx, &component_id).await?; - - let (_, return_value) = FuncBinding::create_and_execute( - 
ctx, - serde_json::to_value(component_view)?, - self.func_id(), - before, - ) - .await?; - - let mut logs = vec![]; - for stream_part in return_value - .get_output_stream(ctx) - .await? - .unwrap_or_default() - { - logs.push(stream_part); +impl ActionPrototypeGraphNode { + pub fn assemble( + id: impl Into, + content_hash: ContentHash, + content: ActionPrototypeContentV1, + ) -> Self { + Self { + id: id.into(), + content_address: ContentAddress::ActionPrototype(content_hash), + content, } - - logs.sort_by_key(|log| log.timestamp); - - Ok(match return_value.value() { - Some(value) => { - let mut run_result: ActionRunResult = serde_json::from_value(value.clone())?; - run_result.logs = logs.iter().map(|l| l.message.clone()).collect(); - - let deleted_ctx = &ctx.clone_with_delete_visibility(); - let mut component = Component::get_by_id(deleted_ctx, &component_id) - .await? - .ok_or(ActionPrototypeError::ComponentNotFound(component_id))?; - - if component.needs_destroy() && run_result.payload.is_none() { - component - .set_needs_destroy(deleted_ctx, false) - .await - .map_err(|e| ActionPrototypeError::Component(e.to_string()))?; - } - - if component - .set_resource(ctx, run_result.clone()) - .await - .map_err(|e| ActionPrototypeError::Component(e.to_string()))? - { - WsEvent::resource_refreshed(ctx, *component.id()) - .await? - .publish_on_commit(ctx) - .await?; - } - - Some(run_result) - } - None => None, - }) } } + +// impl ActionPrototype { +// pub async fn find_for_context( +// ctx: &DalContext, +// context: ActionPrototypeContext, +// ) -> ActionPrototypeResult> { +// let rows = ctx +// .txns() +// .await? +// .pg() +// .query( +// FIND_FOR_CONTEXT, +// &[ +// ctx.tenancy(), +// ctx.visibility(), +// &context.schema_variant_id(), +// ], +// ) +// .await?; + +// Ok(standard_model::objects_from_rows(rows)?) 
+// } + +// pub async fn find_for_context_and_kind( +// ctx: &DalContext, +// kind: ActionKind, +// context: ActionPrototypeContext, +// ) -> ActionPrototypeResult> { +// let rows = ctx +// .txns() +// .await? +// .pg() +// .query( +// FIND_FOR_CONTEXT_AND_KIND, +// &[ +// ctx.tenancy(), +// ctx.visibility(), +// &kind.as_ref(), +// &context.schema_variant_id(), +// ], +// ) +// .await?; + +// Ok(standard_model::objects_from_rows(rows)?) +// } + +// pub async fn find_for_func( +// ctx: &DalContext, +// func_id: FuncId, +// ) -> ActionPrototypeResult> { +// let rows = ctx +// .txns() +// .await? +// .pg() +// .query(FIND_FOR_FUNC, &[ctx.tenancy(), ctx.visibility(), &func_id]) +// .await?; + +// Ok(standard_model::objects_from_rows(rows)?) +// } + +// pub async fn find_for_context_and_func( +// ctx: &DalContext, +// context: ActionPrototypeContext, +// func_id: FuncId, +// ) -> ActionPrototypeResult> { +// let rows = ctx +// .txns() +// .await? +// .pg() +// .query( +// FIND_FOR_CONTEXT_AND_FUNC, +// &[ +// ctx.tenancy(), +// ctx.visibility(), +// &context.schema_variant_id(), +// &func_id, +// ], +// ) +// .await?; + +// Ok(standard_model::objects_from_rows(rows)?) 
+// } + +// standard_model_accessor!( +// schema_variant_id, +// Pk(SchemaVariantId), +// ActionPrototypeResult +// ); +// standard_model_accessor!(name, Option, ActionPrototypeResult); +// standard_model_accessor!(func_id, Pk(FuncId), ActionPrototypeResult); +// standard_model_accessor!(kind, Enum(ActionKind), ActionPrototypeResult); + +// pub async fn set_kind_checked( +// &mut self, +// ctx: &DalContext, +// kind: ActionKind, +// ) -> ActionPrototypeResult<()> { +// let action_prototypes = Self::find_for_context( +// ctx, +// ActionPrototypeContext { +// schema_variant_id: self.schema_variant_id(), +// }, +// ) +// .await?; +// for prototype in action_prototypes { +// if *prototype.kind() == kind && kind != ActionKind::Other && prototype.id() != self.id() +// { +// return Err(ActionPrototypeError::MultipleOfSameKind); +// } +// } +// self.set_kind(ctx, kind).await +// } + +// pub fn context(&self) -> ActionPrototypeContext { +// let mut context = ActionPrototypeContext::new(); +// context.set_schema_variant_id(self.schema_variant_id); + +// context +// } + +// pub async fn run( +// &self, +// ctx: &DalContext, +// component_id: ComponentId, +// ) -> ActionPrototypeResult> { +// let component_view = ComponentView::new(ctx, component_id).await?; + +// let before = before_funcs_for_component(ctx, &component_id).await?; + +// let (_, return_value) = FuncBinding::create_and_execute( +// ctx, +// serde_json::to_value(component_view)?, +// self.func_id(), +// before, +// ) +// .await?; + +// let mut logs = vec![]; +// for stream_part in return_value +// .get_output_stream(ctx) +// .await? 
+// .unwrap_or_default() +// { +// logs.push(stream_part); +// } + +// logs.sort_by_key(|log| log.timestamp); + +// Ok(match return_value.value() { +// Some(value) => { +// let mut run_result: ActionRunResult = serde_json::from_value(value.clone())?; +// run_result.logs = logs.iter().map(|l| l.message.clone()).collect(); + +// let deleted_ctx = &ctx.clone_with_delete_visibility(); +// let mut component = Component::get_by_id(deleted_ctx, &component_id) +// .await? +// .ok_or(ActionPrototypeError::ComponentNotFound(component_id))?; + +// if component.needs_destroy() && run_result.payload.is_none() { +// component +// .set_needs_destroy(deleted_ctx, false) +// .await +// .map_err(|e| ActionPrototypeError::Component(e.to_string()))?; +// } + +// if component +// .set_resource(ctx, run_result.clone()) +// .await +// .map_err(|e| ActionPrototypeError::Component(e.to_string()))? +// { +// WsEvent::resource_refreshed(ctx, *component.id()) +// .await? +// .publish_on_commit(ctx) +// .await?; +// } + +// Some(run_result) +// } +// None => None, +// }) +// } +// } diff --git a/lib/dal/src/attribute/context.rs b/lib/dal/src/attribute/context.rs index ad9bd0c76f..b72acee904 100644 --- a/lib/dal/src/attribute/context.rs +++ b/lib/dal/src/attribute/context.rs @@ -1,551 +1,551 @@ -//! This module contains the [`AttributeContext`], and its corresponding builder, [`AttributeContextBuilder`]. -//! The context can be scoped with varying levels of specificity, using an order of precedence. -//! The builder ensures the correct order of precedence is maintained whilst setting and unsetting -//! fields of specificity. -//! -//! ## The Order of Precedence -//! -//! The order of precedence is as follows (from least to most "specificity"): -//! - [`PropId`] / [`InternalProviderId`] / [`ExternalProviderId`] -//! - [`ComponentId`] -//! -//! At the level of least "specificity", you can provider have a [`PropId`], an -//! [`InternalProviderId`], or an [`ExternalProviderId`]. 
However, you can only provide one and only -//! one for an [`AttributeContext`] since they are at the same "level" in the order of precedence. -//! -//! ## `AttributeContext` vs. `AttributeReadContext` -//! -//! While the [`AttributeContext`] can be used for both read and write queries, the -//! [`AttributeReadContext`](crate::AttributeReadContext) is useful for read-only queries and for -//! flexibility when searching for objects of varying levels of specificity. - -use serde::{Deserialize, Serialize}; -use std::cmp::Ordering; -use std::default::Default; -use thiserror::Error; - -use crate::{ - ComponentId, DalContext, ExternalProviderId, InternalProviderId, Prop, PropId, StandardModel, - StandardModelError, -}; - -pub mod read; - -use crate::attribute::context::AttributeContextLeastSpecificFieldKind::{ - ExternalProvider, InternalProvider, -}; -pub use read::AttributeReadContext; - -/// Indicates which least specific field for an [`AttributeContext`] is specified and contains the -/// field's value. 
-#[remain::sorted] -#[derive(Debug)] -pub enum AttributeContextLeastSpecificFieldKind { - ExternalProvider(ExternalProviderId), - InternalProvider(InternalProviderId), - Prop(PropId), -} - -#[remain::sorted] -#[derive(Error, Debug)] -pub enum AttributeContextError { - #[error("attribute context builder error: {0}")] - AttributeContextBuilder(#[from] AttributeContextBuilderError), - #[error("could not find least specific field")] - LeastSpecificFieldKindNotFound, - #[error("standard model error: {0}")] - StandardModel(#[from] StandardModelError), -} - -pub type AttributeContextResult = Result; - -#[derive(Deserialize, Serialize, Debug, Clone, Copy, PartialEq, Eq, Hash)] -pub struct AttributeContext { - #[serde(rename = "attribute_context_prop_id")] - prop_id: PropId, - #[serde(rename = "attribute_context_internal_provider_id")] - internal_provider_id: InternalProviderId, - #[serde(rename = "attribute_context_external_provider_id")] - external_provider_id: ExternalProviderId, - #[serde(rename = "attribute_context_component_id")] - component_id: ComponentId, -} - -impl From for AttributeContextBuilder { - fn from(from_context: AttributeContext) -> AttributeContextBuilder { - AttributeContextBuilder { - prop_id: from_context.prop_id(), - internal_provider_id: from_context.internal_provider_id(), - external_provider_id: from_context.external_provider_id(), - component_id: from_context.component_id(), - } - } -} - -impl From for AttributeContextBuilder { - fn from(from_read_context: AttributeReadContext) -> AttributeContextBuilder { - let mut builder = AttributeContextBuilder::new(); - if let Some(prop_id) = from_read_context.prop_id { - builder.set_prop_id(prop_id); - } - if let Some(internal_provider_id) = from_read_context.internal_provider_id { - builder.set_internal_provider_id(internal_provider_id); - } - if let Some(external_provider_id) = from_read_context.external_provider_id { - builder.set_external_provider_id(external_provider_id); - } - if let 
Some(component_id) = from_read_context.component_id { - builder.set_component_id(component_id); - } - builder - } -} - -impl PartialOrd for AttributeContext { - /// How to compare two [`AttributeContexts`](crate::AttributeContext): - /// - /// - [`Ordering::Equal`]: same level of specificity between two contexts - /// - [`Ordering::Greater`]: "self" is "more-specific" than "other" - /// - [`Ordering::Less`]: "self" is "less-specific" than "other" - /// - [`None`]: "self" and "other" have different "least-specific" fields (e.g. "self" is - /// [`Prop`](crate::Prop)-specific and "other" is [`InternalProvider`](crate::InternalProvider)-specific. - fn partial_cmp(&self, other: &Self) -> Option { - if !self.is_component_unset() { - return if !other.is_component_unset() { - Some(Ordering::Equal) - } else { - Some(Ordering::Greater) - }; - } - - if !self.is_external_provider_unset() { - return if !other.is_component_unset() { - Some(Ordering::Less) - } else if !other.is_external_provider_unset() { - Some(Ordering::Equal) - } else { - None - }; - } - - if !self.is_internal_provider_unset() { - return if !other.is_component_unset() { - Some(Ordering::Less) - } else if !other.is_internal_provider_unset() { - Some(Ordering::Equal) - } else { - None - }; - } - - if !self.is_prop_unset() { - return if !other.is_component_unset() { - Some(Ordering::Less) - } else if !other.is_prop_unset() { - Some(Ordering::Equal) - } else { - None - }; - } - - None - } -} - -impl AttributeContext { - pub fn builder() -> AttributeContextBuilder { - AttributeContextBuilder::new() - } - - pub fn prop_id(&self) -> PropId { - self.prop_id - } - - pub fn is_prop_unset(&self) -> bool { - self.prop_id == PropId::NONE - } - - pub fn internal_provider_id(&self) -> InternalProviderId { - self.internal_provider_id - } - - pub fn is_internal_provider_unset(&self) -> bool { - self.internal_provider_id == InternalProviderId::NONE - } - - pub fn external_provider_id(&self) -> ExternalProviderId { - 
self.external_provider_id - } - - pub fn is_external_provider_unset(&self) -> bool { - self.external_provider_id == ExternalProviderId::NONE - } - - pub fn component_id(&self) -> ComponentId { - self.component_id - } - - pub fn is_component_unset(&self) -> bool { - self.component_id == ComponentId::NONE - } - - pub fn is_least_specific(&self) -> bool { - self.component_id == ComponentId::NONE - } - - /// Return a new [`AttributeContext`] with the most specific piece - /// of the current [`AttributeContext`] unset, widening the scope - /// of the context by one step. If widening the context would - /// result in everything being unset, it will return a new copy of - /// the current [`AttributeContext`]. - pub fn less_specific(&self) -> AttributeContextResult { - let mut builder = AttributeContextBuilder::from(*self); - if self.component_id() != ComponentId::NONE { - builder.unset_component_id(); - } - Ok(builder.to_context()?) - } - - /// Returns true if the least specific field corresponds to a [`Prop`](crate::Prop). - pub fn is_least_specific_field_kind_prop(&self) -> AttributeContextResult { - if let AttributeContextLeastSpecificFieldKind::Prop(_) = self.least_specific_field_kind()? { - Ok(true) - } else { - Ok(false) - } - } - - /// Returns true if the least specific field corresponds to an [`InternalProvider`](crate::InternalProvider). - pub fn is_least_specific_field_kind_internal_provider(&self) -> AttributeContextResult { - if let InternalProvider(_) = self.least_specific_field_kind()? { - Ok(true) - } else { - Ok(false) - } - } - - /// Returns true if the least specific field corresponds to an [`InternalProvider`](crate::InternalProvider) - /// _or_ an [`ExternalProvider`](crate::ExternalProvider). - pub fn is_least_specific_field_kind_internal_or_external_provider( - &self, - ) -> AttributeContextResult { - match self.least_specific_field_kind()? 
{ - InternalProvider(_) | ExternalProvider(_) => Ok(true), - _ => Ok(false), - } - } - - /// Returns true if the least specific field corresponds to an [`ExternalProvider`](crate::ExternalProvider). - pub fn is_least_specific_field_kind_external_provider(&self) -> AttributeContextResult { - if let ExternalProvider(_) = self.least_specific_field_kind()? { - Ok(true) - } else { - Ok(false) - } - } - - /// Returns the [`AttributeContextLeastSpecificFieldKind`] that is "set" for [`Self`]. - pub fn least_specific_field_kind( - &self, - ) -> AttributeContextResult { - if self.prop_id != PropId::NONE { - Ok(AttributeContextLeastSpecificFieldKind::Prop(self.prop_id)) - } else if self.internal_provider_id != InternalProviderId::NONE { - Ok(InternalProvider(self.internal_provider_id)) - } else if self.external_provider_id != ExternalProviderId::NONE { - Ok(ExternalProvider(self.external_provider_id)) - } else { - // This should never be possible to hit, but this check exists to protect - // against potential regressions. - Err(AttributeContextError::LeastSpecificFieldKindNotFound) - } - } - - pub async fn prop(&self, ctx: &DalContext) -> AttributeContextResult> { - Ok(Prop::get_by_id(ctx, &self.prop_id()).await?) - } - - pub async fn internal_provider( - &self, - ctx: &DalContext, - ) -> AttributeContextResult> { - Ok(crate::InternalProvider::get_by_id(ctx, &self.internal_provider_id()).await?) - } - - pub async fn external_provider( - &self, - ctx: &DalContext, - ) -> AttributeContextResult> { - Ok(crate::ExternalProvider::get_by_id(ctx, &self.external_provider_id()).await?) 
- } -} - -#[remain::sorted] -#[derive(Error, Debug)] -pub enum AttributeContextBuilderError { - #[error( - "cannot specify more than one field at the lowest level in the order of precedence: {0:?}" - )] - MultipleLeastSpecificFieldsSpecified(AttributeContextBuilder), - #[error("for builder {0:?}, the following fields must be set: {1:?}")] - PrerequisteFieldsUnset(AttributeContextBuilder, Vec<&'static str>), -} - -pub type AttributeContextBuilderResult = Result; - -/// A builder with non-consuming "setter" and "unsetter" methods that -/// verify the order of precedence for [`AttributeContext`]. -#[derive(Deserialize, Serialize, Debug, Clone, PartialEq, Eq, Copy)] -pub struct AttributeContextBuilder { - prop_id: PropId, - internal_provider_id: InternalProviderId, - external_provider_id: ExternalProviderId, - component_id: ComponentId, -} - -/// Returns [`Self::new()`]. -impl Default for AttributeContextBuilder { - fn default() -> Self { - Self::new() - } -} - -impl AttributeContextBuilder { - /// Creates [`Self`] with all fields unset. - pub fn new() -> Self { - Self { - prop_id: PropId::NONE, - internal_provider_id: InternalProviderId::NONE, - external_provider_id: ExternalProviderId::NONE, - component_id: ComponentId::NONE, - } - } - - pub fn to_context_unchecked(&self) -> AttributeContext { - AttributeContext { - prop_id: self.prop_id, - internal_provider_id: self.internal_provider_id, - external_provider_id: self.external_provider_id, - component_id: self.component_id, - } - } - - /// Converts [`Self`] to [`AttributeContext`]. This method will - /// fail if the order of precedence is broken (i.e. more-specific - /// fields are set, but one-to-all less-specific fields are unset) - /// or if the field of least specificity, [`PropId`], is unset. - pub fn to_context(&self) -> AttributeContextBuilderResult { - let mut unset_prerequisite_fields = Vec::new(); - - // The lowest level in the order of precedence must always be set. 
- if self.prop_id == PropId::NONE - && self.internal_provider_id == InternalProviderId::NONE - && self.external_provider_id == ExternalProviderId::NONE - { - unset_prerequisite_fields.push("PropId or InternalProviderId or ExternalProviderId"); - } - - // Only one field at the lowest level in the order of precedence can be set. - #[allow(clippy::nonminimal_bool)] - if (self.prop_id != PropId::NONE && self.internal_provider_id != InternalProviderId::NONE) - || (self.prop_id != PropId::NONE - && self.external_provider_id != ExternalProviderId::NONE) - || (self.internal_provider_id != InternalProviderId::NONE - && self.external_provider_id != ExternalProviderId::NONE) - { - return Err(AttributeContextBuilderError::MultipleLeastSpecificFieldsSpecified(*self)); - } - - if !unset_prerequisite_fields.is_empty() { - return Err(AttributeContextBuilderError::PrerequisteFieldsUnset( - *self, - unset_prerequisite_fields, - )); - } - - Ok(AttributeContext { - prop_id: self.prop_id, - internal_provider_id: self.internal_provider_id, - external_provider_id: self.external_provider_id, - component_id: self.component_id, - }) - } - - /// Sets the [`PropId`] field. If the unset value is passed in, then - /// [`Self::unset_prop_id()`] is returned. - pub fn set_prop_id(&mut self, prop_id: PropId) -> &mut Self { - if prop_id == PropId::NONE { - return self.unset_prop_id(); - } - self.prop_id = prop_id; - self - } - - /// Sets the [`InternalProviderId`] field. If the unset value is passed in, then - /// [`Self::unset_internal_provider_id()`] is returned. - pub fn set_internal_provider_id( - &mut self, - internal_provider_id: InternalProviderId, - ) -> &mut Self { - if internal_provider_id == InternalProviderId::NONE { - return self.unset_internal_provider_id(); - } - self.internal_provider_id = internal_provider_id; - self - } - - /// Sets the [`ExternalProviderId`] field. If the unset value is passed in, then - /// [`Self::unset_external_provider_id()`] is returned. 
- pub fn set_external_provider_id( - &mut self, - external_provider_id: ExternalProviderId, - ) -> &mut Self { - if external_provider_id == ExternalProviderId::NONE { - return self.unset_external_provider_id(); - } - self.external_provider_id = external_provider_id; - self - } - - /// Sets the [`ComponentId`] field. If the unset value is passed in, then - /// [`Self::unset_component_id()`] is returned. - pub fn set_component_id(&mut self, component_id: ComponentId) -> &mut Self { - if component_id == ComponentId::NONE { - return self.unset_component_id(); - } - self.component_id = component_id; - self - } - - /// Unsets the [`PropId`]. - pub fn unset_prop_id(&mut self) -> &mut Self { - self.prop_id = PropId::NONE; - self - } - - /// Unsets the [`InternalProviderId`]. - pub fn unset_internal_provider_id(&mut self) -> &mut Self { - self.internal_provider_id = InternalProviderId::NONE; - self - } - - /// Unsets the [`ExternalProviderId`]. - pub fn unset_external_provider_id(&mut self) -> &mut Self { - self.external_provider_id = ExternalProviderId::NONE; - self - } - - /// Unsets the [`ComponentId`]. - pub fn unset_component_id(&mut self) -> &mut Self { - self.component_id = ComponentId::NONE; - self - } -} - -impl postgres_types::ToSql for AttributeContext { - fn to_sql( - &self, - ty: &postgres_types::Type, - out: &mut postgres_types::private::BytesMut, - ) -> Result> - where - Self: Sized, - { - let json = serde_json::to_value(self)?; - postgres_types::ToSql::to_sql(&json, ty, out) - } - - fn accepts(ty: &postgres_types::Type) -> bool - where - Self: Sized, - { - ty == &postgres_types::Type::JSONB - } - - fn to_sql_checked( - &self, - ty: &postgres_types::Type, - out: &mut postgres_types::private::BytesMut, - ) -> Result> { - let json = serde_json::to_value(self)?; - postgres_types::ToSql::to_sql(&json, ty, out) - } -} - -// NOTE(nick): there are only error permutations tests for fields that have at least two prerequisite -// fields. 
Thus ComponentId, and SchemaVariantId have error permutations tests and SchemaId -// and PropId do not. - -// TODO(nick): for the aforementioned error permutations tests, when/if more "layers" are added, we will likely -// need a helper to "flip" values from set to unset (and vice versa) to automatically test every condition. -// Currently, all error permutations are manually written. In an example using an automatic setup, the -// helper could provide an iteration method that flips each fields value from unset to -// "Id::generate()" and vice versa. Then, the test writer could supply contraints to indicate when the helper -// should expect failure or success upon iteration. - -#[cfg(test)] -mod tests { - use super::*; - - #[test] - fn less_specific() { - let prop_id = PropId::generate(); - let component_id = ComponentId::generate(); - let context = AttributeContextBuilder::new() - .set_prop_id(prop_id) - .set_component_id(component_id) - .to_context() - .expect("cannot build attribute context"); - assert!(!context.is_least_specific()); - - let new_context = context - .less_specific() - .expect("cannot create less specific context"); - - assert_eq!( - AttributeContextBuilder::new() - .set_prop_id(prop_id) - .to_context() - .expect("cannot create expected context"), - new_context, - ); - - let new_context = new_context - .less_specific() - .expect("cannot create less specific context"); - - // Should be the exact same. 
- assert_eq!( - AttributeContextBuilder::new() - .set_prop_id(prop_id) - .to_context() - .expect("cannot create expected context"), - new_context, - ); - assert!(new_context.is_least_specific()); - } - - #[test] - fn builder_new() { - let prop_id = PropId::generate(); - let component_id = ComponentId::generate(); - - let mut builder = AttributeContextBuilder::new(); - - // Empty (FAIL) - assert!(builder.to_context().is_err()); - - // ComponentId without PropId (FAIL) - builder.set_component_id(component_id); - assert!(builder.to_context().is_err()); - builder.unset_component_id(); - - // PropId (PASS) - builder.set_prop_id(prop_id); - assert!(builder.to_context().is_ok()); - - // ComponentId with PropId (PASS) - builder.set_component_id(component_id); - assert!(builder.to_context().is_ok()); - } -} +// //! This module contains the [`AttributeContext`], and its corresponding builder, [`AttributeContextBuilder`]. +// //! The context can be scoped with varying levels of specificity, using an order of precedence. +// //! The builder ensures the correct order of precedence is maintained whilst setting and unsetting +// //! fields of specificity. +// //! +// //! ## The Order of Precedence +// //! +// //! The order of precedence is as follows (from least to most "specificity"): +// //! - [`PropId`] / [`InternalProviderId`] / [`ExternalProviderId`] +// //! - [`ComponentId`] +// //! +// //! At the level of least "specificity", you can provider have a [`PropId`], an +// //! [`InternalProviderId`], or an [`ExternalProviderId`]. However, you can only provide one and only +// //! one for an [`AttributeContext`] since they are at the same "level" in the order of precedence. +// //! +// //! ## `AttributeContext` vs. `AttributeReadContext` +// //! +// //! While the [`AttributeContext`] can be used for both read and write queries, the +// //! [`AttributeReadContext`](crate::AttributeReadContext) is useful for read-only queries and for +// //! 
flexibility when searching for objects of varying levels of specificity. + +// use serde::{Deserialize, Serialize}; +// use std::cmp::Ordering; +// use std::default::Default; +// use thiserror::Error; + +// use crate::{ +// ComponentId, DalContext, ExternalProviderId, InternalProviderId, Prop, PropId, StandardModel, +// StandardModelError, +// }; + +// pub mod read; + +// use crate::attribute::context::AttributeContextLeastSpecificFieldKind::{ +// ExternalProvider, InternalProvider, +// }; +// pub use read::AttributeReadContext; + +// /// Indicates which least specific field for an [`AttributeContext`] is specified and contains the +// /// field's value. +// #[remain::sorted] +// #[derive(Debug)] +// pub enum AttributeContextLeastSpecificFieldKind { +// ExternalProvider(ExternalProviderId), +// InternalProvider(InternalProviderId), +// Prop(PropId), +// } + +// #[remain::sorted] +// #[derive(Error, Debug)] +// pub enum AttributeContextError { +// #[error("attribute context builder error: {0}")] +// AttributeContextBuilder(#[from] AttributeContextBuilderError), +// #[error("could not find least specific field")] +// LeastSpecificFieldKindNotFound, +// #[error("standard model error: {0}")] +// StandardModel(#[from] StandardModelError), +// } + +// pub type AttributeContextResult = Result; + +// #[derive(Deserialize, Serialize, Debug, Clone, Copy, PartialEq, Eq, Hash)] +// pub struct AttributeContext { +// #[serde(rename = "attribute_context_prop_id")] +// prop_id: PropId, +// #[serde(rename = "attribute_context_internal_provider_id")] +// internal_provider_id: InternalProviderId, +// #[serde(rename = "attribute_context_external_provider_id")] +// external_provider_id: ExternalProviderId, +// #[serde(rename = "attribute_context_component_id")] +// component_id: ComponentId, +// } + +// impl From for AttributeContextBuilder { +// fn from(from_context: AttributeContext) -> AttributeContextBuilder { +// AttributeContextBuilder { +// prop_id: from_context.prop_id(), +// 
internal_provider_id: from_context.internal_provider_id(), +// external_provider_id: from_context.external_provider_id(), +// component_id: from_context.component_id(), +// } +// } +// } + +// impl From for AttributeContextBuilder { +// fn from(from_read_context: AttributeReadContext) -> AttributeContextBuilder { +// let mut builder = AttributeContextBuilder::new(); +// if let Some(prop_id) = from_read_context.prop_id { +// builder.set_prop_id(prop_id); +// } +// if let Some(internal_provider_id) = from_read_context.internal_provider_id { +// builder.set_internal_provider_id(internal_provider_id); +// } +// if let Some(external_provider_id) = from_read_context.external_provider_id { +// builder.set_external_provider_id(external_provider_id); +// } +// if let Some(component_id) = from_read_context.component_id { +// builder.set_component_id(component_id); +// } +// builder +// } +// } + +// impl PartialOrd for AttributeContext { +// /// How to compare two [`AttributeContexts`](crate::AttributeContext): +// /// +// /// - [`Ordering::Equal`]: same level of specificity between two contexts +// /// - [`Ordering::Greater`]: "self" is "more-specific" than "other" +// /// - [`Ordering::Less`]: "self" is "less-specific" than "other" +// /// - [`None`]: "self" and "other" have different "least-specific" fields (e.g. "self" is +// /// [`Prop`](crate::Prop)-specific and "other" is [`InternalProvider`](crate::InternalProvider)-specific. 
+// fn partial_cmp(&self, other: &Self) -> Option { +// if !self.is_component_unset() { +// return if !other.is_component_unset() { +// Some(Ordering::Equal) +// } else { +// Some(Ordering::Greater) +// }; +// } + +// if !self.is_external_provider_unset() { +// return if !other.is_component_unset() { +// Some(Ordering::Less) +// } else if !other.is_external_provider_unset() { +// Some(Ordering::Equal) +// } else { +// None +// }; +// } + +// if !self.is_internal_provider_unset() { +// return if !other.is_component_unset() { +// Some(Ordering::Less) +// } else if !other.is_internal_provider_unset() { +// Some(Ordering::Equal) +// } else { +// None +// }; +// } + +// if !self.is_prop_unset() { +// return if !other.is_component_unset() { +// Some(Ordering::Less) +// } else if !other.is_prop_unset() { +// Some(Ordering::Equal) +// } else { +// None +// }; +// } + +// None +// } +// } + +// impl AttributeContext { +// pub fn builder() -> AttributeContextBuilder { +// AttributeContextBuilder::new() +// } + +// pub fn prop_id(&self) -> PropId { +// self.prop_id +// } + +// pub fn is_prop_unset(&self) -> bool { +// self.prop_id == PropId::NONE +// } + +// pub fn internal_provider_id(&self) -> InternalProviderId { +// self.internal_provider_id +// } + +// pub fn is_internal_provider_unset(&self) -> bool { +// self.internal_provider_id == InternalProviderId::NONE +// } + +// pub fn external_provider_id(&self) -> ExternalProviderId { +// self.external_provider_id +// } + +// pub fn is_external_provider_unset(&self) -> bool { +// self.external_provider_id == ExternalProviderId::NONE +// } + +// pub fn component_id(&self) -> ComponentId { +// self.component_id +// } + +// pub fn is_component_unset(&self) -> bool { +// self.component_id == ComponentId::NONE +// } + +// pub fn is_least_specific(&self) -> bool { +// self.component_id == ComponentId::NONE +// } + +// /// Return a new [`AttributeContext`] with the most specific piece +// /// of the current [`AttributeContext`] 
unset, widening the scope +// /// of the context by one step. If widening the context would +// /// result in everything being unset, it will return a new copy of +// /// the current [`AttributeContext`]. +// pub fn less_specific(&self) -> AttributeContextResult { +// let mut builder = AttributeContextBuilder::from(*self); +// if self.component_id() != ComponentId::NONE { +// builder.unset_component_id(); +// } +// Ok(builder.to_context()?) +// } + +// /// Returns true if the least specific field corresponds to a [`Prop`](crate::Prop). +// pub fn is_least_specific_field_kind_prop(&self) -> AttributeContextResult { +// if let AttributeContextLeastSpecificFieldKind::Prop(_) = self.least_specific_field_kind()? { +// Ok(true) +// } else { +// Ok(false) +// } +// } + +// /// Returns true if the least specific field corresponds to an [`InternalProvider`](crate::InternalProvider). +// pub fn is_least_specific_field_kind_internal_provider(&self) -> AttributeContextResult { +// if let InternalProvider(_) = self.least_specific_field_kind()? { +// Ok(true) +// } else { +// Ok(false) +// } +// } + +// /// Returns true if the least specific field corresponds to an [`InternalProvider`](crate::InternalProvider) +// /// _or_ an [`ExternalProvider`](crate::ExternalProvider). +// pub fn is_least_specific_field_kind_internal_or_external_provider( +// &self, +// ) -> AttributeContextResult { +// match self.least_specific_field_kind()? { +// InternalProvider(_) | ExternalProvider(_) => Ok(true), +// _ => Ok(false), +// } +// } + +// /// Returns true if the least specific field corresponds to an [`ExternalProvider`](crate::ExternalProvider). +// pub fn is_least_specific_field_kind_external_provider(&self) -> AttributeContextResult { +// if let ExternalProvider(_) = self.least_specific_field_kind()? { +// Ok(true) +// } else { +// Ok(false) +// } +// } + +// /// Returns the [`AttributeContextLeastSpecificFieldKind`] that is "set" for [`Self`]. 
+// pub fn least_specific_field_kind( +// &self, +// ) -> AttributeContextResult { +// if self.prop_id != PropId::NONE { +// Ok(AttributeContextLeastSpecificFieldKind::Prop(self.prop_id)) +// } else if self.internal_provider_id != InternalProviderId::NONE { +// Ok(InternalProvider(self.internal_provider_id)) +// } else if self.external_provider_id != ExternalProviderId::NONE { +// Ok(ExternalProvider(self.external_provider_id)) +// } else { +// // This should never be possible to hit, but this check exists to protect +// // against potential regressions. +// Err(AttributeContextError::LeastSpecificFieldKindNotFound) +// } +// } + +// pub async fn prop(&self, ctx: &DalContext) -> AttributeContextResult> { +// Ok(Prop::get_by_id(ctx, &self.prop_id()).await?) +// } + +// pub async fn internal_provider( +// &self, +// ctx: &DalContext, +// ) -> AttributeContextResult> { +// Ok(crate::InternalProvider::get_by_id(ctx, &self.internal_provider_id()).await?) +// } + +// pub async fn external_provider( +// &self, +// ctx: &DalContext, +// ) -> AttributeContextResult> { +// Ok(crate::ExternalProvider::get_by_id(ctx, &self.external_provider_id()).await?) +// } +// } + +// #[remain::sorted] +// #[derive(Error, Debug)] +// pub enum AttributeContextBuilderError { +// #[error( +// "cannot specify more than one field at the lowest level in the order of precedence: {0:?}" +// )] +// MultipleLeastSpecificFieldsSpecified(AttributeContextBuilder), +// #[error("for builder {0:?}, the following fields must be set: {1:?}")] +// PrerequisteFieldsUnset(AttributeContextBuilder, Vec<&'static str>), +// } + +// pub type AttributeContextBuilderResult = Result; + +// /// A builder with non-consuming "setter" and "unsetter" methods that +// /// verify the order of precedence for [`AttributeContext`]. 
+// #[derive(Deserialize, Serialize, Debug, Clone, PartialEq, Eq, Copy)] +// pub struct AttributeContextBuilder { +// prop_id: PropId, +// internal_provider_id: InternalProviderId, +// external_provider_id: ExternalProviderId, +// component_id: ComponentId, +// } + +// /// Returns [`Self::new()`]. +// impl Default for AttributeContextBuilder { +// fn default() -> Self { +// Self::new() +// } +// } + +// impl AttributeContextBuilder { +// /// Creates [`Self`] with all fields unset. +// pub fn new() -> Self { +// Self { +// prop_id: PropId::NONE, +// internal_provider_id: InternalProviderId::NONE, +// external_provider_id: ExternalProviderId::NONE, +// component_id: ComponentId::NONE, +// } +// } + +// pub fn to_context_unchecked(&self) -> AttributeContext { +// AttributeContext { +// prop_id: self.prop_id, +// internal_provider_id: self.internal_provider_id, +// external_provider_id: self.external_provider_id, +// component_id: self.component_id, +// } +// } + +// /// Converts [`Self`] to [`AttributeContext`]. This method will +// /// fail if the order of precedence is broken (i.e. more-specific +// /// fields are set, but one-to-all less-specific fields are unset) +// /// or if the field of least specificity, [`PropId`], is unset. +// pub fn to_context(&self) -> AttributeContextBuilderResult { +// let mut unset_prerequisite_fields = Vec::new(); + +// // The lowest level in the order of precedence must always be set. +// if self.prop_id == PropId::NONE +// && self.internal_provider_id == InternalProviderId::NONE +// && self.external_provider_id == ExternalProviderId::NONE +// { +// unset_prerequisite_fields.push("PropId or InternalProviderId or ExternalProviderId"); +// } + +// // Only one field at the lowest level in the order of precedence can be set. 
+// #[allow(clippy::nonminimal_bool)] +// if (self.prop_id != PropId::NONE && self.internal_provider_id != InternalProviderId::NONE) +// || (self.prop_id != PropId::NONE +// && self.external_provider_id != ExternalProviderId::NONE) +// || (self.internal_provider_id != InternalProviderId::NONE +// && self.external_provider_id != ExternalProviderId::NONE) +// { +// return Err(AttributeContextBuilderError::MultipleLeastSpecificFieldsSpecified(*self)); +// } + +// if !unset_prerequisite_fields.is_empty() { +// return Err(AttributeContextBuilderError::PrerequisteFieldsUnset( +// *self, +// unset_prerequisite_fields, +// )); +// } + +// Ok(AttributeContext { +// prop_id: self.prop_id, +// internal_provider_id: self.internal_provider_id, +// external_provider_id: self.external_provider_id, +// component_id: self.component_id, +// }) +// } + +// /// Sets the [`PropId`] field. If the unset value is passed in, then +// /// [`Self::unset_prop_id()`] is returned. +// pub fn set_prop_id(&mut self, prop_id: PropId) -> &mut Self { +// if prop_id == PropId::NONE { +// return self.unset_prop_id(); +// } +// self.prop_id = prop_id; +// self +// } + +// /// Sets the [`InternalProviderId`] field. If the unset value is passed in, then +// /// [`Self::unset_internal_provider_id()`] is returned. +// pub fn set_internal_provider_id( +// &mut self, +// internal_provider_id: InternalProviderId, +// ) -> &mut Self { +// if internal_provider_id == InternalProviderId::NONE { +// return self.unset_internal_provider_id(); +// } +// self.internal_provider_id = internal_provider_id; +// self +// } + +// /// Sets the [`ExternalProviderId`] field. If the unset value is passed in, then +// /// [`Self::unset_external_provider_id()`] is returned. 
+// pub fn set_external_provider_id( +// &mut self, +// external_provider_id: ExternalProviderId, +// ) -> &mut Self { +// if external_provider_id == ExternalProviderId::NONE { +// return self.unset_external_provider_id(); +// } +// self.external_provider_id = external_provider_id; +// self +// } + +// /// Sets the [`ComponentId`] field. If the unset value is passed in, then +// /// [`Self::unset_component_id()`] is returned. +// pub fn set_component_id(&mut self, component_id: ComponentId) -> &mut Self { +// if component_id == ComponentId::NONE { +// return self.unset_component_id(); +// } +// self.component_id = component_id; +// self +// } + +// /// Unsets the [`PropId`]. +// pub fn unset_prop_id(&mut self) -> &mut Self { +// self.prop_id = PropId::NONE; +// self +// } + +// /// Unsets the [`InternalProviderId`]. +// pub fn unset_internal_provider_id(&mut self) -> &mut Self { +// self.internal_provider_id = InternalProviderId::NONE; +// self +// } + +// /// Unsets the [`ExternalProviderId`]. +// pub fn unset_external_provider_id(&mut self) -> &mut Self { +// self.external_provider_id = ExternalProviderId::NONE; +// self +// } + +// /// Unsets the [`ComponentId`]. 
+// pub fn unset_component_id(&mut self) -> &mut Self { +// self.component_id = ComponentId::NONE; +// self +// } +// } + +// impl postgres_types::ToSql for AttributeContext { +// fn to_sql( +// &self, +// ty: &postgres_types::Type, +// out: &mut postgres_types::private::BytesMut, +// ) -> Result> +// where +// Self: Sized, +// { +// let json = serde_json::to_value(self)?; +// postgres_types::ToSql::to_sql(&json, ty, out) +// } + +// fn accepts(ty: &postgres_types::Type) -> bool +// where +// Self: Sized, +// { +// ty == &postgres_types::Type::JSONB +// } + +// fn to_sql_checked( +// &self, +// ty: &postgres_types::Type, +// out: &mut postgres_types::private::BytesMut, +// ) -> Result> { +// let json = serde_json::to_value(self)?; +// postgres_types::ToSql::to_sql(&json, ty, out) +// } +// } + +// // NOTE(nick): there are only error permutations tests for fields that have at least two prerequisite +// // fields. Thus ComponentId, and SchemaVariantId have error permutations tests and SchemaId +// // and PropId do not. + +// // TODO(nick): for the aforementioned error permutations tests, when/if more "layers" are added, we will likely +// // need a helper to "flip" values from set to unset (and vice versa) to automatically test every condition. +// // Currently, all error permutations are manually written. In an example using an automatic setup, the +// // helper could provide an iteration method that flips each fields value from unset to +// // "Id::generate()" and vice versa. Then, the test writer could supply contraints to indicate when the helper +// // should expect failure or success upon iteration. 
+ +// #[cfg(test)] +// mod tests { +// use super::*; + +// #[test] +// fn less_specific() { +// let prop_id = PropId::generate(); +// let component_id = ComponentId::generate(); +// let context = AttributeContextBuilder::new() +// .set_prop_id(prop_id) +// .set_component_id(component_id) +// .to_context() +// .expect("cannot build attribute context"); +// assert!(!context.is_least_specific()); + +// let new_context = context +// .less_specific() +// .expect("cannot create less specific context"); + +// assert_eq!( +// AttributeContextBuilder::new() +// .set_prop_id(prop_id) +// .to_context() +// .expect("cannot create expected context"), +// new_context, +// ); + +// let new_context = new_context +// .less_specific() +// .expect("cannot create less specific context"); + +// // Should be the exact same. +// assert_eq!( +// AttributeContextBuilder::new() +// .set_prop_id(prop_id) +// .to_context() +// .expect("cannot create expected context"), +// new_context, +// ); +// assert!(new_context.is_least_specific()); +// } + +// #[test] +// fn builder_new() { +// let prop_id = PropId::generate(); +// let component_id = ComponentId::generate(); + +// let mut builder = AttributeContextBuilder::new(); + +// // Empty (FAIL) +// assert!(builder.to_context().is_err()); + +// // ComponentId without PropId (FAIL) +// builder.set_component_id(component_id); +// assert!(builder.to_context().is_err()); +// builder.unset_component_id(); + +// // PropId (PASS) +// builder.set_prop_id(prop_id); +// assert!(builder.to_context().is_ok()); + +// // ComponentId with PropId (PASS) +// builder.set_component_id(component_id); +// assert!(builder.to_context().is_ok()); +// } +// } diff --git a/lib/dal/src/attribute/context/read.rs b/lib/dal/src/attribute/context/read.rs index 7077e1eb46..0a29fef2c0 100644 --- a/lib/dal/src/attribute/context/read.rs +++ b/lib/dal/src/attribute/context/read.rs @@ -1,237 +1,237 @@ -use serde::{Deserialize, Serialize}; - -use crate::{AttributeContext, ComponentId, 
ExternalProviderId, InternalProviderId, PropId}; - -/// An `AttributeReadContext` allows for saying "do not use this field -/// to filter results" by providing [`None`] for the field's value. -/// It also allows for saying "explicitly filter out results for that -/// have this field set" by providing the unset value for the field's -/// value. -/// -/// For example: -/// -/// ```rust -/// # use dal::attribute::context::read::AttributeReadContext; -/// # use dal::{ExternalProviderId, InternalProviderId, ComponentId}; -/// let read_context = AttributeReadContext { -/// prop_id: None, -/// internal_provider_id: Some(InternalProviderId::NONE), -/// external_provider_id: Some(ExternalProviderId::NONE), -/// component_id: Some(ComponentId::generate()) -/// }; -/// ``` -/// -/// The above `AttributeReadContext` would be used for finding all -/// attributes, across all [`Props`](crate::Prop) that have been set -/// for a given [`ComponentId`]. -#[derive(Deserialize, Serialize, Debug, Clone, Copy, PartialEq, Eq)] -pub struct AttributeReadContext { - #[serde(rename = "attribute_context_prop_id")] - pub prop_id: Option, - #[serde(rename = "attribute_context_internal_provider_id")] - pub internal_provider_id: Option, - #[serde(rename = "attribute_context_external_provider_id")] - pub external_provider_id: Option, - #[serde(rename = "attribute_context_component_id")] - pub component_id: Option, -} - -impl Default for AttributeReadContext { - fn default() -> Self { - Self { - prop_id: Some(PropId::NONE), - internal_provider_id: Some(InternalProviderId::NONE), - external_provider_id: Some(ExternalProviderId::NONE), - component_id: Some(ComponentId::NONE), - } - } -} - -impl From for AttributeReadContext { - fn from(from_context: AttributeContext) -> Self { - Self { - prop_id: Some(from_context.prop_id()), - internal_provider_id: Some(from_context.internal_provider_id()), - external_provider_id: Some(from_context.external_provider_id()), - component_id: 
Some(from_context.component_id()), - } - } -} - -impl AttributeReadContext { - /// Creates a [`read context`](Self) with a given [`PropId`](crate::Prop) - /// and all other fields set to their defaults. - pub fn default_with_prop(prop_id: PropId) -> Self { - Self { - prop_id: Some(prop_id), - ..Self::default() - } - } - - pub fn default_with_prop_and_component_id( - prop_id: PropId, - component_id: Option, - ) -> Self { - Self { - prop_id: Some(prop_id), - component_id: match component_id { - Some(component_id) => Some(component_id), - None => Some(ComponentId::NONE), - }, - ..Self::default() - } - } - - /// Creates a [`read context`](Self) with a given [`InternalProviderId`](crate::InternalProvider) - /// and all other fields set to their defaults. - pub fn default_with_internal_provider(internal_provider_id: InternalProviderId) -> Self { - Self { - internal_provider_id: Some(internal_provider_id), - ..Self::default() - } - } - - /// Creates a [`read context`](Self) with a given [`ExternalProviderId`](crate::ExternalProvider) - /// and all other fields set to their defaults. 
- pub fn default_with_external_provider(external_provider_id: ExternalProviderId) -> Self { - Self { - external_provider_id: Some(external_provider_id), - ..Self::default() - } - } - - pub fn prop_id(&self) -> Option { - self.prop_id - } - - pub fn has_prop_id(&self) -> bool { - self.prop_id.is_some() - } - - pub fn has_set_prop_id(&self) -> bool { - if let Some(prop_id) = self.prop_id { - prop_id != PropId::NONE - } else { - false - } - } - - pub fn has_unset_prop_id(&self) -> bool { - if let Some(prop_id) = self.prop_id { - prop_id == PropId::NONE - } else { - false - } - } - - pub fn internal_provider_id(&self) -> Option { - self.internal_provider_id - } - - pub fn has_internal_provider_id(&self) -> bool { - self.internal_provider_id.is_some() - } - - pub fn has_set_internal_provider(&self) -> bool { - if let Some(internal_provider) = self.internal_provider_id { - internal_provider != InternalProviderId::NONE - } else { - false - } - } - - pub fn has_unset_internal_provider(&self) -> bool { - if let Some(internal_provider) = self.internal_provider_id { - internal_provider == InternalProviderId::NONE - } else { - false - } - } - - pub fn external_provider_id(&self) -> Option { - self.external_provider_id - } - - pub fn has_external_provider_id(&self) -> bool { - self.external_provider_id.is_some() - } - - pub fn has_set_external_provider(&self) -> bool { - if let Some(external_provider) = self.external_provider_id { - external_provider != ExternalProviderId::NONE - } else { - false - } - } - - pub fn has_unset_external_provider(&self) -> bool { - if let Some(external_provider) = self.external_provider_id { - external_provider == ExternalProviderId::NONE - } else { - false - } - } - - pub fn component_id(&self) -> Option { - self.component_id - } - - pub fn has_component_id(&self) -> bool { - self.component_id.is_some() - } - - pub fn has_set_component_id(&self) -> bool { - if let Some(component_id) = self.component_id { - component_id != ComponentId::NONE - } 
else { - false - } - } - - pub fn has_unset_component_id(&self) -> bool { - if let Some(component_id) = self.component_id { - component_id == ComponentId::NONE - } else { - false - } - } - - pub fn any() -> Self { - Self { - prop_id: None, - internal_provider_id: None, - external_provider_id: None, - component_id: None, - } - } -} - -impl postgres_types::ToSql for AttributeReadContext { - fn to_sql( - &self, - ty: &postgres_types::Type, - out: &mut postgres_types::private::BytesMut, - ) -> Result> - where - Self: Sized, - { - let json = serde_json::to_value(self)?; - postgres_types::ToSql::to_sql(&json, ty, out) - } - - fn accepts(ty: &postgres_types::Type) -> bool - where - Self: Sized, - { - ty == &postgres_types::Type::JSONB - } - - fn to_sql_checked( - &self, - ty: &postgres_types::Type, - out: &mut postgres_types::private::BytesMut, - ) -> Result> { - let json = serde_json::to_value(self)?; - postgres_types::ToSql::to_sql(&json, ty, out) - } -} +// use serde::{Deserialize, Serialize}; + +// use crate::{AttributeContext, ComponentId, ExternalProviderId, InternalProviderId, PropId}; + +// /// An `AttributeReadContext` allows for saying "do not use this field +// /// to filter results" by providing [`None`] for the field's value. +// /// It also allows for saying "explicitly filter out results for that +// /// have this field set" by providing the unset value for the field's +// /// value. 
+// /// +// /// For example: +// /// +// /// ```rust +// /// # use dal::attribute::context::read::AttributeReadContext; +// /// # use dal::{ExternalProviderId, InternalProviderId, ComponentId}; +// /// let read_context = AttributeReadContext { +// /// prop_id: None, +// /// internal_provider_id: Some(InternalProviderId::NONE), +// /// external_provider_id: Some(ExternalProviderId::NONE), +// /// component_id: Some(ComponentId::generate()) +// /// }; +// /// ``` +// /// +// /// The above `AttributeReadContext` would be used for finding all +// /// attributes, across all [`Props`](crate::Prop) that have been set +// /// for a given [`ComponentId`]. +// #[derive(Deserialize, Serialize, Debug, Clone, Copy, PartialEq, Eq)] +// pub struct AttributeReadContext { +// #[serde(rename = "attribute_context_prop_id")] +// pub prop_id: Option, +// #[serde(rename = "attribute_context_internal_provider_id")] +// pub internal_provider_id: Option, +// #[serde(rename = "attribute_context_external_provider_id")] +// pub external_provider_id: Option, +// #[serde(rename = "attribute_context_component_id")] +// pub component_id: Option, +// } + +// impl Default for AttributeReadContext { +// fn default() -> Self { +// Self { +// prop_id: Some(PropId::NONE), +// internal_provider_id: Some(InternalProviderId::NONE), +// external_provider_id: Some(ExternalProviderId::NONE), +// component_id: Some(ComponentId::NONE), +// } +// } +// } + +// impl From for AttributeReadContext { +// fn from(from_context: AttributeContext) -> Self { +// Self { +// prop_id: Some(from_context.prop_id()), +// internal_provider_id: Some(from_context.internal_provider_id()), +// external_provider_id: Some(from_context.external_provider_id()), +// component_id: Some(from_context.component_id()), +// } +// } +// } + +// impl AttributeReadContext { +// /// Creates a [`read context`](Self) with a given [`PropId`](crate::Prop) +// /// and all other fields set to their defaults. 
+// pub fn default_with_prop(prop_id: PropId) -> Self { +// Self { +// prop_id: Some(prop_id), +// ..Self::default() +// } +// } + +// pub fn default_with_prop_and_component_id( +// prop_id: PropId, +// component_id: Option, +// ) -> Self { +// Self { +// prop_id: Some(prop_id), +// component_id: match component_id { +// Some(component_id) => Some(component_id), +// None => Some(ComponentId::NONE), +// }, +// ..Self::default() +// } +// } + +// /// Creates a [`read context`](Self) with a given [`InternalProviderId`](crate::InternalProvider) +// /// and all other fields set to their defaults. +// pub fn default_with_internal_provider(internal_provider_id: InternalProviderId) -> Self { +// Self { +// internal_provider_id: Some(internal_provider_id), +// ..Self::default() +// } +// } + +// /// Creates a [`read context`](Self) with a given [`ExternalProviderId`](crate::ExternalProvider) +// /// and all other fields set to their defaults. +// pub fn default_with_external_provider(external_provider_id: ExternalProviderId) -> Self { +// Self { +// external_provider_id: Some(external_provider_id), +// ..Self::default() +// } +// } + +// pub fn prop_id(&self) -> Option { +// self.prop_id +// } + +// pub fn has_prop_id(&self) -> bool { +// self.prop_id.is_some() +// } + +// pub fn has_set_prop_id(&self) -> bool { +// if let Some(prop_id) = self.prop_id { +// prop_id != PropId::NONE +// } else { +// false +// } +// } + +// pub fn has_unset_prop_id(&self) -> bool { +// if let Some(prop_id) = self.prop_id { +// prop_id == PropId::NONE +// } else { +// false +// } +// } + +// pub fn internal_provider_id(&self) -> Option { +// self.internal_provider_id +// } + +// pub fn has_internal_provider_id(&self) -> bool { +// self.internal_provider_id.is_some() +// } + +// pub fn has_set_internal_provider(&self) -> bool { +// if let Some(internal_provider) = self.internal_provider_id { +// internal_provider != InternalProviderId::NONE +// } else { +// false +// } +// } + +// pub fn 
has_unset_internal_provider(&self) -> bool { +// if let Some(internal_provider) = self.internal_provider_id { +// internal_provider == InternalProviderId::NONE +// } else { +// false +// } +// } + +// pub fn external_provider_id(&self) -> Option { +// self.external_provider_id +// } + +// pub fn has_external_provider_id(&self) -> bool { +// self.external_provider_id.is_some() +// } + +// pub fn has_set_external_provider(&self) -> bool { +// if let Some(external_provider) = self.external_provider_id { +// external_provider != ExternalProviderId::NONE +// } else { +// false +// } +// } + +// pub fn has_unset_external_provider(&self) -> bool { +// if let Some(external_provider) = self.external_provider_id { +// external_provider == ExternalProviderId::NONE +// } else { +// false +// } +// } + +// pub fn component_id(&self) -> Option { +// self.component_id +// } + +// pub fn has_component_id(&self) -> bool { +// self.component_id.is_some() +// } + +// pub fn has_set_component_id(&self) -> bool { +// if let Some(component_id) = self.component_id { +// component_id != ComponentId::NONE +// } else { +// false +// } +// } + +// pub fn has_unset_component_id(&self) -> bool { +// if let Some(component_id) = self.component_id { +// component_id == ComponentId::NONE +// } else { +// false +// } +// } + +// pub fn any() -> Self { +// Self { +// prop_id: None, +// internal_provider_id: None, +// external_provider_id: None, +// component_id: None, +// } +// } +// } + +// impl postgres_types::ToSql for AttributeReadContext { +// fn to_sql( +// &self, +// ty: &postgres_types::Type, +// out: &mut postgres_types::private::BytesMut, +// ) -> Result> +// where +// Self: Sized, +// { +// let json = serde_json::to_value(self)?; +// postgres_types::ToSql::to_sql(&json, ty, out) +// } + +// fn accepts(ty: &postgres_types::Type) -> bool +// where +// Self: Sized, +// { +// ty == &postgres_types::Type::JSONB +// } + +// fn to_sql_checked( +// &self, +// ty: &postgres_types::Type, +// out: 
&mut postgres_types::private::BytesMut, +// ) -> Result> { +// let json = serde_json::to_value(self)?; +// postgres_types::ToSql::to_sql(&json, ty, out) +// } +// } diff --git a/lib/dal/src/attribute/prototype.rs b/lib/dal/src/attribute/prototype.rs index c216cade5b..e8e4b34a92 100644 --- a/lib/dal/src/attribute/prototype.rs +++ b/lib/dal/src/attribute/prototype.rs @@ -9,956 +9,877 @@ //! [`Map`](crate::prop::PropKind::Map): Which key of the `Map` the value is //! for. -use async_recursion::async_recursion; + +use content_store::ContentHash; use serde::{Deserialize, Serialize}; -use si_data_nats::NatsError; -use si_data_pg::PgError; + + +use strum::EnumDiscriminants; use telemetry::prelude::*; -use thiserror::Error; + +use crate::workspace_snapshot::content_address::ContentAddress; use crate::{ - attribute::{ - context::{AttributeContext, AttributeContextError}, - value::{AttributeValue, AttributeValueError, AttributeValueId}, - }, - func::FuncId, - func::{ - binding::{FuncBindingError, FuncBindingId}, - binding_return_value::{FuncBindingReturnValueError, FuncBindingReturnValueId}, - }, - impl_standard_model, pk, standard_model, standard_model_accessor, standard_model_has_many, - AttributePrototypeArgument, AttributePrototypeArgumentError, AttributeReadContext, ComponentId, - DalContext, ExternalProvider, ExternalProviderId, Func, FuncBackendResponseType, - HistoryEventError, InternalProvider, InternalProviderId, Prop, PropId, PropKind, - SchemaVariantId, StandardModel, StandardModelError, Tenancy, Timestamp, TransactionsError, - Visibility, + pk, StandardModel, Timestamp, }; pub mod argument; -const ARGUMENT_VALUES_BY_NAME_FOR_HEAD_COMPONENT_ID: &str = include_str!( - "../queries/attribute_prototype/argument_values_by_name_for_head_component_id.sql" -); -const ATTRIBUTE_VALUES_IN_CONTEXT_OR_GREATER: &str = - include_str!("../queries/attribute_prototype/attribute_values_in_context_or_greater.sql"); -const LIST_BY_HEAD_FROM_EXTERNAL_PROVIDER_USE_WITH_TAIL: &str = 
include_str!( - "../queries/attribute_prototype/list_by_head_from_external_provider_use_with_tail.sql" -); -const LIST_FROM_INTERNAL_PROVIDER_USE: &str = - include_str!("../queries/attribute_prototype/list_from_internal_provider_use.sql"); -const LIST_FOR_CONTEXT: &str = include_str!("../queries/attribute_prototype/list_for_context.sql"); -const LIST_FOR_SCHEMA_VARIANT: &str = - include_str!("../queries/attribute_prototype/list_for_schema_variant.sql"); -const LIST_FUNCS_FOR_CONTEXT_AND_BACKEND_RESPONSE_TYPE: &str = include_str!("../queries/attribute_prototype/list_protoype_funcs_for_context_and_func_backend_response_type.sql"); -const FIND_WITH_PARENT_VALUE_AND_KEY_FOR_CONTEXT: &str = - include_str!("../queries/attribute_prototype/find_with_parent_value_and_key_for_context.sql"); -const FIND_FOR_FUNC: &str = include_str!("../queries/attribute_prototype/find_for_func.sql"); -const FIND_FOR_CONTEXT_AND_KEY: &str = - include_str!("../queries/attribute_prototype/find_for_context_and_key.sql"); -const FIND_FOR_CONTEXT_NULL_KEY: &str = - include_str!("../queries/attribute_prototype/find_for_context_null_key.sql"); -const FIND_FOR_FUNC_AS_VARIANT_AND_COMPONENT: &str = - include_str!("../queries/attribute_prototype/find_for_func_as_variant_and_component.sql"); - -#[remain::sorted] -#[derive(Error, Debug)] -pub enum AttributePrototypeError { - #[error("attribute resolver context builder error: {0}")] - AttributeContextBuilder(#[from] AttributeContextError), - #[error("attribute prototype argument error: {0}")] - AttributePrototypeArgument(#[from] AttributePrototypeArgumentError), - #[error("attribute value error: {0}")] - AttributeValue(#[from] AttributeValueError), - #[error("unable to construct component view for attribute function execution")] - ComponentView, - #[error("external provider not found by id: {0}")] - ExternalProviderNotFound(ExternalProviderId), - #[error("func binding error: {0}")] - FuncBinding(#[from] FuncBindingError), - #[error("func binding return 
value error: {0}")] - FuncBindingReturnValue(#[from] FuncBindingReturnValueError), - #[error("cannot hard delete prototype from changeset if corresponding prototype does not exist on head or if the prototype does not represent an element of a map or array")] - HardDeletePrototypeWithNoHeadPrototypeOrKey(AttributePrototypeId), - #[error("history event error: {0}")] - HistoryEvent(#[from] HistoryEventError), - #[error("internal provider not found by id: {0}")] - InternalProviderNotFound(InternalProviderId), - #[error("invalid prop value; expected {0} but got {1}")] - InvalidPropValue(String, serde_json::Value), - #[error("cannot remove prototype with a least-specific context: {0}")] - LeastSpecificContextPrototypeRemovalNotAllowed(AttributePrototypeId), - #[error("cannot remove value with a least-specific context: {0}")] - LeastSpecificContextValueRemovalNotAllowed(AttributeValueId), - #[error("AttributePrototype is missing")] - Missing, - #[error("func not found: {0}")] - MissingFunc(String), - #[error("attribute prototypes must have an associated prop, and this one does not. 
bug!")] - MissingProp, - #[error("missing attribute value for tenancy {0:?}, visibility {1:?}, prototype {2:?}, with parent attribute value {3:?}")] - MissingValue( - Tenancy, - Visibility, - AttributePrototypeId, - Option, - ), - #[error("nats txn error: {0}")] - Nats(#[from] NatsError), - #[error("attribute prototype not found: {0} ({1:?})")] - NotFound(AttributePrototypeId, Visibility), - #[error( - "parent must be for an array, map, or object prop: attribute prototype id {0} is for a {1}" - )] - ParentNotAllowed(AttributePrototypeId, PropKind), - #[error("pg error: {0}")] - Pg(#[from] PgError), - #[error("prop not found by id: {0}")] - PropNotFound(PropId), - #[error("error serializing/deserializing json: {0}")] - SerdeJson(#[from] serde_json::Error), - #[error("standard model error: {0}")] - StandardModelError(#[from] StandardModelError), - #[error("transactions error: {0}")] - Transactions(#[from] TransactionsError), -} +// const ARGUMENT_VALUES_BY_NAME_FOR_HEAD_COMPONENT_ID: &str = include_str!( +// "../queries/attribute_prototype/argument_values_by_name_for_head_component_id.sql" +// ); +// const ATTRIBUTE_VALUES_IN_CONTEXT_OR_GREATER: &str = +// include_str!("../queries/attribute_prototype/attribute_values_in_context_or_greater.sql"); +// const LIST_BY_HEAD_FROM_EXTERNAL_PROVIDER_USE_WITH_TAIL: &str = include_str!( +// "../queries/attribute_prototype/list_by_head_from_external_provider_use_with_tail.sql" +// ); +// const LIST_FROM_INTERNAL_PROVIDER_USE: &str = +// include_str!("../queries/attribute_prototype/list_from_internal_provider_use.sql"); +// const LIST_FOR_CONTEXT: &str = include_str!("../queries/attribute_prototype/list_for_context.sql"); +// const LIST_FOR_SCHEMA_VARIANT: &str = +// include_str!("../queries/attribute_prototype/list_for_schema_variant.sql"); +// const LIST_FUNCS_FOR_CONTEXT_AND_BACKEND_RESPONSE_TYPE: &str = include_str!("../queries/attribute_prototype/list_protoype_funcs_for_context_and_func_backend_response_type.sql"); +// const 
FIND_WITH_PARENT_VALUE_AND_KEY_FOR_CONTEXT: &str = +// include_str!("../queries/attribute_prototype/find_with_parent_value_and_key_for_context.sql"); +// const FIND_FOR_FUNC: &str = include_str!("../queries/attribute_prototype/find_for_func.sql"); +// const FIND_FOR_CONTEXT_AND_KEY: &str = +// include_str!("../queries/attribute_prototype/find_for_context_and_key.sql"); +// const FIND_FOR_CONTEXT_NULL_KEY: &str = +// include_str!("../queries/attribute_prototype/find_for_context_null_key.sql"); +// const FIND_FOR_FUNC_AS_VARIANT_AND_COMPONENT: &str = +// include_str!("../queries/attribute_prototype/find_for_func_as_variant_and_component.sql"); -pub type AttributePrototypeResult = Result; - -pk!(AttributePrototypePk); pk!(AttributePrototypeId); #[derive(Deserialize, Serialize, Debug, Clone, PartialEq, Eq)] pub struct AttributePrototype { - pk: AttributePrototypePk, id: AttributePrototypeId, #[serde(flatten)] - tenancy: Tenancy, - #[serde(flatten)] - visibility: Visibility, - #[serde(flatten)] timestamp: Timestamp, - - /// The [`AttributeContext`] corresponding to the prototype. - #[serde(flatten)] - pub context: AttributeContext, - /// The [`Func`](crate::Func) corresponding to the prototype. - func_id: FuncId, - /// An optional key used for tracking parentage. - pub key: Option, -} - -/// This object is used for -/// [`AttributePrototype::list_by_head_from_external_provider_use_with_tail()`]. -#[derive(Serialize, Deserialize, Debug)] -pub struct AttributePrototypeGroupByHeadComponentId { - pub head_component_id: ComponentId, - pub attribute_prototype: AttributePrototype, } -impl_standard_model! 
{ - model: AttributePrototype, - pk: AttributePrototypePk, +#[derive(Debug, PartialEq)] +pub struct AttributePrototypeGraphNode { id: AttributePrototypeId, - table_name: "attribute_prototypes", - history_event_label_base: "attribute_prototype", - history_event_message_name: "Attribute Prototype" + content_address: ContentAddress, + content: AttributePrototypeContentV1, } -impl AttributePrototype { - #[allow(clippy::too_many_arguments)] - #[instrument(skip_all)] - pub async fn new( - ctx: &DalContext, - func_id: FuncId, - func_binding_id: FuncBindingId, - func_binding_return_value_id: FuncBindingReturnValueId, - context: AttributeContext, - key: Option, - parent_attribute_value_id: Option, - ) -> AttributePrototypeResult { - let row = ctx.txns().await?.pg().query_one( - "SELECT new_attribute_prototype AS object FROM attribute_prototype_new_v1($1, $2, $3, $4, $5, $6, $7, $8)", - &[ - ctx.tenancy(), - ctx.visibility(), - &func_id, - &func_binding_id, - &func_binding_return_value_id, - &context, - &key, - &parent_attribute_value_id, - ], - ).await?; - - Ok(standard_model::finish_create_from_row(ctx, row).await?) - } - - #[allow(clippy::too_many_arguments)] - #[instrument(skip_all)] - pub async fn new_with_existing_value( - ctx: &DalContext, - func_id: FuncId, - context: AttributeContext, - key: Option, - parent_attribute_value_id: Option, - attribute_value_id: AttributeValueId, - ) -> AttributePrototypeResult { - let row = ctx - .txns() - .await? - .pg() - .query_one( - "SELECT new_attribute_prototype_id AS prototype_id - FROM attribute_prototype_new_with_attribute_value_v1($1, - $2, - $3, - $4, - $5, - $6, - $7)", - &[ - ctx.tenancy(), - ctx.visibility(), - &func_id, - &context, - &key, - &parent_attribute_value_id, - &attribute_value_id, - ], - ) - .await?; - let prototype_id: AttributePrototypeId = row.try_get("prototype_id")?; - let object = Self::get_by_id(ctx, &prototype_id) - .await? 
- .ok_or_else(|| AttributePrototypeError::NotFound(prototype_id, *ctx.visibility()))?; - - Ok(object) - } - - pub async fn new_with_context_only( - ctx: &DalContext, - func_id: FuncId, - context: AttributeContext, - key: Option<&str>, - ) -> AttributePrototypeResult { - let row = ctx - .txns() - .await? - .pg() - .query_one( - "SELECT object FROM attribute_prototype_create_v1($1, $2, $3, $4, $5)", - &[ctx.tenancy(), ctx.visibility(), &context, &func_id, &key], - ) - .await?; - - Ok(standard_model::finish_create_from_row(ctx, row).await?) - } - - standard_model_accessor!(func_id, Pk(FuncId), AttributePrototypeResult); - standard_model_accessor!(key, Option, AttributePrototypeResult); - standard_model_has_many!( - lookup_fn: attribute_values, - table: "attribute_value_belongs_to_attribute_prototype", - model_table: "attribute_values", - returns: AttributeValue, - result: AttributePrototypeResult, - ); - - /// Permanently deletes the [`AttributePrototype`] for the given id along with any - /// corresponding [`AttributeValue`](crate::AttributeValue) prototype and - /// any [`AttributePrototypeArguments`](crate::AttributePrototypeArgument) - /// for the prototype, if and only if any of the above values are in a changeset (i.e., - /// not in HEAD). The effect is to revert the prototype, it's values, and arguments, - /// to the HEAD state. Marking them as soft-deleted would propagate the deletion up to - /// HEAD. The implementation here is almost identical to that of - /// [`AttributePrototype::remove`](crate::AttributePrototype::remove)` but (1) - /// checks for in_change_set and (2) hard deletes. Least-specific checks are not necessary here - /// because we only do this for prototypes that exist only in a changeset. A corresponding - /// prototype for this prop will exist in head, and it will take priority when this one is - /// deleted. 
- pub async fn hard_delete_if_in_changeset( - ctx: &DalContext, - attribute_prototype_id: &AttributePrototypeId, - ) -> AttributePrototypeResult<()> { - let attribute_prototype = - match AttributePrototype::get_by_id(ctx, attribute_prototype_id).await? { - Some(v) => v, - None => return Ok(()), - }; - - // Ensure a prototype matching this context exists on head, or the prototype is for a - // map/array element - { - let head_ctx = ctx.clone_with_head(); - let has_head_proto = AttributePrototype::find_for_context_and_key( - &head_ctx, - attribute_prototype.context, - &attribute_prototype.key, - ) - .await? - .pop() - .is_some(); - - if !(has_head_proto || attribute_prototype.key().is_some()) { - return Err( - AttributePrototypeError::HardDeletePrototypeWithNoHeadPrototypeOrKey( - *attribute_prototype_id, - ), - ); - } - } - - // Delete all values and arguments found for a prototype before deleting the prototype. - let attribute_values = attribute_prototype.attribute_values(ctx).await?; - for argument in - AttributePrototypeArgument::list_for_attribute_prototype(ctx, *attribute_prototype_id) - .await? - { - if argument.visibility().in_change_set() { - argument.hard_delete(ctx).await?; - } - } - if attribute_prototype.visibility().in_change_set() { - standard_model::hard_unset_all_belongs_to_in_change_set( - ctx, - "attribute_value_belongs_to_attribute_prototype", - attribute_prototype.id(), - ) - .await?; - attribute_prototype.hard_delete(ctx).await?; - } - - // Start with the initial value(s) from the prototype and build a work queue based on the - // value's children (and their children, recursively). Once we find the child values, - // we can delete the current value in the queue and its prototype. 
- let mut work_queue = attribute_values; - while let Some(current_value) = work_queue.pop() { - let child_attribute_values = current_value.child_attribute_values(ctx).await?; - if !child_attribute_values.is_empty() { - work_queue.extend(child_attribute_values); - } - - // Delete the prototype if we find one and if its context is not "least-specific". - if let Some(current_prototype) = current_value.attribute_prototype(ctx).await? { - // Delete all arguments found for a prototype before deleting the prototype. - for argument in AttributePrototypeArgument::list_for_attribute_prototype( - ctx, - *current_prototype.id(), - ) - .await? - { - if argument.visibility().in_change_set() { - argument.hard_delete(ctx).await?; - } - } - if current_prototype.visibility().in_change_set() { - standard_model::hard_unset_all_belongs_to_in_change_set( - ctx, - "attribute_value_belongs_to_attribute_prototype", - current_prototype.id(), - ) - .await?; - current_prototype.hard_delete(ctx).await?; - } - } - - if current_value.visibility().in_change_set() { - standard_model::hard_unset_belongs_to_in_change_set( - ctx, - "attribute_value_belongs_to_attribute_prototype", - current_value.id(), - ) - .await?; - standard_model::hard_unset_belongs_to_in_change_set( - ctx, - "attribute_value_belongs_to_attribute_value", - current_value.id(), - ) - .await?; - standard_model::hard_unset_all_belongs_to_in_change_set( - ctx, - "attribute_value_belongs_to_attribute_value", - current_value.id(), - ) - .await?; - current_value.hard_delete(ctx).await?; - } - } - Ok(()) - } - - /// Deletes the [`AttributePrototype`] corresponding to a provided ID. Before deletion occurs, - /// its corresponding [`AttributeValue`](crate::AttributeValue), all of its child values - /// (and their children, recursively) and those children's prototypes are deleted. Any value or - /// prototype that could not be found or does not exist is assumed to have already been deleted - /// or never existed. 
Moreover, before deletion of the [`AttributePrototype`] occurs, we delete - /// all [`AttributePrototypeArguments`](crate::AttributePrototypeArgument) that belong to the - /// prototype. - /// - /// Caution: this should be used rather than [`StandardModel::delete_by_id()`] when deleting an - /// [`AttributePrototype`]. That method should never be called directly. - /// - /// Normally we forbid deleting "least specific" attribute prototypes, that is, prototypes - /// at the schema variant level, but we need to do so when removing a schema variant and - /// all its associated objects. To make this possible, set `force` to `true` - pub async fn remove( - ctx: &DalContext, - attribute_prototype_id: &AttributePrototypeId, - force: bool, - ) -> AttributePrototypeResult<()> { - // Get the prototype for the given id. Once we get its corresponding value, we can delete - // the prototype. - let mut attribute_prototype = - match AttributePrototype::get_by_id(ctx, attribute_prototype_id).await? { - Some(v) => v, - None => return Ok(()), - }; - - let parent_proto_is_map_or_array_element = attribute_prototype.key().is_some(); - if attribute_prototype.context.is_least_specific() - && !parent_proto_is_map_or_array_element - && !force - { - return Err( - AttributePrototypeError::LeastSpecificContextPrototypeRemovalNotAllowed( - *attribute_prototype_id, - ), - ); - } - - // Delete all values and arguments found for a prototype before deleting the prototype. - let attribute_values = attribute_prototype.attribute_values(ctx).await?; - for mut argument in - AttributePrototypeArgument::list_for_attribute_prototype(ctx, *attribute_prototype_id) - .await? 
- { - argument.delete_by_id(ctx).await?; - } - standard_model::unset_all_belongs_to( - ctx, - "attribute_value_belongs_to_attribute_prototype", - attribute_prototype.id(), - ) - .await?; - attribute_prototype.delete_by_id(ctx).await?; - - // Start with the initial value(s) from the prototype and build a work queue based on the - // value's children (and their children, recursively). Once we find the child values, - // we can delete the current value in the queue and its prototype. - let mut work_queue = attribute_values; - while let Some(mut current_value) = work_queue.pop() { - let child_attribute_values = current_value.child_attribute_values(ctx).await?; - if !child_attribute_values.is_empty() { - work_queue.extend(child_attribute_values); - } - - // Delete the prototype if we find one and if its context is not "least-specific". - if let Some(mut current_prototype) = current_value.attribute_prototype(ctx).await? { - if current_prototype.context.is_least_specific() - && !parent_proto_is_map_or_array_element - && !force - { - return Err( - AttributePrototypeError::LeastSpecificContextPrototypeRemovalNotAllowed( - *current_prototype.id(), - ), - ); - } - // Delete all arguments found for a prototype before deleting the prototype. - for mut argument in AttributePrototypeArgument::list_for_attribute_prototype( - ctx, - *current_prototype.id(), - ) - .await? - { - argument.delete_by_id(ctx).await?; - } - standard_model::unset_all_belongs_to( - ctx, - "attribute_value_belongs_to_attribute_prototype", - current_prototype.id(), - ) - .await?; - current_prototype.delete_by_id(ctx).await?; - } - - // Delete the value if its context is not "least-specific". 
- if current_value.context.is_least_specific() - && !parent_proto_is_map_or_array_element - && !force - { - return Err( - AttributePrototypeError::LeastSpecificContextValueRemovalNotAllowed( - *current_value.id(), - ), - ); - } - current_value.unset_attribute_prototype(ctx).await?; - current_value.unset_parent_attribute_value(ctx).await?; - standard_model::unset_all_belongs_to( - ctx, - "attribute_value_belongs_to_attribute_value", - current_value.id(), - ) - .await?; - current_value.delete_by_id(ctx).await?; - } - Ok(()) - } - - #[instrument(skip_all)] - pub async fn list_prototype_funcs_by_context_and_backend_response_type( - ctx: &DalContext, - context: AttributeContext, - backend_response_type: FuncBackendResponseType, - ) -> AttributePrototypeResult> { - let rows = ctx - .txns() - .await? - .pg() - .query( - LIST_FUNCS_FOR_CONTEXT_AND_BACKEND_RESPONSE_TYPE, - &[ - ctx.tenancy(), - ctx.visibility(), - &context, - &context.prop_id(), - &backend_response_type.as_ref(), - ], - ) - .await?; - - let mut result = Vec::new(); - for row in rows.into_iter() { - let func_json: serde_json::Value = row.try_get("func_object")?; - let func: Func = serde_json::from_value(func_json)?; - - let ap_json: serde_json::Value = row.try_get("prototype_object")?; - let ap: Self = serde_json::from_value(ap_json)?; - - result.push((ap, func)); - } - - Ok(result) - } - - pub async fn list_for_schema_variant( - ctx: &DalContext, - schema_variant_id: SchemaVariantId, - ) -> AttributePrototypeResult> { - let rows = ctx - .txns() - .await? - .pg() - .query( - LIST_FOR_SCHEMA_VARIANT, - &[ctx.tenancy(), ctx.visibility(), &schema_variant_id], - ) - .await?; - - Ok(standard_model::objects_from_rows(rows)?) - } - - #[instrument(skip_all)] - pub async fn list_for_context( - ctx: &DalContext, - context: AttributeContext, - ) -> AttributePrototypeResult> { - let rows = ctx - .txns() - .await? 
- .pg() - .query( - LIST_FOR_CONTEXT, - &[ - ctx.tenancy(), - ctx.visibility(), - &context, - &context.prop_id(), - ], - ) - .await?; - let object = standard_model::objects_from_rows(rows)?; - Ok(object) - } - - #[tracing::instrument(skip_all)] - pub async fn find_with_parent_value_and_key_for_context( - ctx: &DalContext, - parent_attribute_value_id: Option, - key: Option, - context: AttributeContext, - ) -> AttributePrototypeResult> { - let row = ctx - .txns() - .await? - .pg() - .query_opt( - FIND_WITH_PARENT_VALUE_AND_KEY_FOR_CONTEXT, - &[ - ctx.tenancy(), - ctx.visibility(), - &context, - &parent_attribute_value_id, - &key, - ], - ) - .await?; - - Ok(standard_model::option_object_from_row(row)?) - } - - /// List [`Vec`] that depend on a provided [`InternalProviderId`](crate::InternalProvider). - pub async fn list_from_internal_provider_use( - ctx: &DalContext, - internal_provider_id: InternalProviderId, - ) -> AttributePrototypeResult> { - let rows = ctx - .txns() - .await? - .pg() - .query( - LIST_FROM_INTERNAL_PROVIDER_USE, - &[ctx.tenancy(), ctx.visibility(), &internal_provider_id], - ) - .await?; - Ok(standard_model::objects_from_rows(rows)?) - } - - /// List [`Vec`] that depend on a provided [`ExternalProviderId`](crate::ExternalProvider) - /// and _tail_ [`ComponentId`](crate::Component). - pub async fn list_by_head_from_external_provider_use_with_tail( - ctx: &DalContext, - external_provider_id: ExternalProviderId, - tail_component_id: ComponentId, - ) -> AttributePrototypeResult> { - let rows = ctx - .txns() - .await? 
- .pg() - .query( - LIST_BY_HEAD_FROM_EXTERNAL_PROVIDER_USE_WITH_TAIL, - &[ - ctx.tenancy(), - ctx.visibility(), - &external_provider_id, - &tail_component_id, - ], - ) - .await?; - - let mut result = Vec::new(); - for row in rows.into_iter() { - let head_component_id: ComponentId = row.try_get("head_component_id")?; - - let attribute_prototype_json: serde_json::Value = row.try_get("object")?; - let attribute_prototype = serde_json::from_value(attribute_prototype_json)?; - - result.push(AttributePrototypeGroupByHeadComponentId { - head_component_id, - attribute_prototype, - }); - } - Ok(result) - } - - pub async fn argument_values( - &self, - ctx: &DalContext, - attribute_write_context: AttributeContext, - ) -> AttributePrototypeResult> { - let rows = ctx - .txns() - .await? - .pg() - .query( - ARGUMENT_VALUES_BY_NAME_FOR_HEAD_COMPONENT_ID, - &[ - ctx.tenancy(), - ctx.visibility(), - &self.id, - &attribute_write_context.component_id(), - &attribute_write_context, - ], - ) - .await?; - - Ok(standard_model::objects_from_rows(rows)?) - } - - /// List [`AttributeValues`](crate::AttributeValue) that belong to a provided [`AttributePrototypeId`](Self) - /// and whose context contains the provided [`AttributeReadContext`](crate::AttributeReadContext) - /// or are "more-specific" than the provided [`AttributeReadContext`](crate::AttributeReadContext). - pub async fn attribute_values_in_context_or_greater( - ctx: &DalContext, - attribute_prototype_id: AttributePrototypeId, - context: AttributeReadContext, - ) -> AttributePrototypeResult> { - let rows = ctx - .txns() - .await? - .pg() - .query( - ATTRIBUTE_VALUES_IN_CONTEXT_OR_GREATER, - &[ - ctx.tenancy(), - ctx.visibility(), - &attribute_prototype_id, - &context, - ], - ) - .await?; - Ok(standard_model::objects_from_rows(rows)?) 
- } +#[derive(EnumDiscriminants, Serialize, Deserialize, PartialEq)] +#[serde(tag = "version")] +pub enum AttributePrototypeContent { + V1(AttributePrototypeContentV1), +} - #[instrument(skip_all)] - #[allow(clippy::too_many_arguments)] - #[async_recursion] - async fn create_intermediate_proxy_values( - ctx: &DalContext, - parent_attribute_value_id: Option, - prototype_id: AttributePrototypeId, - context: AttributeContext, - ) -> AttributePrototypeResult<()> { - if context.is_least_specific() { - return Ok(()); - } +#[derive(Debug, Clone, Deserialize, Serialize, PartialEq)] +pub struct AttributePrototypeContentV1 { + #[serde(flatten)] + pub timestamp: Timestamp, +} - if (AttributeValue::find_with_parent_and_prototype_for_context( - ctx, - parent_attribute_value_id, - prototype_id, - context, - ) - .await?) - .is_none() - { - // Need to create a proxy to the next lowest level - Self::create_intermediate_proxy_values( - ctx, - parent_attribute_value_id, - prototype_id, - context.less_specific()?, - ) - .await?; - - if let Some(proxy_target) = AttributeValue::find_with_parent_and_prototype_for_context( - ctx, - parent_attribute_value_id, - prototype_id, - context.less_specific()?, - ) - .await? - { - // Create the proxy at this level - let mut proxy_attribute_value = AttributeValue::new( - ctx, - proxy_target.func_binding_id(), - proxy_target.func_binding_return_value_id(), - context, - proxy_target.key().map(|k| k.to_string()), - ) - .await?; - proxy_attribute_value - .set_proxy_for_attribute_value_id(ctx, Some(*proxy_target.id())) - .await?; - proxy_attribute_value - .set_attribute_prototype(ctx, &prototype_id) - .await? 
- } else { - return Err(AttributePrototypeError::MissingValue( - *ctx.tenancy(), - *ctx.visibility(), - prototype_id, - parent_attribute_value_id, - )); - } +impl AttributePrototypeGraphNode { + pub fn assemble( + id: impl Into, + content_hash: ContentHash, + content: AttributePrototypeContentV1, + ) -> Self { + Self { + id: id.into(), + content_address: ContentAddress::AttributePrototype(content_hash), + content, } - - Ok(()) - } - - #[allow(clippy::too_many_arguments)] - pub async fn update_for_context( - ctx: &DalContext, - attribute_prototype_id: AttributePrototypeId, - context: AttributeContext, - func_id: FuncId, - func_binding_id: FuncBindingId, - func_binding_return_value_id: FuncBindingReturnValueId, - parent_attribute_value_id: Option, - existing_attribute_value_id: Option, - ) -> AttributePrototypeResult { - let given_attribute_prototype = Self::get_by_id(ctx, &attribute_prototype_id) - .await? - .ok_or_else(|| { - AttributePrototypeError::NotFound(attribute_prototype_id, *ctx.visibility()) - })?; - - // If the AttributePrototype we were given isn't for the _specific_ context that we're - // trying to update, make a new one. This is necessary so that we don't end up changing the - // prototype for a context less specific than the one that we're trying to update. - let mut attribute_prototype = if given_attribute_prototype.context == context { - given_attribute_prototype - } else if let Some(attribute_value_id) = existing_attribute_value_id { - // Create new prototype with an existing value and clone the arguments of the given prototype into the new one. - let prototype = Self::new_with_existing_value( - ctx, - func_id, - context, - given_attribute_prototype.key().map(|k| k.to_string()), - parent_attribute_value_id, - attribute_value_id, - ) - .await?; - - let mut value = AttributeValue::get_by_id(ctx, &attribute_value_id) - .await? 
- .ok_or_else(|| { - AttributePrototypeError::MissingValue( - *ctx.tenancy(), - *ctx.visibility(), - *prototype.id(), - Some(attribute_value_id), - ) - })?; - value.set_func_binding_id(ctx, func_binding_id).await?; - - prototype - } else { - // Create new prototype and clone the arguments of the given prototype into the new one. - Self::new( - ctx, - func_id, - func_binding_id, - func_binding_return_value_id, - context, - given_attribute_prototype.key().map(|k| k.to_string()), - parent_attribute_value_id, - ) - .await? - }; - - attribute_prototype.set_func_id(ctx, func_id).await?; - - Ok(*attribute_prototype.id()) - } - - pub async fn find_for_func( - ctx: &DalContext, - func_id: &FuncId, - ) -> AttributePrototypeResult> { - let rows = ctx - .txns() - .await? - .pg() - .query(FIND_FOR_FUNC, &[ctx.tenancy(), ctx.visibility(), func_id]) - .await?; - - Ok(standard_model::objects_from_rows(rows)?) } +} - pub async fn find_for_func_as_variant_and_component( - ctx: &DalContext, - func_id: FuncId, - ) -> AttributePrototypeResult> { - let mut result = vec![]; - - let rows = ctx - .txns() - .await? - .pg() - .query( - FIND_FOR_FUNC_AS_VARIANT_AND_COMPONENT, - &[ctx.tenancy(), ctx.visibility(), &func_id], - ) - .await?; - - for row in rows.into_iter() { - let schema_variant_id: SchemaVariantId = row.try_get("schema_variant_id")?; - let component_id: ComponentId = row.try_get("component_id")?; - - result.push((schema_variant_id, component_id)); +impl AttributePrototype { + pub fn assemble(id: AttributePrototypeId, inner: &AttributePrototypeContentV1) -> Self { + let inner: AttributePrototypeContentV1 = inner.to_owned(); + Self { + id, + timestamp: inner.timestamp, } - - Ok(result) - } - - pub async fn find_for_context_and_key( - ctx: &DalContext, - context: AttributeContext, - key: &Option, - ) -> AttributePrototypeResult> { - let rows = if key.is_some() { - ctx.txns() - .await? 
- .pg() - .query( - FIND_FOR_CONTEXT_AND_KEY, - &[ - ctx.tenancy(), - ctx.visibility(), - &context.prop_id(), - &context.internal_provider_id(), - &context.external_provider_id(), - &context.component_id(), - &key, - ], - ) - .await? - } else { - ctx.txns() - .await? - .pg() - .query( - FIND_FOR_CONTEXT_NULL_KEY, - &[ - ctx.tenancy(), - ctx.visibility(), - &context.prop_id(), - &context.internal_provider_id(), - &context.external_provider_id(), - &context.component_id(), - ], - ) - .await? - }; - - Ok(standard_model::objects_from_rows(rows)?) - } - - pub async fn external_provider( - &self, - ctx: &DalContext, - ) -> AttributePrototypeResult { - ExternalProvider::get_by_id(ctx, &self.context.external_provider_id()) - .await? - .ok_or(AttributePrototypeError::ExternalProviderNotFound( - self.context.external_provider_id(), - )) - } - - pub async fn internal_provider( - &self, - ctx: &DalContext, - ) -> AttributePrototypeResult { - InternalProvider::get_by_id(ctx, &self.context.internal_provider_id()) - .await? - .ok_or(AttributePrototypeError::InternalProviderNotFound( - self.context.internal_provider_id(), - )) } - pub async fn prop(&self, ctx: &DalContext) -> AttributePrototypeResult { - Prop::get_by_id(ctx, &self.context.prop_id()).await?.ok_or( - AttributePrototypeError::PropNotFound(self.context.prop_id()), - ) + pub fn id(&self) -> AttributePrototypeId { + self.id } } -#[derive(Debug, Clone, Serialize, Deserialize)] -pub struct AttributePrototypeArgumentValues { - pub attribute_prototype_id: AttributePrototypeId, - pub argument_name: String, - pub values: Vec, -} +// /// This object is used for +// /// [`AttributePrototype::list_by_head_from_external_provider_use_with_tail()`]. 
+// #[derive(Serialize, Deserialize, Debug)] +// pub struct AttributePrototypeGroupByHeadComponentId { +// pub head_component_id: ComponentId, +// pub attribute_prototype: AttributePrototype, +// } + +// impl AttributePrototype { +// #[allow(clippy::too_many_arguments)] +// #[instrument(skip_all)] +// pub async fn new_with_existing_value( +// ctx: &DalContext, +// func_id: FuncId, +// context: AttributeContext, +// key: Option, +// parent_attribute_value_id: Option, +// attribute_value_id: AttributeValueId, +// ) -> AttributePrototypeResult { +// let row = ctx +// .txns() +// .await? +// .pg() +// .query_one( +// "SELECT new_attribute_prototype_id AS prototype_id +// FROM attribute_prototype_new_with_attribute_value_v1($1, +// $2, +// $3, +// $4, +// $5, +// $6, +// $7)", +// &[ +// ctx.tenancy(), +// ctx.visibility(), +// &func_id, +// &context, +// &key, +// &parent_attribute_value_id, +// &attribute_value_id, +// ], +// ) +// .await?; +// let prototype_id: AttributePrototypeId = row.try_get("prototype_id")?; +// let object = Self::get_by_id(ctx, &prototype_id) +// .await? +// .ok_or_else(|| AttributePrototypeError::NotFound(prototype_id, *ctx.visibility()))?; + +// Ok(object) +// } + +// pub async fn new_with_context_only( +// ctx: &DalContext, +// func_id: FuncId, +// context: AttributeContext, +// key: Option<&str>, +// ) -> AttributePrototypeResult { +// let row = ctx +// .txns() +// .await? +// .pg() +// .query_one( +// "SELECT object FROM attribute_prototype_create_v1($1, $2, $3, $4, $5)", +// &[ctx.tenancy(), ctx.visibility(), &context, &func_id, &key], +// ) +// .await?; + +// Ok(standard_model::finish_create_from_row(ctx, row).await?) 
+// } + +// standard_model_accessor!(func_id, Pk(FuncId), AttributePrototypeResult); +// standard_model_accessor!(key, Option, AttributePrototypeResult); +// standard_model_has_many!( +// lookup_fn: attribute_values, +// table: "attribute_value_belongs_to_attribute_prototype", +// model_table: "attribute_values", +// returns: AttributeValue, +// result: AttributePrototypeResult, +// ); + +// /// Permanently deletes the [`AttributePrototype`] for the given id along with any +// /// corresponding [`AttributeValue`](crate::AttributeValue) prototype and +// /// any [`AttributePrototypeArguments`](crate::AttributePrototypeArgument) +// /// for the prototype, if and only if any of the above values are in a changeset (i.e., +// /// not in HEAD). The effect is to revert the prototype, it's values, and arguments, +// /// to the HEAD state. Marking them as soft-deleted would propagate the deletion up to +// /// HEAD. The implementation here is almost identical to that of +// /// [`AttributePrototype::remove`](crate::AttributePrototype::remove)` but (1) +// /// checks for in_change_set and (2) hard deletes. Least-specific checks are not necessary here +// /// because we only do this for prototypes that exist only in a changeset. A corresponding +// /// prototype for this prop will exist in head, and it will take priority when this one is +// /// deleted. +// pub async fn hard_delete_if_in_changeset( +// ctx: &DalContext, +// attribute_prototype_id: &AttributePrototypeId, +// ) -> AttributePrototypeResult<()> { +// let attribute_prototype = +// match AttributePrototype::get_by_id(ctx, attribute_prototype_id).await? 
{ +// Some(v) => v, +// None => return Ok(()), +// }; + +// // Ensure a prototype matching this context exists on head, or the prototype is for a +// // map/array element +// { +// let head_ctx = ctx.clone_with_head(); +// let has_head_proto = AttributePrototype::find_for_context_and_key( +// &head_ctx, +// attribute_prototype.context, +// &attribute_prototype.key, +// ) +// .await? +// .pop() +// .is_some(); + +// if !(has_head_proto || attribute_prototype.key().is_some()) { +// return Err( +// AttributePrototypeError::HardDeletePrototypeWithNoHeadPrototypeOrKey( +// *attribute_prototype_id, +// ), +// ); +// } +// } + +// // Delete all values and arguments found for a prototype before deleting the prototype. +// let attribute_values = attribute_prototype.attribute_values(ctx).await?; +// for argument in +// AttributePrototypeArgument::list_for_attribute_prototype(ctx, *attribute_prototype_id) +// .await? +// { +// if argument.visibility().in_change_set() { +// argument.hard_delete(ctx).await?; +// } +// } +// if attribute_prototype.visibility().in_change_set() { +// standard_model::hard_unset_all_belongs_to_in_change_set( +// ctx, +// "attribute_value_belongs_to_attribute_prototype", +// attribute_prototype.id(), +// ) +// .await?; +// attribute_prototype.hard_delete(ctx).await?; +// } + +// // Start with the initial value(s) from the prototype and build a work queue based on the +// // value's children (and their children, recursively). Once we find the child values, +// // we can delete the current value in the queue and its prototype. +// let mut work_queue = attribute_values; +// while let Some(current_value) = work_queue.pop() { +// let child_attribute_values = current_value.child_attribute_values(ctx).await?; +// if !child_attribute_values.is_empty() { +// work_queue.extend(child_attribute_values); +// } + +// // Delete the prototype if we find one and if its context is not "least-specific". 
+// if let Some(current_prototype) = current_value.attribute_prototype(ctx).await? { +// // Delete all arguments found for a prototype before deleting the prototype. +// for argument in AttributePrototypeArgument::list_for_attribute_prototype( +// ctx, +// *current_prototype.id(), +// ) +// .await? +// { +// if argument.visibility().in_change_set() { +// argument.hard_delete(ctx).await?; +// } +// } +// if current_prototype.visibility().in_change_set() { +// standard_model::hard_unset_all_belongs_to_in_change_set( +// ctx, +// "attribute_value_belongs_to_attribute_prototype", +// current_prototype.id(), +// ) +// .await?; +// current_prototype.hard_delete(ctx).await?; +// } +// } + +// if current_value.visibility().in_change_set() { +// standard_model::hard_unset_belongs_to_in_change_set( +// ctx, +// "attribute_value_belongs_to_attribute_prototype", +// current_value.id(), +// ) +// .await?; +// standard_model::hard_unset_belongs_to_in_change_set( +// ctx, +// "attribute_value_belongs_to_attribute_value", +// current_value.id(), +// ) +// .await?; +// standard_model::hard_unset_all_belongs_to_in_change_set( +// ctx, +// "attribute_value_belongs_to_attribute_value", +// current_value.id(), +// ) +// .await?; +// current_value.hard_delete(ctx).await?; +// } +// } +// Ok(()) +// } + +// /// Deletes the [`AttributePrototype`] corresponding to a provided ID. Before deletion occurs, +// /// its corresponding [`AttributeValue`](crate::AttributeValue), all of its child values +// /// (and their children, recursively) and those children's prototypes are deleted. Any value or +// /// prototype that could not be found or does not exist is assumed to have already been deleted +// /// or never existed. Moreover, before deletion of the [`AttributePrototype`] occurs, we delete +// /// all [`AttributePrototypeArguments`](crate::AttributePrototypeArgument) that belong to the +// /// prototype. 
+// /// +// /// Caution: this should be used rather than [`StandardModel::delete_by_id()`] when deleting an +// /// [`AttributePrototype`]. That method should never be called directly. +// /// +// /// Normally we forbid deleting "least specific" attribute prototypes, that is, prototypes +// /// at the schema variant level, but we need to do so when removing a schema variant and +// /// all its associated objects. To make this possible, set `force` to `true` +// pub async fn remove( +// ctx: &DalContext, +// attribute_prototype_id: &AttributePrototypeId, +// force: bool, +// ) -> AttributePrototypeResult<()> { +// // Get the prototype for the given id. Once we get its corresponding value, we can delete +// // the prototype. +// let mut attribute_prototype = +// match AttributePrototype::get_by_id(ctx, attribute_prototype_id).await? { +// Some(v) => v, +// None => return Ok(()), +// }; + +// let parent_proto_is_map_or_array_element = attribute_prototype.key().is_some(); +// if attribute_prototype.context.is_least_specific() +// && !parent_proto_is_map_or_array_element +// && !force +// { +// return Err( +// AttributePrototypeError::LeastSpecificContextPrototypeRemovalNotAllowed( +// *attribute_prototype_id, +// ), +// ); +// } + +// // Delete all values and arguments found for a prototype before deleting the prototype. +// let attribute_values = attribute_prototype.attribute_values(ctx).await?; +// for mut argument in +// AttributePrototypeArgument::list_for_attribute_prototype(ctx, *attribute_prototype_id) +// .await? +// { +// argument.delete_by_id(ctx).await?; +// } +// standard_model::unset_all_belongs_to( +// ctx, +// "attribute_value_belongs_to_attribute_prototype", +// attribute_prototype.id(), +// ) +// .await?; +// attribute_prototype.delete_by_id(ctx).await?; + +// // Start with the initial value(s) from the prototype and build a work queue based on the +// // value's children (and their children, recursively). 
Once we find the child values, +// // we can delete the current value in the queue and its prototype. +// let mut work_queue = attribute_values; +// while let Some(mut current_value) = work_queue.pop() { +// let child_attribute_values = current_value.child_attribute_values(ctx).await?; +// if !child_attribute_values.is_empty() { +// work_queue.extend(child_attribute_values); +// } + +// // Delete the prototype if we find one and if its context is not "least-specific". +// if let Some(mut current_prototype) = current_value.attribute_prototype(ctx).await? { +// if current_prototype.context.is_least_specific() +// && !parent_proto_is_map_or_array_element +// && !force +// { +// return Err( +// AttributePrototypeError::LeastSpecificContextPrototypeRemovalNotAllowed( +// *current_prototype.id(), +// ), +// ); +// } +// // Delete all arguments found for a prototype before deleting the prototype. +// for mut argument in AttributePrototypeArgument::list_for_attribute_prototype( +// ctx, +// *current_prototype.id(), +// ) +// .await? +// { +// argument.delete_by_id(ctx).await?; +// } +// standard_model::unset_all_belongs_to( +// ctx, +// "attribute_value_belongs_to_attribute_prototype", +// current_prototype.id(), +// ) +// .await?; +// current_prototype.delete_by_id(ctx).await?; +// } + +// // Delete the value if its context is not "least-specific". 
+// if current_value.context.is_least_specific() +// && !parent_proto_is_map_or_array_element +// && !force +// { +// return Err( +// AttributePrototypeError::LeastSpecificContextValueRemovalNotAllowed( +// *current_value.id(), +// ), +// ); +// } +// current_value.unset_attribute_prototype(ctx).await?; +// current_value.unset_parent_attribute_value(ctx).await?; +// standard_model::unset_all_belongs_to( +// ctx, +// "attribute_value_belongs_to_attribute_value", +// current_value.id(), +// ) +// .await?; +// current_value.delete_by_id(ctx).await?; +// } +// Ok(()) +// } + +// #[instrument(skip_all)] +// pub async fn list_prototype_funcs_by_context_and_backend_response_type( +// ctx: &DalContext, +// context: AttributeContext, +// backend_response_type: FuncBackendResponseType, +// ) -> AttributePrototypeResult> { +// let rows = ctx +// .txns() +// .await? +// .pg() +// .query( +// LIST_FUNCS_FOR_CONTEXT_AND_BACKEND_RESPONSE_TYPE, +// &[ +// ctx.tenancy(), +// ctx.visibility(), +// &context, +// &context.prop_id(), +// &backend_response_type.as_ref(), +// ], +// ) +// .await?; + +// let mut result = Vec::new(); +// for row in rows.into_iter() { +// let func_json: serde_json::Value = row.try_get("func_object")?; +// let func: Func = serde_json::from_value(func_json)?; + +// let ap_json: serde_json::Value = row.try_get("prototype_object")?; +// let ap: Self = serde_json::from_value(ap_json)?; + +// result.push((ap, func)); +// } + +// Ok(result) +// } + +// pub async fn list_for_schema_variant( +// ctx: &DalContext, +// schema_variant_id: SchemaVariantId, +// ) -> AttributePrototypeResult> { +// let rows = ctx +// .txns() +// .await? +// .pg() +// .query( +// LIST_FOR_SCHEMA_VARIANT, +// &[ctx.tenancy(), ctx.visibility(), &schema_variant_id], +// ) +// .await?; + +// Ok(standard_model::objects_from_rows(rows)?) 
+// } + +// #[instrument(skip_all)] +// pub async fn list_for_context( +// ctx: &DalContext, +// context: AttributeContext, +// ) -> AttributePrototypeResult> { +// let rows = ctx +// .txns() +// .await? +// .pg() +// .query( +// LIST_FOR_CONTEXT, +// &[ +// ctx.tenancy(), +// ctx.visibility(), +// &context, +// &context.prop_id(), +// ], +// ) +// .await?; +// let object = standard_model::objects_from_rows(rows)?; +// Ok(object) +// } + +// #[tracing::instrument(skip_all)] +// pub async fn find_with_parent_value_and_key_for_context( +// ctx: &DalContext, +// parent_attribute_value_id: Option, +// key: Option, +// context: AttributeContext, +// ) -> AttributePrototypeResult> { +// let row = ctx +// .txns() +// .await? +// .pg() +// .query_opt( +// FIND_WITH_PARENT_VALUE_AND_KEY_FOR_CONTEXT, +// &[ +// ctx.tenancy(), +// ctx.visibility(), +// &context, +// &parent_attribute_value_id, +// &key, +// ], +// ) +// .await?; + +// Ok(standard_model::option_object_from_row(row)?) +// } + +// /// List [`Vec`] that depend on a provided [`InternalProviderId`](crate::InternalProvider). +// pub async fn list_from_internal_provider_use( +// ctx: &DalContext, +// internal_provider_id: InternalProviderId, +// ) -> AttributePrototypeResult> { +// let rows = ctx +// .txns() +// .await? +// .pg() +// .query( +// LIST_FROM_INTERNAL_PROVIDER_USE, +// &[ctx.tenancy(), ctx.visibility(), &internal_provider_id], +// ) +// .await?; +// Ok(standard_model::objects_from_rows(rows)?) +// } + +// /// List [`Vec`] that depend on a provided [`ExternalProviderId`](crate::ExternalProvider) +// /// and _tail_ [`ComponentId`](crate::Component). +// pub async fn list_by_head_from_external_provider_use_with_tail( +// ctx: &DalContext, +// external_provider_id: ExternalProviderId, +// tail_component_id: ComponentId, +// ) -> AttributePrototypeResult> { +// let rows = ctx +// .txns() +// .await? 
+// .pg() +// .query( +// LIST_BY_HEAD_FROM_EXTERNAL_PROVIDER_USE_WITH_TAIL, +// &[ +// ctx.tenancy(), +// ctx.visibility(), +// &external_provider_id, +// &tail_component_id, +// ], +// ) +// .await?; + +// let mut result = Vec::new(); +// for row in rows.into_iter() { +// let head_component_id: ComponentId = row.try_get("head_component_id")?; + +// let attribute_prototype_json: serde_json::Value = row.try_get("object")?; +// let attribute_prototype = serde_json::from_value(attribute_prototype_json)?; + +// result.push(AttributePrototypeGroupByHeadComponentId { +// head_component_id, +// attribute_prototype, +// }); +// } +// Ok(result) +// } + +// pub async fn argument_values( +// &self, +// ctx: &DalContext, +// attribute_write_context: AttributeContext, +// ) -> AttributePrototypeResult> { +// let rows = ctx +// .txns() +// .await? +// .pg() +// .query( +// ARGUMENT_VALUES_BY_NAME_FOR_HEAD_COMPONENT_ID, +// &[ +// ctx.tenancy(), +// ctx.visibility(), +// &self.id, +// &attribute_write_context.component_id(), +// &attribute_write_context, +// ], +// ) +// .await?; + +// Ok(standard_model::objects_from_rows(rows)?) +// } + +// /// List [`AttributeValues`](crate::AttributeValue) that belong to a provided [`AttributePrototypeId`](Self) +// /// and whose context contains the provided [`AttributeReadContext`](crate::AttributeReadContext) +// /// or are "more-specific" than the provided [`AttributeReadContext`](crate::AttributeReadContext). +// pub async fn attribute_values_in_context_or_greater( +// ctx: &DalContext, +// attribute_prototype_id: AttributePrototypeId, +// context: AttributeReadContext, +// ) -> AttributePrototypeResult> { +// let rows = ctx +// .txns() +// .await? +// .pg() +// .query( +// ATTRIBUTE_VALUES_IN_CONTEXT_OR_GREATER, +// &[ +// ctx.tenancy(), +// ctx.visibility(), +// &attribute_prototype_id, +// &context, +// ], +// ) +// .await?; +// Ok(standard_model::objects_from_rows(rows)?) 
+// } + +// #[instrument(skip_all)] +// #[allow(clippy::too_many_arguments)] +// #[async_recursion] +// async fn create_intermediate_proxy_values( +// ctx: &DalContext, +// parent_attribute_value_id: Option, +// prototype_id: AttributePrototypeId, +// context: AttributeContext, +// ) -> AttributePrototypeResult<()> { +// if context.is_least_specific() { +// return Ok(()); +// } + +// if (AttributeValue::find_with_parent_and_prototype_for_context( +// ctx, +// parent_attribute_value_id, +// prototype_id, +// context, +// ) +// .await?) +// .is_none() +// { +// // Need to create a proxy to the next lowest level +// Self::create_intermediate_proxy_values( +// ctx, +// parent_attribute_value_id, +// prototype_id, +// context.less_specific()?, +// ) +// .await?; + +// if let Some(proxy_target) = AttributeValue::find_with_parent_and_prototype_for_context( +// ctx, +// parent_attribute_value_id, +// prototype_id, +// context.less_specific()?, +// ) +// .await? +// { +// // Create the proxy at this level +// let mut proxy_attribute_value = AttributeValue::new( +// ctx, +// proxy_target.func_binding_id(), +// proxy_target.func_binding_return_value_id(), +// context, +// proxy_target.key().map(|k| k.to_string()), +// ) +// .await?; +// proxy_attribute_value +// .set_proxy_for_attribute_value_id(ctx, Some(*proxy_target.id())) +// .await?; +// proxy_attribute_value +// .set_attribute_prototype(ctx, &prototype_id) +// .await? 
+// } else { +// return Err(AttributePrototypeError::MissingValue( +// *ctx.tenancy(), +// *ctx.visibility(), +// prototype_id, +// parent_attribute_value_id, +// )); +// } +// } + +// Ok(()) +// } + +// #[allow(clippy::too_many_arguments)] +// pub async fn update_for_context( +// ctx: &DalContext, +// attribute_prototype_id: AttributePrototypeId, +// context: AttributeContext, +// func_id: FuncId, +// func_binding_id: FuncBindingId, +// func_binding_return_value_id: FuncBindingReturnValueId, +// parent_attribute_value_id: Option, +// existing_attribute_value_id: Option, +// ) -> AttributePrototypeResult { +// let given_attribute_prototype = Self::get_by_id(ctx, &attribute_prototype_id) +// .await? +// .ok_or_else(|| { +// AttributePrototypeError::NotFound(attribute_prototype_id, *ctx.visibility()) +// })?; + +// // If the AttributePrototype we were given isn't for the _specific_ context that we're +// // trying to update, make a new one. This is necessary so that we don't end up changing the +// // prototype for a context less specific than the one that we're trying to update. +// let mut attribute_prototype = if given_attribute_prototype.context == context { +// given_attribute_prototype +// } else if let Some(attribute_value_id) = existing_attribute_value_id { +// // Create new prototype with an existing value and clone the arguments of the given prototype into the new one. +// let prototype = Self::new_with_existing_value( +// ctx, +// func_id, +// context, +// given_attribute_prototype.key().map(|k| k.to_string()), +// parent_attribute_value_id, +// attribute_value_id, +// ) +// .await?; + +// let mut value = AttributeValue::get_by_id(ctx, &attribute_value_id) +// .await? 
+// .ok_or_else(|| { +// AttributePrototypeError::MissingValue( +// *ctx.tenancy(), +// *ctx.visibility(), +// *prototype.id(), +// Some(attribute_value_id), +// ) +// })?; +// value.set_func_binding_id(ctx, func_binding_id).await?; + +// prototype +// } else { +// // Create new prototype and clone the arguments of the given prototype into the new one. +// Self::new( +// ctx, +// func_id, +// func_binding_id, +// func_binding_return_value_id, +// context, +// given_attribute_prototype.key().map(|k| k.to_string()), +// parent_attribute_value_id, +// ) +// .await? +// }; + +// attribute_prototype.set_func_id(ctx, func_id).await?; + +// Ok(*attribute_prototype.id()) +// } + +// pub async fn find_for_func( +// ctx: &DalContext, +// func_id: &FuncId, +// ) -> AttributePrototypeResult> { +// let rows = ctx +// .txns() +// .await? +// .pg() +// .query(FIND_FOR_FUNC, &[ctx.tenancy(), ctx.visibility(), func_id]) +// .await?; + +// Ok(standard_model::objects_from_rows(rows)?) +// } + +// pub async fn find_for_func_as_variant_and_component( +// ctx: &DalContext, +// func_id: FuncId, +// ) -> AttributePrototypeResult> { +// let mut result = vec![]; + +// let rows = ctx +// .txns() +// .await? +// .pg() +// .query( +// FIND_FOR_FUNC_AS_VARIANT_AND_COMPONENT, +// &[ctx.tenancy(), ctx.visibility(), &func_id], +// ) +// .await?; + +// for row in rows.into_iter() { +// let schema_variant_id: SchemaVariantId = row.try_get("schema_variant_id")?; +// let component_id: ComponentId = row.try_get("component_id")?; + +// result.push((schema_variant_id, component_id)); +// } + +// Ok(result) +// } + +// pub async fn find_for_context_and_key( +// ctx: &DalContext, +// context: AttributeContext, +// key: &Option, +// ) -> AttributePrototypeResult> { +// let rows = if key.is_some() { +// ctx.txns() +// .await? 
+// .pg() +// .query( +// FIND_FOR_CONTEXT_AND_KEY, +// &[ +// ctx.tenancy(), +// ctx.visibility(), +// &context.prop_id(), +// &context.internal_provider_id(), +// &context.external_provider_id(), +// &context.component_id(), +// &key, +// ], +// ) +// .await? +// } else { +// ctx.txns() +// .await? +// .pg() +// .query( +// FIND_FOR_CONTEXT_NULL_KEY, +// &[ +// ctx.tenancy(), +// ctx.visibility(), +// &context.prop_id(), +// &context.internal_provider_id(), +// &context.external_provider_id(), +// &context.component_id(), +// ], +// ) +// .await? +// }; + +// Ok(standard_model::objects_from_rows(rows)?) +// } + +// pub async fn external_provider( +// &self, +// ctx: &DalContext, +// ) -> AttributePrototypeResult { +// ExternalProvider::get_by_id(ctx, &self.context.external_provider_id()) +// .await? +// .ok_or(AttributePrototypeError::ExternalProviderNotFound( +// self.context.external_provider_id(), +// )) +// } + +// pub async fn internal_provider( +// &self, +// ctx: &DalContext, +// ) -> AttributePrototypeResult { +// InternalProvider::get_by_id(ctx, &self.context.internal_provider_id()) +// .await? +// .ok_or(AttributePrototypeError::InternalProviderNotFound( +// self.context.internal_provider_id(), +// )) +// } + +// pub async fn prop(&self, ctx: &DalContext) -> AttributePrototypeResult { +// Prop::get_by_id(ctx, &self.context.prop_id()).await?.ok_or( +// AttributePrototypeError::PropNotFound(self.context.prop_id()), +// ) +// } +// } + +// #[derive(Debug, Clone, Serialize, Deserialize)] +// pub struct AttributePrototypeArgumentValues { +// pub attribute_prototype_id: AttributePrototypeId, +// pub argument_name: String, +// pub values: Vec, +// } diff --git a/lib/dal/src/attribute/prototype/argument.rs b/lib/dal/src/attribute/prototype/argument.rs index 823dc34829..e8a63bc446 100644 --- a/lib/dal/src/attribute/prototype/argument.rs +++ b/lib/dal/src/attribute/prototype/argument.rs @@ -1,6 +1,6 @@ -//! 
An [`AttributePrototypeArgument`] represents an argument name and how to dynamically derive -//! the corresponding value. [`AttributePrototype`](crate::AttributePrototype) can have multiple -//! arguments. +// //! An [`AttributePrototypeArgument`] represents an argument name and how to dynamically derive +// //! the corresponding value. [`AttributePrototype`](crate::AttributePrototype) can have multiple +// //! arguments. use serde::{Deserialize, Serialize}; use thiserror::Error; @@ -8,449 +8,449 @@ use thiserror::Error; use si_data_pg::PgError; use telemetry::prelude::*; -use crate::{ - func::argument::FuncArgumentId, impl_standard_model, pk, - provider::internal::InternalProviderId, standard_model, standard_model_accessor, - AttributePrototypeId, ComponentId, DalContext, ExternalProviderId, HistoryEventError, - StandardModel, StandardModelError, Tenancy, Timestamp, TransactionsError, Visibility, -}; - -const LIST_FOR_ATTRIBUTE_PROTOTYPE: &str = - include_str!("../../queries/attribute_prototype_argument/list_for_attribute_prototype.sql"); -const LIST_FOR_FUNC_ARGUMENT_ID: &str = - include_str!("../../queries/attribute_prototype_argument/list_for_func_argument.sql"); -const FIND_FOR_PROVIDERS_AND_COMPONENTS: &str = include_str!( - "../../queries/attribute_prototype_argument/find_for_providers_and_components.sql" -); - -#[remain::sorted] -#[derive(Error, Debug)] -pub enum AttributePrototypeArgumentError { - #[error("cannot update set field to become unset: {0}")] - CannotFlipSetFieldToUnset(&'static str), - #[error("cannot update unset field to become set: {0}")] - CannotFlipUnsetFieldToSet(&'static str), - #[error("history event error: {0}")] - HistoryEvent(#[from] HistoryEventError), - #[error("pg error: {0}")] - Pg(#[from] PgError), - #[error("required value fields must be set, found at least one unset required value field")] - RequiredValueFieldsUnset, - #[error("serde json error: {0}")] - SerdeJson(#[from] serde_json::Error), - #[error("standard model error: 
{0}")] - StandardModel(#[from] StandardModelError), - #[error("transactions error: {0}")] - Transactions(#[from] TransactionsError), -} - -pub type AttributePrototypeArgumentResult = Result; - -pk!(AttributePrototypeArgumentPk); -pk!(AttributePrototypeArgumentId); - -/// Contains a "key" and fields to derive a "value" that dynamically used as an argument for a -/// [`AttributePrototypes`](crate::AttributePrototype) function execution. -#[derive(Deserialize, Serialize, Debug, Clone, PartialEq, Eq)] -pub struct AttributePrototypeArgument { - pk: AttributePrototypeArgumentPk, - id: AttributePrototypeArgumentId, - #[serde(flatten)] - tenancy: Tenancy, - #[serde(flatten)] - visibility: Visibility, - #[serde(flatten)] - timestamp: Timestamp, - - /// Indicates the [`AttributePrototype`](crate::AttributePrototype) that [`Self`] is used as - /// an argument for. - attribute_prototype_id: AttributePrototypeId, - /// Where to find the name and type of the "key" for a given argument. - func_argument_id: FuncArgumentId, - /// Where to find the value for a given argument for _intra_ [`Component`](crate::Component) - /// connections. - internal_provider_id: InternalProviderId, - /// Where to find the value for a given argument for _inter_ [`Component`](crate::Component) - /// connections. - external_provider_id: ExternalProviderId, - /// For _inter_ [`Component`](crate::Component) connections, this field provides additional - /// information to determine the _source_ of the value. - tail_component_id: ComponentId, - /// For _inter_ [`Component`](crate::Component) connections, this field provides additional - /// information to determine the _destination_ of the value. - head_component_id: ComponentId, -} - -#[derive(Serialize, Deserialize, Debug)] -pub struct AttributePrototypeArgumentGroup { - pub name: String, - pub arguments: Vec, -} - -impl_standard_model! 
{ - model: AttributePrototypeArgument, - pk: AttributePrototypeArgumentPk, - id: AttributePrototypeArgumentId, - table_name: "attribute_prototype_arguments", - history_event_label_base: "attribute_prototype_argument", - history_event_message_name: "Attribute Prototype Argument" -} - -impl AttributePrototypeArgument { - #[instrument(skip_all)] - /// Create a new [`AttributePrototypeArgument`] for _intra_ [`Component`](crate::Component) use. - pub async fn new_for_intra_component( - ctx: &DalContext, - attribute_prototype_id: AttributePrototypeId, - func_argument_id: FuncArgumentId, - internal_provider_id: InternalProviderId, - ) -> AttributePrototypeArgumentResult { - // Ensure the value fields are what we expect. - let external_provider_id = ExternalProviderId::NONE; - let tail_component_id = ComponentId::NONE; - let head_component_id = ComponentId::NONE; - if internal_provider_id == InternalProviderId::NONE { - return Err(AttributePrototypeArgumentError::RequiredValueFieldsUnset); - } - - let row = ctx - .txns() - .await? - .pg() - .query_one( - "SELECT object FROM attribute_prototype_argument_create_v1($1, $2, $3, $4, $5, $6, $7, $8)", - &[ - ctx.tenancy(), - ctx.visibility(), - &attribute_prototype_id, - &func_argument_id, - &internal_provider_id, - &external_provider_id, - &tail_component_id, - &head_component_id, - ], - ) - .await?; - Ok(standard_model::finish_create_from_row(ctx, row).await?) - } - - /// Create a new [`AttributePrototypeArgument`] for _inter_ [`Component`](crate::Component) use. - #[instrument(skip_all)] - pub async fn new_for_inter_component( - ctx: &DalContext, - attribute_prototype_id: AttributePrototypeId, - func_argument_id: FuncArgumentId, - head_component_id: ComponentId, - tail_component_id: ComponentId, - external_provider_id: ExternalProviderId, - ) -> AttributePrototypeArgumentResult { - // Ensure the value fields are what we expect. 
- if external_provider_id == ExternalProviderId::NONE - || tail_component_id == ComponentId::NONE - || head_component_id == ComponentId::NONE - { - return Err(AttributePrototypeArgumentError::RequiredValueFieldsUnset); - } - - // For inter component connections, the internal provider id field must be unset. - let internal_provider_id = InternalProviderId::NONE; - - let row = ctx - .txns() - .await? - .pg() - .query_one( - "SELECT object FROM attribute_prototype_argument_create_v1($1, $2, $3, $4, $5, $6, $7, $8)", - &[ - ctx.tenancy(), - ctx.visibility(), - &attribute_prototype_id, - &func_argument_id, - &internal_provider_id, - &external_provider_id, - &tail_component_id, - &head_component_id, - ], - ) - .await?; - Ok(standard_model::finish_create_from_row(ctx, row).await?) - } - - /// Create a new [`AttributePrototypeArgument`] for _inter_ [`Component`](crate::Component) use. - #[instrument(skip_all)] - pub async fn new_explicit_internal_to_explicit_internal_inter_component( - ctx: &DalContext, - attribute_prototype_id: AttributePrototypeId, - func_argument_id: FuncArgumentId, - head_component_id: ComponentId, - tail_component_id: ComponentId, - internal_provider_id: InternalProviderId, - ) -> AttributePrototypeArgumentResult { - // Ensure the value fields are what we expect. - if internal_provider_id == InternalProviderId::NONE - || tail_component_id == ComponentId::NONE - || head_component_id == ComponentId::NONE - { - return Err(AttributePrototypeArgumentError::RequiredValueFieldsUnset); - } - - // For inter component connections, the internal provider id field must be unset. - let external_provider_id = ExternalProviderId::NONE; - - let row = ctx - .txns() - .await? 
- .pg() - .query_one( - "SELECT object FROM attribute_prototype_argument_create_v1($1, $2, $3, $4, $5, $6, $7, $8)", - &[ - ctx.tenancy(), - ctx.visibility(), - &attribute_prototype_id, - &func_argument_id, - &internal_provider_id, - &external_provider_id, - &tail_component_id, - &head_component_id, - ], - ) - .await?; - Ok(standard_model::finish_create_from_row(ctx, row).await?) - } - - /// Create a new [`AttributePrototypeArgument`] for _inter_ [`Component`](crate::Component) use. - #[instrument(skip_all)] - pub async fn new_external_to_external_inter_component( - ctx: &DalContext, - attribute_prototype_id: AttributePrototypeId, - func_argument_id: FuncArgumentId, - head_component_id: ComponentId, - tail_component_id: ComponentId, - external_provider_id: ExternalProviderId, - ) -> AttributePrototypeArgumentResult { - // Ensure the value fields are what we expect. - if external_provider_id == ExternalProviderId::NONE - || tail_component_id == ComponentId::NONE - || head_component_id == ComponentId::NONE - { - return Err(AttributePrototypeArgumentError::RequiredValueFieldsUnset); - } - - // For inter component connections, the internal provider id field must be unset. - let internal_provider_id = InternalProviderId::NONE; - - let row = ctx - .txns() - .await? - .pg() - .query_one( - "SELECT object FROM attribute_prototype_argument_create_v1($1, $2, $3, $4, $5, $6, $7, $8)", - &[ - ctx.tenancy(), - ctx.visibility(), - &attribute_prototype_id, - &func_argument_id, - &internal_provider_id, - &external_provider_id, - &tail_component_id, - &head_component_id, - ], - ) - .await?; - Ok(standard_model::finish_create_from_row(ctx, row).await?) 
- } - - standard_model_accessor!( - attribute_prototype_id, - Pk(AttributePrototypeId), - AttributePrototypeArgumentResult - ); - standard_model_accessor!( - func_argument_id, - Pk(FuncArgumentId), - AttributePrototypeArgumentResult - ); - standard_model_accessor!( - internal_provider_id, - Pk(InternalProviderId), - AttributePrototypeArgumentResult - ); - standard_model_accessor!( - external_provider_id, - Pk(ExternalProviderId), - AttributePrototypeArgumentResult - ); - standard_model_accessor!( - tail_component_id, - Pk(ComponentId), - AttributePrototypeArgumentResult - ); - standard_model_accessor!( - head_component_id, - Pk(ComponentId), - AttributePrototypeArgumentResult - ); - - /// Wraps the standard model accessor for "internal_provider_id" to ensure that a set value - /// cannot become unset and vice versa. - pub async fn set_internal_provider_id_safe( - &mut self, - ctx: &DalContext, - internal_provider_id: InternalProviderId, - ) -> AttributePrototypeArgumentResult<()> { - if self.internal_provider_id != InternalProviderId::NONE - && internal_provider_id == InternalProviderId::NONE - { - return Err(AttributePrototypeArgumentError::CannotFlipUnsetFieldToSet( - "InternalProviderId", - )); - }; - if self.internal_provider_id == InternalProviderId::NONE - && internal_provider_id != InternalProviderId::NONE - { - return Err(AttributePrototypeArgumentError::CannotFlipSetFieldToUnset( - "InternalProviderId", - )); - } - self.set_internal_provider_id(ctx, internal_provider_id) - .await?; - Ok(()) - } - - /// Wraps the standard model accessor for "external_provider_id" to ensure that a set value - /// cannot become unset and vice versa. 
- pub async fn set_external_provider_id_safe( - mut self, - ctx: &DalContext, - external_provider_id: ExternalProviderId, - ) -> AttributePrototypeArgumentResult<()> { - if self.external_provider_id != ExternalProviderId::NONE - && external_provider_id == ExternalProviderId::NONE - { - return Err(AttributePrototypeArgumentError::CannotFlipUnsetFieldToSet( - "ExternalProviderId", - )); - } - if self.external_provider_id == ExternalProviderId::NONE - && external_provider_id != ExternalProviderId::NONE - { - return Err(AttributePrototypeArgumentError::CannotFlipSetFieldToUnset( - "ExternalProviderId", - )); - } - self.set_external_provider_id(ctx, external_provider_id) - .await?; - Ok(()) - } - - /// Wraps the standard model accessor for "tail_component_id" to ensure that a set value - /// cannot become unset and vice versa. - pub async fn set_tail_component_id_safe( - mut self, - ctx: &DalContext, - tail_component_id: ComponentId, - ) -> AttributePrototypeArgumentResult<()> { - if self.tail_component_id != ComponentId::NONE && tail_component_id == ComponentId::NONE { - return Err(AttributePrototypeArgumentError::CannotFlipUnsetFieldToSet( - "tail ComponentId", - )); - } - if self.tail_component_id == ComponentId::NONE && tail_component_id != ComponentId::NONE { - return Err(AttributePrototypeArgumentError::CannotFlipSetFieldToUnset( - "tail ComponentId", - )); - } - self.set_tail_component_id(ctx, tail_component_id).await?; - Ok(()) - } - - /// Wraps the standard model accessor for "head_component_id" to ensure that a set value - /// cannot become unset and vice versa. 
- pub async fn set_head_component_id_safe( - mut self, - ctx: &DalContext, - head_component_id: ComponentId, - ) -> AttributePrototypeArgumentResult<()> { - if self.head_component_id != ComponentId::NONE && head_component_id == ComponentId::NONE { - return Err(AttributePrototypeArgumentError::CannotFlipUnsetFieldToSet( - "head ComponentId", - )); - } - if self.head_component_id == ComponentId::NONE && head_component_id != ComponentId::NONE { - return Err(AttributePrototypeArgumentError::CannotFlipSetFieldToUnset( - "head ComponentId", - )); - } - self.set_head_component_id(ctx, head_component_id).await?; - Ok(()) - } - - /// Determines if the [`InternalProviderId`](crate::InternalProvider) is unset. This function - /// can be useful for determining how to build [`FuncBinding`](crate::FuncBinding) arguments. - pub fn is_internal_provider_unset(&self) -> bool { - self.internal_provider_id == InternalProviderId::NONE - } - - /// List all [`AttributePrototypeArguments`](Self) for a given - /// [`AttributePrototype`](crate::AttributePrototype). - #[tracing::instrument(skip(ctx))] - pub async fn list_for_attribute_prototype( - ctx: &DalContext, - attribute_prototype_id: AttributePrototypeId, - ) -> AttributePrototypeArgumentResult> { - let rows = ctx - .txns() - .await? - .pg() - .query( - LIST_FOR_ATTRIBUTE_PROTOTYPE, - &[ctx.tenancy(), ctx.visibility(), &attribute_prototype_id], - ) - .await?; - Ok(standard_model::objects_from_rows(rows)?) - } - - /// List all [`AttributePrototypeArguments`](Self) for a given [`FuncArgument`](crate::func::argument::FuncArgument). - pub async fn list_by_func_argument_id( - ctx: &DalContext, - func_argument_id: FuncArgumentId, - ) -> AttributePrototypeArgumentResult> { - let rows = ctx - .txns() - .await? - .pg() - .query( - LIST_FOR_FUNC_ARGUMENT_ID, - &[ctx.tenancy(), ctx.visibility(), &func_argument_id], - ) - .await?; - Ok(standard_model::objects_from_rows(rows)?) 
- } - - pub async fn find_for_providers_and_components( - ctx: &DalContext, - external_provider_id: &ExternalProviderId, - internal_provider_id: &InternalProviderId, - tail_component: &ComponentId, - head_component: &ComponentId, - ) -> AttributePrototypeArgumentResult> { - let row = ctx - .txns() - .await? - .pg() - .query_opt( - FIND_FOR_PROVIDERS_AND_COMPONENTS, - &[ - ctx.tenancy(), - ctx.visibility(), - external_provider_id, - internal_provider_id, - tail_component, - head_component, - ], - ) - .await?; - - Ok(standard_model::object_option_from_row_option(row)?) - } -} +// use crate::{ +// func::argument::FuncArgumentId, impl_standard_model, pk, +// provider::internal::InternalProviderId, standard_model, standard_model_accessor, +// AttributePrototypeId, ComponentId, DalContext, ExternalProviderId, HistoryEventError, +// StandardModel, StandardModelError, Tenancy, Timestamp, TransactionsError, Visibility, +// }; + +// const LIST_FOR_ATTRIBUTE_PROTOTYPE: &str = +// include_str!("../../queries/attribute_prototype_argument/list_for_attribute_prototype.sql"); +// const LIST_FOR_FUNC_ARGUMENT_ID: &str = +// include_str!("../../queries/attribute_prototype_argument/list_for_func_argument.sql"); +// const FIND_FOR_PROVIDERS_AND_COMPONENTS: &str = include_str!( +// "../../queries/attribute_prototype_argument/find_for_providers_and_components.sql" +// ); + +// #[remain::sorted] +// #[derive(Error, Debug)] +// pub enum AttributePrototypeArgumentError { +// #[error("cannot update set field to become unset: {0}")] +// CannotFlipSetFieldToUnset(&'static str), +// #[error("cannot update unset field to become set: {0}")] +// CannotFlipUnsetFieldToSet(&'static str), +// #[error("history event error: {0}")] +// HistoryEvent(#[from] HistoryEventError), +// #[error("pg error: {0}")] +// Pg(#[from] PgError), +// #[error("required value fields must be set, found at least one unset required value field")] +// RequiredValueFieldsUnset, +// #[error("serde json error: {0}")] +// 
SerdeJson(#[from] serde_json::Error), +// #[error("standard model error: {0}")] +// StandardModel(#[from] StandardModelError), +// #[error("transactions error: {0}")] +// Transactions(#[from] TransactionsError), +// } + +// pub type AttributePrototypeArgumentResult = Result; + +// pk!(AttributePrototypeArgumentPk); +// pk!(AttributePrototypeArgumentId); + +// /// Contains a "key" and fields to derive a "value" that dynamically used as an argument for a +// /// [`AttributePrototypes`](crate::AttributePrototype) function execution. +// #[derive(Deserialize, Serialize, Debug, Clone, PartialEq, Eq)] +// pub struct AttributePrototypeArgument { +// pk: AttributePrototypeArgumentPk, +// id: AttributePrototypeArgumentId, +// #[serde(flatten)] +// tenancy: Tenancy, +// #[serde(flatten)] +// visibility: Visibility, +// #[serde(flatten)] +// timestamp: Timestamp, + +// /// Indicates the [`AttributePrototype`](crate::AttributePrototype) that [`Self`] is used as +// /// an argument for. +// attribute_prototype_id: AttributePrototypeId, +// /// Where to find the name and type of the "key" for a given argument. +// func_argument_id: FuncArgumentId, +// /// Where to find the value for a given argument for _intra_ [`Component`](crate::Component) +// /// connections. +// internal_provider_id: InternalProviderId, +// /// Where to find the value for a given argument for _inter_ [`Component`](crate::Component) +// /// connections. +// external_provider_id: ExternalProviderId, +// /// For _inter_ [`Component`](crate::Component) connections, this field provides additional +// /// information to determine the _source_ of the value. +// tail_component_id: ComponentId, +// /// For _inter_ [`Component`](crate::Component) connections, this field provides additional +// /// information to determine the _destination_ of the value. 
+// head_component_id: ComponentId, +// } + +// #[derive(Serialize, Deserialize, Debug)] +// pub struct AttributePrototypeArgumentGroup { +// pub name: String, +// pub arguments: Vec, +// } + +// impl_standard_model! { +// model: AttributePrototypeArgument, +// pk: AttributePrototypeArgumentPk, +// id: AttributePrototypeArgumentId, +// table_name: "attribute_prototype_arguments", +// history_event_label_base: "attribute_prototype_argument", +// history_event_message_name: "Attribute Prototype Argument" +// } + +// impl AttributePrototypeArgument { +// #[instrument(skip_all)] +// /// Create a new [`AttributePrototypeArgument`] for _intra_ [`Component`](crate::Component) use. +// pub async fn new_for_intra_component( +// ctx: &DalContext, +// attribute_prototype_id: AttributePrototypeId, +// func_argument_id: FuncArgumentId, +// internal_provider_id: InternalProviderId, +// ) -> AttributePrototypeArgumentResult { +// // Ensure the value fields are what we expect. +// let external_provider_id = ExternalProviderId::NONE; +// let tail_component_id = ComponentId::NONE; +// let head_component_id = ComponentId::NONE; +// if internal_provider_id == InternalProviderId::NONE { +// return Err(AttributePrototypeArgumentError::RequiredValueFieldsUnset); +// } + +// let row = ctx +// .txns() +// .await? +// .pg() +// .query_one( +// "SELECT object FROM attribute_prototype_argument_create_v1($1, $2, $3, $4, $5, $6, $7, $8)", +// &[ +// ctx.tenancy(), +// ctx.visibility(), +// &attribute_prototype_id, +// &func_argument_id, +// &internal_provider_id, +// &external_provider_id, +// &tail_component_id, +// &head_component_id, +// ], +// ) +// .await?; +// Ok(standard_model::finish_create_from_row(ctx, row).await?) +// } + +// /// Create a new [`AttributePrototypeArgument`] for _inter_ [`Component`](crate::Component) use. 
+// #[instrument(skip_all)] +// pub async fn new_for_inter_component( +// ctx: &DalContext, +// attribute_prototype_id: AttributePrototypeId, +// func_argument_id: FuncArgumentId, +// head_component_id: ComponentId, +// tail_component_id: ComponentId, +// external_provider_id: ExternalProviderId, +// ) -> AttributePrototypeArgumentResult { +// // Ensure the value fields are what we expect. +// if external_provider_id == ExternalProviderId::NONE +// || tail_component_id == ComponentId::NONE +// || head_component_id == ComponentId::NONE +// { +// return Err(AttributePrototypeArgumentError::RequiredValueFieldsUnset); +// } + +// // For inter component connections, the internal provider id field must be unset. +// let internal_provider_id = InternalProviderId::NONE; + +// let row = ctx +// .txns() +// .await? +// .pg() +// .query_one( +// "SELECT object FROM attribute_prototype_argument_create_v1($1, $2, $3, $4, $5, $6, $7, $8)", +// &[ +// ctx.tenancy(), +// ctx.visibility(), +// &attribute_prototype_id, +// &func_argument_id, +// &internal_provider_id, +// &external_provider_id, +// &tail_component_id, +// &head_component_id, +// ], +// ) +// .await?; +// Ok(standard_model::finish_create_from_row(ctx, row).await?) +// } + +// /// Create a new [`AttributePrototypeArgument`] for _inter_ [`Component`](crate::Component) use. +// #[instrument(skip_all)] +// pub async fn new_explicit_internal_to_explicit_internal_inter_component( +// ctx: &DalContext, +// attribute_prototype_id: AttributePrototypeId, +// func_argument_id: FuncArgumentId, +// head_component_id: ComponentId, +// tail_component_id: ComponentId, +// internal_provider_id: InternalProviderId, +// ) -> AttributePrototypeArgumentResult { +// // Ensure the value fields are what we expect. 
+// if internal_provider_id == InternalProviderId::NONE +// || tail_component_id == ComponentId::NONE +// || head_component_id == ComponentId::NONE +// { +// return Err(AttributePrototypeArgumentError::RequiredValueFieldsUnset); +// } + +// // For inter component connections, the internal provider id field must be unset. +// let external_provider_id = ExternalProviderId::NONE; + +// let row = ctx +// .txns() +// .await? +// .pg() +// .query_one( +// "SELECT object FROM attribute_prototype_argument_create_v1($1, $2, $3, $4, $5, $6, $7, $8)", +// &[ +// ctx.tenancy(), +// ctx.visibility(), +// &attribute_prototype_id, +// &func_argument_id, +// &internal_provider_id, +// &external_provider_id, +// &tail_component_id, +// &head_component_id, +// ], +// ) +// .await?; +// Ok(standard_model::finish_create_from_row(ctx, row).await?) +// } + +// /// Create a new [`AttributePrototypeArgument`] for _inter_ [`Component`](crate::Component) use. +// #[instrument(skip_all)] +// pub async fn new_external_to_external_inter_component( +// ctx: &DalContext, +// attribute_prototype_id: AttributePrototypeId, +// func_argument_id: FuncArgumentId, +// head_component_id: ComponentId, +// tail_component_id: ComponentId, +// external_provider_id: ExternalProviderId, +// ) -> AttributePrototypeArgumentResult { +// // Ensure the value fields are what we expect. +// if external_provider_id == ExternalProviderId::NONE +// || tail_component_id == ComponentId::NONE +// || head_component_id == ComponentId::NONE +// { +// return Err(AttributePrototypeArgumentError::RequiredValueFieldsUnset); +// } + +// // For inter component connections, the internal provider id field must be unset. +// let internal_provider_id = InternalProviderId::NONE; + +// let row = ctx +// .txns() +// .await? 
+// .pg() +// .query_one( +// "SELECT object FROM attribute_prototype_argument_create_v1($1, $2, $3, $4, $5, $6, $7, $8)", +// &[ +// ctx.tenancy(), +// ctx.visibility(), +// &attribute_prototype_id, +// &func_argument_id, +// &internal_provider_id, +// &external_provider_id, +// &tail_component_id, +// &head_component_id, +// ], +// ) +// .await?; +// Ok(standard_model::finish_create_from_row(ctx, row).await?) +// } + +// standard_model_accessor!( +// attribute_prototype_id, +// Pk(AttributePrototypeId), +// AttributePrototypeArgumentResult +// ); +// standard_model_accessor!( +// func_argument_id, +// Pk(FuncArgumentId), +// AttributePrototypeArgumentResult +// ); +// standard_model_accessor!( +// internal_provider_id, +// Pk(InternalProviderId), +// AttributePrototypeArgumentResult +// ); +// standard_model_accessor!( +// external_provider_id, +// Pk(ExternalProviderId), +// AttributePrototypeArgumentResult +// ); +// standard_model_accessor!( +// tail_component_id, +// Pk(ComponentId), +// AttributePrototypeArgumentResult +// ); +// standard_model_accessor!( +// head_component_id, +// Pk(ComponentId), +// AttributePrototypeArgumentResult +// ); + +// /// Wraps the standard model accessor for "internal_provider_id" to ensure that a set value +// /// cannot become unset and vice versa. 
+// pub async fn set_internal_provider_id_safe( +// &mut self, +// ctx: &DalContext, +// internal_provider_id: InternalProviderId, +// ) -> AttributePrototypeArgumentResult<()> { +// if self.internal_provider_id != InternalProviderId::NONE +// && internal_provider_id == InternalProviderId::NONE +// { +// return Err(AttributePrototypeArgumentError::CannotFlipUnsetFieldToSet( +// "InternalProviderId", +// )); +// }; +// if self.internal_provider_id == InternalProviderId::NONE +// && internal_provider_id != InternalProviderId::NONE +// { +// return Err(AttributePrototypeArgumentError::CannotFlipSetFieldToUnset( +// "InternalProviderId", +// )); +// } +// self.set_internal_provider_id(ctx, internal_provider_id) +// .await?; +// Ok(()) +// } + +// /// Wraps the standard model accessor for "external_provider_id" to ensure that a set value +// /// cannot become unset and vice versa. +// pub async fn set_external_provider_id_safe( +// mut self, +// ctx: &DalContext, +// external_provider_id: ExternalProviderId, +// ) -> AttributePrototypeArgumentResult<()> { +// if self.external_provider_id != ExternalProviderId::NONE +// && external_provider_id == ExternalProviderId::NONE +// { +// return Err(AttributePrototypeArgumentError::CannotFlipUnsetFieldToSet( +// "ExternalProviderId", +// )); +// } +// if self.external_provider_id == ExternalProviderId::NONE +// && external_provider_id != ExternalProviderId::NONE +// { +// return Err(AttributePrototypeArgumentError::CannotFlipSetFieldToUnset( +// "ExternalProviderId", +// )); +// } +// self.set_external_provider_id(ctx, external_provider_id) +// .await?; +// Ok(()) +// } + +// /// Wraps the standard model accessor for "tail_component_id" to ensure that a set value +// /// cannot become unset and vice versa. 
+// pub async fn set_tail_component_id_safe( +// mut self, +// ctx: &DalContext, +// tail_component_id: ComponentId, +// ) -> AttributePrototypeArgumentResult<()> { +// if self.tail_component_id != ComponentId::NONE && tail_component_id == ComponentId::NONE { +// return Err(AttributePrototypeArgumentError::CannotFlipUnsetFieldToSet( +// "tail ComponentId", +// )); +// } +// if self.tail_component_id == ComponentId::NONE && tail_component_id != ComponentId::NONE { +// return Err(AttributePrototypeArgumentError::CannotFlipSetFieldToUnset( +// "tail ComponentId", +// )); +// } +// self.set_tail_component_id(ctx, tail_component_id).await?; +// Ok(()) +// } + +// /// Wraps the standard model accessor for "head_component_id" to ensure that a set value +// /// cannot become unset and vice versa. +// pub async fn set_head_component_id_safe( +// mut self, +// ctx: &DalContext, +// head_component_id: ComponentId, +// ) -> AttributePrototypeArgumentResult<()> { +// if self.head_component_id != ComponentId::NONE && head_component_id == ComponentId::NONE { +// return Err(AttributePrototypeArgumentError::CannotFlipUnsetFieldToSet( +// "head ComponentId", +// )); +// } +// if self.head_component_id == ComponentId::NONE && head_component_id != ComponentId::NONE { +// return Err(AttributePrototypeArgumentError::CannotFlipSetFieldToUnset( +// "head ComponentId", +// )); +// } +// self.set_head_component_id(ctx, head_component_id).await?; +// Ok(()) +// } + +// /// Determines if the [`InternalProviderId`](crate::InternalProvider) is unset. This function +// /// can be useful for determining how to build [`FuncBinding`](crate::FuncBinding) arguments. +// pub fn is_internal_provider_unset(&self) -> bool { +// self.internal_provider_id == InternalProviderId::NONE +// } + +// /// List all [`AttributePrototypeArguments`](Self) for a given +// /// [`AttributePrototype`](crate::AttributePrototype). 
+// #[tracing::instrument(skip(ctx))] +// pub async fn list_for_attribute_prototype( +// ctx: &DalContext, +// attribute_prototype_id: AttributePrototypeId, +// ) -> AttributePrototypeArgumentResult> { +// let rows = ctx +// .txns() +// .await? +// .pg() +// .query( +// LIST_FOR_ATTRIBUTE_PROTOTYPE, +// &[ctx.tenancy(), ctx.visibility(), &attribute_prototype_id], +// ) +// .await?; +// Ok(standard_model::objects_from_rows(rows)?) +// } + +// /// List all [`AttributePrototypeArguments`](Self) for a given [`FuncArgument`](crate::func::argument::FuncArgument). +// pub async fn list_by_func_argument_id( +// ctx: &DalContext, +// func_argument_id: FuncArgumentId, +// ) -> AttributePrototypeArgumentResult> { +// let rows = ctx +// .txns() +// .await? +// .pg() +// .query( +// LIST_FOR_FUNC_ARGUMENT_ID, +// &[ctx.tenancy(), ctx.visibility(), &func_argument_id], +// ) +// .await?; +// Ok(standard_model::objects_from_rows(rows)?) +// } + +// pub async fn find_for_providers_and_components( +// ctx: &DalContext, +// external_provider_id: &ExternalProviderId, +// internal_provider_id: &InternalProviderId, +// tail_component: &ComponentId, +// head_component: &ComponentId, +// ) -> AttributePrototypeArgumentResult> { +// let row = ctx +// .txns() +// .await? +// .pg() +// .query_opt( +// FIND_FOR_PROVIDERS_AND_COMPONENTS, +// &[ +// ctx.tenancy(), +// ctx.visibility(), +// external_provider_id, +// internal_provider_id, +// tail_component, +// head_component, +// ], +// ) +// .await?; + +// Ok(standard_model::object_option_from_row_option(row)?) +// } +// } diff --git a/lib/dal/src/attribute/value.rs b/lib/dal/src/attribute/value.rs index 7142c9946d..7a80380b98 100644 --- a/lib/dal/src/attribute/value.rs +++ b/lib/dal/src/attribute/value.rs @@ -37,1252 +37,1100 @@ //! to find the [`AttributeValue`] whose [`context`](crate::AttributeContext) corresponds to a //! direct child [`Prop`](crate::Prop) of the [`RootProp`](crate::RootProp). 
+use content_store::ContentHash; use serde::{Deserialize, Serialize}; -use si_data_nats::NatsError; -use si_data_pg::PgError; -use std::collections::HashMap; +use strum::EnumDiscriminants; use telemetry::prelude::*; -use thiserror::Error; - -use crate::func::before::before_funcs_for_component; -use crate::{ - attribute::{ - context::{ - AttributeContext, AttributeContextBuilder, AttributeContextBuilderError, - AttributeReadContext, - }, - prototype::{AttributePrototype, AttributePrototypeId}, - }, - func::{ - binding::{FuncBindingError, FuncBindingId}, - binding_return_value::{ - FuncBindingReturnValue, FuncBindingReturnValueError, FuncBindingReturnValueId, - }, - }, - impl_standard_model, - job::definition::DependentValuesUpdate, - pk, - standard_model::{self, TypeHint}, - standard_model_accessor, standard_model_belongs_to, standard_model_has_many, - AttributeContextError, AttributePrototypeArgumentError, Component, ComponentId, DalContext, - Func, FuncBinding, FuncError, HistoryEventError, IndexMap, InternalProvider, - InternalProviderId, Prop, PropError, PropId, PropKind, StandardModel, StandardModelError, - Tenancy, Timestamp, TransactionsError, Visibility, WsEventError, -}; -pub mod view; +use crate::workspace_snapshot::content_address::ContentAddress; +use crate::{pk, StandardModel, Timestamp}; -const CHILD_ATTRIBUTE_VALUES_FOR_CONTEXT: &str = - include_str!("../queries/attribute_value/child_attribute_values_for_context.sql"); -const FETCH_UPDATE_GRAPH_DATA: &str = - include_str!("../queries/attribute_value/fetch_update_graph_data.sql"); -const IS_FOR_INTERNAL_PROVIDER_OF_ROOT_PROP: &str = - include_str!("../queries/attribute_value/is_for_internal_provider_of_root_prop.sql"); -const FIND_PROP_FOR_VALUE: &str = - include_str!("../queries/attribute_value/find_prop_for_value.sql"); -const FIND_WITH_PARENT_AND_KEY_FOR_CONTEXT: &str = - include_str!("../queries/attribute_value/find_with_parent_and_key_for_context.sql"); -const 
FIND_WITH_PARENT_AND_PROTOTYPE_FOR_CONTEXT: &str = - include_str!("../queries/attribute_value/find_with_parent_and_prototype_for_context.sql"); -const LIST_FOR_CONTEXT: &str = include_str!("../queries/attribute_value/list_for_context.sql"); -const LIST_PAYLOAD_FOR_READ_CONTEXT: &str = - include_str!("../queries/attribute_value/list_payload_for_read_context.sql"); -const LIST_PAYLOAD_FOR_READ_CONTEXT_AND_ROOT: &str = - include_str!("../queries/attribute_value/list_payload_for_read_context_and_root.sql"); - -#[remain::sorted] -#[derive(Error, Debug)] -pub enum AttributeValueError { - #[error("AttributeContext error: {0}")] - AttributeContext(#[from] AttributeContextError), - #[error("AttributeContextBuilder error: {0}")] - AttributeContextBuilder(#[from] AttributeContextBuilderError), - #[error("AttributePrototype error: {0}")] - AttributePrototype(String), - #[error("AttributePrototypeArgument error: {0}")] - AttributePrototypeArgument(#[from] AttributePrototypeArgumentError), - #[error("AttributePrototype not found for AttributeValue: {0} ({1:?})")] - AttributePrototypeNotFound(AttributeValueId, Visibility), - #[error("invalid json pointer: {0} for {1}")] - BadJsonPointer(String, String), - #[error("component error: {0}")] - Component(String), - #[error("component not found for id: {0}")] - ComponentNotFound(ComponentId), - #[error("component not found by id: {0}")] - ComponentNotFoundById(ComponentId), - #[error(transparent)] - Council(#[from] council_server::client::Error), - #[error("empty attribute prototype arguments for group name: {0}")] - EmptyAttributePrototypeArgumentsForGroup(String), - #[error("external provider error: {0}")] - ExternalProvider(String), - #[error("found duplicate attribute value ({0}) for self ({1}) for parent: {2}")] - FoundDuplicateForParent(AttributeValueId, AttributeValueId, AttributeValueId), - #[error("found duplicate attribute value ({0}) when creating new attribute value in provider context: {1:?}")] - 
FoundDuplicateForProviderContext(AttributeValueId, AttributeContext), - #[error("func error: {0}")] - Func(#[from] FuncError), - #[error("function result failure: kind={kind}, message={message}, backend={backend}")] - FuncBackendResultFailure { - kind: String, - message: String, - backend: String, - }, - #[error("func binding error: {0}")] - FuncBinding(#[from] FuncBindingError), - #[error("FuncBindingReturnValue error: {0}")] - FuncBindingReturnValue(#[from] FuncBindingReturnValueError), - #[error("FuncBindingReturnValue not found for AttributeValue: {0}")] - FuncBindingReturnValueNotFound(AttributeValueId, Visibility), - #[error("history event error: {0}")] - HistoryEvent(#[from] HistoryEventError), - #[error("{0}")] - IncompatibleAttributeReadContext(&'static str), - #[error("internal provider error: {0}")] - InternalProvider(String), - #[error("internal provider not found by id: {0}")] - InternalProviderNotFound(InternalProviderId), - #[error("found invalid object value fields not found in corresponding prop: {0:?}")] - InvalidObjectValueFields(Vec), - #[error("invalid prop value; expected {0} but got {1}")] - InvalidPropValue(String, serde_json::Value), - #[error("json pointer missing for attribute view {0:?} {1:?}")] - JsonPointerMissing(AttributeValueId, HashMap), - #[error("missing attribute value")] - Missing, - #[error( - "attribute values must have an associated attribute prototype, and this one does not. bug!" 
- )] - MissingAttributePrototype, - #[error("expected prop id {0} to have a child")] - MissingChildProp(PropId), - #[error("component missing in context: {0:?}")] - MissingComponentInReadContext(AttributeReadContext), - #[error("missing attribute value with id: {0}")] - MissingForId(AttributeValueId), - #[error("func not found: {0}")] - MissingFunc(String), - #[error("FuncBinding not found: {0}")] - MissingFuncBinding(FuncBindingId), - #[error("func binding return value not found")] - MissingFuncBindingReturnValue, - #[error("missing value from func binding return value for attribute value id: {0}")] - MissingValueFromFuncBindingReturnValue(AttributeValueId), - #[error("nats txn error: {0}")] - Nats(#[from] NatsError), - #[error("attribute value not found: {0} ({1:?})")] - NotFound(AttributeValueId, Visibility), - #[error("missing attribute value for external provider context: {0:?}")] - NotFoundForExternalProviderContext(AttributeContext), - #[error("missing attribute value for internal provider context: {0:?}")] - NotFoundForInternalProviderContext(AttributeContext), - #[error("No AttributeValue found for AttributeReadContext: {0:?}")] - NotFoundForReadContext(AttributeReadContext), - #[error("using json pointer for attribute view yielded no value")] - NoValueForJsonPointer, - #[error( - "parent must be for an array, map, or object prop: attribute resolver id {0} is for a {1}" - )] - ParentNotAllowed(AttributeValueId, PropKind), - #[error("parent not found or does not exist for value: {0}")] - ParentNotFound(AttributeValueId), - #[error("pg error: {0}")] - Pg(#[from] PgError), - #[error(transparent)] - PgPool(#[from] si_data_pg::PgPoolError), - #[error("prop error: {0}")] - Prop(#[from] Box), - #[error("Prop not found: {0}")] - PropNotFound(PropId), - #[error("schema missing in context")] - SchemaMissing, - #[error("schema not found for component id: {0}")] - SchemaNotFoundForComponent(ComponentId), - #[error("schema variant missing in context")] - 
SchemaVariantMissing, - #[error("schema variant not found for component id: {0}")] - SchemaVariantNotFoundForComponent(ComponentId), - #[error("error serializing/deserializing json: {0}")] - SerdeJson(#[from] serde_json::Error), - #[error("standard model error: {0}")] - StandardModelError(#[from] StandardModelError), - #[error(transparent)] - Transactions(#[from] TransactionsError), - #[error("Unable to create parent AttributeValue: {0}")] - UnableToCreateParent(String), - #[error("the root prop id stack cannot be empty while work queue is not empty")] - UnexpectedEmptyRootStack, - #[error("unexpected prop kind: {0}")] - UnexpectedPropKind(PropKind), - #[error("JSON value failed to parse as an array")] - ValueAsArray, - #[error("JSON value failed to parse as an map")] - ValueAsMap, - #[error("JSON value failed to parse as an object")] - ValueAsObject, - #[error("ws event publishing error")] - WsEvent(#[from] WsEventError), -} +pub mod view; -pub type AttributeValueResult = Result; +// const CHILD_ATTRIBUTE_VALUES_FOR_CONTEXT: &str = +// include_str!("../queries/attribute_value/child_attribute_values_for_context.sql"); +// const FETCH_UPDATE_GRAPH_DATA: &str = +// include_str!("../queries/attribute_value/fetch_update_graph_data.sql"); +// const IS_FOR_INTERNAL_PROVIDER_OF_ROOT_PROP: &str = +// include_str!("../queries/attribute_value/is_for_internal_provider_of_root_prop.sql"); +// const FIND_PROP_FOR_VALUE: &str = +// include_str!("../queries/attribute_value/find_prop_for_value.sql"); +// const FIND_WITH_PARENT_AND_KEY_FOR_CONTEXT: &str = +// include_str!("../queries/attribute_value/find_with_parent_and_key_for_context.sql"); +// const FIND_WITH_PARENT_AND_PROTOTYPE_FOR_CONTEXT: &str = +// include_str!("../queries/attribute_value/find_with_parent_and_prototype_for_context.sql"); +// const LIST_FOR_CONTEXT: &str = include_str!("../queries/attribute_value/list_for_context.sql"); +// const LIST_PAYLOAD_FOR_READ_CONTEXT: &str = +// 
include_str!("../queries/attribute_value/list_payload_for_read_context.sql"); +// const LIST_PAYLOAD_FOR_READ_CONTEXT_AND_ROOT: &str = +// include_str!("../queries/attribute_value/list_payload_for_read_context_and_root.sql"); -pk!(AttributeValuePk); pk!(AttributeValueId); #[derive(Deserialize, Serialize, Debug, Clone, PartialEq, Eq)] pub struct AttributeValue { - pk: AttributeValuePk, - id: AttributeValueId, - func_binding_id: FuncBindingId, - /// The [`FuncBindingReturnValueId`] that represents the value at this specific position & context. - func_binding_return_value_id: FuncBindingReturnValueId, - /// The [`AttributeValueId`] (from a less-specific [`AttributeContext`]) that this - /// [`AttributeValue`] is standing in for in this more-specific [`AttributeContext`]. - proxy_for_attribute_value_id: Option, - /// If this is a `sealed_proxy`, then it should **not** update its [`FuncBindingReturnValueId`] from the - /// [`AttributeValue`] referenced to in `proxy_for_attribute_value_id`. - sealed_proxy: bool, - pub index_map: Option, - pub key: Option, - #[serde(flatten)] - pub context: AttributeContext, - #[serde(flatten)] - tenancy: Tenancy, + pub id: AttributeValueId, #[serde(flatten)] - visibility: Visibility, - #[serde(flatten)] - timestamp: Timestamp, -} - -impl_standard_model! { - model: AttributeValue, - pk: AttributeValuePk, - id: AttributeValueId, - table_name: "attribute_values", - history_event_label_base: "attribute_value", - history_event_message_name: "Attribute Value" + pub timestamp: Timestamp, + /// The unprocessed return value is the "real" result, unprocessed for any other behavior. + /// This is potentially-maybe-only-kinda-sort-of(?) useful for non-scalar values. + /// Example: a populated array. + pub unprocessed_value: Option, + /// The processed return value. + /// Example: empty array. 
+ pub value: Option, } impl AttributeValue { - #[instrument(level = "debug", skip(ctx, key), fields(key))] - pub async fn new( - ctx: &DalContext, - func_binding_id: FuncBindingId, - func_binding_return_value_id: FuncBindingReturnValueId, - context: AttributeContext, - key: Option>, - ) -> AttributeValueResult { - let key: Option = key.map(|s| s.into()); - tracing::Span::current().record("key", &key); - let row = ctx - .txns() - .await? - .pg() - .query_one( - "SELECT new_attribute_value AS object FROM attribute_value_new_v1($1, $2, $3, $4, $5, $6)", - &[ - ctx.tenancy(), - ctx.visibility(), - &func_binding_id, - &func_binding_return_value_id, - &context, - &key, - ], - ) - .await?; - let object: Self = standard_model::finish_create_from_row(ctx, row).await?; - - Ok(object) - } - - standard_model_accessor!( - proxy_for_attribute_value_id, - Option, - AttributeValueResult - ); - standard_model_accessor!(sealed_proxy, bool, AttributeValueResult); - standard_model_accessor!(func_binding_id, Pk(FuncBindingId), AttributeValueResult); - standard_model_accessor!( - func_binding_return_value_id, - Pk(FuncBindingReturnValueId), - AttributeValueResult - ); - standard_model_accessor!(index_map, Option, AttributeValueResult); - standard_model_accessor!(key, Option, AttributeValueResult); - - standard_model_belongs_to!( - lookup_fn: parent_attribute_value, - set_fn: set_parent_attribute_value_unchecked, - unset_fn: unset_parent_attribute_value, - table: "attribute_value_belongs_to_attribute_value", - model_table: "attribute_values", - belongs_to_id: AttributeValueId, - returns: AttributeValue, - result: AttributeValueResult, - ); - - standard_model_has_many!( - lookup_fn: child_attribute_values, - table: "attribute_value_belongs_to_attribute_value", - model_table: "attribute_values", - returns: AttributeValue, - result: AttributeValueResult, - ); - - standard_model_belongs_to!( - lookup_fn: attribute_prototype, - set_fn: set_attribute_prototype, - unset_fn: 
unset_attribute_prototype, - table: "attribute_value_belongs_to_attribute_prototype", - model_table: "attribute_prototypes", - belongs_to_id: AttributePrototypeId, - returns: AttributePrototype, - result: AttributeValueResult, - ); - - pub fn index_map_mut(&mut self) -> Option<&mut IndexMap> { - self.index_map.as_mut() - } - - /// Returns the *unprocessed* [`serde_json::Value`] within the [`FuncBindingReturnValue`](crate::FuncBindingReturnValue) - /// corresponding to the field on [`Self`]. - pub async fn get_unprocessed_value( - &self, - ctx: &DalContext, - ) -> AttributeValueResult> { - match FuncBindingReturnValue::get_by_id(ctx, &self.func_binding_return_value_id).await? { - Some(func_binding_return_value) => { - Ok(func_binding_return_value.unprocessed_value().cloned()) - } - None => Err(AttributeValueError::MissingFuncBindingReturnValue), - } - } - - /// Returns the [`serde_json::Value`] within the [`FuncBindingReturnValue`](crate::FuncBindingReturnValue) - /// corresponding to the field on [`Self`]. - pub async fn get_value( - &self, - ctx: &DalContext, - ) -> AttributeValueResult> { - match FuncBindingReturnValue::get_by_id(ctx, &self.func_binding_return_value_id).await? { - Some(func_binding_return_value) => Ok(func_binding_return_value.value().cloned()), - None => Err(AttributeValueError::MissingFuncBindingReturnValue), - } - } - - pub async fn update_stored_index_map(&self, ctx: &DalContext) -> AttributeValueResult<()> { - standard_model::update( - ctx, - "attribute_values", - "index_map", - self.id(), - &self.index_map, - TypeHint::JsonB, - ) - .await?; - Ok(()) - } - - /// Returns a list of child [`AttributeValues`](crate::AttributeValue) for a given - /// [`AttributeValue`] and [`AttributeReadContext`](crate::AttributeReadContext). 
- pub async fn child_attribute_values_for_context( - ctx: &DalContext, - attribute_value_id: AttributeValueId, - attribute_read_context: AttributeReadContext, - ) -> AttributeValueResult> { - let rows = ctx - .txns() - .await? - .pg() - .query( - CHILD_ATTRIBUTE_VALUES_FOR_CONTEXT, - &[ - ctx.tenancy(), - ctx.visibility(), - &attribute_value_id, - &attribute_read_context, - ], - ) - .await?; - - Ok(standard_model::objects_from_rows(rows)?) - } - - pub async fn find_with_parent_and_prototype_for_context( - ctx: &DalContext, - parent_attribute_value_id: Option, - attribute_prototype_id: AttributePrototypeId, - context: AttributeContext, - ) -> AttributeValueResult> { - let row = ctx - .txns() - .await? - .pg() - .query_opt( - FIND_WITH_PARENT_AND_PROTOTYPE_FOR_CONTEXT, - &[ - ctx.tenancy(), - ctx.visibility(), - &context, - &attribute_prototype_id, - &parent_attribute_value_id, - ], - ) - .await?; - - Ok(standard_model::option_object_from_row(row)?) - } - - /// Find [`Self`] with a given parent value and key. - pub async fn find_with_parent_and_key_for_context( - ctx: &DalContext, - parent_attribute_value_id: Option, - key: Option, - context: AttributeReadContext, - ) -> AttributeValueResult> { - let row = ctx - .txns() - .await? - .pg() - .query_opt( - FIND_WITH_PARENT_AND_KEY_FOR_CONTEXT, - &[ - ctx.tenancy(), - ctx.visibility(), - &context, - &parent_attribute_value_id, - &key, - ], - ) - .await?; - - Ok(standard_model::option_object_from_row(row)?) - } - - /// List [`AttributeValues`](crate::AttributeValue) for a provided - /// [`AttributeReadContext`](crate::AttributeReadContext). - /// - /// If you only anticipate one result to be returned and have an - /// [`AttributeReadContext`](crate::AttributeReadContext) - /// that is also a valid [`AttributeContext`](crate::AttributeContext), then you should use - /// [`Self::find_for_context()`] instead of this method. 
- /// - /// This does _not_ work for maps and arrays, barring the _first_ instance of the array or map - /// object themselves! For those objects, please use - /// [`Self::find_with_parent_and_key_for_context()`]. - pub async fn list_for_context( - ctx: &DalContext, - context: AttributeReadContext, - ) -> AttributeValueResult> { - let rows = ctx - .txns() - .await? - .pg() - .query( - LIST_FOR_CONTEXT, - &[ctx.tenancy(), ctx.visibility(), &context], - ) - .await?; - Ok(standard_model::objects_from_rows(rows)?) - } - - /// Find one [`AttributeValue`](crate::AttributeValue) for a provided - /// [`AttributeReadContext`](crate::AttributeReadContext). - /// - /// This is a modified version of [`Self::list_for_context()`] that requires an - /// [`AttributeReadContext`](crate::AttributeReadContext) - /// that is also a valid [`AttributeContext`](crate::AttributeContext) _and_ "pops" the first - /// row off the rows found (which are sorted from most to least specific). Thus, the "popped" - /// row will corresponding to the most specific [`AttributeValue`] found. - /// - /// This does _not_ work for maps and arrays, barring the _first_ instance of the array or map - /// object themselves! For those objects, please use - /// [`Self::find_with_parent_and_key_for_context()`]. - pub async fn find_for_context( - ctx: &DalContext, - context: AttributeReadContext, - ) -> AttributeValueResult> { - AttributeContextBuilder::from(context).to_context()?; - let mut rows = ctx - .txns() - .await? - .pg() - .query( - LIST_FOR_CONTEXT, - &[ctx.tenancy(), ctx.visibility(), &context], - ) - .await?; - let maybe_row = rows.pop(); - Ok(standard_model::option_object_from_row(maybe_row)?) - } - - /// Return the [`Prop`] that the [`AttributeValueId`] belongs to, - /// following the relationship through [`AttributePrototype`]. - pub async fn find_prop_for_value( - ctx: &DalContext, - attribute_value_id: AttributeValueId, - ) -> AttributeValueResult { - let row = ctx - .txns() - .await? 
- .pg() - .query_one( - FIND_PROP_FOR_VALUE, - &[ctx.tenancy(), ctx.visibility(), &attribute_value_id], - ) - .await?; - - Ok(standard_model::object_from_row(row)?) - } - - /// List [`AttributeValuePayloads`](AttributeValuePayload) for a given - /// [`context`](crate::AttributeReadContext), which must specify a - /// [`ComponentId`](crate::Component). - pub async fn list_payload_for_read_context( - ctx: &DalContext, - context: AttributeReadContext, - ) -> AttributeValueResult> { - let schema_variant_id = match context.component_id { - Some(component_id) if component_id != ComponentId::NONE => { - let component = Component::get_by_id(ctx, &component_id) - .await? - .ok_or(AttributeValueError::ComponentNotFoundById(component_id))?; - let schema_variant = component - .schema_variant(ctx) - .await - .map_err(|e| AttributeValueError::Component(e.to_string()))? - .ok_or(AttributeValueError::SchemaVariantNotFoundForComponent( - component_id, - ))?; - *schema_variant.id() - } - _ => { - return Err(AttributeValueError::MissingComponentInReadContext(context)); - } - }; - - let rows = ctx - .txns() - .await? 
- .pg() - .query( - LIST_PAYLOAD_FOR_READ_CONTEXT, - &[ - ctx.tenancy(), - ctx.visibility(), - &context, - &schema_variant_id, - ], - ) - .await?; - let mut result = Vec::new(); - for row in rows.into_iter() { - let func_binding_return_value_json: serde_json::Value = row.try_get("object")?; - let func_binding_return_value: Option = - serde_json::from_value(func_binding_return_value_json)?; - - let prop_json: serde_json::Value = row.try_get("prop_object")?; - let prop: Prop = serde_json::from_value(prop_json)?; - - let attribute_value_json: serde_json::Value = row.try_get("attribute_value_object")?; - let attribute_value: AttributeValue = serde_json::from_value(attribute_value_json)?; - - let parent_attribute_value_id: Option = - row.try_get("parent_attribute_value_id")?; - - result.push(AttributeValuePayload::new( - prop, - func_binding_return_value, - attribute_value, - parent_attribute_value_id, - )); - } - Ok(result) - } - - /// This method is similar to [`Self::list_payload_for_read_context()`], but it leverages a - /// root [`AttributeValueId`](crate::AttributeValue) in order to find payloads at any - /// root [`Prop`](crate::Prop) corresponding to the provided context and root value. - /// - /// Requirements for the [`AttributeReadContext`](crate::AttributeReadContext): - /// - [`PropId`](crate::Prop) must be set to [`None`] - /// - Both providers fields must be unset - pub async fn list_payload_for_read_context_and_root( - ctx: &DalContext, - root_attribute_value_id: AttributeValueId, - context: AttributeReadContext, - ) -> AttributeValueResult> { - if context.has_prop_id() - || !context.has_unset_internal_provider() - || !context.has_unset_external_provider() - { - return Err(AttributeValueError::IncompatibleAttributeReadContext("incompatible attribute read context for query: prop must be empty and providers must be unset")); - } - - let rows = ctx - .txns() - .await? 
- .pg() - .query( - LIST_PAYLOAD_FOR_READ_CONTEXT_AND_ROOT, - &[ - ctx.tenancy(), - ctx.visibility(), - &context, - &root_attribute_value_id, - ], - ) - .await?; - - let mut result = Vec::new(); - for row in rows.into_iter() { - let func_binding_return_value_json: serde_json::Value = row.try_get("object")?; - let func_binding_return_value: Option = - serde_json::from_value(func_binding_return_value_json)?; - - let prop_json: serde_json::Value = row.try_get("prop_object")?; - let prop: Prop = serde_json::from_value(prop_json)?; - - let attribute_value_json: serde_json::Value = row.try_get("attribute_value_object")?; - let attribute_value: AttributeValue = serde_json::from_value(attribute_value_json)?; - - let parent_attribute_value_id: Option = - row.try_get("parent_attribute_value_id")?; - - result.push(AttributeValuePayload::new( - prop, - func_binding_return_value, - attribute_value, - parent_attribute_value_id, - )); - } - Ok(result) - } - - /// Update the [`AttributeValue`] for a specific [`AttributeContext`] to the given value. If the - /// given [`AttributeValue`] is for a different [`AttributeContext`] than the one provided, a - /// new [`AttributeValue`] will be created for the given [`AttributeContext`]. - /// - /// By passing in [`None`] as the `value`, the caller is explicitly saying "this value does not - /// exist here". This is potentially useful for "tombstoning" values that have been inherited - /// from a less-specific [`AttributeContext`]. For example, if a value has been set for a - /// [`SchemaVariant`](crate::SchemaVariant), but we do not want that value to exist for a - /// specific [`Component`](crate::Component), we can update the variant's value to [`None`] in - /// an [`AttributeContext`] specific to that component. 
- /// - /// This method returns the following: - /// - the [`Option`] that was passed in - /// - the updated [`AttributeValueId`](Self) - pub async fn update_for_context( - ctx: &DalContext, - attribute_value_id: AttributeValueId, - parent_attribute_value_id: Option, - context: AttributeContext, - value: Option, - // TODO: Allow updating the key - key: Option, - ) -> AttributeValueResult<(Option, AttributeValueId)> { - Self::update_for_context_raw( - ctx, - attribute_value_id, - parent_attribute_value_id, - context, - value, - key, - true, - true, - ) - .await - } - - pub async fn update_for_context_without_propagating_dependent_values( - ctx: &DalContext, - attribute_value_id: AttributeValueId, - parent_attribute_value_id: Option, - context: AttributeContext, - value: Option, - // TODO: Allow updating the key - key: Option, - ) -> AttributeValueResult<(Option, AttributeValueId)> { - Self::update_for_context_raw( - ctx, - attribute_value_id, - parent_attribute_value_id, - context, - value, - key, - true, - false, - ) - .await - } - - pub async fn update_for_context_without_creating_proxies( - ctx: &DalContext, - attribute_value_id: AttributeValueId, - parent_attribute_value_id: Option, - context: AttributeContext, - value: Option, - // TODO: Allow updating the key - key: Option, - ) -> AttributeValueResult<(Option, AttributeValueId)> { - Self::update_for_context_raw( - ctx, - attribute_value_id, - parent_attribute_value_id, - context, - value, - key, - false, - true, - ) - .await - } - - #[allow(clippy::too_many_arguments)] - async fn update_for_context_raw( - ctx: &DalContext, - attribute_value_id: AttributeValueId, - parent_attribute_value_id: Option, - context: AttributeContext, - value: Option, - // TODO: Allow updating the key - key: Option, - create_child_proxies: bool, - propagate_dependent_values: bool, - ) -> AttributeValueResult<(Option, AttributeValueId)> { - // TODO(nick,paulo,zack,jacob): ensure we do not _have_ to do this in the future. 
- let ctx = &ctx.clone_without_deleted_visibility(); - - let row = ctx.txns() - .await? - .pg() - .query_one( - "SELECT new_attribute_value_id FROM attribute_value_update_for_context_raw_v1($1, $2, $3, $4, $5, $6, $7, $8)", - &[ - ctx.tenancy(), - ctx.visibility(), - &attribute_value_id, - &parent_attribute_value_id, - &context, - &value, - &key, - &create_child_proxies, - ], - ).await?; - - let new_attribute_value_id: AttributeValueId = row.try_get("new_attribute_value_id")?; - - // TODO(fnichol): we might want to fire off a status even at this point, however we've - // already updated the initial attribute value, so is there much value? - - if propagate_dependent_values && !ctx.no_dependent_values() { - ctx.enqueue_job(DependentValuesUpdate::new( - ctx.access_builder(), - *ctx.visibility(), - vec![new_attribute_value_id], - )) - .await?; - } - - Ok((value, new_attribute_value_id)) - } - - /// Insert a new value under the parent [`AttributeValue`] in the given [`AttributeContext`]. This is mostly only - /// useful for adding elements to a [`PropKind::Array`], or to a [`PropKind::Map`]. Updating existing values in an - /// [`Array`](PropKind::Array), or [`Map`](PropKind::Map), and setting/updating all other [`PropKind`] should be - /// able to directly use [`update_for_context()`](AttributeValue::update_for_context()), as there will already be an - /// appropriate [`AttributeValue`] to use. By using this function, - /// [`update_for_context()`](AttributeValue::update_for_context()) is called after we have created an appropriate - /// [`AttributeValue`] to use. 
- #[instrument(skip_all, level = "debug")] - pub async fn insert_for_context( - ctx: &DalContext, - item_attribute_context: AttributeContext, - array_or_map_attribute_value_id: AttributeValueId, - value: Option, - key: Option, - ) -> AttributeValueResult { - Self::insert_for_context_raw( - ctx, - item_attribute_context, - array_or_map_attribute_value_id, - value, - key, - true, - ) - .await - } - - #[instrument(skip_all, level = "debug")] - pub async fn insert_for_context_without_creating_proxies( - ctx: &DalContext, - parent_context: AttributeContext, - parent_attribute_value_id: AttributeValueId, - value: Option, - key: Option, - ) -> AttributeValueResult { - Self::insert_for_context_raw( - ctx, - parent_context, - parent_attribute_value_id, - value, - key, - false, - ) - .await - } - - #[instrument(skip_all, level = "debug")] - async fn insert_for_context_raw( - ctx: &DalContext, - item_attribute_context: AttributeContext, - array_or_map_attribute_value_id: AttributeValueId, - value: Option, - key: Option, - create_child_proxies: bool, - ) -> AttributeValueResult { - let row = ctx.txns().await?.pg().query_one( - "SELECT new_attribute_value_id FROM attribute_value_insert_for_context_raw_v1($1, $2, $3, $4, $5, $6, $7)", - &[ - ctx.tenancy(), - ctx.visibility(), - &item_attribute_context, - &array_or_map_attribute_value_id, - &value, - &key, - &create_child_proxies, - ], - ).await?; - - let new_attribute_value_id: AttributeValueId = row.try_get("new_attribute_value_id")?; - - if !ctx.no_dependent_values() { - ctx.enqueue_job(DependentValuesUpdate::new( - ctx.access_builder(), - *ctx.visibility(), - vec![new_attribute_value_id], - )) - .await?; - } - - Ok(new_attribute_value_id) - } - - #[instrument(skip_all, level = "debug")] - pub async fn update_parent_index_map(&self, ctx: &DalContext) -> AttributeValueResult<()> { - let _row = ctx - .txns() - .await? 
- .pg() - .query( - "SELECT attribute_value_update_parent_index_map_v1($1, $2, $3)", - &[ctx.tenancy(), ctx.visibility(), &self.id], - ) - .await?; - - Ok(()) - } - - async fn populate_nested_values( - ctx: &DalContext, - parent_attribute_value_id: AttributeValueId, - update_context: AttributeContext, - unprocessed_value: serde_json::Value, - ) -> AttributeValueResult<()> { - let _row = ctx - .txns() - .await? - .pg() - .query( - "SELECT attribute_value_populate_nested_values_v1($1, $2, $3, $4, $5)", - &[ - ctx.tenancy(), - ctx.visibility(), - &parent_attribute_value_id, - &update_context, - &unprocessed_value, - ], - ) - .await?; - - Ok(()) - } - - /// Convenience method to determine if this [`AttributeValue`](Self) is for the implicit - /// [`InternalProvider`](crate::InternalProvider) that represents the "snapshot" of the entire - /// [`Component`](crate::Component). This means that the [`Prop`](crate::Prop) that the - /// [`InternalProvider`](crate::InternalProvider) is sourcing its data from does not have a - /// parent [`Prop`](crate::Prop). - #[allow(unused)] - async fn is_for_internal_provider_of_root_prop( - &mut self, - ctx: &DalContext, - ) -> AttributeValueResult { - let maybe_row = ctx - .txns() - .await? - .pg() - .query_opt( - IS_FOR_INTERNAL_PROVIDER_OF_ROOT_PROP, - &[&ctx.tenancy(), ctx.visibility(), &self.context], - ) - .await?; - if let Some(row) = maybe_row { - // If we got a row back, that means that we are an AttributeValue for an InternalProvider, - // and we should have gotten a row back from the query. - Ok(row.try_get("is_for_root_prop")?) - } else { - // If we didn't get a row back, that means that we didn't find an InternalProvider for the - // InternalProviderId in our AttributeContext. Likely because it is ident_nil_v1, indicating that we're - // not for an InternalProvider at all. 
- Ok(false) - } - } - - #[instrument(skip(ctx), level = "debug")] - pub async fn create_dependent_values( - ctx: &DalContext, - attribute_value_ids: &[AttributeValueId], - ) -> AttributeValueResult<()> { - ctx.txns() - .await? - .pg() - .execute( - "SELECT attribute_value_create_new_affected_values_v1($1, $2, $3)", - &[&ctx.tenancy(), &ctx.visibility(), &attribute_value_ids], - ) - .await?; - Ok(()) - } - - /// Returns a [`HashMap`] with key [`AttributeValueId`](Self) and value - /// [`Vec`](Self) where the keys correspond to [`AttributeValues`](Self) that - /// are affected (directly and indirectly) by at least one of the provided - /// [`AttributeValueIds`](Self) having a new value. The [`Vec`](Self) - /// correspond to the [`AttributeValues`](Self) that the key directly depends on that are also - /// affected by at least one of the provided [`AttributeValueIds`](Self) having a new value. - /// - /// **NOTE**: This has the side effect of **CREATING NEW [`AttributeValues`](Self)** - /// if this [`AttributeValue`] affects an [`AttributeContext`](crate::AttributeContext) where an - /// [`AttributePrototype`](crate::AttributePrototype) that uses it didn't already have an - /// [`AttributeValue`]. - #[instrument(skip(ctx), level = "debug")] - pub async fn dependent_value_graph( - ctx: &DalContext, - attribute_value_ids: &[AttributeValueId], - ) -> AttributeValueResult>> { - let rows = ctx - .txns() - .await? 
- .pg() - .query( - FETCH_UPDATE_GRAPH_DATA, - &[&ctx.tenancy(), ctx.visibility(), &attribute_value_ids], - ) - .instrument(debug_span!("Graph SQL query")) - .await?; - - let mut result: HashMap> = HashMap::new(); - for row in rows.into_iter() { - let attr_val_id: AttributeValueId = row.try_get("attribute_value_id")?; - let dependencies: Vec = - row.try_get("dependent_attribute_value_ids")?; - result.insert(attr_val_id, dependencies); + pub fn assemble(id: AttributeValueId, inner: &AttributeValueContentV1) -> Self { + let inner = inner.to_owned(); + Self { + id, + timestamp: inner.timestamp, + value: inner.value, + unprocessed_value: inner.unprocessed_value, } - - Ok(result) - } - - pub async fn vivify_value_and_parent_values( - &self, - ctx: &DalContext, - ) -> AttributeValueResult { - let row = ctx.txns().await?.pg().query_one( - "SELECT new_attribute_value_id FROM attribute_value_vivify_value_and_parent_values_raw_v1($1, $2, $3, $4, $5)", - &[ - ctx.tenancy(), - ctx.visibility(), - &self.context, - &self.id, - &true - ]).await?; - - Ok(row.try_get("new_attribute_value_id")?) } +} - /// Re-evaluates the current `AttributeValue`'s `AttributePrototype` to update the - /// `FuncBinding`, and `FuncBindingReturnValue`, reflecting the current inputs to - /// the function. - /// - /// If the `AttributeValue` represents the `InternalProvider` for a `Prop` that - /// does not have a parent `Prop` (this is typically the `InternalProvider` for - /// the "root" `Prop` of a `SchemaVariant`), then it will also enqueue a - /// `CodeGeneration` job for the `Component`. 
- #[instrument( - name = "attribute_value.update_from_prototype_function", - skip_all, - level = "debug", - fields( - attribute_value.id = %self.id, - change_set_pk = %ctx.visibility().change_set_pk, - ) - )] - pub async fn update_from_prototype_function( - &mut self, - ctx: &DalContext, - ) -> AttributeValueResult<()> { - // Check if this AttributeValue is for an implicit InternalProvider as they have special behavior that doesn't involve - // AttributePrototype and AttributePrototypeArguments. - if self - .context - .is_least_specific_field_kind_internal_provider()? - { - let internal_provider = - InternalProvider::get_by_id(ctx, &self.context.internal_provider_id()) - .await? - .ok_or_else(|| { - AttributeValueError::InternalProviderNotFound( - self.context.internal_provider_id(), - ) - })?; - if internal_provider.is_internal_consumer() { - // We don't care about the AttributeValue that comes back from implicit_emit, since we should already be - // operating on an AttributeValue that has the correct AttributeContext, which means that a new one should - // not need to be created. - internal_provider - .implicit_emit(ctx, self) - .await - .map_err(|e| AttributeValueError::InternalProvider(e.to_string()))?; - - debug!("InternalProvider is internal consumer"); - - return Ok(()); - } - } else if self.context.is_least_specific_field_kind_prop()? { - if let Some(parent_attribute_value) = self.parent_attribute_value(ctx).await? { - parent_attribute_value - .vivify_value_and_parent_values(ctx) - .await?; - } - } - - // The following should handle explicit "normal" Attributes, InternalProviders, and ExternalProviders already. - let attribute_prototype = self.attribute_prototype(ctx).await?.ok_or_else(|| { - AttributeValueError::AttributePrototypeNotFound(self.id, *ctx.visibility()) - })?; - - // Note(victor): Secrets should never be passed to functions as arguments directly. 
- // We detect if they're set as dependencies and later fetch before functions to execute - // This is so secret values still trigger the dependent values system, - // and before functions are only called when necessary - let mut has_secrets_as_arg = false; - let mut func_binding_args: HashMap> = HashMap::new(); - for mut argument_data in attribute_prototype - .argument_values(ctx, self.context) - .await - .map_err(|e| AttributeValueError::AttributePrototype(e.to_string()))? - { - if argument_data.argument_name == "secrets" { - has_secrets_as_arg = true; - continue; - } - - match argument_data.values.len() { - 1 => { - let argument = argument_data.values.pop().ok_or_else(|| { - AttributeValueError::EmptyAttributePrototypeArgumentsForGroup( - argument_data.argument_name.clone(), - ) - })?; - - func_binding_args.insert( - argument_data.argument_name, - Some(serde_json::to_value(argument)?), - ); - } - 2.. => { - func_binding_args.insert( - argument_data.argument_name, - Some(serde_json::to_value(argument_data.values)?), - ); - } - _ => { - return Err( - AttributeValueError::EmptyAttributePrototypeArgumentsForGroup( - argument_data.argument_name, - ), - ); - } - }; - } +#[derive(Debug, PartialEq)] +pub struct AttributeValueGraphNode { + id: AttributeValueId, + content_address: ContentAddress, + content: AttributeValueContentV1, +} - let func_id = attribute_prototype.func_id(); - - let before = if has_secrets_as_arg { - // We need the associated [`ComponentId`] for this function--this is how we resolve and - // prepare before functions - let associated_component_id = self.context.component_id(); - - before_funcs_for_component(ctx, &associated_component_id).await? 
- } else { - vec![] - }; - - let (func_binding, mut func_binding_return_value) = match FuncBinding::create_and_execute( - ctx, - serde_json::to_value(func_binding_args.clone())?, - attribute_prototype.func_id(), - before, - ) - .instrument(debug_span!( - "Func execution", - "func.id" = %func_id, - ?func_binding_args, - )) - .await - { - Ok(function_return_value) => function_return_value, - Err(FuncBindingError::FuncBackendResultFailure { - kind, - message, - backend, - }) => { - return Err(AttributeValueError::FuncBackendResultFailure { - kind, - message, - backend, - }) - } - Err(err) => Err(err)?, - }; - - self.set_func_binding_id(ctx, *func_binding.id()).await?; - self.set_func_binding_return_value_id(ctx, *func_binding_return_value.id()) - .await?; - - // If the value we just updated was for a Prop, we might have run a function that - // generates a deep data structure. If the Prop is an Array/Map/Object, then the - // value should be an empty Array/Map/Object, while the unprocessed value contains - // the deep data structure. - if self.context.is_least_specific_field_kind_prop()? { - let processed_value = match func_binding_return_value.unprocessed_value().cloned() { - Some(unprocessed_value) => { - let prop = Prop::get_by_id(ctx, &self.context.prop_id()) - .await? - .ok_or_else(|| AttributeValueError::PropNotFound(self.context.prop_id()))?; - - match prop.kind() { - PropKind::Object | PropKind::Map => Some(serde_json::json!({})), - PropKind::Array => Some(serde_json::json!([])), - _ => Some(unprocessed_value), - } - } - None => None, - }; - - func_binding_return_value - .set_value(ctx, processed_value) - .await?; - }; - // If they are different from each other, then we know - // that we need to fully process the deep data structure, populating - // AttributeValues for the child Props. - // cannot be si:setArray / si:setMap / si:setObject - if self.context.prop_id() != PropId::NONE { - let prop = Prop::get_by_id(ctx, &self.context.prop_id()) - .await? 
- .ok_or_else(|| AttributeValueError::PropNotFound(self.context.prop_id()))?; - - if *prop.kind() == PropKind::Array - || *prop.kind() == PropKind::Object - || *prop.kind() == PropKind::Map - { - let func_name = match *prop.kind() { - PropKind::Array => "si:setArray", - PropKind::Object => "si:setObject", - PropKind::Map => "si:setMap", - _ => unreachable!(), - }; - - let func = Func::find_by_attr(ctx, "name", &func_name) - .await? - .pop() - .ok_or_else(|| AttributeValueError::MissingFunc(func_name.to_owned()))?; - - if attribute_prototype.func_id() != *func.id() { - if let Some(unprocessed_value) = - func_binding_return_value.unprocessed_value().cloned() - { - AttributeValue::populate_nested_values( - ctx, - self.id, - self.context, - unprocessed_value, - ) - .await?; - } - } - } - } +#[derive(EnumDiscriminants, Serialize, Deserialize, PartialEq)] +#[serde(tag = "version")] +pub enum AttributeValueContent { + V1(AttributeValueContentV1), +} - Ok(()) - } +#[derive(Debug, Clone, Deserialize, Serialize, PartialEq)] +pub struct AttributeValueContentV1 { + #[serde(flatten)] + pub timestamp: Timestamp, + /// The unprocessed return value is the "real" result, unprocessed for any other behavior. + /// This is potentially-maybe-only-kinda-sort-of(?) useful for non-scalar values. + /// Example: a populated array. + pub unprocessed_value: Option, + /// The processed return value. + /// Example: empty array. + pub value: Option, +} - pub async fn populate_child_proxies_for_value( - &self, - ctx: &DalContext, - less_specific_attribute_value_id: AttributeValueId, - more_specific_context: AttributeContext, - ) -> AttributeValueResult>> { - let row = ctx.txns().await?.pg().query_one( - "SELECT new_proxy_value_ids FROM attribute_value_populate_child_proxies_for_value_v1($1, $2, $3, $4, $5)", - &[ - ctx.tenancy(), - ctx.visibility(), - &less_specific_attribute_value_id, - &more_specific_context, - self.id(), - ] - ).await?; - - // Are we part of a map or array? 
Be sure to update the index map - if self.key.is_some() { - ctx.txns() - .await? - .pg() - .query_opt( - "SELECT * FROM attribute_value_update_parent_index_map_v1($1, $2, $3)", - &[ctx.tenancy(), ctx.visibility(), self.id()], - ) - .await?; +impl From for AttributeValueContentV1 { + fn from(value: AttributeValue) -> Self { + Self { + timestamp: value.timestamp, + value: value.value, + unprocessed_value: value.unprocessed_value, } - - Ok(row.try_get("new_proxy_value_ids")?) } } -#[derive(Debug, Clone)] -pub struct AttributeValuePayload { - pub prop: Prop, - pub func_binding_return_value: Option, - pub attribute_value: AttributeValue, - pub parent_attribute_value_id: Option, -} - -impl AttributeValuePayload { - pub fn new( - prop: Prop, - func_binding_return_value: Option, - attribute_value: AttributeValue, - parent_attribute_value_id: Option, +impl AttributeValueGraphNode { + pub fn assemble( + id: impl Into, + content_hash: ContentHash, + content: AttributeValueContentV1, ) -> Self { Self { - prop, - func_binding_return_value, - attribute_value, - parent_attribute_value_id, + id: id.into(), + content_address: ContentAddress::AttributeValue(content_hash), + content, } } } + +// impl AttributeValue { +// standard_model_accessor!( +// proxy_for_attribute_value_id, +// Option, +// AttributeValueResult +// ); +// standard_model_accessor!(sealed_proxy, bool, AttributeValueResult); +// standard_model_accessor!(func_binding_id, Pk(FuncBindingId), AttributeValueResult); +// standard_model_accessor!( +// func_binding_return_value_id, +// Pk(FuncBindingReturnValueId), +// AttributeValueResult +// ); +// standard_model_accessor!(index_map, Option, AttributeValueResult); +// standard_model_accessor!(key, Option, AttributeValueResult); + +// standard_model_belongs_to!( +// lookup_fn: parent_attribute_value, +// set_fn: set_parent_attribute_value_unchecked, +// unset_fn: unset_parent_attribute_value, +// table: "attribute_value_belongs_to_attribute_value", +// model_table: 
"attribute_values", +// belongs_to_id: AttributeValueId, +// returns: AttributeValue, +// result: AttributeValueResult, +// ); + +// standard_model_has_many!( +// lookup_fn: child_attribute_values, +// table: "attribute_value_belongs_to_attribute_value", +// model_table: "attribute_values", +// returns: AttributeValue, +// result: AttributeValueResult, +// ); + +// standard_model_belongs_to!( +// lookup_fn: attribute_prototype, +// set_fn: set_attribute_prototype, +// unset_fn: unset_attribute_prototype, +// table: "attribute_value_belongs_to_attribute_prototype", +// model_table: "attribute_prototypes", +// belongs_to_id: AttributePrototypeId, +// returns: AttributePrototype, +// result: AttributeValueResult, +// ); + +// pub fn index_map_mut(&mut self) -> Option<&mut IndexMap> { +// self.index_map.as_mut() +// } + +// /// Returns the *unprocessed* [`serde_json::Value`] within the [`FuncBindingReturnValue`](crate::FuncBindingReturnValue) +// /// corresponding to the field on [`Self`]. +// pub async fn get_unprocessed_value( +// &self, +// ctx: &DalContext, +// ) -> AttributeValueResult> { +// match FuncBindingReturnValue::get_by_id(ctx, &self.func_binding_return_value_id).await? { +// Some(func_binding_return_value) => { +// Ok(func_binding_return_value.unprocessed_value().cloned()) +// } +// None => Err(AttributeValueError::MissingFuncBindingReturnValue), +// } +// } + +// /// Returns the [`serde_json::Value`] within the [`FuncBindingReturnValue`](crate::FuncBindingReturnValue) +// /// corresponding to the field on [`Self`]. +// pub async fn get_value( +// &self, +// ctx: &DalContext, +// ) -> AttributeValueResult> { +// match FuncBindingReturnValue::get_by_id(ctx, &self.func_binding_return_value_id).await? 
{ +// Some(func_binding_return_value) => Ok(func_binding_return_value.value().cloned()), +// None => Err(AttributeValueError::MissingFuncBindingReturnValue), +// } +// } + +// pub async fn update_stored_index_map(&self, ctx: &DalContext) -> AttributeValueResult<()> { +// standard_model::update( +// ctx, +// "attribute_values", +// "index_map", +// self.id(), +// &self.index_map, +// TypeHint::JsonB, +// ) +// .await?; +// Ok(()) +// } + +// /// Returns a list of child [`AttributeValues`](crate::AttributeValue) for a given +// /// [`AttributeValue`] and [`AttributeReadContext`](crate::AttributeReadContext). +// pub async fn child_attribute_values_for_context( +// ctx: &DalContext, +// attribute_value_id: AttributeValueId, +// attribute_read_context: AttributeReadContext, +// ) -> AttributeValueResult> { +// let rows = ctx +// .txns() +// .await? +// .pg() +// .query( +// CHILD_ATTRIBUTE_VALUES_FOR_CONTEXT, +// &[ +// ctx.tenancy(), +// ctx.visibility(), +// &attribute_value_id, +// &attribute_read_context, +// ], +// ) +// .await?; + +// Ok(standard_model::objects_from_rows(rows)?) +// } + +// pub async fn find_with_parent_and_prototype_for_context( +// ctx: &DalContext, +// parent_attribute_value_id: Option, +// attribute_prototype_id: AttributePrototypeId, +// context: AttributeContext, +// ) -> AttributeValueResult> { +// let row = ctx +// .txns() +// .await? +// .pg() +// .query_opt( +// FIND_WITH_PARENT_AND_PROTOTYPE_FOR_CONTEXT, +// &[ +// ctx.tenancy(), +// ctx.visibility(), +// &context, +// &attribute_prototype_id, +// &parent_attribute_value_id, +// ], +// ) +// .await?; + +// Ok(standard_model::option_object_from_row(row)?) +// } + +// /// Find [`Self`] with a given parent value and key. +// pub async fn find_with_parent_and_key_for_context( +// ctx: &DalContext, +// parent_attribute_value_id: Option, +// key: Option, +// context: AttributeReadContext, +// ) -> AttributeValueResult> { +// let row = ctx +// .txns() +// .await? 
+// .pg() +// .query_opt( +// FIND_WITH_PARENT_AND_KEY_FOR_CONTEXT, +// &[ +// ctx.tenancy(), +// ctx.visibility(), +// &context, +// &parent_attribute_value_id, +// &key, +// ], +// ) +// .await?; + +// Ok(standard_model::option_object_from_row(row)?) +// } + +// /// List [`AttributeValues`](crate::AttributeValue) for a provided +// /// [`AttributeReadContext`](crate::AttributeReadContext). +// /// +// /// If you only anticipate one result to be returned and have an +// /// [`AttributeReadContext`](crate::AttributeReadContext) +// /// that is also a valid [`AttributeContext`](crate::AttributeContext), then you should use +// /// [`Self::find_for_context()`] instead of this method. +// /// +// /// This does _not_ work for maps and arrays, barring the _first_ instance of the array or map +// /// object themselves! For those objects, please use +// /// [`Self::find_with_parent_and_key_for_context()`]. +// pub async fn list_for_context( +// ctx: &DalContext, +// context: AttributeReadContext, +// ) -> AttributeValueResult> { +// let rows = ctx +// .txns() +// .await? +// .pg() +// .query( +// LIST_FOR_CONTEXT, +// &[ctx.tenancy(), ctx.visibility(), &context], +// ) +// .await?; +// Ok(standard_model::objects_from_rows(rows)?) +// } + +// /// Find one [`AttributeValue`](crate::AttributeValue) for a provided +// /// [`AttributeReadContext`](crate::AttributeReadContext). +// /// +// /// This is a modified version of [`Self::list_for_context()`] that requires an +// /// [`AttributeReadContext`](crate::AttributeReadContext) +// /// that is also a valid [`AttributeContext`](crate::AttributeContext) _and_ "pops" the first +// /// row off the rows found (which are sorted from most to least specific). Thus, the "popped" +// /// row will corresponding to the most specific [`AttributeValue`] found. +// /// +// /// This does _not_ work for maps and arrays, barring the _first_ instance of the array or map +// /// object themselves! 
For those objects, please use +// /// [`Self::find_with_parent_and_key_for_context()`]. +// pub async fn find_for_context( +// ctx: &DalContext, +// context: AttributeReadContext, +// ) -> AttributeValueResult> { +// AttributeContextBuilder::from(context).to_context()?; +// let mut rows = ctx +// .txns() +// .await? +// .pg() +// .query( +// LIST_FOR_CONTEXT, +// &[ctx.tenancy(), ctx.visibility(), &context], +// ) +// .await?; +// let maybe_row = rows.pop(); +// Ok(standard_model::option_object_from_row(maybe_row)?) +// } + +// /// Return the [`Prop`] that the [`AttributeValueId`] belongs to, +// /// following the relationship through [`AttributePrototype`]. +// pub async fn find_prop_for_value( +// ctx: &DalContext, +// attribute_value_id: AttributeValueId, +// ) -> AttributeValueResult { +// let row = ctx +// .txns() +// .await? +// .pg() +// .query_one( +// FIND_PROP_FOR_VALUE, +// &[ctx.tenancy(), ctx.visibility(), &attribute_value_id], +// ) +// .await?; + +// Ok(standard_model::object_from_row(row)?) +// } + +// /// List [`AttributeValuePayloads`](AttributeValuePayload) for a given +// /// [`context`](crate::AttributeReadContext), which must specify a +// /// [`ComponentId`](crate::Component). +// pub async fn list_payload_for_read_context( +// ctx: &DalContext, +// context: AttributeReadContext, +// ) -> AttributeValueResult> { +// let schema_variant_id = match context.component_id { +// Some(component_id) if component_id != ComponentId::NONE => { +// let component = Component::get_by_id(ctx, &component_id) +// .await? +// .ok_or(AttributeValueError::ComponentNotFoundById(component_id))?; +// let schema_variant = component +// .schema_variant(ctx) +// .await +// .map_err(|e| AttributeValueError::Component(e.to_string()))? 
+// .ok_or(AttributeValueError::SchemaVariantNotFoundForComponent( +// component_id, +// ))?; +// *schema_variant.id() +// } +// _ => { +// return Err(AttributeValueError::MissingComponentInReadContext(context)); +// } +// }; + +// let rows = ctx +// .txns() +// .await? +// .pg() +// .query( +// LIST_PAYLOAD_FOR_READ_CONTEXT, +// &[ +// ctx.tenancy(), +// ctx.visibility(), +// &context, +// &schema_variant_id, +// ], +// ) +// .await?; +// let mut result = Vec::new(); +// for row in rows.into_iter() { +// let func_binding_return_value_json: serde_json::Value = row.try_get("object")?; +// let func_binding_return_value: Option = +// serde_json::from_value(func_binding_return_value_json)?; + +// let prop_json: serde_json::Value = row.try_get("prop_object")?; +// let prop: Prop = serde_json::from_value(prop_json)?; + +// let attribute_value_json: serde_json::Value = row.try_get("attribute_value_object")?; +// let attribute_value: AttributeValue = serde_json::from_value(attribute_value_json)?; + +// let parent_attribute_value_id: Option = +// row.try_get("parent_attribute_value_id")?; + +// result.push(AttributeValuePayload::new( +// prop, +// func_binding_return_value, +// attribute_value, +// parent_attribute_value_id, +// )); +// } +// Ok(result) +// } + +// /// This method is similar to [`Self::list_payload_for_read_context()`], but it leverages a +// /// root [`AttributeValueId`](crate::AttributeValue) in order to find payloads at any +// /// root [`Prop`](crate::Prop) corresponding to the provided context and root value. 
+// /// +// /// Requirements for the [`AttributeReadContext`](crate::AttributeReadContext): +// /// - [`PropId`](crate::Prop) must be set to [`None`] +// /// - Both providers fields must be unset +// pub async fn list_payload_for_read_context_and_root( +// ctx: &DalContext, +// root_attribute_value_id: AttributeValueId, +// context: AttributeReadContext, +// ) -> AttributeValueResult> { +// if context.has_prop_id() +// || !context.has_unset_internal_provider() +// || !context.has_unset_external_provider() +// { +// return Err(AttributeValueError::IncompatibleAttributeReadContext("incompatible attribute read context for query: prop must be empty and providers must be unset")); +// } + +// let rows = ctx +// .txns() +// .await? +// .pg() +// .query( +// LIST_PAYLOAD_FOR_READ_CONTEXT_AND_ROOT, +// &[ +// ctx.tenancy(), +// ctx.visibility(), +// &context, +// &root_attribute_value_id, +// ], +// ) +// .await?; + +// let mut result = Vec::new(); +// for row in rows.into_iter() { +// let func_binding_return_value_json: serde_json::Value = row.try_get("object")?; +// let func_binding_return_value: Option = +// serde_json::from_value(func_binding_return_value_json)?; + +// let prop_json: serde_json::Value = row.try_get("prop_object")?; +// let prop: Prop = serde_json::from_value(prop_json)?; + +// let attribute_value_json: serde_json::Value = row.try_get("attribute_value_object")?; +// let attribute_value: AttributeValue = serde_json::from_value(attribute_value_json)?; + +// let parent_attribute_value_id: Option = +// row.try_get("parent_attribute_value_id")?; + +// result.push(AttributeValuePayload::new( +// prop, +// func_binding_return_value, +// attribute_value, +// parent_attribute_value_id, +// )); +// } +// Ok(result) +// } + +// /// Update the [`AttributeValue`] for a specific [`AttributeContext`] to the given value. 
If the +// /// given [`AttributeValue`] is for a different [`AttributeContext`] than the one provided, a +// /// new [`AttributeValue`] will be created for the given [`AttributeContext`]. +// /// +// /// By passing in [`None`] as the `value`, the caller is explicitly saying "this value does not +// /// exist here". This is potentially useful for "tombstoning" values that have been inherited +// /// from a less-specific [`AttributeContext`]. For example, if a value has been set for a +// /// [`SchemaVariant`](crate::SchemaVariant), but we do not want that value to exist for a +// /// specific [`Component`](crate::Component), we can update the variant's value to [`None`] in +// /// an [`AttributeContext`] specific to that component. +// /// +// /// This method returns the following: +// /// - the [`Option`] that was passed in +// /// - the updated [`AttributeValueId`](Self) +// pub async fn update_for_context( +// ctx: &DalContext, +// attribute_value_id: AttributeValueId, +// parent_attribute_value_id: Option, +// context: AttributeContext, +// value: Option, +// // TODO: Allow updating the key +// key: Option, +// ) -> AttributeValueResult<(Option, AttributeValueId)> { +// Self::update_for_context_raw( +// ctx, +// attribute_value_id, +// parent_attribute_value_id, +// context, +// value, +// key, +// true, +// true, +// ) +// .await +// } + +// pub async fn update_for_context_without_propagating_dependent_values( +// ctx: &DalContext, +// attribute_value_id: AttributeValueId, +// parent_attribute_value_id: Option, +// context: AttributeContext, +// value: Option, +// // TODO: Allow updating the key +// key: Option, +// ) -> AttributeValueResult<(Option, AttributeValueId)> { +// Self::update_for_context_raw( +// ctx, +// attribute_value_id, +// parent_attribute_value_id, +// context, +// value, +// key, +// true, +// false, +// ) +// .await +// } + +// pub async fn update_for_context_without_creating_proxies( +// ctx: &DalContext, +// attribute_value_id: 
AttributeValueId, +// parent_attribute_value_id: Option, +// context: AttributeContext, +// value: Option, +// // TODO: Allow updating the key +// key: Option, +// ) -> AttributeValueResult<(Option, AttributeValueId)> { +// Self::update_for_context_raw( +// ctx, +// attribute_value_id, +// parent_attribute_value_id, +// context, +// value, +// key, +// false, +// true, +// ) +// .await +// } + +// #[allow(clippy::too_many_arguments)] +// async fn update_for_context_raw( +// ctx: &DalContext, +// attribute_value_id: AttributeValueId, +// parent_attribute_value_id: Option, +// context: AttributeContext, +// value: Option, +// // TODO: Allow updating the key +// key: Option, +// create_child_proxies: bool, +// propagate_dependent_values: bool, +// ) -> AttributeValueResult<(Option, AttributeValueId)> { +// // TODO(nick,paulo,zack,jacob): ensure we do not _have_ to do this in the future. +// let ctx = &ctx.clone_without_deleted_visibility(); + +// let row = ctx.txns() +// .await? +// .pg() +// .query_one( +// "SELECT new_attribute_value_id FROM attribute_value_update_for_context_raw_v1($1, $2, $3, $4, $5, $6, $7, $8)", +// &[ +// ctx.tenancy(), +// ctx.visibility(), +// &attribute_value_id, +// &parent_attribute_value_id, +// &context, +// &value, +// &key, +// &create_child_proxies, +// ], +// ).await?; + +// let new_attribute_value_id: AttributeValueId = row.try_get("new_attribute_value_id")?; + +// // TODO(fnichol): we might want to fire off a status even at this point, however we've +// // already updated the initial attribute value, so is there much value? + +// if propagate_dependent_values && !ctx.no_dependent_values() { +// ctx.enqueue_job(DependentValuesUpdate::new( +// ctx.access_builder(), +// *ctx.visibility(), +// vec![new_attribute_value_id], +// )) +// .await?; +// } + +// Ok((value, new_attribute_value_id)) +// } + +// /// Insert a new value under the parent [`AttributeValue`] in the given [`AttributeContext`]. 
This is mostly only +// /// useful for adding elements to a [`PropKind::Array`], or to a [`PropKind::Map`]. Updating existing values in an +// /// [`Array`](PropKind::Array), or [`Map`](PropKind::Map), and setting/updating all other [`PropKind`] should be +// /// able to directly use [`update_for_context()`](AttributeValue::update_for_context()), as there will already be an +// /// appropriate [`AttributeValue`] to use. By using this function, +// /// [`update_for_context()`](AttributeValue::update_for_context()) is called after we have created an appropriate +// /// [`AttributeValue`] to use. +// #[instrument(skip_all, level = "debug")] +// pub async fn insert_for_context( +// ctx: &DalContext, +// item_attribute_context: AttributeContext, +// array_or_map_attribute_value_id: AttributeValueId, +// value: Option, +// key: Option, +// ) -> AttributeValueResult { +// Self::insert_for_context_raw( +// ctx, +// item_attribute_context, +// array_or_map_attribute_value_id, +// value, +// key, +// true, +// ) +// .await +// } + +// #[instrument(skip_all, level = "debug")] +// pub async fn insert_for_context_without_creating_proxies( +// ctx: &DalContext, +// parent_context: AttributeContext, +// parent_attribute_value_id: AttributeValueId, +// value: Option, +// key: Option, +// ) -> AttributeValueResult { +// Self::insert_for_context_raw( +// ctx, +// parent_context, +// parent_attribute_value_id, +// value, +// key, +// false, +// ) +// .await +// } + +// #[instrument(skip_all, level = "debug")] +// async fn insert_for_context_raw( +// ctx: &DalContext, +// item_attribute_context: AttributeContext, +// array_or_map_attribute_value_id: AttributeValueId, +// value: Option, +// key: Option, +// create_child_proxies: bool, +// ) -> AttributeValueResult { +// let row = ctx.txns().await?.pg().query_one( +// "SELECT new_attribute_value_id FROM attribute_value_insert_for_context_raw_v1($1, $2, $3, $4, $5, $6, $7)", +// &[ +// ctx.tenancy(), +// ctx.visibility(), +// 
&item_attribute_context, +// &array_or_map_attribute_value_id, +// &value, +// &key, +// &create_child_proxies, +// ], +// ).await?; + +// let new_attribute_value_id: AttributeValueId = row.try_get("new_attribute_value_id")?; + +// if !ctx.no_dependent_values() { +// ctx.enqueue_job(DependentValuesUpdate::new( +// ctx.access_builder(), +// *ctx.visibility(), +// vec![new_attribute_value_id], +// )) +// .await?; +// } + +// Ok(new_attribute_value_id) +// } + +// #[instrument(skip_all, level = "debug")] +// pub async fn update_parent_index_map(&self, ctx: &DalContext) -> AttributeValueResult<()> { +// let _row = ctx +// .txns() +// .await? +// .pg() +// .query( +// "SELECT attribute_value_update_parent_index_map_v1($1, $2, $3)", +// &[ctx.tenancy(), ctx.visibility(), &self.id], +// ) +// .await?; + +// Ok(()) +// } + +// async fn populate_nested_values( +// ctx: &DalContext, +// parent_attribute_value_id: AttributeValueId, +// update_context: AttributeContext, +// unprocessed_value: serde_json::Value, +// ) -> AttributeValueResult<()> { +// let _row = ctx +// .txns() +// .await? +// .pg() +// .query( +// "SELECT attribute_value_populate_nested_values_v1($1, $2, $3, $4, $5)", +// &[ +// ctx.tenancy(), +// ctx.visibility(), +// &parent_attribute_value_id, +// &update_context, +// &unprocessed_value, +// ], +// ) +// .await?; + +// Ok(()) +// } + +// /// Convenience method to determine if this [`AttributeValue`](Self) is for the implicit +// /// [`InternalProvider`](crate::InternalProvider) that represents the "snapshot" of the entire +// /// [`Component`](crate::Component). This means that the [`Prop`](crate::Prop) that the +// /// [`InternalProvider`](crate::InternalProvider) is sourcing its data from does not have a +// /// parent [`Prop`](crate::Prop). +// #[allow(unused)] +// async fn is_for_internal_provider_of_root_prop( +// &mut self, +// ctx: &DalContext, +// ) -> AttributeValueResult { +// let maybe_row = ctx +// .txns() +// .await? 
+// .pg() +// .query_opt( +// IS_FOR_INTERNAL_PROVIDER_OF_ROOT_PROP, +// &[&ctx.tenancy(), ctx.visibility(), &self.context], +// ) +// .await?; +// if let Some(row) = maybe_row { +// // If we got a row back, that means that we are an AttributeValue for an InternalProvider, +// // and we should have gotten a row back from the query. +// Ok(row.try_get("is_for_root_prop")?) +// } else { +// // If we didn't get a row back, that means that we didn't find an InternalProvider for the +// // InternalProviderId in our AttributeContext. Likely because it is ident_nil_v1, indicating that we're +// // not for an InternalProvider at all. +// Ok(false) +// } +// } + +// #[instrument(skip(ctx), level = "debug")] +// pub async fn create_dependent_values( +// ctx: &DalContext, +// attribute_value_ids: &[AttributeValueId], +// ) -> AttributeValueResult<()> { +// ctx.txns() +// .await? +// .pg() +// .execute( +// "SELECT attribute_value_create_new_affected_values_v1($1, $2, $3)", +// &[&ctx.tenancy(), &ctx.visibility(), &attribute_value_ids], +// ) +// .await?; +// Ok(()) +// } + +// /// Returns a [`HashMap`] with key [`AttributeValueId`](Self) and value +// /// [`Vec`](Self) where the keys correspond to [`AttributeValues`](Self) that +// /// are affected (directly and indirectly) by at least one of the provided +// /// [`AttributeValueIds`](Self) having a new value. The [`Vec`](Self) +// /// correspond to the [`AttributeValues`](Self) that the key directly depends on that are also +// /// affected by at least one of the provided [`AttributeValueIds`](Self) having a new value. +// /// +// /// **NOTE**: This has the side effect of **CREATING NEW [`AttributeValues`](Self)** +// /// if this [`AttributeValue`] affects an [`AttributeContext`](crate::AttributeContext) where an +// /// [`AttributePrototype`](crate::AttributePrototype) that uses it didn't already have an +// /// [`AttributeValue`]. 
+// #[instrument(skip(ctx), level = "debug")] +// pub async fn dependent_value_graph( +// ctx: &DalContext, +// attribute_value_ids: &[AttributeValueId], +// ) -> AttributeValueResult>> { +// let rows = ctx +// .txns() +// .await? +// .pg() +// .query( +// FETCH_UPDATE_GRAPH_DATA, +// &[&ctx.tenancy(), ctx.visibility(), &attribute_value_ids], +// ) +// .instrument(debug_span!("Graph SQL query")) +// .await?; + +// let mut result: HashMap> = HashMap::new(); +// for row in rows.into_iter() { +// let attr_val_id: AttributeValueId = row.try_get("attribute_value_id")?; +// let dependencies: Vec = +// row.try_get("dependent_attribute_value_ids")?; +// result.insert(attr_val_id, dependencies); +// } + +// Ok(result) +// } + +// pub async fn vivify_value_and_parent_values( +// &self, +// ctx: &DalContext, +// ) -> AttributeValueResult { +// let row = ctx.txns().await?.pg().query_one( +// "SELECT new_attribute_value_id FROM attribute_value_vivify_value_and_parent_values_raw_v1($1, $2, $3, $4, $5)", +// &[ +// ctx.tenancy(), +// ctx.visibility(), +// &self.context, +// &self.id, +// &true +// ]).await?; + +// Ok(row.try_get("new_attribute_value_id")?) +// } + +// /// Re-evaluates the current `AttributeValue`'s `AttributePrototype` to update the +// /// `FuncBinding`, and `FuncBindingReturnValue`, reflecting the current inputs to +// /// the function. +// /// +// /// If the `AttributeValue` represents the `InternalProvider` for a `Prop` that +// /// does not have a parent `Prop` (this is typically the `InternalProvider` for +// /// the "root" `Prop` of a `SchemaVariant`), then it will also enqueue a +// /// `CodeGeneration` job for the `Component`. 
+// #[instrument( +// name = "attribute_value.update_from_prototype_function", +// skip_all, +// level = "debug", +// fields( +// attribute_value.id = %self.id, +// change_set_pk = %ctx.visibility().change_set_pk, +// ) +// )] +// pub async fn update_from_prototype_function( +// &mut self, +// ctx: &DalContext, +// ) -> AttributeValueResult<()> { +// // Check if this AttributeValue is for an implicit InternalProvider as they have special behavior that doesn't involve +// // AttributePrototype and AttributePrototypeArguments. +// if self +// .context +// .is_least_specific_field_kind_internal_provider()? +// { +// let internal_provider = +// InternalProvider::get_by_id(ctx, &self.context.internal_provider_id()) +// .await? +// .ok_or_else(|| { +// AttributeValueError::InternalProviderNotFound( +// self.context.internal_provider_id(), +// ) +// })?; +// if internal_provider.is_internal_consumer() { +// // We don't care about the AttributeValue that comes back from implicit_emit, since we should already be +// // operating on an AttributeValue that has the correct AttributeContext, which means that a new one should +// // not need to be created. +// internal_provider +// .implicit_emit(ctx, self) +// .await +// .map_err(|e| AttributeValueError::InternalProvider(e.to_string()))?; + +// debug!("InternalProvider is internal consumer"); + +// return Ok(()); +// } +// } else if self.context.is_least_specific_field_kind_prop()? { +// if let Some(parent_attribute_value) = self.parent_attribute_value(ctx).await? { +// parent_attribute_value +// .vivify_value_and_parent_values(ctx) +// .await?; +// } +// } + +// // The following should handle explicit "normal" Attributes, InternalProviders, and ExternalProviders already. 
+// let attribute_prototype = self.attribute_prototype(ctx).await?.ok_or_else(|| { +// AttributeValueError::AttributePrototypeNotFound(self.id, *ctx.visibility()) +// })?; + +// // Note(victor): Secrets should never be passed to functions as arguments directly. +// // We detect if they're set as dependencies and later fetch before functions to execute +// // This is so secret values still trigger the dependent values system, +// // and before functions are only called when necessary +// let mut has_secrets_as_arg = false; +// let mut func_binding_args: HashMap> = HashMap::new(); +// for mut argument_data in attribute_prototype +// .argument_values(ctx, self.context) +// .await +// .map_err(|e| AttributeValueError::AttributePrototype(e.to_string()))? +// { +// if argument_data.argument_name == "secrets" { +// has_secrets_as_arg = true; +// continue; +// } + +// match argument_data.values.len() { +// 1 => { +// let argument = argument_data.values.pop().ok_or_else(|| { +// AttributeValueError::EmptyAttributePrototypeArgumentsForGroup( +// argument_data.argument_name.clone(), +// ) +// })?; + +// func_binding_args.insert( +// argument_data.argument_name, +// Some(serde_json::to_value(argument)?), +// ); +// } +// 2.. => { +// func_binding_args.insert( +// argument_data.argument_name, +// Some(serde_json::to_value(argument_data.values)?), +// ); +// } +// _ => { +// return Err( +// AttributeValueError::EmptyAttributePrototypeArgumentsForGroup( +// argument_data.argument_name, +// ), +// ); +// } +// }; +// } + +// let func_id = attribute_prototype.func_id(); + +// let before = if has_secrets_as_arg { +// // We need the associated [`ComponentId`] for this function--this is how we resolve and +// // prepare before functions +// let associated_component_id = self.context.component_id(); + +// before_funcs_for_component(ctx, &associated_component_id).await? 
+// } else { +// vec![] +// }; + +// let (func_binding, mut func_binding_return_value) = match FuncBinding::create_and_execute( +// ctx, +// serde_json::to_value(func_binding_args.clone())?, +// attribute_prototype.func_id(), +// before, +// ) +// .instrument(debug_span!( +// "Func execution", +// "func.id" = %func_id, +// ?func_binding_args, +// )) +// .await +// { +// Ok(function_return_value) => function_return_value, +// Err(FuncBindingError::FuncBackendResultFailure { +// kind, +// message, +// backend, +// }) => { +// return Err(AttributeValueError::FuncBackendResultFailure { +// kind, +// message, +// backend, +// }) +// } +// Err(err) => Err(err)?, +// }; + +// self.set_func_binding_id(ctx, *func_binding.id()).await?; +// self.set_func_binding_return_value_id(ctx, *func_binding_return_value.id()) +// .await?; + +// // If the value we just updated was for a Prop, we might have run a function that +// // generates a deep data structure. If the Prop is an Array/Map/Object, then the +// // value should be an empty Array/Map/Object, while the unprocessed value contains +// // the deep data structure. +// if self.context.is_least_specific_field_kind_prop()? { +// let processed_value = match func_binding_return_value.unprocessed_value().cloned() { +// Some(unprocessed_value) => { +// let prop = Prop::get_by_id(ctx, &self.context.prop_id()) +// .await? +// .ok_or_else(|| AttributeValueError::PropNotFound(self.context.prop_id()))?; + +// match prop.kind() { +// PropKind::Object | PropKind::Map => Some(serde_json::json!({})), +// PropKind::Array => Some(serde_json::json!([])), +// _ => Some(unprocessed_value), +// } +// } +// None => None, +// }; + +// func_binding_return_value +// .set_value(ctx, processed_value) +// .await?; +// }; +// // If they are different from each other, then we know +// // that we need to fully process the deep data structure, populating +// // AttributeValues for the child Props. 
+// // cannot be si:setArray / si:setMap / si:setObject +// if self.context.prop_id() != PropId::NONE { +// let prop = Prop::get_by_id(ctx, &self.context.prop_id()) +// .await? +// .ok_or_else(|| AttributeValueError::PropNotFound(self.context.prop_id()))?; + +// if *prop.kind() == PropKind::Array +// || *prop.kind() == PropKind::Object +// || *prop.kind() == PropKind::Map +// { +// let func_name = match *prop.kind() { +// PropKind::Array => "si:setArray", +// PropKind::Object => "si:setObject", +// PropKind::Map => "si:setMap", +// _ => unreachable!(), +// }; + +// let func = Func::find_by_attr(ctx, "name", &func_name) +// .await? +// .pop() +// .ok_or_else(|| AttributeValueError::MissingFunc(func_name.to_owned()))?; + +// if attribute_prototype.func_id() != *func.id() { +// if let Some(unprocessed_value) = +// func_binding_return_value.unprocessed_value().cloned() +// { +// AttributeValue::populate_nested_values( +// ctx, +// self.id, +// self.context, +// unprocessed_value, +// ) +// .await?; +// } +// } +// } +// } + +// Ok(()) +// } + +// pub async fn populate_child_proxies_for_value( +// &self, +// ctx: &DalContext, +// less_specific_attribute_value_id: AttributeValueId, +// more_specific_context: AttributeContext, +// ) -> AttributeValueResult>> { +// let row = ctx.txns().await?.pg().query_one( +// "SELECT new_proxy_value_ids FROM attribute_value_populate_child_proxies_for_value_v1($1, $2, $3, $4, $5)", +// &[ +// ctx.tenancy(), +// ctx.visibility(), +// &less_specific_attribute_value_id, +// &more_specific_context, +// self.id(), +// ] +// ).await?; + +// // Are we part of a map or array? Be sure to update the index map +// if self.key.is_some() { +// ctx.txns() +// .await? +// .pg() +// .query_opt( +// "SELECT * FROM attribute_value_update_parent_index_map_v1($1, $2, $3)", +// &[ctx.tenancy(), ctx.visibility(), self.id()], +// ) +// .await?; +// } + +// Ok(row.try_get("new_proxy_value_ids")?) 
+// } +// } + +// #[derive(Debug, Clone)] +// pub struct AttributeValuePayload { +// pub prop: Prop, +// pub func_binding_return_value: Option, +// pub attribute_value: AttributeValue, +// pub parent_attribute_value_id: Option, +// } + +// impl AttributeValuePayload { +// pub fn new( +// prop: Prop, +// func_binding_return_value: Option, +// attribute_value: AttributeValue, +// parent_attribute_value_id: Option, +// ) -> Self { +// Self { +// prop, +// func_binding_return_value, +// attribute_value, +// parent_attribute_value_id, +// } +// } +// } diff --git a/lib/dal/src/attribute/value/view.rs b/lib/dal/src/attribute/value/view.rs index f766df0aca..47e6815688 100644 --- a/lib/dal/src/attribute/value/view.rs +++ b/lib/dal/src/attribute/value/view.rs @@ -1,254 +1,253 @@ -//! This module contains the [`AttributeView`] struct and its methods. This object does not exist -//! in the database. - -use serde_json::Value; -use std::collections::{HashMap, VecDeque}; -use telemetry::prelude::*; - -use crate::{ - AttributeReadContext, AttributeValue, AttributeValueError, AttributeValueId, - AttributeValuePayload, AttributeValueResult, DalContext, Prop, PropError, PropKind, - StandardModel, -}; - -/// A generated view for an [`AttributeReadContext`](crate::AttributeReadContext) and an optional -/// root [`AttributeValueId`](crate::AttributeValue). The requirements for the context are laid -/// out in [`Self::new()`]. -#[derive(Debug)] -pub struct AttributeView { - /// The value that was generated from [`Self::new()`]. This can also be referred to as the - /// "properties" or "tree" of the view. - value: Value, - json_pointer_for_attribute_value_id: HashMap, -} - -impl AttributeView { - /// Generates an [`AttributeView`] with an [`AttributeReadContext`](crate::AttributeReadContext) - /// and an optional root [`AttributeValueId`](crate::AttributeValue). 
The context's requirements - /// are specified in the following locations: - /// - /// - If the root is _not_ provided: [`AttributeValue::list_payload_for_read_context()`] - /// - If the root is provided: [`AttributeValue::list_payload_for_read_context_and_root()`] - /// - /// The view is generated based on the [`AttributeValuePayloads`](crate::AttributeValuePayload) - /// found, including their corresponding [`Props`](crate::Prop). Usually, the root should be - /// provided if a view is desired for any given context and "location" in the object value. If - /// the [`SchemaVariant`](crate::SchemaVariant) is known and you only desire to generate a view - /// for the entire value, you do not need to provide the root. - pub async fn new( - ctx: &DalContext, - attribute_read_context: AttributeReadContext, - root_attribute_value_id: Option, - ) -> AttributeValueResult { - let mut initial_work = match root_attribute_value_id { - Some(root_attribute_value_id) => { - AttributeValue::list_payload_for_read_context_and_root( - ctx, - root_attribute_value_id, - attribute_read_context, - ) - .await? - } - None => { - AttributeValue::list_payload_for_read_context(ctx, attribute_read_context).await? - } - }; - - // When we have a parent AttributeValueId (K: AttributeValueId), we need to know where in - // the structure we need to insert the value we are working with (V: String). - let mut json_pointer_for_attribute_value_id: HashMap = - HashMap::new(); - - // Handle scenarios where we are generating views starting anywhere other than the root - // of a prop tree. - let maybe_parent_attribute_value_id = - if let Some(root_attribute_value_id) = root_attribute_value_id { - let root_attribute_value = AttributeValue::get_by_id(ctx, &root_attribute_value_id) - .await? - .ok_or(AttributeValueError::Missing)?; - root_attribute_value - .parent_attribute_value(ctx) - .await? 
- .map(|av| *av.id()) - } else { - None - }; - if let Some(parent_attribute_value_id) = maybe_parent_attribute_value_id { - json_pointer_for_attribute_value_id.insert(parent_attribute_value_id, "".to_string()); - } - - // We sort the work queue according to the order of every nested IndexMap. This ensures that - // when we reconstruct the final shape, we don't have to worry about the order that things - // appear in. - let attribute_value_order: Vec = initial_work - .iter() - .filter_map(|avp| avp.attribute_value.index_map()) - .flat_map(|index_map| index_map.order()) - .copied() - .collect(); - initial_work.sort_by_cached_key(|avp| { - attribute_value_order - .iter() - .position(|attribute_value_id| attribute_value_id == avp.attribute_value.id()) - .unwrap_or(0) - }); - - // We need the work queue to be a VecDeque so we can pop elements off of the front - // as it's supposed to be a queue, not a stack. - let mut work_queue: VecDeque = VecDeque::from(initial_work); - - let mut properties = serde_json::json![{}]; - let mut root_stack: Vec<(Option, String)> = - vec![(maybe_parent_attribute_value_id, "".to_string())]; - - while !work_queue.is_empty() { - let mut unprocessed: Vec = vec![]; - if root_stack.is_empty() { - warn!( - "Unexpected empty root stack with work_queue: {:?}", - &work_queue - ); - break; - } - let (root_id, json_pointer) = root_stack.pop().ok_or_else(|| { - error!( - "unexpected empty root stack, current work queue state: {:?}", - work_queue - ); - AttributeValueError::UnexpectedEmptyRootStack - })?; - - while let Some(AttributeValuePayload { - prop, - func_binding_return_value, - attribute_value, - parent_attribute_value_id, - }) = work_queue.pop_front() - { - if let Some(func_binding_return_value) = func_binding_return_value { - if let Some(found_value) = func_binding_return_value.value() { - if root_id == parent_attribute_value_id { - let insertion_pointer = - if let Some(parent_avi) = parent_attribute_value_id { - match 
json_pointer_for_attribute_value_id.get(&parent_avi) { - Some(ptr) => ptr.clone(), - // A `None` here would mean that we're trying to process a child before we've handled its parent, - // and that shouldn't be possible given how we're going through the work_queue. - None => unreachable!(), - } - } else { - // After we've processed the "root" property, we shouldn't hit this case any more. - json_pointer.clone() - }; - - let write_location = match properties.pointer_mut(&insertion_pointer) { - Some(write_location) => write_location, - None => { - return Err(AttributeValueError::BadJsonPointer( - insertion_pointer.clone(), - properties.to_string(), - )); - } - }; - let next_json_pointer = - if let Some(object) = write_location.as_object_mut() { - if let Some(key) = attribute_value.key() { - object.insert(key.to_string(), found_value.clone()); - format!("{insertion_pointer}/{key}") - } else { - object.insert(prop.name().to_string(), found_value.clone()); - format!("{}/{}", insertion_pointer, prop.name()) - } - } else if let Some(array) = write_location.as_array_mut() { - // This code can just push, because we ordered the work queue above. - // Magic! - array.push(found_value.clone()); - format!("{}/{}", insertion_pointer, array.len() - 1) - } else { - // Note: this shouldn't ever actually get used. - insertion_pointer.to_string() - }; - // Record the json pointer path to this specific attribute value's location. 
- json_pointer_for_attribute_value_id - .insert(*attribute_value.id(), next_json_pointer.clone()); - - match prop.kind() { - &PropKind::Object | &PropKind::Array | &PropKind::Map => { - root_stack - .push((Some(*attribute_value.id()), next_json_pointer)); - } - _ => {} - } - } else { - unprocessed.push(AttributeValuePayload::new( - prop, - Some(func_binding_return_value), - attribute_value, - parent_attribute_value_id, - )); - } - } - } - } - work_queue = VecDeque::from(unprocessed); - } - - if let Some(root_attribute_value_id) = root_attribute_value_id { - let root_json_pointer = match json_pointer_for_attribute_value_id - .get(&root_attribute_value_id) - { - Some(pointer) => pointer, - None => { - let root_attribute_value = - AttributeValue::get_by_id(ctx, &root_attribute_value_id) - .await? - .ok_or_else(|| { - AttributeValueError::NotFound( - root_attribute_value_id, - *ctx.visibility(), - ) - })?; - let root_prop = Prop::get_by_id(ctx, &root_attribute_value.context.prop_id()) - .await? 
- .ok_or_else(|| { - PropError::NotFound( - root_attribute_value.context.prop_id(), - *ctx.visibility(), - ) - }) - .map_err(Box::new)?; - debug!("\ - likely tried to build an attribute view for an attribute value that is unset, \ - so the \"properties\" object is empty ({:?}), and does not contain a key matching \ - our prop's name (root attribute value ({:?}) and root prop ({:?}))", properties, root_attribute_value, root_prop - ); - return Ok(Self { - value: Value::Null, - json_pointer_for_attribute_value_id, - }); - } - }; - - let properties = properties - .pointer(root_json_pointer) - .ok_or(AttributeValueError::NoValueForJsonPointer)?; - return Ok(Self { - value: properties.to_owned(), - json_pointer_for_attribute_value_id, - }); - } - - Ok(Self { - value: properties.to_owned(), - json_pointer_for_attribute_value_id, - }) - } - - pub fn value(&self) -> &serde_json::Value { - &self.value - } - - pub fn json_pointers_for_attribute_value_id(&self) -> &HashMap { - &self.json_pointer_for_attribute_value_id - } -} +// //! This module contains the [`AttributeView`] struct and its methods. This object does not exist +// //! in the database. + +// use serde_json::Value; +// use std::collections::{HashMap, VecDeque}; +// use telemetry::prelude::*; + +// use crate::{ +// AttributeReadContext, AttributeValue, AttributeValueId, AttributeValuePayload, DalContext, +// Prop, PropKind, StandardModel, +// }; + +// /// A generated view for an [`AttributeReadContext`](crate::AttributeReadContext) and an optional +// /// root [`AttributeValueId`](crate::AttributeValue). The requirements for the context are laid +// /// out in [`Self::new()`]. +// #[derive(Debug)] +// pub struct AttributeView { +// /// The value that was generated from [`Self::new()`]. This can also be referred to as the +// /// "properties" or "tree" of the view. 
+// value: Value, +// json_pointer_for_attribute_value_id: HashMap, +// } + +// impl AttributeView { +// /// Generates an [`AttributeView`] with an [`AttributeReadContext`](crate::AttributeReadContext) +// /// and an optional root [`AttributeValueId`](crate::AttributeValue). The context's requirements +// /// are specified in the following locations: +// /// +// /// - If the root is _not_ provided: [`AttributeValue::list_payload_for_read_context()`] +// /// - If the root is provided: [`AttributeValue::list_payload_for_read_context_and_root()`] +// /// +// /// The view is generated based on the [`AttributeValuePayloads`](crate::AttributeValuePayload) +// /// found, including their corresponding [`Props`](crate::Prop). Usually, the root should be +// /// provided if a view is desired for any given context and "location" in the object value. If +// /// the [`SchemaVariant`](crate::SchemaVariant) is known and you only desire to generate a view +// /// for the entire value, you do not need to provide the root. +// pub async fn new( +// ctx: &DalContext, +// attribute_read_context: AttributeReadContext, +// root_attribute_value_id: Option, +// ) -> AttributeValueResult { +// let mut initial_work = match root_attribute_value_id { +// Some(root_attribute_value_id) => { +// AttributeValue::list_payload_for_read_context_and_root( +// ctx, +// root_attribute_value_id, +// attribute_read_context, +// ) +// .await? +// } +// None => { +// AttributeValue::list_payload_for_read_context(ctx, attribute_read_context).await? +// } +// }; + +// // When we have a parent AttributeValueId (K: AttributeValueId), we need to know where in +// // the structure we need to insert the value we are working with (V: String). +// let mut json_pointer_for_attribute_value_id: HashMap = +// HashMap::new(); + +// // Handle scenarios where we are generating views starting anywhere other than the root +// // of a prop tree. 
+// let maybe_parent_attribute_value_id = +// if let Some(root_attribute_value_id) = root_attribute_value_id { +// let root_attribute_value = AttributeValue::get_by_id(ctx, &root_attribute_value_id) +// .await? +// .ok_or(AttributeValueError::Missing)?; +// root_attribute_value +// .parent_attribute_value(ctx) +// .await? +// .map(|av| *av.id()) +// } else { +// None +// }; +// if let Some(parent_attribute_value_id) = maybe_parent_attribute_value_id { +// json_pointer_for_attribute_value_id.insert(parent_attribute_value_id, "".to_string()); +// } + +// // We sort the work queue according to the order of every nested IndexMap. This ensures that +// // when we reconstruct the final shape, we don't have to worry about the order that things +// // appear in. +// let attribute_value_order: Vec = initial_work +// .iter() +// .filter_map(|avp| avp.attribute_value.index_map()) +// .flat_map(|index_map| index_map.order()) +// .copied() +// .collect(); +// initial_work.sort_by_cached_key(|avp| { +// attribute_value_order +// .iter() +// .position(|attribute_value_id| attribute_value_id == avp.attribute_value.id()) +// .unwrap_or(0) +// }); + +// // We need the work queue to be a VecDeque so we can pop elements off of the front +// // as it's supposed to be a queue, not a stack. 
+// let mut work_queue: VecDeque = VecDeque::from(initial_work); + +// let mut properties = serde_json::json![{}]; +// let mut root_stack: Vec<(Option, String)> = +// vec![(maybe_parent_attribute_value_id, "".to_string())]; + +// while !work_queue.is_empty() { +// let mut unprocessed: Vec = vec![]; +// if root_stack.is_empty() { +// warn!( +// "Unexpected empty root stack with work_queue: {:?}", +// &work_queue +// ); +// break; +// } +// let (root_id, json_pointer) = root_stack.pop().ok_or_else(|| { +// error!( +// "unexpected empty root stack, current work queue state: {:?}", +// work_queue +// ); +// AttributeValueError::UnexpectedEmptyRootStack +// })?; + +// while let Some(AttributeValuePayload { +// prop, +// func_binding_return_value, +// attribute_value, +// parent_attribute_value_id, +// }) = work_queue.pop_front() +// { +// if let Some(func_binding_return_value) = func_binding_return_value { +// if let Some(found_value) = func_binding_return_value.value() { +// if root_id == parent_attribute_value_id { +// let insertion_pointer = +// if let Some(parent_avi) = parent_attribute_value_id { +// match json_pointer_for_attribute_value_id.get(&parent_avi) { +// Some(ptr) => ptr.clone(), +// // A `None` here would mean that we're trying to process a child before we've handled its parent, +// // and that shouldn't be possible given how we're going through the work_queue. +// None => unreachable!(), +// } +// } else { +// // After we've processed the "root" property, we shouldn't hit this case any more. 
+// json_pointer.clone() +// }; + +// let write_location = match properties.pointer_mut(&insertion_pointer) { +// Some(write_location) => write_location, +// None => { +// return Err(AttributeValueError::BadJsonPointer( +// insertion_pointer.clone(), +// properties.to_string(), +// )); +// } +// }; +// let next_json_pointer = +// if let Some(object) = write_location.as_object_mut() { +// if let Some(key) = attribute_value.key() { +// object.insert(key.to_string(), found_value.clone()); +// format!("{insertion_pointer}/{key}") +// } else { +// object.insert(prop.name().to_string(), found_value.clone()); +// format!("{}/{}", insertion_pointer, prop.name()) +// } +// } else if let Some(array) = write_location.as_array_mut() { +// // This code can just push, because we ordered the work queue above. +// // Magic! +// array.push(found_value.clone()); +// format!("{}/{}", insertion_pointer, array.len() - 1) +// } else { +// // Note: this shouldn't ever actually get used. +// insertion_pointer.to_string() +// }; +// // Record the json pointer path to this specific attribute value's location. +// json_pointer_for_attribute_value_id +// .insert(*attribute_value.id(), next_json_pointer.clone()); + +// match prop.kind() { +// &PropKind::Object | &PropKind::Array | &PropKind::Map => { +// root_stack +// .push((Some(*attribute_value.id()), next_json_pointer)); +// } +// _ => {} +// } +// } else { +// unprocessed.push(AttributeValuePayload::new( +// prop, +// Some(func_binding_return_value), +// attribute_value, +// parent_attribute_value_id, +// )); +// } +// } +// } +// } +// work_queue = VecDeque::from(unprocessed); +// } + +// if let Some(root_attribute_value_id) = root_attribute_value_id { +// let root_json_pointer = match json_pointer_for_attribute_value_id +// .get(&root_attribute_value_id) +// { +// Some(pointer) => pointer, +// None => { +// let root_attribute_value = +// AttributeValue::get_by_id(ctx, &root_attribute_value_id) +// .await? 
+// .ok_or_else(|| { +// AttributeValueError::NotFound( +// root_attribute_value_id, +// *ctx.visibility(), +// ) +// })?; +// let root_prop = Prop::get_by_id(ctx, &root_attribute_value.context.prop_id()) +// .await? +// .ok_or_else(|| { +// PropError::NotFound( +// root_attribute_value.context.prop_id(), +// *ctx.visibility(), +// ) +// }) +// .map_err(Box::new)?; +// debug!("\ +// likely tried to build an attribute view for an attribute value that is unset, \ +// so the \"properties\" object is empty ({:?}), and does not contain a key matching \ +// our prop's name (root attribute value ({:?}) and root prop ({:?}))", properties, root_attribute_value, root_prop +// ); +// return Ok(Self { +// value: Value::Null, +// json_pointer_for_attribute_value_id, +// }); +// } +// }; + +// let properties = properties +// .pointer(root_json_pointer) +// .ok_or(AttributeValueError::NoValueForJsonPointer)?; +// return Ok(Self { +// value: properties.to_owned(), +// json_pointer_for_attribute_value_id, +// }); +// } + +// Ok(Self { +// value: properties.to_owned(), +// json_pointer_for_attribute_value_id, +// }) +// } + +// pub fn value(&self) -> &serde_json::Value { +// &self.value +// } + +// pub fn json_pointers_for_attribute_value_id(&self) -> &HashMap { +// &self.json_pointer_for_attribute_value_id +// } +// } diff --git a/lib/dal/src/authentication_prototype.rs b/lib/dal/src/authentication_prototype.rs index 4a73f86202..ee73f57f93 100644 --- a/lib/dal/src/authentication_prototype.rs +++ b/lib/dal/src/authentication_prototype.rs @@ -9,10 +9,9 @@ use telemetry::prelude::*; use crate::authentication_prototype::AuthenticationPrototypeError::AuthAlreadySet; use crate::{ - component::view::ComponentViewError, impl_standard_model, pk, standard_model, - standard_model_accessor, ComponentId, DalContext, FuncBindingError, - FuncBindingReturnValueError, FuncId, HistoryEventError, SchemaVariantId, StandardModel, - StandardModelError, Tenancy, Timestamp, TransactionsError, Visibility, 
WsEventError, + impl_standard_model, pk, standard_model, standard_model_accessor, ComponentId, DalContext, + FuncId, HistoryEventError, SchemaVariantId, StandardModel, StandardModelError, Tenancy, + Timestamp, TransactionsError, Visibility, WsEventError, }; const FIND_FOR_CONTEXT: &str = @@ -30,12 +29,6 @@ pub enum AuthenticationPrototypeError { Component(String), #[error("component not found: {0}")] ComponentNotFound(ComponentId), - #[error(transparent)] - ComponentView(#[from] ComponentViewError), - #[error(transparent)] - FuncBinding(#[from] FuncBindingError), - #[error(transparent)] - FuncBindingReturnValue(#[from] FuncBindingReturnValueError), #[error("action Func {0} not found for ActionPrototype {1}")] FuncNotFound(FuncId, AuthenticationPrototypeId), #[error("history event error: {0}")] diff --git a/lib/dal/src/builtins.rs b/lib/dal/src/builtins.rs index c88efc4637..0710b795b2 100644 --- a/lib/dal/src/builtins.rs +++ b/lib/dal/src/builtins.rs @@ -9,26 +9,18 @@ use thiserror::Error; use si_pkg::{SiPkgError, SpecError}; -use crate::func::argument::FuncArgumentError; -use crate::func::binding::FuncBindingError; -use crate::func::binding_return_value::FuncBindingReturnValueError; +use crate::func::FuncError; use crate::installed_pkg::InstalledPkgError; use crate::pkg::PkgError; -use crate::provider::external::ExternalProviderError; -use crate::provider::internal::InternalProviderError; -use crate::schema::variant::definition::SchemaVariantDefinitionError; -use crate::schema::variant::SchemaVariantError; -use crate::socket::SocketError; +// use crate::schema::variant::definition::SchemaVariantDefinitionError; use crate::{ - AttributeContextBuilderError, AttributePrototypeArgumentError, AttributePrototypeError, - AttributeReadContext, AttributeValueError, AttributeValueId, DalContext, ExternalProviderId, - FuncError, InternalProviderId, PropError, PropId, SchemaError, SchemaVariantId, - StandardModelError, TransactionsError, ValidationPrototypeError, + 
AttributeValueId, DalContext, ExternalProviderId, InternalProviderId, PropId, SchemaVariantId, + StandardModelError, TransactionsError, }; // Private builtins modules. pub mod func; -pub mod schema; +// pub mod schema; pub const SI_AWS_PKG: &str = "si-aws-2023-09-13.sipkg"; pub const SI_AWS_EC2_PKG: &str = "si-aws-ec2-2023-09-26.sipkg"; @@ -43,36 +35,18 @@ pub const SI_AWS_LB_TARGET_GROUP_PKG: &str = "si-aws-lb-target-group-2023-12-05. #[remain::sorted] #[derive(Error, Debug)] pub enum BuiltinsError { - #[error("attribute context builder error: {0}")] - AttributeContextBuilder(#[from] AttributeContextBuilderError), - #[error("attribute prototype error: {0}")] - AttributePrototype(#[from] AttributePrototypeError), - #[error("attribute prototype argument error: {0}")] - AttributePrototypeArgument(#[from] AttributePrototypeArgumentError), - #[error("attribute value error: {0}")] - AttributeValue(#[from] AttributeValueError), #[error("attribute value not found by id: {0}")] AttributeValueNotFound(AttributeValueId), - #[error("attribute value not found for attribute read context: {0:?}")] - AttributeValueNotFoundForContext(AttributeReadContext), #[error("builtin {0} missing func argument {1}")] BuiltinMissingFuncArgument(String, String), #[error("explicit internal provider not found by name: {0}")] ExplicitInternalProviderNotFound(String), - #[error("external provider error: {0}")] - ExternalProvider(#[from] ExternalProviderError), #[error("external provider not found by name: {0}")] ExternalProviderNotFound(String), #[error("Filesystem IO error: {0}")] FilesystemIO(#[from] std::io::Error), - #[error("func error: {0}")] + #[error(transparent)] Func(#[from] FuncError), - #[error("func argument error: {0}")] - FuncArgument(#[from] FuncArgumentError), - #[error("func binding error: {0}")] - FuncBinding(#[from] FuncBindingError), - #[error("func binding return value error: {0}")] - FuncBindingReturnValue(#[from] FuncBindingReturnValueError), #[error("json error {1} at file 
{0}")] FuncJson(String, serde_json::Error), #[error("Func Metadata error: {0}")] @@ -83,8 +57,6 @@ pub enum BuiltinsError { ImplicitInternalProviderNotFoundForProp(PropId), #[error(transparent)] InstalledPkg(#[from] InstalledPkgError), - #[error("internal provider error: {0}")] - InternalProvider(#[from] InternalProviderError), #[error("missing attribute prototype for attribute value")] MissingAttributePrototypeForAttributeValue, #[error("missing attribute prototype for explicit internal provider: {0}")] @@ -95,36 +67,24 @@ pub enum BuiltinsError { MissingPkgsPath, #[error(transparent)] Pkg(#[from] PkgError), - #[error("prop error: {0}")] - Prop(#[from] PropError), #[error("prop cache not found: {0}")] PropCacheNotFound(SchemaVariantId), #[error("prop not bound by id: {0}")] PropNotFound(PropId), #[error("Regex parsing error: {0}")] Regex(#[from] regex::Error), - #[error("schema error: {0}")] - Schema(#[from] SchemaError), - #[error("schema variant error: {0}")] - SchemaVariant(#[from] SchemaVariantError), - #[error("schema variant definition error")] - SchemaVariantDefinition(#[from] SchemaVariantDefinitionError), #[error("serde json error: {0}")] SerdeJson(#[from] serde_json::Error), #[error("encountered serde json error for func ({0}): {1}")] SerdeJsonErrorForFunc(String, serde_json::Error), #[error(transparent)] SiPkg(#[from] SiPkgError), - #[error("socket error: {0}")] - Socket(#[from] SocketError), #[error(transparent)] Spec(#[from] SpecError), #[error("standard model error: {0}")] StandardModel(#[from] StandardModelError), #[error("error creating new transactions")] Transactions(#[from] TransactionsError), - #[error("validation prototype error: {0}")] - ValidationPrototype(#[from] ValidationPrototypeError), } pub type BuiltinsResult = Result; @@ -146,28 +106,25 @@ pub enum SelectedTestBuiltinSchemas { } /// Migrate all "builtins" in a definitive order. -/// -/// 1. [`Funcs`](crate::Func) -/// 1. [`Schemas`](crate::Schema) -/// 1. 
['ActionPrototypes'](crate::ActionPrototype) pub async fn migrate( ctx: &DalContext, - selected_test_builtin_schemas: Option, + _selected_test_builtin_schemas: Option, ) -> BuiltinsResult<()> { info!("migrating intrinsic functions"); func::migrate_intrinsics(ctx).await?; - info!("migrating builtin functions"); - func::migrate(ctx).await?; + info!("intrinsics migrated"); + // info!("migrating builtin functions"); + // func::migrate(ctx).await?; - match selected_test_builtin_schemas { - Some(found_selected_test_builtin_schemas) => { - schema::migrate_for_tests(ctx, found_selected_test_builtin_schemas).await?; - } - None => { - schema::migrate_for_production(ctx).await?; - } - } + // match selected_test_builtin_schemas { + // Some(found_selected_test_builtin_schemas) => { + // schema::migrate_for_tests(ctx, found_selected_test_builtin_schemas).await?; + // } + // None => { + // schema::migrate_for_production(ctx).await?; + // } + // } - info!("completed migrating functions, workflows and schemas"); + // info!("completed migrating functions, workflows and schemas"); Ok(()) } diff --git a/lib/dal/src/builtins/func.rs b/lib/dal/src/builtins/func.rs index 6a4706201f..1e145ab386 100644 --- a/lib/dal/src/builtins/func.rs +++ b/lib/dal/src/builtins/func.rs @@ -5,34 +5,29 @@ use si_pkg::SiPkg; use telemetry::prelude::*; use crate::{ - func::{ - argument::{FuncArgument, FuncArgumentKind}, - intrinsics::IntrinsicFunc, - }, - installed_pkg::InstalledPkg, - pkg::import_pkg_from_pkg, + func::intrinsics::IntrinsicFunc, installed_pkg::InstalledPkg, pkg::import_pkg_from_pkg, BuiltinsError, BuiltinsResult, DalContext, Func, FuncBackendKind, FuncBackendResponseType, StandardModel, }; -#[derive(Deserialize, Serialize, Debug)] -struct FunctionMetadataArgument { - name: String, - kind: FuncArgumentKind, -} - -#[derive(Deserialize, Serialize, Debug)] -struct FunctionMetadata { - kind: FuncBackendKind, - arguments: Option>, - response_type: FuncBackendResponseType, - hidden: Option, - 
display_name: Option, - description: Option, - link: Option, - code_file: Option, - code_entrypoint: Option, -} +// #[derive(Deserialize, Serialize, Debug)] +// struct FunctionMetadataArgument { +// name: String, +// kind: FuncArgumentKind, +// } +// +// #[derive(Deserialize, Serialize, Debug)] +// struct FunctionMetadata { +// kind: FuncBackendKind, +// arguments: Option>, +// response_type: FuncBackendResponseType, +// hidden: Option, +// display_name: Option, +// description: Option, +// link: Option, +// code_file: Option, +// code_entrypoint: Option, +// } /// We want the src/builtins/func/** files to be available at run time inside of the Docker container /// that we build, but it would be nice to not have to include arbitrary bits of the source tree when @@ -63,6 +58,7 @@ pub async fn migrate_intrinsics(ctx: &DalContext) -> BuiltinsResult<()> { .await? .is_none() { + dbg!("intrinsics pkg not found, importing"); import_pkg_from_pkg(ctx, &intrinsics_pkg, None).await?; ctx.blocking_commit().await?; } @@ -70,123 +66,123 @@ pub async fn migrate_intrinsics(ctx: &DalContext) -> BuiltinsResult<()> { Ok(()) } -pub async fn migrate(ctx: &DalContext) -> BuiltinsResult<()> { - for builtin_func_file in ASSETS.iter() { - let builtin_path = std::path::Path::new(builtin_func_file.relative_path); - match builtin_path.extension() { - Some(extension) => { - if extension != std::ffi::OsStr::new("json") { - debug!("skipping {:?}: not a json file", builtin_path); - continue; - } - } - None => { - warn!("skipping {:?}: no file extension", builtin_path); - continue; - } - }; - - let func_metadata: FunctionMetadata = serde_json::from_str(builtin_func_file.contents_str) - .map_err(|e| BuiltinsError::FuncJson(builtin_path.to_string_lossy().to_string(), e))?; - - let func_name = format!( - "si:{}", - builtin_path - .file_stem() - .ok_or_else(|| { - BuiltinsError::FuncMetadata(format!( - "Unable to determine base file name for {builtin_path:?}" - )) - })? 
- .to_string_lossy() - ); - - let mut existing_func = Func::find_by_attr(ctx, "name", &func_name).await?; - if let Some(mut existing_func) = existing_func.pop() { - if *existing_func.backend_kind() != func_metadata.kind { - info!( - "updating backend kind for {:?} from {:?} to {:?}", - &func_name, - *existing_func.backend_kind(), - func_metadata.kind - ); - existing_func - .set_backend_kind(ctx, func_metadata.kind) - .await?; - } - - warn!("skipping {:?}: func already exists", &func_name); - continue; - } - - let mut new_func = Func::new( - ctx, - &func_name, - func_metadata.kind, - func_metadata.response_type, - ) - .await - .expect("cannot create func"); - - if let Some(code_file) = func_metadata.code_file { - if func_metadata.code_entrypoint.is_none() { - panic!("cannot create function with code_file but no code_entrypoint") - } - - let metadata_base_path = builtin_path.parent().ok_or_else(|| { - BuiltinsError::FuncMetadata(format!( - "Cannot determine parent path of {builtin_path:?}" - )) - })?; - let func_path = metadata_base_path.join(std::path::Path::new(&code_file)); - - let code = FUNC_BUILTIN_BY_PATH - .get(func_path.as_os_str().to_str().ok_or_else(|| { - BuiltinsError::FuncMetadata(format!("Unable to convert {func_path:?} to &str")) - })?) 
- .ok_or_else(|| { - BuiltinsError::FuncMetadata(format!("Code file not found: {code_file:?}")) - })?; - let code = general_purpose::STANDARD_NO_PAD.encode(code.contents_str); - new_func - .set_code_base64(ctx, Some(code)) - .await - .expect("cannot set code"); - } - - new_func - .set_handler(ctx, func_metadata.code_entrypoint) - .await - .expect("cannot set handler"); - - new_func - .set_display_name(ctx, func_metadata.display_name) - .await - .expect("cannot set display name"); - new_func - .set_description(ctx, func_metadata.description) - .await - .expect("cannot set func description"); - new_func - .set_link(ctx, func_metadata.link) - .await - .expect("cannot set func link"); - new_func - .set_hidden(ctx, func_metadata.hidden.unwrap_or(false)) - .await - .expect("cannot set func hidden"); - new_func - .set_builtin(ctx, true) - .await - .expect("cannot set func builtin"); - - if let Some(arguments) = func_metadata.arguments { - for arg in arguments { - FuncArgument::new(ctx, &arg.name, arg.kind, None, *new_func.id()).await?; - } - } - ctx.blocking_commit().await?; - } - - Ok(()) -} +// pub async fn migrate(ctx: &DalContext) -> BuiltinsResult<()> { +// for builtin_func_file in ASSETS.iter() { +// let builtin_path = std::path::Path::new(builtin_func_file.relative_path); +// match builtin_path.extension() { +// Some(extension) => { +// if extension != std::ffi::OsStr::new("json") { +// debug!("skipping {:?}: not a json file", builtin_path); +// continue; +// } +// } +// None => { +// warn!("skipping {:?}: no file extension", builtin_path); +// continue; +// } +// }; +// +// let func_metadata: FunctionMetadata = serde_json::from_str(builtin_func_file.contents_str) +// .map_err(|e| BuiltinsError::FuncJson(builtin_path.to_string_lossy().to_string(), e))?; +// +// let func_name = format!( +// "si:{}", +// builtin_path +// .file_stem() +// .ok_or_else(|| { +// BuiltinsError::FuncMetadata(format!( +// "Unable to determine base file name for {builtin_path:?}" +// )) +// 
})? +// .to_string_lossy() +// ); +// +// let mut existing_func = Func::find_by_attr(ctx, "name", &func_name).await?; +// if let Some(mut existing_func) = existing_func.pop() { +// if *existing_func.backend_kind() != func_metadata.kind { +// info!( +// "updating backend kind for {:?} from {:?} to {:?}", +// &func_name, +// *existing_func.backend_kind(), +// func_metadata.kind +// ); +// existing_func +// .set_backend_kind(ctx, func_metadata.kind) +// .await?; +// } +// +// warn!("skipping {:?}: func already exists", &func_name); +// continue; +// } +// +// let mut new_func = Func::new( +// ctx, +// &func_name, +// func_metadata.kind, +// func_metadata.response_type, +// ) +// .await +// .expect("cannot create func"); +// +// if let Some(code_file) = func_metadata.code_file { +// if func_metadata.code_entrypoint.is_none() { +// panic!("cannot create function with code_file but no code_entrypoint") +// } +// +// let metadata_base_path = builtin_path.parent().ok_or_else(|| { +// BuiltinsError::FuncMetadata(format!( +// "Cannot determine parent path of {builtin_path:?}" +// )) +// })?; +// let func_path = metadata_base_path.join(std::path::Path::new(&code_file)); +// +// let code = FUNC_BUILTIN_BY_PATH +// .get(func_path.as_os_str().to_str().ok_or_else(|| { +// BuiltinsError::FuncMetadata(format!("Unable to convert {func_path:?} to &str")) +// })?) 
+// .ok_or_else(|| { +// BuiltinsError::FuncMetadata(format!("Code file not found: {code_file:?}")) +// })?; +// let code = general_purpose::STANDARD_NO_PAD.encode(code.contents_str); +// new_func +// .set_code_base64(ctx, Some(code)) +// .await +// .expect("cannot set code"); +// } +// +// new_func +// .set_handler(ctx, func_metadata.code_entrypoint) +// .await +// .expect("cannot set handler"); +// +// new_func +// .set_display_name(ctx, func_metadata.display_name) +// .await +// .expect("cannot set display name"); +// new_func +// .set_description(ctx, func_metadata.description) +// .await +// .expect("cannot set func description"); +// new_func +// .set_link(ctx, func_metadata.link) +// .await +// .expect("cannot set func link"); +// new_func +// .set_hidden(ctx, func_metadata.hidden.unwrap_or(false)) +// .await +// .expect("cannot set func hidden"); +// new_func +// .set_builtin(ctx, true) +// .await +// .expect("cannot set func builtin"); +// +// if let Some(arguments) = func_metadata.arguments { +// for arg in arguments { +// FuncArgument::new(ctx, &arg.name, arg.kind, None, *new_func.id()).await?; +// } +// } +// ctx.blocking_commit().await?; +// } +// +// Ok(()) +// } diff --git a/lib/dal/src/builtins/schema.rs b/lib/dal/src/builtins/schema.rs index 22129ac0b0..8f560bcf7a 100644 --- a/lib/dal/src/builtins/schema.rs +++ b/lib/dal/src/builtins/schema.rs @@ -13,8 +13,8 @@ use crate::{ binding::{FuncBinding, FuncBindingId}, binding_return_value::FuncBindingReturnValueId, }, - BuiltinsError, BuiltinsResult, DalContext, Func, FuncError, FuncId, SchemaError, - SelectedTestBuiltinSchemas, StandardModel, + BuiltinsError, BuiltinsResult, DalContext, Func, FuncId, SelectedTestBuiltinSchemas, + StandardModel, }; mod test_exclusive_fallout; diff --git a/lib/dal/src/change_set.rs b/lib/dal/src/change_set.rs index 0db3fefebf..7915f41af1 100644 --- a/lib/dal/src/change_set.rs +++ b/lib/dal/src/change_set.rs @@ -9,11 +9,11 @@ use thiserror::Error; use 
crate::standard_model::{object_option_from_row_option, objects_from_rows}; use crate::{ - pk, Action, ActionError, HistoryActor, HistoryEvent, HistoryEventError, LabelListError, - StandardModelError, Tenancy, Timestamp, TransactionsError, User, UserError, UserPk, Visibility, - WsEvent, WsEventError, WsPayload, + pk, HistoryActor, HistoryEvent, HistoryEventError, LabelListError, StandardModelError, Tenancy, + Timestamp, TransactionsError, User, UserError, UserPk, Visibility, WsEvent, WsEventError, + WsPayload, }; -use crate::{ComponentError, DalContext, WsEventResult}; +use crate::{DalContext, WsEventResult}; const CHANGE_SET_OPEN_LIST: &str = include_str!("queries/change_set/open_list.sql"); const CHANGE_SET_GET_BY_PK: &str = include_str!("queries/change_set/get_by_pk.sql"); @@ -26,10 +26,6 @@ const ABANDON_CHANGE_SET: &str = include_str!("queries/change_set/abandon_change #[remain::sorted] #[derive(Error, Debug)] pub enum ChangeSetError { - #[error(transparent)] - Action(#[from] ActionError), - #[error(transparent)] - Component(#[from] ComponentError), #[error(transparent)] HistoryEvent(#[from] HistoryEventError), #[error("invalid user actor pk")] @@ -236,53 +232,53 @@ impl ChangeSet { Ok(change_set) } - pub async fn sort_actions(&self, ctx: &DalContext) -> ChangeSetResult<()> { - let ctx = - ctx.clone_with_new_visibility(Visibility::new(self.pk, ctx.visibility().deleted_at)); - Ok(Action::sort_of_change_set(&ctx).await?) - } - - pub async fn actions(&self, ctx: &DalContext) -> ChangeSetResult> { - let ctx = - ctx.clone_with_new_visibility(Visibility::new(self.pk, ctx.visibility().deleted_at)); - Ok(Action::find_for_change_set(&ctx).await?) - } - - pub async fn actors(&self, ctx: &DalContext) -> ChangeSetResult> { - let rows = ctx - .txns() - .await? 
- .pg() - .query(GET_ACTORS, &[&ctx.tenancy().workspace_pk(), &self.pk]) - .await?; - - let mut result: Vec = vec![]; - for row in rows.into_iter() { - let email: String = row.try_get("email")?; - result.push(email); - } - - Ok(result) - } - - pub async fn force_new(ctx: &mut DalContext) -> ChangeSetResult> { - Ok(if ctx.visibility().is_head() { - let change_set = Self::new(ctx, Self::generate_name(), None).await?; - - let new_visibility = Visibility::new(change_set.pk, ctx.visibility().deleted_at); - - ctx.update_visibility(new_visibility); - - WsEvent::change_set_created(ctx, change_set.pk) - .await? - .publish_on_commit(ctx) - .await?; - - Some(change_set.pk) - } else { - None - }) - } + // pub async fn sort_actions(&self, ctx: &DalContext) -> ChangeSetResult<()> { + // let ctx = + // ctx.clone_with_new_visibility(Visibility::new(self.pk, ctx.visibility().deleted_at)); + // Ok(Action::sort_of_change_set(&ctx).await?) + // } + + // pub async fn actions(&self, ctx: &DalContext) -> ChangeSetResult> { + // let ctx = + // ctx.clone_with_new_visibility(Visibility::new(self.pk, ctx.visibility().deleted_at)); + // Ok(Action::find_for_change_set(&ctx).await?) + // } + + // pub async fn actors(&self, ctx: &DalContext) -> ChangeSetResult> { + // let rows = ctx + // .txns() + // .await? + // .pg() + // .query(GET_ACTORS, &[&ctx.tenancy().workspace_pk(), &self.pk]) + // .await?; + + // let mut result: Vec = vec![]; + // for row in rows.into_iter() { + // let email: String = row.try_get("email")?; + // result.push(email); + // } + + // Ok(result) + // } + + // pub async fn force_new(ctx: &mut DalContext) -> ChangeSetResult> { + // Ok(if ctx.visibility().is_head() { + // let change_set = Self::new(ctx, Self::generate_name(), None).await?; + + // let new_visibility = Visibility::new(change_set.pk, ctx.visibility().deleted_at); + + // ctx.update_visibility(new_visibility); + + // WsEvent::change_set_created(ctx, change_set.pk) + // .await? 
+ // .publish_on_commit(ctx) + // .await?; + + // Some(change_set.pk) + // } else { + // None + // }) + // } } impl WsEvent { diff --git a/lib/dal/src/change_set_pointer.rs b/lib/dal/src/change_set_pointer.rs index 737827b3c9..650e1857e4 100644 --- a/lib/dal/src/change_set_pointer.rs +++ b/lib/dal/src/change_set_pointer.rs @@ -39,6 +39,7 @@ pub struct ChangeSetPointer { pub updated_at: DateTime, pub name: String, + pub base_change_set_id: Option, pub workspace_snapshot_id: Option, #[serde(skip)] @@ -54,6 +55,7 @@ impl TryFrom for ChangeSetPointer { created_at: value.try_get("created_at")?, updated_at: value.try_get("updated_at")?, name: value.try_get("name")?, + base_change_set_id: value.try_get("base_change_set_id")?, workspace_snapshot_id: value.try_get("workspace_snapshot_id")?, generator: Arc::new(Mutex::new(Default::default())), }) @@ -70,20 +72,40 @@ impl ChangeSetPointer { created_at: Utc::now(), updated_at: Utc::now(), generator: Arc::new(Mutex::new(generator)), + base_change_set_id: None, workspace_snapshot_id: None, name: "".to_string(), }) } - pub async fn new(ctx: &DalContext, name: impl AsRef) -> ChangeSetPointerResult { + pub async fn new( + ctx: &DalContext, + name: impl AsRef, + base_change_set_id: Option, + ) -> ChangeSetPointerResult { let name = name.as_ref(); + dbg!(name, &base_change_set_id); + let row = ctx + .txns() + .await? + .pg() + .query_one( + "INSERT INTO change_set_pointers (name, base_change_set_id) VALUES ($1, $2) RETURNING *", + &[&name, &base_change_set_id], + ) + .await?; + Self::try_from(row) + } + + pub async fn new_head(ctx: &DalContext) -> ChangeSetPointerResult { + let name = "HEAD"; let row = ctx .txns() .await? 
.pg() .query_one( - "INSERT INTO change_set_pointers (name) VALUES ($1) RETURNING *", - &[&name], + "INSERT INTO change_set_pointers (id, name, base_change_set_id) VALUES ($1, $2, $3) RETURNING *", + &[&ChangeSetPointerId::NONE, &name, &None::], ) .await?; Self::try_from(row) @@ -123,17 +145,21 @@ impl ChangeSetPointer { pub async fn find( ctx: &DalContext, change_set_pointer_id: ChangeSetPointerId, - ) -> ChangeSetPointerResult { + ) -> ChangeSetPointerResult> { let row = ctx .txns() .await? .pg() - .query_one( + .query_opt( "SELECT * FROM change_set_pointers WHERE id = $1", &[&change_set_pointer_id], ) .await?; - Self::try_from(row) + + match row { + Some(row) => Ok(Some(Self::try_from(row)?)), + None => Ok(None), + } } } diff --git a/lib/dal/src/component.rs b/lib/dal/src/component.rs index 15acbcc035..0048f51f63 100644 --- a/lib/dal/src/component.rs +++ b/lib/dal/src/component.rs @@ -2,27 +2,28 @@ //! [`SchemaVariant`](crate::SchemaVariant) and a _model_ of a "real world resource". use chrono::{DateTime, Utc}; +use content_store::ContentHash; use serde::{Deserialize, Serialize}; use serde_json::Value; use si_data_nats::NatsError; use si_data_pg::PgError; +use strum::EnumDiscriminants; use strum::{AsRefStr, Display, EnumIter, EnumString}; use telemetry::prelude::*; use thiserror::Error; -use crate::attribute::context::AttributeContextBuilder; use crate::attribute::value::AttributeValue; -use crate::attribute::value::AttributeValueError; use crate::code_view::CodeViewError; use crate::func::binding::FuncBindingError; use crate::func::binding_return_value::{FuncBindingReturnValueError, FuncBindingReturnValueId}; use crate::job::definition::DependentValuesUpdate; use crate::schema::variant::root_prop::SiPropChild; -use crate::schema::variant::{SchemaVariantError, SchemaVariantId}; +use crate::schema::variant::SchemaVariantId; use crate::schema::SchemaVariant; -use crate::socket::{SocketEdgeKind, SocketError}; +use crate::socket::SocketEdgeKind; use 
crate::standard_model::object_from_row; use crate::validation::ValidationConstructorError; +use crate::workspace_snapshot::content_address::ContentAddress; use crate::ws_event::WsEventError; use crate::{ impl_standard_model, node::NodeId, pk, provider::internal::InternalProviderError, @@ -38,163 +39,30 @@ use crate::{ use crate::{AttributeValueId, QualificationError}; use crate::{Edge, FixResolverError, NodeKind}; -pub mod code; -pub mod diff; -pub mod qualification; -pub mod resource; -pub mod status; -pub mod validation; -pub mod view; +// pub mod code; +// pub mod diff; +// pub mod qualification; +// pub mod resource; +// pub mod status; +// pub mod validation; +// pub mod view; + +// pub use view::{ComponentView, ComponentViewError, ComponentViewProperties}; + +// const FIND_FOR_NODE: &str = include_str!("queries/component/find_for_node.sql"); +// const FIND_SI_CHILD_PROP_ATTRIBUTE_VALUE: &str = +// include_str!("queries/component/find_si_child_attribute_value.sql"); +// const LIST_FOR_SCHEMA_VARIANT: &str = include_str!("queries/component/list_for_schema_variant.sql"); +// const LIST_SOCKETS_FOR_SOCKET_EDGE_KIND: &str = +// include_str!("queries/component/list_sockets_for_socket_edge_kind.sql"); +// const FIND_NAME: &str = include_str!("queries/component/find_name.sql"); +// const ROOT_CHILD_ATTRIBUTE_VALUE_FOR_COMPONENT: &str = +// include_str!("queries/component/root_child_attribute_value_for_component.sql"); +// const LIST_CONNECTED_INPUT_SOCKETS_FOR_ATTRIBUTE_VALUE: &str = +// include_str!("queries/component/list_connected_input_sockets_for_attribute_value.sql"); +// const COMPONENT_STATUS_UPDATE_BY_PK: &str = +// include_str!("queries/component/status_update_by_pk.sql"); -pub use view::{ComponentView, ComponentViewError, ComponentViewProperties}; - -#[remain::sorted] -#[derive(Error, Debug)] -pub enum ComponentError { - #[error(transparent)] - ActionPrototype(#[from] ActionPrototypeError), - #[error("attribute context error: {0}")] - 
AttributeContext(#[from] AttributeContextError), - #[error("attribute context builder error: {0}")] - AttributeContextBuilder(#[from] AttributeContextBuilderError), - #[error(transparent)] - AttributePrototype(#[from] AttributePrototypeError), - /// Found an [`AttributePrototypeArgumentError`](crate::AttributePrototypeArgumentError). - #[error("attribute prototype argument error: {0}")] - AttributePrototypeArgument(#[from] AttributePrototypeArgumentError), - #[error("attribute value error: {0}")] - AttributeValue(#[from] AttributeValueError), - #[error("attribute value not found for context: {0:?}")] - AttributeValueNotFoundForContext(AttributeReadContext), - #[error("cannot update the resource tree when in a change set")] - CannotUpdateResourceTreeInChangeSet, - #[error(transparent)] - CodeView(#[from] CodeViewError), - #[error("component marked as protected: {0}")] - ComponentProtected(ComponentId), - /// No "protected" boolean was found for the appropriate - /// [`AttributeValue`](crate::AttributeValue) and [`Component`](crate::Component). In other - /// words, the value contained in the [`AttributeValue`](crate::AttributeValue) was "none". - #[error("component protection is none for component ({0}) and attribute value ({1}")] - ComponentProtectionIsNone(ComponentId, AttributeValueId), - /// No [`ComponentType`](crate::ComponentType) was found for the appropriate - /// [`AttributeValue`](crate::AttributeValue) and [`Component`](crate::Component). In other - /// words, the value contained in the [`AttributeValue`](crate::AttributeValue) was "none". - #[error("component type is none for component ({0}) and attribute value ({1})")] - ComponentTypeIsNone(ComponentId, AttributeValueId), - #[error(transparent)] - ComponentView(#[from] ComponentViewError), - #[error(transparent)] - ContextTransaction(#[from] TransactionsError), - #[error("edge error: {0}")] - Edge(#[from] EdgeError), - /// Found an [`ExternalProviderError`](crate::ExternalProviderError). 
- #[error("external provider error: {0}")] - ExternalProvider(#[from] ExternalProviderError), - #[error("fix error: {0}")] - Fix(#[from] Box), - #[error("fix not found for id: {0}")] - FixNotFound(FixId), - #[error("fix resolver error: {0}")] - FixResolver(#[from] FixResolverError), - #[error("found child attribute value of a map without a key: {0}")] - FoundMapEntryWithoutKey(AttributeValueId), - #[error("unable to delete frame due to attached components")] - FrameHasAttachedComponents, - #[error("func error: {0}")] - Func(#[from] FuncError), - #[error("func binding error: {0}")] - FuncBinding(#[from] FuncBindingError), - #[error(transparent)] - FuncBindingReturnValue(#[from] FuncBindingReturnValueError), - #[error("func binding return value: {0} not found")] - FuncBindingReturnValueNotFound(FuncBindingReturnValueId), - #[error("history event error: {0}")] - HistoryEvent(#[from] HistoryEventError), - /// No "protected" boolean was found for the appropriate - #[error("component({0}) can't be restored because it's inside a deleted frame ({1})")] - InsideDeletedFrame(ComponentId, ComponentId), - #[error("internal provider error: {0}")] - InternalProvider(#[from] InternalProviderError), - #[error("invalid context(s) provided for diff")] - InvalidContextForDiff, - #[error("invalid func backend kind (0:?) 
for checking validations (need validation kind)")] - InvalidFuncBackendKindForValidations(FuncBackendKind), - #[error("attribute value does not have a prototype: {0}")] - MissingAttributePrototype(AttributeValueId), - #[error("attribute prototype does not have a function: {0}")] - MissingAttributePrototypeFunction(AttributePrototypeId), - #[error("no func binding return value for leaf entry name: {0}")] - MissingFuncBindingReturnValueIdForLeafEntryName(String), - #[error("/root/si/name is unset for component {0}")] - NameIsUnset(ComponentId), - #[error("nats txn error: {0}")] - Nats(#[from] NatsError), - #[error("node error: {0}")] - NodeError(#[from] NodeError), - #[error("node not found for component: {0}")] - NodeNotFoundForComponent(ComponentId), - #[error("no schema for component {0}")] - NoSchema(ComponentId), - #[error("no schema variant for component {0}")] - NoSchemaVariant(ComponentId), - #[error("component not found: {0}")] - NotFound(ComponentId), - /// A parent [`AttributeValue`](crate::AttributeValue) was not found for the specified - /// [`AttributeValueId`](crate::AttributeValue). 
- #[error("parent attribute value not found for attribute value: {0}")] - ParentAttributeValueNotFound(AttributeValueId), - #[error("pg error: {0}")] - Pg(#[from] PgError), - #[error(transparent)] - PgPool(#[from] si_data_pg::PgPoolError), - #[error("prop error: {0}")] - Prop(#[from] PropError), - #[error("qualification error: {0}")] - Qualification(#[from] QualificationError), - #[error("qualification result for {0} on component {1} has no value")] - QualificationResultEmpty(String, ComponentId), - #[error("schema error: {0}")] - Schema(#[from] SchemaError), - #[error("schema variant error: {0}")] - SchemaVariant(#[from] SchemaVariantError), - #[error("schema variant has not been finalized at least once: {0}")] - SchemaVariantNotFinalized(SchemaVariantId), - #[error("error serializing/deserializing json: {0}")] - SerdeJson(#[from] serde_json::Error), - #[error("socket error: {0}")] - Socket(#[from] SocketError), - #[error("standard model error: {0}")] - StandardModelError(#[from] StandardModelError), - #[error("validation error: {0}")] - Validation(#[from] ValidationConstructorError), - #[error("validation prototype error: {0}")] - ValidationPrototype(#[from] ValidationPrototypeError), - #[error("validation resolver error: {0}")] - ValidationResolver(#[from] ValidationResolverError), - #[error("workspace error: {0}")] - Workspace(#[from] WorkspaceError), - #[error("ws event error: {0}")] - WsEvent(#[from] WsEventError), -} - -pub type ComponentResult = Result; - -const FIND_FOR_NODE: &str = include_str!("queries/component/find_for_node.sql"); -const FIND_SI_CHILD_PROP_ATTRIBUTE_VALUE: &str = - include_str!("queries/component/find_si_child_attribute_value.sql"); -const LIST_FOR_SCHEMA_VARIANT: &str = include_str!("queries/component/list_for_schema_variant.sql"); -const LIST_SOCKETS_FOR_SOCKET_EDGE_KIND: &str = - include_str!("queries/component/list_sockets_for_socket_edge_kind.sql"); -const FIND_NAME: &str = include_str!("queries/component/find_name.sql"); -const 
ROOT_CHILD_ATTRIBUTE_VALUE_FOR_COMPONENT: &str = - include_str!("queries/component/root_child_attribute_value_for_component.sql"); -const LIST_CONNECTED_INPUT_SOCKETS_FOR_ATTRIBUTE_VALUE: &str = - include_str!("queries/component/list_connected_input_sockets_for_attribute_value.sql"); -const COMPONENT_STATUS_UPDATE_BY_PK: &str = - include_str!("queries/component/status_update_by_pk.sql"); - -pk!(ComponentPk); pk!(ComponentId); #[remain::sorted] @@ -225,768 +93,783 @@ impl Default for ComponentKind { } /// A [`Component`] is an instantiation of a [`SchemaVariant`](crate::SchemaVariant). -/// -/// ## Updating "Fields" on a [`Component`] -/// -/// To learn more about updating a "field" on a [`Component`], please see the -/// [`AttributeValue module`](crate::attribute::value). #[derive(Deserialize, Serialize, Debug, Clone, PartialEq, Eq)] pub struct Component { - pk: ComponentPk, id: ComponentId, - kind: ComponentKind, - deletion_user_pk: Option, - needs_destroy: bool, - #[serde(flatten)] - tenancy: Tenancy, #[serde(flatten)] timestamp: Timestamp, - #[serde(flatten)] - visibility: Visibility, + kind: ComponentKind, + needs_destroy: bool, } -impl_standard_model! { - model: Component, - pk: ComponentPk, +#[derive(Debug, PartialEq)] +pub struct ComponentGraphNode { id: ComponentId, - table_name: "components", - history_event_label_base: "component", - history_event_message_name: "Component" + content_address: ContentAddress, + content: ComponentContentV1, } -impl Component { - /// The primary constructor method for creating [`Components`](Self). It returns a new - /// [`Component`] with a corresponding [`Node`](crate::Node). - /// - /// If you would like to use the default [`SchemaVariant`](crate::SchemaVariant) for - /// a [`Schema`](crate::Schema) rather than - /// a specific [`SchemaVariantId`](crate::SchemaVariant), use - /// [`Self::new_for_default_variant_from_schema()`]. 
- #[instrument(skip_all)] - pub async fn new( - ctx: &DalContext, - name: impl AsRef, - schema_variant_id: SchemaVariantId, - ) -> ComponentResult<(Self, Node)> { - let schema_variant = SchemaVariant::get_by_id(ctx, &schema_variant_id) - .await? - .ok_or(SchemaVariantError::NotFound(schema_variant_id))?; - - // Ensure components are not created unless the variant has been finalized at least once. - if !schema_variant.finalized_once() { - return Err(ComponentError::SchemaVariantNotFinalized(schema_variant_id)); - } - - let schema = schema_variant - .schema(ctx) - .await? - .ok_or(SchemaVariantError::MissingSchema(schema_variant_id))?; - let actor_user_pk = match ctx.history_actor() { - HistoryActor::User(user_pk) => Some(*user_pk), - _ => None, - }; - - let row = ctx - .txns() - .await? - .pg() - .query_one( - "SELECT object FROM component_create_v1($1, $2, $3, $4, $5)", - &[ - ctx.tenancy(), - ctx.visibility(), - &actor_user_pk, - &schema.component_kind().as_ref(), - schema_variant.id(), - ], - ) - .await?; - - let component: Component = standard_model::finish_create_from_row(ctx, row).await?; - - // Need to flesh out node so that the template data is also included in the node we - // persist. But it isn't, - our node is anemic. - let node = Node::new(ctx, &NodeKind::Configuration).await?; - node.set_component(ctx, component.id()).await?; - component.set_name(ctx, Some(name.as_ref())).await?; - - Ok((component, node)) - } - - /// A secondary constructor method that finds the default - /// [`SchemaVariant`](crate::SchemaVariant) for a given [`SchemaId`](crate::Schema). Once found, - /// the [`primary constructor method`](Self::new) is called. - pub async fn new_for_default_variant_from_schema( - ctx: &DalContext, - name: impl AsRef, - schema_id: SchemaId, - ) -> ComponentResult<(Self, Node)> { - let schema = Schema::get_by_id(ctx, &schema_id) - .await? 
- .ok_or(SchemaError::NotFound(schema_id))?; - - let schema_variant_id = schema - .default_schema_variant_id() - .ok_or(SchemaError::NoDefaultVariant(schema_id))?; - - Self::new(ctx, name, *schema_variant_id).await - } - - standard_model_accessor!(kind, Enum(ComponentKind), ComponentResult); - standard_model_accessor!(needs_destroy, bool, ComponentResult); - standard_model_accessor!(deletion_user_pk, Option, ComponentResult); - - standard_model_belongs_to!( - lookup_fn: schema, - set_fn: set_schema, - unset_fn: unset_schema, - table: "component_belongs_to_schema", - model_table: "schemas", - belongs_to_id: SchemaId, - returns: Schema, - result: ComponentResult, - ); - - standard_model_belongs_to!( - lookup_fn: schema_variant, - set_fn: set_schema_variant, - unset_fn: unset_schema_variant, - table: "component_belongs_to_schema_variant", - model_table: "schema_variants", - belongs_to_id: SchemaVariantId, - returns: SchemaVariant, - result: ComponentResult, - ); - - standard_model_has_many!( - lookup_fn: node, - table: "node_belongs_to_component", - model_table: "nodes", - returns: Node, - result: ComponentResult, - ); - - pub fn tenancy(&self) -> &Tenancy { - &self.tenancy - } - - /// List [`Sockets`](crate::Socket) with a given - /// [`SocketEdgeKind`](crate::socket::SocketEdgeKind). - #[instrument(skip_all)] - pub async fn list_sockets_for_kind( - ctx: &DalContext, - component_id: ComponentId, - socket_edge_kind: SocketEdgeKind, - ) -> ComponentResult> { - let rows = ctx - .txns() - .await? - .pg() - .query( - LIST_SOCKETS_FOR_SOCKET_EDGE_KIND, - &[ - ctx.tenancy(), - ctx.visibility(), - &component_id, - &(socket_edge_kind.to_string()), - ], - ) - .await?; - Ok(standard_model::objects_from_rows(rows)?) - } - - /// Find [`Self`] with a provided [`NodeId`](crate::Node). - #[instrument(skip_all)] - pub async fn find_for_node(ctx: &DalContext, node_id: NodeId) -> ComponentResult> { - let row = ctx - .txns() - .await? 
- .pg() - .query_opt(FIND_FOR_NODE, &[ctx.tenancy(), ctx.visibility(), &node_id]) - .await?; - Ok(standard_model::object_option_from_row_option(row)?) - } - - /// Find the [`AttributeValue`](crate::AttributeValue) whose - /// [`context`](crate::AttributeContext) corresponds to the following: - /// - /// - The [`PropId`](crate::Prop) corresponding to the child [`Prop`](crate::Prop) of "/root/si" - /// whose name matches the provided - /// [`SiPropChild`](crate::schema::variant::root_prop::SiPropChild) - /// - The [`ComponentId`](Self) matching the provided [`ComponentId`](Self). - /// - /// _Note:_ if the type has never been updated, this will find the _default_ - /// [`AttributeValue`](crate::AttributeValue) where the [`ComponentId`](Self) is unset. - #[instrument(skip_all)] - pub async fn find_si_child_attribute_value( - ctx: &DalContext, - component_id: ComponentId, - schema_variant_id: SchemaVariantId, - si_prop_child: SiPropChild, - ) -> ComponentResult { - let si_child_prop_name = si_prop_child.prop_name(); - let row = ctx - .txns() - .await? - .pg() - .query_one( - FIND_SI_CHILD_PROP_ATTRIBUTE_VALUE, - &[ - ctx.tenancy(), - ctx.visibility(), - &component_id, - &schema_variant_id, - &si_child_prop_name, - ], - ) - .await?; - Ok(object_from_row(row)?) - } - - #[instrument(skip_all)] - pub async fn is_in_tenancy(ctx: &DalContext, id: ComponentId) -> ComponentResult { - let row = ctx - .txns() - .await? - .pg() - .query_opt( - "SELECT id FROM components WHERE id = $1 AND in_tenancy_v1($2, components.tenancy_workspace_pk) LIMIT 1", - &[ - &id, - ctx.tenancy(), - ], - ) - .await?; - Ok(row.is_some()) - } - - #[instrument(skip_all)] - pub async fn list_for_schema_variant( - ctx: &DalContext, - schema_variant_id: SchemaVariantId, - ) -> ComponentResult> { - let rows = ctx - .txns() - .await? 
- .pg() - .query( - LIST_FOR_SCHEMA_VARIANT, - &[ctx.tenancy(), ctx.visibility(), &schema_variant_id], - ) - .await?; - - let mut results = Vec::new(); - for row in rows.into_iter() { - let json: serde_json::Value = row.try_get("object")?; - let object: Self = serde_json::from_value(json)?; - results.push(object); - } - - Ok(results) - } - - /// Sets the "/root/si/name" for [`self`](Self). - #[instrument(skip_all)] - pub async fn set_name( - &self, - ctx: &DalContext, - value: Option, - ) -> ComponentResult<()> { - let schema_variant_id = Self::schema_variant_id(ctx, self.id).await?; - let attribute_value = - Self::find_si_child_attribute_value(ctx, self.id, schema_variant_id, SiPropChild::Name) - .await?; - - // Before we set the name, ensure that another function is not setting the name (e.g. - // something different than "unset" or "setString"). - let attribute_prototype = attribute_value - .attribute_prototype(ctx) - .await? - .ok_or_else(|| ComponentError::MissingAttributePrototype(*attribute_value.id()))?; - let prototype_func = Func::get_by_id(ctx, &attribute_prototype.func_id()) - .await? - .ok_or_else(|| { - ComponentError::MissingAttributePrototypeFunction(*attribute_prototype.id()) - })?; - let name = prototype_func.name(); - if name != "si:unset" && name != "si:setString" { - return Ok(()); - } - - let attribute_context = AttributeContext::builder() - .set_component_id(self.id) - .set_prop_id(attribute_value.context.prop_id()) - .to_context()?; - - let json_value = match value.clone() { - Some(v) => Some(serde_json::to_value(v)?), - None => None, - }; - - let parent_attribute_value = attribute_value - .parent_attribute_value(ctx) - .await? 
- .ok_or_else(|| ComponentError::ParentAttributeValueNotFound(*attribute_value.id()))?; - let (_, _) = AttributeValue::update_for_context( - ctx, - *attribute_value.id(), - Some(*parent_attribute_value.id()), - attribute_context, - json_value, - None, - ) - .await?; - - Ok(()) - } - - #[instrument(skip_all)] - pub async fn set_deleted_at( - &self, - ctx: &DalContext, - value: Option>, - ) -> ComponentResult>> { - let json_value = match value { - Some(v) => Some(serde_json::to_value(v)?), - None => None, - }; - - let attribute_value = Self::root_prop_child_attribute_value_for_component( - ctx, - self.id, - RootPropChild::DeletedAt, - ) - .await?; - let parent_attribute_value = attribute_value - .parent_attribute_value(ctx) - .await? - .ok_or_else(|| ComponentError::ParentAttributeValueNotFound(*attribute_value.id()))?; - let attribute_context = AttributeContext::builder() - .set_component_id(self.id) - .set_prop_id(attribute_value.context.prop_id()) - .to_context()?; - let (_, _) = AttributeValue::update_for_context( - ctx, - *attribute_value.id(), - Some(*parent_attribute_value.id()), - attribute_context, - json_value, - None, - ) - .await?; - - Ok(value) - } - - /// Return the name of the [`Component`](Self) for the provided [`ComponentId`](Self). - #[instrument(skip_all)] - pub async fn find_name(ctx: &DalContext, component_id: ComponentId) -> ComponentResult { - let row = ctx - .txns() - .await? - .pg() - .query_one(FIND_NAME, &[ctx.tenancy(), ctx.visibility(), &component_id]) - .await?; - let component_name: Value = row.try_get("component_name")?; - let component_name: Option = serde_json::from_value(component_name)?; - let component_name = component_name.ok_or(ComponentError::NameIsUnset(component_id))?; - Ok(component_name) - } - - /// Calls [`Self::find_name()`] and provides the "id" off [`self`](Self). 
- pub async fn name(&self, ctx: &DalContext) -> ComponentResult { - Self::find_name(ctx, self.id).await - } - - /// Grabs the [`AttributeValue`](crate::AttributeValue) corresponding to the - /// [`RootPropChild`](crate::RootPropChild) [`Prop`](crate::Prop) for the given - /// [`Component`](Self). - #[instrument(skip_all)] - pub async fn root_prop_child_attribute_value_for_component( - ctx: &DalContext, - component_id: ComponentId, - root_prop_child: RootPropChild, - ) -> ComponentResult { - let row = ctx - .txns() - .await? - .pg() - .query_one( - ROOT_CHILD_ATTRIBUTE_VALUE_FOR_COMPONENT, - &[ - ctx.tenancy(), - ctx.visibility(), - &root_prop_child.as_str(), - &component_id, - ], - ) - .await?; - Ok(object_from_row(row)?) - } - - /// List the connected input [`Sockets`](crate::Socket) for a given [`ComponentId`](Self) and - /// [`AttributeValueId`](crate::AttributeValue) whose [`context`](crate::AttributeContext)'s - /// least specific field corresponding to a [`PropId`](crate::Prop). In other words, this is - /// the list of input [`Sockets`](crate::Socket) with incoming connections from other - /// [`Component(s)`](Self) that the given [`AttributeValue`](crate::AttributeValue) depends on. 
- /// - /// ```raw - /// ┌────────────────────────────┐ - /// │ This │ - /// │ Component │ - /// ┌───────────┐ │ ┌────────────────┐ │ - /// │ Another │ │ ┌───►│ AttributeValue │ │ - /// │ Component │ │ │ │ for Prop │ │ - /// │ │ │ │ └────────────────┘ │ - /// │ ┌────────┤ ├────┴─────────┐ │ - /// │ │ Output ├───────►│ Input │ │ - /// │ │ Socket │ │ Socket │ │ - /// │ │ │ │ (list these) │ │ - /// └──┴────────┘ └──────────────┴─────────────┘ - /// ``` - /// - /// _Warning: users of this query must ensure that the - /// [`AttributeValueId`](crate::AttributeValue) provided has a - /// [`context`](crate::AttributeContext) whose least specific field corresponds to a - /// [`PropId`](crate::Prop)._ - #[instrument(skip_all)] - pub async fn list_connected_input_sockets_for_attribute_value( - ctx: &DalContext, - attribute_value_id: AttributeValueId, - component_id: ComponentId, - ) -> ComponentResult> { - let rows = ctx - .txns() - .await? - .pg() - .query( - LIST_CONNECTED_INPUT_SOCKETS_FOR_ATTRIBUTE_VALUE, - &[ - ctx.tenancy(), - ctx.visibility(), - &attribute_value_id, - &component_id, - ], - ) - .await?; - Ok(standard_model::objects_from_rows(rows)?) - } - - /// Find the [`SchemaVariantId`](crate::SchemaVariantId) that belongs to the provided - /// [`Component`](crate::Component). - pub async fn schema_variant_id( - ctx: &DalContext, - component_id: ComponentId, - ) -> ComponentResult { - let row = ctx - .txns() - .await? - .pg() - .query_one( - "select belongs_to_id as schema_variant_id from - component_belongs_to_schema_variant_v1($1, $2) - where object_id = $3 - ", - &[ctx.tenancy(), ctx.visibility(), &component_id], - ) - .await?; - - Ok(row.try_get("schema_variant_id")?) - } - - /// Find the [`SchemaId`](crate::SchemaId) that belongs to the provided - /// [`Component`](crate::Component). - pub async fn schema_id( - ctx: &DalContext, - component_id: ComponentId, - ) -> ComponentResult { - let row = ctx - .txns() - .await? 
- .pg() - .query_one( - "select belongs_to_id as schema_id from - component_belongs_to_schema_v1($1, $2) - where object_id = $3 - ", - &[ctx.tenancy(), ctx.visibility(), &component_id], - ) - .await?; - - Ok(row.try_get("schema_id")?) - } - - /// Gets the [`ComponentType`](crate::ComponentType) of [`self`](Self). - /// - /// Mutate this with [`Self::set_type()`]. - pub async fn get_type(&self, ctx: &DalContext) -> ComponentResult { - let schema_variant_id = Self::schema_variant_id(ctx, self.id).await?; - let type_attribute_value = - Self::find_si_child_attribute_value(ctx, self.id, schema_variant_id, SiPropChild::Type) - .await?; - let raw_value = type_attribute_value.get_value(ctx).await?.ok_or_else(|| { - ComponentError::ComponentTypeIsNone(self.id, *type_attribute_value.id()) - })?; - let component_type: ComponentType = serde_json::from_value(raw_value)?; - Ok(component_type) - } - - /// Gets the protected attribute value of [`self`](Self). - pub async fn get_protected(&self, ctx: &DalContext) -> ComponentResult { - let schema_variant_id = Self::schema_variant_id(ctx, self.id).await?; - let protected_attribute_value = Self::find_si_child_attribute_value( - ctx, - self.id, - schema_variant_id, - SiPropChild::Protected, - ) - .await?; - let raw_value = protected_attribute_value - .get_value(ctx) - .await? - .ok_or_else(|| { - ComponentError::ComponentProtectionIsNone(self.id, *protected_attribute_value.id()) - })?; - let protected: bool = serde_json::from_value(raw_value)?; - Ok(protected) - } - - /// Sets the field corresponding to "/root/si/type" for the [`Component`]. Possible values - /// are limited to variants of [`ComponentType`](crate::ComponentType). 
- #[instrument(skip(ctx))] - pub async fn set_type( - &self, - ctx: &DalContext, - component_type: ComponentType, - ) -> ComponentResult<()> { - // anytime a component_type is changed to a Configuration Frame, - // we delete all current edges that were configured for the previously - // set component_type (aka AggregationFrame and Component) - // The 2 other component_types can retain their edges. - if let ComponentType::ConfigurationFrame = component_type { - let edges = Edge::list_for_component(ctx, self.id).await?; - for mut edge in edges { - edge.delete_and_propagate(ctx).await?; - } - } - - let schema_variant_id = Self::schema_variant_id(ctx, self.id).await?; - let type_attribute_value = - Self::find_si_child_attribute_value(ctx, self.id, schema_variant_id, SiPropChild::Type) - .await?; - - // If we are setting the type for the first time, we will need to mutate the context to - // be component-specific. This is because the attribute value will have an unset component - // id and we will need to deviate from the schema variant default component type. - let attribute_context = if type_attribute_value.context.is_component_unset() { - AttributeContextBuilder::from(type_attribute_value.context) - .set_component_id(self.id) - .to_context()? - } else { - type_attribute_value.context - }; - - let si_attribute_value = type_attribute_value - .parent_attribute_value(ctx) - .await? - .ok_or_else(|| { - ComponentError::ParentAttributeValueNotFound(*type_attribute_value.id()) - })?; - AttributeValue::update_for_context( - ctx, - *type_attribute_value.id(), - Some(*si_attribute_value.id()), - attribute_context, - Some(serde_json::to_value(component_type)?), - None, - ) - .await?; - - Ok(()) - } - - pub async fn delete_and_propagate(&mut self, ctx: &DalContext) -> ComponentResult<()> { - // Block deletion of frames with children - if self.get_type(ctx).await? 
!= ComponentType::Component { - let frame_edges = Edge::list_for_component(ctx, self.id).await?; - let frame_node = self - .node(ctx) - .await? - .pop() - .ok_or(ComponentError::NodeNotFoundForComponent(self.id))?; - let frame_socket = Socket::find_frame_socket_for_node( - ctx, - *frame_node.id(), - SocketEdgeKind::ConfigurationInput, - ) - .await?; - let connected_children = frame_edges - .into_iter() - .filter(|edge| edge.head_socket_id() == *frame_socket.id()) - .count(); - if connected_children > 0 { - return Err(ComponentError::FrameHasAttachedComponents); - } - } - - self.set_deleted_at(ctx, Some(Utc::now())).await?; - - if self.get_protected(ctx).await? { - return Err(ComponentError::ComponentProtected(self.id)); - } - - let actor_user_pk = match ctx.history_actor() { - HistoryActor::User(user_pk) => Some(*user_pk), - _ => None, - }; - - let has_resource = self.resource(ctx).await?.payload.is_some(); - let rows = ctx - .txns() - .await? - .pg() - .query( - "SELECT * FROM component_delete_and_propagate_v1($1, $2, $3, $4, $5)", - &[ - ctx.tenancy(), - ctx.visibility(), - self.id(), - &actor_user_pk, - &has_resource, - ], - ) - .await?; - let mut attr_values: Vec = standard_model::objects_from_rows(rows)?; - - for attr_value in attr_values.iter_mut() { - attr_value.update_from_prototype_function(ctx).await?; - } - - let ids = attr_values.iter().map(|av| *av.id()).collect(); - - ctx.enqueue_job(DependentValuesUpdate::new( - ctx.access_builder(), - *ctx.visibility(), - ids, - )) - .await?; - - Ok(()) - } - - pub async fn restore_and_propagate( - ctx: &DalContext, - component_id: ComponentId, - ) -> ComponentResult> { - // Check if component has deleted frame before restoring - let component = { - let ctx_with_deleted = &ctx.clone_with_delete_visibility(); - - let component = Self::get_by_id(ctx_with_deleted, &component_id) - .await? 
- .ok_or_else(|| ComponentError::NotFound(component_id))?; - - let sockets = Socket::list_for_component(ctx_with_deleted, component_id).await?; - - let maybe_socket_to_parent = sockets.iter().find(|socket| { - socket.name() == "Frame" - && *socket.edge_kind() == SocketEdgeKind::ConfigurationOutput - }); - - let edges_with_deleted = Edge::list(ctx_with_deleted).await?; - - let mut maybe_deleted_parent_id = None; - - if let Some(socket_to_parent) = maybe_socket_to_parent { - for edge in &edges_with_deleted { - if edge.tail_object_id() == (*component.id()).into() - && edge.tail_socket_id() == *socket_to_parent.id() - && (edge.visibility().deleted_at.is_some() && edge.deleted_implicitly()) - { - maybe_deleted_parent_id = Some(edge.head_object_id().into()); - break; - } - } - }; - - if let Some(parent_id) = maybe_deleted_parent_id { - let parent_comp = Self::get_by_id(ctx_with_deleted, &parent_id) - .await? - .ok_or_else(|| ComponentError::NotFound(parent_id))?; - - if parent_comp.visibility().deleted_at.is_some() { - return Err(ComponentError::InsideDeletedFrame(component_id, parent_id)); - } - } - - component - }; - - component.set_deleted_at(ctx, None).await?; - - let rows = ctx - .txns() - .await? - .pg() - .query( - "SELECT * FROM component_restore_and_propagate_v1($1, $2, $3)", - &[ctx.tenancy(), ctx.visibility(), &component_id], - ) - .await?; - let mut attr_values: Vec = standard_model::objects_from_rows(rows)?; - - for attr_value in &mut attr_values { - attr_value.update_from_prototype_function(ctx).await?; - } - - let ids = attr_values.iter().map(|av| *av.id()).collect(); - - ctx.enqueue_job(DependentValuesUpdate::new( - ctx.access_builder(), - *ctx.visibility(), - ids, - )) - .await?; - - Ok(Component::get_by_id(ctx, &component_id).await?) - } - - /// Finds the "color" that the [`Component`] should be in the [`Diagram`](crate::Diagram). 
- pub async fn color(&self, ctx: &DalContext) -> ComponentResult> { - let schema_variant_id = Self::schema_variant_id(ctx, self.id).await?; - let color_attribute_value = Component::find_si_child_attribute_value( - ctx, - self.id, - schema_variant_id, - SiPropChild::Color, - ) - .await?; - let color = color_attribute_value - .get_value(ctx) - .await? - .map(serde_json::from_value) - .transpose()?; - Ok(color) - } - - /// Check if the [`Component`] has been fully destroyed. - pub fn is_destroyed(&self) -> bool { - self.visibility.deleted_at.is_some() && !self.needs_destroy() - } +#[derive(EnumDiscriminants, Serialize, Deserialize, PartialEq)] +#[serde(tag = "version")] +pub enum ComponentContent { + V1(ComponentContentV1), } -#[derive(Clone, Deserialize, Serialize, Debug, PartialEq, Eq)] -#[serde(rename_all = "camelCase")] -pub struct ComponentCreatedPayload { - success: bool, +#[derive(Debug, Clone, Deserialize, Serialize, PartialEq)] +pub struct ComponentContentV1 { + #[serde(flatten)] + pub timestamp: Timestamp, + pub kind: ComponentKind, + pub needs_destroy: bool, } -impl WsEvent { - pub async fn component_created(ctx: &DalContext) -> WsEventResult { - WsEvent::new( - ctx, - WsPayload::ComponentCreated(ComponentCreatedPayload { success: true }), - ) - .await +impl ComponentGraphNode { + pub fn assemble( + id: impl Into, + content_hash: ContentHash, + content: ComponentContentV1, + ) -> Self { + Self { + id: id.into(), + content_address: ContentAddress::Component(content_hash), + content, + } } } + +// impl Component { +// /// The primary constructor method for creating [`Components`](Self). It returns a new +// /// [`Component`] with a corresponding [`Node`](crate::Node). +// /// +// /// If you would like to use the default [`SchemaVariant`](crate::SchemaVariant) for +// /// a [`Schema`](crate::Schema) rather than +// /// a specific [`SchemaVariantId`](crate::SchemaVariant), use +// /// [`Self::new_for_default_variant_from_schema()`]. 
+// #[instrument(skip_all)] +// pub async fn new( +// ctx: &DalContext, +// name: impl AsRef, +// schema_variant_id: SchemaVariantId, +// ) -> ComponentResult<(Self, Node)> { +// let schema_variant = SchemaVariant::get_by_id(ctx, &schema_variant_id) +// .await? +// .ok_or(SchemaVariantError::NotFound(schema_variant_id))?; + +// // Ensure components are not created unless the variant has been finalized at least once. +// if !schema_variant.finalized_once() { +// return Err(ComponentError::SchemaVariantNotFinalized(schema_variant_id)); +// } + +// let schema = schema_variant +// .schema(ctx) +// .await? +// .ok_or(SchemaVariantError::MissingSchema(schema_variant_id))?; +// let actor_user_pk = match ctx.history_actor() { +// HistoryActor::User(user_pk) => Some(*user_pk), +// _ => None, +// }; + +// let row = ctx +// .txns() +// .await? +// .pg() +// .query_one( +// "SELECT object FROM component_create_v1($1, $2, $3, $4, $5)", +// &[ +// ctx.tenancy(), +// ctx.visibility(), +// &actor_user_pk, +// &schema.component_kind().as_ref(), +// schema_variant.id(), +// ], +// ) +// .await?; + +// let component: Component = standard_model::finish_create_from_row(ctx, row).await?; + +// // Need to flesh out node so that the template data is also included in the node we +// // persist. But it isn't, - our node is anemic. +// let node = Node::new(ctx, &NodeKind::Configuration).await?; +// node.set_component(ctx, component.id()).await?; +// component.set_name(ctx, Some(name.as_ref())).await?; + +// Ok((component, node)) +// } + +// /// A secondary constructor method that finds the default +// /// [`SchemaVariant`](crate::SchemaVariant) for a given [`SchemaId`](crate::Schema). Once found, +// /// the [`primary constructor method`](Self::new) is called. +// pub async fn new_for_default_variant_from_schema( +// ctx: &DalContext, +// name: impl AsRef, +// schema_id: SchemaId, +// ) -> ComponentResult<(Self, Node)> { +// let schema = Schema::get_by_id(ctx, &schema_id) +// .await? 
+// .ok_or(SchemaError::NotFound(schema_id))?; + +// let schema_variant_id = schema +// .default_schema_variant_id() +// .ok_or(SchemaError::NoDefaultVariant(schema_id))?; + +// Self::new(ctx, name, *schema_variant_id).await +// } + +// standard_model_accessor!(kind, Enum(ComponentKind), ComponentResult); +// standard_model_accessor!(needs_destroy, bool, ComponentResult); +// standard_model_accessor!(deletion_user_pk, Option, ComponentResult); + +// standard_model_belongs_to!( +// lookup_fn: schema, +// set_fn: set_schema, +// unset_fn: unset_schema, +// table: "component_belongs_to_schema", +// model_table: "schemas", +// belongs_to_id: SchemaId, +// returns: Schema, +// result: ComponentResult, +// ); + +// standard_model_belongs_to!( +// lookup_fn: schema_variant, +// set_fn: set_schema_variant, +// unset_fn: unset_schema_variant, +// table: "component_belongs_to_schema_variant", +// model_table: "schema_variants", +// belongs_to_id: SchemaVariantId, +// returns: SchemaVariant, +// result: ComponentResult, +// ); + +// standard_model_has_many!( +// lookup_fn: node, +// table: "node_belongs_to_component", +// model_table: "nodes", +// returns: Node, +// result: ComponentResult, +// ); + +// pub fn tenancy(&self) -> &Tenancy { +// &self.tenancy +// } + +// /// List [`Sockets`](crate::Socket) with a given +// /// [`SocketEdgeKind`](crate::socket::SocketEdgeKind). +// #[instrument(skip_all)] +// pub async fn list_sockets_for_kind( +// ctx: &DalContext, +// component_id: ComponentId, +// socket_edge_kind: SocketEdgeKind, +// ) -> ComponentResult> { +// let rows = ctx +// .txns() +// .await? +// .pg() +// .query( +// LIST_SOCKETS_FOR_SOCKET_EDGE_KIND, +// &[ +// ctx.tenancy(), +// ctx.visibility(), +// &component_id, +// &(socket_edge_kind.to_string()), +// ], +// ) +// .await?; +// Ok(standard_model::objects_from_rows(rows)?) +// } + +// /// Find [`Self`] with a provided [`NodeId`](crate::Node). 
+// #[instrument(skip_all)] +// pub async fn find_for_node(ctx: &DalContext, node_id: NodeId) -> ComponentResult> { +// let row = ctx +// .txns() +// .await? +// .pg() +// .query_opt(FIND_FOR_NODE, &[ctx.tenancy(), ctx.visibility(), &node_id]) +// .await?; +// Ok(standard_model::object_option_from_row_option(row)?) +// } + +// /// Find the [`AttributeValue`](crate::AttributeValue) whose +// /// [`context`](crate::AttributeContext) corresponds to the following: +// /// +// /// - The [`PropId`](crate::Prop) corresponding to the child [`Prop`](crate::Prop) of "/root/si" +// /// whose name matches the provided +// /// [`SiPropChild`](crate::schema::variant::root_prop::SiPropChild) +// /// - The [`ComponentId`](Self) matching the provided [`ComponentId`](Self). +// /// +// /// _Note:_ if the type has never been updated, this will find the _default_ +// /// [`AttributeValue`](crate::AttributeValue) where the [`ComponentId`](Self) is unset. +// #[instrument(skip_all)] +// pub async fn find_si_child_attribute_value( +// ctx: &DalContext, +// component_id: ComponentId, +// schema_variant_id: SchemaVariantId, +// si_prop_child: SiPropChild, +// ) -> ComponentResult { +// let si_child_prop_name = si_prop_child.prop_name(); +// let row = ctx +// .txns() +// .await? +// .pg() +// .query_one( +// FIND_SI_CHILD_PROP_ATTRIBUTE_VALUE, +// &[ +// ctx.tenancy(), +// ctx.visibility(), +// &component_id, +// &schema_variant_id, +// &si_child_prop_name, +// ], +// ) +// .await?; +// Ok(object_from_row(row)?) +// } + +// #[instrument(skip_all)] +// pub async fn is_in_tenancy(ctx: &DalContext, id: ComponentId) -> ComponentResult { +// let row = ctx +// .txns() +// .await? 
+// .pg() +// .query_opt( +// "SELECT id FROM components WHERE id = $1 AND in_tenancy_v1($2, components.tenancy_workspace_pk) LIMIT 1", +// &[ +// &id, +// ctx.tenancy(), +// ], +// ) +// .await?; +// Ok(row.is_some()) +// } + +// #[instrument(skip_all)] +// pub async fn list_for_schema_variant( +// ctx: &DalContext, +// schema_variant_id: SchemaVariantId, +// ) -> ComponentResult> { +// let rows = ctx +// .txns() +// .await? +// .pg() +// .query( +// LIST_FOR_SCHEMA_VARIANT, +// &[ctx.tenancy(), ctx.visibility(), &schema_variant_id], +// ) +// .await?; + +// let mut results = Vec::new(); +// for row in rows.into_iter() { +// let json: serde_json::Value = row.try_get("object")?; +// let object: Self = serde_json::from_value(json)?; +// results.push(object); +// } + +// Ok(results) +// } + +// /// Sets the "/root/si/name" for [`self`](Self). +// #[instrument(skip_all)] +// pub async fn set_name( +// &self, +// ctx: &DalContext, +// value: Option, +// ) -> ComponentResult<()> { +// let schema_variant_id = Self::schema_variant_id(ctx, self.id).await?; +// let attribute_value = +// Self::find_si_child_attribute_value(ctx, self.id, schema_variant_id, SiPropChild::Name) +// .await?; + +// // Before we set the name, ensure that another function is not setting the name (e.g. +// // something different than "unset" or "setString"). +// let attribute_prototype = attribute_value +// .attribute_prototype(ctx) +// .await? +// .ok_or_else(|| ComponentError::MissingAttributePrototype(*attribute_value.id()))?; +// let prototype_func = Func::get_by_id(ctx, &attribute_prototype.func_id()) +// .await? 
+// .ok_or_else(|| { +// ComponentError::MissingAttributePrototypeFunction(*attribute_prototype.id()) +// })?; +// let name = prototype_func.name(); +// if name != "si:unset" && name != "si:setString" { +// return Ok(()); +// } + +// let attribute_context = AttributeContext::builder() +// .set_component_id(self.id) +// .set_prop_id(attribute_value.context.prop_id()) +// .to_context()?; + +// let json_value = match value.clone() { +// Some(v) => Some(serde_json::to_value(v)?), +// None => None, +// }; + +// let parent_attribute_value = attribute_value +// .parent_attribute_value(ctx) +// .await? +// .ok_or_else(|| ComponentError::ParentAttributeValueNotFound(*attribute_value.id()))?; +// let (_, _) = AttributeValue::update_for_context( +// ctx, +// *attribute_value.id(), +// Some(*parent_attribute_value.id()), +// attribute_context, +// json_value, +// None, +// ) +// .await?; + +// Ok(()) +// } + +// #[instrument(skip_all)] +// pub async fn set_deleted_at( +// &self, +// ctx: &DalContext, +// value: Option>, +// ) -> ComponentResult>> { +// let json_value = match value { +// Some(v) => Some(serde_json::to_value(v)?), +// None => None, +// }; + +// let attribute_value = Self::root_prop_child_attribute_value_for_component( +// ctx, +// self.id, +// RootPropChild::DeletedAt, +// ) +// .await?; +// let parent_attribute_value = attribute_value +// .parent_attribute_value(ctx) +// .await? +// .ok_or_else(|| ComponentError::ParentAttributeValueNotFound(*attribute_value.id()))?; +// let attribute_context = AttributeContext::builder() +// .set_component_id(self.id) +// .set_prop_id(attribute_value.context.prop_id()) +// .to_context()?; +// let (_, _) = AttributeValue::update_for_context( +// ctx, +// *attribute_value.id(), +// Some(*parent_attribute_value.id()), +// attribute_context, +// json_value, +// None, +// ) +// .await?; + +// Ok(value) +// } + +// /// Return the name of the [`Component`](Self) for the provided [`ComponentId`](Self). 
+// #[instrument(skip_all)] +// pub async fn find_name(ctx: &DalContext, component_id: ComponentId) -> ComponentResult { +// let row = ctx +// .txns() +// .await? +// .pg() +// .query_one(FIND_NAME, &[ctx.tenancy(), ctx.visibility(), &component_id]) +// .await?; +// let component_name: Value = row.try_get("component_name")?; +// let component_name: Option = serde_json::from_value(component_name)?; +// let component_name = component_name.ok_or(ComponentError::NameIsUnset(component_id))?; +// Ok(component_name) +// } + +// /// Calls [`Self::find_name()`] and provides the "id" off [`self`](Self). +// pub async fn name(&self, ctx: &DalContext) -> ComponentResult { +// Self::find_name(ctx, self.id).await +// } + +// /// Grabs the [`AttributeValue`](crate::AttributeValue) corresponding to the +// /// [`RootPropChild`](crate::RootPropChild) [`Prop`](crate::Prop) for the given +// /// [`Component`](Self). +// #[instrument(skip_all)] +// pub async fn root_prop_child_attribute_value_for_component( +// ctx: &DalContext, +// component_id: ComponentId, +// root_prop_child: RootPropChild, +// ) -> ComponentResult { +// let row = ctx +// .txns() +// .await? +// .pg() +// .query_one( +// ROOT_CHILD_ATTRIBUTE_VALUE_FOR_COMPONENT, +// &[ +// ctx.tenancy(), +// ctx.visibility(), +// &root_prop_child.as_str(), +// &component_id, +// ], +// ) +// .await?; +// Ok(object_from_row(row)?) +// } + +// /// List the connected input [`Sockets`](crate::Socket) for a given [`ComponentId`](Self) and +// /// [`AttributeValueId`](crate::AttributeValue) whose [`context`](crate::AttributeContext)'s +// /// least specific field corresponding to a [`PropId`](crate::Prop). In other words, this is +// /// the list of input [`Sockets`](crate::Socket) with incoming connections from other +// /// [`Component(s)`](Self) that the given [`AttributeValue`](crate::AttributeValue) depends on. 
+// /// +// /// ```raw +// /// ┌────────────────────────────┐ +// /// │ This │ +// /// │ Component │ +// /// ┌───────────┐ │ ┌────────────────┐ │ +// /// │ Another │ │ ┌───►│ AttributeValue │ │ +// /// │ Component │ │ │ │ for Prop │ │ +// /// │ │ │ │ └────────────────┘ │ +// /// │ ┌────────┤ ├────┴─────────┐ │ +// /// │ │ Output ├───────►│ Input │ │ +// /// │ │ Socket │ │ Socket │ │ +// /// │ │ │ │ (list these) │ │ +// /// └──┴────────┘ └──────────────┴─────────────┘ +// /// ``` +// /// +// /// _Warning: users of this query must ensure that the +// /// [`AttributeValueId`](crate::AttributeValue) provided has a +// /// [`context`](crate::AttributeContext) whose least specific field corresponds to a +// /// [`PropId`](crate::Prop)._ +// #[instrument(skip_all)] +// pub async fn list_connected_input_sockets_for_attribute_value( +// ctx: &DalContext, +// attribute_value_id: AttributeValueId, +// component_id: ComponentId, +// ) -> ComponentResult> { +// let rows = ctx +// .txns() +// .await? +// .pg() +// .query( +// LIST_CONNECTED_INPUT_SOCKETS_FOR_ATTRIBUTE_VALUE, +// &[ +// ctx.tenancy(), +// ctx.visibility(), +// &attribute_value_id, +// &component_id, +// ], +// ) +// .await?; +// Ok(standard_model::objects_from_rows(rows)?) +// } + +// /// Find the [`SchemaVariantId`](crate::SchemaVariantId) that belongs to the provided +// /// [`Component`](crate::Component). +// pub async fn schema_variant_id( +// ctx: &DalContext, +// component_id: ComponentId, +// ) -> ComponentResult { +// let row = ctx +// .txns() +// .await? +// .pg() +// .query_one( +// "select belongs_to_id as schema_variant_id from +// component_belongs_to_schema_variant_v1($1, $2) +// where object_id = $3 +// ", +// &[ctx.tenancy(), ctx.visibility(), &component_id], +// ) +// .await?; + +// Ok(row.try_get("schema_variant_id")?) +// } + +// /// Find the [`SchemaId`](crate::SchemaId) that belongs to the provided +// /// [`Component`](crate::Component). 
+// pub async fn schema_id( +// ctx: &DalContext, +// component_id: ComponentId, +// ) -> ComponentResult { +// let row = ctx +// .txns() +// .await? +// .pg() +// .query_one( +// "select belongs_to_id as schema_id from +// component_belongs_to_schema_v1($1, $2) +// where object_id = $3 +// ", +// &[ctx.tenancy(), ctx.visibility(), &component_id], +// ) +// .await?; + +// Ok(row.try_get("schema_id")?) +// } + +// /// Gets the [`ComponentType`](crate::ComponentType) of [`self`](Self). +// /// +// /// Mutate this with [`Self::set_type()`]. +// pub async fn get_type(&self, ctx: &DalContext) -> ComponentResult { +// let schema_variant_id = Self::schema_variant_id(ctx, self.id).await?; +// let type_attribute_value = +// Self::find_si_child_attribute_value(ctx, self.id, schema_variant_id, SiPropChild::Type) +// .await?; +// let raw_value = type_attribute_value.get_value(ctx).await?.ok_or_else(|| { +// ComponentError::ComponentTypeIsNone(self.id, *type_attribute_value.id()) +// })?; +// let component_type: ComponentType = serde_json::from_value(raw_value)?; +// Ok(component_type) +// } + +// /// Gets the protected attribute value of [`self`](Self). +// pub async fn get_protected(&self, ctx: &DalContext) -> ComponentResult { +// let schema_variant_id = Self::schema_variant_id(ctx, self.id).await?; +// let protected_attribute_value = Self::find_si_child_attribute_value( +// ctx, +// self.id, +// schema_variant_id, +// SiPropChild::Protected, +// ) +// .await?; +// let raw_value = protected_attribute_value +// .get_value(ctx) +// .await? +// .ok_or_else(|| { +// ComponentError::ComponentProtectionIsNone(self.id, *protected_attribute_value.id()) +// })?; +// let protected: bool = serde_json::from_value(raw_value)?; +// Ok(protected) +// } + +// /// Sets the field corresponding to "/root/si/type" for the [`Component`]. Possible values +// /// are limited to variants of [`ComponentType`](crate::ComponentType). 
+// #[instrument(skip(ctx))] +// pub async fn set_type( +// &self, +// ctx: &DalContext, +// component_type: ComponentType, +// ) -> ComponentResult<()> { +// // anytime a component_type is changed to a Configuration Frame, +// // we delete all current edges that were configured for the previously +// // set component_type (aka AggregationFrame and Component) +// // The 2 other component_types can retain their edges. +// if let ComponentType::ConfigurationFrame = component_type { +// let edges = Edge::list_for_component(ctx, self.id).await?; +// for mut edge in edges { +// edge.delete_and_propagate(ctx).await?; +// } +// } + +// let schema_variant_id = Self::schema_variant_id(ctx, self.id).await?; +// let type_attribute_value = +// Self::find_si_child_attribute_value(ctx, self.id, schema_variant_id, SiPropChild::Type) +// .await?; + +// // If we are setting the type for the first time, we will need to mutate the context to +// // be component-specific. This is because the attribute value will have an unset component +// // id and we will need to deviate from the schema variant default component type. +// let attribute_context = if type_attribute_value.context.is_component_unset() { +// AttributeContextBuilder::from(type_attribute_value.context) +// .set_component_id(self.id) +// .to_context()? +// } else { +// type_attribute_value.context +// }; + +// let si_attribute_value = type_attribute_value +// .parent_attribute_value(ctx) +// .await? +// .ok_or_else(|| { +// ComponentError::ParentAttributeValueNotFound(*type_attribute_value.id()) +// })?; +// AttributeValue::update_for_context( +// ctx, +// *type_attribute_value.id(), +// Some(*si_attribute_value.id()), +// attribute_context, +// Some(serde_json::to_value(component_type)?), +// None, +// ) +// .await?; + +// Ok(()) +// } + +// pub async fn delete_and_propagate(&mut self, ctx: &DalContext) -> ComponentResult<()> { +// // Block deletion of frames with children +// if self.get_type(ctx).await? 
!= ComponentType::Component { +// let frame_edges = Edge::list_for_component(ctx, self.id).await?; +// let frame_node = self +// .node(ctx) +// .await? +// .pop() +// .ok_or(ComponentError::NodeNotFoundForComponent(self.id))?; +// let frame_socket = Socket::find_frame_socket_for_node( +// ctx, +// *frame_node.id(), +// SocketEdgeKind::ConfigurationInput, +// ) +// .await?; +// let connected_children = frame_edges +// .into_iter() +// .filter(|edge| edge.head_socket_id() == *frame_socket.id()) +// .count(); +// if connected_children > 0 { +// return Err(ComponentError::FrameHasAttachedComponents); +// } +// } + +// self.set_deleted_at(ctx, Some(Utc::now())).await?; + +// if self.get_protected(ctx).await? { +// return Err(ComponentError::ComponentProtected(self.id)); +// } + +// let actor_user_pk = match ctx.history_actor() { +// HistoryActor::User(user_pk) => Some(*user_pk), +// _ => None, +// }; + +// let has_resource = self.resource(ctx).await?.payload.is_some(); +// let rows = ctx +// .txns() +// .await? +// .pg() +// .query( +// "SELECT * FROM component_delete_and_propagate_v1($1, $2, $3, $4, $5)", +// &[ +// ctx.tenancy(), +// ctx.visibility(), +// self.id(), +// &actor_user_pk, +// &has_resource, +// ], +// ) +// .await?; +// let mut attr_values: Vec = standard_model::objects_from_rows(rows)?; + +// for attr_value in attr_values.iter_mut() { +// attr_value.update_from_prototype_function(ctx).await?; +// } + +// let ids = attr_values.iter().map(|av| *av.id()).collect(); + +// ctx.enqueue_job(DependentValuesUpdate::new( +// ctx.access_builder(), +// *ctx.visibility(), +// ids, +// )) +// .await?; + +// Ok(()) +// } + +// pub async fn restore_and_propagate( +// ctx: &DalContext, +// component_id: ComponentId, +// ) -> ComponentResult> { +// // Check if component has deleted frame before restoring +// let component = { +// let ctx_with_deleted = &ctx.clone_with_delete_visibility(); + +// let component = Self::get_by_id(ctx_with_deleted, &component_id) +// .await? 
+// .ok_or_else(|| ComponentError::NotFound(component_id))?; + +// let sockets = Socket::list_for_component(ctx_with_deleted, component_id).await?; + +// let maybe_socket_to_parent = sockets.iter().find(|socket| { +// socket.name() == "Frame" +// && *socket.edge_kind() == SocketEdgeKind::ConfigurationOutput +// }); + +// let edges_with_deleted = Edge::list(ctx_with_deleted).await?; + +// let mut maybe_deleted_parent_id = None; + +// if let Some(socket_to_parent) = maybe_socket_to_parent { +// for edge in &edges_with_deleted { +// if edge.tail_object_id() == (*component.id()).into() +// && edge.tail_socket_id() == *socket_to_parent.id() +// && (edge.visibility().deleted_at.is_some() && edge.deleted_implicitly()) +// { +// maybe_deleted_parent_id = Some(edge.head_object_id().into()); +// break; +// } +// } +// }; + +// if let Some(parent_id) = maybe_deleted_parent_id { +// let parent_comp = Self::get_by_id(ctx_with_deleted, &parent_id) +// .await? +// .ok_or_else(|| ComponentError::NotFound(parent_id))?; + +// if parent_comp.visibility().deleted_at.is_some() { +// return Err(ComponentError::InsideDeletedFrame(component_id, parent_id)); +// } +// } + +// component +// }; + +// component.set_deleted_at(ctx, None).await?; + +// let rows = ctx +// .txns() +// .await? +// .pg() +// .query( +// "SELECT * FROM component_restore_and_propagate_v1($1, $2, $3)", +// &[ctx.tenancy(), ctx.visibility(), &component_id], +// ) +// .await?; +// let mut attr_values: Vec = standard_model::objects_from_rows(rows)?; + +// for attr_value in &mut attr_values { +// attr_value.update_from_prototype_function(ctx).await?; +// } + +// let ids = attr_values.iter().map(|av| *av.id()).collect(); + +// ctx.enqueue_job(DependentValuesUpdate::new( +// ctx.access_builder(), +// *ctx.visibility(), +// ids, +// )) +// .await?; + +// Ok(Component::get_by_id(ctx, &component_id).await?) +// } + +// /// Finds the "color" that the [`Component`] should be in the [`Diagram`](crate::Diagram). 
+// pub async fn color(&self, ctx: &DalContext) -> ComponentResult> { +// let schema_variant_id = Self::schema_variant_id(ctx, self.id).await?; +// let color_attribute_value = Component::find_si_child_attribute_value( +// ctx, +// self.id, +// schema_variant_id, +// SiPropChild::Color, +// ) +// .await?; +// let color = color_attribute_value +// .get_value(ctx) +// .await? +// .map(serde_json::from_value) +// .transpose()?; +// Ok(color) +// } + +// /// Check if the [`Component`] has been fully destroyed. +// pub fn is_destroyed(&self) -> bool { +// self.visibility.deleted_at.is_some() && !self.needs_destroy() +// } +// } + +// #[derive(Clone, Deserialize, Serialize, Debug, PartialEq, Eq)] +// #[serde(rename_all = "camelCase")] +// pub struct ComponentCreatedPayload { +// success: bool, +// } + +// impl WsEvent { +// pub async fn component_created(ctx: &DalContext) -> WsEventResult { +// WsEvent::new( +// ctx, +// WsPayload::ComponentCreated(ComponentCreatedPayload { success: true }), +// ) +// .await +// } +// } diff --git a/lib/dal/src/component/code.rs b/lib/dal/src/component/code.rs index b578c8a6b4..1b044e48fa 100644 --- a/lib/dal/src/component/code.rs +++ b/lib/dal/src/component/code.rs @@ -4,11 +4,9 @@ use std::collections::{HashMap, HashSet}; use telemetry::prelude::*; use crate::attribute::value::AttributeValue; -use crate::attribute::value::AttributeValueError; -use crate::component::ComponentResult; use crate::{ - AttributeReadContext, AttributeValueId, CodeLanguage, CodeView, ComponentError, ComponentId, - DalContext, StandardModel, WsEvent, WsPayload, + AttributeValueId, CodeLanguage, CodeView, ComponentId, DalContext, StandardModel, WsEvent, + WsPayload, }; use crate::{Component, SchemaVariant}; use crate::{RootPropChild, WsEventResult}; diff --git a/lib/dal/src/component/resource.rs b/lib/dal/src/component/resource.rs index b23c6ff7fa..86f3e5abd3 100644 --- a/lib/dal/src/component/resource.rs +++ b/lib/dal/src/component/resource.rs @@ -5,16 +5,13 @@ 
use serde_json::Value; use std::collections::HashMap; use veritech_client::ResourceStatus; -use crate::attribute::context::AttributeContextBuilder; use crate::attribute::value::AttributeValue; -use crate::attribute::value::AttributeValueError; -use crate::component::ComponentResult; use crate::func::binding_return_value::FuncBindingReturnValue; use crate::ws_event::WsEvent; use crate::{ func::backend::js_action::ActionRunResult, ActionKind, ActionPrototype, ActionPrototypeContext, - AttributeReadContext, Component, ComponentError, ComponentId, DalContext, SchemaVariant, - StandardModel, WsPayload, + AttributeReadContext, Component, ComponentId, DalContext, SchemaVariant, StandardModel, + WsPayload, }; use crate::{RootPropChild, WsEventResult}; diff --git a/lib/dal/src/component/status.rs b/lib/dal/src/component/status.rs index bb81d2b058..e0bb3ec83c 100644 --- a/lib/dal/src/component/status.rs +++ b/lib/dal/src/component/status.rs @@ -2,7 +2,6 @@ use chrono::DateTime; use chrono::Utc; use serde::{Deserialize, Serialize}; -use crate::component::{ComponentResult, COMPONENT_STATUS_UPDATE_BY_PK}; use crate::standard_model::TypeHint; use crate::{ impl_standard_model, pk, standard_model, ComponentId, DalContext, HistoryActor, diff --git a/lib/dal/src/component/view/debug.rs b/lib/dal/src/component/view/debug.rs index 2890f9717d..eee929c550 100644 --- a/lib/dal/src/component/view/debug.rs +++ b/lib/dal/src/component/view/debug.rs @@ -5,14 +5,12 @@ use thiserror::Error; use crate::{ func::execution::{FuncExecution, FuncExecutionError}, - socket::{SocketEdgeKind, SocketError}, - AttributePrototype, AttributeReadContext, AttributeValue, AttributeValueError, - AttributeValueId, AttributeValuePayload, Component, ComponentError, ComponentId, DalContext, - ExternalProvider, ExternalProviderId, Func, FuncBinding, FuncBindingError, - FuncBindingReturnValue, FuncBindingReturnValueError, FuncError, InternalProvider, - InternalProviderError, InternalProviderId, Prop, PropError, 
PropId, PropKind, - SchemaVariantError, SchemaVariantId, SecretError, SecretId, Socket, SocketId, StandardModel, - StandardModelError, + socket::SocketEdgeKind, + AttributePrototype, AttributeValue, AttributeValueId, AttributeValuePayload, Component, + ComponentId, DalContext, ExternalProvider, ExternalProviderId, Func, FuncBinding, + FuncBindingError, FuncBindingReturnValue, FuncBindingReturnValueError, InternalProvider, + InternalProviderId, Prop, PropId, PropKind, SchemaVariantId, SecretError, SecretId, Socket, + SocketId, StandardModel, StandardModelError, }; type ComponentDebugViewResult = Result; diff --git a/lib/dal/src/context.rs b/lib/dal/src/context.rs index d880e33c7d..79e485a71d 100644 --- a/lib/dal/src/context.rs +++ b/lib/dal/src/context.rs @@ -9,9 +9,11 @@ use si_data_pg::{InstrumentedClient, PgError, PgPool, PgPoolError, PgPoolResult, use telemetry::prelude::*; use thiserror::Error; use tokio::sync::{MappedMutexGuard, Mutex, MutexGuard}; +use ulid::Ulid; use veritech_client::{Client as VeritechClient, CycloneEncryptionKey}; use crate::{ + change_set_pointer::ChangeSetPointerId, job::{ processor::{JobQueueProcessor, JobQueueProcessorError}, producer::{BlockingJobError, BlockingJobResult, JobProducer}, @@ -518,18 +520,24 @@ impl DalContext { /// Needed to remove universal tenancy while packages aren't a thing #[instrument(skip_all)] pub async fn import_builtins(&self) -> Result<(), TransactionsError> { - let source_workspace_pk = WorkspacePk::NONE; - self.txns() - .await? - .pg() - .execute( - "SELECT import_builtins_v1($1, $2)", - &[self.tenancy(), &source_workspace_pk], - ) - .await?; + // TODO(nick,zack,jacob): restore the ability to "import builtins" via the graph work. + // let source_workspace_pk = WorkspacePk::NONE; + // self.txns() + // .await? 
+ // .pg() + // .execute( + // "SELECT import_builtins_v1($1, $2)", + // &[self.tenancy(), &source_workspace_pk], + // ) + // .await?; Ok(()) } + // NOTE(nick,zack,jacob): likely a temporary func to get the change set id from the visibility. + pub fn change_set_id(&self) -> ChangeSetPointerId { + ChangeSetPointerId::from(Ulid::from(self.visibility.change_set_pk)) + } + pub fn access_builder(&self) -> AccessBuilder { AccessBuilder::new(self.tenancy, self.history_actor) } diff --git a/lib/dal/src/diagram.rs b/lib/dal/src/diagram.rs index 5678eafb88..562700dba5 100644 --- a/lib/dal/src/diagram.rs +++ b/lib/dal/src/diagram.rs @@ -9,17 +9,10 @@ use crate::change_status::{ ChangeStatus, ChangeStatusError, ComponentChangeStatus, EdgeChangeStatus, }; use crate::diagram::connection::{Connection, DiagramEdgeView}; -use crate::diagram::node::{DiagramComponentView, SocketDirection, SocketView}; use crate::edge::EdgeKind; -use crate::provider::external::ExternalProviderError; -use crate::provider::internal::InternalProviderError; -use crate::schema::variant::SchemaVariantError; -use crate::socket::SocketError; use crate::{ - ActionPrototypeError, AttributeContextBuilderError, AttributePrototypeArgumentError, - AttributeValueError, ChangeSetPk, ComponentError, ComponentId, DalContext, Edge, EdgeError, - Node, NodeError, NodeId, NodeKind, PropError, SchemaError, SocketId, StandardModel, - StandardModelError, + ChangeSetPk, ComponentId, DalContext, Edge, EdgeError, Node, NodeId, NodeKind, SocketId, + StandardModel, StandardModelError, }; pub mod connection; @@ -28,22 +21,12 @@ pub mod node; #[remain::sorted] #[derive(Error, Debug)] pub enum DiagramError { - #[error("action prototype: {0}")] - ActionPrototype(#[from] ActionPrototypeError), - #[error("attribute context error: {0}")] - AttributeContextBuilder(#[from] AttributeContextBuilderError), - #[error("attribute prototype argument error: {0}")] - AttributePrototypeArgument(#[from] AttributePrototypeArgumentError), 
#[error("attribute prototype not found")] AttributePrototypeNotFound, - #[error("attribute value error: {0}")] - AttributeValue(#[from] AttributeValueError), #[error("attribute value not found")] AttributeValueNotFound, #[error("change status error: {0}")] ChangeStatus(#[from] ChangeStatusError), - #[error("component error: {0}")] - Component(#[from] ComponentError), #[error("component not found")] ComponentNotFound, #[error("component status not found for component: {0}")] @@ -54,16 +37,10 @@ pub enum DiagramError { Edge(#[from] EdgeError), #[error("edge not found")] EdgeNotFound, - #[error("external provider error: {0}")] - ExternalProvider(#[from] ExternalProviderError), #[error("external provider not found for socket id: {0}")] ExternalProviderNotFoundForSocket(SocketId), - #[error("internal provider error: {0}")] - InternalProvider(#[from] InternalProviderError), #[error("internal provider not found for socket id: {0}")] InternalProviderNotFoundForSocket(SocketId), - #[error("node error: {0}")] - Node(#[from] NodeError), #[error("node not found")] NodeNotFound, #[error("no node positions found for node ({0}) and kind ({1})")] @@ -76,18 +53,10 @@ pub enum DiagramError { Pg(#[from] PgError), #[error("position not found")] PositionNotFound, - #[error("prop error: {0}")] - Prop(#[from] PropError), - #[error("schema error: {0}")] - Schema(#[from] SchemaError), #[error("schema not found")] SchemaNotFound, - #[error(transparent)] - SchemaVariant(#[from] SchemaVariantError), #[error("schema variant not found")] SchemaVariantNotFound, - #[error("socket error: {0}")] - Socket(#[from] SocketError), #[error("socket not found")] SocketNotFound, #[error("standard model error: {0}")] diff --git a/lib/dal/src/diagram/node.rs b/lib/dal/src/diagram/node.rs index 3abfafa9ce..276ce47eb8 100644 --- a/lib/dal/src/diagram/node.rs +++ b/lib/dal/src/diagram/node.rs @@ -1,330 +1,329 @@ -use chrono::{DateTime, Utc}; -use serde::{Deserialize, Serialize}; -use strum::{AsRefStr, Display, 
EnumIter, EnumString}; - -use crate::change_status::ChangeStatus; -use crate::diagram::DiagramResult; -use crate::schema::SchemaUiMenu; -use crate::socket::{SocketArity, SocketEdgeKind}; -use crate::{ - history_event, ActionKind, ActionPrototype, ActionPrototypeContext, ActionPrototypeView, - ActorView, Component, ComponentId, ComponentStatus, ComponentType, DalContext, DiagramError, - HistoryActorTimestamp, Node, NodeId, ResourceView, SchemaVariant, StandardModel, -}; - -#[remain::sorted] -#[derive( - AsRefStr, - Clone, - Copy, - Debug, - Deserialize, - Display, - EnumIter, - EnumString, - Eq, - PartialEq, - Serialize, -)] -#[serde(rename_all = "camelCase")] -#[strum(serialize_all = "camelCase")] -pub enum SocketDirection { - Bidirectional, - Input, - Output, -} - -#[remain::sorted] -#[derive( - AsRefStr, - Clone, - Copy, - Debug, - Deserialize, - Display, - EnumIter, - EnumString, - Eq, - PartialEq, - Serialize, -)] -#[serde(rename_all = "camelCase")] -#[strum(serialize_all = "camelCase")] -pub enum NodeSide { - Left, - Right, -} - -#[derive(Deserialize, Serialize, Debug, Clone, PartialEq, Eq)] -#[serde(rename_all = "camelCase")] -pub struct SocketView { - pub id: String, - pub label: String, - #[serde(rename = "type")] - pub ty: String, - pub direction: SocketDirection, - pub max_connections: Option, - pub is_required: Option, - pub node_side: NodeSide, -} - -impl SocketView { - pub async fn list( - ctx: &DalContext, - schema_variant: &SchemaVariant, - ) -> DiagramResult> { - Ok(schema_variant - .sockets(ctx) - .await? 
- .into_iter() - .filter_map(|socket| { - (!socket.ui_hidden()).then(|| Self { - id: socket.id().to_string(), - label: socket.human_name().unwrap_or(socket.name()).to_owned(), - ty: socket.name().to_owned(), - // Note: it's not clear if this mapping is correct, and there is no backend support for bidirectional sockets for now - direction: match socket.edge_kind() { - SocketEdgeKind::ConfigurationOutput => SocketDirection::Output, - _ => SocketDirection::Input, - }, - max_connections: match socket.arity() { - SocketArity::Many => None, - SocketArity::One => Some(1), - }, - is_required: Some(socket.required()), - node_side: match socket.edge_kind() { - SocketEdgeKind::ConfigurationOutput => NodeSide::Right, - _ => NodeSide::Left, - }, - }) - }) - .collect()) - } -} - -#[derive(Deserialize, Serialize, Debug, Clone, PartialEq, Eq)] -#[serde(rename_all = "camelCase")] -pub struct GridPoint { - x: isize, - y: isize, -} - -impl GridPoint { - pub fn x(&self) -> isize { - self.x - } - - pub fn y(&self) -> isize { - self.y - } -} - -#[derive(Deserialize, Serialize, Debug, Clone, PartialEq, Eq)] -#[serde(rename_all = "camelCase")] -pub struct Size2D { - width: isize, - height: isize, -} - -impl Size2D { - pub fn width(&self) -> isize { - self.width - } - pub fn height(&self) -> isize { - self.height - } -} - -#[derive(Deserialize, Serialize, Debug, Clone, PartialEq, Eq)] -#[serde(rename_all = "camelCase")] -pub struct DiagramComponentView { - id: ComponentId, - node_id: NodeId, - display_name: Option, - - parent_node_id: Option, - child_node_ids: Vec, - - schema_name: String, - schema_id: String, - schema_variant_id: String, - schema_variant_name: String, - schema_category: Option, - - actions: Vec, - - sockets: Option>, - position: GridPoint, - size: Option, - color: Option, - node_type: ComponentType, - change_status: ChangeStatus, - resource: ResourceView, - - created_info: HistoryEventMetadata, - updated_info: HistoryEventMetadata, - - deleted_info: Option, -} - -impl 
DiagramComponentView { - #[allow(clippy::too_many_arguments)] - pub async fn new( - ctx: &DalContext, - component: &Component, - node: &Node, - parent_node_id: Option, - child_node_ids: Vec, - is_modified: bool, - schema_variant: &SchemaVariant, - ) -> DiagramResult { - let schema = schema_variant - .schema(ctx) - .await? - .ok_or(DiagramError::SchemaNotFound)?; - - let schema_category = SchemaUiMenu::find_for_schema(ctx, *schema.id()) - .await? - .map(|um| um.category().to_string()); - - let size = if let (Some(w), Some(h)) = (node.width(), node.height()) { - Some(Size2D { - height: h.parse()?, - width: w.parse()?, - }) - } else { - None - }; - - let x = node.x().parse::()?; - let y = node.y().parse::()?; - - // Change status should track the component, not the node, since node position is on the - // node and the node will change if it is moved - let change_status = if component.visibility().deleted_at.is_some() { - ChangeStatus::Deleted - } else if !component.exists_in_head(ctx).await? { - ChangeStatus::Added - } else if is_modified { - ChangeStatus::Modified - } else { - ChangeStatus::Unmodified - }; - - let component_status = ComponentStatus::get_by_id(ctx, component.id()) - .await? 
- .ok_or_else(|| DiagramError::ComponentStatusNotFound(*component.id()))?; - - let created_info = - HistoryEventMetadata::from_history_actor_timestamp(ctx, component_status.creation()) - .await?; - let updated_info = - HistoryEventMetadata::from_history_actor_timestamp(ctx, component_status.update()) - .await?; - - let mut deleted_info: Option = None; - { - if let Some(deleted_at) = ctx.visibility().deleted_at { - if let Some(deletion_user_pk) = component.deletion_user_pk() { - let history_actor = history_event::HistoryActor::User(*deletion_user_pk); - let actor = ActorView::from_history_actor(ctx, history_actor).await?; - - deleted_info = Some(HistoryEventMetadata { - actor, - timestamp: deleted_at, - }); - } - } - } - - // TODO(theo): probably dont want to fetch this here and load totally separately, but we inherited from existing endpoints - let resource = ResourceView::new(component.resource(ctx).await?); - - let action_prototypes = ActionPrototype::find_for_context( - ctx, - ActionPrototypeContext { - schema_variant_id: *schema_variant.id(), - }, - ) - .await?; - let mut action_views: Vec = Vec::new(); - for action_prototype in action_prototypes { - if *action_prototype.kind() == ActionKind::Refresh { - continue; - } - - let view = ActionPrototypeView::new(ctx, action_prototype).await?; - action_views.push(view); - } - - Ok(Self { - id: *component.id(), - node_id: *node.id(), - parent_node_id, - child_node_ids, - display_name: Some(component.name(ctx).await?), - schema_name: schema.name().to_owned(), - schema_variant_name: schema_variant.name().to_owned(), - schema_id: schema.id().to_string(), - schema_variant_id: schema_variant.id().to_string(), - schema_category, - sockets: Some(SocketView::list(ctx, schema_variant).await?), - position: GridPoint { - x: x.round() as isize, - y: y.round() as isize, - }, - size, - color: component.color(ctx).await?, - node_type: component.get_type(ctx).await?, - change_status, - resource, - actions: action_views, - 
created_info, - updated_info, - deleted_info, - }) - } - - pub fn id(&self) -> ComponentId { - self.id - } - - pub fn node_id(&self) -> NodeId { - self.node_id - } - - pub fn position(&self) -> &GridPoint { - &self.position - } - - pub fn size(&self) -> &Option { - &self.size - } - - pub fn resource(&self) -> &ResourceView { - &self.resource - } -} - -// TODO(theo,victor): this should probably move and be used more generally in a few places? - -#[derive(Deserialize, Serialize, Debug, Clone, PartialEq, Eq)] -#[serde(rename_all = "camelCase")] -pub struct HistoryEventMetadata { - pub actor: ActorView, - pub timestamp: DateTime, -} - -impl HistoryEventMetadata { - pub async fn from_history_actor_timestamp( - ctx: &DalContext, - value: HistoryActorTimestamp, - ) -> DiagramResult { - let actor = ActorView::from_history_actor(ctx, value.actor).await?; - - Ok(Self { - actor, - timestamp: value.timestamp, - }) - } -} +// use chrono::{DateTime, Utc}; +// use serde::{Deserialize, Serialize}; +// use strum::{AsRefStr, Display, EnumIter, EnumString}; + +// use crate::change_status::ChangeStatus; +// use crate::diagram::DiagramResult; +// use crate::schema::SchemaUiMenu; +// use crate::socket::{SocketArity, SocketEdgeKind}; +// use crate::{ +// history_event, ActionKind, ActionPrototype, ActionPrototypeContext, ActorView, Component, Node, +// NodeId, SchemaVariant, StandardModel, +// }; + +// #[remain::sorted] +// #[derive( +// AsRefStr, +// Clone, +// Copy, +// Debug, +// Deserialize, +// Display, +// EnumIter, +// EnumString, +// Eq, +// PartialEq, +// Serialize, +// )] +// #[serde(rename_all = "camelCase")] +// #[strum(serialize_all = "camelCase")] +// pub enum SocketDirection { +// Bidirectional, +// Input, +// Output, +// } + +// #[remain::sorted] +// #[derive( +// AsRefStr, +// Clone, +// Copy, +// Debug, +// Deserialize, +// Display, +// EnumIter, +// EnumString, +// Eq, +// PartialEq, +// Serialize, +// )] +// #[serde(rename_all = "camelCase")] +// #[strum(serialize_all 
= "camelCase")] +// pub enum NodeSide { +// Left, +// Right, +// } + +// #[derive(Deserialize, Serialize, Debug, Clone, PartialEq, Eq)] +// #[serde(rename_all = "camelCase")] +// pub struct SocketView { +// pub id: String, +// pub label: String, +// #[serde(rename = "type")] +// pub ty: String, +// pub direction: SocketDirection, +// pub max_connections: Option, +// pub is_required: Option, +// pub node_side: NodeSide, +// } + +// impl SocketView { +// pub async fn list( +// ctx: &DalContext, +// schema_variant: &SchemaVariant, +// ) -> DiagramResult> { +// Ok(schema_variant +// .sockets(ctx) +// .await? +// .into_iter() +// .filter_map(|socket| { +// (!socket.ui_hidden()).then(|| Self { +// id: socket.id().to_string(), +// label: socket.human_name().unwrap_or(socket.name()).to_owned(), +// ty: socket.name().to_owned(), +// // Note: it's not clear if this mapping is correct, and there is no backend support for bidirectional sockets for now +// direction: match socket.edge_kind() { +// SocketEdgeKind::ConfigurationOutput => SocketDirection::Output, +// _ => SocketDirection::Input, +// }, +// max_connections: match socket.arity() { +// SocketArity::Many => None, +// SocketArity::One => Some(1), +// }, +// is_required: Some(socket.required()), +// node_side: match socket.edge_kind() { +// SocketEdgeKind::ConfigurationOutput => NodeSide::Right, +// _ => NodeSide::Left, +// }, +// }) +// }) +// .collect()) +// } +// } + +// #[derive(Deserialize, Serialize, Debug, Clone, PartialEq, Eq)] +// #[serde(rename_all = "camelCase")] +// pub struct GridPoint { +// x: isize, +// y: isize, +// } + +// impl GridPoint { +// pub fn x(&self) -> isize { +// self.x +// } + +// pub fn y(&self) -> isize { +// self.y +// } +// } + +// #[derive(Deserialize, Serialize, Debug, Clone, PartialEq, Eq)] +// #[serde(rename_all = "camelCase")] +// pub struct Size2D { +// width: isize, +// height: isize, +// } + +// impl Size2D { +// pub fn width(&self) -> isize { +// self.width +// } +// pub fn 
height(&self) -> isize { +// self.height +// } +// } + +// #[derive(Deserialize, Serialize, Debug, Clone, PartialEq, Eq)] +// #[serde(rename_all = "camelCase")] +// pub struct DiagramComponentView { +// id: ComponentId, +// node_id: NodeId, +// display_name: Option, + +// parent_node_id: Option, +// child_node_ids: Vec, + +// schema_name: String, +// schema_id: String, +// schema_variant_id: String, +// schema_variant_name: String, +// schema_category: Option, + +// actions: Vec, + +// sockets: Option>, +// position: GridPoint, +// size: Option, +// color: Option, +// node_type: ComponentType, +// change_status: ChangeStatus, +// resource: ResourceView, + +// created_info: HistoryEventMetadata, +// updated_info: HistoryEventMetadata, + +// deleted_info: Option, +// } + +// impl DiagramComponentView { +// #[allow(clippy::too_many_arguments)] +// pub async fn new( +// ctx: &DalContext, +// component: &Component, +// node: &Node, +// parent_node_id: Option, +// child_node_ids: Vec, +// is_modified: bool, +// schema_variant: &SchemaVariant, +// ) -> DiagramResult { +// let schema = schema_variant +// .schema(ctx) +// .await? +// .ok_or(DiagramError::SchemaNotFound)?; + +// let schema_category = SchemaUiMenu::find_for_schema(ctx, *schema.id()) +// .await? +// .map(|um| um.category().to_string()); + +// let size = if let (Some(w), Some(h)) = (node.width(), node.height()) { +// Some(Size2D { +// height: h.parse()?, +// width: w.parse()?, +// }) +// } else { +// None +// }; + +// let x = node.x().parse::()?; +// let y = node.y().parse::()?; + +// // Change status should track the component, not the node, since node position is on the +// // node and the node will change if it is moved +// let change_status = if component.visibility().deleted_at.is_some() { +// ChangeStatus::Deleted +// } else if !component.exists_in_head(ctx).await? 
{ +// ChangeStatus::Added +// } else if is_modified { +// ChangeStatus::Modified +// } else { +// ChangeStatus::Unmodified +// }; + +// let component_status = ComponentStatus::get_by_id(ctx, component.id()) +// .await? +// .ok_or_else(|| DiagramError::ComponentStatusNotFound(*component.id()))?; + +// let created_info = +// HistoryEventMetadata::from_history_actor_timestamp(ctx, component_status.creation()) +// .await?; +// let updated_info = +// HistoryEventMetadata::from_history_actor_timestamp(ctx, component_status.update()) +// .await?; + +// let mut deleted_info: Option = None; +// { +// if let Some(deleted_at) = ctx.visibility().deleted_at { +// if let Some(deletion_user_pk) = component.deletion_user_pk() { +// let history_actor = history_event::HistoryActor::User(*deletion_user_pk); +// let actor = ActorView::from_history_actor(ctx, history_actor).await?; + +// deleted_info = Some(HistoryEventMetadata { +// actor, +// timestamp: deleted_at, +// }); +// } +// } +// } + +// // TODO(theo): probably dont want to fetch this here and load totally separately, but we inherited from existing endpoints +// let resource = ResourceView::new(component.resource(ctx).await?); + +// let action_prototypes = ActionPrototype::find_for_context( +// ctx, +// ActionPrototypeContext { +// schema_variant_id: *schema_variant.id(), +// }, +// ) +// .await?; +// let mut action_views: Vec = Vec::new(); +// for action_prototype in action_prototypes { +// if *action_prototype.kind() == ActionKind::Refresh { +// continue; +// } + +// let view = ActionPrototypeView::new(ctx, action_prototype).await?; +// action_views.push(view); +// } + +// Ok(Self { +// id: *component.id(), +// node_id: *node.id(), +// parent_node_id, +// child_node_ids, +// display_name: Some(component.name(ctx).await?), +// schema_name: schema.name().to_owned(), +// schema_variant_name: schema_variant.name().to_owned(), +// schema_id: schema.id().to_string(), +// schema_variant_id: schema_variant.id().to_string(), +// 
schema_category, +// sockets: Some(SocketView::list(ctx, schema_variant).await?), +// position: GridPoint { +// x: x.round() as isize, +// y: y.round() as isize, +// }, +// size, +// color: component.color(ctx).await?, +// node_type: component.get_type(ctx).await?, +// change_status, +// resource, +// actions: action_views, +// created_info, +// updated_info, +// deleted_info, +// }) +// } + +// pub fn id(&self) -> ComponentId { +// self.id +// } + +// pub fn node_id(&self) -> NodeId { +// self.node_id +// } + +// pub fn position(&self) -> &GridPoint { +// &self.position +// } + +// pub fn size(&self) -> &Option { +// &self.size +// } + +// pub fn resource(&self) -> &ResourceView { +// &self.resource +// } +// } + +// // TODO(theo,victor): this should probably move and be used more generally in a few places? + +// #[derive(Deserialize, Serialize, Debug, Clone, PartialEq, Eq)] +// #[serde(rename_all = "camelCase")] +// pub struct HistoryEventMetadata { +// pub actor: ActorView, +// pub timestamp: DateTime, +// } + +// impl HistoryEventMetadata { +// pub async fn from_history_actor_timestamp( +// ctx: &DalContext, +// value: HistoryActorTimestamp, +// ) -> DiagramResult { +// let actor = ActorView::from_history_actor(ctx, value.actor).await?; + +// Ok(Self { +// actor, +// timestamp: value.timestamp, +// }) +// } +// } diff --git a/lib/dal/src/edge.rs b/lib/dal/src/edge.rs index 4ba2314475..dbec803463 100644 --- a/lib/dal/src/edge.rs +++ b/lib/dal/src/edge.rs @@ -11,18 +11,15 @@ use thiserror::Error; use crate::func::argument::FuncArgumentError; use crate::job::definition::DependentValuesUpdate; use crate::node::NodeId; -use crate::socket::SocketError; use crate::standard_model::objects_from_rows; use crate::{ impl_standard_model, pk, socket::SocketId, standard_model, standard_model_accessor, - AttributeReadContext, AttributeValue, AttributeValueError, ComponentId, ExternalProviderError, - Func, FuncError, HistoryActor, HistoryEventError, InternalProviderError, Node, 
PropId, Socket, + AttributeValue, ComponentId, Func, HistoryActor, HistoryEventError, Node, PropId, Socket, StandardModel, StandardModelError, Tenancy, Timestamp, UserPk, Visibility, }; use crate::{ - AttributePrototypeArgument, AttributePrototypeArgumentError, Component, DalContext, - ExternalProvider, ExternalProviderId, InternalProvider, InternalProviderId, NodeError, - TransactionsError, + Component, DalContext, ExternalProvider, ExternalProviderId, InternalProvider, + InternalProviderId, TransactionsError, }; const LIST_PARENTS_FOR_COMPONENT: &str = diff --git a/lib/dal/src/func.rs b/lib/dal/src/func.rs index 814793d00f..54508389bc 100644 --- a/lib/dal/src/func.rs +++ b/lib/dal/src/func.rs @@ -1,37 +1,17 @@ -use std::string::FromUtf8Error; - -use base64::{engine::general_purpose, Engine}; +use base64::Engine; +use content_store::ContentHash; use serde::{Deserialize, Serialize}; -use si_data_nats::NatsError; -use si_data_pg::PgError; -use strum::IntoEnumIterator; +use std::string::FromUtf8Error; +use strum::{EnumDiscriminants, IntoEnumIterator}; use telemetry::prelude::*; use thiserror::Error; use veritech_client::CycloneValueEncryptError; -use crate::func::argument::FuncArgumentError; +use crate::workspace_snapshot::content_address::ContentAddress; use crate::{ - generate_unique_id, impl_standard_model, pk, standard_model, standard_model_accessor, - standard_model_accessor_ro, DalContext, FuncBinding, HistoryEventError, SecretError, - StandardModel, StandardModelError, Tenancy, Timestamp, TransactionsError, Visibility, - WorkspacePk, + pk, FuncBackendKind, FuncBackendResponseType, HistoryEventError, StandardModel, Timestamp, }; -use self::backend::{FuncBackendKind, FuncBackendResponseType}; - -pub mod argument; -pub mod backend; -pub mod before; -pub mod binding; -pub mod binding_return_value; -pub mod execution; -pub mod identity; -pub mod intrinsics; - -pub fn is_intrinsic(name: &str) -> bool { - intrinsics::IntrinsicFunc::iter().any(|intrinsic| 
intrinsic.name() == name) -} - #[remain::sorted] #[derive(Error, Debug)] pub enum FuncError { @@ -41,8 +21,6 @@ pub enum FuncError { Decode(#[from] base64::DecodeError), #[error("utf8 encoding error: {0}")] FromUtf8(#[from] FromUtf8Error), - #[error("func argument error: {0}")] - FuncArgument(#[from] FuncArgumentError), #[error("func binding error: {0}")] FuncBinding(String), #[error("history event error: {0}")] @@ -57,41 +35,19 @@ pub enum FuncError { IntrinsicParse(String), #[error("intrinsic spec creation error {0}")] IntrinsicSpecCreation(String), - #[error("Function missing expected code: {0}")] - MissingCode(FuncId), - #[error("Function missing expected handler: {0}")] - MissingHandler(FuncId), - #[error("nats txn error: {0}")] - Nats(#[from] NatsError), - #[error("could not find func by id: {0}")] - NotFound(FuncId), - #[error("could not find func by name: {0}")] - NotFoundByName(String), - #[error("pg error: {0}")] - Pg(#[from] PgError), - #[error("secret error: {0}")] - Secret(#[from] SecretError), - #[error("error serializing/deserializing json: {0}")] - SerdeJson(#[from] serde_json::Error), - #[error("standard model error: {0}")] - StandardModelError(#[from] StandardModelError), - /// When attempting to find the identity [`Func`], there were too many [`Funcs`](Func) returned. - #[error("too many funcs found when looking for identity func")] - TooManyFuncsFoundForIdentity, - #[error("transactions error: {0}")] - Transactions(#[from] TransactionsError), } pub type FuncResult = Result; -#[derive(Deserialize, Serialize, Debug, Clone, PartialEq, Eq)] -pub struct FuncMetadataView { - pub display_name: String, - pub description: Option, - pub link: Option, -} +// pub mod argument; +pub mod backend; +// pub before; +// pub mod binding; +// pub mod binding_return_value; +// pub mod execution; +// pub mod identity; +pub mod intrinsics; -pk!(FuncPk); pk!(FuncId); /// A `Func` is the declaration of the existence of a function. 
It has a name, @@ -103,182 +59,270 @@ pk!(FuncId); /// the `handler` value should be `myValidator`. #[derive(Deserialize, Serialize, Debug, Clone, PartialEq, Eq)] pub struct Func { - pk: FuncPk, - id: FuncId, - name: String, - display_name: Option, - description: Option, - link: Option, - hidden: bool, - builtin: bool, - backend_kind: FuncBackendKind, - backend_response_type: FuncBackendResponseType, - handler: Option, - code_base64: Option, - code_sha256: String, - #[serde(flatten)] - tenancy: Tenancy, + pub id: FuncId, #[serde(flatten)] - timestamp: Timestamp, - #[serde(flatten)] - visibility: Visibility, -} - -impl_standard_model! { - model: Func, - pk: FuncPk, - id: FuncId, - table_name: "funcs", - history_event_label_base: "function", - history_event_message_name: "Function" + pub timestamp: Timestamp, + pub name: String, + pub display_name: Option, + pub description: Option, + pub link: Option, + pub hidden: bool, + pub builtin: bool, + pub backend_kind: FuncBackendKind, + pub backend_response_type: FuncBackendResponseType, + pub handler: Option, + pub code_base64: Option, + pub code_sha256: String, } impl Func { - #[instrument(skip_all)] - pub async fn new( - ctx: &DalContext, - name: impl AsRef, - backend_kind: FuncBackendKind, - backend_response_type: FuncBackendResponseType, - ) -> FuncResult { - let name = name.as_ref(); - let row = ctx - .txns() - .await? 
- .pg() - .query_one( - "SELECT object FROM func_create_v1($1, $2, $3, $4, $5)", - &[ - ctx.tenancy(), - ctx.visibility(), - &name, - &backend_kind.as_ref(), - &backend_response_type.as_ref(), - ], - ) - .await?; - let object = standard_model::finish_create_from_row(ctx, row).await?; - Ok(object) + pub fn assemble(id: FuncId, inner: &FuncContentV1) -> Self { + let inner = inner.to_owned(); + Self { + id, + timestamp: inner.timestamp, + name: inner.name, + display_name: inner.display_name, + description: inner.description, + link: inner.link, + hidden: inner.hidden, + builtin: inner.builtin, + backend_kind: inner.backend_kind, + backend_response_type: inner.backend_response_type, + handler: inner.handler, + code_base64: inner.code_base64, + code_sha256: inner.code_sha256, + } } +} - /// Creates a new [`Func`] from [`self`](Func). All relevant fields are duplicated, but rows - /// existing on relationship tables (e.g. "belongs_to" or "many_to_many") are not. - pub async fn duplicate(&self, ctx: &DalContext) -> FuncResult { - // Generate a unique name and make sure it's not in use - let mut new_unique_name; - loop { - new_unique_name = format!("{}{}", self.name(), generate_unique_id(4)); - if Self::find_by_name(ctx, &new_unique_name).await?.is_none() { - break; - }; +impl From for FuncContentV1 { + fn from(value: Func) -> Self { + Self { + timestamp: value.timestamp, + name: value.name, + display_name: value.display_name, + description: value.description, + link: value.link, + hidden: value.hidden, + builtin: value.builtin, + backend_kind: value.backend_kind, + backend_response_type: value.backend_response_type, + handler: value.handler, + code_base64: value.code_base64, + code_sha256: value.code_sha256, } + } +} + +#[derive(Debug, PartialEq)] +pub struct FuncGraphNode { + id: FuncId, + name: String, + content_address: ContentAddress, + content: FuncContentV1, +} - let mut new_func = Self::new( - ctx, - new_unique_name, - *self.backend_kind(), - 
*self.backend_response_type(), - ) - .await?; +#[derive(EnumDiscriminants, Serialize, Deserialize, PartialEq)] +#[serde(tag = "version")] +pub enum FuncContent { + V1(FuncContentV1), +} - // Duplicate all fields on the func that do not come in through the constructor. - new_func.set_display_name(ctx, self.display_name()).await?; - new_func.set_description(ctx, self.description()).await?; - new_func.set_link(ctx, self.link()).await?; - new_func.set_hidden(ctx, self.hidden).await?; - new_func.set_builtin(ctx, self.builtin).await?; - new_func.set_handler(ctx, self.handler()).await?; - new_func.set_code_base64(ctx, self.code_base64()).await?; +#[derive(Debug, Clone, Deserialize, Serialize, PartialEq)] +pub struct FuncContentV1 { + #[serde(flatten)] + pub timestamp: Timestamp, + pub name: String, + pub display_name: Option, + pub description: Option, + pub link: Option, + pub hidden: bool, + pub builtin: bool, + pub backend_kind: FuncBackendKind, + pub backend_response_type: FuncBackendResponseType, + pub handler: Option, + pub code_base64: Option, + pub code_sha256: String, +} - Ok(new_func) +impl FuncGraphNode { + pub fn assemble( + id: impl Into, + name: impl Into, + content_hash: ContentHash, + content: FuncContentV1, + ) -> Self { + Self { + id: id.into(), + name: name.into(), + content_address: ContentAddress::Func(content_hash), + content, + } } - #[allow(clippy::result_large_err)] - pub fn code_plaintext(&self) -> FuncResult> { - Ok(match self.code_base64() { - Some(base64_code) => Some(String::from_utf8( - general_purpose::STANDARD_NO_PAD.decode(base64_code)?, - )?), - None => None, - }) + pub fn name(&self) -> &String { + &self.name } +} - pub async fn is_builtin(&self, ctx: &DalContext) -> FuncResult { - let row = ctx - .txns() - .await? 
- .pg() - .query_opt( - "SELECT id FROM funcs WHERE id = $1 and tenancy_workspace_pk = $2 LIMIT 1", - &[self.id(), &WorkspacePk::NONE], - ) - .await?; +#[derive(Deserialize, Serialize, Debug, Clone, PartialEq, Eq)] +pub struct FuncMetadataView { + pub display_name: String, + pub description: Option, + pub link: Option, +} - Ok(row.is_some()) - } +pub fn is_intrinsic(name: &str) -> bool { + intrinsics::IntrinsicFunc::iter().any(|intrinsic| intrinsic.name() == name) +} - pub async fn set_code_plaintext( - &mut self, - ctx: &DalContext, - code: Option<&'_ str>, - ) -> FuncResult<()> { - self.set_code_base64( - ctx, - code.as_ref() - .map(|code| general_purpose::STANDARD_NO_PAD.encode(code)), - ) - .await - } +// impl Func { +// #[instrument(skip_all)] +// pub async fn new( +// ctx: &DalContext, +// name: impl AsRef, +// backend_kind: FuncBackendKind, +// backend_response_type: FuncBackendResponseType, +// ) -> FuncResult { +// let name = name.as_ref(); +// let row = ctx +// .txns() +// .await? +// .pg() +// .query_one( +// "SELECT object FROM func_create_v1($1, $2, $3, $4, $5)", +// &[ +// ctx.tenancy(), +// ctx.visibility(), +// &name, +// &backend_kind.as_ref(), +// &backend_response_type.as_ref(), +// ], +// ) +// .await?; +// let object = standard_model::finish_create_from_row(ctx, row).await?; +// Ok(object) +// } - pub fn metadata_view(&self) -> FuncMetadataView { - FuncMetadataView { - display_name: self.display_name().unwrap_or_else(|| self.name()).into(), - description: self.description().map(Into::into), - link: self.description().map(Into::into), - } - } +// /// Creates a new [`Func`] from [`self`](Func). All relevant fields are duplicated, but rows +// /// existing on relationship tables (e.g. "belongs_to" or "many_to_many") are not. 
+// pub async fn duplicate(&self, ctx: &DalContext) -> FuncResult { +// // Generate a unique name and make sure it's not in use +// let mut new_unique_name; +// loop { +// new_unique_name = format!("{}{}", self.name(), generate_unique_id(4)); +// if Self::find_by_name(ctx, &new_unique_name).await?.is_none() { +// break; +// }; +// } - pub async fn for_binding(ctx: &DalContext, func_binding: &FuncBinding) -> FuncResult { - let row = ctx - .txns() - .await? - .pg() - .query_one( - "SELECT row_to_json(funcs.*) AS object - FROM funcs_v1($1, $2) AS funcs - INNER JOIN func_binding_belongs_to_func_v1($1, $2) AS func_binding_belongs_to_func - ON funcs.id = func_binding_belongs_to_func.belongs_to_id - WHERE func_binding_belongs_to_func.object_id = $3", - &[ctx.tenancy(), ctx.visibility(), func_binding.id()], - ) - .await?; - let object = standard_model::finish_create_from_row(ctx, row).await?; - Ok(object) - } +// let mut new_func = Self::new( +// ctx, +// new_unique_name, +// *self.backend_kind(), +// *self.backend_response_type(), +// ) +// .await?; - pub async fn find_by_name(ctx: &DalContext, name: &str) -> FuncResult> { - Ok(Self::find_by_attr(ctx, "name", &name).await?.pop()) - } +// // Duplicate all fields on the func that do not come in through the constructor. 
+// new_func.set_display_name(ctx, self.display_name()).await?; +// new_func.set_description(ctx, self.description()).await?; +// new_func.set_link(ctx, self.link()).await?; +// new_func.set_hidden(ctx, self.hidden).await?; +// new_func.set_builtin(ctx, self.builtin).await?; +// new_func.set_handler(ctx, self.handler()).await?; +// new_func.set_code_base64(ctx, self.code_base64()).await?; - /// Returns `true` if this function is one handled internally by the `dal`, `false` if the - /// function is one that will be executed by `veritech` - pub fn is_intrinsic(&self) -> bool { - is_intrinsic(self.name()) - } +// Ok(new_func) +// } - standard_model_accessor!(name, String, FuncResult); - standard_model_accessor!(display_name, Option, FuncResult); - standard_model_accessor!(description, Option, FuncResult); - standard_model_accessor!(link, Option, FuncResult); - standard_model_accessor!(hidden, bool, FuncResult); - standard_model_accessor!(builtin, bool, FuncResult); - standard_model_accessor!(backend_kind, Enum(FuncBackendKind), FuncResult); - standard_model_accessor!( - backend_response_type, - Enum(FuncBackendResponseType), - FuncResult - ); - standard_model_accessor!(handler, Option, FuncResult); - standard_model_accessor!(code_base64, Option, FuncResult); - standard_model_accessor_ro!(code_sha256, String); -} +// #[allow(clippy::result_large_err)] +// pub fn code_plaintext(&self) -> FuncResult> { +// Ok(match self.code_base64() { +// Some(base64_code) => Some(String::from_utf8( +// general_purpose::STANDARD_NO_PAD.decode(base64_code)?, +// )?), +// None => None, +// }) +// } + +// pub async fn is_builtin(&self, ctx: &DalContext) -> FuncResult { +// let row = ctx +// .txns() +// .await? 
+// .pg() +// .query_opt( +// "SELECT id FROM funcs WHERE id = $1 and tenancy_workspace_pk = $2 LIMIT 1", +// &[self.id(), &WorkspacePk::NONE], +// ) +// .await?; + +// Ok(row.is_some()) +// } + +// pub async fn set_code_plaintext( +// &mut self, +// ctx: &DalContext, +// code: Option<&'_ str>, +// ) -> FuncResult<()> { +// self.set_code_base64( +// ctx, +// code.as_ref() +// .map(|code| general_purpose::STANDARD_NO_PAD.encode(code)), +// ) +// .await +// } + +// pub fn metadata_view(&self) -> FuncMetadataView { +// FuncMetadataView { +// display_name: self.display_name().unwrap_or_else(|| self.name()).into(), +// description: self.description().map(Into::into), +// link: self.description().map(Into::into), +// } +// } + +// pub async fn for_binding(ctx: &DalContext, func_binding: &FuncBinding) -> FuncResult { +// let row = ctx +// .txns() +// .await? +// .pg() +// .query_one( +// "SELECT row_to_json(funcs.*) AS object +// FROM funcs_v1($1, $2) AS funcs +// INNER JOIN func_binding_belongs_to_func_v1($1, $2) AS func_binding_belongs_to_func +// ON funcs.id = func_binding_belongs_to_func.belongs_to_id +// WHERE func_binding_belongs_to_func.object_id = $3", +// &[ctx.tenancy(), ctx.visibility(), func_binding.id()], +// ) +// .await?; +// let object = standard_model::finish_create_from_row(ctx, row).await?; +// Ok(object) +// } + +// pub async fn find_by_name(ctx: &DalContext, name: &str) -> FuncResult> { +// Ok(Self::find_by_attr(ctx, "name", &name).await?.pop()) +// } + +// /// Returns `true` if this function is one handled internally by the `dal`, `false` if the +// /// function is one that will be executed by `veritech` +// pub fn is_intrinsic(&self) -> bool { +// is_intrinsic(self.name()) +// } + +// standard_model_accessor!(name, String, FuncResult); +// standard_model_accessor!(display_name, Option, FuncResult); +// standard_model_accessor!(description, Option, FuncResult); +// standard_model_accessor!(link, Option, FuncResult); +// 
standard_model_accessor!(hidden, bool, FuncResult); +// standard_model_accessor!(builtin, bool, FuncResult); +// standard_model_accessor!(backend_kind, Enum(FuncBackendKind), FuncResult); +// standard_model_accessor!( +// backend_response_type, +// Enum(FuncBackendResponseType), +// FuncResult +// ); +// standard_model_accessor!(handler, Option, FuncResult); +// standard_model_accessor!(code_base64, Option, FuncResult); +// standard_model_accessor_ro!(code_sha256, String); +// } diff --git a/lib/dal/src/func/argument.rs b/lib/dal/src/func/argument.rs index e215fce9a9..b8219de400 100644 --- a/lib/dal/src/func/argument.rs +++ b/lib/dal/src/func/argument.rs @@ -8,9 +8,9 @@ use thiserror::Error; use si_pkg::FuncArgumentKind as PkgFuncArgumentKind; use crate::{ - impl_standard_model, pk, standard_model, standard_model_accessor, AttributePrototypeArgument, - AttributePrototypeArgumentError, AttributePrototypeId, DalContext, FuncId, HistoryEventError, - PropKind, StandardModel, StandardModelError, Tenancy, Timestamp, TransactionsError, Visibility, + impl_standard_model, pk, standard_model, standard_model_accessor, AttributePrototypeId, + DalContext, FuncId, HistoryEventError, PropKind, StandardModel, StandardModelError, Tenancy, + Timestamp, TransactionsError, Visibility, }; const LIST_FOR_FUNC: &str = include_str!("../queries/func_argument/list_for_func.sql"); diff --git a/lib/dal/src/func/backend.rs b/lib/dal/src/func/backend.rs index 389101b64a..4cc699f291 100644 --- a/lib/dal/src/func/backend.rs +++ b/lib/dal/src/func/backend.rs @@ -1,61 +1,60 @@ -use async_trait::async_trait; use serde::{de::DeserializeOwned, Deserialize, Serialize}; use strum::{AsRefStr, Display, EnumIter, EnumString}; use telemetry::prelude::*; use thiserror::Error; -use tokio::sync::mpsc; + use veritech_client::{ ActionRunResultSuccess, BeforeFunction, Client as VeritechClient, FunctionResult, OutputStream, ResolverFunctionResponseType, }; -use crate::{label_list::ToLabelList, DalContext, Func, 
FuncId, PropKind, StandardModel}; +use crate::{label_list::ToLabelList, StandardModel}; -pub mod array; -pub mod boolean; -pub mod diff; -pub mod identity; -pub mod integer; -pub mod js_action; -pub mod js_attribute; -pub mod js_reconciliation; -pub mod js_schema_variant_definition; -pub mod js_validation; -pub mod map; -pub mod object; -pub mod string; +// pub mod array; +// pub mod boolean; +// pub mod diff; +// pub mod identity; +// pub mod integer; +// pub mod js_action; +// pub mod js_attribute; +// pub mod js_reconciliation; +// pub mod js_schema_variant_definition; +// pub mod js_validation; +// pub mod map; +// pub mod object; +// pub mod string; pub mod validation; -#[remain::sorted] -#[derive(Error, Debug)] -pub enum FuncBackendError { - #[error("expected same array entry prop kinds - expected {0}, found: {1}")] - DifferingArrayEntryPropKinds(PropKind, PropKind), - #[error("dispatch func missing code_base64 {0}")] - DispatchMissingBase64(FuncId), - #[error("dispatch func missing handler {0}")] - DispatchMissingHandler(FuncId), - #[error("function result action run error: {0:?}")] - FunctionResultActionRun(FunctionResult), - #[error("invalid data - expected a valid array entry value, got: {0}")] - InvalidArrayEntryData(serde_json::Value), - #[error("result failure: kind={kind}, message={message}, backend={backend}")] - ResultFailure { - kind: String, - message: String, - backend: String, - }, - #[error("send error")] - SendError, - #[error("error serializing/deserializing json: {0}")] - SerdeJson(#[from] serde_json::Error), - #[error("unable to decode ulid")] - Ulid(#[from] ulid::DecodeError), - #[error("veritech client error: {0}")] - VeritechClient(#[from] veritech_client::ClientError), -} +// #[remain::sorted] +// #[derive(Error, Debug)] +// pub enum FuncBackendError { +// #[error("expected same array entry prop kinds - expected {0}, found: {1}")] +// DifferingArrayEntryPropKinds(PropKind, PropKind), +// #[error("dispatch func missing code_base64 {0}")] 
+// DispatchMissingBase64(FuncId), +// #[error("dispatch func missing handler {0}")] +// DispatchMissingHandler(FuncId), +// #[error("function result action run error: {0:?}")] +// FunctionResultActionRun(FunctionResult), +// #[error("invalid data - expected a valid array entry value, got: {0}")] +// InvalidArrayEntryData(serde_json::Value), +// #[error("result failure: kind={kind}, message={message}, backend={backend}")] +// ResultFailure { +// kind: String, +// message: String, +// backend: String, +// }, +// #[error("send error")] +// SendError, +// #[error("error serializing/deserializing json: {0}")] +// SerdeJson(#[from] serde_json::Error), +// #[error("unable to decode ulid")] +// Ulid(#[from] ulid::DecodeError), +// #[error("veritech client error: {0}")] +// VeritechClient(#[from] veritech_client::ClientError), +// } -pub type FuncBackendResult = Result; +// pub type FuncBackendResult = Result; #[remain::sorted] #[derive( @@ -184,163 +183,163 @@ impl TryFrom for ResolverFunctionResponseType { impl ToLabelList for FuncBackendKind {} -#[derive(Debug, Clone)] -pub struct FuncDispatchContext { - pub veritech: VeritechClient, - pub output_tx: mpsc::Sender, -} +// #[derive(Debug, Clone)] +// pub struct FuncDispatchContext { +// pub veritech: VeritechClient, +// pub output_tx: mpsc::Sender, +// } -impl FuncDispatchContext { - pub fn new(ctx: &DalContext) -> (Self, mpsc::Receiver) { - let (output_tx, rx) = mpsc::channel(64); - ( - Self { - veritech: ctx.veritech().clone(), - output_tx, - }, - rx, - ) - } +// impl FuncDispatchContext { +// pub fn new(ctx: &DalContext) -> (Self, mpsc::Receiver) { +// let (output_tx, rx) = mpsc::channel(64); +// ( +// Self { +// veritech: ctx.veritech().clone(), +// output_tx, +// }, +// rx, +// ) +// } - pub fn into_inner(self) -> (VeritechClient, mpsc::Sender) { - (self.veritech, self.output_tx) - } -} +// pub fn into_inner(self) -> (VeritechClient, mpsc::Sender) { +// (self.veritech, self.output_tx) +// } +// } -#[async_trait] -pub 
trait FuncDispatch: std::fmt::Debug { - type Args: DeserializeOwned + Send + std::fmt::Debug; - type Output: ExtractPayload + std::fmt::Debug; +// #[async_trait] +// pub trait FuncDispatch: std::fmt::Debug { +// type Args: DeserializeOwned + Send + std::fmt::Debug; +// type Output: ExtractPayload + std::fmt::Debug; - async fn create_and_execute( - context: FuncDispatchContext, - func: &Func, - args: &serde_json::Value, - before: Vec, - ) -> FuncBackendResult<(Option, Option)> - where - ::Payload: Serialize, - { - let executor = Self::create(context, func, args, before)?; - Ok(executor.execute().await?) - } +// async fn create_and_execute( +// context: FuncDispatchContext, +// func: &Func, +// args: &serde_json::Value, +// before: Vec, +// ) -> FuncBackendResult<(Option, Option)> +// where +// ::Payload: Serialize, +// { +// let executor = Self::create(context, func, args, before)?; +// Ok(executor.execute().await?) +// } - /// This private function creates the "request" to send to veritech in a shape that it - /// likes. The request's type is [`Self`]. - fn create( - context: FuncDispatchContext, - func: &Func, - args: &serde_json::Value, - before: Vec, - ) -> FuncBackendResult> { - let args = Self::Args::deserialize(args)?; - let code_base64 = func - .code_base64() - .ok_or_else(|| FuncBackendError::DispatchMissingBase64(*func.id()))?; - let handler = func - .handler() - .ok_or_else(|| FuncBackendError::DispatchMissingHandler(*func.id()))?; - let value = Self::new(context, code_base64, handler, args, before); - Ok(value) - } +// /// This private function creates the "request" to send to veritech in a shape that it +// /// likes. The request's type is [`Self`]. 
+// fn create( +// context: FuncDispatchContext, +// func: &Func, +// args: &serde_json::Value, +// before: Vec, +// ) -> FuncBackendResult> { +// let args = Self::Args::deserialize(args)?; +// let code_base64 = func +// .code_base64() +// .ok_or_else(|| FuncBackendError::DispatchMissingBase64(*func.id()))?; +// let handler = func +// .handler() +// .ok_or_else(|| FuncBackendError::DispatchMissingHandler(*func.id()))?; +// let value = Self::new(context, code_base64, handler, args, before); +// Ok(value) +// } - #[instrument( - name = "funcdispatch.execute", - skip_all, - level = "debug", - fields( - otel.kind = %FormattedSpanKind(SpanKind::Client), - otel.status_code = Empty, - otel.status_message = Empty, - si.func.result = Empty - ) - )] - async fn execute( - self: Box, - ) -> FuncBackendResult<(Option, Option)> - where - ::Payload: Serialize, - { - let span = Span::current(); +// #[instrument( +// name = "funcdispatch.execute", +// skip_all, +// level = "debug", +// fields( +// otel.kind = %FormattedSpanKind(SpanKind::Client), +// otel.status_code = Empty, +// otel.status_message = Empty, +// si.func.result = Empty +// ) +// )] +// async fn execute( +// self: Box, +// ) -> FuncBackendResult<(Option, Option)> +// where +// ::Payload: Serialize, +// { +// let span = Span::current(); - // NOTE(nick,wendy): why is a debug output of "self" a valid backend? - let backend = format!("{:?}", &self); - let value = match self.dispatch().await.map_err(|err| span.record_err(err))? { - FunctionResult::Success(check_result) => { - let payload = serde_json::to_value(check_result.extract()?)?; - (Some(payload.clone()), Some(payload)) - } - FunctionResult::Failure(failure) => { - return Err(span.record_err(FuncBackendError::ResultFailure { - kind: failure.error.kind, - backend, - message: failure.error.message, - })); - } - }; +// // NOTE(nick,wendy): why is a debug output of "self" a valid backend? 
+// let backend = format!("{:?}", &self); +// let value = match self.dispatch().await.map_err(|err| span.record_err(err))? { +// FunctionResult::Success(check_result) => { +// let payload = serde_json::to_value(check_result.extract()?)?; +// (Some(payload.clone()), Some(payload)) +// } +// FunctionResult::Failure(failure) => { +// return Err(span.record_err(FuncBackendError::ResultFailure { +// kind: failure.error.kind, +// backend, +// message: failure.error.message, +// })); +// } +// }; - span.record_ok(); - span.record("si.func.result", &tracing::field::debug(&value)); - Ok(value) - } +// span.record_ok(); +// span.record("si.func.result", &tracing::field::debug(&value)); +// Ok(value) +// } - fn new( - context: FuncDispatchContext, - code_base64: &str, - handler: &str, - args: Self::Args, - before: Vec, - ) -> Box; - async fn dispatch(self: Box) -> FuncBackendResult>; -} +// fn new( +// context: FuncDispatchContext, +// code_base64: &str, +// handler: &str, +// args: Self::Args, +// before: Vec, +// ) -> Box; +// async fn dispatch(self: Box) -> FuncBackendResult>; +// } -#[async_trait] -pub trait FuncBackend { - type Args: DeserializeOwned + Send + std::fmt::Debug; +// #[async_trait] +// pub trait FuncBackend { +// type Args: DeserializeOwned + Send + std::fmt::Debug; - async fn create_and_execute( - args: &serde_json::Value, - ) -> FuncBackendResult<(Option, Option)> { - let executor = Self::create(args)?; - Ok(executor.execute().await?) - } +// async fn create_and_execute( +// args: &serde_json::Value, +// ) -> FuncBackendResult<(Option, Option)> { +// let executor = Self::create(args)?; +// Ok(executor.execute().await?) 
+// } - fn create(args: &serde_json::Value) -> FuncBackendResult> { - let args = Self::Args::deserialize(args)?; - Ok(Self::new(args)) - } +// fn create(args: &serde_json::Value) -> FuncBackendResult> { +// let args = Self::Args::deserialize(args)?; +// Ok(Self::new(args)) +// } - #[instrument( - name = "funcbackend.execute", - skip_all, - level = "debug", - fields( - otel.kind = %FormattedSpanKind(SpanKind::Client), - otel.status_code = Empty, - otel.status_message = Empty, - si.func.result = Empty - ) - )] - async fn execute( - self: Box, - ) -> FuncBackendResult<(Option, Option)> { - let span = Span::current(); +// #[instrument( +// name = "funcbackend.execute", +// skip_all, +// level = "debug", +// fields( +// otel.kind = %FormattedSpanKind(SpanKind::Client), +// otel.status_code = Empty, +// otel.status_message = Empty, +// si.func.result = Empty +// ) +// )] +// async fn execute( +// self: Box, +// ) -> FuncBackendResult<(Option, Option)> { +// let span = Span::current(); - let value = self.inline().await?; +// let value = self.inline().await?; - span.record_ok(); - span.record("si.func.result", &tracing::field::debug(&value)); - Ok(value) - } +// span.record_ok(); +// span.record("si.func.result", &tracing::field::debug(&value)); +// Ok(value) +// } - fn new(args: Self::Args) -> Box; - async fn inline( - self: Box, - ) -> FuncBackendResult<(Option, Option)>; -} +// fn new(args: Self::Args) -> Box; +// async fn inline( +// self: Box, +// ) -> FuncBackendResult<(Option, Option)>; +// } -pub trait ExtractPayload { - type Payload: std::fmt::Debug; +// pub trait ExtractPayload { +// type Payload: std::fmt::Debug; - fn extract(self) -> FuncBackendResult; -} +// fn extract(self) -> FuncBackendResult; +// } diff --git a/lib/dal/src/func/backend/validation.rs b/lib/dal/src/func/backend/validation.rs index 1666ee7cc8..8da446ba2c 100644 --- a/lib/dal/src/func/backend/validation.rs +++ b/lib/dal/src/func/backend/validation.rs @@ -1,11 +1,10 @@ -use 
async_trait::async_trait; -use regex::Regex; + + use serde::{Deserialize, Serialize}; -use std::net::IpAddr; -use std::str::FromStr; -use crate::func::backend::{FuncBackend, FuncBackendResult}; -use crate::validation::{Validation, ValidationError, ValidationErrorKind}; + + +use crate::validation::{Validation}; #[derive(Deserialize, Serialize, Debug, Clone)] pub struct FuncBackendValidation { @@ -23,130 +22,130 @@ impl FuncBackendValidationArgs { } } -#[async_trait] -impl FuncBackend for FuncBackendValidation { - type Args = FuncBackendValidationArgs; +// #[async_trait] +// impl FuncBackend for FuncBackendValidation { +// type Args = FuncBackendValidationArgs; - fn new(args: FuncBackendValidationArgs) -> Box { - Box::new(Self { args }) - } +// fn new(args: FuncBackendValidationArgs) -> Box { +// Box::new(Self { args }) +// } - async fn inline( - self: Box, - ) -> FuncBackendResult<(Option, Option)> { - let mut validation_errors = Vec::new(); - let value_must_be_present_error = ValidationError { - message: "This field is required".to_owned(), - kind: ValidationErrorKind::ValueMustBePresent, - link: None, - level: None, - }; +// async fn inline( +// self: Box, +// ) -> FuncBackendResult<(Option, Option)> { +// let mut validation_errors = Vec::new(); +// let value_must_be_present_error = ValidationError { +// message: "This field is required".to_owned(), +// kind: ValidationErrorKind::ValueMustBePresent, +// link: None, +// level: None, +// }; - let maybe_validation_error = match self.args.validation { - Validation::IntegerIsBetweenTwoIntegers { value, lower_bound, upper_bound } => match value { - Some(value) => match value > lower_bound && value < upper_bound { - true => None, - false => Some(ValidationError { - message: format!("value ({value}) is not in between lower ({lower_bound}) and upper ({upper_bound}) bounds"), - kind: ValidationErrorKind::IntegerNotInBetweenTwoIntegers, - link: None, - level: None, - }), - }, - None => Some(value_must_be_present_error), - }, 
- Validation::IntegerIsNotEmpty { value} => match value { - Some(_value) => None, - None => Some(value_must_be_present_error), - }, - Validation::StringIsNotEmpty { value} => match value { - Some(value) => match value.is_empty() { - true => Some(value_must_be_present_error), - false => None, - }, - None => Some(value_must_be_present_error), - }, - Validation::StringIsValidIpAddr { value } => match value { - Some(value) => match IpAddr::from_str(&value) { - Ok(_) => None, - Err(e) => Some(ValidationError { - message: format!("value ({value}) is an invalid ip address: {e}"), - kind: ValidationErrorKind::InvalidIpAddr, - link: None, - level: None, - }), - }, - None => Some(value_must_be_present_error), - }, - Validation::StringIsHexColor { value } => match value { - Some(value) => { - let re = Regex::new(r"^#[\dA-Fa-f]{6,8}$").unwrap(); - if re.is_match(value.as_str()) { - None - } else { - Some(ValidationError { - message: format!("value ({value}) is not a valid hex string"), - kind: ValidationErrorKind::InvalidHexString, - link: None, - level: None, - }) - } - }, - None => None, - }, - Validation::StringEquals { value, expected } => match value { - Some(value) => match value == expected { - true => None, - false => Some(ValidationError { - message: format!("value ({value}) does not match expected ({expected})"), - kind: ValidationErrorKind::StringDoesNotEqual, - link: None, - level: None, - }), - }, - None => Some(value_must_be_present_error), - }, - Validation::StringHasPrefix { value, expected } => match value { - Some(value) => match value.starts_with(&expected) { - true => None, - false => Some(ValidationError { - message: format!("value ({value}) does not contain prefix ({expected})"), - kind: ValidationErrorKind::StringDoesNotHavePrefix, - link: None, - level: None, - }), - }, - None => Some(value_must_be_present_error), - }, - Validation::StringInStringArray { - value, - expected, - display_expected, - } => match value { - Some(value) => match 
expected.iter().any(|e| e == &value) { - true => None, - false => Some(ValidationError { - message: match display_expected { - true => format!("value ({value}) not found in list of expected values ({expected:?})"), - false => format!("value ({value}) not found in list of expected values") - }, - kind: ValidationErrorKind::StringNotInStringArray, - link: None, - level: None, - }) - }, - None => Some(value_must_be_present_error), - }, - }; +// let maybe_validation_error = match self.args.validation { +// Validation::IntegerIsBetweenTwoIntegers { value, lower_bound, upper_bound } => match value { +// Some(value) => match value > lower_bound && value < upper_bound { +// true => None, +// false => Some(ValidationError { +// message: format!("value ({value}) is not in between lower ({lower_bound}) and upper ({upper_bound}) bounds"), +// kind: ValidationErrorKind::IntegerNotInBetweenTwoIntegers, +// link: None, +// level: None, +// }), +// }, +// None => Some(value_must_be_present_error), +// }, +// Validation::IntegerIsNotEmpty { value} => match value { +// Some(_value) => None, +// None => Some(value_must_be_present_error), +// }, +// Validation::StringIsNotEmpty { value} => match value { +// Some(value) => match value.is_empty() { +// true => Some(value_must_be_present_error), +// false => None, +// }, +// None => Some(value_must_be_present_error), +// }, +// Validation::StringIsValidIpAddr { value } => match value { +// Some(value) => match IpAddr::from_str(&value) { +// Ok(_) => None, +// Err(e) => Some(ValidationError { +// message: format!("value ({value}) is an invalid ip address: {e}"), +// kind: ValidationErrorKind::InvalidIpAddr, +// link: None, +// level: None, +// }), +// }, +// None => Some(value_must_be_present_error), +// }, +// Validation::StringIsHexColor { value } => match value { +// Some(value) => { +// let re = Regex::new(r"^#[\dA-Fa-f]{6,8}$").unwrap(); +// if re.is_match(value.as_str()) { +// None +// } else { +// Some(ValidationError { +// 
message: format!("value ({value}) is not a valid hex string"), +// kind: ValidationErrorKind::InvalidHexString, +// link: None, +// level: None, +// }) +// } +// }, +// None => None, +// }, +// Validation::StringEquals { value, expected } => match value { +// Some(value) => match value == expected { +// true => None, +// false => Some(ValidationError { +// message: format!("value ({value}) does not match expected ({expected})"), +// kind: ValidationErrorKind::StringDoesNotEqual, +// link: None, +// level: None, +// }), +// }, +// None => Some(value_must_be_present_error), +// }, +// Validation::StringHasPrefix { value, expected } => match value { +// Some(value) => match value.starts_with(&expected) { +// true => None, +// false => Some(ValidationError { +// message: format!("value ({value}) does not contain prefix ({expected})"), +// kind: ValidationErrorKind::StringDoesNotHavePrefix, +// link: None, +// level: None, +// }), +// }, +// None => Some(value_must_be_present_error), +// }, +// Validation::StringInStringArray { +// value, +// expected, +// display_expected, +// } => match value { +// Some(value) => match expected.iter().any(|e| e == &value) { +// true => None, +// false => Some(ValidationError { +// message: match display_expected { +// true => format!("value ({value}) not found in list of expected values ({expected:?})"), +// false => format!("value ({value}) not found in list of expected values") +// }, +// kind: ValidationErrorKind::StringNotInStringArray, +// link: None, +// level: None, +// }) +// }, +// None => Some(value_must_be_present_error), +// }, +// }; - // NOTE(nick): currently, the "find status" query expects an array with non-null values - // to be returned. Since we may add the ability to return multiple errors in the future, - // we are keeping the same same as before (i.e. Vec). 
- if let Some(validation_error) = maybe_validation_error { - validation_errors.push(validation_error); - } +// // NOTE(nick): currently, the "find status" query expects an array with non-null values +// // to be returned. Since we may add the ability to return multiple errors in the future, +// // we are keeping the same same as before (i.e. Vec). +// if let Some(validation_error) = maybe_validation_error { +// validation_errors.push(validation_error); +// } - let value = serde_json::to_value(validation_errors)?; - Ok((Some(value.clone()), Some(value))) - } -} +// let value = serde_json::to_value(validation_errors)?; +// Ok((Some(value.clone()), Some(value))) +// } +// } diff --git a/lib/dal/src/func/binding.rs b/lib/dal/src/func/binding.rs index 68f3f58bc5..b9ccdef6cd 100644 --- a/lib/dal/src/func/binding.rs +++ b/lib/dal/src/func/binding.rs @@ -185,9 +185,7 @@ impl FuncBinding { func_id: FuncId, before: Vec, ) -> FuncBindingResult<(Self, FuncBindingReturnValue)> { - let func = Func::get_by_id(ctx, &func_id) - .await? - .ok_or(FuncError::NotFound(func_id))?; + let func_id = func.id; let func_binding = Self::new(ctx, args, func_id, func.backend_kind).await?; let func_binding_return_value: FuncBindingReturnValue = diff --git a/lib/dal/src/func/identity.rs b/lib/dal/src/func/identity.rs index add46d4fc0..7474bcb68b 100644 --- a/lib/dal/src/func/identity.rs +++ b/lib/dal/src/func/identity.rs @@ -3,50 +3,47 @@ //! prevalence of the identity [`Func`](crate::Func) across the library, this helper should help //! ease some friction. 
-use crate::{ - DalContext, Func, FuncArgument, FuncBinding, FuncBindingReturnValue, FuncError, FuncResult, - StandardModel, -}; +use crate::{DalContext, Func, FuncArgument, FuncBinding, FuncBindingReturnValue, StandardModel}; const IDENTITY_FUNC_NAME: &str = "si:identity"; -impl Func { - /// Returns the identity [`Func`](Self) with its corresponding - /// [`FuncBinding`](crate::FuncBinding) and - /// [`FuncBindingReturnValue`](crate::FuncBindingReturnValue). - pub async fn identity_with_binding_and_return_value( - ctx: &DalContext, - ) -> FuncResult<(Func, FuncBinding, FuncBindingReturnValue)> { - let func = Self::identity_func(ctx).await?; - let (func_binding, func_binding_return_value) = FuncBinding::create_and_execute( - ctx, - serde_json::json![{ "identity": null }], - *func.id(), - vec![], - ) - .await - .map_err(|e| FuncError::FuncBinding(e.to_string()))?; +// impl Func { +// /// Returns the identity [`Func`](Self) with its corresponding +// /// [`FuncBinding`](crate::FuncBinding) and +// /// [`FuncBindingReturnValue`](crate::FuncBindingReturnValue). +// pub async fn identity_with_binding_and_return_value( +// ctx: &DalContext, +// ) -> FuncResult<(Func, FuncBinding, FuncBindingReturnValue)> { +// let func = Self::identity_func(ctx).await?; +// let (func_binding, func_binding_return_value) = FuncBinding::create_and_execute( +// ctx, +// serde_json::json![{ "identity": null }], +// *func.id(), +// vec![], +// ) +// .await +// .map_err(|e| FuncError::FuncBinding(e.to_string()))?; - Ok((func, func_binding, func_binding_return_value)) - } +// Ok((func, func_binding, func_binding_return_value)) +// } - /// Returns the identity [`Func`](Self) with its corresponding - /// [`FuncArgument`](crate::FuncArgument). - pub async fn identity_with_argument(ctx: &DalContext) -> FuncResult<(Func, FuncArgument)> { - let func = Self::identity_func(ctx).await?; - let func_argument = FuncArgument::find_by_name_for_func(ctx, "identity", *func.id()) - .await? 
- .ok_or(FuncError::IdentityFuncArgumentNotFound)?; - Ok((func, func_argument)) - } +// /// Returns the identity [`Func`](Self) with its corresponding +// /// [`FuncArgument`](crate::FuncArgument). +// pub async fn identity_with_argument(ctx: &DalContext) -> FuncResult<(Func, FuncArgument)> { +// let func = Self::identity_func(ctx).await?; +// let func_argument = FuncArgument::find_by_name_for_func(ctx, "identity", *func.id()) +// .await? +// .ok_or(FuncError::IdentityFuncArgumentNotFound)?; +// Ok((func, func_argument)) +// } - /// Returns the identity [`Func`](Self). - pub async fn identity_func(ctx: &DalContext) -> FuncResult { - let mut found_funcs = Func::find_by_attr(ctx, "name", &IDENTITY_FUNC_NAME).await?; - let func = found_funcs.pop().ok_or(FuncError::IdentityFuncNotFound)?; - match found_funcs.is_empty() { - true => Ok(func), - false => Err(FuncError::TooManyFuncsFoundForIdentity), - } - } -} +// /// Returns the identity [`Func`](Self). +// pub async fn identity_func(ctx: &DalContext) -> FuncResult { +// let mut found_funcs = Func::find_by_attr(ctx, "name", &IDENTITY_FUNC_NAME).await?; +// let func = found_funcs.pop().ok_or(FuncError::IdentityFuncNotFound)?; +// match found_funcs.is_empty() { +// true => Ok(func), +// false => Err(FuncError::TooManyFuncsFoundForIdentity), +// } +// } +// } diff --git a/lib/dal/src/func/intrinsics.rs b/lib/dal/src/func/intrinsics.rs index 557e37fd03..e3b2229da4 100644 --- a/lib/dal/src/func/intrinsics.rs +++ b/lib/dal/src/func/intrinsics.rs @@ -3,7 +3,7 @@ use si_pkg::{ FuncSpecData, PkgSpec, }; -use super::{FuncError, FuncResult}; +use crate::func::{FuncError, FuncResult}; use chrono::DateTime; use strum::{AsRefStr, Display, EnumIter, EnumString, IntoEnumIterator}; diff --git a/lib/dal/src/installed_pkg/asset.rs b/lib/dal/src/installed_pkg/asset.rs index d117eeef7e..f0e05be049 100644 --- a/lib/dal/src/installed_pkg/asset.rs +++ b/lib/dal/src/installed_pkg/asset.rs @@ -2,7 +2,7 @@ use super::{InstalledPkgId, 
InstalledPkgResult}; use serde::{Deserialize, Serialize}; use telemetry::prelude::*; -use crate::schema::variant::definition::SchemaVariantDefinitionId; +// use crate::schema::variant::definition::SchemaVariantDefinitionId; use crate::{ impl_standard_model, pk, standard_model, standard_model_accessor, DalContext, FuncId, SchemaId, SchemaVariantId, StandardModel, Tenancy, Timestamp, Visibility, @@ -37,9 +37,9 @@ pk!(InstalledPkgAssetAssetId); #[strum(serialize_all = "camelCase")] pub enum InstalledPkgAssetKind { Func, - Schema, - SchemaVariant, - SchemaVariantDefinition, + // Schema, + // SchemaVariant, + // SchemaVariantDefinition, } /// An `InstalledPkgAsset` is a record of the installation of a package asset. It tracks the @@ -72,65 +72,65 @@ pub enum InstalledPkgAssetTyped { id: FuncId, hash: String, }, - Schema { - installed_pkg_asset_id: InstalledPkgAssetId, - installed_pkg_id: InstalledPkgId, - id: SchemaId, - hash: String, - }, - SchemaVariant { - installed_pkg_asset_id: InstalledPkgAssetId, - installed_pkg_id: InstalledPkgId, - id: SchemaVariantId, - hash: String, - }, - SchemaVariantDefinition { - installed_pkg_asset_id: InstalledPkgAssetId, - installed_pkg_id: InstalledPkgId, - id: SchemaVariantDefinitionId, - hash: String, - }, + // Schema { + // installed_pkg_asset_id: InstalledPkgAssetId, + // installed_pkg_id: InstalledPkgId, + // id: SchemaId, + // hash: String, + // }, + // SchemaVariant { + // installed_pkg_asset_id: InstalledPkgAssetId, + // installed_pkg_id: InstalledPkgId, + // id: SchemaVariantId, + // hash: String, + // }, + // SchemaVariantDefinition { + // installed_pkg_asset_id: InstalledPkgAssetId, + // installed_pkg_id: InstalledPkgId, + // id: SchemaVariantDefinitionId, + // hash: String, + // }, } impl InstalledPkgAssetTyped { - pub fn new_for_schema( - schema_id: SchemaId, - installed_pkg_id: InstalledPkgId, - hash: String, - ) -> Self { - Self::Schema { - installed_pkg_asset_id: InstalledPkgAssetId::NONE, - installed_pkg_id, - id: 
schema_id, - hash, - } - } + // pub fn new_for_schema( + // schema_id: SchemaId, + // installed_pkg_id: InstalledPkgId, + // hash: String, + // ) -> Self { + // Self::Schema { + // installed_pkg_asset_id: InstalledPkgAssetId::NONE, + // installed_pkg_id, + // id: schema_id, + // hash, + // } + // } - pub fn new_for_schema_variant( - schema_variant_id: SchemaVariantId, - installed_pkg_id: InstalledPkgId, - hash: String, - ) -> Self { - Self::SchemaVariant { - installed_pkg_asset_id: InstalledPkgAssetId::NONE, - installed_pkg_id, - id: schema_variant_id, - hash, - } - } + // pub fn new_for_schema_variant( + // schema_variant_id: SchemaVariantId, + // installed_pkg_id: InstalledPkgId, + // hash: String, + // ) -> Self { + // Self::SchemaVariant { + // installed_pkg_asset_id: InstalledPkgAssetId::NONE, + // installed_pkg_id, + // id: schema_variant_id, + // hash, + // } + // } - pub fn new_for_schema_variant_definition( - schema_variant_definition_id: SchemaVariantDefinitionId, - installed_pkg_id: InstalledPkgId, - hash: String, - ) -> Self { - Self::SchemaVariantDefinition { - installed_pkg_asset_id: InstalledPkgAssetId::NONE, - installed_pkg_id, - id: schema_variant_definition_id, - hash, - } - } + // pub fn new_for_schema_variant_definition( + // schema_variant_definition_id: SchemaVariantDefinitionId, + // installed_pkg_id: InstalledPkgId, + // hash: String, + // ) -> Self { + // Self::SchemaVariantDefinition { + // installed_pkg_asset_id: InstalledPkgAssetId::NONE, + // installed_pkg_id, + // id: schema_variant_definition_id, + // hash, + // } + // } pub fn new_for_func(func_id: FuncId, installed_pkg_id: InstalledPkgId, hash: String) -> Self { Self::Func { @@ -149,30 +149,30 @@ impl From<&InstalledPkgAsset> for InstalledPkgAssetTyped { let hash = value.asset_hash().to_string(); match value.asset_kind { - InstalledPkgAssetKind::Schema => Self::Schema { - installed_pkg_asset_id, - installed_pkg_id, - id: Into::::into(value.asset_id()).into(), - hash, - }, - 
InstalledPkgAssetKind::SchemaVariant => Self::SchemaVariant { - installed_pkg_asset_id, - installed_pkg_id, - id: Into::::into(value.asset_id()).into(), - hash, - }, + // InstalledPkgAssetKind::Schema => Self::Schema { + // installed_pkg_asset_id, + // installed_pkg_id, + // id: Into::::into(value.asset_id()).into(), + // hash, + // }, + // InstalledPkgAssetKind::SchemaVariant => Self::SchemaVariant { + // installed_pkg_asset_id, + // installed_pkg_id, + // id: Into::::into(value.asset_id()).into(), + // hash, + // }, InstalledPkgAssetKind::Func => Self::Func { installed_pkg_asset_id, installed_pkg_id, id: Into::::into(value.asset_id()).into(), hash, }, - InstalledPkgAssetKind::SchemaVariantDefinition => Self::SchemaVariantDefinition { - installed_pkg_asset_id, - installed_pkg_id, - id: Into::::into(value.asset_id()).into(), - hash, - }, + // InstalledPkgAssetKind::SchemaVariantDefinition => Self::SchemaVariantDefinition { + // installed_pkg_asset_id, + // installed_pkg_id, + // id: Into::::into(value.asset_id()).into(), + // hash, + // }, } } } @@ -198,40 +198,39 @@ impl InstalledPkgAsset { String, InstalledPkgAssetKind, ) = match pkg_asset { - InstalledPkgAssetTyped::Schema { - installed_pkg_id, - id, - hash, - .. - } => ( - installed_pkg_id, - Into::::into(id).into(), - hash, - InstalledPkgAssetKind::Schema, - ), - InstalledPkgAssetTyped::SchemaVariant { - installed_pkg_id, - id, - hash, - .. - } => ( - installed_pkg_id, - Into::::into(id).into(), - hash, - InstalledPkgAssetKind::SchemaVariant, - ), - InstalledPkgAssetTyped::SchemaVariantDefinition { - installed_pkg_id, - id, - hash, - .. - } => ( - installed_pkg_id, - Into::::into(id).into(), - hash, - InstalledPkgAssetKind::SchemaVariantDefinition, - ), - + // InstalledPkgAssetTyped::Schema { + // installed_pkg_id, + // id, + // hash, + // .. 
+ // } => ( + // installed_pkg_id, + // Into::::into(id).into(), + // hash, + // InstalledPkgAssetKind::Schema, + // ), + // InstalledPkgAssetTyped::SchemaVariant { + // installed_pkg_id, + // id, + // hash, + // .. + // } => ( + // installed_pkg_id, + // Into::::into(id).into(), + // hash, + // InstalledPkgAssetKind::SchemaVariant, + // ), + // InstalledPkgAssetTyped::SchemaVariantDefinition { + // installed_pkg_id, + // id, + // hash, + // .. + // } => ( + // installed_pkg_id, + // Into::::into(id).into(), + // hash, + // InstalledPkgAssetKind::SchemaVariantDefinition, + // ), InstalledPkgAssetTyped::Func { installed_pkg_id, id, @@ -266,133 +265,133 @@ impl InstalledPkgAsset { Ok((object, asset_typed)) } - pub fn as_installed_schema(&self) -> InstalledPkgResult { - let typed: InstalledPkgAssetTyped = self.into(); + // pub fn as_installed_schema(&self) -> InstalledPkgResult { + // let typed: InstalledPkgAssetTyped = self.into(); - match typed { - InstalledPkgAssetTyped::Schema { .. } => Ok(typed), - InstalledPkgAssetTyped::SchemaVariant { - installed_pkg_asset_id, - .. - } => Err(super::InstalledPkgError::InstalledPkgKindMismatch( - installed_pkg_asset_id, - InstalledPkgAssetKind::Schema, - InstalledPkgAssetKind::SchemaVariant, - )), - InstalledPkgAssetTyped::SchemaVariantDefinition { - installed_pkg_asset_id, - .. - } => Err(super::InstalledPkgError::InstalledPkgKindMismatch( - installed_pkg_asset_id, - InstalledPkgAssetKind::Schema, - InstalledPkgAssetKind::SchemaVariantDefinition, - )), - InstalledPkgAssetTyped::Func { - installed_pkg_asset_id, - .. - } => Err(super::InstalledPkgError::InstalledPkgKindMismatch( - installed_pkg_asset_id, - InstalledPkgAssetKind::Schema, - InstalledPkgAssetKind::Func, - )), - } - } + // match typed { + // InstalledPkgAssetTyped::Schema { .. } => Ok(typed), + // InstalledPkgAssetTyped::SchemaVariant { + // installed_pkg_asset_id, + // .. 
+ // } => Err(super::InstalledPkgError::InstalledPkgKindMismatch( + // installed_pkg_asset_id, + // InstalledPkgAssetKind::Schema, + // InstalledPkgAssetKind::SchemaVariant, + // )), + // InstalledPkgAssetTyped::SchemaVariantDefinition { + // installed_pkg_asset_id, + // .. + // } => Err(super::InstalledPkgError::InstalledPkgKindMismatch( + // installed_pkg_asset_id, + // InstalledPkgAssetKind::Schema, + // InstalledPkgAssetKind::SchemaVariantDefinition, + // )), + // InstalledPkgAssetTyped::Func { + // installed_pkg_asset_id, + // .. + // } => Err(super::InstalledPkgError::InstalledPkgKindMismatch( + // installed_pkg_asset_id, + // InstalledPkgAssetKind::Schema, + // InstalledPkgAssetKind::Func, + // )), + // } + // } - pub fn as_installed_schema_variant_definition( - &self, - ) -> InstalledPkgResult { - let typed: InstalledPkgAssetTyped = self.into(); + // pub fn as_installed_schema_variant_definition( + // &self, + // ) -> InstalledPkgResult { + // let typed: InstalledPkgAssetTyped = self.into(); - match typed { - InstalledPkgAssetTyped::SchemaVariantDefinition { .. } => Ok(typed), - InstalledPkgAssetTyped::SchemaVariant { - installed_pkg_asset_id, - .. - } => Err(super::InstalledPkgError::InstalledPkgKindMismatch( - installed_pkg_asset_id, - InstalledPkgAssetKind::SchemaVariantDefinition, - InstalledPkgAssetKind::SchemaVariant, - )), - InstalledPkgAssetTyped::Schema { - installed_pkg_asset_id, - .. - } => Err(super::InstalledPkgError::InstalledPkgKindMismatch( - installed_pkg_asset_id, - InstalledPkgAssetKind::SchemaVariantDefinition, - InstalledPkgAssetKind::Schema, - )), - InstalledPkgAssetTyped::Func { - installed_pkg_asset_id, - .. - } => Err(super::InstalledPkgError::InstalledPkgKindMismatch( - installed_pkg_asset_id, - InstalledPkgAssetKind::SchemaVariantDefinition, - InstalledPkgAssetKind::Func, - )), - } - } + // match typed { + // InstalledPkgAssetTyped::SchemaVariantDefinition { .. 
} => Ok(typed), + // InstalledPkgAssetTyped::SchemaVariant { + // installed_pkg_asset_id, + // .. + // } => Err(super::InstalledPkgError::InstalledPkgKindMismatch( + // installed_pkg_asset_id, + // InstalledPkgAssetKind::SchemaVariantDefinition, + // InstalledPkgAssetKind::SchemaVariant, + // )), + // InstalledPkgAssetTyped::Schema { + // installed_pkg_asset_id, + // .. + // } => Err(super::InstalledPkgError::InstalledPkgKindMismatch( + // installed_pkg_asset_id, + // InstalledPkgAssetKind::SchemaVariantDefinition, + // InstalledPkgAssetKind::Schema, + // )), + // InstalledPkgAssetTyped::Func { + // installed_pkg_asset_id, + // .. + // } => Err(super::InstalledPkgError::InstalledPkgKindMismatch( + // installed_pkg_asset_id, + // InstalledPkgAssetKind::SchemaVariantDefinition, + // InstalledPkgAssetKind::Func, + // )), + // } + // } - pub fn as_installed_schema_variant(&self) -> InstalledPkgResult { - let typed: InstalledPkgAssetTyped = self.into(); + // pub fn as_installed_schema_variant(&self) -> InstalledPkgResult { + // let typed: InstalledPkgAssetTyped = self.into(); - match typed { - InstalledPkgAssetTyped::SchemaVariant { .. } => Ok(typed), - InstalledPkgAssetTyped::Schema { - installed_pkg_asset_id, - .. - } => Err(super::InstalledPkgError::InstalledPkgKindMismatch( - installed_pkg_asset_id, - InstalledPkgAssetKind::SchemaVariant, - InstalledPkgAssetKind::Schema, - )), - InstalledPkgAssetTyped::SchemaVariantDefinition { - installed_pkg_asset_id, - .. - } => Err(super::InstalledPkgError::InstalledPkgKindMismatch( - installed_pkg_asset_id, - InstalledPkgAssetKind::SchemaVariant, - InstalledPkgAssetKind::SchemaVariantDefinition, - )), - InstalledPkgAssetTyped::Func { - installed_pkg_asset_id, - .. - } => Err(super::InstalledPkgError::InstalledPkgKindMismatch( - installed_pkg_asset_id, - InstalledPkgAssetKind::SchemaVariant, - InstalledPkgAssetKind::Func, - )), - } - } + // match typed { + // InstalledPkgAssetTyped::SchemaVariant { .. 
} => Ok(typed), + // InstalledPkgAssetTyped::Schema { + // installed_pkg_asset_id, + // .. + // } => Err(super::InstalledPkgError::InstalledPkgKindMismatch( + // installed_pkg_asset_id, + // InstalledPkgAssetKind::SchemaVariant, + // InstalledPkgAssetKind::Schema, + // )), + // InstalledPkgAssetTyped::SchemaVariantDefinition { + // installed_pkg_asset_id, + // .. + // } => Err(super::InstalledPkgError::InstalledPkgKindMismatch( + // installed_pkg_asset_id, + // InstalledPkgAssetKind::SchemaVariant, + // InstalledPkgAssetKind::SchemaVariantDefinition, + // )), + // InstalledPkgAssetTyped::Func { + // installed_pkg_asset_id, + // .. + // } => Err(super::InstalledPkgError::InstalledPkgKindMismatch( + // installed_pkg_asset_id, + // InstalledPkgAssetKind::SchemaVariant, + // InstalledPkgAssetKind::Func, + // )), + // } + // } pub fn as_installed_func(&self) -> InstalledPkgResult { let typed: InstalledPkgAssetTyped = self.into(); match typed { InstalledPkgAssetTyped::Func { .. } => Ok(typed), - InstalledPkgAssetTyped::Schema { - installed_pkg_asset_id, - .. - } => Err(super::InstalledPkgError::InstalledPkgKindMismatch( - installed_pkg_asset_id, - InstalledPkgAssetKind::Func, - InstalledPkgAssetKind::Schema, - )), - InstalledPkgAssetTyped::SchemaVariantDefinition { - installed_pkg_asset_id, - .. - } => Err(super::InstalledPkgError::InstalledPkgKindMismatch( - installed_pkg_asset_id, - InstalledPkgAssetKind::Func, - InstalledPkgAssetKind::SchemaVariantDefinition, - )), - InstalledPkgAssetTyped::SchemaVariant { - installed_pkg_asset_id, - .. - } => Err(super::InstalledPkgError::InstalledPkgKindMismatch( - installed_pkg_asset_id, - InstalledPkgAssetKind::Func, - InstalledPkgAssetKind::SchemaVariant, - )), + // InstalledPkgAssetTyped::Schema { + // installed_pkg_asset_id, + // .. 
+ // } => Err(super::InstalledPkgError::InstalledPkgKindMismatch( + // installed_pkg_asset_id, + // InstalledPkgAssetKind::Func, + // InstalledPkgAssetKind::Schema, + // )), + // InstalledPkgAssetTyped::SchemaVariantDefinition { + // installed_pkg_asset_id, + // .. + // } => Err(super::InstalledPkgError::InstalledPkgKindMismatch( + // installed_pkg_asset_id, + // InstalledPkgAssetKind::Func, + // InstalledPkgAssetKind::SchemaVariantDefinition, + // )), + // InstalledPkgAssetTyped::SchemaVariant { + // installed_pkg_asset_id, + // .. + // } => Err(super::InstalledPkgError::InstalledPkgKindMismatch( + // installed_pkg_asset_id, + // InstalledPkgAssetKind::Func, + // InstalledPkgAssetKind::SchemaVariant, + // )), } } diff --git a/lib/dal/src/job.rs b/lib/dal/src/job.rs index 00ec695402..03318a787d 100644 --- a/lib/dal/src/job.rs +++ b/lib/dal/src/job.rs @@ -1,5 +1,5 @@ pub mod consumer; -pub mod definition; +// pub mod definition; pub mod processor; pub mod producer; pub mod queue; diff --git a/lib/dal/src/job/consumer.rs b/lib/dal/src/job/consumer.rs index 9e81e23a46..344093a6ff 100644 --- a/lib/dal/src/job/consumer.rs +++ b/lib/dal/src/job/consumer.rs @@ -8,60 +8,46 @@ use thiserror::Error; use tokio::task::JoinError; use crate::{ - fix::FixError, func::binding_return_value::FuncBindingReturnValueError, - job::producer::BlockingJobError, job::producer::JobProducerError, status::StatusUpdaterError, - AccessBuilder, ActionPrototypeError, ActionPrototypeId, AttributeValueError, ComponentError, - ComponentId, DalContext, DalContextBuilder, FixBatchId, FixResolverError, StandardModelError, - TransactionsError, Visibility, WsEventError, + job::producer::BlockingJobError, job::producer::JobProducerError, AccessBuilder, + ActionPrototypeId, DalContext, DalContextBuilder, StandardModelError, TransactionsError, + Visibility, WsEventError, }; #[remain::sorted] #[derive(Error, Debug)] pub enum JobConsumerError { - #[error("action named {0} not found for component {1}")] - 
ActionNotFound(String, ComponentId), - #[error(transparent)] - ActionPrototype(#[from] ActionPrototypeError), + // #[error("action named {0} not found for component {1}")] + // ActionNotFound(String, ComponentId), #[error("ActionProtoype {0} not found")] ActionPrototypeNotFound(ActionPrototypeId), #[error("arg {0:?} not found at index {1}")] ArgNotFound(JobInfo, usize), - #[error(transparent)] - AttributeValue(#[from] AttributeValueError), #[error("Error blocking on job: {0}")] BlockingJob(#[from] BlockingJobError), - #[error(transparent)] - Component(#[from] ComponentError), - #[error("component {0} is destroyed")] - ComponentIsDestroyed(ComponentId), - #[error("component {0} not found")] - ComponentNotFound(ComponentId), + // #[error("component {0} is destroyed")] + // ComponentIsDestroyed(ComponentId), + // #[error("component {0} not found")] + // ComponentNotFound(ComponentId), #[error(transparent)] Council(#[from] council_server::client::Error), #[error("Protocol error with council: {0}")] CouncilProtocol(String), - #[error(transparent)] - Fix(#[from] FixError), - #[error(transparent)] - FixResolver(#[from] FixResolverError), - #[error(transparent)] - FuncBindingReturnValue(#[from] FuncBindingReturnValueError), #[error("Invalid job arguments. 
Expected: {0} Actual: {1:?}")] InvalidArguments(String, Vec), #[error(transparent)] Io(#[from] ::std::io::Error), #[error(transparent)] JobProducer(#[from] JobProducerError), - #[error("missing fix execution batch for id: {0}")] - MissingFixBatch(FixBatchId), + // #[error("missing fix execution batch for id: {0}")] + // MissingFixBatch(FixBatchId), #[error(transparent)] Nats(#[from] NatsError), #[error("nats is unavailable")] NatsUnavailable, - #[error("no schema found for component {0}")] - NoSchemaFound(ComponentId), - #[error("no schema variant found for component {0}")] - NoSchemaVariantFound(ComponentId), + // #[error("no schema found for component {0}")] + // NoSchemaFound(ComponentId), + // #[error("no schema variant found for component {0}")] + // NoSchemaVariantFound(ComponentId), #[error(transparent)] PgPool(#[from] PgPoolError), #[error(transparent)] @@ -69,8 +55,6 @@ pub enum JobConsumerError { #[error(transparent)] StandardModel(#[from] StandardModelError), #[error(transparent)] - StatusUpdaterError(#[from] StatusUpdaterError), - #[error(transparent)] TokioTask(#[from] JoinError), #[error(transparent)] Transactions(#[from] TransactionsError), diff --git a/lib/dal/src/job/definition/dependent_values_update.rs b/lib/dal/src/job/definition/dependent_values_update.rs index cf623f1fbf..ef72dc550c 100644 --- a/lib/dal/src/job/definition/dependent_values_update.rs +++ b/lib/dal/src/job/definition/dependent_values_update.rs @@ -12,8 +12,8 @@ use crate::{ JobConsumer, JobConsumerError, JobConsumerMetadata, JobConsumerResult, JobInfo, }, job::producer::{JobProducer, JobProducerResult}, - AccessBuilder, AttributeValue, AttributeValueError, AttributeValueId, AttributeValueResult, - DalContext, StandardModel, StatusUpdater, Visibility, WsEvent, + AccessBuilder, AttributeValue, AttributeValueId, DalContext, StandardModel, StatusUpdater, + Visibility, WsEvent, }; #[derive(Debug, Deserialize, Serialize)] diff --git a/lib/dal/src/lib.rs b/lib/dal/src/lib.rs index 
071e80d49f..6dac5a631d 100644 --- a/lib/dal/src/lib.rs +++ b/lib/dal/src/lib.rs @@ -18,128 +18,27 @@ use veritech_client::CycloneEncryptionKey; use crate::builtins::SelectedTestBuiltinSchemas; -pub use action::{Action, ActionError, ActionId}; -pub use action_prototype::{ - ActionKind, ActionPrototype, ActionPrototypeContext, ActionPrototypeError, ActionPrototypeId, - ActionPrototypeView, -}; -pub use actor_view::ActorView; -pub use attribute::value::view::AttributeView; -pub use attribute::{ - context::{ - AttributeContext, AttributeContextBuilder, AttributeContextBuilderError, - AttributeContextError, AttributeReadContext, - }, - prototype::argument::{ - AttributePrototypeArgument, AttributePrototypeArgumentError, AttributePrototypeArgumentId, - AttributePrototypeArgumentResult, - }, - prototype::{ - AttributePrototype, AttributePrototypeError, AttributePrototypeId, AttributePrototypeResult, - }, - value::{ - AttributeValue, AttributeValueError, AttributeValueId, AttributeValuePayload, - AttributeValueResult, - }, -}; -pub use builtins::{BuiltinsError, BuiltinsResult}; -pub use change_set::{ChangeSet, ChangeSetError, ChangeSetPk, ChangeSetStatus}; -pub use code_view::{CodeLanguage, CodeView}; -pub use component::{ - resource::ResourceView, status::ComponentStatus, status::HistoryActorTimestamp, Component, - ComponentError, ComponentId, ComponentView, ComponentViewProperties, -}; -pub use context::{ - AccessBuilder, Connections, DalContext, DalContextBuilder, RequestContext, ServicesContext, - Transactions, TransactionsError, -}; -pub use diagram::{ - connection::Connection, connection::DiagramEdgeView, Diagram, DiagramError, DiagramKind, -}; -pub use edge::{Edge, EdgeError, EdgeResult}; -pub use fix::batch::{FixBatch, FixBatchId}; -pub use fix::resolver::{FixResolver, FixResolverError, FixResolverId}; -pub use fix::{Fix, FixCompletionStatus, FixError, FixId}; -pub use func::argument::FuncArgument; -pub use func::binding_return_value::{FuncBindingReturnValue, 
FuncBindingReturnValueError}; -pub use func::{ - backend::{FuncBackendError, FuncBackendKind, FuncBackendResponseType}, - binding::{FuncBinding, FuncBindingError, FuncBindingId}, - Func, FuncError, FuncId, FuncResult, -}; -pub use history_event::{HistoryActor, HistoryEvent, HistoryEventError}; -pub use index_map::IndexMap; -pub use job::definition::DependentValuesUpdate; -pub use job::processor::{JobQueueProcessor, NatsProcessor}; -pub use job_failure::{JobFailure, JobFailureError, JobFailureResult}; -pub use jwt_key::JwtPublicSigningKey; -pub use key_pair::{KeyPair, KeyPairError, KeyPairResult, PublicKey}; -pub use label_list::{LabelEntry, LabelList, LabelListError}; -pub use node::NodeId; -pub use node::{Node, NodeError, NodeKind}; -pub use node_menu::NodeMenuError; -pub use prop::{Prop, PropError, PropId, PropKind, PropPk, PropResult}; -pub use prototype_context::HasPrototypeContext; -pub use prototype_list_for_func::{ - PrototypeListForFunc, PrototypeListForFuncError, PrototypeListForFuncResult, -}; -pub use provider::external::{ExternalProvider, ExternalProviderError, ExternalProviderId}; -pub use provider::internal::{InternalProvider, InternalProviderError, InternalProviderId}; -pub use qualification::{QualificationError, QualificationView}; -pub use reconciliation_prototype::{ - ReconciliationPrototype, ReconciliationPrototypeContext, ReconciliationPrototypeError, - ReconciliationPrototypeId, -}; -pub use schema::variant::leaves::LeafInput; -pub use schema::variant::leaves::LeafInputLocation; -pub use schema::variant::leaves::LeafKind; -pub use schema::variant::root_prop::component_type::ComponentType; -pub use schema::variant::root_prop::RootProp; -pub use schema::variant::root_prop::RootPropChild; -pub use schema::variant::SchemaVariantError; -pub use schema::{Schema, SchemaError, SchemaId, SchemaPk, SchemaVariant, SchemaVariantId}; -pub use secret::{ - DecryptedSecret, EncryptedSecret, Secret, SecretAlgorithm, SecretError, SecretId, SecretPk, - 
SecretResult, SecretVersion, -}; -pub use socket::{Socket, SocketArity, SocketId}; -pub use standard_model::{StandardModel, StandardModelError, StandardModelResult}; -pub use status::{ - StatusUpdate, StatusUpdateError, StatusUpdateResult, StatusUpdater, StatusUpdaterError, -}; -pub use tenancy::{Tenancy, TenancyError}; -pub use timestamp::{Timestamp, TimestampError}; -pub use user::{User, UserClaim, UserError, UserPk, UserResult}; -pub use validation::prototype::{ - context::ValidationPrototypeContext, ValidationPrototype, ValidationPrototypeError, - ValidationPrototypeId, -}; -pub use validation::resolver::{ - ValidationResolver, ValidationResolverError, ValidationResolverId, ValidationStatus, -}; -pub use visibility::{Visibility, VisibilityError}; -pub use workspace::{Workspace, WorkspaceError, WorkspacePk, WorkspaceResult, WorkspaceSignup}; -pub use workspace_snapshot::graph::WorkspaceSnapshotGraph; -pub use workspace_snapshot::WorkspaceSnapshot; -pub use ws_event::{WsEvent, WsEventError, WsEventResult, WsPayload}; +// FIXME(nick,zack,jacob): move this back to component. 
+pk!(ComponentId); -pub mod action; +// pub mod action; pub mod action_prototype; -pub mod actor_view; +// pub mod actor_view; pub mod attribute; +pub mod authentication_prototype; pub mod builtins; pub mod change_set; pub mod change_set_pointer; -pub mod change_status; -pub mod code_view; -pub mod component; +// pub mod change_status; +// pub mod code_view; +// pub mod component; pub mod context; -pub mod diagram; -pub mod edge; -pub mod fix; +// pub mod diagram; +// pub mod edge; +// pub mod fix; pub mod func; pub mod history_event; -pub mod index_map; +// pub mod index_map; pub mod installed_pkg; pub mod job; pub mod job_failure; @@ -147,25 +46,25 @@ pub mod jwt_key; pub mod key_pair; pub mod label_list; pub mod node; -pub mod node_menu; +// pub mod node_menu; pub mod pkg; pub mod prop; -pub mod prop_tree; +// pub mod prop_tree; pub mod property_editor; -pub mod prototype_context; -pub mod prototype_list_for_func; +// pub mod prototype_context; +// pub mod prototype_list_for_func; pub mod provider; -pub mod qualification; -pub mod reconciliation_prototype; +// pub mod qualification; +// pub mod reconciliation_prototype; pub mod schema; -pub mod secret; +// pub mod secret; pub mod serde_impls; pub mod socket; pub mod standard_accessors; pub mod standard_model; pub mod standard_pk; -pub mod status; -pub mod tasks; +// pub mod status; +// pub mod tasks; pub mod tenancy; pub mod timestamp; pub mod user; @@ -175,6 +74,46 @@ pub mod workspace; pub mod workspace_snapshot; pub mod ws_event; +pub use action_prototype::{ + ActionKind, ActionPrototype, ActionPrototypeContext, ActionPrototypeId, +}; +pub use attribute::{ + prototype::{AttributePrototype, AttributePrototypeId}, + value::{AttributeValue, AttributeValueId}, +}; +pub use builtins::{BuiltinsError, BuiltinsResult}; +pub use change_set::{ChangeSet, ChangeSetError, ChangeSetPk, ChangeSetStatus}; +pub use context::{ + AccessBuilder, Connections, DalContext, DalContextBuilder, RequestContext, ServicesContext, + 
Transactions, TransactionsError, +}; +pub use func::{ + backend::{FuncBackendKind, FuncBackendResponseType}, + Func, FuncId, +}; +pub use history_event::{HistoryActor, HistoryEvent, HistoryEventError}; +pub use job::processor::{JobQueueProcessor, NatsProcessor}; +pub use job_failure::{JobFailure, JobFailureError, JobFailureResult}; +pub use jwt_key::JwtPublicSigningKey; +pub use key_pair::{KeyPair, KeyPairError, KeyPairResult, PublicKey}; +pub use label_list::{LabelEntry, LabelList, LabelListError}; +pub use node::NodeId; +pub use node::{Node, NodeKind}; +pub use prop::{Prop, PropId, PropKind}; +pub use provider::external::{ExternalProvider, ExternalProviderId}; +pub use provider::internal::{InternalProvider, InternalProviderId}; +pub use schema::{Schema, SchemaId, SchemaVariant, SchemaVariantId}; +pub use socket::{Socket, SocketArity, SocketId}; +pub use standard_model::{StandardModel, StandardModelError, StandardModelResult}; +pub use tenancy::{Tenancy, TenancyError}; +pub use timestamp::{Timestamp, TimestampError}; +pub use user::{User, UserClaim, UserError, UserPk, UserResult}; +pub use visibility::{Visibility, VisibilityError}; +pub use workspace::{Workspace, WorkspaceError, WorkspacePk, WorkspaceResult, WorkspaceSignup}; +pub use workspace_snapshot::graph::WorkspaceSnapshotGraph; +pub use workspace_snapshot::WorkspaceSnapshot; +pub use ws_event::{WsEvent, WsEventError, WsEventResult, WsPayload}; + #[remain::sorted] #[derive(Error, Debug)] pub enum InitializationError { @@ -209,9 +148,9 @@ pub enum ModelError { Nats(#[from] NatsError), #[error("database error")] PgError(#[from] PgError), - #[error("transactions error")] + #[error("transactions error: {0}")] Transactions(#[from] TransactionsError), - #[error(transparent)] + #[error("workspace error: {0}")] Workspace(#[from] WorkspaceError), } diff --git a/lib/dal/src/migrations/U0030__workspaces.sql b/lib/dal/src/migrations/U0030__workspaces.sql index 4218afece0..175f12b1c6 100644 --- 
a/lib/dal/src/migrations/U0030__workspaces.sql +++ b/lib/dal/src/migrations/U0030__workspaces.sql @@ -5,7 +5,7 @@ CREATE TABLE workspaces created_at timestamp with time zone NOT NULL DEFAULT CLOCK_TIMESTAMP(), updated_at timestamp with time zone NOT NULL DEFAULT CLOCK_TIMESTAMP(), name text NOT NULL, - base_change_set_id ident NOT NULL + default_change_set_id ident NOT NULL -- TODO(nick): add "REFERENCES change_set_pointers (id)" to column type ); CREATE UNIQUE INDEX ON workspaces (pk); diff --git a/lib/dal/src/migrations/U0072__action_prototype.sql b/lib/dal/src/migrations/U0072__action_prototype.sql index 2ecf9299a3..d2bb1a252f 100644 --- a/lib/dal/src/migrations/U0072__action_prototype.sql +++ b/lib/dal/src/migrations/U0072__action_prototype.sql @@ -49,4 +49,4 @@ BEGIN object := row_to_json(this_new_row); END; -$$ LANGUAGE PLPGSQL VOLATILE; +$$ LANGUAGE PLPGSQL VOLATILE; \ No newline at end of file diff --git a/lib/dal/src/migrations/U3001__change_set_pointers.sql b/lib/dal/src/migrations/U3001__change_set_pointers.sql index 24aa92fff6..605748a5e2 100644 --- a/lib/dal/src/migrations/U3001__change_set_pointers.sql +++ b/lib/dal/src/migrations/U3001__change_set_pointers.sql @@ -4,6 +4,7 @@ CREATE TABLE change_set_pointers created_at timestamp with time zone NOT NULL DEFAULT CLOCK_TIMESTAMP(), updated_at timestamp with time zone NOT NULL DEFAULT CLOCK_TIMESTAMP(), name text NOT NULL, + base_change_set_id ident, -- TODO(nick): add once workspaces are added -- workspace_id ident REFERENCES workspaces_v2 (id) NOT NULL, workspace_snapshot_id ident REFERENCES workspace_snapshots (id) diff --git a/lib/dal/src/node.rs b/lib/dal/src/node.rs index 4ee7faf225..6f0056b08c 100644 --- a/lib/dal/src/node.rs +++ b/lib/dal/src/node.rs @@ -1,58 +1,24 @@ +use content_store::ContentHash; use rand::prelude::SliceRandom; -use rand::thread_rng; + use serde::{Deserialize, Serialize}; -use si_data_nats::NatsError; -use si_data_pg::PgError; -use std::collections::{HashMap, HashSet}; + + + 
+use strum::EnumDiscriminants; use telemetry::prelude::*; -use thiserror::Error; -use crate::edge::EdgeKind; -use crate::standard_model::objects_from_rows; + + +use crate::workspace_snapshot::content_address::ContentAddress; use crate::{ - impl_standard_model, pk, schema::variant::SchemaVariantError, standard_model, - standard_model_accessor, standard_model_belongs_to, Component, ComponentId, HistoryEventError, - StandardModel, StandardModelError, Tenancy, Timestamp, Visibility, + pk, StandardModel, Timestamp, }; -use crate::{DalContext, Edge, SchemaError, TransactionsError}; -const LIST_FOR_KIND: &str = include_str!("queries/node/list_for_kind.sql"); -const LIST_LIVE: &str = include_str!("queries/node/list_live.sql"); -#[remain::sorted] -#[derive(Error, Debug)] -pub enum NodeError { - #[error("component is None")] - ComponentIsNone, - #[error("edge error: {0}")] - Edge(String), - #[error("history event error: {0}")] - HistoryEvent(#[from] HistoryEventError), - #[error("nats txn error: {0}")] - Nats(#[from] NatsError), - #[error("could not find node with ID: {0}")] - NotFound(NodeId), - #[error("pg error: {0}")] - Pg(#[from] PgError), - #[error("schema error: {0}")] - Schema(#[from] SchemaError), - #[error("cannot find schema id to generate node template")] - SchemaIdNotFound, - #[error("cannot generate node template with missing default schema variant")] - SchemaMissingDefaultVariant, - #[error("schema variant error: {0}")] - SchemaVariant(#[from] SchemaVariantError), - #[error("error serializing/deserializing json: {0}")] - SerdeJson(#[from] serde_json::Error), - #[error("standard model error: {0}")] - StandardModelError(#[from] StandardModelError), - #[error("transactions error: {0}")] - Transactions(#[from] TransactionsError), -} +// const LIST_FOR_KIND: &str = include_str!("queries/node/list_for_kind.sql"); +// const LIST_LIVE: &str = include_str!("queries/node/list_live.sql"); -pub type NodeResult = Result; - -pk!(NodePk); pk!(NodeId); /// The kind of a given 
[`Node`](Node) that corresponds to the [`DiagramKind`](crate::DiagramKind). @@ -78,210 +44,215 @@ pub enum NodeKind { Configuration, } -/// A mathematical node that can be used to create [`Edges`](crate::Edge). +/// Visual representation of a [`Component`](crate::Component) for a given [`kind`](NodeKind). #[derive(Deserialize, Serialize, Debug, Clone, PartialEq, Eq)] pub struct Node { - pk: NodePk, id: NodeId, - kind: NodeKind, - #[serde(flatten)] - tenancy: Tenancy, #[serde(flatten)] timestamp: Timestamp, - #[serde(flatten)] - visibility: Visibility, - x: String, - y: String, - width: Option, - height: Option, + kind: NodeKind, + pub x: String, + pub y: String, + pub width: Option, + pub height: Option, } -impl_standard_model! { - model: Node, - pk: NodePk, +#[derive(Debug, PartialEq)] +pub struct NodeGraphNode { id: NodeId, - table_name: "nodes", - history_event_label_base: "node", - history_event_message_name: "Node" + content_address: ContentAddress, + content: NodeContentV1, +} + +#[derive(EnumDiscriminants, Serialize, Deserialize, PartialEq)] +#[serde(tag = "version")] +pub enum NodeContent { + V1(NodeContentV1), +} + +#[derive(Debug, Clone, Deserialize, Serialize, PartialEq)] +pub struct NodeContentV1 { + #[serde(flatten)] + pub timestamp: Timestamp, + pub kind: NodeKind, + pub x: String, + pub y: String, + pub width: Option, + pub height: Option, +} + +impl NodeGraphNode { + pub fn assemble( + id: impl Into, + content_hash: ContentHash, + content: NodeContentV1, + ) -> Self { + Self { + id: id.into(), + content_address: ContentAddress::Node(content_hash), + content, + } + } } impl Node { - #[instrument(skip_all)] - pub async fn new(ctx: &DalContext, kind: &NodeKind) -> NodeResult { - let row = ctx - .txns() - .await? 
- .pg() - .query_one( - "SELECT object FROM node_create_v1($1, $2, $3)", - &[ctx.tenancy(), ctx.visibility(), &kind.as_ref()], - ) - .await?; - let object = standard_model::finish_create_from_row(ctx, row).await?; - Ok(object) + pub fn assemble(id: NodeId, inner: &NodeContentV1) -> Self { + Self { + id, + timestamp: inner.timestamp, + kind: inner.kind, + x: inner.x.clone(), + y: inner.y.clone(), + width: inner.width.clone(), + height: inner.height.clone(), + } } - standard_model_accessor!(kind, Enum(NodeKind), NodeResult); - standard_model_accessor!(x, String, NodeResult); - standard_model_accessor!(y, String, NodeResult); - standard_model_accessor!(width, Option, NodeResult); - standard_model_accessor!(height, Option, NodeResult); - - standard_model_belongs_to!( - lookup_fn: component, - set_fn: set_component, - unset_fn: unset_component, - table: "node_belongs_to_component", - model_table: "components", - belongs_to_id: ComponentId, - returns: Component, - result: NodeResult, - ); - - /// List all "live" [`Nodes`](Node) for a given [`NodeKind`](NodeKind). - /// - /// The [`DalContext`](crate::DalContext) should be provided with "deletion" - /// [`Visibility`](crate::Visibility). - pub async fn list_live(ctx: &DalContext, kind: NodeKind) -> NodeResult> { - let rows = ctx - .txns() - .await? - .pg() - .query( - LIST_LIVE, - &[ - ctx.tenancy(), - &ctx.visibility().to_deleted(), - &kind.as_ref(), - ], - ) - .await?; - Ok(objects_from_rows(rows)?) + pub fn id(&self) -> NodeId { + self.id } +} - /// Find all [`NodeIds`](Self) for a given [`NodeKind`]. - #[instrument(skip_all)] - pub async fn list_for_kind(ctx: &DalContext, kind: NodeKind) -> NodeResult> { - let rows = ctx - .txns() - .await? 
- .pg() - .query( - LIST_FOR_KIND, - &[ctx.tenancy(), ctx.visibility(), &kind.as_ref()], - ) - .await?; - let mut node_ids = HashSet::new(); - for row in rows { - let node_id: NodeId = row.try_get("node_id")?; - node_ids.insert(node_id); +impl From for NodeContentV1 { + fn from(value: Node) -> Self { + Self { + timestamp: value.timestamp, + kind: value.kind, + x: value.x, + y: value.y, + width: value.width, + height: value.height, } - Ok(node_ids) } +} - /// List all [`Nodes`](Self) of kind [`configuration`](NodeKind::Configuration) in - /// [`topological`](https://en.wikipedia.org/wiki/Topological_sorting) order. The order will - /// be also be stable. - pub async fn list_topologically_sorted_configuration_nodes_with_stable_ordering( - ctx: &DalContext, - shuffle_edges: bool, - ) -> NodeResult> { - let total_start = std::time::Instant::now(); - let ctx_with_deleted = &ctx.clone_with_delete_visibility(); - - // Gather all nodes with at least one edge. - let mut edges = Edge::list_for_kind(ctx_with_deleted, EdgeKind::Configuration) - .await - .map_err(|e| NodeError::Edge(e.to_string()))?; - if shuffle_edges { - edges.shuffle(&mut thread_rng()); - } +// impl Node { +// /// List all "live" [`Nodes`](Node) for a given [`NodeKind`](NodeKind). +// /// +// /// The [`DalContext`](crate::DalContext) should be provided with "deletion" +// /// [`Visibility`](crate::Visibility). +// pub async fn list_live(ctx: &DalContext, kind: NodeKind) -> NodeResult> { +// let rows = ctx +// .txns() +// .await? +// .pg() +// .query( +// LIST_LIVE, +// &[ +// ctx.tenancy(), +// &ctx.visibility().to_deleted(), +// &kind.as_ref(), +// ], +// ) +// .await?; +// Ok(objects_from_rows(rows)?) +// } - // Populate the nodes map based on all configuration edges. The "key" is every node with at - // least one edge. The "value" is a set of nodes that the "key" node depends on (i.e. the - // set of nodes are sources/tails in edges and the "key" node is the destination/head in - // in edges). 
- let mut nodes: HashMap> = HashMap::new(); - for edge in edges { - nodes - .entry(edge.head_node_id()) - .and_modify(|set| { - set.insert(edge.tail_node_id()); - }) - .or_insert_with(|| { - let mut set = HashSet::new(); - set.insert(edge.tail_node_id()); - set - }); - } +// /// Find all [`NodeIds`](Self) for a given [`NodeKind`]. +// #[instrument(skip_all)] +// pub async fn list_for_kind(ctx: &DalContext, kind: NodeKind) -> NodeResult> { +// let rows = ctx +// .txns() +// .await? +// .pg() +// .query( +// LIST_FOR_KIND, +// &[ctx.tenancy(), ctx.visibility(), &kind.as_ref()], +// ) +// .await?; +// let mut node_ids = HashSet::new(); +// for row in rows { +// let node_id: NodeId = row.try_get("node_id")?; +// node_ids.insert(node_id); +// } +// Ok(node_ids) +// } - // Add all floating nodes (those without edges). - for potential_floating_node in - Self::list_for_kind(ctx_with_deleted, NodeKind::Configuration).await? - { - if nodes.get(&potential_floating_node).is_none() { - nodes.insert(potential_floating_node, HashSet::new()); - } - } +// /// List all [`Nodes`](Self) of kind [`configuration`](NodeKind::Configuration) in +// /// [`topological`](https://en.wikipedia.org/wiki/Topological_sorting) order. The order will +// /// be also be stable. +// pub async fn list_topologically_sorted_configuration_nodes_with_stable_ordering( +// ctx: &DalContext, +// shuffle_edges: bool, +// ) -> NodeResult> { +// let total_start = std::time::Instant::now(); +// let ctx_with_deleted = &ctx.clone_with_delete_visibility(); - // Gather all results based on the nodes and their "depends_on" sets. This is a topological - // sort with stable ordering. 
- let mut results = Vec::new(); - loop { - let mut siblings: Vec = Vec::new(); - - // For each node in the map, find siblings (those whose "depends_on" sets are empty) - for (node, depends_on) in &mut nodes { - if depends_on.is_empty() { - siblings.push(*node); - } - } - - // If we found no siblings, then we have processed every node in the map and are ready - // to exit the loop. - if siblings.is_empty() { - break; - } - - // Remove each sibling from the map's "keys". - for sibling in &siblings { - nodes.remove(sibling); - } - - // Remove each sibling from the map's "values". - nodes.iter_mut().for_each(|(_, depends_on)| { - for sibling in &siblings { - depends_on.remove(sibling); - } - }); - - // Provide stable ordering by sorting the siblings before extending the results. - siblings.sort(); - results.extend(siblings); - } +// // Gather all nodes with at least one edge. +// let mut edges = Edge::list_for_kind(ctx_with_deleted, EdgeKind::Configuration) +// .await +// .map_err(|e| NodeError::Edge(e.to_string()))?; +// if shuffle_edges { +// edges.shuffle(&mut thread_rng()); +// } - debug!( - "listing topologically sorted configuration nodes with stable ordering took {:?}", - total_start.elapsed() - ); - Ok(results) - } +// // Populate the nodes map based on all configuration edges. The "key" is every node with at +// // least one edge. The "value" is a set of nodes that the "key" node depends on (i.e. the +// // set of nodes are sources/tails in edges and the "key" node is the destination/head in +// // in edges). 
+// let mut nodes: HashMap> = HashMap::new(); +// for edge in edges { +// nodes +// .entry(edge.head_node_id()) +// .and_modify(|set| { +// set.insert(edge.tail_node_id()); +// }) +// .or_insert_with(|| { +// let mut set = HashSet::new(); +// set.insert(edge.tail_node_id()); +// set +// }); +// } - pub async fn set_geometry( - &mut self, - ctx: &DalContext, - x: impl AsRef, - y: impl AsRef, - width: Option>, - height: Option>, - ) -> NodeResult<()> { - self.set_x(ctx, x.as_ref()).await?; - self.set_y(ctx, y.as_ref()).await?; - self.set_width(ctx, width.as_ref().map(|val| val.as_ref())) - .await?; - self.set_height(ctx, height.as_ref().map(|val| val.as_ref())) - .await?; - - Ok(()) - } -} +// // Add all floating nodes (those without edges). +// for potential_floating_node in +// Self::list_for_kind(ctx_with_deleted, NodeKind::Configuration).await? +// { +// if nodes.get(&potential_floating_node).is_none() { +// nodes.insert(potential_floating_node, HashSet::new()); +// } +// } + +// // Gather all results based on the nodes and their "depends_on" sets. This is a topological +// // sort with stable ordering. +// let mut results = Vec::new(); +// loop { +// let mut siblings: Vec = Vec::new(); + +// // For each node in the map, find siblings (those whose "depends_on" sets are empty) +// for (node, depends_on) in &mut nodes { +// if depends_on.is_empty() { +// siblings.push(*node); +// } +// } + +// // If we found no siblings, then we have processed every node in the map and are ready +// // to exit the loop. +// if siblings.is_empty() { +// break; +// } + +// // Remove each sibling from the map's "keys". +// for sibling in &siblings { +// nodes.remove(sibling); +// } + +// // Remove each sibling from the map's "values". +// nodes.iter_mut().for_each(|(_, depends_on)| { +// for sibling in &siblings { +// depends_on.remove(sibling); +// } +// }); + +// // Provide stable ordering by sorting the siblings before extending the results. 
+// siblings.sort(); +// results.extend(siblings); +// } + +// debug!( +// "listing topologically sorted configuration nodes with stable ordering took {:?}", +// total_start.elapsed() +// ); +// Ok(results) +// } +// } diff --git a/lib/dal/src/node_menu.rs b/lib/dal/src/node_menu.rs index e818108c7d..36792866f4 100644 --- a/lib/dal/src/node_menu.rs +++ b/lib/dal/src/node_menu.rs @@ -10,7 +10,7 @@ use thiserror::Error; use crate::schema::SchemaUiMenu; use crate::DalContext; -use crate::{SchemaError, SchemaId, StandardModel, StandardModelError}; +use crate::{SchemaId, StandardModel, StandardModelError}; #[allow(clippy::large_enum_variant)] #[remain::sorted] diff --git a/lib/dal/src/pkg.rs b/lib/dal/src/pkg.rs index 06406834c1..bf7686a4eb 100644 --- a/lib/dal/src/pkg.rs +++ b/lib/dal/src/pkg.rs @@ -1,182 +1,32 @@ -use std::collections::HashMap; - -use chrono::{DateTime, Utc}; use serde::{Deserialize, Serialize}; -use thiserror::Error; -use url::ParseError; - -pub use export::{get_component_type, PkgExporter}; -pub use import::{ - attach_resource_payload_to_value, import_pkg, import_pkg_from_pkg, ImportAttributeSkip, - ImportEdgeSkip, ImportOptions, ImportSkips, -}; use si_pkg::{FuncSpecBackendKind, FuncSpecBackendResponseType, SiPkgError, SpecError}; +use std::collections::HashMap; +use thiserror::Error; -use crate::authentication_prototype::AuthenticationPrototypeError; use crate::{ - component::view::debug::ComponentDebugViewError, - func::{ - argument::{FuncArgumentError, FuncArgumentId}, - binding::FuncBindingError, - }, - installed_pkg::InstalledPkgError, - prop_tree::PropTreeError, - schema::variant::definition::{SchemaVariantDefinitionError, SchemaVariantDefinitionId}, - socket::{SocketEdgeKind, SocketError}, - ActionPrototypeError, AttributeContextBuilderError, AttributePrototypeArgumentError, - AttributePrototypeArgumentId, AttributePrototypeError, AttributePrototypeId, - AttributeReadContext, AttributeValueError, ChangeSetError, ChangeSetPk, ComponentError, 
- ComponentId, DalContext, EdgeError, ExternalProviderError, ExternalProviderId, FuncBackendKind, - FuncBackendResponseType, FuncBindingReturnValueError, FuncError, FuncId, InternalProviderError, - InternalProviderId, NodeError, PropError, PropId, PropKind, SchemaError, SchemaId, - SchemaVariantError, SchemaVariantId, StandardModelError, UserPk, ValidationPrototypeError, - WorkspaceError, WorkspacePk, WsEvent, WsEventResult, WsPayload, + change_set_pointer::ChangeSetPointerError, installed_pkg::InstalledPkgError, ChangeSetPk, + FuncBackendKind, FuncBackendResponseType, FuncId, }; -mod export; +use crate::workspace_snapshot::WorkspaceSnapshotError; +pub use import::{import_pkg, import_pkg_from_pkg, ImportOptions}; + +// mod export; mod import; #[remain::sorted] #[derive(Debug, Error)] pub enum PkgError { - #[error("Action creation error: {0}")] - Action(#[from] ActionPrototypeError), #[error(transparent)] - AttributeContextBuilder(#[from] AttributeContextBuilderError), - #[error("attribute function for context {0:?} has key {1} but is not setting a prop value")] - AttributeFuncForKeyMissingProp(AttributeReadContext, String), - #[error("attribute function for prop {0} has a key {1} but prop kind is {2} not a map)")] - AttributeFuncForKeySetOnWrongKind(PropId, String, PropKind), - #[error(transparent)] - AttributePrototype(#[from] AttributePrototypeError), - #[error(transparent)] - AttributePrototypeArgument(#[from] AttributePrototypeArgumentError), - #[error("Missing ExternalProvider {1} for AttributePrototypeArgument {1}")] - AttributePrototypeArgumentMissingExternalProvider( - AttributePrototypeArgumentId, - ExternalProviderId, - ), - #[error("AttributePrototypeArgument {0} missing FuncArgument {1}")] - AttributePrototypeArgumentMissingFuncArgument(AttributePrototypeArgumentId, FuncArgumentId), - #[error("Missing InternalProvider {1} for AttributePrototypeArgument {1}")] - AttributePrototypeArgumentMissingInternalProvider( - AttributePrototypeArgumentId, - 
InternalProviderId, - ), - #[error(transparent)] - AttributeValue(#[from] AttributeValueError), - #[error("parent prop could not be found with path: {0}")] - AttributeValueParentPropNotFound(String), - #[error("parent value could not be found for prop path: {0} and key {1:?}, index {2:?}")] - AttributeValueParentValueNotFound(String, Option, Option), - #[error("attribute value is a proxy but there is no value to proxy")] - AttributeValueSetToProxyButNoProxyFound, - #[error("encountered an attribute value with a key or index but no parent")] - AttributeValueWithKeyOrIndexButNoParent, - #[error("Auth func creation error: {0}")] - AuthFunc(#[from] AuthenticationPrototypeError), - #[error(transparent)] - ChangeSet(#[from] ChangeSetError), - #[error("change set {0} not found")] - ChangeSetNotFound(ChangeSetPk), - #[error(transparent)] - Component(#[from] ComponentError), - #[error(transparent)] - ComponentDebugView(#[from] ComponentDebugViewError), - #[error("component import can only happen during a workspace import")] - ComponentImportWithoutChangeSet, - #[error("could not find schema {0} for package component {1}")] - ComponentMissingBuiltinSchema(String, String), - #[error("could not find schema {0} with variant {1} for package component {2}")] - ComponentMissingBuiltinSchemaVariant(String, String, String), - #[error("component has no node: {0}")] - ComponentMissingNode(ComponentId), - #[error("could not find schema variant {0} for package component {1}")] - ComponentMissingSchemaVariant(String, String), - #[error("component spec has no position")] - ComponentSpecMissingPosition, - #[error("map item prop {0} has both custom key prototypes and custom prop only prototype")] - ConflictingMapKeyPrototypes(PropId), + ChangeSetPointer(#[from] ChangeSetPointerError), #[error("expected data on an SiPkg node, but none found: {0}")] DataNotFound(String), - #[error(transparent)] - Edge(#[from] EdgeError), - #[error("edge refers to component not in export: {0}")] - 
EdgeRefersToMissingComponent(ComponentId), - #[error("Cannot find Socket for explicit InternalProvider {0}")] - ExplicitInternalProviderMissingSocket(InternalProviderId), - #[error(transparent)] - ExternalProvider(#[from] ExternalProviderError), - #[error("Cannot find Socket for ExternalProvider {0}")] - ExternalProviderMissingSocket(ExternalProviderId), - #[error(transparent)] - Func(#[from] FuncError), - #[error(transparent)] - FuncArgument(#[from] FuncArgumentError), - #[error(transparent)] - FuncBinding(#[from] FuncBindingError), - #[error(transparent)] - FuncBindingReturnValue(#[from] FuncBindingReturnValueError), - #[error(transparent)] - FuncExecution(#[from] crate::func::execution::FuncExecutionError), #[error("Installed func id {0} does not exist")] InstalledFuncMissing(FuncId), #[error(transparent)] InstalledPkg(#[from] InstalledPkgError), - #[error("Installed schema id {0} does not exist")] - InstalledSchemaMissing(SchemaId), - #[error("Installed schema variant definition {0} does not exist")] - InstalledSchemaVariantDefinitionMissing(SchemaVariantDefinitionId), - #[error("Installed schema variant {0} does not exist")] - InstalledSchemaVariantMissing(SchemaVariantId), - #[error(transparent)] - InternalProvider(#[from] InternalProviderError), - #[error("Missing Prop {1} for InternalProvider {1}")] - InternalProviderMissingProp(InternalProviderId, PropId), - #[error("Leaf Function {0} has invalid argument {1}")] - InvalidLeafArgument(FuncId, String), - #[error("Missing AttributePrototype {0} for explicit InternalProvider {1}")] - MissingAttributePrototypeForInputSocket(AttributePrototypeId, InternalProviderId), - #[error("Missing AttributePrototype {0} for ExternalProvider {1}")] - MissingAttributePrototypeForOutputSocket(AttributePrototypeId, ExternalProviderId), - #[error("Missing Func {1} for AttributePrototype {0}")] - MissingAttributePrototypeFunc(AttributePrototypeId, FuncId), - #[error("Missing value for context {0:?}")] - 
MissingAttributeValueForContext(AttributeReadContext), - #[error("Missing a func map for changeset {0}")] - MissingChangeSetFuncMap(ChangeSetPk), - #[error("Missing component {0} for edge from {1} to {2}")] - MissingComponentForEdge(String, String, String), - #[error("Func {0} missing from exported funcs")] - MissingExportedFunc(FuncId), - #[error("Cannot find FuncArgument {0} for Func {1}")] - MissingFuncArgument(String, FuncId), - #[error("Cannot find FuncArgument {0}")] - MissingFuncArgumentById(FuncArgumentId), - #[error("Package asked for a function with the unique id {0} but none could be found")] - MissingFuncUniqueId(String), - #[error("Cannot find InternalProvider for Prop {0}")] - MissingInternalProviderForProp(PropId), - #[error("Cannot find InternalProvider for Socket named {0}")] - MissingInternalProviderForSocketName(String), - #[error("Intrinsic function {0} not found")] - MissingIntrinsicFunc(String), - #[error("Intrinsic function (0) argument {1} not found")] - MissingIntrinsicFuncArgument(String, String), - #[error("Cannot find item prop for installed map prop {0}")] - MissingItemPropForMapProp(PropId), - #[error("Cannot find installed prop {0}")] - MissingProp(PropId), - #[error("Cannot find root prop for variant {0}")] - MissingRootProp(SchemaVariantId), - #[error("Cannot find schema_variant_definition {0}")] - MissingSchemaVariantDefinition(SchemaVariantId), - #[error("Cannot find socket with name {0} for edge kind {1}")] - MissingSocketName(String, SocketEdgeKind), #[error("Unique id missing for node in workspace backup: {0}")] MissingUniqueIdForNode(String), - #[error(transparent)] - Node(#[from] NodeError), #[error("Package with that hash already installed: {0}")] PackageAlreadyInstalled(String), #[error(transparent)] @@ -184,57 +34,17 @@ pub enum PkgError { #[error(transparent)] PkgSpec(#[from] SpecError), #[error(transparent)] - Prop(#[from] PropError), - #[error("prop spec structure is invalid: {0}")] - PropSpecChildrenInvalid(String), - 
#[error(transparent)] - PropTree(#[from] PropTreeError), - #[error("prop tree structure is invalid: {0}")] - PropTreeInvalid(String), - #[error(transparent)] - Schema(#[from] SchemaError), - #[error(transparent)] - SchemaVariant(#[from] SchemaVariantError), - #[error(transparent)] - SchemaVariantDefinition(#[from] SchemaVariantDefinitionError), - #[error("schema variant not found: {0}")] - SchemaVariantNotFound(SchemaVariantId), - #[error("json serialization error: {0}")] - SerdeJson(#[from] serde_json::Error), - #[error(transparent)] - Socket(#[from] SocketError), - #[error(transparent)] - StandardModel(#[from] StandardModelError), - #[error("standard model relationship {0} missing belongs_to for {1} with id {2}")] - StandardModelMissingBelongsTo(&'static str, &'static str, String), - #[error("standard model relationship {0} found multiple belongs_to for {1} with id {2}")] - StandardModelMultipleBelongsTo(&'static str, &'static str, String), - #[error(transparent)] - UlidDecode(#[from] ulid::DecodeError), - #[error(transparent)] - UrlParse(#[from] ParseError), - #[error("Validation creation error: {0}")] - Validation(#[from] ValidationPrototypeError), - #[error(transparent)] - Workspace(#[from] WorkspaceError), - #[error("Cannot find default change set \"{0}\" in workspace backup")] - WorkspaceBackupNoDefaultChangeSet(String), - #[error("Workspace backup missing workspace name")] - WorkspaceNameNotInBackup, - #[error("Workspace not found: {0}")] - WorkspaceNotFound(WorkspacePk), - #[error("Workspace backup missing workspace pk")] - WorkspacePkNotInBackup, + WorkspaceSnaphot(#[from] WorkspaceSnapshotError), } impl PkgError { - fn prop_tree_invalid(message: impl Into) -> Self { - Self::PropTreeInvalid(message.into()) - } + // fn prop_tree_invalid(message: impl Into) -> Self { + // Self::PropTreeInvalid(message.into()) + // } - fn prop_spec_children_invalid(message: impl Into) -> Self { - Self::PropSpecChildrenInvalid(message.into()) - } + // fn 
prop_spec_children_invalid(message: impl Into) -> Self { + // Self::PropSpecChildrenInvalid(message.into()) + // } } pub type PkgResult = Result; @@ -385,143 +195,143 @@ where } } -#[derive(Clone, Deserialize, Serialize, Debug, PartialEq, Eq)] -#[serde(rename_all = "camelCase", tag = "kind")] -pub struct ModuleImportedPayload { - schema_variant_ids: Vec, -} - -#[derive(Clone, Deserialize, Serialize, Debug, PartialEq, Eq)] -#[serde(rename_all = "camelCase")] -pub struct WorkspaceImportPayload { - workspace_pk: Option, - user_pk: Option, -} - -#[derive(Clone, Deserialize, Serialize, Debug, PartialEq, Eq)] -#[serde(rename_all = "camelCase")] -pub struct WorkspaceExportPayload { - workspace_pk: Option, - user_pk: Option, -} - -#[derive(Clone, Deserialize, Serialize, Debug, PartialEq, Eq)] -#[serde(rename_all = "camelCase")] -pub struct ImportWorkspaceVotePayload { - workspace_pk: Option, - user_pk: UserPk, - vote: String, -} - -#[derive(Clone, Deserialize, Serialize, Debug, PartialEq, Eq)] -#[serde(rename_all = "camelCase")] -pub struct WorkspaceActorPayload { - workspace_pk: Option, - user_pk: Option, -} - -#[derive(Clone, Deserialize, Serialize, Debug, PartialEq, Eq)] -#[serde(rename_all = "camelCase")] -pub struct WorkspaceImportApprovalActorPayload { - workspace_pk: Option, - user_pk: Option, - created_at: DateTime, - created_by: String, - name: String, -} - -impl WsEvent { - pub async fn module_imported( - ctx: &DalContext, - schema_variant_ids: Vec, - ) -> WsEventResult { - WsEvent::new( - ctx, - WsPayload::ModuleImported(ModuleImportedPayload { schema_variant_ids }), - ) - .await - } - - pub async fn workspace_imported( - ctx: &DalContext, - workspace_pk: Option, - user_pk: Option, - ) -> WsEventResult { - WsEvent::new( - ctx, - WsPayload::WorkspaceImported(WorkspaceImportPayload { - workspace_pk, - user_pk, - }), - ) - .await - } - - pub async fn workspace_exported( - ctx: &DalContext, - workspace_pk: Option, - user_pk: Option, - ) -> WsEventResult { - 
WsEvent::new( - ctx, - WsPayload::WorkspaceExported(WorkspaceExportPayload { - workspace_pk, - user_pk, - }), - ) - .await - } - - pub async fn import_workspace_vote( - ctx: &DalContext, - workspace_pk: Option, - user_pk: UserPk, - vote: String, - ) -> WsEventResult { - WsEvent::new( - ctx, - WsPayload::ImportWorkspaceVote(ImportWorkspaceVotePayload { - workspace_pk, - user_pk, - vote, - }), - ) - .await - } - - pub async fn workspace_import_begin_approval_process( - ctx: &DalContext, - workspace_pk: Option, - user_pk: Option, - workspace_export_created_at: DateTime, - workspace_export_created_by: String, - workspace_export_name: String, - ) -> WsEventResult { - WsEvent::new( - ctx, - WsPayload::WorkspaceImportBeginApprovalProcess(WorkspaceImportApprovalActorPayload { - workspace_pk, - user_pk, - created_at: workspace_export_created_at, - created_by: workspace_export_created_by, - name: workspace_export_name, - }), - ) - .await - } - - pub async fn workspace_import_cancel_approval_process( - ctx: &DalContext, - workspace_pk: Option, - user_pk: Option, - ) -> WsEventResult { - WsEvent::new( - ctx, - WsPayload::WorkspaceImportCancelApprovalProcess(WorkspaceActorPayload { - workspace_pk, - user_pk, - }), - ) - .await - } -} +// #[derive(Clone, Deserialize, Serialize, Debug, PartialEq, Eq)] +// #[serde(rename_all = "camelCase", tag = "kind")] +// pub struct ModuleImportedPayload { +// schema_variant_ids: Vec, +// } +// +// #[derive(Clone, Deserialize, Serialize, Debug, PartialEq, Eq)] +// #[serde(rename_all = "camelCase")] +// pub struct WorkspaceImportPayload { +// workspace_pk: Option, +// user_pk: Option, +// } +// +// #[derive(Clone, Deserialize, Serialize, Debug, PartialEq, Eq)] +// #[serde(rename_all = "camelCase")] +// pub struct WorkspaceExportPayload { +// workspace_pk: Option, +// user_pk: Option, +// } +// +// #[derive(Clone, Deserialize, Serialize, Debug, PartialEq, Eq)] +// #[serde(rename_all = "camelCase")] +// pub struct ImportWorkspaceVotePayload { +// 
workspace_pk: Option, +// user_pk: UserPk, +// vote: String, +// } +// +// #[derive(Clone, Deserialize, Serialize, Debug, PartialEq, Eq)] +// #[serde(rename_all = "camelCase")] +// pub struct WorkspaceActorPayload { +// workspace_pk: Option, +// user_pk: Option, +// } +// +// #[derive(Clone, Deserialize, Serialize, Debug, PartialEq, Eq)] +// #[serde(rename_all = "camelCase")] +// pub struct WorkspaceImportApprovalActorPayload { +// workspace_pk: Option, +// user_pk: Option, +// created_at: DateTime, +// created_by: String, +// name: String, +// } +// +// impl WsEvent { +// pub async fn module_imported( +// ctx: &DalContext, +// schema_variant_ids: Vec, +// ) -> WsEventResult { +// WsEvent::new( +// ctx, +// WsPayload::ModuleImported(ModuleImportedPayload { schema_variant_ids }), +// ) +// .await +// } +// +// pub async fn workspace_imported( +// ctx: &DalContext, +// workspace_pk: Option, +// user_pk: Option, +// ) -> WsEventResult { +// WsEvent::new( +// ctx, +// WsPayload::WorkspaceImported(WorkspaceImportPayload { +// workspace_pk, +// user_pk, +// }), +// ) +// .await +// } +// +// pub async fn workspace_exported( +// ctx: &DalContext, +// workspace_pk: Option, +// user_pk: Option, +// ) -> WsEventResult { +// WsEvent::new( +// ctx, +// WsPayload::WorkspaceExported(WorkspaceExportPayload { +// workspace_pk, +// user_pk, +// }), +// ) +// .await +// } +// +// pub async fn import_workspace_vote( +// ctx: &DalContext, +// workspace_pk: Option, +// user_pk: UserPk, +// vote: String, +// ) -> WsEventResult { +// WsEvent::new( +// ctx, +// WsPayload::ImportWorkspaceVote(ImportWorkspaceVotePayload { +// workspace_pk, +// user_pk, +// vote, +// }), +// ) +// .await +// } +// +// pub async fn workspace_import_begin_approval_process( +// ctx: &DalContext, +// workspace_pk: Option, +// user_pk: Option, +// workspace_export_created_at: DateTime, +// workspace_export_created_by: String, +// workspace_export_name: String, +// ) -> WsEventResult { +// WsEvent::new( +// ctx, 
+// WsPayload::WorkspaceImportBeginApprovalProcess(WorkspaceImportApprovalActorPayload { +// workspace_pk, +// user_pk, +// created_at: workspace_export_created_at, +// created_by: workspace_export_created_by, +// name: workspace_export_name, +// }), +// ) +// .await +// } +// +// pub async fn workspace_import_cancel_approval_process( +// ctx: &DalContext, +// workspace_pk: Option, +// user_pk: Option, +// ) -> WsEventResult { +// WsEvent::new( +// ctx, +// WsPayload::WorkspaceImportCancelApprovalProcess(WorkspaceActorPayload { +// workspace_pk, +// user_pk, +// }), +// ) +// .await +// } +// } diff --git a/lib/dal/src/pkg/import.rs b/lib/dal/src/pkg/import.rs index 871a9b51e3..78281ce105 100644 --- a/lib/dal/src/pkg/import.rs +++ b/lib/dal/src/pkg/import.rs @@ -7,6 +7,7 @@ use std::{ use chrono::Utc; use serde::{Deserialize, Serialize}; use tokio::sync::Mutex; +use ulid::Ulid; use si_pkg::{ AttributeValuePath, ComponentSpecVariant, EdgeSpecKind, SchemaVariantSpecPropRoot, SiPkg, @@ -20,54 +21,81 @@ use telemetry::prelude::*; use crate::authentication_prototype::{AuthenticationPrototype, AuthenticationPrototypeContext}; use crate::{ - component::ComponentKind, - edge::EdgeKind, + change_set_pointer::{self, ChangeSetPointer, ChangeSetPointerId}, + // component::ComponentKind, + // edge::EdgeKind, func::{ self, - argument::{FuncArgumentError, FuncArgumentKind}, + // argument::FuncArgumentKind, backend::validation::FuncBackendValidationArgs, - binding::FuncBinding, - binding_return_value::FuncBindingReturnValue, + // binding::FuncBinding, + // binding_return_value::FuncBindingReturnValue, + // execution::{FuncExecution, FuncExecutionPk}, }, installed_pkg::{ InstalledPkg, InstalledPkgAsset, InstalledPkgAssetKind, InstalledPkgAssetTyped, InstalledPkgId, }, prop::PropPath, - schema::{ - variant::{ - definition::{SchemaVariantDefinition, SchemaVariantDefinitionJson}, - leaves::LeafInputLocation, - }, - SchemaUiMenu, - }, + schema::{variant::leaves::LeafInputLocation, 
SchemaUiMenu}, socket::SocketEdgeKind, validation::{Validation, ValidationKind}, - ActionKind, ActionPrototype, ActionPrototypeContext, AttributeContext, AttributeContextBuilder, - AttributePrototype, AttributePrototypeArgument, AttributePrototypeId, AttributeReadContext, - AttributeValue, AttributeValueError, ChangeSet, ChangeSetPk, Component, ComponentId, - DalContext, Edge, ExternalProvider, ExternalProviderId, Func, FuncArgument, FuncError, FuncId, - InternalProvider, InternalProviderError, InternalProviderId, LeafKind, Node, Prop, PropId, - PropKind, Schema, SchemaId, SchemaVariant, SchemaVariantError, SchemaVariantId, Socket, - StandardModel, Tenancy, UserPk, ValidationPrototype, ValidationPrototypeContext, Workspace, + workspace_snapshot::{self, WorkspaceSnapshotError}, + ActionKind, + ActionPrototype, + ActionPrototypeContext, + AttributePrototype, + AttributePrototypeId, + AttributeValue, + ChangeSet, + ChangeSetPk, + // Component, + ComponentId, + DalContext, + // Edge, + ExternalProvider, + ExternalProviderId, + Func, + // FuncArgument, + // FuncBindingError, + // FuncBindingReturnValueError, + FuncId, + InternalProvider, + InternalProviderId, + // LeafKind, + Node, + Prop, + PropId, + PropKind, + Schema, + SchemaId, + SchemaVariant, + SchemaVariantId, + Socket, + StandardModel, + Tenancy, + UserPk, + // ValidationPrototype, + // ValidationPrototypeContext, + Workspace, WorkspacePk, + WorkspaceSnapshot, }; use super::{PkgError, PkgResult}; #[derive(Clone, Debug)] enum Thing { - ActionPrototype(ActionPrototype), - AuthPrototype(AuthenticationPrototype), - AttributePrototypeArgument(AttributePrototypeArgument), - Component((Component, Node)), - Edge(Edge), + // ActionPrototype(ActionPrototype), + // AttributePrototypeArgument(AttributePrototypeArgument), + // Component((Component, Node)), + // Edge(Edge), Func(Func), - FuncArgument(FuncArgument), - Schema(Schema), - SchemaVariant(SchemaVariant), - Socket(Box<(Socket, Option, Option)>), - 
Validation(ValidationPrototype), + // FuncArgument(FuncArgument), + // Schema(Schema), + // SchemaVariant(SchemaVariant), + // Socket(Box<(Socket, Option, Option)>), + // Validation(ValidationPrototype), } type ThingMap = super::ChangeSetThingMap; @@ -88,6 +116,8 @@ pub struct ImportOptions { async fn import_change_set( ctx: &DalContext, change_set_pk: Option, + workspace_snapshot: &mut WorkspaceSnapshot, + change_set_pointer: &ChangeSetPointer, metadata: &SiPkgMetadata, funcs: &[SiPkgFunc<'_>], schemas: &[SiPkgSchema<'_>], @@ -98,8 +128,8 @@ async fn import_change_set( options: &ImportOptions, ) -> PkgResult<( Vec, - Vec<(String, Vec)>, - Vec, + Vec<(String, Vec)>, + Vec, )> { for func_spec in funcs { let unique_id = func_spec.unique_id().to_string(); @@ -112,14 +142,20 @@ async fn import_change_set( || special_case_funcs.contains(&func_spec.name()) || func_spec.is_from_builtin().unwrap_or(false) { - if let Some(func) = Func::find_by_name(ctx, func_spec.name()).await? { + dbg!(func_spec.name()); + + if let Some(func_id) = workspace_snapshot.func_find_by_name(func_spec.name())? { + let func = workspace_snapshot.func_get_by_id(ctx, func_id).await?; + thing_map.insert( change_set_pk, unique_id.to_owned(), Thing::Func(func.to_owned()), ); - } else if let Some(func) = import_func( + } else if let Some(_func) = import_func( ctx, + workspace_snapshot, + change_set_pointer, None, func_spec, installed_pkg_id, @@ -128,11 +164,11 @@ async fn import_change_set( ) .await? 
{ - let args = func_spec.arguments()?; + // let args = vec![]; //func_spec.arguments()?; - if !args.is_empty() { - import_func_arguments(ctx, None, *func.id(), &args, thing_map).await?; - } + // if !args.is_empty() { + // // import_func_arguments(ctx, None, *func.id(), &args, thing_map).await?; + // } } } else { let func = if let Some(Some(func)) = options @@ -144,7 +180,7 @@ async fn import_change_set( InstalledPkgAsset::new( ctx, InstalledPkgAssetTyped::new_for_func( - *func.id(), + func.id, installed_pkg_id, func_spec.hash().to_string(), ), @@ -163,6 +199,8 @@ async fn import_change_set( } else { import_func( ctx, + workspace_snapshot, + change_set_pointer, change_set_pk, func_spec, installed_pkg_id, @@ -182,7 +220,7 @@ async fn import_change_set( let args = func_spec.arguments()?; if !args.is_empty() { - import_func_arguments(ctx, change_set_pk, *func.id(), &args, thing_map).await?; + // import_func_arguments(ctx, change_set_pk, *func.id(), &args, thing_map).await?; } } }; @@ -190,742 +228,1191 @@ async fn import_change_set( let mut installed_schema_variant_ids = vec![]; - for schema_spec in schemas { - match &options.schemas { - None => {} - Some(schemas) => { - if !schemas.contains(&schema_spec.name().to_string().to_lowercase()) { - continue; - } - } - } - - info!( - "installing schema '{}' from {}", - schema_spec.name(), - metadata.name(), - ); - - let (_, schema_variant_ids) = - import_schema(ctx, change_set_pk, schema_spec, installed_pkg_id, thing_map).await?; - - installed_schema_variant_ids.extend(schema_variant_ids); - } - - println!("Finished Imports: {}", Utc::now()); - - let mut component_attribute_skips = vec![]; - for component_spec in components { - let skips = import_component(ctx, change_set_pk, component_spec, thing_map).await?; - if !skips.is_empty() { - component_attribute_skips.push((component_spec.name().to_owned(), skips)); - } - } - - let mut edge_skips = vec![]; - for edge_spec in edges { - if let Some(skip) = import_edge(ctx, 
change_set_pk, edge_spec, thing_map).await? { - edge_skips.push(skip); - } - } + // for schema_spec in schemas { + // match &options.schemas { + // None => {} + // Some(schemas) => { + // if !schemas.contains(&schema_spec.name().to_string().to_lowercase()) { + // continue; + // } + // } + // } + + // info!( + // "installing schema '{}' from {}", + // schema_spec.name(), + // metadata.name(), + // ); + + // let (_, schema_variant_ids) = + // import_schema(ctx, change_set_pk, schema_spec, installed_pkg_id, thing_map).await?; + + // installed_schema_variant_ids.extend(schema_variant_ids); + // } + + // let mut component_attribute_skips = vec![]; + // for component_spec in components { + // let skips = import_component(ctx, change_set_pk, component_spec, thing_map).await?; + // if !skips.is_empty() { + // component_attribute_skips.push((component_spec.name().to_owned(), skips)); + // } + // } + + // let mut edge_skips = vec![]; + // for edge_spec in edges { + // if let Some(skip) = import_edge(ctx, change_set_pk, edge_spec, thing_map).await? 
{ + // edge_skips.push(skip); + // } + // } Ok(( installed_schema_variant_ids, - component_attribute_skips, - edge_skips, + vec![], // component_attribute_skips, + vec![], // edge_skips, )) } -#[derive(Eq, PartialEq, Hash, Debug, Clone)] -struct ValueCacheKey { - context: AttributeContext, -} - -impl ValueCacheKey { - pub fn new(component_id: ComponentId, prop_id: PropId) -> Self { - let mut context_builder = AttributeContextBuilder::new(); - context_builder - .set_prop_id(prop_id) - .set_component_id(component_id); - - Self { - context: context_builder.to_context_unchecked(), - } - } -} - -async fn import_edge( - ctx: &DalContext, - change_set_pk: Option, - edge_spec: &SiPkgEdge<'_>, - thing_map: &mut ThingMap, -) -> PkgResult> { - let edge = match thing_map.get(change_set_pk, &edge_spec.unique_id().to_owned()) { - Some(Thing::Edge(edge)) => Some(edge.to_owned()), - _ => { - if !edge_spec.deleted() { - let head_component_unique_id = edge_spec.to_component_unique_id().to_owned(); - let (_, head_node) = match thing_map.get(change_set_pk, &head_component_unique_id) { - Some(Thing::Component((component, node))) => (component, node), - _ => { - return Err(PkgError::MissingComponentForEdge( - head_component_unique_id, - edge_spec.from_socket_name().to_owned(), - edge_spec.to_socket_name().to_owned(), - )); - } - }; - - let tail_component_unique_id = edge_spec.from_component_unique_id().to_owned(); - let (_, tail_node) = match thing_map.get(change_set_pk, &tail_component_unique_id) { - Some(Thing::Component((component, node))) => (component, node), - _ => { - return Err(PkgError::MissingComponentForEdge( - tail_component_unique_id, - edge_spec.from_socket_name().to_owned(), - edge_spec.to_socket_name().to_owned(), - )); - } - }; - - let to_socket = match Socket::find_by_name_for_edge_kind_and_node( - ctx, - edge_spec.to_socket_name(), - SocketEdgeKind::ConfigurationInput, - *head_node.id(), - ) - .await? 
- { - Some(socket) => socket, - None => { - return Ok(Some(ImportEdgeSkip::MissingInputSocket( - edge_spec.to_socket_name().to_owned(), - ))); - } - }; - - let from_socket = match Socket::find_by_name_for_edge_kind_and_node( - ctx, - edge_spec.from_socket_name(), - SocketEdgeKind::ConfigurationOutput, - *tail_node.id(), - ) - .await? - { - Some(socket) => socket, - None => { - return Ok(Some(ImportEdgeSkip::MissingOutputSocket( - edge_spec.from_socket_name().to_owned(), - ))); - } - }; - - Some( - Edge::new_for_connection( - ctx, - *head_node.id(), - *to_socket.id(), - *tail_node.id(), - *from_socket.id(), - match edge_spec.edge_kind() { - EdgeSpecKind::Configuration => EdgeKind::Configuration, - EdgeSpecKind::Symbolic => EdgeKind::Symbolic, - }, - ) - .await?, - ) - } else { - None - } - } - }; - - if let Some(mut edge) = edge { - let creation_user_pk = match edge_spec.creation_user_pk() { - Some(pk_str) => Some(UserPk::from_str(pk_str)?), - None => None, - }; - if creation_user_pk.as_ref() != edge.creation_user_pk() { - edge.set_creation_user_pk(ctx, creation_user_pk).await?; - } - - let deletion_user_pk = match edge_spec.deletion_user_pk() { - Some(pk_str) => Some(UserPk::from_str(pk_str)?), - None => None, - }; - - if deletion_user_pk.as_ref() != edge.deletion_user_pk() { - edge.set_deletion_user_pk(ctx, deletion_user_pk).await?; - } - - if edge.deleted_implicitly() != edge_spec.deleted_implicitly() { - edge.set_deleted_implicitly(ctx, edge_spec.deleted_implicitly()) - .await?; - } - - if edge.visibility().is_deleted() && !edge_spec.deleted() { - Edge::restore_by_id(ctx, *edge.id()).await?; - } else if !edge.visibility().is_deleted() && edge_spec.deleted() { - edge.delete_and_propagate(ctx).await?; - } - - thing_map.insert( - change_set_pk, - edge_spec.unique_id().to_owned(), - Thing::Edge(edge), - ); - } - - Ok(None) -} - -async fn import_component( - ctx: &DalContext, - change_set_pk: Option, - component_spec: &SiPkgComponent<'_>, - thing_map: &mut ThingMap, 
-) -> PkgResult> { - let _change_set_pk_inner = change_set_pk.ok_or(PkgError::ComponentImportWithoutChangeSet)?; - - let variant = match component_spec.variant() { - ComponentSpecVariant::BuiltinVariant { - schema_name, - variant_name, - } => { - let schema = Schema::find_by_name_builtin(ctx, schema_name.as_str()) - .await? - .ok_or(PkgError::ComponentMissingBuiltinSchema( - schema_name.to_owned(), - component_spec.name().into(), - ))?; - - schema - .find_variant_by_name(ctx, variant_name.as_str()) - .await? - .ok_or(PkgError::ComponentMissingBuiltinSchemaVariant( - schema_name.to_owned(), - variant_name.to_owned(), - component_spec.name().into(), - ))? - } - ComponentSpecVariant::WorkspaceVariant { variant_unique_id } => { - match thing_map.get(change_set_pk, variant_unique_id) { - Some(Thing::SchemaVariant(variant)) => variant.to_owned(), - _ => { - return Err(PkgError::ComponentMissingSchemaVariant( - variant_unique_id.to_owned(), - component_spec.name().into(), - )); - } - } - } - }; - - let (mut component, mut node) = - match thing_map.get(change_set_pk, &component_spec.unique_id().to_owned()) { - Some(Thing::Component((existing_component, node))) => { - (existing_component.to_owned(), node.to_owned()) - } - _ => { - let (component, node) = - Component::new(ctx, component_spec.name(), *variant.id()).await?; - thing_map.insert( - change_set_pk, - component_spec.unique_id().into(), - Thing::Component((component.to_owned(), node.to_owned())), - ); - - (component, node) - } - }; - - if component.name(ctx).await? != component_spec.name() { - component.set_name(ctx, Some(component_spec.name())).await?; - } - - let position = component_spec - .position()? 
- .pop() - .ok_or(PkgError::ComponentSpecMissingPosition)?; - - if node.x() != position.x() { - node.set_x(ctx, position.x()).await?; - } - if node.y() != position.y() { - node.set_y(ctx, position.y()).await?; - } - - if node.height() != position.height() { - node.set_height(ctx, position.height().map(ToOwned::to_owned)) - .await?; - } - if node.width() != position.width() { - node.set_width(ctx, position.width().map(ToOwned::to_owned)) - .await?; - } - - let mut value_cache: HashMap = HashMap::new(); - let mut prop_cache: HashMap> = HashMap::new(); - - let mut skips = vec![]; - - for attribute in component_spec.input_sockets()? { - if let Some(skip) = import_component_attribute( - ctx, - change_set_pk, - &component, - &variant, - &attribute, - &mut value_cache, - &mut prop_cache, - thing_map, - ) - .await? - { - skips.push(skip); - } - } - - for attribute in component_spec.output_sockets()? { - if let Some(skip) = import_component_attribute( - ctx, - change_set_pk, - &component, - &variant, - &attribute, - &mut value_cache, - &mut prop_cache, - thing_map, - ) - .await? - { - skips.push(skip); - } - } - - let mut resource_value = None; - - for attribute in component_spec.attributes()? { - if let Some(skip) = import_component_attribute( - ctx, - change_set_pk, - &component, - &variant, - &attribute, - &mut value_cache, - &mut prop_cache, - thing_map, - ) - .await? - { - skips.push(skip); - } - if let AttributeValuePath::Prop { path, .. 
} = &attribute.path() { - if path == &PropPath::new(["root", "resource"]).to_string() { - resource_value = attribute.implicit_value().cloned(); - } - } - } - - if component_spec.needs_destroy() { - component.set_needs_destroy(ctx, true).await?; - } - - if let Some(resource_value) = resource_value { - if change_set_pk.unwrap_or(ChangeSetPk::NONE) == ChangeSetPk::NONE { - if let Ok(result) = serde_json::from_value(resource_value) { - component.set_resource(ctx, result).await?; - } - } - } - - if component.visibility().is_deleted() && !component_spec.deleted() { - Component::restore_and_propagate(ctx, *component.id()).await?; - } else if !component.visibility().is_deleted() && component_spec.deleted() { - component.delete_and_propagate(ctx).await?; - } - - Ok(skips) -} - -fn get_prop_kind_for_value(value: Option<&serde_json::Value>) -> Option { - match value { - Some(serde_json::Value::Array(_)) => Some(PropKind::Array), - Some(serde_json::Value::Bool(_)) => Some(PropKind::Boolean), - Some(serde_json::Value::Number(_)) => Some(PropKind::Integer), - Some(serde_json::Value::Object(_)) => Some(PropKind::Object), - Some(serde_json::Value::String(_)) => Some(PropKind::String), - - _ => None, - } -} - -#[allow(clippy::too_many_arguments)] -async fn import_component_attribute( - ctx: &DalContext, - change_set_pk: Option, - component: &Component, - variant: &SchemaVariant, - attribute: &SiPkgAttributeValue<'_>, - value_cache: &mut HashMap, - prop_cache: &mut HashMap>, - thing_map: &mut ThingMap, -) -> PkgResult> { - match attribute.path() { - AttributeValuePath::Prop { path, key, index } => { - if attribute.parent_path().is_none() && (key.is_some() || index.is_some()) { - return Err(PkgError::AttributeValueWithKeyOrIndexButNoParent); - } - - let prop = match prop_cache.get(path) { - Some(prop) => prop.to_owned(), - None => { - let prop = Prop::find_prop_by_path_opt( - ctx, - *variant.id(), - &PropPath::from(path.to_owned()), - ) - .await?; - prop_cache.insert(path.to_owned(), 
prop.to_owned()); - - prop - } - }; - - struct ParentData { - attribute_value: Option, - } - - match prop { - Some(prop) => { - // Do not write attributes for the resource or props under the resource tree if - // in a change set. Let them fall back to the head version - if change_set_pk.unwrap_or(ChangeSetPk::NONE) != ChangeSetPk::NONE - && prop - .path() - .is_descendant_of(&PropPath::new(["root", "resource"])) - { - return Ok(None); - } - - // Validate type if possible - let expected_prop_kind = get_prop_kind_for_value(attribute.value()); - if let Some(expected_kind) = expected_prop_kind { - if expected_kind - != match prop.kind() { - PropKind::Map | PropKind::Object => PropKind::Object, - other => *other, - } - { - // We have to special case the root/resource/payload prop because it is - // typed as a string but we write arbitrary json to it - if prop.path() != PropPath::new(["root", "resource", "payload"]) { - return Ok(Some(ImportAttributeSkip::KindMismatch { - path: PropPath::from(path), - expected_kind, - variant_kind: *prop.kind(), - })); - } - } - } - - if index.is_some() || key.is_some() { - return Ok(None); - } - - let parent_data = if let Some(AttributeValuePath::Prop { path, .. 
}) = - attribute.parent_path() - { - let parent_prop = prop_cache - .get(path) - .and_then(|p| p.as_ref()) - .ok_or(PkgError::AttributeValueParentPropNotFound(path.to_owned()))?; - - let parent_value_cache_key = - ValueCacheKey::new(*component.id(), *parent_prop.id()); - - let parent_av = match value_cache.get(&parent_value_cache_key) { - Some(parent_av) => parent_av.to_owned(), - // If we don't have a parent in the cache it means we're under a map or - // array and currently we don't support custom attribute functions at - // that depth - None => return Ok(None), - }; - - ParentData { - attribute_value: Some(parent_av.to_owned()), - } - } else { - ParentData { - attribute_value: None, - } - }; - - let context = AttributeReadContext { - prop_id: Some(*prop.id()), - internal_provider_id: Some(InternalProviderId::NONE), - external_provider_id: Some(ExternalProviderId::NONE), - component_id: Some(*component.id()), - }; - - let parent_av_id = parent_data.attribute_value.as_ref().map(|av| *av.id()); - let maybe_av = AttributeValue::find_with_parent_and_key_for_context( - ctx, - parent_av_id, - key.to_owned(), - context, - ) - .await?; - - let mut updated_av = match maybe_av { - Some(av) => { - // Write the entire root implicit value, which will write all child - // values and properly emit the remaining implicit values - if prop.path().as_str() == "root" { - let current_context = av.context; - let context = AttributeContext::builder() - .set_prop_id(current_context.prop_id()) - .set_internal_provider_id( - current_context.internal_provider_id(), - ) - .set_external_provider_id( - current_context.external_provider_id(), - ) - .set_component_id(*component.id()) - .to_context_unchecked(); - - let (_, new_av_id) = AttributeValue::update_for_context( - ctx, - *av.id(), - None, - context, - if attribute.implicit_value().is_some() { - attribute.implicit_value().cloned() - } else { - attribute.value().cloned() - }, - None, - ) - .await?; - - AttributeValue::get_by_id(ctx, 
&new_av_id).await?.ok_or( - AttributeValueError::NotFound( - new_av_id, - ctx.visibility().to_owned(), - ), - )? - } else { - av - } - } - None => return Ok(None), - }; - - // Ensure the prototype is not set to the intrinsic value - update_prototype( - ctx, - change_set_pk, - *variant.id(), - attribute, - &mut updated_av, - thing_map, - ) - .await?; - - let this_cache_key = ValueCacheKey::new(*component.id(), *prop.id()); - - value_cache.insert(this_cache_key, updated_av); - } - None => { - // collect missing props and log them - return Ok(Some(ImportAttributeSkip::MissingProp(PropPath::from(path)))); - } - } - } - // We skip writing output socket values since they will be written in the dependent value - // update - AttributeValuePath::InputSocket(_) | AttributeValuePath::OutputSocket(_) => {} - } - - Ok(None) -} - -async fn get_ip_for_input( - ctx: &DalContext, - schema_variant_id: SchemaVariantId, - input: &SiPkgAttrFuncInput<'_>, -) -> PkgResult> { - Ok(match input { - SiPkgAttrFuncInput::Prop { prop_path, .. } => { - let input_source_prop = match Prop::find_prop_by_path_opt( - ctx, - schema_variant_id, - &PropPath::from(prop_path), - ) - .await? - { - Some(p) => p, - None => return Ok(None), - }; - - let ip = InternalProvider::find_for_prop(ctx, *input_source_prop.id()) - .await? - .ok_or(PkgError::MissingInternalProviderForProp( - *input_source_prop.id(), - ))?; - - Some(*ip.id()) - } - SiPkgAttrFuncInput::InputSocket { socket_name, .. } => { - let explicit_ip = match InternalProvider::find_explicit_for_schema_variant_and_name( - ctx, - schema_variant_id, - &socket_name, - ) - .await? - { - Some(ip) => ip, - None => return Ok(None), - }; - - Some(*explicit_ip.id()) - } - SiPkgAttrFuncInput::OutputSocket { .. 
} => None, - }) -} - -#[allow(clippy::too_many_arguments)] -async fn update_prototype( - ctx: &DalContext, - change_set_pk: Option, - schema_variant_id: SchemaVariantId, - attribute_spec: &SiPkgAttributeValue<'_>, - attribute_value: &mut AttributeValue, - thing_map: &mut ThingMap, -) -> PkgResult<()> { - let attribute_func = - match thing_map.get(change_set_pk, &attribute_spec.func_unique_id().to_owned()) { - Some(Thing::Func(func)) => func, - _ => { - return Err(PkgError::MissingFuncUniqueId( - attribute_spec.func_unique_id().to_string(), - )); - } - }; - - let mut prototype = attribute_value - .attribute_prototype(ctx) - .await? - .ok_or(AttributeValueError::MissingAttributePrototype)?; - - if prototype.func_id() != *attribute_func.id() { - prototype.set_func_id(ctx, attribute_func.id()).await?; - } - - let inputs = attribute_spec.inputs()?; - - let mut current_apas = - AttributePrototypeArgument::list_for_attribute_prototype(ctx, *prototype.id()).await?; - - if inputs.is_empty() && !current_apas.is_empty() { - for apa in current_apas.iter_mut() { - apa.delete_by_id(ctx).await?; - } - } else if !inputs.is_empty() { - let mut processed_inputs = HashSet::new(); - for apa in current_apas.iter_mut() { - let func_arg = FuncArgument::get_by_id(ctx, &apa.func_argument_id()) - .await? - .ok_or(PkgError::MissingFuncArgumentById(apa.func_argument_id()))?; - - let matching_input = inputs.iter().find(|input| input.name() == func_arg.name()); - - match matching_input { - Some(input) => { - if let Some(ip_id) = get_ip_for_input(ctx, schema_variant_id, input).await? { - if apa.internal_provider_id() != ip_id { - apa.set_internal_provider_id(ctx, ip_id).await?; - } - } - - processed_inputs.insert(input.name()); - } - None => apa.delete_by_id(ctx).await?, - } - } - - for input in &inputs { - let name = input.name(); - - if processed_inputs.contains(name) { - continue; - } - - let func_arg = FuncArgument::find_by_name_for_func(ctx, name, *attribute_func.id()) - .await? 
- .ok_or(PkgError::MissingFuncArgument( - name.into(), - *attribute_func.id(), - ))?; - - if let Some(ip_id) = get_ip_for_input(ctx, schema_variant_id, input).await? { - match AttributePrototypeArgument::list_for_attribute_prototype(ctx, *prototype.id()) - .await? - .iter() - .find(|apa| apa.func_argument_id() == *func_arg.id()) - { - Some(apa) => { - if apa.internal_provider_id() != ip_id { - let mut apa = apa.to_owned(); - apa.set_internal_provider_id(ctx, ip_id).await?; - } - } - None => { - AttributePrototypeArgument::new_for_intra_component( - ctx, - *prototype.id(), - *func_arg.id(), - ip_id, - ) - .await?; - } - } - } - } - } - - Ok(()) -} - -#[derive(Debug, Clone, Deserialize, Serialize)] -#[serde(rename_all = "camelCase")] -pub struct ImportSkips { - pub change_set_pk: ChangeSetPk, - pub edge_skips: Vec, - pub attribute_skips: Vec<(String, Vec)>, -} - -#[remain::sorted] -#[derive(Debug, Clone, Serialize, Deserialize)] -#[serde(tag = "type", rename_all = "camelCase")] -pub enum ImportAttributeSkip { - #[serde(rename_all = "camelCase")] - KindMismatch { - path: PropPath, - expected_kind: PropKind, - variant_kind: PropKind, - }, - MissingInputSocket(String), - MissingOutputSocket(String), - MissingProp(PropPath), -} - -#[derive(Clone, Debug, Deserialize, Serialize)] -#[serde(tag = "type", rename_all = "camelCase")] -pub enum ImportEdgeSkip { - MissingInputSocket(String), - MissingOutputSocket(String), -} +// #[derive(Eq, PartialEq, Hash, Debug, Clone)] +// struct ValueCacheKey { +// context: AttributeContext, +// key: Option, +// index: Option, +// } + +// impl ValueCacheKey { +// pub fn new( +// component_id: ComponentId, +// prop_id: PropId, +// key: Option, +// index: Option, +// ) -> Self { +// let mut context_builder = AttributeContextBuilder::new(); +// context_builder +// .set_prop_id(prop_id) +// .set_component_id(component_id); + +// Self { +// context: context_builder.to_context_unchecked(), +// key, +// index, +// } +// } +// } + +// async fn 
import_edge( +// ctx: &DalContext, +// change_set_pk: Option, +// edge_spec: &SiPkgEdge<'_>, +// thing_map: &mut ThingMap, +// ) -> PkgResult> { +// let edge = match thing_map.get(change_set_pk, &edge_spec.unique_id().to_owned()) { +// Some(Thing::Edge(edge)) => Some(edge.to_owned()), +// _ => { +// if !edge_spec.deleted() { +// let head_component_unique_id = edge_spec.to_component_unique_id().to_owned(); +// let (_, head_node) = match thing_map.get(change_set_pk, &head_component_unique_id) { +// Some(Thing::Component((component, node))) => (component, node), +// _ => { +// return Err(PkgError::MissingComponentForEdge( +// head_component_unique_id, +// edge_spec.from_socket_name().to_owned(), +// edge_spec.to_socket_name().to_owned(), +// )) +// } +// }; + +// let tail_component_unique_id = edge_spec.from_component_unique_id().to_owned(); +// let (_, tail_node) = match thing_map.get(change_set_pk, &tail_component_unique_id) { +// Some(Thing::Component((component, node))) => (component, node), +// _ => { +// return Err(PkgError::MissingComponentForEdge( +// tail_component_unique_id, +// edge_spec.from_socket_name().to_owned(), +// edge_spec.to_socket_name().to_owned(), +// )) +// } +// }; + +// let to_socket = match Socket::find_by_name_for_edge_kind_and_node( +// ctx, +// edge_spec.to_socket_name(), +// SocketEdgeKind::ConfigurationInput, +// *head_node.id(), +// ) +// .await? +// { +// Some(socket) => socket, +// None => { +// return Ok(Some(ImportEdgeSkip::MissingInputSocket( +// edge_spec.to_socket_name().to_owned(), +// ))) +// } +// }; + +// let from_socket = match Socket::find_by_name_for_edge_kind_and_node( +// ctx, +// edge_spec.from_socket_name(), +// SocketEdgeKind::ConfigurationOutput, +// *tail_node.id(), +// ) +// .await? 
+// { +// Some(socket) => socket, +// None => { +// return Ok(Some(ImportEdgeSkip::MissingOutputSocket( +// edge_spec.from_socket_name().to_owned(), +// ))) +// } +// }; + +// Some( +// Edge::new_for_connection( +// ctx, +// *head_node.id(), +// *to_socket.id(), +// *tail_node.id(), +// *from_socket.id(), +// match edge_spec.edge_kind() { +// EdgeSpecKind::Configuration => EdgeKind::Configuration, +// EdgeSpecKind::Symbolic => EdgeKind::Symbolic, +// }, +// ) +// .await?, +// ) +// } else { +// None +// } +// } +// }; + +// if let Some(mut edge) = edge { +// let creation_user_pk = match edge_spec.creation_user_pk() { +// Some(pk_str) => Some(UserPk::from_str(pk_str)?), +// None => None, +// }; +// if creation_user_pk.as_ref() != edge.creation_user_pk() { +// edge.set_creation_user_pk(ctx, creation_user_pk).await?; +// } + +// let deletion_user_pk = match edge_spec.deletion_user_pk() { +// Some(pk_str) => Some(UserPk::from_str(pk_str)?), +// None => None, +// }; + +// if deletion_user_pk.as_ref() != edge.deletion_user_pk() { +// edge.set_deletion_user_pk(ctx, deletion_user_pk).await?; +// } + +// if edge.deleted_implicitly() != edge_spec.deleted_implicitly() { +// edge.set_deleted_implicitly(ctx, edge_spec.deleted_implicitly()) +// .await?; +// } + +// if edge.visibility().is_deleted() && !edge_spec.deleted() { +// Edge::restore_by_id(ctx, *edge.id()).await?; +// } else if !edge.visibility().is_deleted() && edge_spec.deleted() { +// edge.delete_and_propagate(ctx).await?; +// } + +// thing_map.insert( +// change_set_pk, +// edge_spec.unique_id().to_owned(), +// Thing::Edge(edge), +// ); +// } + +// Ok(None) +// } + +// async fn import_component( +// ctx: &DalContext, +// change_set_pk: Option, +// component_spec: &SiPkgComponent<'_>, +// thing_map: &mut ThingMap, +// ) -> PkgResult> { +// let _change_set_pk_inner = change_set_pk.ok_or(PkgError::ComponentImportWithoutChangeSet)?; + +// let variant = match component_spec.variant() { +// 
ComponentSpecVariant::BuiltinVariant { +// schema_name, +// variant_name, +// } => { +// let schema = Schema::find_by_name_builtin(ctx, schema_name.as_str()) +// .await? +// .ok_or(PkgError::ComponentMissingBuiltinSchema( +// schema_name.to_owned(), +// component_spec.name().into(), +// ))?; + +// schema +// .find_variant_by_name(ctx, variant_name.as_str()) +// .await? +// .ok_or(PkgError::ComponentMissingBuiltinSchemaVariant( +// schema_name.to_owned(), +// variant_name.to_owned(), +// component_spec.name().into(), +// ))? +// } +// ComponentSpecVariant::WorkspaceVariant { variant_unique_id } => { +// match thing_map.get(change_set_pk, variant_unique_id) { +// Some(Thing::SchemaVariant(variant)) => variant.to_owned(), +// _ => { +// return Err(PkgError::ComponentMissingSchemaVariant( +// variant_unique_id.to_owned(), +// component_spec.name().into(), +// )) +// } +// } +// } +// }; + +// let (mut component, mut node) = +// match thing_map.get(change_set_pk, &component_spec.unique_id().to_owned()) { +// Some(Thing::Component((existing_component, node))) => { +// (existing_component.to_owned(), node.to_owned()) +// } +// _ => { +// let (component, node) = +// Component::new(ctx, component_spec.name(), *variant.id()).await?; +// thing_map.insert( +// change_set_pk, +// component_spec.unique_id().into(), +// Thing::Component((component.to_owned(), node.to_owned())), +// ); + +// (component, node) +// } +// }; + +// if component.name(ctx).await? != component_spec.name() { +// component.set_name(ctx, Some(component_spec.name())).await?; +// } + +// let position = component_spec +// .position()? 
+// .pop() +// .ok_or(PkgError::ComponentSpecMissingPosition)?; + +// if node.x() != position.x() { +// node.set_x(ctx, position.x()).await?; +// } +// if node.y() != position.y() { +// node.set_y(ctx, position.y()).await?; +// } + +// if node.height() != position.height() { +// node.set_height(ctx, position.height().map(ToOwned::to_owned)) +// .await?; +// } +// if node.width() != position.width() { +// node.set_width(ctx, position.width().map(ToOwned::to_owned)) +// .await?; +// } + +// let mut value_cache: HashMap = HashMap::new(); +// let mut prop_cache: HashMap> = HashMap::new(); + +// let mut skips = vec![]; + +// for attribute in component_spec.attributes()? { +// if let Some(skip) = import_component_attribute( +// ctx, +// change_set_pk, +// &component, +// &variant, +// attribute, +// &mut value_cache, +// &mut prop_cache, +// thing_map, +// ) +// .await? +// { +// skips.push(skip); +// } +// } +// for attribute in component_spec.input_sockets()? { +// if let Some(skip) = import_component_attribute( +// ctx, +// change_set_pk, +// &component, +// &variant, +// attribute, +// &mut value_cache, +// &mut prop_cache, +// thing_map, +// ) +// .await? +// { +// skips.push(skip); +// } +// } +// for attribute in component_spec.output_sockets()? { +// if let Some(skip) = import_component_attribute( +// ctx, +// change_set_pk, +// &component, +// &variant, +// attribute, +// &mut value_cache, +// &mut prop_cache, +// thing_map, +// ) +// .await? 
+// { +// skips.push(skip); +// } +// } + +// if component_spec.needs_destroy() { +// component.set_needs_destroy(ctx, true).await?; +// } + +// if component.visibility().is_deleted() && !component_spec.deleted() { +// Component::restore_and_propagate(ctx, *component.id()).await?; +// } else if !component.visibility().is_deleted() && component_spec.deleted() { +// component.delete_and_propagate(ctx).await?; +// } + +// Ok(skips) +// } + +// fn get_prop_kind_for_value(value: Option<&serde_json::Value>) -> Option { +// match value { +// Some(serde_json::Value::Array(_)) => Some(PropKind::Array), +// Some(serde_json::Value::Bool(_)) => Some(PropKind::Boolean), +// Some(serde_json::Value::Number(_)) => Some(PropKind::Integer), +// Some(serde_json::Value::Object(_)) => Some(PropKind::Object), +// Some(serde_json::Value::String(_)) => Some(PropKind::String), + +// _ => None, +// } +// } + +// #[allow(clippy::too_many_arguments)] +// async fn import_component_attribute( +// ctx: &DalContext, +// change_set_pk: Option, +// component: &Component, +// variant: &SchemaVariant, +// attribute: &SiPkgAttributeValue<'_>, +// value_cache: &mut HashMap, +// prop_cache: &mut HashMap>, +// thing_map: &mut ThingMap, +// ) -> PkgResult> { +// match attribute.path() { +// AttributeValuePath::Prop { path, key, index } => { +// if attribute.parent_path().is_none() && (key.is_some() || index.is_some()) { +// return Err(PkgError::AttributeValueWithKeyOrIndexButNoParent); +// } + +// let prop = match prop_cache.get(path) { +// Some(prop) => prop.to_owned(), +// None => { +// let prop = Prop::find_prop_by_path_opt( +// ctx, +// *variant.id(), +// &PropPath::from(path.to_owned()), +// ) +// .await?; +// prop_cache.insert(path.to_owned(), prop.to_owned()); + +// prop +// } +// }; + +// struct ParentData<'a> { +// prop: Option<&'a Prop>, +// attribute_value: Option, +// default_attribute_value: Option, +// } + +// match prop { +// Some(prop) => { +// // Validate type if possible +// let 
expected_prop_kind = get_prop_kind_for_value(attribute.value()); +// if let Some(expected_kind) = expected_prop_kind { +// if expected_kind +// != match prop.kind() { +// PropKind::Map | PropKind::Object => PropKind::Object, +// other => *other, +// } +// { +// return Ok(Some(ImportAttributeSkip::KindMismatch { +// path: PropPath::from(path), +// expected_kind, +// variant_kind: *prop.kind(), +// })); +// } +// } + +// let parent_data = if let Some(AttributeValuePath::Prop { path, key, index }) = +// attribute.parent_path() +// { +// let parent_prop = prop_cache +// .get(path) +// .and_then(|p| p.as_ref()) +// .ok_or(PkgError::AttributeValueParentPropNotFound(path.to_owned()))?; + +// let parent_value_cache_key = ValueCacheKey::new( +// *component.id(), +// *parent_prop.id(), +// key.to_owned(), +// index.to_owned(), +// ); + +// let parent_av = value_cache.get(&parent_value_cache_key).ok_or( +// PkgError::AttributeValueParentValueNotFound( +// path.to_owned(), +// key.to_owned(), +// index.to_owned(), +// ), +// )?; + +// let parent_default_value_cache_key = ValueCacheKey::new( +// ComponentId::NONE, +// *parent_prop.id(), +// key.to_owned(), +// index.to_owned(), +// ); + +// let parent_default_av = +// value_cache.get(&parent_default_value_cache_key).cloned(); + +// ParentData { +// prop: Some(parent_prop), +// attribute_value: Some(parent_av.to_owned()), +// default_attribute_value: parent_default_av, +// } +// } else { +// ParentData { +// prop: None, +// attribute_value: None, +// default_attribute_value: None, +// } +// }; + +// // If we're an array element, we might already exist in the index map +// let av_id_from_index_map = match index { +// Some(index) => match parent_data.attribute_value.as_ref() { +// Some(parent_av) => { +// match parent_av +// .index_map() +// .and_then(|index_map| index_map.order().get(*index as usize)) +// { +// None => { +// let attribute_context = AttributeContext::builder() +// .set_prop_id(*prop.id()) +// 
.set_component_id(*component.id()) +// .to_context_unchecked(); + +// // This value will get updated by +// // update_attribute_value +// Some( +// AttributeValue::insert_for_context( +// ctx, +// attribute_context, +// *parent_av.id(), +// None, +// None, +// ) +// .await?, +// ) +// } +// Some(av_id) => Some(*av_id), +// } +// } +// None => None, +// }, +// None => None, +// }; + +// let default_value_cache_key = ValueCacheKey::new( +// ComponentId::NONE, +// *prop.id(), +// key.to_owned(), +// index.to_owned(), +// ); + +// let default_av = match value_cache.entry(default_value_cache_key) { +// Entry::Occupied(occupied) => Some(occupied.get().to_owned()), +// Entry::Vacant(vacant) => { +// if parent_data.default_attribute_value.is_none() +// && parent_data.prop.is_some() +// { +// None +// } else { +// let default_parent_av_id = +// parent_data.default_attribute_value.map(|av| *av.id()); + +// let default_value_context = AttributeReadContext { +// prop_id: Some(*prop.id()), +// internal_provider_id: Some(InternalProviderId::NONE), +// external_provider_id: Some(ExternalProviderId::NONE), +// component_id: None, +// }; + +// let value = AttributeValue::find_with_parent_and_key_for_context( +// ctx, +// default_parent_av_id, +// key.to_owned(), +// default_value_context, +// ) +// .await?; + +// if let Some(value) = &value { +// vacant.insert(value.to_owned()); +// } + +// value +// } +// } +// }; + +// let context = AttributeReadContext { +// prop_id: Some(*prop.id()), +// internal_provider_id: Some(InternalProviderId::NONE), +// external_provider_id: Some(ExternalProviderId::NONE), +// component_id: Some(*component.id()), +// }; + +// let parent_av_id = parent_data.attribute_value.as_ref().map(|av| *av.id()); +// let maybe_av = match av_id_from_index_map { +// Some(av_id) => Some(AttributeValue::get_by_id(ctx, &av_id).await?.ok_or( +// AttributeValueError::NotFound(av_id, ctx.visibility().to_owned()), +// )?), +// None => { +// 
AttributeValue::find_with_parent_and_key_for_context( +// ctx, +// parent_av_id, +// key.to_owned(), +// context, +// ) +// .await? +// } +// }; + +// let mut av_to_update = match maybe_av { +// Some(av) => av, +// None => { +// if index.is_some() { +// dbg!( +// "should always have an attribute value here for an indexed av" +// ); +// } +// let context = AttributeReadContext { +// prop_id: Some(*prop.id()), +// internal_provider_id: None, +// external_provider_id: None, +// component_id: None, +// }; +// let maybe_av = AttributeValue::find_with_parent_and_key_for_context( +// ctx, +// parent_av_id, +// key.to_owned(), +// context, +// ) +// .await?; + +// match maybe_av { +// Some(av) => av, +// None => { +// let parent_av_id = parent_av_id.ok_or( +// PkgError::AttributeValueParentValueNotFound( +// "in av search".into(), +// key.to_owned(), +// index.to_owned(), +// ), +// )?; + +// let attribute_context = AttributeContext::builder() +// .set_prop_id(*prop.id()) +// .set_component_id(*component.id()) +// .to_context_unchecked(); + +// if key.is_some() { +// let av_id = AttributeValue::insert_for_context( +// ctx, +// attribute_context, +// parent_av_id, +// None, +// key.to_owned(), +// ) +// .await?; + +// AttributeValue::get_by_id(ctx, &av_id).await?.ok_or( +// AttributeValueError::NotFound( +// av_id, +// ctx.visibility().to_owned(), +// ), +// )? 
+// } else { +// let (_, value) = create_attribute_value( +// ctx, +// change_set_pk, +// attribute_context, +// *component.id(), +// key, +// parent_data.attribute_value.as_ref(), +// default_av.as_ref(), +// &attribute, +// thing_map, +// ) +// .await?; + +// value +// } +// } +// } +// } +// }; + +// let updated_av = update_attribute_value( +// ctx, +// change_set_pk, +// *variant.id(), +// *component.id(), +// &attribute, +// &mut av_to_update, +// parent_data.attribute_value.as_ref(), +// default_av.as_ref(), +// thing_map, +// ) +// .await?; + +// let this_cache_key = ValueCacheKey::new( +// *component.id(), +// *prop.id(), +// key.to_owned(), +// index.to_owned(), +// ); + +// value_cache.insert(this_cache_key, updated_av); +// } +// None => { +// // collect missing props and log them +// return Ok(Some(ImportAttributeSkip::MissingProp(PropPath::from(path)))); +// } +// } +// } +// AttributeValuePath::InputSocket(socket_name) +// | AttributeValuePath::OutputSocket(socket_name) => { +// let (default_read_context, read_context, write_context) = +// if matches!(attribute.path(), AttributeValuePath::InputSocket(_)) { +// let internal_provider = +// match InternalProvider::find_explicit_for_schema_variant_and_name( +// ctx, +// *variant.id(), +// socket_name.as_str(), +// ) +// .await? 
+// { +// None => { +// return Ok(Some(ImportAttributeSkip::MissingInputSocket( +// socket_name.to_owned(), +// ))) +// } +// Some(ip) => ip, +// }; + +// let default_read_context = AttributeReadContext { +// prop_id: Some(PropId::NONE), +// internal_provider_id: Some(*internal_provider.id()), +// external_provider_id: Some(ExternalProviderId::NONE), +// component_id: None, +// }; +// let read_context = AttributeReadContext { +// prop_id: Some(PropId::NONE), +// internal_provider_id: Some(*internal_provider.id()), +// external_provider_id: Some(ExternalProviderId::NONE), +// component_id: Some(*component.id()), +// }; +// let write_context = AttributeContext::builder() +// .set_internal_provider_id(*internal_provider.id()) +// .set_component_id(*component.id()) +// .to_context_unchecked(); + +// (default_read_context, read_context, write_context) +// } else { +// let external_provider = +// match ExternalProvider::find_for_schema_variant_and_name( +// ctx, +// *variant.id(), +// socket_name.as_str(), +// ) +// .await? 
+// { +// None => { +// return Ok(Some(ImportAttributeSkip::MissingOutputSocket( +// socket_name.to_owned(), +// ))) +// } +// Some(ep) => ep, +// }; + +// let default_read_context = AttributeReadContext { +// prop_id: Some(PropId::NONE), +// internal_provider_id: Some(InternalProviderId::NONE), +// external_provider_id: Some(*external_provider.id()), +// component_id: None, +// }; +// let read_context = AttributeReadContext { +// prop_id: Some(PropId::NONE), +// internal_provider_id: Some(InternalProviderId::NONE), +// external_provider_id: Some(*external_provider.id()), +// component_id: Some(*component.id()), +// }; +// let write_context = AttributeContext::builder() +// .set_external_provider_id(*external_provider.id()) +// .set_component_id(*component.id()) +// .to_context_unchecked(); + +// (default_read_context, read_context, write_context) +// }; + +// let default_value = AttributeValue::find_for_context(ctx, default_read_context).await?; + +// match AttributeValue::find_for_context(ctx, read_context).await? { +// Some(mut existing_av) => { +// update_attribute_value( +// ctx, +// change_set_pk, +// *variant.id(), +// *component.id(), +// &attribute, +// &mut existing_av, +// None, +// default_value.as_ref(), +// thing_map, +// ) +// .await?; +// } +// None => { +// create_attribute_value( +// ctx, +// change_set_pk, +// write_context, +// *component.id(), +// &None, +// None, +// default_value.as_ref(), +// &attribute, +// thing_map, +// ) +// .await?; +// } +// } +// } +// } + +// Ok(None) +// } + +// async fn get_ip_for_input( +// ctx: &DalContext, +// schema_variant_id: SchemaVariantId, +// input: &SiPkgAttrFuncInput<'_>, +// ) -> PkgResult> { +// Ok(match input { +// SiPkgAttrFuncInput::Prop { prop_path, .. } => { +// let input_source_prop = match Prop::find_prop_by_path_opt( +// ctx, +// schema_variant_id, +// &PropPath::from(prop_path), +// ) +// .await? 
+// { +// Some(p) => p, +// None => return Ok(None), +// }; + +// let ip = InternalProvider::find_for_prop(ctx, *input_source_prop.id()) +// .await? +// .ok_or(PkgError::MissingInternalProviderForProp( +// *input_source_prop.id(), +// ))?; + +// Some(*ip.id()) +// } +// SiPkgAttrFuncInput::InputSocket { socket_name, .. } => { +// let explicit_ip = match InternalProvider::find_explicit_for_schema_variant_and_name( +// ctx, +// schema_variant_id, +// &socket_name, +// ) +// .await? +// { +// Some(ip) => ip, +// None => return Ok(None), +// }; + +// Some(*explicit_ip.id()) +// } +// SiPkgAttrFuncInput::OutputSocket { .. } => None, +// }) +// } + +// #[allow(clippy::too_many_arguments)] +// async fn create_attribute_value( +// ctx: &DalContext, +// change_set_pk: Option, +// context: AttributeContext, +// component_id: ComponentId, +// real_key: &Option, +// parent_attribute_value: Option<&AttributeValue>, +// default_attribute_value: Option<&AttributeValue>, +// attribute_spec: &SiPkgAttributeValue<'_>, +// thing_map: &mut ThingMap, +// ) -> PkgResult<(AttributePrototype, AttributeValue)> { +// let attribute_func = +// match thing_map.get(change_set_pk, &attribute_spec.func_unique_id().to_owned()) { +// Some(Thing::Func(func)) => func, +// _ => { +// return Err(PkgError::MissingFuncUniqueId(format!( +// "here, {}", +// attribute_spec.func_unique_id().to_owned() +// ))); +// } +// }; + +// let new_context = AttributeContext::builder() +// .set_prop_id(context.prop_id()) +// .set_internal_provider_id(context.internal_provider_id()) +// .set_external_provider_id(context.external_provider_id()) +// .set_component_id(component_id) +// .to_context_unchecked(); + +// let func_binding = FuncBinding::new( +// ctx, +// attribute_spec.func_binding_args().to_owned(), +// *attribute_func.id(), +// attribute_spec.backend_kind().into(), +// ) +// .await?; + +// let mut func_binding_return_value = FuncBindingReturnValue::new( +// ctx, +// attribute_spec.unprocessed_value().cloned(), 
+// attribute_spec.value().cloned(), +// *attribute_func.id(), +// *func_binding.id(), +// FuncExecutionPk::NONE, +// ) +// .await?; + +// let execution = FuncExecution::new(ctx, attribute_func, &func_binding).await?; +// // TODO: add output stream? + +// func_binding_return_value +// .set_func_execution_pk(ctx, execution.pk()) +// .await?; + +// let mut new_value = AttributeValue::new( +// ctx, +// *func_binding.id(), +// *func_binding_return_value.id(), +// new_context, +// real_key.to_owned(), +// ) +// .await?; + +// if let Some(parent_attribute_value) = parent_attribute_value.as_ref() { +// new_value +// .set_parent_attribute_value_unchecked(ctx, parent_attribute_value.id()) +// .await?; +// } + +// if attribute_spec.is_proxy() { +// let default_av = +// default_attribute_value.ok_or(PkgError::AttributeValueSetToProxyButNoProxyFound)?; + +// new_value +// .set_proxy_for_attribute_value_id(ctx, Some(*default_av.id())) +// .await?; +// } + +// let prototype_context = AttributeContext::builder() +// .set_prop_id(new_context.prop_id()) +// .set_external_provider_id(new_context.external_provider_id()) +// .set_internal_provider_id(new_context.internal_provider_id()) +// .set_component_id(if attribute_spec.component_specific() { +// new_context.component_id() +// } else { +// ComponentId::NONE +// }) +// .to_context_unchecked(); + +// let prototype = +// match AttributePrototype::find_for_context_and_key(ctx, prototype_context, real_key) +// .await? +// .pop() +// { +// Some(existing_proto) => { +// new_value +// .set_attribute_prototype(ctx, existing_proto.id()) +// .await?; + +// existing_proto +// } +// None => { +// AttributePrototype::new_with_existing_value( +// ctx, +// *attribute_func.id(), +// new_context, +// real_key.to_owned(), +// parent_attribute_value.map(|pav| *pav.id()), +// *new_value.id(), +// ) +// .await? 
+// } +// }; + +// Ok((prototype, new_value)) +// } + +// #[allow(clippy::too_many_arguments)] +// async fn update_attribute_value( +// ctx: &DalContext, +// change_set_pk: Option, +// schema_variant_id: SchemaVariantId, +// component_id: ComponentId, +// attribute_spec: &SiPkgAttributeValue<'_>, +// attribute_value: &mut AttributeValue, +// parent_attribute_value: Option<&AttributeValue>, +// default_attribute_value: Option<&AttributeValue>, +// thing_map: &mut ThingMap, +// ) -> PkgResult { +// let prototype = attribute_value +// .attribute_prototype(ctx) +// .await? +// .ok_or(AttributeValueError::MissingAttributePrototype)?; + +// let attribute_func = +// match thing_map.get(change_set_pk, &attribute_spec.func_unique_id().to_owned()) { +// Some(Thing::Func(func)) => func, +// _ => { +// return Err(PkgError::MissingFuncUniqueId(format!( +// "here, {}", +// attribute_spec.func_unique_id().to_owned() +// ))); +// } +// }; + +// let (mut prototype, value) = if prototype.context.component_id().is_none() +// && attribute_spec.component_specific() +// { +// let current_context = attribute_value.context; +// let new_context = AttributeContext::builder() +// .set_prop_id(current_context.prop_id()) +// .set_internal_provider_id(current_context.internal_provider_id()) +// .set_external_provider_id(current_context.external_provider_id()) +// .set_component_id(component_id) +// .to_context_unchecked(); + +// let func_binding = FuncBinding::new( +// ctx, +// attribute_spec.func_binding_args().to_owned(), +// *attribute_func.id(), +// attribute_spec.backend_kind().into(), +// ) +// .await?; + +// let mut func_binding_return_value = FuncBindingReturnValue::new( +// ctx, +// attribute_spec.unprocessed_value().cloned(), +// attribute_spec.value().cloned(), +// *attribute_func.id(), +// *func_binding.id(), +// FuncExecutionPk::NONE, +// ) +// .await?; + +// let execution = FuncExecution::new(ctx, attribute_func, &func_binding).await?; +// // TODO: add output stream? 
+ +// func_binding_return_value +// .set_func_execution_pk(ctx, execution.pk()) +// .await?; + +// let mut new_value = AttributeValue::new( +// ctx, +// *func_binding.id(), +// *func_binding_return_value.id(), +// new_context, +// attribute_value.key(), +// ) +// .await?; + +// if attribute_spec.is_proxy() { +// let default_av = +// default_attribute_value.ok_or(PkgError::AttributeValueSetToProxyButNoProxyFound)?; + +// new_value +// .set_proxy_for_attribute_value_id(ctx, Some(*default_av.id())) +// .await?; +// } + +// ( +// AttributePrototype::new_with_existing_value( +// ctx, +// *attribute_func.id(), +// new_context, +// attribute_value.key().map(|k| k.to_owned()), +// parent_attribute_value.map(|pav| *pav.id()), +// *new_value.id(), +// ) +// .await?, +// new_value, +// ) +// } else { +// let current_fb = FuncBinding::get_by_id(ctx, &attribute_value.func_binding_id()) +// .await? +// .ok_or(FuncBindingError::NotFound( +// attribute_value.func_binding_id(), +// ))?; + +// let current_fbrv = +// FuncBindingReturnValue::get_by_id(ctx, &attribute_value.func_binding_return_value_id()) +// .await? 
+// .ok_or(FuncBindingReturnValueError::NotFound( +// attribute_value.func_binding_return_value_id(), +// ))?; + +// if current_fb.args() != attribute_spec.func_binding_args() +// || current_fbrv.unprocessed_value() != attribute_spec.unprocessed_value() +// || current_fbrv.func_id() != attribute_func.id() +// || current_fb.code_sha256() != attribute_func.code_sha256() +// { +// let func_binding = FuncBinding::new( +// ctx, +// attribute_spec.func_binding_args().to_owned(), +// *attribute_func.id(), +// attribute_spec.backend_kind().into(), +// ) +// .await?; + +// let mut func_binding_return_value = FuncBindingReturnValue::new( +// ctx, +// attribute_spec.unprocessed_value().cloned(), +// attribute_spec.value().cloned(), +// *attribute_func.id(), +// *func_binding.id(), +// FuncExecutionPk::NONE, +// ) +// .await?; + +// let execution = FuncExecution::new(ctx, attribute_func, &func_binding).await?; +// // TODO: add output stream? + +// func_binding_return_value +// .set_func_execution_pk(ctx, execution.pk()) +// .await?; + +// attribute_value +// .set_func_binding_id(ctx, *func_binding.id()) +// .await?; + +// attribute_value +// .set_func_binding_return_value_id(ctx, *func_binding_return_value.id()) +// .await?; +// } + +// (prototype, attribute_value.to_owned()) +// }; + +// if prototype.func_id() != *attribute_func.id() { +// prototype.set_func_id(ctx, attribute_func.id()).await?; +// } + +// let inputs = attribute_spec.inputs()?; + +// let mut current_apas = +// AttributePrototypeArgument::list_for_attribute_prototype(ctx, *prototype.id()).await?; + +// if inputs.is_empty() && !current_apas.is_empty() { +// for apa in current_apas.iter_mut() { +// apa.delete_by_id(ctx).await?; +// } +// } else if !inputs.is_empty() { +// let mut processed_inputs = HashSet::new(); +// for apa in current_apas.iter_mut() { +// let func_arg = FuncArgument::get_by_id(ctx, &apa.func_argument_id()) +// .await? 
+// .ok_or(PkgError::MissingFuncArgumentById(apa.func_argument_id()))?; + +// let matching_input = inputs.iter().find(|input| input.name() == func_arg.name()); + +// match matching_input { +// Some(input) => { +// if let Some(ip_id) = get_ip_for_input(ctx, schema_variant_id, input).await? { +// if apa.internal_provider_id() != ip_id { +// apa.set_internal_provider_id(ctx, ip_id).await?; +// } +// } + +// processed_inputs.insert(input.name()); +// } +// None => apa.delete_by_id(ctx).await?, +// } +// } + +// for input in &inputs { +// let name = input.name(); + +// if processed_inputs.contains(name) { +// continue; +// } + +// let func_arg = FuncArgument::find_by_name_for_func(ctx, name, *attribute_func.id()) +// .await? +// .ok_or(PkgError::MissingFuncArgument( +// name.into(), +// *attribute_func.id(), +// ))?; + +// if let Some(ip_id) = get_ip_for_input(ctx, schema_variant_id, input).await? { +// AttributePrototypeArgument::new_for_intra_component( +// ctx, +// *prototype.id(), +// *func_arg.id(), +// ip_id, +// ) +// .await?; +// } +// } +// } + +// Ok(value) +// } + +// #[derive(Debug, Clone, Deserialize, Serialize)] +// #[serde(rename_all = "camelCase")] +// pub struct ImportSkips { +// change_set_pk: ChangeSetPk, +// edge_skips: Vec, +// attribute_skips: Vec<(String, Vec)>, +// } + +// #[remain::sorted] +// #[derive(Debug, Clone, Serialize, Deserialize)] +// #[serde(tag = "type", rename_all = "camelCase")] +// pub enum ImportAttributeSkip { +// #[serde(rename_all = "camelCase")] +// KindMismatch { +// path: PropPath, +// expected_kind: PropKind, +// variant_kind: PropKind, +// }, +// MissingInputSocket(String), +// MissingOutputSocket(String), +// MissingProp(PropPath), +// } + +// #[derive(Clone, Debug, Deserialize, Serialize)] +// #[serde(tag = "type", rename_all = "camelCase")] +// pub enum ImportEdgeSkip { +// MissingInputSocket(String), +// MissingOutputSocket(String), +// } pub async fn import_pkg_from_pkg( ctx: &DalContext, @@ -934,7 +1421,7 @@ pub 
async fn import_pkg_from_pkg( ) -> PkgResult<( Option, Vec, - Option>, + Option>, )> { // We have to write the installed_pkg row first, so that we have an id, and rely on transaction // semantics to remove the row if anything in the installation process fails @@ -962,12 +1449,22 @@ pub async fn import_pkg_from_pkg( match metadata.kind() { SiPkgKind::Module => { + dbg!("installing module", metadata.name(), ctx.change_set_id()); + let change_set_pointer_id: ChangeSetPointerId = ctx.change_set_id(); + let change_set_pointer = ChangeSetPointer::find(ctx, change_set_pointer_id) + .await? + .expect("head should exist"); + let mut workspace_snapshot = + WorkspaceSnapshot::find_for_change_set(ctx, change_set_pointer_id).await?; + let (installed_schema_variant_ids, _, _) = import_change_set( ctx, None, + &mut workspace_snapshot, + &change_set_pointer, &metadata, &pkg.funcs()?, - &pkg.schemas()?, + &[], // &pkg.schemas()?, &[], &[], installed_pkg_id, @@ -979,96 +1476,88 @@ pub async fn import_pkg_from_pkg( Ok((installed_pkg_id, installed_schema_variant_ids, None)) } SiPkgKind::WorkspaceBackup => { - let mut ctx = ctx.clone_with_new_visibility(ctx.visibility().to_head()); - - let mut import_skips = vec![]; - - let workspace_pk = WorkspacePk::from_str( - metadata - .workspace_pk() - .ok_or(PkgError::WorkspacePkNotInBackup)?, - )?; - let workspace_name = metadata - .workspace_name() - .ok_or(PkgError::WorkspaceNameNotInBackup)?; - let default_change_set_name = metadata.default_change_set().unwrap_or("head"); - - Workspace::clear_or_create_workspace(&mut ctx, workspace_pk, workspace_name).await?; - - ctx.update_tenancy(Tenancy::new(workspace_pk)); - - let change_sets = pkg.change_sets()?; - let default_change_set = change_sets - .iter() - .find(|cs| cs.name() == default_change_set_name) - .ok_or(PkgError::WorkspaceBackupNoDefaultChangeSet( - default_change_set_name.into(), - ))?; - - let (_, attribute_skips, edge_skips) = import_change_set( - &ctx, - Some(ChangeSetPk::NONE), - 
&metadata, - &default_change_set.funcs()?, - &default_change_set.schemas()?, - &default_change_set.components()?, - &default_change_set.edges()?, - installed_pkg_id, - &mut change_set_things, - &options, - ) - .await?; - - import_skips.push(ImportSkips { - change_set_pk: ChangeSetPk::NONE, - attribute_skips, - edge_skips, - }); - - for change_set in change_sets { - if change_set.name() == default_change_set_name { - continue; - } - - // Revert to head to create new change set - let ctx = ctx.clone_with_new_visibility(ctx.visibility().to_head()); - let new_cs = ChangeSet::new(&ctx, change_set.name(), None).await?; - // Switch to new change set visibility - let ctx = ctx.clone_with_new_visibility(ctx.visibility().to_change_set(new_cs.pk)); - - let (_, attribute_skips, edge_skips) = import_change_set( - &ctx, - Some(new_cs.pk), - &metadata, - &change_set.funcs()?, - &change_set.schemas()?, - &change_set.components()?, - &change_set.edges()?, - installed_pkg_id, - &mut change_set_things, - &options, - ) - .await?; - - import_skips.push(ImportSkips { - change_set_pk: new_cs.pk, - attribute_skips, - edge_skips, - }); - } - - Ok(( - None, - vec![], - if import_skips.is_empty() { - None - } else { - Some(import_skips) - }, - )) - } - } -} + // let mut ctx = ctx.clone_with_new_visibility(ctx.visibility().to_head()); + + // let mut import_skips = vec![]; + + // let workspace_pk = WorkspacePk::from_str( + // metadata + // .workspace_pk() + // .ok_or(PkgError::WorkspacePkNotInBackup)?, + // )?; + // let workspace_name = metadata + // .workspace_name() + // .ok_or(PkgError::WorkspaceNameNotInBackup)?; + // let default_change_set_name = metadata.default_change_set().unwrap_or("head"); + + // Workspace::clear_or_create_workspace(&mut ctx, workspace_pk, workspace_name).await?; + + // ctx.update_tenancy(Tenancy::new(workspace_pk)); + + // let change_sets = pkg.change_sets()?; + // let default_change_set = change_sets + // .iter() + // .find(|cs| cs.name() == 
default_change_set_name) + // .ok_or(PkgError::WorkspaceBackupNoDefaultChangeSet( + // default_change_set_name.into(), + // ))?; + + // let (_, attribute_skips, edge_skips) = import_change_set( + // &ctx, + // Some(ChangeSetPk::NONE), + // &metadata, + // &default_change_set.funcs()?, + // &default_change_set.schemas()?, + // &default_change_set.components()?, + // &default_change_set.edges()?, + // installed_pkg_id, + // &mut change_set_things, + // &options, + // ) + // .await?; + + // import_skips.push(ImportSkips { + // change_set_pk: ChangeSetPk::NONE, + // attribute_skips, + // edge_skips, + // }); + + // for change_set in change_sets { + // if change_set.name() == default_change_set_name { + // continue; + // } + + // // Revert to head to create new change set + // let ctx = ctx.clone_with_new_visibility(ctx.visibility().to_head()); + // let new_cs = ChangeSet::new(&ctx, change_set.name(), None).await?; + // // Switch to new change set visibility + // let ctx = ctx.clone_with_new_visibility(ctx.visibility().to_change_set(new_cs.pk)); + + // let (_, attribute_skips, edge_skips) = import_change_set( + // &ctx, + // Some(new_cs.pk), + // &metadata, + // &change_set.funcs()?, + // &change_set.schemas()?, + // &change_set.components()?, + // &change_set.edges()?, + // installed_pkg_id, + // &mut change_set_things, + // &options, + // ) + // .await?; + + // import_skips.push(ImportSkips { + // change_set_pk: new_cs.pk, + // attribute_skips, + // edge_skips, + // }); + // } + + Ok((None, vec![], None)) + } + } +} pub async fn import_pkg(ctx: &DalContext, pkg_file_path: impl AsRef) -> PkgResult { println!("Importing package from {:?}", pkg_file_path.as_ref()); @@ -1079,7 +1568,13 @@ pub async fn import_pkg(ctx: &DalContext, pkg_file_path: impl AsRef) -> Pk Ok(pkg) } -async fn create_func(ctx: &DalContext, func_spec: &SiPkgFunc<'_>) -> PkgResult { +async fn create_func( + ctx: &DalContext, + workspace_snapshot: &mut WorkspaceSnapshot, + change_set_pointer: 
&ChangeSetPointer, + func_spec: &SiPkgFunc<'_>, +) -> PkgResult { + dbg!("create func"); let name = func_spec.name(); let func_spec_data = func_spec @@ -1087,49 +1582,60 @@ async fn create_func(ctx: &DalContext, func_spec: &SiPkgFunc<'_>) -> PkgResult PkgResult<()> { - func.set_name(ctx, func_spec_data.name()).await?; - func.set_backend_kind(ctx, func_spec_data.backend_kind()) - .await?; - func.set_backend_response_type(ctx, func_spec_data.response_type()) - .await?; - func.set_display_name(ctx, func_spec_data.display_name()) - .await?; - func.set_code_base64(ctx, Some(func_spec_data.code_base64())) - .await?; - func.set_description(ctx, func_spec_data.description()) - .await?; - func.set_handler(ctx, Some(func_spec_data.handler())) - .await?; - func.set_hidden(ctx, func_spec_data.hidden()).await?; - func.set_link(ctx, func_spec_data.link().map(|l| l.to_string())) + workspace_snapshot + .func_modify_by_id(ctx, change_set_pointer, func.id, |func| { + func.name = func_spec_data.name().to_owned(); + func.backend_kind = func_spec_data.backend_kind().into(); + func.backend_response_type = func_spec_data.response_type().into(); + func.display_name = func_spec_data + .display_name() + .map(|display_name| display_name.to_owned()); + func.code_base64 = Some(func_spec_data.code_base64().to_owned()); + func.description = func_spec_data.description().map(|desc| desc.to_owned()); + func.handler = Some(func_spec_data.handler().to_owned()); + func.hidden = func_spec_data.hidden(); + func.link = func_spec_data.link().map(|l| l.to_string()); + + Ok(()) + }) .await?; Ok(()) @@ -1137,6 +1643,8 @@ async fn update_func( async fn import_func( ctx: &DalContext, + workspace_snapshot: &mut WorkspaceSnapshot, + change_set_pointer: &ChangeSetPointer, change_set_pk: Option, func_spec: &SiPkgFunc<'_>, installed_pkg_id: Option, @@ -1145,33 +1653,33 @@ async fn import_func( ) -> PkgResult> { let func = match change_set_pk { None => { + dbg!("importing", func_spec.name()); let hash = 
func_spec.hash().to_string(); let existing_func = InstalledPkgAsset::list_for_kind_and_hash(ctx, InstalledPkgAssetKind::Func, &hash) .await? .pop(); - let (mut func, created) = match existing_func { + let (func, created) = match existing_func { Some(installed_func_record) => match installed_func_record.as_installed_func()? { InstalledPkgAssetTyped::Func { id, .. } => { - match Func::get_by_id(ctx, &id).await? { - Some(func) => (func, false), - None => return Err(PkgError::InstalledFuncMissing(id)), - } + (workspace_snapshot.func_get_by_id(ctx, id).await?, false) } - _ => unreachable!(), }, - None => (create_func(ctx, func_spec).await?, true), + None => ( + create_func(ctx, workspace_snapshot, change_set_pointer, func_spec).await?, + true, + ), }; if is_builtin { - func.set_builtin(ctx, true).await? + // func.set_builtin(ctx, true).await? } if let Some(installed_pkg_id) = installed_pkg_id { InstalledPkgAsset::new( ctx, - InstalledPkgAssetTyped::new_for_func(*func.id(), installed_pkg_id, hash), + InstalledPkgAssetTyped::new_for_func(func.id, installed_pkg_id, hash), ) .await?; } @@ -1189,34 +1697,35 @@ async fn import_func( } } Some(_) => { - let existing_func = thing_map.get(change_set_pk, &func_spec.unique_id().to_owned()); - - match existing_func { - Some(Thing::Func(existing_func)) => { - let mut existing_func = existing_func.to_owned(); - - if func_spec.deleted() { - existing_func.delete_by_id(ctx).await?; - - None - } else { - if let Some(data) = func_spec.data() { - update_func(ctx, &mut existing_func, data).await?; - } - - Some(existing_func) - } - } - _ => { - if func_spec.deleted() { - // If we're "deleted" but there is no existing function, this means we're - // deleted only in a change set. Do nothing - None - } else { - Some(create_func(ctx, func_spec).await?) 
- } - } - } + unimplemented!("workspace import not fixed"); + // let existing_func = thing_map.get(change_set_pk, &func_spec.unique_id().to_owned()); + + // match existing_func { + // Some(Thing::Func(existing_func)) => { + // let mut existing_func = existing_func.to_owned(); + + // if func_spec.deleted() { + // existing_func.delete_by_id(ctx).await?; + + // None + // } else { + // if let Some(data) = func_spec.data() { + // update_func(ctx, &mut existing_func, data).await?; + // } + + // Some(existing_func) + // } + // } + // _ => { + // if func_spec.deleted() { + // // If we're "deleted" but there is no existing function, this means we're + // // deleted only in a change set. Do nothing + // None + // } else { + // Some(create_func(ctx, func_spec).await?) + // } + // } + // } } }; @@ -1231,2110 +1740,1897 @@ async fn import_func( Ok(func) } -async fn create_func_argument( - ctx: &DalContext, - func_id: FuncId, - func_arg: &SiPkgFuncArgument<'_>, -) -> PkgResult { - Ok(FuncArgument::new( - ctx, - func_arg.name(), - func_arg.kind().into(), - func_arg.element_kind().to_owned().map(|&kind| kind.into()), - func_id, - ) - .await?) 
-} - -async fn update_func_argument( - ctx: &DalContext, - existing_arg: &mut FuncArgument, - func_id: FuncId, - func_arg: &SiPkgFuncArgument<'_>, -) -> PkgResult<()> { - existing_arg.set_name(ctx, func_arg.name()).await?; - existing_arg.set_kind(ctx, func_arg.kind()).await?; - let element_kind: Option = func_arg.element_kind().map(|&kind| kind.into()); - existing_arg.set_element_kind(ctx, element_kind).await?; - existing_arg.set_func_id(ctx, func_id).await?; - - Ok(()) -} - -async fn import_func_arguments( - ctx: &DalContext, - change_set_pk: Option, - func_id: FuncId, - func_arguments: &[SiPkgFuncArgument<'_>], - thing_map: &mut ThingMap, -) -> PkgResult<()> { - match change_set_pk { - None => { - for arg in func_arguments { - create_func_argument(ctx, func_id, arg).await?; - } - } - Some(_) => { - for arg in func_arguments { - let unique_id = - arg.unique_id() - .ok_or(PkgError::MissingUniqueIdForNode(format!( - "func-argument-{}", - arg.hash() - )))?; - - match thing_map.get(change_set_pk, &unique_id.to_owned()) { - Some(Thing::FuncArgument(existing_arg)) => { - let mut existing_arg = existing_arg.to_owned(); - - if arg.deleted() { - existing_arg.delete_by_id(ctx).await?; - } else { - update_func_argument(ctx, &mut existing_arg, func_id, arg).await?; - thing_map.insert( - change_set_pk, - unique_id.to_owned(), - Thing::FuncArgument(existing_arg.to_owned()), - ); - } - } - _ => { - if !arg.deleted() { - let new_arg = create_func_argument(ctx, func_id, arg).await?; - thing_map.insert( - change_set_pk, - unique_id.to_owned(), - Thing::FuncArgument(new_arg), - ); - } - } - } - } - } - } - - Ok(()) -} - -async fn create_schema(ctx: &DalContext, schema_spec_data: &SiPkgSchemaData) -> PkgResult { - let mut schema = Schema::new(ctx, schema_spec_data.name(), &ComponentKind::Standard).await?; - schema - .set_ui_hidden(ctx, schema_spec_data.ui_hidden()) - .await?; - - let ui_menu = SchemaUiMenu::new( - ctx, - schema_spec_data - .category_name() - .unwrap_or_else(|| 
schema_spec_data.name()), - schema_spec_data.category(), - ) - .await?; - ui_menu.set_schema(ctx, schema.id()).await?; - - Ok(schema) -} - -async fn update_schema( - ctx: &DalContext, - schema: &mut Schema, - schema_spec_data: &SiPkgSchemaData, -) -> PkgResult<()> { - if schema_spec_data.name() != schema.name() { - schema.set_name(ctx, schema_spec_data.name()).await?; - } - - if schema_spec_data.ui_hidden() != schema.ui_hidden() { - schema - .set_ui_hidden(ctx, schema_spec_data.ui_hidden()) - .await?; - } - - if let Some(mut ui_menu) = schema.ui_menus(ctx).await?.pop() { - if let Some(category_name) = schema_spec_data.category_name() { - if category_name != ui_menu.name() { - ui_menu.set_name(ctx, category_name).await?; - } - if schema_spec_data.category() != ui_menu.category() { - ui_menu.set_name(ctx, schema_spec_data.category()).await?; - } - } - } - - Ok(()) -} - -async fn import_schema( - ctx: &DalContext, - change_set_pk: Option, - schema_spec: &SiPkgSchema<'_>, - installed_pkg_id: Option, - thing_map: &mut ThingMap, -) -> PkgResult<(Option, Vec)> { - let schema = match change_set_pk { - None => { - let hash = schema_spec.hash().to_string(); - let existing_schema = InstalledPkgAsset::list_for_kind_and_hash( - ctx, - InstalledPkgAssetKind::Schema, - &hash, - ) - .await? - .pop(); - - let schema = match existing_schema { - None => { - let data = schema_spec - .data() - .ok_or(PkgError::DataNotFound("schema".into()))?; - - create_schema(ctx, data).await? - } - Some(installed_schema_record) => { - match installed_schema_record.as_installed_schema()? { - InstalledPkgAssetTyped::Schema { id, .. } => { - match Schema::get_by_id(ctx, &id).await? { - Some(schema) => schema, - None => return Err(PkgError::InstalledSchemaMissing(id)), - } - } - _ => unreachable!(), - } - } - }; - - // Even if the asset is already installed, we write a record of the asset installation so that - // we can track the installed packages that share schemas. 
- if let Some(installed_pkg_id) = installed_pkg_id { - InstalledPkgAsset::new( - ctx, - InstalledPkgAssetTyped::new_for_schema(*schema.id(), installed_pkg_id, hash), - ) - .await?; - } - - Some(schema) - } - Some(_) => { - let unique_id = schema_spec - .unique_id() - .ok_or(PkgError::MissingUniqueIdForNode(format!( - "schema {}", - schema_spec.hash() - )))?; - - match thing_map.get(change_set_pk, &unique_id.to_owned()) { - Some(Thing::Schema(schema)) => { - let mut schema = schema.to_owned(); - - if schema_spec.deleted() { - schema.delete_by_id(ctx).await?; - // delete all schema children? - - None - } else { - if let Some(data) = schema_spec.data() { - update_schema(ctx, &mut schema, data).await?; - } - - Some(schema) - } - } - _ => { - if schema_spec.deleted() { - None - } else { - Some( - create_schema( - ctx, - schema_spec - .data() - .ok_or(PkgError::DataNotFound("schema".into()))?, - ) - .await?, - ) - } - } - } - } - }; - - if let Some(mut schema) = schema { - if let Some(unique_id) = schema_spec.unique_id() { - thing_map.insert( - change_set_pk, - unique_id.to_owned(), - Thing::Schema(schema.to_owned()), - ); - } - - let mut installed_schema_variant_ids = vec![]; - for variant_spec in &schema_spec.variants()? { - let variant = import_schema_variant( - ctx, - change_set_pk, - &mut schema, - variant_spec, - installed_pkg_id, - thing_map, - ) - .await?; - - if let Some(variant) = variant { - installed_schema_variant_ids.push(*variant.id()); - - if let Some(variant_spec_data) = variant_spec.data() { - let func_unique_id = variant_spec_data.func_unique_id().to_owned(); - - set_default_schema_variant_id( - ctx, - change_set_pk, - &mut schema, - schema_spec - .data() - .as_ref() - .and_then(|data| data.default_schema_variant()), - variant_spec.unique_id(), - *variant.id(), - ) - .await?; - - if let Thing::Func(asset_func) = - thing_map - .get(change_set_pk, &func_unique_id) - .ok_or(PkgError::MissingFuncUniqueId(func_unique_id.to_string()))? 
- { - create_schema_variant_definition( - ctx, - schema_spec.clone(), - installed_pkg_id, - *variant.id(), - asset_func, - ) - .await?; - } - } - } - } - - Ok((Some(*schema.id()), installed_schema_variant_ids)) - } else { - Ok((None, vec![])) - } -} - -async fn set_default_schema_variant_id( - ctx: &DalContext, - change_set_pk: Option, - schema: &mut Schema, - spec_default_unique_id: Option<&str>, - variant_unique_id: Option<&str>, - variant_id: SchemaVariantId, -) -> PkgResult<()> { - match (change_set_pk, variant_unique_id, spec_default_unique_id) { - (None, _, _) | (Some(_), None, _) | (_, Some(_), None) => { - if schema.default_schema_variant_id().is_none() { - schema - .set_default_schema_variant_id(ctx, Some(variant_id)) - .await?; - } - } - (Some(_), Some(variant_unique_id), Some(spec_default_unique_id)) => { - if variant_unique_id == spec_default_unique_id { - let current_default_variant_id = schema - .default_schema_variant_id() - .copied() - .unwrap_or(SchemaVariantId::NONE); - - if variant_id != current_default_variant_id { - schema - .set_default_schema_variant_id(ctx, Some(variant_id)) - .await?; - } - } - } - } - - Ok(()) -} - -async fn create_schema_variant_definition( - ctx: &DalContext, - schema_spec: SiPkgSchema<'_>, - installed_pkg_id: Option, - schema_variant_id: SchemaVariantId, - asset_func: &Func, -) -> PkgResult<()> { - let hash = schema_spec.hash().to_string(); - let existing_definition = InstalledPkgAsset::list_for_kind_and_hash( - ctx, - InstalledPkgAssetKind::SchemaVariantDefinition, - &hash, - ) - .await? 
- .pop(); - - let definition = match existing_definition { - None => { - let maybe_schema_variant_definition = - SchemaVariantDefinition::get_by_func_id(ctx, *asset_func.id()).await?; - let mut schema_variant_definition = match maybe_schema_variant_definition { - None => { - let spec = schema_spec.to_spec().await?; - let metadata = SchemaVariantDefinitionJson::metadata_from_spec(spec)?; - - SchemaVariantDefinition::new( - ctx, - metadata.name, - metadata.menu_name, - metadata.category, - metadata.link, - metadata.color, - metadata.component_kind, - metadata.description, - *asset_func.id(), - ) - .await? - } - Some(schema_variant_definition) => schema_variant_definition, - }; - - schema_variant_definition - .set_schema_variant_id(ctx, Some(schema_variant_id)) - .await?; - - schema_variant_definition - } - Some(existing_definition) => { - match existing_definition.as_installed_schema_variant_definition()? { - InstalledPkgAssetTyped::SchemaVariantDefinition { id, .. } => { - match SchemaVariantDefinition::get_by_id(ctx, &id).await? 
{ - Some(definition) => definition, - None => return Err(PkgError::InstalledSchemaVariantDefinitionMissing(id)), - } - } - _ => unreachable!( - "we are protected by the as_installed_schema_variant_definition method" - ), - } - } - }; - - if let Some(installed_pkg_id) = installed_pkg_id { - InstalledPkgAsset::new( - ctx, - InstalledPkgAssetTyped::new_for_schema_variant_definition( - *definition.id(), - installed_pkg_id, - hash, - ), - ) - .await?; - } - - Ok(()) -} - -#[derive(Clone, Debug)] -struct AttrFuncInfo { - func_unique_id: String, - prop_id: PropId, - inputs: Vec, -} - -#[remain::sorted] -#[derive(Clone, Debug)] -enum DefaultValueInfo { - Boolean { - prop_id: PropId, - default_value: bool, - }, - Number { - prop_id: PropId, - default_value: i64, - }, - String { - prop_id: PropId, - default_value: String, - }, -} - -struct PropVisitContext<'a> { - pub ctx: &'a DalContext, - pub schema_variant_id: SchemaVariantId, - pub attr_funcs: Mutex>, - pub default_values: Mutex>, - pub map_key_funcs: Mutex>, - pub validations: Mutex>, - pub change_set_pk: Option, -} - -async fn import_leaf_function( - ctx: &DalContext, - change_set_pk: Option, - leaf_func: SiPkgLeafFunction<'_>, - schema_variant_id: SchemaVariantId, - thing_map: &mut ThingMap, -) -> PkgResult<()> { - let inputs: Vec = leaf_func - .inputs() - .iter() - .map(|input| input.into()) - .collect(); - - let kind: LeafKind = leaf_func.leaf_kind().into(); - - match thing_map.get(change_set_pk, &leaf_func.func_unique_id().to_owned()) { - Some(Thing::Func(func)) => { - SchemaVariant::upsert_leaf_function(ctx, schema_variant_id, None, kind, &inputs, func) - .await?; - } - _ => { - return Err(PkgError::MissingFuncUniqueId( - leaf_func.func_unique_id().to_string(), - )); - } - } - - Ok(()) -} - -// TODO: cache this so we don't fetch it for every socket -async fn get_identity_func( - ctx: &DalContext, -) -> PkgResult<(Func, FuncBinding, FuncBindingReturnValue, FuncArgument)> { - let func_name = "si:identity"; - let 
func_argument_name = "identity"; - let func: Func = Func::find_by_name(ctx, func_name) - .await? - .ok_or_else(|| FuncError::NotFoundByName(func_name.to_string()))?; - - let func_id = *func.id(); - let (func_binding, func_binding_return_value) = FuncBinding::create_and_execute( - ctx, - serde_json::json![{ "identity": null }], - func_id, - vec![], - ) - .await?; - let func_argument = FuncArgument::find_by_name_for_func(ctx, func_argument_name, func_id) - .await? - .ok_or_else(|| { - PkgError::MissingIntrinsicFuncArgument( - func_name.to_string(), - func_argument_name.to_string(), - ) - })?; - - Ok((func, func_binding, func_binding_return_value, func_argument)) -} - -async fn create_socket( - ctx: &DalContext, - data: &SiPkgSocketData, - schema_id: SchemaId, - schema_variant_id: SchemaVariantId, -) -> PkgResult<(Socket, Option, Option)> { - let (identity_func, identity_func_binding, identity_fbrv, _) = get_identity_func(ctx).await?; - - let (mut socket, ip, ep) = match data.kind() { - SocketSpecKind::Input => { - let (ip, socket) = InternalProvider::new_explicit_with_socket( - ctx, - schema_variant_id, - data.name(), - *identity_func.id(), - *identity_func_binding.id(), - *identity_fbrv.id(), - data.arity().into(), - false, - ) - .await?; - - (socket, Some(ip), None) - } - SocketSpecKind::Output => { - let (ep, socket) = ExternalProvider::new_with_socket( - ctx, - schema_id, - schema_variant_id, - data.name(), - None, - *identity_func.id(), - *identity_func_binding.id(), - *identity_fbrv.id(), - data.arity().into(), - false, - ) - .await?; - - (socket, None, Some(ep)) - } - }; - - socket.set_ui_hidden(ctx, data.ui_hidden()).await?; - - Ok((socket, ip, ep)) -} - -async fn import_socket( - ctx: &DalContext, - change_set_pk: Option, - socket_spec: SiPkgSocket<'_>, - schema_id: SchemaId, - schema_variant_id: SchemaVariantId, - thing_map: &mut ThingMap, -) -> PkgResult<()> { - let (socket, ip, ep) = match change_set_pk { - None => { - let data = socket_spec - .data() - 
.ok_or(PkgError::DataNotFound(socket_spec.name().into()))?; - - create_socket(ctx, data, schema_id, schema_variant_id).await? - } - Some(_) => { - let unique_id = socket_spec - .unique_id() - .ok_or(PkgError::MissingUniqueIdForNode(format!( - "socket {}", - socket_spec.hash() - )))?; - - match thing_map.get(change_set_pk, &unique_id.to_owned()) { - Some(Thing::Socket(socket_box)) => { - ( - socket_box.0.to_owned(), - socket_box.1.to_owned(), - socket_box.2.to_owned(), - ) - // prop trees, including sockets and providers, are created whole cloth, so - // should not have differences in change sets (currently) - } - _ => { - let data = socket_spec - .data() - .ok_or(PkgError::DataNotFound(socket_spec.name().into()))?; - - create_socket(ctx, data, schema_id, schema_variant_id).await? - } - } - } - }; - - if let Some(unique_id) = socket_spec.unique_id() { - thing_map.insert( - change_set_pk, - unique_id.to_owned(), - Thing::Socket(Box::new((socket, ip.to_owned(), ep.to_owned()))), - ); - } - - match ( - socket_spec.data().and_then(|data| data.func_unique_id()), - ep, - ip, - ) { - (Some(func_unique_id), Some(ep), None) => { - import_attr_func_for_output_socket( - ctx, - change_set_pk, - schema_variant_id, - *ep.id(), - func_unique_id, - socket_spec.inputs()?.drain(..).map(Into::into).collect(), - thing_map, - ) - .await?; - } - (Some(_), _, Some(_)) => {} - _ => {} - } - - Ok(()) -} - -async fn create_action_prototype( - ctx: &DalContext, - action_func_spec: &SiPkgActionFunc<'_>, - func_id: FuncId, - schema_variant_id: SchemaVariantId, -) -> PkgResult { - let mut proto = ActionPrototype::new( - ctx, - func_id, - action_func_spec.kind().into(), - ActionPrototypeContext { schema_variant_id }, - ) - .await?; - - if let Some(name) = action_func_spec.name() { - proto.set_name(ctx, Some(name)).await?; - } - - Ok(proto) -} - -async fn create_authentication_prototype( - ctx: &DalContext, - func_id: FuncId, - schema_variant_id: SchemaVariantId, -) -> PkgResult { - 
Ok(AuthenticationPrototype::new( - ctx, - func_id, - AuthenticationPrototypeContext { schema_variant_id }, - ) - .await?) -} - -async fn update_action_prototype( - ctx: &DalContext, - prototype: &mut ActionPrototype, - action_func_spec: &SiPkgActionFunc<'_>, - func_id: FuncId, - schema_variant_id: SchemaVariantId, -) -> PkgResult<()> { - if prototype.schema_variant_id() != schema_variant_id { - prototype - .set_schema_variant_id(ctx, schema_variant_id) - .await?; - } - - if prototype.name() != action_func_spec.name() { - prototype.set_name(ctx, action_func_spec.name()).await?; - } - - if prototype.func_id() != func_id { - prototype.set_func_id(ctx, func_id).await?; - } - - let kind: ActionKind = action_func_spec.kind().into(); - if *prototype.kind() != kind { - prototype.set_kind(ctx, kind).await?; - } - - Ok(()) -} - -async fn update_authentication_prototype( - ctx: &DalContext, - prototype: &mut AuthenticationPrototype, - func_id: FuncId, - schema_variant_id: SchemaVariantId, -) -> PkgResult<()> { - if prototype.schema_variant_id() != schema_variant_id { - prototype - .set_schema_variant_id(ctx, schema_variant_id) - .await?; - } - - if prototype.func_id() != func_id { - prototype.set_func_id(ctx, func_id).await?; - } - - Ok(()) -} - -async fn import_action_func( - ctx: &DalContext, - change_set_pk: Option, - action_func_spec: &SiPkgActionFunc<'_>, - schema_variant_id: SchemaVariantId, - thing_map: &ThingMap, -) -> PkgResult> { - let prototype = - match thing_map.get(change_set_pk, &action_func_spec.func_unique_id().to_owned()) { - Some(Thing::Func(func)) => { - let func_id = *func.id(); - - if let Some(unique_id) = action_func_spec.unique_id() { - match thing_map.get(change_set_pk, &unique_id.to_owned()) { - Some(Thing::ActionPrototype(prototype)) => { - let mut prototype = prototype.to_owned(); - - if action_func_spec.deleted() { - prototype.delete_by_id(ctx).await?; - } else { - update_action_prototype( - ctx, - &mut prototype, - action_func_spec, - func_id, - 
schema_variant_id, - ) - .await?; - } - - Some(prototype) - } - _ => { - if action_func_spec.deleted() { - None - } else { - Some( - create_action_prototype( - ctx, - action_func_spec, - func_id, - schema_variant_id, - ) - .await?, - ) - } - } - } - } else { - Some( - create_action_prototype(ctx, action_func_spec, func_id, schema_variant_id) - .await?, - ) - } - } - _ => { - return Err(PkgError::MissingFuncUniqueId( - action_func_spec.func_unique_id().into(), - )); - } - }; - - Ok(prototype) -} - -async fn import_auth_func( - ctx: &DalContext, - change_set_pk: Option, - func_spec: &SiPkgAuthFunc<'_>, - schema_variant_id: SchemaVariantId, - thing_map: &ThingMap, -) -> PkgResult> { - let prototype = match thing_map.get(change_set_pk, &func_spec.func_unique_id().to_owned()) { - Some(Thing::Func(func)) => { - let func_id = *func.id(); - - if let Some(unique_id) = func_spec.unique_id() { - match thing_map.get(change_set_pk, &unique_id.to_owned()) { - Some(Thing::AuthPrototype(prototype)) => { - let mut prototype = prototype.to_owned(); - - if func_spec.deleted() { - prototype.delete_by_id(ctx).await?; - } else { - update_authentication_prototype( - ctx, - &mut prototype, - func_id, - schema_variant_id, - ) - .await?; - } - - Some(prototype) - } - _ => { - if func_spec.deleted() { - None - } else { - Some( - create_authentication_prototype(ctx, func_id, schema_variant_id) - .await?, - ) - } - } - } - } else { - Some(create_authentication_prototype(ctx, func_id, schema_variant_id).await?) 
- } - } - _ => { - return Err(PkgError::MissingFuncUniqueId( - func_spec.func_unique_id().into(), - )); - } - }; - - Ok(prototype) -} - -#[derive(Default, Clone, Debug)] -struct CreatePropsSideEffects { - attr_funcs: Vec, - default_values: Vec, - map_key_funcs: Vec<(String, AttrFuncInfo)>, - validations: Vec<(PropId, ValidationSpec)>, -} - -impl IntoIterator for CreatePropsSideEffects { - type Item = CreatePropsSideEffects; - - type IntoIter = std::vec::IntoIter; - - fn into_iter(self) -> Self::IntoIter { - vec![self].into_iter() - } -} - -impl Extend for CreatePropsSideEffects { - fn extend>(&mut self, iter: T) { - for element in iter { - self.attr_funcs.extend(element.attr_funcs); - self.default_values.extend(element.default_values); - self.map_key_funcs.extend(element.map_key_funcs); - self.validations.extend(element.validations); - } - } -} - -async fn create_props( - ctx: &DalContext, - change_set_pk: Option, - variant_spec: &SiPkgSchemaVariant<'_>, - prop_root: SchemaVariantSpecPropRoot, - prop_root_prop_id: PropId, - schema_variant_id: SchemaVariantId, -) -> PkgResult { - let context = PropVisitContext { - ctx, - schema_variant_id, - attr_funcs: Mutex::new(vec![]), - default_values: Mutex::new(vec![]), - map_key_funcs: Mutex::new(vec![]), - validations: Mutex::new(vec![]), - change_set_pk, - }; - - let parent_info = (prop_root_prop_id, PropPath::new(prop_root.path_parts())); - - variant_spec - .visit_prop_tree(prop_root, create_prop, Some(parent_info), &context) - .await?; - - Ok(CreatePropsSideEffects { - attr_funcs: context.attr_funcs.into_inner(), - default_values: context.default_values.into_inner(), - map_key_funcs: context.map_key_funcs.into_inner(), - validations: context.validations.into_inner(), - }) -} - -async fn update_schema_variant( - ctx: &DalContext, - schema_variant: &mut SchemaVariant, - name: &str, - schema_id: SchemaId, -) -> PkgResult<()> { - let current_schema_id = schema_variant - .schema(ctx) - .await? 
- .map(|schema| *schema.id()) - .ok_or(SchemaVariantError::MissingSchema(*schema_variant.id()))?; - - if schema_id != current_schema_id { - schema_variant.set_schema(ctx, &schema_id).await?; - } - - if schema_variant.name() != name { - schema_variant.set_name(ctx, name).await?; - } - - Ok(()) -} - -async fn import_schema_variant( - ctx: &DalContext, - change_set_pk: Option, - schema: &mut Schema, - variant_spec: &SiPkgSchemaVariant<'_>, - installed_pkg_id: Option, - thing_map: &mut ThingMap, -) -> PkgResult> { - let mut schema_variant = match change_set_pk { - None => { - let hash = variant_spec.hash().to_string(); - let existing_schema_variant = InstalledPkgAsset::list_for_kind_and_hash( - ctx, - InstalledPkgAssetKind::SchemaVariant, - &hash, - ) - .await? - .pop(); - - let (variant, created) = match existing_schema_variant { - Some(installed_sv_record) => { - match installed_sv_record.as_installed_schema_variant()? { - InstalledPkgAssetTyped::SchemaVariant { id, .. } => ( - SchemaVariant::get_by_id(ctx, &id) - .await? - .ok_or(PkgError::InstalledSchemaVariantMissing(id))?, - false, - ), - _ => unreachable!( - "the as_installed_schema_variant method ensures we cannot hit this branch" - ), - } - } - None => ( - SchemaVariant::new(ctx, *schema.id(), variant_spec.name()) - .await? 
- .0, - true, - ), - }; - - if let Some(installed_pkg_id) = installed_pkg_id { - InstalledPkgAsset::new( - ctx, - InstalledPkgAssetTyped::new_for_schema_variant( - *variant.id(), - installed_pkg_id, - hash, - ), - ) - .await?; - } - - if created { - Some(variant) - } else { - None - } - } - Some(_) => { - let unique_id = variant_spec - .unique_id() - .ok_or(PkgError::MissingUniqueIdForNode(format!( - "variant {}", - variant_spec.hash() - )))?; - - match thing_map.get(change_set_pk, &unique_id.to_owned()) { - Some(Thing::SchemaVariant(variant)) => { - let mut variant = variant.to_owned(); - update_schema_variant(ctx, &mut variant, variant_spec.name(), *schema.id()) - .await?; - - if variant_spec.deleted() { - variant.delete_by_id(ctx).await?; - - None - } else { - Some(variant) - } - } - _ => { - if variant_spec.deleted() { - None - } else { - Some( - SchemaVariant::new(ctx, *schema.id(), variant_spec.name()) - .await? - .0, - ) - } - } - } - } - }; - - if let Some(schema_variant) = schema_variant.as_mut() { - if let Some(unique_id) = variant_spec.unique_id() { - thing_map.insert( - change_set_pk, - unique_id.to_owned(), - Thing::SchemaVariant(schema_variant.to_owned()), - ); - } - - if let Some(data) = variant_spec.data() { - if let (Some(spec_color), current_color) = - (data.color(), schema_variant.color(ctx).await?) - { - if current_color.is_none() - || spec_color - != current_color.expect("is none condition ensures this won't panic") - { - schema_variant.set_color(ctx, spec_color.to_owned()).await?; - } - } - } - - let mut side_effects = CreatePropsSideEffects::default(); - - let domain_prop_id = schema_variant - .find_prop(ctx, &["root", "domain"]) - .await? - .id() - .to_owned(); - - side_effects.extend( - create_props( - ctx, - change_set_pk, - variant_spec, - SchemaVariantSpecPropRoot::Domain, - domain_prop_id, - *schema_variant.id(), - ) - .await?, - ); - - let secrets_prop_id = schema_variant - .find_prop(ctx, &["root", "secrets"]) - .await? 
- .id() - .to_owned(); - - side_effects.extend( - create_props( - ctx, - change_set_pk, - variant_spec, - SchemaVariantSpecPropRoot::Secrets, - secrets_prop_id, - *schema_variant.id(), - ) - .await?, - ); - - if !variant_spec.secret_definitions()?.is_empty() { - let secret_definition_prop_id = *Prop::new( - ctx, - "secret_definition", - PropKind::Object, - None, - *schema_variant.id(), - Some(*schema_variant.find_prop(ctx, &["root"]).await?.id()), - None, - ) - .await? - .id(); - - side_effects.extend( - create_props( - ctx, - change_set_pk, - variant_spec, - SchemaVariantSpecPropRoot::SecretDefinition, - secret_definition_prop_id, - *schema_variant.id(), - ) - .await?, - ); - } - - match schema_variant - .find_prop(ctx, &["root", "resource_value"]) - .await - { - Ok(resource_value_prop) => { - side_effects.extend( - create_props( - ctx, - change_set_pk, - variant_spec, - SchemaVariantSpecPropRoot::ResourceValue, - *resource_value_prop.id(), - *schema_variant.id(), - ) - .await?, - ); - } - Err(SchemaVariantError::PropNotFoundAtPath(_, _, _)) => { - warn!("Cannot find /root/resource_value prop, so skipping creating props under the resource value. If the /root/resource_value pr has been merged, this should be an error!"); - } - Err(err) => Err(err)?, - }; - - if let Some(data) = variant_spec.data() { - schema_variant - .finalize(ctx, Some(data.component_type().into())) - .await?; - } - - for action_func in &variant_spec.action_funcs()? { - let prototype = import_action_func( - ctx, - change_set_pk, - action_func, - *schema_variant.id(), - thing_map, - ) - .await?; - - if let (Some(prototype), Some(unique_id)) = (prototype, action_func.unique_id()) { - thing_map.insert( - change_set_pk, - unique_id.to_owned(), - Thing::ActionPrototype(prototype), - ); - } - } - - for func in &variant_spec.auth_funcs()? 
{ - let prototype = - import_auth_func(ctx, change_set_pk, func, *schema_variant.id(), thing_map).await?; - - if let (Some(prototype), Some(unique_id)) = (prototype, func.unique_id()) { - thing_map.insert( - change_set_pk, - unique_id.to_owned(), - Thing::AuthPrototype(prototype), - ); - } - } - - for leaf_func in variant_spec.leaf_functions()? { - import_leaf_function( - ctx, - change_set_pk, - leaf_func, - *schema_variant.id(), - thing_map, - ) - .await?; - } - - for socket in variant_spec.sockets()? { - import_socket( - ctx, - change_set_pk, - socket, - *schema.id(), - *schema_variant.id(), - thing_map, - ) - .await?; - } - - // Default values must be set before attribute functions are configured so they don't - // override the prototypes set there - for default_value_info in side_effects.default_values { - set_default_value(ctx, default_value_info).await?; - } - - // Set a default name value for all name props, this ensures region has a name before - // the function is executed - { - let name_prop = schema_variant - .find_prop(ctx, &["root", "si", "name"]) - .await?; - let name_default_value_info = DefaultValueInfo::String { - prop_id: *name_prop.id(), - default_value: schema.name().to_lowercase(), - }; - - set_default_value(ctx, name_default_value_info).await?; - } - - for si_prop_func in variant_spec.si_prop_funcs()? { - let prop = schema_variant - .find_prop(ctx, &si_prop_func.kind().prop_path()) - .await?; - import_attr_func_for_prop( - ctx, - change_set_pk, - *schema_variant.id(), - AttrFuncInfo { - func_unique_id: si_prop_func.func_unique_id().to_owned(), - prop_id: *prop.id(), - inputs: si_prop_func - .inputs()? - .iter() - .map(|input| input.to_owned().into()) - .collect(), - }, - None, - thing_map, - ) - .await?; - } - - let mut has_resource_value_func = false; - for root_prop_func in variant_spec.root_prop_funcs()? 
{ - if root_prop_func.prop() == SchemaVariantSpecPropRoot::ResourceValue { - has_resource_value_func = true; - } - - let prop = schema_variant - .find_prop(ctx, root_prop_func.prop().path_parts()) - .await?; - import_attr_func_for_prop( - ctx, - change_set_pk, - *schema_variant.id(), - AttrFuncInfo { - func_unique_id: root_prop_func.func_unique_id().to_owned(), - prop_id: *prop.id(), - inputs: root_prop_func - .inputs()? - .iter() - .map(|input| input.to_owned().into()) - .collect(), - }, - None, - thing_map, - ) - .await?; - } - if !has_resource_value_func { - attach_resource_payload_to_value(ctx, *schema_variant.id()).await?; - } - - for attr_func in side_effects.attr_funcs { - import_attr_func_for_prop( - ctx, - change_set_pk, - *schema_variant.id(), - attr_func, - None, - thing_map, - ) - .await?; - } - - for (key, map_key_func) in side_effects.map_key_funcs { - import_attr_func_for_prop( - ctx, - change_set_pk, - *schema_variant.id(), - map_key_func, - Some(key), - thing_map, - ) - .await?; - } - - for (prop_id, validation_spec) in side_effects.validations { - import_prop_validation( - ctx, - change_set_pk, - validation_spec, - *schema.id(), - *schema_variant.id(), - prop_id, - thing_map, - ) - .await?; - } - } - - Ok(schema_variant) -} - -pub async fn attach_resource_payload_to_value( - ctx: &DalContext, - schema_variant_id: SchemaVariantId, -) -> PkgResult<()> { - let func_id = *Func::find_by_name(ctx, "si:resourcePayloadToValue") - .await? - .ok_or(FuncError::NotFoundByName( - "si:resourcePayloadToValue".into(), - ))? - .id(); - - let func_argument_id = *FuncArgument::find_by_name_for_func(ctx, "payload", func_id) - .await? - .ok_or(FuncArgumentError::NotFoundByNameForFunc( - "payload".into(), - func_id, - ))? - .id(); - - let source = { - let prop = SchemaVariant::find_prop_in_tree( - ctx, - schema_variant_id, - &["root", "resource", "payload"], - ) - .await?; - - InternalProvider::find_for_prop(ctx, *prop.id()) - .await? 
- .ok_or(InternalProviderError::NotFoundForProp(*prop.id()))? - }; - - let target = { - let resource_value_prop = - SchemaVariant::find_prop_in_tree(ctx, schema_variant_id, &["root", "resource_value"]) - .await?; - - let mut prototype = AttributeValue::find_for_context( - ctx, - AttributeReadContext::default_with_prop(*resource_value_prop.id()), - ) - .await? - .ok_or(AttributeValueError::Missing)? - .attribute_prototype(ctx) - .await? - .ok_or(AttributeValueError::MissingAttributePrototype)?; - - prototype.set_func_id(ctx, func_id).await?; - - prototype - }; - - match AttributePrototypeArgument::list_for_attribute_prototype(ctx, *target.id()) - .await? - .iter() - .find(|apa| apa.func_argument_id() == func_argument_id) - { - Some(apa) => { - if apa.internal_provider_id() != *source.id() { - let mut apa = apa.to_owned(); - apa.set_internal_provider_id(ctx, *source.id()).await?; - } - } - None => { - AttributePrototypeArgument::new_for_intra_component( - ctx, - *target.id(), - func_argument_id, - *source.id(), - ) - .await?; - } - } - Ok(()) -} - -async fn set_default_value( - ctx: &DalContext, - default_value_info: DefaultValueInfo, -) -> PkgResult<()> { - let prop = match &default_value_info { - DefaultValueInfo::Number { prop_id, .. } - | DefaultValueInfo::String { prop_id, .. } - | DefaultValueInfo::Boolean { prop_id, .. } => Prop::get_by_id(ctx, prop_id) - .await? - .ok_or(PkgError::MissingProp(*prop_id))?, - }; - - match default_value_info { - DefaultValueInfo::Boolean { default_value, .. } => { - prop.set_default_value(ctx, default_value).await? - } - DefaultValueInfo::Number { default_value, .. } => { - prop.set_default_value(ctx, default_value).await? - } - DefaultValueInfo::String { default_value, .. } => { - prop.set_default_value(ctx, default_value).await? 
- } - } - - Ok(()) -} - -async fn import_attr_func_for_prop( - ctx: &DalContext, - change_set_pk: Option, - schema_variant_id: SchemaVariantId, - AttrFuncInfo { - func_unique_id, - prop_id, - inputs, - }: AttrFuncInfo, - key: Option, - thing_map: &mut ThingMap, -) -> PkgResult<()> { - match thing_map.get(change_set_pk, &func_unique_id.to_owned()) { - Some(Thing::Func(func)) => { - import_attr_func( - ctx, - change_set_pk, - AttributeReadContext { - prop_id: Some(prop_id), - ..Default::default() - }, - key, - schema_variant_id, - *func.id(), - inputs, - thing_map, - ) - .await?; - } - _ => return Err(PkgError::MissingFuncUniqueId(func_unique_id.to_string())), - } - - Ok(()) -} - -async fn import_attr_func_for_output_socket( - ctx: &DalContext, - change_set_pk: Option, - schema_variant_id: SchemaVariantId, - external_provider_id: ExternalProviderId, - func_unique_id: &str, - inputs: Vec, - thing_map: &mut ThingMap, -) -> PkgResult<()> { - match thing_map.get(change_set_pk, &func_unique_id.to_owned()) { - Some(Thing::Func(func)) => { - import_attr_func( - ctx, - change_set_pk, - AttributeReadContext { - external_provider_id: Some(external_provider_id), - ..Default::default() - }, - None, - schema_variant_id, - *func.id(), - inputs, - thing_map, - ) - .await?; - } - _ => return Err(PkgError::MissingFuncUniqueId(func_unique_id.to_string())), - } - - Ok(()) -} - -async fn get_prototype_for_context( - ctx: &DalContext, - context: AttributeReadContext, - key: Option, -) -> PkgResult { - let value = AttributeValue::find_for_context(ctx, context) - .await? - .ok_or(AttributeValueError::Missing)?; - - let real_value = if let Some(key) = key { - let parent_prop_id = context - .prop_id() - .ok_or(PkgError::AttributeFuncForKeyMissingProp( - context, - key.to_owned(), - ))?; - - let parent_prop = Prop::get_by_id(ctx, &parent_prop_id) - .await? 
- .ok_or(PkgError::MissingProp(parent_prop_id))?; - - if *parent_prop.kind() != PropKind::Map { - return Err(PkgError::AttributeFuncForKeySetOnWrongKind( - parent_prop_id, - key, - *parent_prop.kind(), - )); - } - - match parent_prop.child_props(ctx).await?.pop() { - Some(item_prop) => { - let item_write_context = AttributeContextBuilder::new() - .set_prop_id(*item_prop.id()) - .to_context()?; - - let item_read_context: AttributeReadContext = item_write_context.to_owned().into(); - - match AttributeValue::find_with_parent_and_key_for_context( - ctx, - Some(*value.id()), - Some(key.to_owned()), - item_read_context, - ) - .await? - { - Some(item_av) => item_av, - None => { - let item_id = AttributeValue::insert_for_context( - ctx, - item_write_context, - *value.id(), - None, - Some(key), - ) - .await?; - - AttributeValue::get_by_id(ctx, &item_id) - .await? - .ok_or(AttributeValueError::MissingForId(item_id))? - } - } - } - None => { - return Err(PkgError::MissingItemPropForMapProp(parent_prop_id)); - } - } - } else { - value - }; - - Ok(real_value - .attribute_prototype(ctx) - .await? - .ok_or(AttributeValueError::MissingAttributePrototype)?) -} - -async fn create_attr_proto_arg( - ctx: &DalContext, - prototype_id: AttributePrototypeId, - input: &SiPkgAttrFuncInputView, - func_id: FuncId, - schema_variant_id: SchemaVariantId, -) -> PkgResult { - let arg = match &input { - SiPkgAttrFuncInputView::Prop { name, .. } - | SiPkgAttrFuncInputView::InputSocket { name, .. } - | SiPkgAttrFuncInputView::OutputSocket { name, .. } => { - FuncArgument::find_by_name_for_func(ctx, name, func_id) - .await? - .ok_or(PkgError::MissingFuncArgument(name.to_owned(), func_id))? - } - }; - - Ok(match input { - SiPkgAttrFuncInputView::Prop { prop_path, .. } => { - let prop = Prop::find_prop_by_path(ctx, schema_variant_id, &prop_path.into()).await?; - let prop_ip = InternalProvider::find_for_prop(ctx, *prop.id()) - .await? 
- .ok_or(PkgError::MissingInternalProviderForProp(*prop.id()))?; - - AttributePrototypeArgument::new_for_intra_component( - ctx, - prototype_id, - *arg.id(), - *prop_ip.id(), - ) - .await? - } - SiPkgAttrFuncInputView::InputSocket { socket_name, .. } => { - let explicit_ip = InternalProvider::find_explicit_for_schema_variant_and_name( - ctx, - schema_variant_id, - &socket_name, - ) - .await? - .ok_or(PkgError::MissingInternalProviderForSocketName( - socket_name.to_owned(), - ))?; - - AttributePrototypeArgument::new_for_intra_component( - ctx, - prototype_id, - *arg.id(), - *explicit_ip.id(), - ) - .await? - } - _ => { - // xxx: make this an error - panic!("unsupported taking external provider as input for prop"); - } - }) -} - -async fn update_attr_proto_arg( - ctx: &DalContext, - apa: &mut AttributePrototypeArgument, - _prototype_id: AttributePrototypeId, - input: &SiPkgAttrFuncInputView, - func_id: FuncId, - schema_variant_id: SchemaVariantId, -) -> PkgResult<()> { - let arg = match &input { - SiPkgAttrFuncInputView::Prop { name, .. } - | SiPkgAttrFuncInputView::InputSocket { name, .. } - | SiPkgAttrFuncInputView::OutputSocket { name, .. } => { - FuncArgument::find_by_name_for_func(ctx, name, func_id) - .await? - .ok_or(PkgError::MissingFuncArgument(name.to_owned(), func_id))? - } - }; - - if apa.func_argument_id() != *arg.id() { - apa.set_func_argument_id(ctx, arg.id()).await?; - } - - match input { - SiPkgAttrFuncInputView::Prop { prop_path, .. } => { - let prop = Prop::find_prop_by_path(ctx, schema_variant_id, &prop_path.into()).await?; - let prop_ip = InternalProvider::find_for_prop(ctx, *prop.id()) - .await? - .ok_or(PkgError::MissingInternalProviderForProp(*prop.id()))?; - - if apa.internal_provider_id() != *prop_ip.id() { - apa.set_internal_provider_id_safe(ctx, *prop_ip.id()) - .await?; - } - } - SiPkgAttrFuncInputView::InputSocket { socket_name, .. 
} => { - let explicit_ip = InternalProvider::find_explicit_for_schema_variant_and_name( - ctx, - schema_variant_id, - &socket_name, - ) - .await? - .ok_or(PkgError::MissingInternalProviderForSocketName( - socket_name.to_owned(), - ))?; - - if apa.internal_provider_id() != *explicit_ip.id() { - apa.set_internal_provider_id_safe(ctx, *explicit_ip.id()) - .await?; - } - } - _ => {} - } - - Ok(()) -} - -#[allow(clippy::too_many_arguments)] -async fn import_attr_func( - ctx: &DalContext, - change_set_pk: Option, - context: AttributeReadContext, - key: Option, - schema_variant_id: SchemaVariantId, - func_id: FuncId, - inputs: Vec, - thing_map: &mut ThingMap, -) -> PkgResult<()> { - let mut prototype = get_prototype_for_context(ctx, context, key).await?; - - if prototype.func_id() != func_id { - prototype.set_func_id(ctx, &func_id).await?; - } - - for input in &inputs { - match change_set_pk { - None => { - create_attr_proto_arg(ctx, *prototype.id(), input, func_id, schema_variant_id) - .await?; - } - Some(_) => { - let (unique_id, deleted) = match input { - SiPkgAttrFuncInputView::Prop { - unique_id, deleted, .. - } - | SiPkgAttrFuncInputView::InputSocket { - unique_id, deleted, .. - } - | SiPkgAttrFuncInputView::OutputSocket { - unique_id, deleted, .. 
- } => ( - unique_id - .as_deref() - .ok_or(PkgError::MissingUniqueIdForNode("attr-func-input".into()))?, - *deleted, - ), - }; - - let apa = match thing_map.get(change_set_pk, &unique_id.to_owned()) { - Some(Thing::AttributePrototypeArgument(apa)) => { - let mut apa = apa.to_owned(); - if deleted { - apa.delete_by_id(ctx).await?; - } else { - update_attr_proto_arg( - ctx, - &mut apa, - *prototype.id(), - input, - func_id, - schema_variant_id, - ) - .await?; - } - - Some(apa) - } - _ => { - if deleted { - None - } else { - Some( - create_attr_proto_arg( - ctx, - *prototype.id(), - input, - func_id, - schema_variant_id, - ) - .await?, - ) - } - } - }; - - if let Some(apa) = apa { - thing_map.insert( - change_set_pk, - unique_id.to_owned(), - Thing::AttributePrototypeArgument(apa), - ); - } - } - } - } - - Ok(()) -} - -async fn create_validation( - ctx: &DalContext, - validation_kind: ValidationKind, - builtin_func_id: FuncId, - prop_id: PropId, - schema_id: SchemaId, - schema_variant_id: SchemaVariantId, -) -> PkgResult { - let (validation_func_id, validation_args) = match validation_kind { - ValidationKind::Builtin(validation) => ( - builtin_func_id, - serde_json::to_value(FuncBackendValidationArgs::new(validation))?, - ), - - ValidationKind::Custom(func_id) => (func_id, serde_json::json!(null)), - }; - let mut builder = ValidationPrototypeContext::builder(); - builder - .set_prop_id(prop_id) - .set_schema_id(schema_id) - .set_schema_variant_id(schema_variant_id); - - Ok(ValidationPrototype::new( - ctx, - validation_func_id, - validation_args, - builder.to_context(ctx).await?, - ) - .await?) 
-} - -async fn update_validation( - ctx: &DalContext, - prototype: &mut ValidationPrototype, - validation_kind: ValidationKind, - builtin_func_id: FuncId, - prop_id: PropId, - schema_id: SchemaId, - schema_variant_id: SchemaVariantId, -) -> PkgResult<()> { - let (validation_func_id, validation_args) = match validation_kind { - ValidationKind::Builtin(validation) => ( - builtin_func_id, - serde_json::to_value(FuncBackendValidationArgs::new(validation))?, - ), - - ValidationKind::Custom(func_id) => (func_id, serde_json::json!(null)), - }; - - prototype.set_prop_id(ctx, prop_id).await?; - prototype.set_schema_id(ctx, schema_id).await?; - prototype - .set_schema_variant_id(ctx, schema_variant_id) - .await?; - prototype.set_args(ctx, validation_args).await?; - prototype.set_func_id(ctx, validation_func_id).await?; - - Ok(()) -} - -async fn import_prop_validation( - ctx: &DalContext, - change_set_pk: Option, - spec: ValidationSpec, - schema_id: SchemaId, - schema_variant_id: SchemaVariantId, - prop_id: PropId, - thing_map: &mut ThingMap, -) -> PkgResult<()> { - let builtin_validation_func = Func::find_by_attr(ctx, "name", &"si:validation") - .await? - .pop() - .ok_or(FuncError::NotFoundByName("si:validation".to_string()))?; - - let validation_kind = match &spec { - ValidationSpec::IntegerIsBetweenTwoIntegers { - lower_bound, - upper_bound, - .. - } => ValidationKind::Builtin(Validation::IntegerIsBetweenTwoIntegers { - value: None, - lower_bound: *lower_bound, - upper_bound: *upper_bound, - }), - ValidationSpec::IntegerIsNotEmpty { .. } => { - ValidationKind::Builtin(Validation::IntegerIsNotEmpty { value: None }) - } - ValidationSpec::StringEquals { expected, .. } => { - ValidationKind::Builtin(Validation::StringEquals { - value: None, - expected: expected.to_owned(), - }) - } - ValidationSpec::StringHasPrefix { expected, .. 
} => { - ValidationKind::Builtin(Validation::StringHasPrefix { - value: None, - expected: expected.to_owned(), - }) - } - ValidationSpec::StringInStringArray { - expected, - display_expected, - .. - } => ValidationKind::Builtin(Validation::StringInStringArray { - value: None, - expected: expected.to_owned(), - display_expected: *display_expected, - }), - ValidationSpec::StringIsHexColor { .. } => { - ValidationKind::Builtin(Validation::StringIsHexColor { value: None }) - } - ValidationSpec::StringIsNotEmpty { .. } => { - ValidationKind::Builtin(Validation::StringIsNotEmpty { value: None }) - } - ValidationSpec::StringIsValidIpAddr { .. } => { - ValidationKind::Builtin(Validation::StringIsValidIpAddr { value: None }) - } - ValidationSpec::CustomValidation { func_unique_id, .. } => { - ValidationKind::Custom(match thing_map.get(None, func_unique_id) { - Some(Thing::Func(func)) => *func.id(), - _ => return Err(PkgError::MissingFuncUniqueId(func_unique_id.to_owned())), - }) - } - }; - - match change_set_pk { - None => { - create_validation( - ctx, - validation_kind, - *builtin_validation_func.id(), - prop_id, - schema_id, - schema_variant_id, - ) - .await?; - } - Some(_) => { - let unique_id = spec - .unique_id() - .ok_or(PkgError::MissingUniqueIdForNode("validation".into()))?; - let deleted = spec.deleted(); - - let validation_prototype = match thing_map.get(change_set_pk, &unique_id.to_owned()) { - Some(Thing::Validation(prototype)) => { - let mut prototype = prototype.to_owned(); - - if deleted { - prototype.delete_by_id(ctx).await?; - } else { - update_validation( - ctx, - &mut prototype, - validation_kind, - *builtin_validation_func.id(), - prop_id, - schema_id, - schema_variant_id, - ) - .await?; - } - - Some(prototype) - } - _ => { - if deleted { - None - } else { - Some( - create_validation( - ctx, - validation_kind, - *builtin_validation_func.id(), - prop_id, - schema_id, - schema_variant_id, - ) - .await?, - ) - } - } - }; - - if let Some(prototype) = 
validation_prototype { - thing_map.insert( - change_set_pk, - unique_id.to_owned(), - Thing::Validation(prototype), - ); - } - } - } - - Ok(()) -} - -fn prop_kind_for_pkg_prop(pkg_prop: &SiPkgProp<'_>) -> PropKind { - match pkg_prop { - SiPkgProp::Array { .. } => PropKind::Array, - SiPkgProp::Boolean { .. } => PropKind::Boolean, - SiPkgProp::Map { .. } => PropKind::Map, - SiPkgProp::Number { .. } => PropKind::Integer, - SiPkgProp::Object { .. } => PropKind::Object, - SiPkgProp::String { .. } => PropKind::String, - } -} - -async fn create_dal_prop( - ctx: &DalContext, - data: &SiPkgPropData, - kind: PropKind, - schema_variant_id: SchemaVariantId, - parent_prop_id: Option, -) -> PkgResult { - let mut prop = Prop::new( - ctx, - &data.name, - kind, - Some(((&data.widget_kind).into(), data.widget_options.to_owned())), - schema_variant_id, - parent_prop_id, - data.documentation.to_owned(), - ) - .await - .map_err(SiPkgError::visit_prop)?; - - prop.set_hidden(ctx, data.hidden).await?; - prop.set_doc_link(ctx, data.doc_link.as_ref().map(|l| l.to_string())) - .await?; - - Ok(prop) -} - -async fn create_prop( - spec: SiPkgProp<'_>, - parent_prop_info: Option<(PropId, PropPath)>, - ctx: &PropVisitContext<'_>, -) -> PkgResult> { - let prop = match ctx.change_set_pk { - None => { - let data = spec.data().ok_or(PkgError::DataNotFound("prop".into()))?; - create_dal_prop( - ctx.ctx, - data, - prop_kind_for_pkg_prop(&spec), - ctx.schema_variant_id, - parent_prop_info.map(|info| info.0), - ) - .await? - } - Some(_) => { - let parent_path = parent_prop_info - .as_ref() - .map(|info| info.1.to_owned()) - .unwrap_or(PropPath::new(["root"])); - - let path = parent_path.join(&PropPath::new([spec.name()])); - - match Prop::find_prop_by_path_opt(ctx.ctx, ctx.schema_variant_id, &path).await? 
{ - None => { - let data = spec.data().ok_or(PkgError::DataNotFound("prop".into()))?; - create_dal_prop( - ctx.ctx, - data, - prop_kind_for_pkg_prop(&spec), - ctx.schema_variant_id, - parent_prop_info.as_ref().map(|info| info.0.to_owned()), - ) - .await? - } - Some(prop) => prop, - } - } - }; - - let prop_id = *prop.id(); - - // Both attribute functions and default values have to be set *after* the schema variant is - // "finalized", so we can't do until we construct the *entire* prop tree. Hence we push work - // queues up to the outer context via the PropVisitContext, which uses Mutexes for interior - // mutability (maybe there's a better type for that here?) - - if let Some(data) = spec.data() { - if let Some(default_value_info) = match &spec { - SiPkgProp::String { .. } => { - if let Some(serde_json::Value::String(default_value)) = &data.default_value { - Some(DefaultValueInfo::String { - prop_id, - default_value: default_value.to_owned(), - }) - } else { - // Raise error here for type mismatch - None - } - } - SiPkgProp::Number { .. } => { - if let Some(serde_json::Value::Number(default_value_number)) = &data.default_value { - if default_value_number.is_i64() { - default_value_number - .as_i64() - .map(|dv_i64| DefaultValueInfo::Number { - prop_id, - default_value: dv_i64, - }) - } else { - None - } - } else { - None - } - } - SiPkgProp::Boolean { .. } => { - if let Some(serde_json::Value::Bool(default_value)) = &data.default_value { - Some(DefaultValueInfo::Boolean { - prop_id, - default_value: *default_value, - }) - } else { - None - } - } - // Default values for complex types are not yet supported in packages - _ => None, - } { - ctx.default_values.lock().await.push(default_value_info); - } - } - - if matches!(&spec, SiPkgProp::Map { .. }) { - for map_key_func in spec.map_key_funcs()? 
{ - let key = map_key_func.key(); - let mut inputs = map_key_func.inputs()?; - let func_unique_id = map_key_func.func_unique_id(); - - ctx.map_key_funcs.lock().await.push(( - key.to_owned(), - AttrFuncInfo { - func_unique_id: func_unique_id.to_owned(), - prop_id, - inputs: inputs.drain(..).map(Into::into).collect(), - }, - )); - } - } - - if let Some(func_unique_id) = spec.data().and_then(|data| data.func_unique_id.to_owned()) { - let mut inputs = spec.inputs()?; - ctx.attr_funcs.lock().await.push(AttrFuncInfo { - func_unique_id, - prop_id, - inputs: inputs.drain(..).map(Into::into).collect(), - }); - } - - for validation_pkg_spec in spec.validations()? { - let validation_spec: ValidationSpec = validation_pkg_spec.try_into()?; - - ctx.validations - .lock() - .await - .push((*prop.id(), validation_spec)); - } - - Ok(Some((*prop.id(), prop.path()))) -} +// async fn create_func_argument( +// ctx: &DalContext, +// func_id: FuncId, +// func_arg: &SiPkgFuncArgument<'_>, +// ) -> PkgResult { +// Ok(FuncArgument::new( +// ctx, +// func_arg.name(), +// func_arg.kind().into(), +// func_arg.element_kind().to_owned().map(|&kind| kind.into()), +// func_id, +// ) +// .await?) 
+// } + +// async fn update_func_argument( +// ctx: &DalContext, +// existing_arg: &mut FuncArgument, +// func_id: FuncId, +// func_arg: &SiPkgFuncArgument<'_>, +// ) -> PkgResult<()> { +// existing_arg.set_name(ctx, func_arg.name()).await?; +// existing_arg.set_kind(ctx, func_arg.kind()).await?; +// let element_kind: Option = func_arg.element_kind().map(|&kind| kind.into()); +// existing_arg.set_element_kind(ctx, element_kind).await?; +// existing_arg.set_func_id(ctx, func_id).await?; + +// Ok(()) +// } + +// async fn import_func_arguments( +// ctx: &DalContext, +// change_set_pk: Option, +// func_id: FuncId, +// func_arguments: &[SiPkgFuncArgument<'_>], +// thing_map: &mut ThingMap, +// ) -> PkgResult<()> { +// match change_set_pk { +// None => { +// for arg in func_arguments { +// create_func_argument(ctx, func_id, arg).await?; +// } +// } +// Some(_) => { +// for arg in func_arguments { +// let unique_id = +// arg.unique_id() +// .ok_or(PkgError::MissingUniqueIdForNode(format!( +// "func-argument-{}", +// arg.hash() +// )))?; + +// match thing_map.get(change_set_pk, &unique_id.to_owned()) { +// Some(Thing::FuncArgument(existing_arg)) => { +// let mut existing_arg = existing_arg.to_owned(); + +// if arg.deleted() { +// existing_arg.delete_by_id(ctx).await?; +// } else { +// update_func_argument(ctx, &mut existing_arg, func_id, arg).await?; +// thing_map.insert( +// change_set_pk, +// unique_id.to_owned(), +// Thing::FuncArgument(existing_arg.to_owned()), +// ); +// } +// } +// _ => { +// if !arg.deleted() { +// let new_arg = create_func_argument(ctx, func_id, arg).await?; +// thing_map.insert( +// change_set_pk, +// unique_id.to_owned(), +// Thing::FuncArgument(new_arg), +// ); +// } +// } +// } +// } +// } +// } + +// Ok(()) +// } + +// async fn create_schema(ctx: &DalContext, schema_spec_data: &SiPkgSchemaData) -> PkgResult { +// let mut schema = Schema::new(ctx, schema_spec_data.name(), &ComponentKind::Standard).await?; +// schema +// .set_ui_hidden(ctx, 
schema_spec_data.ui_hidden()) +// .await?; + +// let ui_menu = SchemaUiMenu::new( +// ctx, +// schema_spec_data +// .category_name() +// .unwrap_or_else(|| schema_spec_data.name()), +// schema_spec_data.category(), +// ) +// .await?; +// ui_menu.set_schema(ctx, schema.id()).await?; + +// Ok(schema) +// } + +// async fn update_schema( +// ctx: &DalContext, +// schema: &mut Schema, +// schema_spec_data: &SiPkgSchemaData, +// ) -> PkgResult<()> { +// if schema_spec_data.name() != schema.name() { +// schema.set_name(ctx, schema_spec_data.name()).await?; +// } + +// if schema_spec_data.ui_hidden() != schema.ui_hidden() { +// schema +// .set_ui_hidden(ctx, schema_spec_data.ui_hidden()) +// .await?; +// } + +// if let Some(mut ui_menu) = schema.ui_menus(ctx).await?.pop() { +// if let Some(category_name) = schema_spec_data.category_name() { +// if category_name != ui_menu.name() { +// ui_menu.set_name(ctx, category_name).await?; +// } +// if schema_spec_data.category() != ui_menu.category() { +// ui_menu.set_name(ctx, schema_spec_data.category()).await?; +// } +// } +// } + +// Ok(()) +// } + +// async fn import_schema( +// ctx: &DalContext, +// change_set_pk: Option, +// schema_spec: &SiPkgSchema<'_>, +// installed_pkg_id: Option, +// thing_map: &mut ThingMap, +// ) -> PkgResult<(Option, Vec)> { +// let schema = match change_set_pk { +// None => { +// let hash = schema_spec.hash().to_string(); +// let existing_schema = InstalledPkgAsset::list_for_kind_and_hash( +// ctx, +// InstalledPkgAssetKind::Schema, +// &hash, +// ) +// .await? +// .pop(); + +// let schema = match existing_schema { +// None => { +// let data = schema_spec +// .data() +// .ok_or(PkgError::DataNotFound("schema".into()))?; + +// create_schema(ctx, data).await? +// } +// Some(installed_schema_record) => { +// match installed_schema_record.as_installed_schema()? { +// InstalledPkgAssetTyped::Schema { id, .. } => { +// match Schema::get_by_id(ctx, &id).await? 
{ +// Some(schema) => schema, +// None => return Err(PkgError::InstalledSchemaMissing(id)), +// } +// } +// _ => unreachable!(), +// } +// } +// }; + +// // Even if the asset is already installed, we write a record of the asset installation so that +// // we can track the installed packages that share schemas. +// if let Some(installed_pkg_id) = installed_pkg_id { +// InstalledPkgAsset::new( +// ctx, +// InstalledPkgAssetTyped::new_for_schema(*schema.id(), installed_pkg_id, hash), +// ) +// .await?; +// } + +// Some(schema) +// } +// Some(_) => { +// let unique_id = schema_spec +// .unique_id() +// .ok_or(PkgError::MissingUniqueIdForNode(format!( +// "schema {}", +// schema_spec.hash() +// )))?; + +// match thing_map.get(change_set_pk, &unique_id.to_owned()) { +// Some(Thing::Schema(schema)) => { +// let mut schema = schema.to_owned(); + +// if schema_spec.deleted() { +// schema.delete_by_id(ctx).await?; +// // delete all schema children? + +// None +// } else { +// if let Some(data) = schema_spec.data() { +// update_schema(ctx, &mut schema, data).await?; +// } + +// Some(schema) +// } +// } +// _ => { +// if schema_spec.deleted() { +// None +// } else { +// Some( +// create_schema( +// ctx, +// schema_spec +// .data() +// .ok_or(PkgError::DataNotFound("schema".into()))?, +// ) +// .await?, +// ) +// } +// } +// } +// } +// }; + +// if let Some(mut schema) = schema { +// if let Some(unique_id) = schema_spec.unique_id() { +// thing_map.insert( +// change_set_pk, +// unique_id.to_owned(), +// Thing::Schema(schema.to_owned()), +// ); +// } + +// let mut installed_schema_variant_ids = vec![]; +// for variant_spec in &schema_spec.variants()? 
{ +// let variant = import_schema_variant( +// ctx, +// change_set_pk, +// &mut schema, +// variant_spec, +// installed_pkg_id, +// thing_map, +// ) +// .await?; + +// if let Some(variant) = variant { +// installed_schema_variant_ids.push(*variant.id()); + +// if let Some(variant_spec_data) = variant_spec.data() { +// let func_unique_id = variant_spec_data.func_unique_id().to_owned(); + +// set_default_schema_variant_id( +// ctx, +// change_set_pk, +// &mut schema, +// schema_spec +// .data() +// .as_ref() +// .and_then(|data| data.default_schema_variant()), +// variant_spec.unique_id(), +// *variant.id(), +// ) +// .await?; + +// if let Thing::Func(asset_func) = +// thing_map +// .get(change_set_pk, &func_unique_id) +// .ok_or(PkgError::MissingFuncUniqueId(func_unique_id.to_string()))? +// { +// create_schema_variant_definition( +// ctx, +// schema_spec.clone(), +// installed_pkg_id, +// *variant.id(), +// asset_func, +// ) +// .await?; +// } +// } +// } +// } + +// Ok((Some(*schema.id()), installed_schema_variant_ids)) +// } else { +// Ok((None, vec![])) +// } +// } + +// async fn set_default_schema_variant_id( +// ctx: &DalContext, +// change_set_pk: Option, +// schema: &mut Schema, +// spec_default_unique_id: Option<&str>, +// variant_unique_id: Option<&str>, +// variant_id: SchemaVariantId, +// ) -> PkgResult<()> { +// match (change_set_pk, variant_unique_id, spec_default_unique_id) { +// (None, _, _) | (Some(_), None, _) | (_, Some(_), None) => { +// if schema.default_schema_variant_id().is_none() { +// schema +// .set_default_schema_variant_id(ctx, Some(variant_id)) +// .await?; +// } +// } +// (Some(_), Some(variant_unique_id), Some(spec_default_unique_id)) => { +// if variant_unique_id == spec_default_unique_id { +// let current_default_variant_id = schema +// .default_schema_variant_id() +// .copied() +// .unwrap_or(SchemaVariantId::NONE); + +// if variant_id != current_default_variant_id { +// schema +// .set_default_schema_variant_id(ctx, 
Some(variant_id)) +// .await?; +// } +// } +// } +// } + +// Ok(()) +// } + +// async fn create_schema_variant_definition( +// ctx: &DalContext, +// schema_spec: SiPkgSchema<'_>, +// installed_pkg_id: Option, +// schema_variant_id: SchemaVariantId, +// asset_func: &Func, +// ) -> PkgResult<()> { +// let hash = schema_spec.hash().to_string(); +// let existing_definition = InstalledPkgAsset::list_for_kind_and_hash( +// ctx, +// InstalledPkgAssetKind::SchemaVariantDefinition, +// &hash, +// ) +// .await? +// .pop(); + +// let definition = match existing_definition { +// None => { +// let maybe_schema_variant_definition = +// SchemaVariantDefinition::get_by_func_id(ctx, *asset_func.id()).await?; +// let mut schema_variant_definition = match maybe_schema_variant_definition { +// None => { +// let spec = schema_spec.to_spec().await?; +// let metadata = SchemaVariantDefinitionJson::metadata_from_spec(spec)?; + +// SchemaVariantDefinition::new( +// ctx, +// metadata.name, +// metadata.menu_name, +// metadata.category, +// metadata.link, +// metadata.color, +// metadata.component_kind, +// metadata.description, +// *asset_func.id(), +// ) +// .await? +// } +// Some(schema_variant_definition) => schema_variant_definition, +// }; + +// schema_variant_definition +// .set_schema_variant_id(ctx, Some(schema_variant_id)) +// .await?; + +// schema_variant_definition +// } +// Some(existing_definition) => { +// match existing_definition.as_installed_schema_variant_definition()? { +// InstalledPkgAssetTyped::SchemaVariantDefinition { id, .. } => { +// match SchemaVariantDefinition::get_by_id(ctx, &id).await? 
{ +// Some(definition) => definition, +// None => return Err(PkgError::InstalledSchemaVariantDefinitionMissing(id)), +// } +// } +// _ => unreachable!( +// "we are protected by the as_installed_schema_variant_definition method" +// ), +// } +// } +// }; + +// if let Some(installed_pkg_id) = installed_pkg_id { +// InstalledPkgAsset::new( +// ctx, +// InstalledPkgAssetTyped::new_for_schema_variant_definition( +// *definition.id(), +// installed_pkg_id, +// hash, +// ), +// ) +// .await?; +// } + +// Ok(()) +// } + +// #[derive(Clone, Debug)] +// struct AttrFuncInfo { +// func_unique_id: String, +// prop_id: PropId, +// inputs: Vec, +// } + +// #[remain::sorted] +// #[derive(Clone, Debug)] +// enum DefaultValueInfo { +// Boolean { +// prop_id: PropId, +// default_value: bool, +// }, +// Number { +// prop_id: PropId, +// default_value: i64, +// }, +// String { +// prop_id: PropId, +// default_value: String, +// }, +// } + +// struct PropVisitContext<'a> { +// pub ctx: &'a DalContext, +// pub schema_variant_id: SchemaVariantId, +// pub attr_funcs: Mutex>, +// pub default_values: Mutex>, +// pub map_key_funcs: Mutex>, +// pub validations: Mutex>, +// pub change_set_pk: Option, +// } + +// async fn import_leaf_function( +// ctx: &DalContext, +// change_set_pk: Option, +// leaf_func: SiPkgLeafFunction<'_>, +// schema_variant_id: SchemaVariantId, +// thing_map: &mut ThingMap, +// ) -> PkgResult<()> { +// let inputs: Vec = leaf_func +// .inputs() +// .iter() +// .map(|input| input.into()) +// .collect(); + +// let kind: LeafKind = leaf_func.leaf_kind().into(); + +// match thing_map.get(change_set_pk, &leaf_func.func_unique_id().to_owned()) { +// Some(Thing::Func(func)) => { +// SchemaVariant::upsert_leaf_function(ctx, schema_variant_id, None, kind, &inputs, func) +// .await?; +// } +// _ => { +// return Err(PkgError::MissingFuncUniqueId( +// leaf_func.func_unique_id().to_string(), +// )); +// } +// } + +// Ok(()) +// } + +// // TODO: cache this so we don't fetch it for every 
socket +// async fn get_identity_func( +// ctx: &DalContext, +// ) -> PkgResult<(Func, FuncBinding, FuncBindingReturnValue, FuncArgument)> { +// let func_name = "si:identity"; +// let func_argument_name = "identity"; +// let func: Func = Func::find_by_name(ctx, func_name) +// .await? +// .ok_or_else(|| FuncError::NotFoundByName(func_name.to_string()))?; + +// let func_id = *func.id(); +// let (func_binding, func_binding_return_value) = +// FuncBinding::create_and_execute(ctx, serde_json::json![{ "identity": null }], func_id) +// .await?; +// let func_argument = FuncArgument::find_by_name_for_func(ctx, func_argument_name, func_id) +// .await? +// .ok_or_else(|| { +// PkgError::MissingIntrinsicFuncArgument( +// func_name.to_string(), +// func_argument_name.to_string(), +// ) +// })?; + +// Ok((func, func_binding, func_binding_return_value, func_argument)) +// } + +// async fn create_socket( +// ctx: &DalContext, +// data: &SiPkgSocketData, +// schema_id: SchemaId, +// schema_variant_id: SchemaVariantId, +// ) -> PkgResult<(Socket, Option, Option)> { +// let (identity_func, identity_func_binding, identity_fbrv, _) = get_identity_func(ctx).await?; + +// let (mut socket, ip, ep) = match data.kind() { +// SocketSpecKind::Input => { +// let (ip, socket) = InternalProvider::new_explicit_with_socket( +// ctx, +// schema_variant_id, +// data.name(), +// *identity_func.id(), +// *identity_func_binding.id(), +// *identity_fbrv.id(), +// data.arity().into(), +// false, +// ) +// .await?; + +// (socket, Some(ip), None) +// } +// SocketSpecKind::Output => { +// let (ep, socket) = ExternalProvider::new_with_socket( +// ctx, +// schema_id, +// schema_variant_id, +// data.name(), +// None, +// *identity_func.id(), +// *identity_func_binding.id(), +// *identity_fbrv.id(), +// data.arity().into(), +// false, +// ) +// .await?; + +// (socket, None, Some(ep)) +// } +// }; + +// socket.set_ui_hidden(ctx, data.ui_hidden()).await?; + +// Ok((socket, ip, ep)) +// } + +// async fn 
import_socket( +// ctx: &DalContext, +// change_set_pk: Option, +// socket_spec: SiPkgSocket<'_>, +// schema_id: SchemaId, +// schema_variant_id: SchemaVariantId, +// thing_map: &mut ThingMap, +// ) -> PkgResult<()> { +// let (socket, ip, ep) = match change_set_pk { +// None => { +// let data = socket_spec +// .data() +// .ok_or(PkgError::DataNotFound(socket_spec.name().into()))?; + +// create_socket(ctx, data, schema_id, schema_variant_id).await? +// } +// Some(_) => { +// let unique_id = socket_spec +// .unique_id() +// .ok_or(PkgError::MissingUniqueIdForNode(format!( +// "socket {}", +// socket_spec.hash() +// )))?; + +// match thing_map.get(change_set_pk, &unique_id.to_owned()) { +// Some(Thing::Socket(socket_box)) => { +// ( +// socket_box.0.to_owned(), +// socket_box.1.to_owned(), +// socket_box.2.to_owned(), +// ) +// // prop trees, including sockets and providers, are created whole cloth, so +// // should not have differences in change sets (currently) +// } +// _ => { +// let data = socket_spec +// .data() +// .ok_or(PkgError::DataNotFound(socket_spec.name().into()))?; + +// create_socket(ctx, data, schema_id, schema_variant_id).await? 
+// } +// } +// } +// }; + +// if let Some(unique_id) = socket_spec.unique_id() { +// thing_map.insert( +// change_set_pk, +// unique_id.to_owned(), +// Thing::Socket(Box::new((socket, ip.to_owned(), ep.to_owned()))), +// ); +// } + +// match ( +// socket_spec.data().and_then(|data| data.func_unique_id()), +// ep, +// ip, +// ) { +// (Some(func_unique_id), Some(ep), None) => { +// import_attr_func_for_output_socket( +// ctx, +// change_set_pk, +// schema_variant_id, +// *ep.id(), +// func_unique_id, +// socket_spec.inputs()?.drain(..).map(Into::into).collect(), +// thing_map, +// ) +// .await?; +// } +// (Some(_), _, Some(_)) => {} +// _ => {} +// } + +// Ok(()) +// } + +// async fn create_action_protoype( +// ctx: &DalContext, +// action_func_spec: &SiPkgActionFunc<'_>, +// func_id: FuncId, +// schema_variant_id: SchemaVariantId, +// ) -> PkgResult { +// let mut proto = ActionPrototype::new( +// ctx, +// func_id, +// action_func_spec.kind().into(), +// ActionPrototypeContext { schema_variant_id }, +// ) +// .await?; + +// if let Some(name) = action_func_spec.name() { +// proto.set_name(ctx, Some(name)).await?; +// } + +// Ok(proto) +// } + +// async fn update_action_prototype( +// ctx: &DalContext, +// prototype: &mut ActionPrototype, +// action_func_spec: &SiPkgActionFunc<'_>, +// func_id: FuncId, +// schema_variant_id: SchemaVariantId, +// ) -> PkgResult<()> { +// if prototype.schema_variant_id() != schema_variant_id { +// prototype +// .set_schema_variant_id(ctx, schema_variant_id) +// .await?; +// } + +// if prototype.name() != action_func_spec.name() { +// prototype.set_name(ctx, action_func_spec.name()).await?; +// } + +// if prototype.func_id() != func_id { +// prototype.set_func_id(ctx, func_id).await?; +// } + +// let kind: ActionKind = action_func_spec.kind().into(); +// if *prototype.kind() != kind { +// prototype.set_kind(ctx, kind).await?; +// } + +// Ok(()) +// } + +// async fn import_action_func( +// ctx: &DalContext, +// change_set_pk: Option, +// 
action_func_spec: &SiPkgActionFunc<'_>, +// schema_variant_id: SchemaVariantId, +// thing_map: &ThingMap, +// ) -> PkgResult> { +// let prototype = +// match thing_map.get(change_set_pk, &action_func_spec.func_unique_id().to_owned()) { +// Some(Thing::Func(func)) => { +// let func_id = *func.id(); + +// if let Some(unique_id) = action_func_spec.unique_id() { +// match thing_map.get(change_set_pk, &unique_id.to_owned()) { +// Some(Thing::ActionPrototype(prototype)) => { +// let mut prototype = prototype.to_owned(); + +// if action_func_spec.deleted() { +// prototype.delete_by_id(ctx).await?; +// } else { +// update_action_prototype( +// ctx, +// &mut prototype, +// action_func_spec, +// func_id, +// schema_variant_id, +// ) +// .await?; +// } + +// Some(prototype) +// } +// _ => { +// if action_func_spec.deleted() { +// None +// } else { +// Some( +// create_action_protoype( +// ctx, +// action_func_spec, +// func_id, +// schema_variant_id, +// ) +// .await?, +// ) +// } +// } +// } +// } else { +// Some( +// create_action_protoype(ctx, action_func_spec, func_id, schema_variant_id) +// .await?, +// ) +// } +// } +// _ => { +// return Err(PkgError::MissingFuncUniqueId( +// action_func_spec.func_unique_id().into(), +// )); +// } +// }; + +// Ok(prototype) +// } + +// #[derive(Default, Clone, Debug)] +// struct CreatePropsSideEffects { +// attr_funcs: Vec, +// default_values: Vec, +// map_key_funcs: Vec<(String, AttrFuncInfo)>, +// validations: Vec<(PropId, ValidationSpec)>, +// } + +// impl IntoIterator for CreatePropsSideEffects { +// type Item = CreatePropsSideEffects; + +// type IntoIter = std::vec::IntoIter; + +// fn into_iter(self) -> Self::IntoIter { +// vec![self].into_iter() +// } +// } + +// impl Extend for CreatePropsSideEffects { +// fn extend>(&mut self, iter: T) { +// for element in iter { +// self.attr_funcs.extend(element.attr_funcs); +// self.default_values.extend(element.default_values); +// self.map_key_funcs.extend(element.map_key_funcs); +// 
self.validations.extend(element.validations); +// } +// } +// } + +// async fn create_props( +// ctx: &DalContext, +// change_set_pk: Option, +// variant_spec: &SiPkgSchemaVariant<'_>, +// prop_root: SchemaVariantSpecPropRoot, +// prop_root_prop_id: PropId, +// schema_variant_id: SchemaVariantId, +// ) -> PkgResult { +// let context = PropVisitContext { +// ctx, +// schema_variant_id, +// attr_funcs: Mutex::new(vec![]), +// default_values: Mutex::new(vec![]), +// map_key_funcs: Mutex::new(vec![]), +// validations: Mutex::new(vec![]), +// change_set_pk, +// }; + +// let parent_info = (prop_root_prop_id, PropPath::new(prop_root.path_parts())); + +// variant_spec +// .visit_prop_tree(prop_root, create_prop, Some(parent_info), &context) +// .await?; + +// Ok(CreatePropsSideEffects { +// attr_funcs: context.attr_funcs.into_inner(), +// default_values: context.default_values.into_inner(), +// map_key_funcs: context.map_key_funcs.into_inner(), +// validations: context.validations.into_inner(), +// }) +// } + +// async fn update_schema_variant( +// ctx: &DalContext, +// schema_variant: &mut SchemaVariant, +// name: &str, +// schema_id: SchemaId, +// ) -> PkgResult<()> { +// let current_schema_id = schema_variant +// .schema(ctx) +// .await? 
+// .map(|schema| *schema.id()) +// .ok_or(SchemaVariantError::MissingSchema(*schema_variant.id()))?; + +// if schema_id != current_schema_id { +// schema_variant.set_schema(ctx, &schema_id).await?; +// } + +// if schema_variant.name() != name { +// schema_variant.set_name(ctx, name).await?; +// } + +// Ok(()) +// } + +// async fn import_schema_variant( +// ctx: &DalContext, +// change_set_pk: Option, +// schema: &mut Schema, +// variant_spec: &SiPkgSchemaVariant<'_>, +// installed_pkg_id: Option, +// thing_map: &mut ThingMap, +// ) -> PkgResult> { +// let mut schema_variant = match change_set_pk { +// None => { +// let hash = variant_spec.hash().to_string(); +// let existing_schema_variant = InstalledPkgAsset::list_for_kind_and_hash( +// ctx, +// InstalledPkgAssetKind::SchemaVariant, +// &hash, +// ) +// .await? +// .pop(); + +// let (variant, created) = match existing_schema_variant { +// Some(installed_sv_record) => { +// match installed_sv_record.as_installed_schema_variant()? { +// InstalledPkgAssetTyped::SchemaVariant { id, .. } => ( +// SchemaVariant::get_by_id(ctx, &id) +// .await? +// .ok_or(PkgError::InstalledSchemaVariantMissing(id))?, +// false, +// ), +// _ => unreachable!( +// "the as_installed_schema_variant method ensures we cannot hit this branch" +// ), +// } +// } +// None => ( +// SchemaVariant::new(ctx, *schema.id(), variant_spec.name()) +// .await? 
+// .0, +// true, +// ), +// }; + +// if let Some(installed_pkg_id) = installed_pkg_id { +// InstalledPkgAsset::new( +// ctx, +// InstalledPkgAssetTyped::new_for_schema_variant( +// *variant.id(), +// installed_pkg_id, +// hash, +// ), +// ) +// .await?; +// } + +// if created { +// Some(variant) +// } else { +// None +// } +// } +// Some(_) => { +// let unique_id = variant_spec +// .unique_id() +// .ok_or(PkgError::MissingUniqueIdForNode(format!( +// "variant {}", +// variant_spec.hash() +// )))?; + +// match thing_map.get(change_set_pk, &unique_id.to_owned()) { +// Some(Thing::SchemaVariant(variant)) => { +// let mut variant = variant.to_owned(); +// update_schema_variant(ctx, &mut variant, variant_spec.name(), *schema.id()) +// .await?; + +// if variant_spec.deleted() { +// variant.delete_by_id(ctx).await?; + +// None +// } else { +// Some(variant) +// } +// } +// _ => { +// if variant_spec.deleted() { +// None +// } else { +// Some( +// SchemaVariant::new(ctx, *schema.id(), variant_spec.name()) +// .await? +// .0, +// ) +// } +// } +// } +// } +// }; + +// if let Some(schema_variant) = schema_variant.as_mut() { +// if let Some(unique_id) = variant_spec.unique_id() { +// thing_map.insert( +// change_set_pk, +// unique_id.to_owned(), +// Thing::SchemaVariant(schema_variant.to_owned()), +// ); +// } + +// if let Some(data) = variant_spec.data() { +// if let (Some(spec_color), current_color) = +// (data.color(), schema_variant.color(ctx).await?) +// { +// if current_color.is_none() +// || spec_color +// != current_color.expect("is none condition ensures this won't panic") +// { +// schema_variant.set_color(ctx, spec_color.to_owned()).await?; +// } +// } +// } + +// let mut side_effects = CreatePropsSideEffects::default(); + +// let domain_prop_id = schema_variant +// .find_prop(ctx, &["root", "domain"]) +// .await? 
+// .id() +// .to_owned(); + +// side_effects.extend( +// create_props( +// ctx, +// change_set_pk, +// variant_spec, +// SchemaVariantSpecPropRoot::Domain, +// domain_prop_id, +// *schema_variant.id(), +// ) +// .await?, +// ); + +// let secrets_prop_id = schema_variant +// .find_prop(ctx, &["root", "secrets"]) +// .await? +// .id() +// .to_owned(); + +// side_effects.extend( +// create_props( +// ctx, +// change_set_pk, +// variant_spec, +// SchemaVariantSpecPropRoot::Secrets, +// secrets_prop_id, +// *schema_variant.id(), +// ) +// .await?, +// ); + +// if !variant_spec.secret_definitions()?.is_empty() { +// let secret_definition_prop_id = *Prop::new( +// ctx, +// "secret_definition", +// PropKind::Object, +// None, +// *schema_variant.id(), +// Some(*schema_variant.find_prop(ctx, &["root"]).await?.id()), +// ) +// .await? +// .id(); + +// side_effects.extend( +// create_props( +// ctx, +// change_set_pk, +// variant_spec, +// SchemaVariantSpecPropRoot::SecretDefinition, +// secret_definition_prop_id, +// *schema_variant.id(), +// ) +// .await?, +// ); +// } + +// match schema_variant +// .find_prop(ctx, &["root", "resource_value"]) +// .await +// { +// Ok(resource_value_prop) => { +// side_effects.extend( +// create_props( +// ctx, +// change_set_pk, +// variant_spec, +// SchemaVariantSpecPropRoot::ResourceValue, +// *resource_value_prop.id(), +// *schema_variant.id(), +// ) +// .await?, +// ); +// } +// Err(SchemaVariantError::PropNotFoundAtPath(_, _, _)) => { +// warn!("Cannot find /root/resource_value prop, so skipping creating props under the resource value. If the /root/resource_value pr has been merged, this should be an error!"); +// } +// Err(err) => Err(err)?, +// }; + +// if let Some(data) = variant_spec.data() { +// schema_variant +// .finalize(ctx, Some(data.component_type().into())) +// .await?; +// } + +// for action_func in &variant_spec.action_funcs()? 
{ +// let prototype = import_action_func( +// ctx, +// change_set_pk, +// action_func, +// *schema_variant.id(), +// thing_map, +// ) +// .await?; + +// if let (Some(prototype), Some(unique_id)) = (prototype, action_func.unique_id()) { +// thing_map.insert( +// change_set_pk, +// unique_id.to_owned(), +// Thing::ActionPrototype(prototype), +// ); +// } +// } + +// for leaf_func in variant_spec.leaf_functions()? { +// import_leaf_function( +// ctx, +// change_set_pk, +// leaf_func, +// *schema_variant.id(), +// thing_map, +// ) +// .await?; +// } + +// for socket in variant_spec.sockets()? { +// import_socket( +// ctx, +// change_set_pk, +// socket, +// *schema.id(), +// *schema_variant.id(), +// thing_map, +// ) +// .await?; +// } + +// // Default values must be set before attribute functions are configured so they don't +// // override the prototypes set there +// for default_value_info in side_effects.default_values { +// set_default_value(ctx, default_value_info).await?; +// } + +// // Set a default name value for all name props, this ensures region has a name before +// // the function is executed +// { +// let name_prop = schema_variant +// .find_prop(ctx, &["root", "si", "name"]) +// .await?; +// let name_default_value_info = DefaultValueInfo::String { +// prop_id: *name_prop.id(), +// default_value: schema.name().to_lowercase(), +// }; + +// set_default_value(ctx, name_default_value_info).await?; +// } + +// for si_prop_func in variant_spec.si_prop_funcs()? { +// let prop = schema_variant +// .find_prop(ctx, &si_prop_func.kind().prop_path()) +// .await?; +// import_attr_func_for_prop( +// ctx, +// change_set_pk, +// *schema_variant.id(), +// AttrFuncInfo { +// func_unique_id: si_prop_func.func_unique_id().to_owned(), +// prop_id: *prop.id(), +// inputs: si_prop_func +// .inputs()? 
+// .iter() +// .map(|input| input.to_owned().into()) +// .collect(), +// }, +// None, +// thing_map, +// ) +// .await?; +// } + +// for attr_func in side_effects.attr_funcs { +// import_attr_func_for_prop( +// ctx, +// change_set_pk, +// *schema_variant.id(), +// attr_func, +// None, +// thing_map, +// ) +// .await?; +// } + +// for (key, map_key_func) in side_effects.map_key_funcs { +// import_attr_func_for_prop( +// ctx, +// change_set_pk, +// *schema_variant.id(), +// map_key_func, +// Some(key), +// thing_map, +// ) +// .await?; +// } + +// for (prop_id, validation_spec) in side_effects.validations { +// import_prop_validation( +// ctx, +// change_set_pk, +// validation_spec, +// *schema.id(), +// *schema_variant.id(), +// prop_id, +// thing_map, +// ) +// .await?; +// } +// } + +// Ok(schema_variant) +// } + +// async fn set_default_value( +// ctx: &DalContext, +// default_value_info: DefaultValueInfo, +// ) -> PkgResult<()> { +// let prop = match &default_value_info { +// DefaultValueInfo::Number { prop_id, .. } +// | DefaultValueInfo::String { prop_id, .. } +// | DefaultValueInfo::Boolean { prop_id, .. } => Prop::get_by_id(ctx, prop_id) +// .await? +// .ok_or(PkgError::MissingProp(*prop_id))?, +// }; + +// match default_value_info { +// DefaultValueInfo::Boolean { default_value, .. } => { +// prop.set_default_value(ctx, default_value).await? +// } +// DefaultValueInfo::Number { default_value, .. } => { +// prop.set_default_value(ctx, default_value).await? +// } +// DefaultValueInfo::String { default_value, .. } => { +// prop.set_default_value(ctx, default_value).await? 
+// } +// } + +// Ok(()) +// } + +// async fn import_attr_func_for_prop( +// ctx: &DalContext, +// change_set_pk: Option, +// schema_variant_id: SchemaVariantId, +// AttrFuncInfo { +// func_unique_id, +// prop_id, +// inputs, +// }: AttrFuncInfo, +// key: Option, +// thing_map: &mut ThingMap, +// ) -> PkgResult<()> { +// match thing_map.get(change_set_pk, &func_unique_id.to_owned()) { +// Some(Thing::Func(func)) => { +// import_attr_func( +// ctx, +// change_set_pk, +// AttributeReadContext { +// prop_id: Some(prop_id), +// ..Default::default() +// }, +// key, +// schema_variant_id, +// *func.id(), +// inputs, +// thing_map, +// ) +// .await?; +// } +// _ => return Err(PkgError::MissingFuncUniqueId(func_unique_id.to_string())), +// } + +// Ok(()) +// } + +// async fn import_attr_func_for_output_socket( +// ctx: &DalContext, +// change_set_pk: Option, +// schema_variant_id: SchemaVariantId, +// external_provider_id: ExternalProviderId, +// func_unique_id: &str, +// inputs: Vec, +// thing_map: &mut ThingMap, +// ) -> PkgResult<()> { +// match thing_map.get(change_set_pk, &func_unique_id.to_owned()) { +// Some(Thing::Func(func)) => { +// import_attr_func( +// ctx, +// change_set_pk, +// AttributeReadContext { +// external_provider_id: Some(external_provider_id), +// ..Default::default() +// }, +// None, +// schema_variant_id, +// *func.id(), +// inputs, +// thing_map, +// ) +// .await?; +// } +// _ => return Err(PkgError::MissingFuncUniqueId(func_unique_id.to_string())), +// } + +// Ok(()) +// } + +// async fn get_prototype_for_context( +// ctx: &DalContext, +// context: AttributeReadContext, +// key: Option, +// ) -> PkgResult { +// let value = AttributeValue::find_for_context(ctx, context) +// .await? 
+// .ok_or(AttributeValueError::Missing)?; + +// let real_value = if let Some(key) = key { +// let parent_prop_id = context +// .prop_id() +// .ok_or(PkgError::AttributeFuncForKeyMissingProp( +// context, +// key.to_owned(), +// ))?; + +// let parent_prop = Prop::get_by_id(ctx, &parent_prop_id) +// .await? +// .ok_or(PkgError::MissingProp(parent_prop_id))?; + +// if *parent_prop.kind() != PropKind::Map { +// return Err(PkgError::AttributeFuncForKeySetOnWrongKind( +// parent_prop_id, +// key, +// *parent_prop.kind(), +// )); +// } + +// match parent_prop.child_props(ctx).await?.pop() { +// Some(item_prop) => { +// let item_write_context = AttributeContextBuilder::new() +// .set_prop_id(*item_prop.id()) +// .to_context()?; + +// let item_read_context: AttributeReadContext = item_write_context.to_owned().into(); + +// match AttributeValue::find_with_parent_and_key_for_context( +// ctx, +// Some(*value.id()), +// Some(key.to_owned()), +// item_read_context, +// ) +// .await? +// { +// Some(item_av) => item_av, +// None => { +// let item_id = AttributeValue::insert_for_context( +// ctx, +// item_write_context, +// *value.id(), +// None, +// Some(key), +// ) +// .await?; + +// AttributeValue::get_by_id(ctx, &item_id) +// .await? +// .ok_or(AttributeValueError::MissingForId(item_id))? +// } +// } +// } +// None => { +// return Err(PkgError::MissingItemPropForMapProp(parent_prop_id)); +// } +// } +// } else { +// value +// }; + +// Ok(real_value +// .attribute_prototype(ctx) +// .await? +// .ok_or(AttributeValueError::MissingAttributePrototype)?) +// } + +// async fn create_attr_proto_arg( +// ctx: &DalContext, +// prototype_id: AttributePrototypeId, +// input: &SiPkgAttrFuncInputView, +// func_id: FuncId, +// schema_variant_id: SchemaVariantId, +// ) -> PkgResult { +// let arg = match &input { +// SiPkgAttrFuncInputView::Prop { name, .. } +// | SiPkgAttrFuncInputView::InputSocket { name, .. } +// | SiPkgAttrFuncInputView::OutputSocket { name, .. 
} => { +// FuncArgument::find_by_name_for_func(ctx, name, func_id) +// .await? +// .ok_or(PkgError::MissingFuncArgument(name.to_owned(), func_id))? +// } +// }; + +// Ok(match input { +// SiPkgAttrFuncInputView::Prop { prop_path, .. } => { +// let prop = Prop::find_prop_by_path(ctx, schema_variant_id, &prop_path.into()).await?; +// let prop_ip = InternalProvider::find_for_prop(ctx, *prop.id()) +// .await? +// .ok_or(PkgError::MissingInternalProviderForProp(*prop.id()))?; + +// AttributePrototypeArgument::new_for_intra_component( +// ctx, +// prototype_id, +// *arg.id(), +// *prop_ip.id(), +// ) +// .await? +// } +// SiPkgAttrFuncInputView::InputSocket { socket_name, .. } => { +// let explicit_ip = InternalProvider::find_explicit_for_schema_variant_and_name( +// ctx, +// schema_variant_id, +// &socket_name, +// ) +// .await? +// .ok_or(PkgError::MissingInternalProviderForSocketName( +// socket_name.to_owned(), +// ))?; + +// AttributePrototypeArgument::new_for_intra_component( +// ctx, +// prototype_id, +// *arg.id(), +// *explicit_ip.id(), +// ) +// .await? +// } +// _ => { +// // xxx: make this an error +// panic!("unsupported taking external provider as input for prop"); +// } +// }) +// } + +// async fn update_attr_proto_arg( +// ctx: &DalContext, +// apa: &mut AttributePrototypeArgument, +// _prototype_id: AttributePrototypeId, +// input: &SiPkgAttrFuncInputView, +// func_id: FuncId, +// schema_variant_id: SchemaVariantId, +// ) -> PkgResult<()> { +// let arg = match &input { +// SiPkgAttrFuncInputView::Prop { name, .. } +// | SiPkgAttrFuncInputView::InputSocket { name, .. } +// | SiPkgAttrFuncInputView::OutputSocket { name, .. } => { +// FuncArgument::find_by_name_for_func(ctx, name, func_id) +// .await? +// .ok_or(PkgError::MissingFuncArgument(name.to_owned(), func_id))? +// } +// }; + +// if apa.func_argument_id() != *arg.id() { +// apa.set_func_argument_id(ctx, arg.id()).await?; +// } + +// match input { +// SiPkgAttrFuncInputView::Prop { prop_path, .. 
} => { +// let prop = Prop::find_prop_by_path(ctx, schema_variant_id, &prop_path.into()).await?; +// let prop_ip = InternalProvider::find_for_prop(ctx, *prop.id()) +// .await? +// .ok_or(PkgError::MissingInternalProviderForProp(*prop.id()))?; + +// if apa.internal_provider_id() != *prop_ip.id() { +// apa.set_internal_provider_id_safe(ctx, *prop_ip.id()) +// .await?; +// } +// } +// SiPkgAttrFuncInputView::InputSocket { socket_name, .. } => { +// let explicit_ip = InternalProvider::find_explicit_for_schema_variant_and_name( +// ctx, +// schema_variant_id, +// &socket_name, +// ) +// .await? +// .ok_or(PkgError::MissingInternalProviderForSocketName( +// socket_name.to_owned(), +// ))?; + +// if apa.internal_provider_id() != *explicit_ip.id() { +// apa.set_internal_provider_id_safe(ctx, *explicit_ip.id()) +// .await?; +// } +// } +// _ => {} +// } + +// Ok(()) +// } + +// #[allow(clippy::too_many_arguments)] +// async fn import_attr_func( +// ctx: &DalContext, +// change_set_pk: Option, +// context: AttributeReadContext, +// key: Option, +// schema_variant_id: SchemaVariantId, +// func_id: FuncId, +// inputs: Vec, +// thing_map: &mut ThingMap, +// ) -> PkgResult<()> { +// let mut prototype = get_prototype_for_context(ctx, context, key).await?; + +// if prototype.func_id() != func_id { +// prototype.set_func_id(ctx, &func_id).await?; +// } + +// for input in &inputs { +// match change_set_pk { +// None => { +// create_attr_proto_arg(ctx, *prototype.id(), input, func_id, schema_variant_id) +// .await?; +// } +// Some(_) => { +// let (unique_id, deleted) = match input { +// SiPkgAttrFuncInputView::Prop { +// unique_id, deleted, .. +// } +// | SiPkgAttrFuncInputView::InputSocket { +// unique_id, deleted, .. +// } +// | SiPkgAttrFuncInputView::OutputSocket { +// unique_id, deleted, .. 
+// } => ( +// unique_id +// .as_deref() +// .ok_or(PkgError::MissingUniqueIdForNode("attr-func-input".into()))?, +// *deleted, +// ), +// }; + +// let apa = match thing_map.get(change_set_pk, &unique_id.to_owned()) { +// Some(Thing::AttributePrototypeArgument(apa)) => { +// let mut apa = apa.to_owned(); +// if deleted { +// apa.delete_by_id(ctx).await?; +// } else { +// update_attr_proto_arg( +// ctx, +// &mut apa, +// *prototype.id(), +// input, +// func_id, +// schema_variant_id, +// ) +// .await?; +// } + +// Some(apa) +// } +// _ => { +// if deleted { +// None +// } else { +// Some( +// create_attr_proto_arg( +// ctx, +// *prototype.id(), +// input, +// func_id, +// schema_variant_id, +// ) +// .await?, +// ) +// } +// } +// }; + +// if let Some(apa) = apa { +// thing_map.insert( +// change_set_pk, +// unique_id.to_owned(), +// Thing::AttributePrototypeArgument(apa), +// ); +// } +// } +// } +// } + +// Ok(()) +// } + +// async fn create_validation( +// ctx: &DalContext, +// validation_kind: ValidationKind, +// builtin_func_id: FuncId, +// prop_id: PropId, +// schema_id: SchemaId, +// schema_variant_id: SchemaVariantId, +// ) -> PkgResult { +// let (validation_func_id, validation_args) = match validation_kind { +// ValidationKind::Builtin(validation) => ( +// builtin_func_id, +// serde_json::to_value(FuncBackendValidationArgs::new(validation))?, +// ), + +// ValidationKind::Custom(func_id) => (func_id, serde_json::json!(null)), +// }; +// let mut builder = ValidationPrototypeContext::builder(); +// builder +// .set_prop_id(prop_id) +// .set_schema_id(schema_id) +// .set_schema_variant_id(schema_variant_id); + +// Ok(ValidationPrototype::new( +// ctx, +// validation_func_id, +// validation_args, +// builder.to_context(ctx).await?, +// ) +// .await?) 
+// } + +// async fn update_validation( +// ctx: &DalContext, +// prototype: &mut ValidationPrototype, +// validation_kind: ValidationKind, +// builtin_func_id: FuncId, +// prop_id: PropId, +// schema_id: SchemaId, +// schema_variant_id: SchemaVariantId, +// ) -> PkgResult<()> { +// let (validation_func_id, validation_args) = match validation_kind { +// ValidationKind::Builtin(validation) => ( +// builtin_func_id, +// serde_json::to_value(FuncBackendValidationArgs::new(validation))?, +// ), + +// ValidationKind::Custom(func_id) => (func_id, serde_json::json!(null)), +// }; + +// prototype.set_prop_id(ctx, prop_id).await?; +// prototype.set_schema_id(ctx, schema_id).await?; +// prototype +// .set_schema_variant_id(ctx, schema_variant_id) +// .await?; +// prototype.set_args(ctx, validation_args).await?; +// prototype.set_func_id(ctx, validation_func_id).await?; + +// Ok(()) +// } + +// async fn import_prop_validation( +// ctx: &DalContext, +// change_set_pk: Option, +// spec: ValidationSpec, +// schema_id: SchemaId, +// schema_variant_id: SchemaVariantId, +// prop_id: PropId, +// thing_map: &mut ThingMap, +// ) -> PkgResult<()> { +// let builtin_validation_func = Func::find_by_attr(ctx, "name", &"si:validation") +// .await? +// .pop() +// .ok_or(FuncError::NotFoundByName("si:validation".to_string()))?; + +// let validation_kind = match &spec { +// ValidationSpec::IntegerIsBetweenTwoIntegers { +// lower_bound, +// upper_bound, +// .. +// } => ValidationKind::Builtin(Validation::IntegerIsBetweenTwoIntegers { +// value: None, +// lower_bound: *lower_bound, +// upper_bound: *upper_bound, +// }), +// ValidationSpec::IntegerIsNotEmpty { .. } => { +// ValidationKind::Builtin(Validation::IntegerIsNotEmpty { value: None }) +// } +// ValidationSpec::StringEquals { expected, .. } => { +// ValidationKind::Builtin(Validation::StringEquals { +// value: None, +// expected: expected.to_owned(), +// }) +// } +// ValidationSpec::StringHasPrefix { expected, .. 
} => { +// ValidationKind::Builtin(Validation::StringHasPrefix { +// value: None, +// expected: expected.to_owned(), +// }) +// } +// ValidationSpec::StringInStringArray { +// expected, +// display_expected, +// .. +// } => ValidationKind::Builtin(Validation::StringInStringArray { +// value: None, +// expected: expected.to_owned(), +// display_expected: *display_expected, +// }), +// ValidationSpec::StringIsHexColor { .. } => { +// ValidationKind::Builtin(Validation::StringIsHexColor { value: None }) +// } +// ValidationSpec::StringIsNotEmpty { .. } => { +// ValidationKind::Builtin(Validation::StringIsNotEmpty { value: None }) +// } +// ValidationSpec::StringIsValidIpAddr { .. } => { +// ValidationKind::Builtin(Validation::StringIsValidIpAddr { value: None }) +// } +// ValidationSpec::CustomValidation { func_unique_id, .. } => { +// ValidationKind::Custom(match thing_map.get(None, func_unique_id) { +// Some(Thing::Func(func)) => *func.id(), +// _ => return Err(PkgError::MissingFuncUniqueId(func_unique_id.to_owned())), +// }) +// } +// }; + +// match change_set_pk { +// None => { +// create_validation( +// ctx, +// validation_kind, +// *builtin_validation_func.id(), +// prop_id, +// schema_id, +// schema_variant_id, +// ) +// .await?; +// } +// Some(_) => { +// let unique_id = spec +// .unique_id() +// .ok_or(PkgError::MissingUniqueIdForNode("validation".into()))?; +// let deleted = spec.deleted(); + +// let validation_prototype = match thing_map.get(change_set_pk, &unique_id.to_owned()) { +// Some(Thing::Validation(prototype)) => { +// let mut prototype = prototype.to_owned(); + +// if deleted { +// prototype.delete_by_id(ctx).await?; +// } else { +// update_validation( +// ctx, +// &mut prototype, +// validation_kind, +// *builtin_validation_func.id(), +// prop_id, +// schema_id, +// schema_variant_id, +// ) +// .await?; +// } + +// Some(prototype) +// } +// _ => { +// if deleted { +// None +// } else { +// Some( +// create_validation( +// ctx, +// 
validation_kind, +// *builtin_validation_func.id(), +// prop_id, +// schema_id, +// schema_variant_id, +// ) +// .await?, +// ) +// } +// } +// }; + +// if let Some(prototype) = validation_prototype { +// thing_map.insert( +// change_set_pk, +// unique_id.to_owned(), +// Thing::Validation(prototype), +// ); +// } +// } +// } + +// Ok(()) +// } + +// fn prop_kind_for_pkg_prop(pkg_prop: &SiPkgProp<'_>) -> PropKind { +// match pkg_prop { +// SiPkgProp::Array { .. } => PropKind::Array, +// SiPkgProp::Boolean { .. } => PropKind::Boolean, +// SiPkgProp::Map { .. } => PropKind::Map, +// SiPkgProp::Number { .. } => PropKind::Integer, +// SiPkgProp::Object { .. } => PropKind::Object, +// SiPkgProp::String { .. } => PropKind::String, +// } +// } + +// async fn create_dal_prop( +// ctx: &DalContext, +// data: &SiPkgPropData, +// kind: PropKind, +// schema_variant_id: SchemaVariantId, +// parent_prop_id: Option, +// ) -> PkgResult { +// let mut prop = Prop::new( +// ctx, +// &data.name, +// kind, +// Some(((&data.widget_kind).into(), data.widget_options.to_owned())), +// schema_variant_id, +// parent_prop_id, +// ) +// .await +// .map_err(SiPkgError::visit_prop)?; + +// prop.set_hidden(ctx, data.hidden).await?; +// prop.set_doc_link(ctx, data.doc_link.as_ref().map(|l| l.to_string())) +// .await?; + +// Ok(prop) +// } + +// async fn create_prop( +// spec: SiPkgProp<'_>, +// parent_prop_info: Option<(PropId, PropPath)>, +// ctx: &PropVisitContext<'_>, +// ) -> PkgResult> { +// let prop = match ctx.change_set_pk { +// None => { +// let data = spec.data().ok_or(PkgError::DataNotFound("prop".into()))?; +// create_dal_prop( +// ctx.ctx, +// data, +// prop_kind_for_pkg_prop(&spec), +// ctx.schema_variant_id, +// parent_prop_info.map(|info| info.0), +// ) +// .await? 
+// } +// Some(_) => { +// let parent_path = parent_prop_info +// .as_ref() +// .map(|info| info.1.to_owned()) +// .unwrap_or(PropPath::new(["root"])); + +// let path = parent_path.join(&PropPath::new([spec.name()])); + +// match Prop::find_prop_by_path_opt(ctx.ctx, ctx.schema_variant_id, &path).await? { +// None => { +// let data = spec.data().ok_or(PkgError::DataNotFound("prop".into()))?; +// create_dal_prop( +// ctx.ctx, +// data, +// prop_kind_for_pkg_prop(&spec), +// ctx.schema_variant_id, +// parent_prop_info.as_ref().map(|info| info.0.to_owned()), +// ) +// .await? +// } +// Some(prop) => prop, +// } +// } +// }; + +// let prop_id = *prop.id(); + +// // Both attribute functions and default values have to be set *after* the schema variant is +// // "finalized", so we can't do until we construct the *entire* prop tree. Hence we push work +// // queues up to the outer context via the PropVisitContext, which uses Mutexes for interior +// // mutability (maybe there's a better type for that here?) + +// if let Some(data) = spec.data() { +// if let Some(default_value_info) = match &spec { +// SiPkgProp::String { .. } => { +// if let Some(serde_json::Value::String(default_value)) = &data.default_value { +// Some(DefaultValueInfo::String { +// prop_id, +// default_value: default_value.to_owned(), +// }) +// } else { +// // Raise error here for type mismatch +// None +// } +// } +// SiPkgProp::Number { .. } => { +// if let Some(serde_json::Value::Number(default_value_number)) = &data.default_value { +// if default_value_number.is_i64() { +// default_value_number +// .as_i64() +// .map(|dv_i64| DefaultValueInfo::Number { +// prop_id, +// default_value: dv_i64, +// }) +// } else { +// None +// } +// } else { +// None +// } +// } +// SiPkgProp::Boolean { .. 
} => { +// if let Some(serde_json::Value::Bool(default_value)) = &data.default_value { +// Some(DefaultValueInfo::Boolean { +// prop_id, +// default_value: *default_value, +// }) +// } else { +// None +// } +// } +// // Default values for complex types are not yet supported in packages +// _ => None, +// } { +// ctx.default_values.lock().await.push(default_value_info); +// } +// } + +// if matches!(&spec, SiPkgProp::Map { .. }) { +// for map_key_func in spec.map_key_funcs()? { +// let key = map_key_func.key(); +// let mut inputs = map_key_func.inputs()?; +// let func_unique_id = map_key_func.func_unique_id(); + +// ctx.map_key_funcs.lock().await.push(( +// key.to_owned(), +// AttrFuncInfo { +// func_unique_id: func_unique_id.to_owned(), +// prop_id, +// inputs: inputs.drain(..).map(Into::into).collect(), +// }, +// )); +// } +// } + +// if let Some(func_unique_id) = spec.data().and_then(|data| data.func_unique_id.to_owned()) { +// let mut inputs = spec.inputs()?; +// ctx.attr_funcs.lock().await.push(AttrFuncInfo { +// func_unique_id, +// prop_id, +// inputs: inputs.drain(..).map(Into::into).collect(), +// }); +// } + +// for validation_pkg_spec in spec.validations()? 
{ +// let validation_spec: ValidationSpec = validation_pkg_spec.try_into()?; + +// ctx.validations +// .lock() +// .await +// .push((*prop.id(), validation_spec)); +// } + +// Ok(Some((*prop.id(), prop.path()))) +// } diff --git a/lib/dal/src/prop.rs b/lib/dal/src/prop.rs index 455718241e..3677c964be 100644 --- a/lib/dal/src/prop.rs +++ b/lib/dal/src/prop.rs @@ -1,33 +1,138 @@ -use async_recursion::async_recursion; +use content_store::ContentHash; use serde::{Deserialize, Serialize}; use serde_json::Value; -use si_data_pg::PgError; + use si_pkg::PropSpecKind; -use std::collections::VecDeque; -use strum::{AsRefStr, Display, EnumIter, EnumString}; + +use strum::{AsRefStr, Display, EnumDiscriminants, EnumIter, EnumString}; use telemetry::prelude::*; -use thiserror::Error; -use crate::standard_model::{ - finish_create_from_row, object_option_from_row_option, objects_from_rows, -}; +use crate::workspace_snapshot::content_address::ContentAddress; +use crate::FuncBackendResponseType; use crate::{ - attribute::{prototype::AttributePrototype, value::AttributeValue}, - func::{ - binding::{FuncBinding, FuncBindingError}, - binding_return_value::FuncBindingReturnValueError, - }, - impl_standard_model, - label_list::ToLabelList, - pk, - property_editor::schema::WidgetKind, - standard_model, standard_model_accessor, standard_model_belongs_to, standard_model_has_many, - AttributeContext, AttributeContextBuilder, AttributeContextBuilderError, - AttributePrototypeError, AttributeReadContext, DalContext, Func, FuncError, FuncId, - HistoryEventError, SchemaVariantId, StandardModel, StandardModelError, Tenancy, Timestamp, - Visibility, + label_list::ToLabelList, pk, property_editor::schema::WidgetKind, FuncId, StandardModel, + Timestamp, }; -use crate::{AttributeValueError, AttributeValueId, FuncBackendResponseType, TransactionsError}; + +pub const PROP_VERSION: PropContentDiscriminants = PropContentDiscriminants::V1; + +pk!(PropId); + +/// An individual "field" within the tree of a 
[`SchemaVariant`](crate::SchemaVariant). +#[derive(Clone, Debug, Deserialize, Eq, PartialEq, Serialize)] +pub struct Prop { + pub id: PropId, + #[serde(flatten)] + pub timestamp: Timestamp, + /// The name of the [`Prop`]. + pub name: String, + /// The kind of the [`Prop`]. + pub kind: PropKind, + /// The kind of "widget" that should be used for this [`Prop`]. + pub widget_kind: WidgetKind, + /// The configuration of the "widget". + pub widget_options: Option, + /// A link to external documentation for working with this specific [`Prop`]. + pub doc_link: Option, + /// A toggle for whether or not the [`Prop`] should be visually hidden. + pub hidden: bool, + /// Props can be connected to eachother to signify that they should contain the same value + /// This is useful for diffing the resource with the domain, to suggest actions if the real world changes + pub refers_to_prop_id: Option, + /// Connected props may need a custom diff function + pub diff_func_id: Option, +} + +#[derive(Debug, PartialEq)] +pub struct PropGraphNode { + id: PropId, + content_address: ContentAddress, + content: PropContentV1, +} + +#[derive(EnumDiscriminants, Serialize, Deserialize, PartialEq)] +#[serde(tag = "version")] +pub enum PropContent { + V1(PropContentV1), +} + +#[derive(Debug, Clone, Deserialize, Serialize, PartialEq)] +pub struct PropContentV1 { + #[serde(flatten)] + pub timestamp: Timestamp, + /// The name of the [`Prop`]. + pub name: String, + /// The kind of the [`Prop`]. + pub kind: PropKind, + /// The kind of "widget" that should be used for this [`Prop`]. + pub widget_kind: WidgetKind, + /// The configuration of the "widget". + pub widget_options: Option, + /// A link to external documentation for working with this specific [`Prop`]. + pub doc_link: Option, + /// A toggle for whether or not the [`Prop`] should be visually hidden. 
+ pub hidden: bool, + /// Props can be connected to eachother to signify that they should contain the same value + /// This is useful for diffing the resource with the domain, to suggest actions if the real world changes + pub refers_to_prop_id: Option, + /// Connected props may need a custom diff function + pub diff_func_id: Option, +} + +impl Prop { + pub fn assemble(id: PropId, inner: &PropContentV1) -> Self { + Self { + id, + timestamp: inner.timestamp, + name: inner.name.clone(), + kind: inner.kind, + widget_kind: inner.widget_kind, + widget_options: inner.widget_options.clone(), + doc_link: inner.doc_link.clone(), + hidden: inner.hidden, + refers_to_prop_id: inner.refers_to_prop_id, + diff_func_id: inner.diff_func_id, + } + } + + pub fn id(&self) -> PropId { + self.id + } +} + +impl From for PropContentV1 { + fn from(value: Prop) -> Self { + Self { + timestamp: value.timestamp, + name: value.name.clone(), + kind: value.kind, + widget_kind: value.widget_kind, + widget_options: value.widget_options.clone(), + doc_link: value.doc_link.clone(), + hidden: value.hidden, + refers_to_prop_id: value.refers_to_prop_id, + diff_func_id: value.diff_func_id, + } + } +} + +impl PropGraphNode { + pub fn assemble( + id: impl Into, + content_hash: ContentHash, + content: PropContentV1, + ) -> Self { + Self { + id: id.into(), + content_address: ContentAddress::Prop(content_hash), + content, + } + } + + pub fn id(&self) -> PropId { + self.id + } +} /// This is the separator used for the "path" column. It is a vertical tab character, which should /// not (we'll see) be able to be provided by our users in [`Prop`] names. 
@@ -116,61 +221,9 @@ impl From for PropPath { } } -const ALL_ANCESTOR_PROPS: &str = include_str!("queries/prop/all_ancestor_props.sql"); -const FIND_ROOT_PROP_FOR_PROP: &str = include_str!("queries/prop/root_prop_for_prop.sql"); -const FIND_PROP_IN_TREE: &str = include_str!("queries/prop/find_prop_in_tree.sql"); - -#[remain::sorted] -#[derive(Error, Debug)] -pub enum PropError { - #[error("Array prop {0} is missing element child")] - ArrayMissingElementChild(PropId), - #[error("AttributeContext error: {0}")] - AttributeContext(#[from] AttributeContextBuilderError), - #[error("AttributePrototype error: {0}")] - AttributePrototype(#[from] AttributePrototypeError), - #[error("AttributeValue error: {0}")] - AttributeValue(#[from] AttributeValueError), - #[error("default diff function not found")] - DefaultDiffFunctionNotFound, - #[error("expected child prop not found with name {0}")] - ExpectedChildNotFound(String), - #[error("Func error: {0}")] - Func(#[from] FuncError), - #[error("FuncBinding error: {0}")] - FuncBinding(#[from] FuncBindingError), - #[error("FuncBindingReturnValue error: {0}")] - FuncBindingReturnValue(#[from] FuncBindingReturnValueError), - #[error("history event error: {0}")] - HistoryEvent(#[from] HistoryEventError), - #[error("Map prop {0} is missing element child")] - MapMissingElementChild(PropId), - #[error("missing a func: {0}")] - MissingFunc(String), - #[error("missing a func by id: {0}")] - MissingFuncById(FuncId), - #[error("prop not found: {0} ({1:?})")] - NotFound(PropId, Visibility), - #[error("prop not found at path: {0} {1:?}")] - NotFoundAtPath(String, Visibility), - #[error("parent prop kind is not \"Object\", which is required for setting default values on props (found {0})")] - ParentPropIsNotObjectForPropWithDefaultValue(PropKind), - #[error("pg error: {0}")] - Pg(#[from] PgError), - #[error(transparent)] - SerdeJson(#[from] serde_json::Error), - #[error("unable to set default value for non scalar prop type")] - 
SetDefaultForNonScalar(PropKind), - #[error("standard model error: {0}")] - StandardModel(#[from] StandardModelError), - #[error("transactions error: {0}")] - Transactions(#[from] TransactionsError), -} - -pub type PropResult = Result; - -pk!(PropPk); -pk!(PropId); +// const ALL_ANCESTOR_PROPS: &str = include_str!("queries/prop/all_ancestor_props.sql"); +// const FIND_ROOT_PROP_FOR_PROP: &str = include_str!("queries/prop/root_prop_for_prop.sql"); +// const FIND_PROP_IN_TREE: &str = include_str!("queries/prop/find_prop_in_tree.sql"); #[remain::sorted] #[derive( @@ -237,420 +290,420 @@ impl From for FuncBackendResponseType { } } -/// An individual "field" within the tree of a [`SchemaVariant`](crate::SchemaVariant). -#[derive(Clone, Debug, Deserialize, Eq, PartialEq, Serialize)] -pub struct Prop { - pk: PropPk, - id: PropId, - #[serde(flatten)] - tenancy: Tenancy, - #[serde(flatten)] - timestamp: Timestamp, - #[serde(flatten)] - visibility: Visibility, - - /// The name of the [`Prop`]. - name: String, - /// The kind of the [`Prop`]. - kind: PropKind, - /// The kind of "widget" that should be used for this [`Prop`]. - widget_kind: WidgetKind, - /// The configuration of the "widget". - widget_options: Option, - /// A link to external documentation for working with this specific [`Prop`]. - doc_link: Option, - /// Embedded documentation for working with this specific [`Prop`]. - documentation: Option, - /// A toggle for whether or not the [`Prop`] should be visually hidden. - hidden: bool, - /// The "path" for a given [`Prop`]. It is a concatenation of [`Prop`] names based on lineage - /// with [`PROP_PATH_SEPARATOR`] as the separator between each parent and child. - /// - /// This is useful for finding and querying for specific [`Props`](Prop) in a - /// [`SchemaVariant`](crate::SchemaVariant)'s tree. - path: String, - /// The [`SchemaVariant`](crate::SchemaVariant) whose tree we (the [`Prop`]) reside in. 
- schema_variant_id: SchemaVariantId, - /// Props can be connected to eachother to signify that they should contain the same value - /// This is useful for diffing the resource with the domain, to suggest actions if the real world changes - refers_to_prop_id: Option, - /// Connected props may need a custom diff function - diff_func_id: Option, -} - -impl_standard_model! { - model: Prop, - pk: PropPk, - id: PropId, - table_name: "props", - history_event_label_base: "prop", - history_event_message_name: "Prop" -} - -impl Prop { - /// Create a new [`Prop`]. A corresponding [`AttributePrototype`] and [`AttributeValue`] will be - /// created when the provided [`SchemaVariant`](crate::SchemaVariant) is - /// [`finalized`](crate::SchemaVariant::finalize). - #[instrument(skip_all)] - pub async fn new( - ctx: &DalContext, - name: impl AsRef, - kind: PropKind, - widget_kind_and_options: Option<(WidgetKind, Option)>, - schema_variant_id: SchemaVariantId, - parent_prop_id: Option, - documentation: Option, - ) -> PropResult { - let name = name.as_ref(); - let (widget_kind, widget_options) = match widget_kind_and_options { - Some((kind, options)) => (kind, options), - None => (WidgetKind::from(kind), None), - }; - - let row = ctx - .txns() - .await? - .pg() - .query_one( - "SELECT object FROM prop_create_v2($1, $2, $3, $4, $5, $6, $7, $8, $9)", - &[ - ctx.tenancy(), - ctx.visibility(), - &name, - &kind.as_ref(), - &widget_kind.as_ref(), - &widget_options.as_ref(), - &schema_variant_id, - &parent_prop_id, - &documentation, - ], - ) - .await?; - Ok(finish_create_from_row(ctx, row).await?) 
- } - - standard_model_accessor!(name, String, PropResult); - standard_model_accessor!(kind, Enum(PropKind), PropResult); - standard_model_accessor!(widget_kind, Enum(WidgetKind), PropResult); - standard_model_accessor!(widget_options, Option, PropResult); - standard_model_accessor!(doc_link, Option, PropResult); - standard_model_accessor!(documentation, Option, PropResult); - standard_model_accessor!(hidden, bool, PropResult); - standard_model_accessor!(refers_to_prop_id, Option, PropResult); - standard_model_accessor!(diff_func_id, Option, PropResult); - standard_model_accessor!(schema_variant_id, Pk(SchemaVariantId), PropResult); - - pub fn path(&self) -> PropPath { - self.path.to_owned().into() - } - - // TODO(nick): replace this table with a foreign key relationship. - standard_model_belongs_to!( - lookup_fn: parent_prop, - set_fn: set_parent_prop_do_not_use, - unset_fn: unset_parent_prop_do_not_use, - table: "prop_belongs_to_prop", - model_table: "props", - belongs_to_id: PropId, - returns: Prop, - result: PropResult, - ); - - // TODO(nick): replace this table with a foreign key relationship. - standard_model_has_many!( - lookup_fn: child_props, - table: "prop_belongs_to_prop", - model_table: "props", - returns: Prop, - result: PropResult, - ); - - pub async fn find_root_prop_for_prop( - ctx: &DalContext, - prop_id: PropId, - ) -> PropResult> { - let row = ctx - .txns() - .await? - .pg() - .query_opt( - FIND_ROOT_PROP_FOR_PROP, - &[ctx.tenancy(), ctx.visibility(), &prop_id], - ) - .await?; - - Ok(standard_model::object_option_from_row_option::(row)?) - } - - /// Returns the given [`Prop`] and all ancestor [`Props`](crate::Prop) back to the root. - /// Ancestor props are ordered by depth, starting from the root prop. - pub async fn all_ancestor_props(ctx: &DalContext, prop_id: PropId) -> PropResult> { - let rows = ctx - .txns() - .await? 
- .pg() - .query( - ALL_ANCESTOR_PROPS, - &[ctx.tenancy(), ctx.visibility(), &prop_id], - ) - .await?; - Ok(objects_from_rows(rows)?) - } - - #[instrument(skip_all)] - #[async_recursion] - pub async fn ts_type(&self, ctx: &DalContext) -> PropResult { - // XXX: Hack! The payload prop kind is a string but we're actually storing arbitrary json - // there and expect it to be a JSON object in most of our code. However, the resource_value - // work is likely to remove the need for this entirely - if self.path() == PropPath::new(["root", "resource", "payload"]) { - return Ok("any".to_string()); - } - - // Note: we should fix this by having propper enums as prop types - if self.path() == PropPath::new(["root", "resource", "status"]) { - return Ok("'ok' | 'warning' | 'error' | undefined | null".to_owned()); - } - - Ok(match self.kind() { - PropKind::Array => format!( - "{}[]", - self.child_props(ctx) - .await? - .get(0) - .ok_or(PropError::ArrayMissingElementChild(self.id))? - .ts_type(ctx) - .await? - ), - PropKind::Boolean => "boolean".to_string(), - PropKind::Integer => "number".to_string(), - PropKind::Map => format!( - "Record", - self.child_props(ctx) - .await? - .get(0) - .ok_or(PropError::MapMissingElementChild(self.id))? - .ts_type(ctx) - .await? - ), - PropKind::Object => { - let mut object_type = "{\n".to_string(); - for child in self.child_props(ctx).await? { - let name_value = serde_json::to_value(&child.name)?; - let name_serialized = serde_json::to_string(&name_value)?; - object_type.push_str( - format!( - "{}: {} | null | undefined;\n", - &name_serialized, - child.ts_type(ctx).await? - ) - .as_str(), - ); - } - object_type.push('}'); - - object_type - } - PropKind::String => "string".to_string(), - }) - } - - /// Assembles the "json_pointer" representing the full "path" to a [`Prop`] based on its - /// lineage. 
- /// - /// For examples, if a [`Prop`] named "poop" had a parent named "domain" and a grandparent named - /// "root", then the "json_pointer" would be "/root/domain/poop". - pub async fn json_pointer(&self, ctx: &DalContext) -> PropResult { - // NOTE(nick,zack): if this ends up getting used frequently to manage paths corresponding - // to attribute (and/or property editor) values, then we should consider strongly typing - // "json_pointer". - Ok([ - "/".to_string(), - Prop::all_ancestor_props(ctx, *self.id()) - .await? - .iter() - .map(|prop| prop.name().to_string()) - .collect::>() - .join("/"), - ] - .join("")) - } - - /// Finds a prop by a path made up of prop names separated by - /// [`PROP_PATH_SEPARATOR`](crate::prop::PROP_PATH_SEPARATOR) for each depth level - pub async fn find_prop_by_path( - ctx: &DalContext, - schema_variant_id: SchemaVariantId, - path: &PropPath, - ) -> PropResult { - Self::find_prop_by_path_opt(ctx, schema_variant_id, path) - .await? - .ok_or(PropError::NotFoundAtPath( - path.to_string(), - *ctx.visibility(), - )) - } - - /// Finds a prop by a path made up of prop names separated by - /// [`PROP_PATH_SEPARATOR`](crate::prop::PROP_PATH_SEPARATOR) for each depth level - pub async fn find_prop_by_path_opt( - ctx: &DalContext, - schema_variant_id: SchemaVariantId, - path: &PropPath, - ) -> PropResult> { - let row = ctx - .txns() - .await? - .pg() - .query_opt( - FIND_PROP_IN_TREE, - &[ - ctx.tenancy(), - ctx.visibility(), - &schema_variant_id, - &path.as_str(), - ], - ) - .await?; - - Ok(object_option_from_row_option(row)?) - } - - pub async fn create_default_prototypes_and_values( - ctx: &DalContext, - prop_id: PropId, - ) -> PropResult<()> { - #[derive(Debug)] - struct WorkItem { - maybe_parent: Option, - prop: Prop, - } - - let mut root_prop = Prop::get_by_id(ctx, &prop_id) - .await? 
- .ok_or_else(|| PropError::NotFound(prop_id, *ctx.visibility()))?; - - // We should make sure that we're creating AttributePrototypes & AttributeValues - // contiguously from the root. - while let Some(parent) = root_prop.parent_prop(ctx).await? { - root_prop = parent; - } - - let mut work_queue: VecDeque = VecDeque::from(vec![WorkItem { - maybe_parent: None, - prop: root_prop, - }]); - - let func_name = "si:unset".to_string(); - let mut funcs = Func::find_by_attr(ctx, "name", &func_name).await?; - let func = funcs.pop().ok_or(PropError::MissingFunc(func_name))?; - - // No matter what, we need a FuncBindingReturnValueId to create a new attribute prototype. - // If the func binding was created, we execute on it to generate our value id. Otherwise, - // we try to find a value by id and then fallback to executing anyway if one was not found. - let (func_binding, func_binding_return_value) = - FuncBinding::create_and_execute(ctx, serde_json::json![null], *func.id(), vec![]) - .await?; - - while let Some(WorkItem { maybe_parent, prop }) = work_queue.pop_front() { - let attribute_context = AttributeContext::builder() - .set_prop_id(*prop.id()) - .to_context()?; - - let attribute_value = if let Some(attribute_value) = - AttributeValue::find_for_context(ctx, attribute_context.into()).await? - { - attribute_value - } else { - AttributePrototype::new( - ctx, - *func.id(), - *func_binding.id(), - *func_binding_return_value.id(), - attribute_context, - None, - maybe_parent, - ) - .await?; - - AttributeValue::find_for_context(ctx, attribute_context.into()) - .await? - .ok_or(AttributeValueError::NotFoundForReadContext( - attribute_context.into(), - ))? 
- }; - - if *prop.kind() == PropKind::Object { - let child_props = prop.child_props(ctx).await?; - if !child_props.is_empty() { - work_queue.extend(child_props.iter().map(|p| WorkItem { - maybe_parent: Some(*attribute_value.id()), - prop: p.clone(), - })); - } - } - } - - Ok(()) - } - - pub async fn set_default_value( - &self, - ctx: &DalContext, - value: T, - ) -> PropResult<()> { - let value = serde_json::to_value(value)?; - match self.kind() { - PropKind::String | PropKind::Boolean | PropKind::Integer => { - let attribute_read_context = AttributeReadContext::default_with_prop(self.id); - let attribute_value = AttributeValue::find_for_context(ctx, attribute_read_context) - .await? - .ok_or(AttributeValueError::NotFoundForReadContext( - attribute_read_context, - ))?; - let parent_attribute_value = attribute_value - .parent_attribute_value(ctx) - .await? - .ok_or_else(|| AttributeValueError::ParentNotFound(*attribute_value.id()))?; - - // Ensure the parent project is an object. Technically, we should ensure that every - // prop in entire lineage is of kind object, but this should (hopefully) suffice - // for now. Ideally, this would be handled in a query. - let parent_prop = Prop::get_by_id(ctx, &parent_attribute_value.context.prop_id()) - .await? 
- .ok_or_else(|| { - PropError::NotFound( - parent_attribute_value.context.prop_id(), - *ctx.visibility(), - ) - })?; - if parent_prop.kind() != &PropKind::Object { - return Err(PropError::ParentPropIsNotObjectForPropWithDefaultValue( - *parent_prop.kind(), - )); - } - - let context = AttributeContextBuilder::from(attribute_read_context).to_context()?; - AttributeValue::update_for_context( - ctx, - *attribute_value.id(), - Some(*parent_attribute_value.id()), - context, - Some(value), - None, - ) - .await?; - Ok(()) - } - _ => Err(PropError::SetDefaultForNonScalar(*self.kind())), - } - } - - pub async fn set_default_diff(&mut self, ctx: &DalContext) -> PropResult<()> { - let func = Func::find_by_attr(ctx, "name", &"si:diff") - .await? - .pop() - .ok_or(PropError::DefaultDiffFunctionNotFound)?; - self.set_diff_func_id(ctx, Some(*func.id())).await - } -} +// /// An individual "field" within the tree of a [`SchemaVariant`](crate::SchemaVariant). +// #[derive(Clone, Debug, Deserialize, Eq, PartialEq, Serialize)] +// pub struct Prop { +// pk: PropPk, +// id: PropId, +// #[serde(flatten)] +// tenancy: Tenancy, +// #[serde(flatten)] +// timestamp: Timestamp, +// #[serde(flatten)] +// visibility: Visibility, + +// /// The name of the [`Prop`]. +// name: String, +// /// The kind of the [`Prop`]. +// kind: PropKind, +// /// The kind of "widget" that should be used for this [`Prop`]. +// widget_kind: WidgetKind, +// /// The configuration of the "widget". +// widget_options: Option, +// /// A link to external documentation for working with this specific [`Prop`]. +// doc_link: Option, +// /// Embedded documentation for working with this specific [`Prop`]. +// documentation: Option, +// /// A toggle for whether or not the [`Prop`] should be visually hidden. +// hidden: bool, +// /// The "path" for a given [`Prop`]. It is a concatenation of [`Prop`] names based on lineage +// /// with [`PROP_PATH_SEPARATOR`] as the separator between each parent and child. 
+// /// +// /// This is useful for finding and querying for specific [`Props`](Prop) in a +// /// [`SchemaVariant`](crate::SchemaVariant)'s tree. +// path: String, +// /// The [`SchemaVariant`](crate::SchemaVariant) whose tree we (the [`Prop`]) reside in. +// schema_variant_id: SchemaVariantId, +// /// Props can be connected to eachother to signify that they should contain the same value +// /// This is useful for diffing the resource with the domain, to suggest actions if the real world changes +// refers_to_prop_id: Option, +// /// Connected props may need a custom diff function +// diff_func_id: Option, +// } + +// impl_standard_model! { +// model: Prop, +// pk: PropPk, +// id: PropId, +// table_name: "props", +// history_event_label_base: "prop", +// history_event_message_name: "Prop" +// } + +// impl Prop { +// /// Create a new [`Prop`]. A corresponding [`AttributePrototype`] and [`AttributeValue`] will be +// /// created when the provided [`SchemaVariant`](crate::SchemaVariant) is +// /// [`finalized`](crate::SchemaVariant::finalize). +// #[instrument(skip_all)] +// pub async fn new( +// ctx: &DalContext, +// name: impl AsRef, +// kind: PropKind, +// widget_kind_and_options: Option<(WidgetKind, Option)>, +// schema_variant_id: SchemaVariantId, +// parent_prop_id: Option, +// documentation: Option, +// ) -> PropResult { +// let name = name.as_ref(); +// let (widget_kind, widget_options) = match widget_kind_and_options { +// Some((kind, options)) => (kind, options), +// None => (WidgetKind::from(kind), None), +// }; + +// let row = ctx +// .txns() +// .await? +// .pg() +// .query_one( +// "SELECT object FROM prop_create_v2($1, $2, $3, $4, $5, $6, $7, $8, $9)", +// &[ +// ctx.tenancy(), +// ctx.visibility(), +// &name, +// &kind.as_ref(), +// &widget_kind.as_ref(), +// &widget_options.as_ref(), +// &schema_variant_id, +// &parent_prop_id, +// &documentation, +// ], +// ) +// .await?; +// Ok(finish_create_from_row(ctx, row).await?) 
+// } + +// standard_model_accessor!(name, String, PropResult); +// standard_model_accessor!(kind, Enum(PropKind), PropResult); +// standard_model_accessor!(widget_kind, Enum(WidgetKind), PropResult); +// standard_model_accessor!(widget_options, Option, PropResult); +// standard_model_accessor!(doc_link, Option, PropResult); +// standard_model_accessor!(documentation, Option, PropResult); +// standard_model_accessor!(hidden, bool, PropResult); +// standard_model_accessor!(refers_to_prop_id, Option, PropResult); +// standard_model_accessor!(diff_func_id, Option, PropResult); +// standard_model_accessor!(schema_variant_id, Pk(SchemaVariantId), PropResult); + +// pub fn path(&self) -> PropPath { +// self.path.to_owned().into() +// } + +// // TODO(nick): replace this table with a foreign key relationship. +// standard_model_belongs_to!( +// lookup_fn: parent_prop, +// set_fn: set_parent_prop_do_not_use, +// unset_fn: unset_parent_prop_do_not_use, +// table: "prop_belongs_to_prop", +// model_table: "props", +// belongs_to_id: PropId, +// returns: Prop, +// result: PropResult, +// ); + +// // TODO(nick): replace this table with a foreign key relationship. +// standard_model_has_many!( +// lookup_fn: child_props, +// table: "prop_belongs_to_prop", +// model_table: "props", +// returns: Prop, +// result: PropResult, +// ); + +// pub async fn find_root_prop_for_prop( +// ctx: &DalContext, +// prop_id: PropId, +// ) -> PropResult> { +// let row = ctx +// .txns() +// .await? +// .pg() +// .query_opt( +// FIND_ROOT_PROP_FOR_PROP, +// &[ctx.tenancy(), ctx.visibility(), &prop_id], +// ) +// .await?; + +// Ok(standard_model::object_option_from_row_option::(row)?) +// } + +// /// Returns the given [`Prop`] and all ancestor [`Props`](crate::Prop) back to the root. +// /// Ancestor props are ordered by depth, starting from the root prop. +// pub async fn all_ancestor_props(ctx: &DalContext, prop_id: PropId) -> PropResult> { +// let rows = ctx +// .txns() +// .await? 
+// .pg() +// .query( +// ALL_ANCESTOR_PROPS, +// &[ctx.tenancy(), ctx.visibility(), &prop_id], +// ) +// .await?; +// Ok(objects_from_rows(rows)?) +// } + +// #[instrument(skip_all)] +// #[async_recursion] +// pub async fn ts_type(&self, ctx: &DalContext) -> PropResult { +// // XXX: Hack! The payload prop kind is a string but we're actually storing arbitrary json +// // there and expect it to be a JSON object in most of our code. However, the resource_value +// // work is likely to remove the need for this entirely +// if self.path() == PropPath::new(["root", "resource", "payload"]) { +// return Ok("any".to_string()); +// } + +// // Note: we should fix this by having propper enums as prop types +// if self.path() == PropPath::new(["root", "resource", "status"]) { +// return Ok("'ok' | 'warning' | 'error' | undefined | null".to_owned()); +// } + +// Ok(match self.kind() { +// PropKind::Array => format!( +// "{}[]", +// self.child_props(ctx) +// .await? +// .get(0) +// .ok_or(PropError::ArrayMissingElementChild(self.id))? +// .ts_type(ctx) +// .await? +// ), +// PropKind::Boolean => "boolean".to_string(), +// PropKind::Integer => "number".to_string(), +// PropKind::Map => format!( +// "Record", +// self.child_props(ctx) +// .await? +// .get(0) +// .ok_or(PropError::MapMissingElementChild(self.id))? +// .ts_type(ctx) +// .await? +// ), +// PropKind::Object => { +// let mut object_type = "{\n".to_string(); +// for child in self.child_props(ctx).await? { +// let name_value = serde_json::to_value(&child.name)?; +// let name_serialized = serde_json::to_string(&name_value)?; +// object_type.push_str( +// format!( +// "{}: {} | null | undefined;\n", +// &name_serialized, +// child.ts_type(ctx).await? +// ) +// .as_str(), +// ); +// } +// object_type.push('}'); + +// object_type +// } +// PropKind::String => "string".to_string(), +// }) +// } + +// /// Assembles the "json_pointer" representing the full "path" to a [`Prop`] based on its +// /// lineage. 
+// /// +// /// For examples, if a [`Prop`] named "poop" had a parent named "domain" and a grandparent named +// /// "root", then the "json_pointer" would be "/root/domain/poop". +// pub async fn json_pointer(&self, ctx: &DalContext) -> PropResult { +// // NOTE(nick,zack): if this ends up getting used frequently to manage paths corresponding +// // to attribute (and/or property editor) values, then we should consider strongly typing +// // "json_pointer". +// Ok([ +// "/".to_string(), +// Prop::all_ancestor_props(ctx, *self.id()) +// .await? +// .iter() +// .map(|prop| prop.name().to_string()) +// .collect::>() +// .join("/"), +// ] +// .join("")) +// } + +// /// Finds a prop by a path made up of prop names separated by +// /// [`PROP_PATH_SEPARATOR`](crate::prop::PROP_PATH_SEPARATOR) for each depth level +// pub async fn find_prop_by_path( +// ctx: &DalContext, +// schema_variant_id: SchemaVariantId, +// path: &PropPath, +// ) -> PropResult { +// Self::find_prop_by_path_opt(ctx, schema_variant_id, path) +// .await? +// .ok_or(PropError::NotFoundAtPath( +// path.to_string(), +// *ctx.visibility(), +// )) +// } + +// /// Finds a prop by a path made up of prop names separated by +// /// [`PROP_PATH_SEPARATOR`](crate::prop::PROP_PATH_SEPARATOR) for each depth level +// pub async fn find_prop_by_path_opt( +// ctx: &DalContext, +// schema_variant_id: SchemaVariantId, +// path: &PropPath, +// ) -> PropResult> { +// let row = ctx +// .txns() +// .await? +// .pg() +// .query_opt( +// FIND_PROP_IN_TREE, +// &[ +// ctx.tenancy(), +// ctx.visibility(), +// &schema_variant_id, +// &path.as_str(), +// ], +// ) +// .await?; + +// Ok(object_option_from_row_option(row)?) +// } + +// pub async fn create_default_prototypes_and_values( +// ctx: &DalContext, +// prop_id: PropId, +// ) -> PropResult<()> { +// #[derive(Debug)] +// struct WorkItem { +// maybe_parent: Option, +// prop: Prop, +// } + +// let mut root_prop = Prop::get_by_id(ctx, &prop_id) +// .await? 
+// .ok_or_else(|| PropError::NotFound(prop_id, *ctx.visibility()))?; + +// // We should make sure that we're creating AttributePrototypes & AttributeValues +// // contiguously from the root. +// while let Some(parent) = root_prop.parent_prop(ctx).await? { +// root_prop = parent; +// } + +// let mut work_queue: VecDeque = VecDeque::from(vec![WorkItem { +// maybe_parent: None, +// prop: root_prop, +// }]); + +// let func_name = "si:unset".to_string(); +// let mut funcs = Func::find_by_attr(ctx, "name", &func_name).await?; +// let func = funcs.pop().ok_or(PropError::MissingFunc(func_name))?; + +// // No matter what, we need a FuncBindingReturnValueId to create a new attribute prototype. +// // If the func binding was created, we execute on it to generate our value id. Otherwise, +// // we try to find a value by id and then fallback to executing anyway if one was not found. +// let (func_binding, func_binding_return_value) = +// FuncBinding::create_and_execute(ctx, serde_json::json![null], *func.id(), vec![]) +// .await?; + +// while let Some(WorkItem { maybe_parent, prop }) = work_queue.pop_front() { +// let attribute_context = AttributeContext::builder() +// .set_prop_id(*prop.id()) +// .to_context()?; + +// let attribute_value = if let Some(attribute_value) = +// AttributeValue::find_for_context(ctx, attribute_context.into()).await? +// { +// attribute_value +// } else { +// AttributePrototype::new( +// ctx, +// *func.id(), +// *func_binding.id(), +// *func_binding_return_value.id(), +// attribute_context, +// None, +// maybe_parent, +// ) +// .await?; + +// AttributeValue::find_for_context(ctx, attribute_context.into()) +// .await? +// .ok_or(AttributeValueError::NotFoundForReadContext( +// attribute_context.into(), +// ))? 
+// }; + +// if *prop.kind() == PropKind::Object { +// let child_props = prop.child_props(ctx).await?; +// if !child_props.is_empty() { +// work_queue.extend(child_props.iter().map(|p| WorkItem { +// maybe_parent: Some(*attribute_value.id()), +// prop: p.clone(), +// })); +// } +// } +// } + +// Ok(()) +// } + +// pub async fn set_default_value( +// &self, +// ctx: &DalContext, +// value: T, +// ) -> PropResult<()> { +// let value = serde_json::to_value(value)?; +// match self.kind() { +// PropKind::String | PropKind::Boolean | PropKind::Integer => { +// let attribute_read_context = AttributeReadContext::default_with_prop(self.id); +// let attribute_value = AttributeValue::find_for_context(ctx, attribute_read_context) +// .await? +// .ok_or(AttributeValueError::NotFoundForReadContext( +// attribute_read_context, +// ))?; +// let parent_attribute_value = attribute_value +// .parent_attribute_value(ctx) +// .await? +// .ok_or_else(|| AttributeValueError::ParentNotFound(*attribute_value.id()))?; + +// // Ensure the parent project is an object. Technically, we should ensure that every +// // prop in entire lineage is of kind object, but this should (hopefully) suffice +// // for now. Ideally, this would be handled in a query. +// let parent_prop = Prop::get_by_id(ctx, &parent_attribute_value.context.prop_id()) +// .await? 
+// .ok_or_else(|| { +// PropError::NotFound( +// parent_attribute_value.context.prop_id(), +// *ctx.visibility(), +// ) +// })?; +// if parent_prop.kind() != &PropKind::Object { +// return Err(PropError::ParentPropIsNotObjectForPropWithDefaultValue( +// *parent_prop.kind(), +// )); +// } + +// let context = AttributeContextBuilder::from(attribute_read_context).to_context()?; +// AttributeValue::update_for_context( +// ctx, +// *attribute_value.id(), +// Some(*parent_attribute_value.id()), +// context, +// Some(value), +// None, +// ) +// .await?; +// Ok(()) +// } +// _ => Err(PropError::SetDefaultForNonScalar(*self.kind())), +// } +// } + +// pub async fn set_default_diff(&mut self, ctx: &DalContext) -> PropResult<()> { +// let func = Func::find_by_attr(ctx, "name", &"si:diff") +// .await? +// .pop() +// .ok_or(PropError::DefaultDiffFunctionNotFound)?; +// self.set_diff_func_id(ctx, Some(*func.id())).await +// } +// } diff --git a/lib/dal/src/property_editor.rs b/lib/dal/src/property_editor.rs index 67f6f638f6..c640d2622c 100644 --- a/lib/dal/src/property_editor.rs +++ b/lib/dal/src/property_editor.rs @@ -6,39 +6,27 @@ use serde::{Deserialize, Serialize}; use si_data_pg::PgError; use thiserror::Error; -use crate::{ - pk, schema::variant::SchemaVariantError, AttributeValueError, AttributeValueId, ComponentError, - PropError, PropId, SchemaVariantId, StandardModelError, TransactionsError, - ValidationResolverError, -}; +use crate::{pk, AttributeValueId, PropId, SchemaVariantId, StandardModelError, TransactionsError}; pub mod schema; -pub mod validations; -pub mod values; +// pub mod validations; +// pub mod values; #[remain::sorted] #[derive(Error, Debug)] pub enum PropertyEditorError { - #[error("attribute value error: {0}")] - AttributeValue(#[from] AttributeValueError), #[error("invalid AttributeReadContext: {0}")] BadAttributeReadContext(String), - #[error("component error: {0}")] - Component(#[from] ComponentError), #[error("component not found")] 
ComponentNotFound, #[error("no value(s) found for property editor prop id: {0}")] NoValuesFoundForPropertyEditorProp(PropertyEditorPropId), #[error("pg error: {0}")] Pg(#[from] PgError), - #[error("prop error: {0}")] - Prop(#[from] PropError), #[error("prop not found for id: {0}")] PropNotFound(PropId), #[error("root prop not found for schema variant")] RootPropNotFound, - #[error("schema variant: {0}")] - SchemaVariant(#[from] SchemaVariantError), #[error("schema variant not found: {0}")] SchemaVariantNotFound(SchemaVariantId), #[error("error serializing/deserializing json: {0}")] @@ -49,8 +37,6 @@ pub enum PropertyEditorError { TooManyValuesFoundForPropertyEditorProp(PropertyEditorPropId), #[error("transactions error: {0}")] Transactions(#[from] TransactionsError), - #[error("validation resolver error: {0}")] - ValidationResolver(#[from] ValidationResolverError), } pub type PropertyEditorResult = Result; diff --git a/lib/dal/src/property_editor/schema.rs b/lib/dal/src/property_editor/schema.rs index e9742404bd..f1576d2c00 100644 --- a/lib/dal/src/property_editor/schema.rs +++ b/lib/dal/src/property_editor/schema.rs @@ -3,108 +3,107 @@ use serde::{Deserialize, Serialize}; use serde_json::Value; -use std::collections::HashMap; + use strum::{AsRefStr, Display, EnumString}; use si_pkg::PropSpecWidgetKind; -use crate::property_editor::{PropertyEditorError, PropertyEditorPropId, PropertyEditorResult}; -use crate::{DalContext, Prop, PropKind, SchemaVariant, SchemaVariantId, StandardModel}; - -const PROPERTY_EDITOR_SCHEMA_FOR_SCHEMA_VARIANT: &str = - include_str!("../queries/property_editor_schema_for_schema_variant.sql"); - -#[derive(Clone, Debug, Serialize, Deserialize)] -#[serde(rename_all = "camelCase")] -pub struct PropertyEditorSchema { - pub root_prop_id: PropertyEditorPropId, - pub props: HashMap, - pub child_props: HashMap>, -} - -impl PropertyEditorSchema { - pub async fn for_schema_variant( - ctx: &DalContext, - schema_variant_id: SchemaVariantId, - ) -> 
PropertyEditorResult { - let schema_variant = SchemaVariant::get_by_id(ctx, &schema_variant_id) - .await? - .ok_or(PropertyEditorError::SchemaVariantNotFound( - schema_variant_id, - ))?; - let mut props: HashMap = HashMap::new(); - let mut child_props: HashMap> = - HashMap::new(); - - let rows = ctx - .txns() - .await? - .pg() - .query( - PROPERTY_EDITOR_SCHEMA_FOR_SCHEMA_VARIANT, - &[ctx.tenancy(), ctx.visibility(), &schema_variant.id()], - ) - .await?; - - for row in rows { - let json: Value = row.try_get("object")?; - let prop: Prop = serde_json::from_value(json)?; - // Omit any secret definition props in the result - if prop - .json_pointer(ctx) - .await? - .starts_with("/root/secret_definition") - { - continue; - } - let property_editor_prop = PropertyEditorProp::new(prop); - - let maybe_child_prop_ids: Option> = - row.try_get("child_prop_ids")?; - if let Some(child_prop_ids) = maybe_child_prop_ids { - child_props.insert(property_editor_prop.id, child_prop_ids); - } - - props.insert(property_editor_prop.id, property_editor_prop); - } - - let root_prop_id = schema_variant - .root_prop_id() - .ok_or(PropertyEditorError::RootPropNotFound)?; - Ok(PropertyEditorSchema { - root_prop_id: (*root_prop_id).into(), - props, - child_props, - }) - } -} - -#[derive(Clone, Debug, Serialize, Deserialize)] -#[serde(rename_all = "camelCase")] -pub struct PropertyEditorProp { - pub id: PropertyEditorPropId, - pub name: String, - pub kind: PropertyEditorPropKind, - pub widget_kind: PropertyEditorPropWidgetKind, - pub doc_link: Option, - pub documentation: Option, -} - -impl PropertyEditorProp { - pub fn new(prop: Prop) -> PropertyEditorProp { - PropertyEditorProp { - id: (*prop.id()).into(), - name: prop.name().into(), - kind: prop.kind().into(), - widget_kind: PropertyEditorPropWidgetKind::new( - *prop.widget_kind(), - prop.widget_options().map(|v| v.to_owned()), - ), - doc_link: prop.doc_link().map(Into::into), - documentation: prop.documentation().map(Into::into), - } - } -} 
+use crate::{PropKind, StandardModel}; + +// const PROPERTY_EDITOR_SCHEMA_FOR_SCHEMA_VARIANT: &str = +// include_str!("../queries/property_editor_schema_for_schema_variant.sql"); + +// #[derive(Clone, Debug, Serialize, Deserialize)] +// #[serde(rename_all = "camelCase")] +// pub struct PropertyEditorSchema { +// pub root_prop_id: PropertyEditorPropId, +// pub props: HashMap, +// pub child_props: HashMap>, +// } + +// impl PropertyEditorSchema { +// pub async fn for_schema_variant( +// ctx: &DalContext, +// schema_variant_id: SchemaVariantId, +// ) -> PropertyEditorResult { +// let schema_variant = SchemaVariant::get_by_id(ctx, &schema_variant_id) +// .await? +// .ok_or(PropertyEditorError::SchemaVariantNotFound( +// schema_variant_id, +// ))?; +// let mut props: HashMap = HashMap::new(); +// let mut child_props: HashMap> = +// HashMap::new(); + +// let rows = ctx +// .txns() +// .await? +// .pg() +// .query( +// PROPERTY_EDITOR_SCHEMA_FOR_SCHEMA_VARIANT, +// &[ctx.tenancy(), ctx.visibility(), &schema_variant.id()], +// ) +// .await?; + +// for row in rows { +// let json: Value = row.try_get("object")?; +// let prop: Prop = serde_json::from_value(json)?; +// // Omit any secret definition props in the result +// if prop +// .json_pointer(ctx) +// .await? 
+// .starts_with("/root/secret_definition") +// { +// continue; +// } +// let property_editor_prop = PropertyEditorProp::new(prop); + +// let maybe_child_prop_ids: Option> = +// row.try_get("child_prop_ids")?; +// if let Some(child_prop_ids) = maybe_child_prop_ids { +// child_props.insert(property_editor_prop.id, child_prop_ids); +// } + +// props.insert(property_editor_prop.id, property_editor_prop); +// } + +// let root_prop_id = schema_variant +// .root_prop_id() +// .ok_or(PropertyEditorError::RootPropNotFound)?; +// Ok(PropertyEditorSchema { +// root_prop_id: (*root_prop_id).into(), +// props, +// child_props, +// }) +// } +// } + +// #[derive(Clone, Debug, Serialize, Deserialize)] +// #[serde(rename_all = "camelCase")] +// pub struct PropertyEditorProp { +// pub id: PropertyEditorPropId, +// pub name: String, +// pub kind: PropertyEditorPropKind, +// pub widget_kind: PropertyEditorPropWidgetKind, +// pub doc_link: Option, +// pub documentation: Option, +// } + +// impl PropertyEditorProp { +// pub fn new(prop: Prop) -> PropertyEditorProp { +// PropertyEditorProp { +// id: (*prop.id()).into(), +// name: prop.name().into(), +// kind: prop.kind().into(), +// widget_kind: PropertyEditorPropWidgetKind::new( +// *prop.widget_kind(), +// prop.widget_options().map(|v| v.to_owned()), +// ), +// doc_link: prop.doc_link().map(Into::into), +// documentation: prop.documentation().map(Into::into), +// } +// } +// } #[remain::sorted] #[derive(Clone, Copy, Debug, Serialize, Deserialize)] diff --git a/lib/dal/src/provider/external.rs b/lib/dal/src/provider/external.rs index d23395565e..96fee33fcb 100644 --- a/lib/dal/src/provider/external.rs +++ b/lib/dal/src/provider/external.rs @@ -1,99 +1,37 @@ +use content_store::ContentHash; use serde::{Deserialize, Serialize}; use si_data_pg::PgError; use std::collections::HashMap; +use strum::EnumDiscriminants; use telemetry::prelude::*; -use thiserror::Error; -use crate::func::binding::FuncBindingId; -use 
crate::func::binding_return_value::FuncBindingReturnValueId; -use crate::socket::{Socket, SocketArity, SocketEdgeKind, SocketError, SocketId, SocketKind}; -use crate::{ - impl_standard_model, pk, standard_model, standard_model_accessor, standard_model_accessor_ro, - standard_model_has_many, AttributePrototype, AttributePrototypeError, ComponentId, DiagramKind, - FuncId, HistoryEventError, InternalProviderId, StandardModel, StandardModelError, Tenancy, - Timestamp, TransactionsError, Visibility, -}; -use crate::{ - AttributeContext, AttributeContextBuilderError, AttributeContextError, AttributePrototypeId, - DalContext, SchemaId, SchemaVariantId, -}; +use crate::workspace_snapshot::content_address::ContentAddress; +use crate::{pk, StandardModel, Timestamp}; +use crate::{AttributePrototypeId, SchemaVariantId}; -const BY_SOCKET: &str = include_str!("../queries/external_provider/by_socket.sql"); -const LIST_FOR_ATTRIBUTE_PROTOTYPE_WITH_TAIL_COMPONENT_ID: &str = include_str!( - "../queries/external_provider/list_for_attribute_prototype_with_tail_component_id.sql" -); -const FIND_FOR_SCHEMA_VARIANT_AND_NAME: &str = - include_str!("../queries/external_provider/find_for_schema_variant_and_name.sql"); -const FIND_FOR_SOCKET: &str = include_str!("../queries/external_provider/find_for_socket.sql"); -const LIST_FOR_SCHEMA_VARIANT: &str = - include_str!("../queries/external_provider/list_for_schema_variant.sql"); -const LIST_FROM_INTERNAL_PROVIDER_USE: &str = - include_str!("../queries/external_provider/list_from_internal_provider_use.sql"); +// const BY_SOCKET: &str = include_str!("../queries/external_provider/by_socket.sql"); +// const LIST_FOR_ATTRIBUTE_PROTOTYPE_WITH_TAIL_COMPONENT_ID: &str = include_str!( +// "../queries/external_provider/list_for_attribute_prototype_with_tail_component_id.sql" +// ); +// const FIND_FOR_SCHEMA_VARIANT_AND_NAME: &str = +// include_str!("../queries/external_provider/find_for_schema_variant_and_name.sql"); +// const FIND_FOR_SOCKET: &str = 
include_str!("../queries/external_provider/find_for_socket.sql"); +// const LIST_FOR_SCHEMA_VARIANT: &str = +// include_str!("../queries/external_provider/list_for_schema_variant.sql"); +// const LIST_FROM_INTERNAL_PROVIDER_USE: &str = +// include_str!("../queries/external_provider/list_from_internal_provider_use.sql"); -#[remain::sorted] -#[derive(Error, Debug)] -pub enum ExternalProviderError { - #[error("attribute context error: {0}")] - AttributeContext(#[from] AttributeContextError), - #[error("attribute context builder error: {0}")] - AttributeContextBuilder(#[from] AttributeContextBuilderError), - #[error("attribute prototype error: {0}")] - AttributePrototype(#[from] AttributePrototypeError), - #[error("unexpected: attribute prototype field is empty")] - EmptyAttributePrototype, - #[error("history event error: {0}")] - HistoryEvent(#[from] HistoryEventError), - #[error("not found for id: {0}")] - NotFound(ExternalProviderId), - #[error("not found for socket name: {0}")] - NotFoundForSocketName(String), - #[error("pg error: {0}")] - Pg(#[from] PgError), - #[error("schema id mismatch: {0} (self) and {1} (provided)")] - SchemaMismatch(SchemaId, SchemaId), - #[error("schema variant error: {0}")] - SchemaVariant(String), - #[error("schema variant id mismatch: {0} (self) and {1} (provided)")] - SchemaVariantMismatch(SchemaVariantId, SchemaVariantId), - #[error("serde: {0}")] - Serde(#[from] serde_json::Error), - #[error("socket error: {0}")] - Socket(#[from] SocketError), - #[error("standard model error: {0}")] - StandardModelError(#[from] StandardModelError), - #[error("transactions error: {0}")] - Transactions(#[from] TransactionsError), -} - -pub type ExternalProviderResult = Result; - -pk!(ExternalProviderPk); pk!(ExternalProviderId); -impl_standard_model! 
{ - model: ExternalProvider, - pk: ExternalProviderPk, - id: ExternalProviderId, - table_name: "external_providers", - history_event_label_base: "external_provider", - history_event_message_name: "External Provider" -} - /// This provider can only provide data to external [`SchemaVariants`](crate::SchemaVariant). It can /// only consume data within its own [`SchemaVariant`](crate::SchemaVariant). #[derive(Deserialize, Serialize, Debug, Clone, PartialEq, Eq)] pub struct ExternalProvider { - pk: ExternalProviderPk, id: ExternalProviderId, + #[serde(flatten)] - tenancy: Tenancy, - #[serde(flatten)] - visibility: Visibility, - #[serde(flatten)] - timestamp: Timestamp, + pub timestamp: Timestamp, - /// Indicates which [`Schema`](crate::Schema) this provider belongs to. - schema_id: SchemaId, /// Indicates which [`SchemaVariant`](crate::SchemaVariant) this provider belongs to. schema_variant_id: SchemaVariantId, /// Indicates which transformation function should be used for "emit". @@ -105,221 +43,264 @@ pub struct ExternalProvider { type_definition: Option, } -impl ExternalProvider { - /// This function will also create an _output_ [`Socket`](crate::Socket). - #[allow(clippy::too_many_arguments)] - #[tracing::instrument(skip(ctx, name))] - pub async fn new_with_socket( - ctx: &DalContext, - schema_id: SchemaId, - schema_variant_id: SchemaVariantId, - name: impl AsRef, - type_definition: Option, - func_id: FuncId, - func_binding_id: FuncBindingId, - func_binding_return_value_id: FuncBindingReturnValueId, - arity: SocketArity, - frame_socket: bool, - ) -> ExternalProviderResult<(Self, Socket)> { - let name = name.as_ref(); - let row = ctx - .txns() - .await? 
- .pg() - .query_one( - "SELECT object FROM external_provider_create_v1($1, $2, $3, $4, $5, $6)", - &[ - ctx.tenancy(), - ctx.visibility(), - &schema_id, - &schema_variant_id, - &name, - &type_definition, - ], - ) - .await?; +#[derive(Debug, PartialEq)] +pub struct ExternalProviderGraphNode { + id: ExternalProviderId, + content_address: ContentAddress, + content: ExternalProviderContentV1, +} + +#[derive(EnumDiscriminants, Serialize, Deserialize, PartialEq)] +#[serde(tag = "version")] +pub enum ExternalProviderContent { + V1(ExternalProviderContentV1), +} - let mut external_provider: ExternalProvider = - standard_model::finish_create_from_row(ctx, row).await?; +#[derive(Debug, Clone, Deserialize, Serialize, PartialEq)] +pub struct ExternalProviderContentV1 { + #[serde(flatten)] + pub timestamp: Timestamp, - let attribute_context = AttributeContext::builder() - .set_external_provider_id(external_provider.id) - .to_context()?; - let attribute_prototype = AttributePrototype::new( - ctx, - func_id, - func_binding_id, - func_binding_return_value_id, - attribute_context, - None, - None, - ) - .await?; - external_provider - .set_attribute_prototype_id(ctx, Some(*attribute_prototype.id())) - .await?; + /// Indicates which [`SchemaVariant`](crate::SchemaVariant) this provider belongs to. + pub schema_variant_id: SchemaVariantId, + /// Indicates which transformation function should be used for "emit". + pub attribute_prototype_id: Option, - let socket = Socket::new( - ctx, - name, - match frame_socket { - true => SocketKind::Frame, - false => SocketKind::Provider, - }, - &SocketEdgeKind::ConfigurationOutput, - &arity, - &DiagramKind::Configuration, - Some(schema_variant_id), - ) - .await?; - socket - .set_external_provider(ctx, external_provider.id()) - .await?; + /// Name for [`Self`] that can be used for identification. + pub name: String, + /// Definition of the data type (e.g. "JSONSchema" or "Number"). 
+ pub type_definition: Option, +} - Ok((external_provider, socket)) +impl ExternalProviderGraphNode { + pub fn assemble( + id: impl Into, + content_hash: ContentHash, + content: ExternalProviderContentV1, + ) -> Self { + Self { + id: id.into(), + content_address: ContentAddress::ExternalProvider(content_hash), + content, + } } +} - // Immutable fields. - standard_model_accessor_ro!(schema_id, SchemaId); - standard_model_accessor_ro!(schema_variant_id, SchemaVariantId); +// impl ExternalProvider { +// /// This function will also create an _output_ [`Socket`](crate::Socket). +// #[allow(clippy::too_many_arguments)] +// #[tracing::instrument(skip(ctx, name))] +// pub async fn new_with_socket( +// ctx: &DalContext, +// schema_id: SchemaId, +// schema_variant_id: SchemaVariantId, +// name: impl AsRef, +// type_definition: Option, +// func_id: FuncId, +// func_binding_id: FuncBindingId, +// func_binding_return_value_id: FuncBindingReturnValueId, +// arity: SocketArity, +// frame_socket: bool, +// ) -> ExternalProviderResult<(Self, Socket)> { +// let name = name.as_ref(); +// let row = ctx +// .txns() +// .await? +// .pg() +// .query_one( +// "SELECT object FROM external_provider_create_v1($1, $2, $3, $4, $5, $6)", +// &[ +// ctx.tenancy(), +// ctx.visibility(), +// &schema_id, +// &schema_variant_id, +// &name, +// &type_definition, +// ], +// ) +// .await?; - // Mutable fields. 
- standard_model_accessor!(name, String, ExternalProviderResult); - standard_model_accessor!(type_definition, Option, ExternalProviderResult); - standard_model_accessor!( - attribute_prototype_id, - Option, - ExternalProviderResult - ); +// let mut external_provider: ExternalProvider = +// standard_model::finish_create_from_row(ctx, row).await?; - // This is a 1-1 relationship, so the Vec should be 1 - standard_model_has_many!( - lookup_fn: sockets, - table: "socket_belongs_to_external_provider", - model_table: "sockets", - returns: Socket, - result: ExternalProviderResult, - ); +// let attribute_context = AttributeContext::builder() +// .set_external_provider_id(external_provider.id) +// .to_context()?; +// let attribute_prototype = AttributePrototype::new( +// ctx, +// func_id, +// func_binding_id, +// func_binding_return_value_id, +// attribute_context, +// None, +// None, +// ) +// .await?; +// external_provider +// .set_attribute_prototype_id(ctx, Some(*attribute_prototype.id())) +// .await?; - /// Find all [`Self`] for a given [`SchemaVariant`](crate::SchemaVariant). - #[tracing::instrument(skip(ctx))] - pub async fn list_for_schema_variant( - ctx: &DalContext, - schema_variant_id: SchemaVariantId, - ) -> ExternalProviderResult> { - let rows = ctx - .txns() - .await? - .pg() - .query( - LIST_FOR_SCHEMA_VARIANT, - &[ctx.tenancy(), ctx.visibility(), &schema_variant_id], - ) - .await?; - Ok(standard_model::objects_from_rows(rows)?) - } +// let socket = Socket::new( +// ctx, +// name, +// match frame_socket { +// true => SocketKind::Frame, +// false => SocketKind::Provider, +// }, +// &SocketEdgeKind::ConfigurationOutput, +// &arity, +// &DiagramKind::Configuration, +// Some(schema_variant_id), +// ) +// .await?; +// socket +// .set_external_provider(ctx, external_provider.id()) +// .await?; - /// Find [`Self`] with a provided [`SocketId`](crate::Socket). 
- #[instrument(skip_all)] - pub async fn find_for_socket( - ctx: &DalContext, - socket_id: SocketId, - ) -> ExternalProviderResult> { - let row = ctx - .txns() - .await? - .pg() - .query_opt( - FIND_FOR_SOCKET, - &[ctx.tenancy(), ctx.visibility(), &socket_id], - ) - .await?; - Ok(standard_model::object_option_from_row_option(row)?) - } +// Ok((external_provider, socket)) +// } - /// Find [`Self`] with a provided name, which is not only the name of [`Self`], but also of the - /// associated _output_ [`Socket`](crate::Socket). - #[instrument(skip_all)] - pub async fn find_for_schema_variant_and_name( - ctx: &DalContext, - schema_variant_id: SchemaVariantId, - name: impl AsRef, - ) -> ExternalProviderResult> { - let name = name.as_ref(); - let row = ctx - .txns() - .await? - .pg() - .query_opt( - FIND_FOR_SCHEMA_VARIANT_AND_NAME, - &[ctx.tenancy(), ctx.visibility(), &schema_variant_id, &name], - ) - .await?; - Ok(standard_model::object_option_from_row_option(row)?) - } +// // Immutable fields. +// standard_model_accessor_ro!(schema_id, SchemaId); +// standard_model_accessor_ro!(schema_variant_id, SchemaVariantId); - /// Find all [`Self`] for a given [`AttributePrototypeId`](crate::AttributePrototype). - #[tracing::instrument(skip(ctx))] - pub async fn list_for_attribute_prototype_with_tail_component_id( - ctx: &DalContext, - attribute_prototype_id: AttributePrototypeId, - tail_component_id: ComponentId, - ) -> ExternalProviderResult> { - let rows = ctx - .txns() - .await? - .pg() - .query( - LIST_FOR_ATTRIBUTE_PROTOTYPE_WITH_TAIL_COMPONENT_ID, - &[ - ctx.tenancy(), - ctx.visibility(), - &attribute_prototype_id, - &tail_component_id, - ], - ) - .await?; - Ok(standard_model::objects_from_rows(rows)?) - } +// // Mutable fields. 
+// standard_model_accessor!(name, String, ExternalProviderResult); +// standard_model_accessor!(type_definition, Option, ExternalProviderResult); +// standard_model_accessor!( +// attribute_prototype_id, +// Option, +// ExternalProviderResult +// ); - /// Find all [`Self`] that have - /// [`AttributePrototypeArguments`](crate::AttributePrototypeArgument) referencing the provided - /// [`InternalProviderId`](crate::InternalProvider). - #[tracing::instrument(skip(ctx))] - pub async fn list_from_internal_provider_use( - ctx: &DalContext, - internal_provider_id: InternalProviderId, - ) -> ExternalProviderResult> { - let rows = ctx - .txns() - .await? - .pg() - .query( - LIST_FROM_INTERNAL_PROVIDER_USE, - &[ctx.tenancy(), ctx.visibility(), &internal_provider_id], - ) - .await?; - Ok(standard_model::objects_from_rows(rows)?) - } +// // This is a 1-1 relationship, so the Vec should be 1 +// standard_model_has_many!( +// lookup_fn: sockets, +// table: "socket_belongs_to_external_provider", +// model_table: "sockets", +// returns: Socket, +// result: ExternalProviderResult, +// ); - #[tracing::instrument(skip(ctx))] - pub async fn by_socket(ctx: &DalContext) -> ExternalProviderResult> { - let rows = ctx - .txns() - .await? - .pg() - .query(BY_SOCKET, &[ctx.tenancy(), ctx.visibility()]) - .await?; +// /// Find all [`Self`] for a given [`SchemaVariant`](crate::SchemaVariant). +// #[tracing::instrument(skip(ctx))] +// pub async fn list_for_schema_variant( +// ctx: &DalContext, +// schema_variant_id: SchemaVariantId, +// ) -> ExternalProviderResult> { +// let rows = ctx +// .txns() +// .await? +// .pg() +// .query( +// LIST_FOR_SCHEMA_VARIANT, +// &[ctx.tenancy(), ctx.visibility(), &schema_variant_id], +// ) +// .await?; +// Ok(standard_model::objects_from_rows(rows)?) +// } - let mut objects: HashMap = HashMap::new(); - for row in rows.into_iter() { - let id: SocketId = row.try_get(0)?; +// /// Find [`Self`] with a provided [`SocketId`](crate::Socket). 
+// #[instrument(skip_all)] +// pub async fn find_for_socket( +// ctx: &DalContext, +// socket_id: SocketId, +// ) -> ExternalProviderResult> { +// let row = ctx +// .txns() +// .await? +// .pg() +// .query_opt( +// FIND_FOR_SOCKET, +// &[ctx.tenancy(), ctx.visibility(), &socket_id], +// ) +// .await?; +// Ok(standard_model::object_option_from_row_option(row)?) +// } - let object: serde_json::Value = row.try_get(1)?; - let object: Self = serde_json::from_value(object)?; +// /// Find [`Self`] with a provided name, which is not only the name of [`Self`], but also of the +// /// associated _output_ [`Socket`](crate::Socket). +// #[instrument(skip_all)] +// pub async fn find_for_schema_variant_and_name( +// ctx: &DalContext, +// schema_variant_id: SchemaVariantId, +// name: impl AsRef, +// ) -> ExternalProviderResult> { +// let name = name.as_ref(); +// let row = ctx +// .txns() +// .await? +// .pg() +// .query_opt( +// FIND_FOR_SCHEMA_VARIANT_AND_NAME, +// &[ctx.tenancy(), ctx.visibility(), &schema_variant_id, &name], +// ) +// .await?; +// Ok(standard_model::object_option_from_row_option(row)?) +// } - objects.insert(id, object); - } - Ok(objects) - } -} +// /// Find all [`Self`] for a given [`AttributePrototypeId`](crate::AttributePrototype). +// #[tracing::instrument(skip(ctx))] +// pub async fn list_for_attribute_prototype_with_tail_component_id( +// ctx: &DalContext, +// attribute_prototype_id: AttributePrototypeId, +// tail_component_id: ComponentId, +// ) -> ExternalProviderResult> { +// let rows = ctx +// .txns() +// .await? +// .pg() +// .query( +// LIST_FOR_ATTRIBUTE_PROTOTYPE_WITH_TAIL_COMPONENT_ID, +// &[ +// ctx.tenancy(), +// ctx.visibility(), +// &attribute_prototype_id, +// &tail_component_id, +// ], +// ) +// .await?; +// Ok(standard_model::objects_from_rows(rows)?) 
+// } + +// /// Find all [`Self`] that have +// /// [`AttributePrototypeArguments`](crate::AttributePrototypeArgument) referencing the provided +// /// [`InternalProviderId`](crate::InternalProvider). +// #[tracing::instrument(skip(ctx))] +// pub async fn list_from_internal_provider_use( +// ctx: &DalContext, +// internal_provider_id: InternalProviderId, +// ) -> ExternalProviderResult> { +// let rows = ctx +// .txns() +// .await? +// .pg() +// .query( +// LIST_FROM_INTERNAL_PROVIDER_USE, +// &[ctx.tenancy(), ctx.visibility(), &internal_provider_id], +// ) +// .await?; +// Ok(standard_model::objects_from_rows(rows)?) +// } + +// #[tracing::instrument(skip(ctx))] +// pub async fn by_socket(ctx: &DalContext) -> ExternalProviderResult> { +// let rows = ctx +// .txns() +// .await? +// .pg() +// .query(BY_SOCKET, &[ctx.tenancy(), ctx.visibility()]) +// .await?; + +// let mut objects: HashMap = HashMap::new(); +// for row in rows.into_iter() { +// let id: SocketId = row.try_get(0)?; + +// let object: serde_json::Value = row.try_get(1)?; +// let object: Self = serde_json::from_value(object)?; + +// objects.insert(id, object); +// } +// Ok(objects) +// } +// } diff --git a/lib/dal/src/provider/internal.rs b/lib/dal/src/provider/internal.rs index e576e1afd7..7499e7ffc0 100644 --- a/lib/dal/src/provider/internal.rs +++ b/lib/dal/src/provider/internal.rs @@ -67,128 +67,30 @@ //! This design also lets us cache the view of a [`Prop`](crate::Prop) and its children rather //! than directly observing the real time values frequently. 
+use content_store::ContentHash; use serde::{Deserialize, Serialize}; -use thiserror::Error; - -use si_data_pg::PgError; -use std::collections::HashMap; -use telemetry::prelude::*; - -use crate::attribute::context::AttributeContextBuilder; -use crate::func::backend::identity::FuncBackendIdentityArgs; -use crate::func::binding::{FuncBindingError, FuncBindingId}; -use crate::func::binding_return_value::FuncBindingReturnValueId; -use crate::socket::{Socket, SocketArity, SocketEdgeKind, SocketError, SocketId, SocketKind}; -use crate::standard_model::object_option_from_row_option; -use crate::{ - impl_standard_model, pk, standard_model, standard_model_accessor, standard_model_accessor_ro, - AttributeContextBuilderError, AttributePrototype, AttributePrototypeError, - AttributePrototypeId, AttributeReadContext, AttributeValueError, AttributeView, DiagramKind, - FuncError, FuncId, HistoryEventError, Prop, PropError, StandardModel, StandardModelError, - Tenancy, Timestamp, TransactionsError, Visibility, -}; -use crate::{ - standard_model_has_many, AttributeContext, AttributeContextError, AttributeValue, DalContext, - Func, FuncBinding, PropId, SchemaId, SchemaVariantId, -}; -use crate::{Component, ComponentId}; - -const BY_SOCKET: &str = include_str!("../queries/internal_provider/by_socket.sql"); -const FIND_EXPLICIT_FOR_SCHEMA_VARIANT_AND_NAME: &str = - include_str!("../queries/internal_provider/find_explicit_for_schema_variant_and_name.sql"); -const FIND_FOR_PROP: &str = include_str!("../queries/internal_provider/find_for_prop.sql"); -const FIND_EXPLICIT_FOR_SOCKET: &str = - include_str!("../queries/internal_provider/find_explicit_for_socket.sql"); -const LIST_FOR_SCHEMA_VARIANT: &str = - include_str!("../queries/internal_provider/list_for_schema_variant.sql"); -const LIST_EXPLICIT_FOR_SCHEMA_VARIANT: &str = - include_str!("../queries/internal_provider/list_explicit_for_schema_variant.sql"); -const LIST_FOR_ATTRIBUTE_PROTOTYPE: &str = - 
include_str!("../queries/internal_provider/list_for_attribute_prototype.sql"); -const LIST_FOR_INPUT_SOCKETS: &str = - include_str!("../queries/internal_provider/list_for_input_sockets_for_all_schema_variants.sql"); - -#[remain::sorted] -#[derive(Error, Debug)] -pub enum InternalProviderError { - #[error("attribute context error: {0}")] - AttributeContext(#[from] AttributeContextError), - #[error("attribute context builder error: {0}")] - AttributeContextBuilder(#[from] AttributeContextBuilderError), - #[error("attribute prototype error: {0}")] - AttributePrototype(#[from] AttributePrototypeError), - #[error("attribute prototype not found for id: {0}")] - AttributePrototypeNotFound(AttributePrototypeId), - #[error("attribute value error: {0}")] - AttributeValue(#[from] AttributeValueError), - #[error("could not find attribute value for attribute context: {0:?}")] - AttributeValueNotFoundForContext(AttributeContext), - #[error("component error: {0}")] - Component(String), - #[error("component not found by id: {0}")] - ComponentNotFound(ComponentId), - #[error("unexpected: attribute prototype field is empty")] - EmptyAttributePrototype, - #[error("func error: {0}")] - Func(#[from] FuncError), - #[error("func binding error: {0}")] - FuncBinding(#[from] FuncBindingError), - #[error("func not found for id: {0}")] - FuncNotFound(FuncId), - #[error("history event error: {0}")] - HistoryEvent(#[from] HistoryEventError), - #[error("not allowed to perform implicit emit as an explicit internal provider")] - ImplicitEmitForExplicitProviderNotAllowed, - #[error("missing func")] - MissingFunc(String), - #[error("provided attribute context does not specify a PropId (required for implicit emit)")] - MissingPropForImplicitEmit, - #[error("not found for id: {0}")] - NotFound(InternalProviderId), - #[error("internal provider not found for prop id: {0}")] - NotFoundForProp(PropId), - #[error("internal provider not found for socket id: {0}")] - NotFoundForSocket(SocketId), - 
#[error("internal provider not found for prop socket name: {0}")] - NotFoundForSocketName(String), - #[error("pg error: {0}")] - Pg(#[from] PgError), - #[error("prop error: {0}")] - Prop(#[from] PropError), - #[error("prop not found for id: {0}")] - PropNotFound(PropId), - #[error("root prop not found for schema variant: {0}")] - RootPropNotFound(SchemaVariantId), - #[error("schema id mismatch: {0} (self) and {1} (provided)")] - SchemaMismatch(SchemaId, SchemaId), - #[error("schema variant error: {0}")] - SchemaVariant(String), - #[error("schema variant id mismatch: {0} (self) and {1} (provided)")] - SchemaVariantMismatch(SchemaVariantId, SchemaVariantId), - #[error("serde_json error: {0}")] - SerdeJson(#[from] serde_json::Error), - #[error("socket error: {0}")] - Socket(#[from] SocketError), - #[error("standard model error: {0}")] - StandardModel(#[from] StandardModelError), - #[error("transactions error: {0}")] - Transactions(#[from] TransactionsError), -} - -pub type InternalProviderResult = Result; +use strum::EnumDiscriminants; + +use crate::workspace_snapshot::content_address::ContentAddress; +use crate::{pk, StandardModel, Timestamp}; + +// const BY_SOCKET: &str = include_str!("../queries/internal_provider/by_socket.sql"); +// const FIND_EXPLICIT_FOR_SCHEMA_VARIANT_AND_NAME: &str = +// include_str!("../queries/internal_provider/find_explicit_for_schema_variant_and_name.sql"); +// const FIND_FOR_PROP: &str = include_str!("../queries/internal_provider/find_for_prop.sql"); +// const FIND_EXPLICIT_FOR_SOCKET: &str = +// include_str!("../queries/internal_provider/find_explicit_for_socket.sql"); +// const LIST_FOR_SCHEMA_VARIANT: &str = +// include_str!("../queries/internal_provider/list_for_schema_variant.sql"); +// const LIST_EXPLICIT_FOR_SCHEMA_VARIANT: &str = +// include_str!("../queries/internal_provider/list_explicit_for_schema_variant.sql"); +// const LIST_FOR_ATTRIBUTE_PROTOTYPE: &str = +// 
include_str!("../queries/internal_provider/list_for_attribute_prototype.sql"); +// const LIST_FOR_INPUT_SOCKETS: &str = +// include_str!("../queries/internal_provider/list_for_input_sockets_for_all_schema_variants.sql"); -pk!(InternalProviderPk); pk!(InternalProviderId); -impl_standard_model! { - model: InternalProvider, - pk: InternalProviderPk, - id: InternalProviderId, - table_name: "internal_providers", - history_event_label_base: "internal_provider", - history_event_message_name: "Internal Provider" -} - /// This provider can only provide data within its own [`SchemaVariant`](crate::SchemaVariant). /// /// If this provider _specifies_ a [`PropId`](crate::Prop), it provider can only consume data from @@ -200,23 +102,9 @@ impl_standard_model! { /// are called "explicit" [`InternalProviders`](Self). #[derive(Deserialize, Serialize, Debug, Clone, PartialEq, Eq)] pub struct InternalProvider { - pk: InternalProviderPk, id: InternalProviderId, #[serde(flatten)] - tenancy: Tenancy, - #[serde(flatten)] - visibility: Visibility, - #[serde(flatten)] timestamp: Timestamp, - - /// Indicates which [`Prop`](crate::Prop) this provider belongs to. This will be - /// unset if [`Self`] is "explicit". If [`Self`] is "implicit", this will always be a "set" id. - prop_id: PropId, - /// Indicates which [`SchemaVariant`](crate::SchemaVariant) this provider belongs to. - schema_variant_id: SchemaVariantId, - /// Indicates which transformation function should be used for "emit". - attribute_prototype_id: Option, - /// Name for [`Self`] that can be used for identification. name: String, /// Definition of the inbound type (e.g. "JSONSchema" or "Number"). 
@@ -225,439 +113,478 @@ pub struct InternalProvider { outbound_type_definition: Option, } -impl InternalProvider { - #[tracing::instrument(skip(ctx))] - pub async fn new_implicit( - ctx: &DalContext, - prop_id: PropId, - schema_variant_id: SchemaVariantId, - ) -> InternalProviderResult { - // Use the prop name for the implicit internal provider name. We need an owned string that - // we then borrow for the query. - let prop = Prop::get_by_id(ctx, &prop_id) - .await? - .ok_or(InternalProviderError::PropNotFound(prop_id))?; - let name = prop.name().to_string(); - - let row = ctx - .txns() - .await? - .pg() - .query_one( - "SELECT object FROM internal_provider_create_v1($1, $2, $3, $4, $5, $6, $7)", - &[ - ctx.tenancy(), - ctx.visibility(), - &prop_id, - &schema_variant_id, - &name, - &Option::::None, - &Option::::None, - ], - ) - .await?; - let mut internal_provider: InternalProvider = - standard_model::finish_create_from_row(ctx, row).await?; - - let (identity_func, identity_func_binding, identity_func_binding_return_value) = - Func::identity_with_binding_and_return_value(ctx).await?; - - // The "base" AttributeContext of anything we create should be as un-specific as possible, - // and for an InternalProvider that is having only the InternalProviderId set. - let context = AttributeContext::builder() - .set_internal_provider_id(*internal_provider.id()) - .to_context()?; - - // Key and parent are unneeded because the provider exists not strictly as part of the - // schema values _and_ because implicit internal providers cannot be created for descendants - // of maps and arrays. 
- let attribute_prototype = AttributePrototype::new( - ctx, - *identity_func.id(), - *identity_func_binding.id(), - *identity_func_binding_return_value.id(), - context, - None, - None, - ) - .await?; - - internal_provider - .set_attribute_prototype_id(ctx, Some(*attribute_prototype.id())) - .await?; - Ok(internal_provider) - } - - /// This function will also create an _input_ [`Socket`](crate::Socket). - #[allow(clippy::too_many_arguments)] - #[tracing::instrument(skip(ctx, name))] - pub async fn new_explicit_with_socket( - ctx: &DalContext, - schema_variant_id: SchemaVariantId, - name: impl AsRef, - func_id: FuncId, - func_binding_id: FuncBindingId, - func_binding_return_value_id: FuncBindingReturnValueId, - arity: SocketArity, - frame_socket: bool, - ) -> InternalProviderResult<(Self, Socket)> { - let name = name.as_ref(); - let prop_id = PropId::NONE; - - let row = ctx - .txns() - .await? - .pg() - .query_one( - "SELECT object FROM internal_provider_create_v1($1, $2, $3, $4, $5, $6, $7)", - &[ - ctx.tenancy(), - ctx.visibility(), - &prop_id, - &schema_variant_id, - &name, - &Option::::None, - &Option::::None, - ], - ) - .await?; - - let mut explicit_internal_provider: InternalProvider = - standard_model::finish_create_from_row(ctx, row).await?; - - // The "base" AttributeContext of anything we create should be as un-specific as possible, - // and for an InternalProvider that is having only the InternalProviderId set. 
- let _base_attribute_context = AttributeContext::builder() - .set_internal_provider_id(explicit_internal_provider.id) - .to_context()?; - - let attribute_prototype = AttributePrototype::new( - ctx, - func_id, - func_binding_id, - func_binding_return_value_id, - explicit_internal_provider.attribute_context()?, - None, - None, - ) - .await?; - explicit_internal_provider - .set_attribute_prototype_id(ctx, Some(*attribute_prototype.id())) - .await?; - - let socket = Socket::new( - ctx, - name, - match frame_socket { - true => SocketKind::Frame, - false => SocketKind::Provider, - }, - &SocketEdgeKind::ConfigurationInput, - &arity, - &DiagramKind::Configuration, - Some(schema_variant_id), - ) - .await?; - socket - .set_internal_provider(ctx, explicit_internal_provider.id()) - .await?; - - Ok((explicit_internal_provider, socket)) - } - - // Immutable fields. - standard_model_accessor_ro!(prop_id, PropId); - standard_model_accessor_ro!(schema_variant_id, SchemaVariantId); - - // Mutable fields. - standard_model_accessor!( - attribute_prototype_id, - Option, - InternalProviderResult - ); - standard_model_accessor!(name, String, InternalProviderResult); - standard_model_accessor!( - inbound_type_definition, - Option, - InternalProviderResult - ); - standard_model_accessor!( - outbound_type_definition, - Option, - InternalProviderResult - ); - - // This is a 1-1 relationship, so the Vec should be 1 - standard_model_has_many!( - lookup_fn: sockets, - table: "socket_belongs_to_internal_provider", - model_table: "sockets", - returns: Socket, - result: InternalProviderResult, - ); - - /// If the [`PropId`](crate::Prop) field is not unset, then [`Self`] is an internal consumer. - pub fn is_internal_consumer(&self) -> bool { - self.prop_id != PropId::NONE - } - - /// Consume with a provided [`AttributeContext`](crate::AttributeContext) and return the - /// resulting [`AttributeValue`](crate::AttributeValue). 
- /// - /// Requirements for the provided [`AttributeContext`](crate::AttributeContext): - /// - The least specific field be a [`PropId`](crate::Prop) - /// - If the [`SchemaId`](crate::Schema) is set, it must match the corresponding field on - /// [`Self`] - /// - If the [`SchemaVariantId`](crate::SchemaVariant) is set, it must match the corresponding - /// field on [`Self`] - pub async fn implicit_emit( - &self, - ctx: &DalContext, - target_attribute_value: &mut AttributeValue, - ) -> InternalProviderResult<()> { - if !self.is_internal_consumer() { - return Err(InternalProviderError::ImplicitEmitForExplicitProviderNotAllowed); - } - - // Get the func from our attribute prototype. - let attribute_prototype_id = self - .attribute_prototype_id - .ok_or(InternalProviderError::EmptyAttributePrototype)?; - let attribute_prototype = AttributePrototype::get_by_id(ctx, &attribute_prototype_id) - .await? - .ok_or(InternalProviderError::AttributePrototypeNotFound( - attribute_prototype_id, - ))?; - let func_id = attribute_prototype.func_id(); - let func = Func::get_by_id(ctx, &func_id) - .await? - .ok_or(InternalProviderError::FuncNotFound(func_id))?; - - // Generate the AttributeContext that we should be sourcing our argument from. - let consume_attribute_context = - AttributeContextBuilder::from(target_attribute_value.context) - .unset_internal_provider_id() - .unset_external_provider_id() - .set_prop_id(self.prop_id) - .to_context()?; - - let source_attribute_value = - AttributeValue::find_for_context(ctx, consume_attribute_context.into()) - .await? 
- .ok_or(InternalProviderError::AttributeValueNotFoundForContext( - consume_attribute_context, - ))?; - let found_attribute_view_context = AttributeReadContext { - prop_id: None, - ..AttributeReadContext::from(consume_attribute_context) - }; - - let found_attribute_view = AttributeView::new( - ctx, - found_attribute_view_context, - Some(*source_attribute_value.id()), - ) - .await?; - - let (func_binding, func_binding_return_value) = FuncBinding::create_and_execute( - ctx, - serde_json::to_value(FuncBackendIdentityArgs { - identity: Some(found_attribute_view.value().clone()), - })?, - *func.id(), - vec![], - ) - .await?; - - target_attribute_value - .set_func_binding_id(ctx, *func_binding.id()) - .await?; - target_attribute_value - .set_func_binding_return_value_id(ctx, *func_binding_return_value.id()) - .await?; - - if target_attribute_value.context.component_id().is_some() && self.prop_id().is_some() { - let provider_prop = Prop::get_by_id(ctx, self.prop_id()) - .await? - .ok_or_else(|| InternalProviderError::PropNotFound(*self.prop_id()))?; - - // NOTE(jhelwig): This whole block will go away once Qualifications/Validations become part of the Prop tree. - // - // The Root Prop won't have a parent Prop. - if provider_prop.parent_prop(ctx).await?.is_none() { - let ctx_deletion = &ctx.clone_with_delete_visibility(); - let component = Component::get_by_id( - ctx_deletion, - &target_attribute_value.context.component_id(), - ) - .await? - .ok_or_else(|| { - InternalProviderError::ComponentNotFound( - target_attribute_value.context.component_id(), - ) - })?; - component - .check_validations(ctx) - .await - .map_err(|e| InternalProviderError::Component(e.to_string()))?; - } - } - - Ok(()) - } - - /// Find all [`Self`] for a given [`SchemaVariant`](crate::SchemaVariant). - #[tracing::instrument(skip(ctx))] - pub async fn list_for_schema_variant( - ctx: &DalContext, - schema_variant_id: SchemaVariantId, - ) -> InternalProviderResult> { - let rows = ctx - .txns() - .await? 
- .pg() - .query( - LIST_FOR_SCHEMA_VARIANT, - &[ctx.tenancy(), ctx.visibility(), &schema_variant_id], - ) - .await?; - Ok(standard_model::objects_from_rows(rows)?) - } - - /// Find all [`Self`] for a given [`SchemaVariant`](crate::SchemaVariant). - #[tracing::instrument(skip(ctx))] - pub async fn list_explicit_for_schema_variant( - ctx: &DalContext, - schema_variant_id: SchemaVariantId, - ) -> InternalProviderResult> { - let rows = ctx - .txns() - .await? - .pg() - .query( - LIST_EXPLICIT_FOR_SCHEMA_VARIANT, - &[ctx.tenancy(), ctx.visibility(), &schema_variant_id], - ) - .await?; - Ok(standard_model::objects_from_rows(rows)?) - } - - /// Find [`Self`] with a provided name, which is not only the name of [`Self`], but also of the - /// associated _input_ [`Socket`](crate::Socket). - #[instrument(skip_all)] - pub async fn find_explicit_for_schema_variant_and_name( - ctx: &DalContext, - schema_variant_id: SchemaVariantId, - name: impl AsRef, - ) -> InternalProviderResult> { - let name = name.as_ref(); - let row = ctx - .txns() - .await? - .pg() - .query_opt( - FIND_EXPLICIT_FOR_SCHEMA_VARIANT_AND_NAME, - &[ctx.tenancy(), ctx.visibility(), &schema_variant_id, &name], - ) - .await?; - Ok(object_option_from_row_option(row)?) - } - - /// Find [`Self`] with a provided [`SocketId`](crate::Socket). - #[instrument(skip_all)] - pub async fn find_explicit_for_socket( - ctx: &DalContext, - socket_id: SocketId, - ) -> InternalProviderResult> { - let row = ctx - .txns() - .await? - .pg() - .query_opt( - FIND_EXPLICIT_FOR_SOCKET, - &[ctx.tenancy(), ctx.visibility(), &socket_id], - ) - .await?; - Ok(object_option_from_row_option(row)?) - } - - /// Find all [`Self`] for a given [`AttributePrototypeId`](crate::AttributePrototype). - #[tracing::instrument(skip(ctx))] - pub async fn list_for_attribute_prototype( - ctx: &DalContext, - attribute_prototype_id: AttributePrototypeId, - ) -> InternalProviderResult> { - let rows = ctx - .txns() - .await? 
- .pg() - .query( - LIST_FOR_ATTRIBUTE_PROTOTYPE, - &[ctx.tenancy(), ctx.visibility(), &attribute_prototype_id], - ) - .await?; - Ok(standard_model::objects_from_rows(rows)?) - } - - /// Find all [`Self`] which are also input sockets. - pub async fn list_for_input_sockets( - ctx: &DalContext, - schema_variant_id: Option, - ) -> InternalProviderResult> { - let rows = ctx - .txns() - .await? - .pg() - .query( - LIST_FOR_INPUT_SOCKETS, - &[ctx.tenancy(), ctx.visibility(), &schema_variant_id], - ) - .await?; - - Ok(standard_model::objects_from_rows(rows)?) - } - - /// Returns an [`AttributeContext`](crate::AttributeContext) corresponding to our id. - pub fn attribute_context(&self) -> InternalProviderResult { - Ok(AttributeContext::builder() - .set_internal_provider_id(self.id) - .to_context()?) - } - - /// Finds [`Self`] for a given [`PropId`](crate::Prop). This will only work for - /// implicit [`InternalProviders`](Self). - pub async fn find_for_prop( - ctx: &DalContext, - prop_id: PropId, - ) -> InternalProviderResult> { - let row = ctx - .txns() - .await? - .pg() - .query_opt(FIND_FOR_PROP, &[ctx.tenancy(), ctx.visibility(), &prop_id]) - .await?; - Ok(object_option_from_row_option(row)?) - } - - #[tracing::instrument(skip(ctx))] - pub async fn by_socket(ctx: &DalContext) -> InternalProviderResult> { - let rows = ctx - .txns() - .await? 
- .pg() - .query(BY_SOCKET, &[ctx.tenancy(), ctx.visibility()]) - .await?; +#[derive(Debug, PartialEq)] +pub struct InternalProviderGraphNode { + id: InternalProviderId, + content_address: ContentAddress, + content: InternalProviderContentV1, +} - let mut objects: HashMap = HashMap::new(); - for row in rows.into_iter() { - let id: SocketId = row.try_get(0)?; +#[derive(EnumDiscriminants, Serialize, Deserialize, PartialEq)] +#[serde(tag = "version")] +pub enum InternalProviderContent { + V1(InternalProviderContentV1), +} - let object: serde_json::Value = row.try_get(1)?; - let object: Self = serde_json::from_value(object)?; +#[derive(Debug, Clone, Deserialize, Serialize, PartialEq)] +pub struct InternalProviderContentV1 { + #[serde(flatten)] + pub timestamp: Timestamp, + /// Name for [`Self`] that can be used for identification. + pub name: String, + /// Definition of the inbound type (e.g. "JSONSchema" or "Number"). + pub inbound_type_definition: Option, + /// Definition of the outbound type (e.g. "JSONSchema" or "Number"). + pub outbound_type_definition: Option, +} - objects.insert(id, object); +impl InternalProviderGraphNode { + pub fn assemble( + id: impl Into, + content_hash: ContentHash, + content: InternalProviderContentV1, + ) -> Self { + Self { + id: id.into(), + content_address: ContentAddress::InternalProvider(content_hash), + content, } - - Ok(objects.into_iter().collect()) } } + +// impl InternalProvider { +// #[tracing::instrument(skip(ctx))] +// pub async fn new_implicit( +// ctx: &DalContext, +// prop_id: PropId, +// schema_variant_id: SchemaVariantId, +// ) -> InternalProviderResult { +// // Use the prop name for the implicit internal provider name. We need an owned string that +// // we then borrow for the query. +// let prop = Prop::get_by_id(ctx, &prop_id) +// .await? +// .ok_or(InternalProviderError::PropNotFound(prop_id))?; +// let name = prop.name().to_string(); + +// let row = ctx +// .txns() +// .await? 
+// .pg() +// .query_one( +// "SELECT object FROM internal_provider_create_v1($1, $2, $3, $4, $5, $6, $7)", +// &[ +// ctx.tenancy(), +// ctx.visibility(), +// &prop_id, +// &schema_variant_id, +// &name, +// &Option::::None, +// &Option::::None, +// ], +// ) +// .await?; +// let mut internal_provider: InternalProvider = +// standard_model::finish_create_from_row(ctx, row).await?; + +// let (identity_func, identity_func_binding, identity_func_binding_return_value) = +// Func::identity_with_binding_and_return_value(ctx).await?; + +// // The "base" AttributeContext of anything we create should be as un-specific as possible, +// // and for an InternalProvider that is having only the InternalProviderId set. +// let context = AttributeContext::builder() +// .set_internal_provider_id(*internal_provider.id()) +// .to_context()?; + +// // Key and parent are unneeded because the provider exists not strictly as part of the +// // schema values _and_ because implicit internal providers cannot be created for descendants +// // of maps and arrays. +// let attribute_prototype = AttributePrototype::new( +// ctx, +// *identity_func.id(), +// *identity_func_binding.id(), +// *identity_func_binding_return_value.id(), +// context, +// None, +// None, +// ) +// .await?; + +// internal_provider +// .set_attribute_prototype_id(ctx, Some(*attribute_prototype.id())) +// .await?; +// Ok(internal_provider) +// } + +// /// This function will also create an _input_ [`Socket`](crate::Socket). 
+// #[allow(clippy::too_many_arguments)] +// #[tracing::instrument(skip(ctx, name))] +// pub async fn new_explicit_with_socket( +// ctx: &DalContext, +// schema_variant_id: SchemaVariantId, +// name: impl AsRef, +// func_id: FuncId, +// func_binding_id: FuncBindingId, +// func_binding_return_value_id: FuncBindingReturnValueId, +// arity: SocketArity, +// frame_socket: bool, +// ) -> InternalProviderResult<(Self, Socket)> { +// let name = name.as_ref(); +// let prop_id = PropId::NONE; + +// let row = ctx +// .txns() +// .await? +// .pg() +// .query_one( +// "SELECT object FROM internal_provider_create_v1($1, $2, $3, $4, $5, $6, $7)", +// &[ +// ctx.tenancy(), +// ctx.visibility(), +// &prop_id, +// &schema_variant_id, +// &name, +// &Option::::None, +// &Option::::None, +// ], +// ) +// .await?; + +// let mut explicit_internal_provider: InternalProvider = +// standard_model::finish_create_from_row(ctx, row).await?; + +// // The "base" AttributeContext of anything we create should be as un-specific as possible, +// // and for an InternalProvider that is having only the InternalProviderId set. 
+// let _base_attribute_context = AttributeContext::builder() +// .set_internal_provider_id(explicit_internal_provider.id) +// .to_context()?; + +// let attribute_prototype = AttributePrototype::new( +// ctx, +// func_id, +// func_binding_id, +// func_binding_return_value_id, +// explicit_internal_provider.attribute_context()?, +// None, +// None, +// ) +// .await?; +// explicit_internal_provider +// .set_attribute_prototype_id(ctx, Some(*attribute_prototype.id())) +// .await?; + +// let socket = Socket::new( +// ctx, +// name, +// match frame_socket { +// true => SocketKind::Frame, +// false => SocketKind::Provider, +// }, +// &SocketEdgeKind::ConfigurationInput, +// &arity, +// &DiagramKind::Configuration, +// Some(schema_variant_id), +// ) +// .await?; +// socket +// .set_internal_provider(ctx, explicit_internal_provider.id()) +// .await?; + +// Ok((explicit_internal_provider, socket)) +// } + +// // Immutable fields. +// standard_model_accessor_ro!(prop_id, PropId); +// standard_model_accessor_ro!(schema_variant_id, SchemaVariantId); + +// // Mutable fields. +// standard_model_accessor!( +// attribute_prototype_id, +// Option, +// InternalProviderResult +// ); +// standard_model_accessor!(name, String, InternalProviderResult); +// standard_model_accessor!( +// inbound_type_definition, +// Option, +// InternalProviderResult +// ); +// standard_model_accessor!( +// outbound_type_definition, +// Option, +// InternalProviderResult +// ); + +// // This is a 1-1 relationship, so the Vec should be 1 +// standard_model_has_many!( +// lookup_fn: sockets, +// table: "socket_belongs_to_internal_provider", +// model_table: "sockets", +// returns: Socket, +// result: InternalProviderResult, +// ); + +// /// If the [`PropId`](crate::Prop) field is not unset, then [`Self`] is an internal consumer. 
+// pub fn is_internal_consumer(&self) -> bool { +// self.prop_id != PropId::NONE +// } + +// /// Consume with a provided [`AttributeContext`](crate::AttributeContext) and return the +// /// resulting [`AttributeValue`](crate::AttributeValue). +// /// +// /// Requirements for the provided [`AttributeContext`](crate::AttributeContext): +// /// - The least specific field be a [`PropId`](crate::Prop) +// /// - If the [`SchemaId`](crate::Schema) is set, it must match the corresponding field on +// /// [`Self`] +// /// - If the [`SchemaVariantId`](crate::SchemaVariant) is set, it must match the corresponding +// /// field on [`Self`] +// pub async fn implicit_emit( +// &self, +// ctx: &DalContext, +// target_attribute_value: &mut AttributeValue, +// ) -> InternalProviderResult<()> { +// if !self.is_internal_consumer() { +// return Err(InternalProviderError::ImplicitEmitForExplicitProviderNotAllowed); +// } + +// // Get the func from our attribute prototype. +// let attribute_prototype_id = self +// .attribute_prototype_id +// .ok_or(InternalProviderError::EmptyAttributePrototype)?; +// let attribute_prototype = AttributePrototype::get_by_id(ctx, &attribute_prototype_id) +// .await? +// .ok_or(InternalProviderError::AttributePrototypeNotFound( +// attribute_prototype_id, +// ))?; +// let func_id = attribute_prototype.func_id(); +// let func = Func::get_by_id(ctx, &func_id) +// .await? +// .ok_or(InternalProviderError::FuncNotFound(func_id))?; + +// // Generate the AttributeContext that we should be sourcing our argument from. +// let consume_attribute_context = +// AttributeContextBuilder::from(target_attribute_value.context) +// .unset_internal_provider_id() +// .unset_external_provider_id() +// .set_prop_id(self.prop_id) +// .to_context()?; + +// let source_attribute_value = +// AttributeValue::find_for_context(ctx, consume_attribute_context.into()) +// .await? 
+// .ok_or(InternalProviderError::AttributeValueNotFoundForContext( +// consume_attribute_context, +// ))?; +// let found_attribute_view_context = AttributeReadContext { +// prop_id: None, +// ..AttributeReadContext::from(consume_attribute_context) +// }; + +// let found_attribute_view = AttributeView::new( +// ctx, +// found_attribute_view_context, +// Some(*source_attribute_value.id()), +// ) +// .await?; + +// let (func_binding, func_binding_return_value) = FuncBinding::create_and_execute( +// ctx, +// serde_json::to_value(FuncBackendIdentityArgs { +// identity: Some(found_attribute_view.value().clone()), +// })?, +// *func.id(), +// vec![], +// ) +// .await?; + +// target_attribute_value +// .set_func_binding_id(ctx, *func_binding.id()) +// .await?; +// target_attribute_value +// .set_func_binding_return_value_id(ctx, *func_binding_return_value.id()) +// .await?; + +// if target_attribute_value.context.component_id().is_some() && self.prop_id().is_some() { +// let provider_prop = Prop::get_by_id(ctx, self.prop_id()) +// .await? +// .ok_or_else(|| InternalProviderError::PropNotFound(*self.prop_id()))?; + +// // NOTE(jhelwig): This whole block will go away once Qualifications/Validations become part of the Prop tree. +// // +// // The Root Prop won't have a parent Prop. +// if provider_prop.parent_prop(ctx).await?.is_none() { +// let ctx_deletion = &ctx.clone_with_delete_visibility(); +// let component = Component::get_by_id( +// ctx_deletion, +// &target_attribute_value.context.component_id(), +// ) +// .await? +// .ok_or_else(|| { +// InternalProviderError::ComponentNotFound( +// target_attribute_value.context.component_id(), +// ) +// })?; +// component +// .check_validations(ctx) +// .await +// .map_err(|e| InternalProviderError::Component(e.to_string()))?; +// } +// } + +// Ok(()) +// } + +// /// Find all [`Self`] for a given [`SchemaVariant`](crate::SchemaVariant). 
+// #[tracing::instrument(skip(ctx))] +// pub async fn list_for_schema_variant( +// ctx: &DalContext, +// schema_variant_id: SchemaVariantId, +// ) -> InternalProviderResult> { +// let rows = ctx +// .txns() +// .await? +// .pg() +// .query( +// LIST_FOR_SCHEMA_VARIANT, +// &[ctx.tenancy(), ctx.visibility(), &schema_variant_id], +// ) +// .await?; +// Ok(standard_model::objects_from_rows(rows)?) +// } + +// /// Find all [`Self`] for a given [`SchemaVariant`](crate::SchemaVariant). +// #[tracing::instrument(skip(ctx))] +// pub async fn list_explicit_for_schema_variant( +// ctx: &DalContext, +// schema_variant_id: SchemaVariantId, +// ) -> InternalProviderResult> { +// let rows = ctx +// .txns() +// .await? +// .pg() +// .query( +// LIST_EXPLICIT_FOR_SCHEMA_VARIANT, +// &[ctx.tenancy(), ctx.visibility(), &schema_variant_id], +// ) +// .await?; +// Ok(standard_model::objects_from_rows(rows)?) +// } + +// /// Find [`Self`] with a provided name, which is not only the name of [`Self`], but also of the +// /// associated _input_ [`Socket`](crate::Socket). +// #[instrument(skip_all)] +// pub async fn find_explicit_for_schema_variant_and_name( +// ctx: &DalContext, +// schema_variant_id: SchemaVariantId, +// name: impl AsRef, +// ) -> InternalProviderResult> { +// let name = name.as_ref(); +// let row = ctx +// .txns() +// .await? +// .pg() +// .query_opt( +// FIND_EXPLICIT_FOR_SCHEMA_VARIANT_AND_NAME, +// &[ctx.tenancy(), ctx.visibility(), &schema_variant_id, &name], +// ) +// .await?; +// Ok(object_option_from_row_option(row)?) +// } + +// /// Find [`Self`] with a provided [`SocketId`](crate::Socket). +// #[instrument(skip_all)] +// pub async fn find_explicit_for_socket( +// ctx: &DalContext, +// socket_id: SocketId, +// ) -> InternalProviderResult> { +// let row = ctx +// .txns() +// .await? +// .pg() +// .query_opt( +// FIND_EXPLICIT_FOR_SOCKET, +// &[ctx.tenancy(), ctx.visibility(), &socket_id], +// ) +// .await?; +// Ok(object_option_from_row_option(row)?) 
+// } + +// /// Find all [`Self`] for a given [`AttributePrototypeId`](crate::AttributePrototype). +// #[tracing::instrument(skip(ctx))] +// pub async fn list_for_attribute_prototype( +// ctx: &DalContext, +// attribute_prototype_id: AttributePrototypeId, +// ) -> InternalProviderResult> { +// let rows = ctx +// .txns() +// .await? +// .pg() +// .query( +// LIST_FOR_ATTRIBUTE_PROTOTYPE, +// &[ctx.tenancy(), ctx.visibility(), &attribute_prototype_id], +// ) +// .await?; +// Ok(standard_model::objects_from_rows(rows)?) +// } + +// /// Find all [`Self`] which are also input sockets. +// pub async fn list_for_input_sockets( +// ctx: &DalContext, +// schema_variant_id: Option, +// ) -> InternalProviderResult> { +// let rows = ctx +// .txns() +// .await? +// .pg() +// .query( +// LIST_FOR_INPUT_SOCKETS, +// &[ctx.tenancy(), ctx.visibility(), &schema_variant_id], +// ) +// .await?; + +// Ok(standard_model::objects_from_rows(rows)?) +// } + +// /// Returns an [`AttributeContext`](crate::AttributeContext) corresponding to our id. +// pub fn attribute_context(&self) -> InternalProviderResult { +// Ok(AttributeContext::builder() +// .set_internal_provider_id(self.id) +// .to_context()?) +// } + +// /// Finds [`Self`] for a given [`PropId`](crate::Prop). This will only work for +// /// implicit [`InternalProviders`](Self). +// pub async fn find_for_prop( +// ctx: &DalContext, +// prop_id: PropId, +// ) -> InternalProviderResult> { +// let row = ctx +// .txns() +// .await? +// .pg() +// .query_opt(FIND_FOR_PROP, &[ctx.tenancy(), ctx.visibility(), &prop_id]) +// .await?; +// Ok(object_option_from_row_option(row)?) +// } + +// #[tracing::instrument(skip(ctx))] +// pub async fn by_socket(ctx: &DalContext) -> InternalProviderResult> { +// let rows = ctx +// .txns() +// .await? 
+// .pg() +// .query(BY_SOCKET, &[ctx.tenancy(), ctx.visibility()]) +// .await?; + +// let mut objects: HashMap = HashMap::new(); +// for row in rows.into_iter() { +// let id: SocketId = row.try_get(0)?; + +// let object: serde_json::Value = row.try_get(1)?; +// let object: Self = serde_json::from_value(object)?; + +// objects.insert(id, object); +// } + +// Ok(objects.into_iter().collect()) +// } +// } diff --git a/lib/dal/src/schema.rs b/lib/dal/src/schema.rs index 2c8591864e..9e945016e9 100644 --- a/lib/dal/src/schema.rs +++ b/lib/dal/src/schema.rs @@ -1,271 +1,170 @@ +use content_store::ContentHash; use serde::{Deserialize, Serialize}; -use si_data_nats::NatsError; -use si_data_pg::PgError; -use telemetry::prelude::*; -use thiserror::Error; +use strum::EnumDiscriminants; -use crate::func::binding_return_value::FuncBindingReturnValueError; -use crate::provider::external::ExternalProviderError; -use crate::provider::internal::InternalProviderError; -use crate::schema::variant::SchemaVariantError; -use crate::socket::SocketError; -use crate::standard_model::object_option_from_row_option; -use crate::{ - component::ComponentKind, func::binding::FuncBindingError, impl_standard_model, pk, - schema::ui_menu::SchemaUiMenuId, standard_model, standard_model_accessor, - standard_model_has_many, standard_model_many_to_many, AttributeContextBuilderError, - AttributePrototypeError, AttributeValueError, Component, DalContext, FuncError, - HistoryEventError, PropError, StandardModel, StandardModelError, Timestamp, - ValidationPrototypeError, Visibility, WsEventError, -}; -use crate::{Tenancy, TransactionsError, WorkspacePk}; +use crate::workspace_snapshot::content_address::ContentAddress; +use crate::{pk, Timestamp}; pub use ui_menu::SchemaUiMenu; -pub use variant::root_prop::RootProp; pub use variant::{SchemaVariant, SchemaVariantId}; pub mod ui_menu; pub mod variant; -const FIND_SCHEMA_VARIANT_BY_NAME_FOR_SCHEMA: &str = - 
include_str!("./queries/find_schema_variant_for_schema_and_name.sql"); +// const FIND_SCHEMA_VARIANT_BY_NAME_FOR_SCHEMA: &str = +// include_str!("./queries/find_schema_variant_for_schema_and_name.sql"); -#[remain::sorted] -#[derive(Error, Debug)] -pub enum SchemaError { - #[error("AttributeContextBuilder error: {0}")] - AttributeContextBuilder(#[from] AttributeContextBuilderError), - #[error("AttributePrototype error: {0}")] - AttributePrototype(#[from] AttributePrototypeError), - #[error("AttributeValue error: {0}")] - AttributeValue(#[from] AttributeValueError), - #[error("external provider error: {0}")] - ExternalProvider(#[from] ExternalProviderError), - #[error("func error: {0}")] - Func(#[from] FuncError), - #[error("func binding error: {0}")] - FuncBinding(#[from] FuncBindingError), - #[error("func binding return value error: {0}")] - FuncBindingReturnValue(#[from] FuncBindingReturnValueError), - #[error("func not found: {0}")] - FuncNotFound(String), - #[error("history event error: {0}")] - HistoryEvent(#[from] HistoryEventError), - #[error("internal provider error: {0}")] - InternalProvider(#[from] InternalProviderError), - #[error("missing a func in attribute update: {0} not found")] - MissingFunc(String), - #[error("nats txn error: {0}")] - Nats(#[from] NatsError), - #[error("no default variant for schema id: {0}")] - NoDefaultVariant(SchemaId), - #[error("schema not found: {0}")] - NotFound(SchemaId), - #[error("schema not found by name: {0}")] - NotFoundByName(String), - #[error("pg error: {0}")] - Pg(#[from] PgError), - #[error("prop error: {0}")] - Prop(#[from] PropError), - #[error("schema ui menu not found: {0}")] - SchemaUiMenuNotFound(SchemaUiMenuId), - #[error("error serializing/deserializing json: {0}")] - SerdeJson(#[from] serde_json::Error), - #[error("socket error: {0}")] - Socket(#[from] SocketError), - #[error("standard model error: {0}")] - StandardModel(#[from] StandardModelError), - #[error("transactions error: {0}")] - 
Transactions(#[from] TransactionsError), - #[error("validation prototype error: {0}")] - ValidationPrototype(#[from] ValidationPrototypeError), - #[error("schema variant error: {0}")] - Variant(#[from] SchemaVariantError), - #[error("ws event error: {0}")] - WsEvent(#[from] WsEventError), -} - -pub type SchemaResult = Result; +pub const SCHEMA_VERSION: SchemaContentDiscriminants = SchemaContentDiscriminants::V1; -pk!(SchemaPk); pk!(SchemaId); #[derive(Clone, Debug, Deserialize, Eq, PartialEq, Serialize)] pub struct Schema { - pk: SchemaPk, id: SchemaId, - name: String, - #[serde(flatten)] - tenancy: Tenancy, #[serde(flatten)] timestamp: Timestamp, - #[serde(flatten)] - visibility: Visibility, + name: String, ui_hidden: bool, default_schema_variant_id: Option, component_kind: ComponentKind, } -impl_standard_model! { - model: Schema, - pk: SchemaPk, - id: SchemaId, - table_name: "schemas", - history_event_label_base: "schema", - history_event_message_name: "Schema" +// FIXME(nick,zack,jacob): temporarily moved here. +#[remain::sorted] +#[derive(Clone, Debug, Deserialize, Eq, PartialEq, Serialize, Copy)] +#[serde(rename_all = "camelCase")] +pub enum ComponentKind { + Credential, + Standard, } -impl Schema { - #[instrument(skip_all)] - #[allow(clippy::too_many_arguments)] - pub async fn new( - ctx: &DalContext, - name: impl AsRef, - component_kind: &ComponentKind, - ) -> SchemaResult { - let name = name.as_ref(); - let row = ctx - .txns() - .await? 
- .pg() - .query_one( - "SELECT object FROM schema_create_v1($1, $2, $3, $4)", - &[ - ctx.tenancy(), - ctx.visibility(), - &name, - &component_kind.as_ref(), - ], - ) - .await?; - let object = standard_model::finish_create_from_row(ctx, row).await?; - Ok(object) - } - - standard_model_accessor!(name, String, SchemaResult); - standard_model_accessor!(component_kind, Enum(ComponentKind), SchemaResult); - standard_model_accessor!(ui_hidden, bool, SchemaResult); - standard_model_accessor!( - default_schema_variant_id, - Option, - SchemaResult - ); - - standard_model_has_many!( - lookup_fn: ui_menus, - table: "schema_ui_menu_belongs_to_schema", - model_table: "schema_ui_menus", - returns: SchemaUiMenu, - result: SchemaResult, - ); - - standard_model_has_many!( - lookup_fn: components, - table: "component_belongs_to_schema", - model_table: "components", - returns: Component, - result: SchemaResult, - ); +#[derive(Debug, PartialEq)] +pub struct SchemaGraphNode { + id: SchemaId, + content_address: ContentAddress, + content: SchemaContentV1, +} - standard_model_has_many!( - lookup_fn: variants, - table: "schema_variant_belongs_to_schema", - model_table: "schema_variants", - returns: SchemaVariant, - result: SchemaResult, - ); +#[derive(EnumDiscriminants, Serialize, Deserialize, PartialEq)] +#[serde(tag = "version")] +pub enum SchemaContent { + V1(SchemaContentV1), +} - standard_model_many_to_many!( - lookup_fn: implements, - associate_fn: add_implements_schema, - disassociate_fn: remove_implements_schema, - disassociate_all_fn: remove_all_implements_schemas, - table_name: "schema_many_to_many_implements", - left_table: "schemas", - left_id: SchemaId, - right_table: "schemas", - right_id: SchemaId, - which_table_is_this: "left", - returns: Schema, - result: SchemaResult, - ); +#[derive(Debug, Clone, Deserialize, Serialize, PartialEq)] +pub struct SchemaContentV1 { + #[serde(flatten)] + pub timestamp: Timestamp, + pub name: String, + pub ui_hidden: bool, + pub 
default_schema_variant_id: Option, + pub component_kind: ComponentKind, +} - pub async fn default_variant(&self, ctx: &DalContext) -> SchemaResult { - match self.default_schema_variant_id() { - Some(schema_variant_id) => Ok(SchemaVariant::get_by_id(ctx, schema_variant_id) - .await? - .ok_or_else(|| SchemaError::NoDefaultVariant(*self.id()))?), - None => Err(SchemaError::NoDefaultVariant(*self.id())), +impl SchemaGraphNode { + pub fn assemble( + id: impl Into, + content_hash: ContentHash, + content: SchemaContentV1, + ) -> Self { + Self { + id: id.into(), + content_address: ContentAddress::Schema(content_hash), + content, } } +} - pub async fn is_builtin(&self, ctx: &DalContext) -> SchemaResult { - let row = ctx - .txns() - .await? - .pg() - .query_opt( - "SELECT id FROM schemas WHERE id = $1 and tenancy_workspace_pk = $2 LIMIT 1", - &[self.id(), &WorkspacePk::NONE], - ) - .await?; - - Ok(row.is_some()) - } - - pub async fn find_by_name(ctx: &DalContext, name: impl AsRef) -> SchemaResult { - let name = name.as_ref(); - let schemas = Schema::find_by_attr(ctx, "name", &name).await?; - schemas - .first() - .ok_or_else(|| SchemaError::NotFoundByName(name.into())) - .cloned() - } - - pub async fn find_by_name_builtin( - ctx: &DalContext, - name: impl AsRef, - ) -> SchemaResult> { - let name = name.as_ref(); - - let builtin_ctx = ctx.clone_with_new_tenancy(Tenancy::new(WorkspacePk::NONE)); - let builtin_schema = Self::find_by_name(&builtin_ctx, name).await?; - - Ok(Self::get_by_id(ctx, builtin_schema.id()).await?) - } - - pub async fn find_variant_by_name( - &self, - ctx: &DalContext, - name: impl AsRef, - ) -> SchemaResult> { - let name: &str = name.as_ref(); - let row = ctx - .txns() - .await? - .pg() - .query_opt( - FIND_SCHEMA_VARIANT_BY_NAME_FOR_SCHEMA, - &[ctx.tenancy(), ctx.visibility(), self.id(), &name], - ) - .await?; - - Ok(object_option_from_row_option(row)?) 
- } - - pub async fn default_schema_variant_id_for_name( - ctx: &DalContext, - name: impl AsRef, - ) -> SchemaResult { - let name = name.as_ref(); - let schemas = Schema::find_by_attr(ctx, "name", &name).await?; - let schema = schemas - .first() - .ok_or_else(|| SchemaError::NotFoundByName(name.into()))?; - let schema_variant_id = schema - .default_schema_variant_id() - .ok_or_else(|| SchemaError::NoDefaultVariant(*schema.id()))?; - - Ok(*schema_variant_id) +impl Schema { + pub fn assemble(id: SchemaId, inner: &SchemaContentV1) -> Self { + Self { + id, + timestamp: inner.timestamp, + name: inner.name.clone(), + ui_hidden: inner.ui_hidden, + default_schema_variant_id: inner.default_schema_variant_id, + component_kind: inner.component_kind, + } } } + +// impl Schema { +// pub async fn default_variant(&self, ctx: &DalContext) -> SchemaResult { +// match self.default_schema_variant_id() { +// Some(schema_variant_id) => Ok(SchemaVariant::get_by_id(ctx, schema_variant_id) +// .await? +// .ok_or_else(|| SchemaError::NoDefaultVariant(*self.id()))?), +// None => Err(SchemaError::NoDefaultVariant(*self.id())), +// } +// } +// +// pub async fn is_builtin(&self, ctx: &DalContext) -> SchemaResult { +// let row = ctx +// .txns() +// .await? 
+// .pg() +// .query_opt( +// "SELECT id FROM schemas WHERE id = $1 and tenancy_workspace_pk = $2 LIMIT 1", +// &[self.id(), &WorkspacePk::NONE], +// ) +// .await?; +// +// Ok(row.is_some()) +// } +// +// pub async fn find_by_name(ctx: &DalContext, name: impl AsRef) -> SchemaResult { +// let name = name.as_ref(); +// let schemas = Schema::find_by_attr(ctx, "name", &name).await?; +// schemas +// .first() +// .ok_or_else(|| SchemaError::NotFoundByName(name.into())) +// .cloned() +// } +// +// pub async fn find_by_name_builtin( +// ctx: &DalContext, +// name: impl AsRef, +// ) -> SchemaResult> { +// let name = name.as_ref(); +// +// let builtin_ctx = ctx.clone_with_new_tenancy(Tenancy::new(WorkspacePk::NONE)); +// let builtin_schema = Self::find_by_name(&builtin_ctx, name).await?; +// +// Ok(Self::get_by_id(ctx, builtin_schema.id()).await?) +// } +// +// pub async fn find_variant_by_name( +// &self, +// ctx: &DalContext, +// name: impl AsRef, +// ) -> SchemaResult> { +// let name: &str = name.as_ref(); +// let row = ctx +// .txns() +// .await? +// .pg() +// .query_opt( +// FIND_SCHEMA_VARIANT_BY_NAME_FOR_SCHEMA, +// &[ctx.tenancy(), ctx.visibility(), self.id(), &name], +// ) +// .await?; +// +// Ok(object_option_from_row_option(row)?) 
+// } +// +// pub async fn default_schema_variant_id_for_name( +// ctx: &DalContext, +// name: impl AsRef, +// ) -> SchemaResult { +// let name = name.as_ref(); +// let schemas = Schema::find_by_attr(ctx, "name", &name).await?; +// let schema = schemas +// .first() +// .ok_or_else(|| SchemaError::NotFoundByName(name.into()))?; +// let schema_variant_id = schema +// .default_schema_variant_id() +// .ok_or_else(|| SchemaError::NoDefaultVariant(*schema.id()))?; +// +// Ok(*schema_variant_id) +// } +// } diff --git a/lib/dal/src/schema/ui_menu.rs b/lib/dal/src/schema/ui_menu.rs index f6a60f5c03..b56921dd1d 100644 --- a/lib/dal/src/schema/ui_menu.rs +++ b/lib/dal/src/schema/ui_menu.rs @@ -2,11 +2,10 @@ use serde::{Deserialize, Serialize}; use telemetry::prelude::*; use crate::{ - impl_standard_model, pk, standard_model, standard_model_accessor, standard_model_belongs_to, - DalContext, StandardModel, Tenancy, Timestamp, Visibility, + impl_standard_model, pk, StandardModel, Tenancy, Timestamp, Visibility, }; -use super::{Schema, SchemaId, SchemaResult}; + const FIND_FOR_SCHEMA: &str = include_str!("../queries/ui_menus_find_for_schema.sql"); @@ -36,70 +35,70 @@ impl_standard_model! { history_event_message_name: "Schema UI Menu" } -impl SchemaUiMenu { - #[instrument(skip_all)] - pub async fn new( - ctx: &DalContext, - name: impl AsRef, - category: impl AsRef, - ) -> SchemaResult { - let name = name.as_ref(); - let category = category.as_ref(); +// impl SchemaUiMenu { +// #[instrument(skip_all)] +// pub async fn new( +// ctx: &DalContext, +// name: impl AsRef, +// category: impl AsRef, +// ) -> SchemaResult { +// let name = name.as_ref(); +// let category = category.as_ref(); - let row = ctx - .txns() - .await? 
- .pg() - .query_one( - "SELECT object FROM schema_ui_menu_create_v1($1, $2, $3, $4)", - &[ - ctx.tenancy(), - ctx.visibility(), - &(name.to_string()), - &(category.to_string()), - ], - ) - .await?; - let object = standard_model::finish_create_from_row(ctx, row).await?; - Ok(object) - } +// let row = ctx +// .txns() +// .await? +// .pg() +// .query_one( +// "SELECT object FROM schema_ui_menu_create_v1($1, $2, $3, $4)", +// &[ +// ctx.tenancy(), +// ctx.visibility(), +// &(name.to_string()), +// &(category.to_string()), +// ], +// ) +// .await?; +// let object = standard_model::finish_create_from_row(ctx, row).await?; +// Ok(object) +// } - standard_model_accessor!(name, String, SchemaResult); - standard_model_accessor!(category, String, SchemaResult); +// standard_model_accessor!(name, String, SchemaResult); +// standard_model_accessor!(category, String, SchemaResult); - standard_model_belongs_to!( - lookup_fn: schema, - set_fn: set_schema, - unset_fn: unset_schema, - table: "schema_ui_menu_belongs_to_schema", - model_table: "schemas", - belongs_to_id: SchemaId, - returns: Schema, - result: SchemaResult, - ); +// standard_model_belongs_to!( +// lookup_fn: schema, +// set_fn: set_schema, +// unset_fn: unset_schema, +// table: "schema_ui_menu_belongs_to_schema", +// model_table: "schemas", +// belongs_to_id: SchemaId, +// returns: Schema, +// result: SchemaResult, +// ); - #[instrument(skip_all)] - pub async fn find_for_schema( - ctx: &DalContext, - schema_id: SchemaId, - ) -> SchemaResult> { - let maybe_row = ctx - .txns() - .await? - .pg() - .query_opt( - FIND_FOR_SCHEMA, - &[ctx.tenancy(), ctx.visibility(), &schema_id], - ) - .await?; +// #[instrument(skip_all)] +// pub async fn find_for_schema( +// ctx: &DalContext, +// schema_id: SchemaId, +// ) -> SchemaResult> { +// let maybe_row = ctx +// .txns() +// .await? 
+// .pg() +// .query_opt( +// FIND_FOR_SCHEMA, +// &[ctx.tenancy(), ctx.visibility(), &schema_id], +// ) +// .await?; - // NOTE(nick): currently, we are assuming there can only be one "schema_ui_menu" for a given - // schema id. This might not always be the case. - let maybe_object: Option = standard_model::option_object_from_row(maybe_row)?; - Ok(maybe_object) - } +// // NOTE(nick): currently, we are assuming there can only be one "schema_ui_menu" for a given +// // schema id. This might not always be the case. +// let maybe_object: Option = standard_model::option_object_from_row(maybe_row)?; +// Ok(maybe_object) +// } - pub fn category_path(&self) -> Vec { - self.category.split('.').map(|f| f.to_string()).collect() - } -} +// pub fn category_path(&self) -> Vec { +// self.category.split('.').map(|f| f.to_string()).collect() +// } +// } diff --git a/lib/dal/src/schema/variant.rs b/lib/dal/src/schema/variant.rs index 1e017d5c94..0999c6bc2b 100644 --- a/lib/dal/src/schema/variant.rs +++ b/lib/dal/src/schema/variant.rs @@ -1,849 +1,684 @@ //! This module contains [`SchemaVariant`](crate::SchemaVariant), which is t/he "class" of a //! [`Component`](crate::Component). 
+use content_store::ContentHash; use serde::{Deserialize, Serialize}; -use si_data_nats::NatsError; -use si_data_pg::PgError; + +use strum::EnumDiscriminants; use telemetry::prelude::*; use thiserror::Error; -use crate::attribute::context::AttributeContextBuilder; -use crate::func::binding_return_value::FuncBindingReturnValueError; -use crate::prop::PropPath; -use crate::provider::internal::InternalProviderError; -use crate::schema::variant::definition::{SchemaVariantDefinitionError, SchemaVariantDefinitionId}; -use crate::schema::variant::root_prop::component_type::ComponentType; -use crate::schema::variant::root_prop::SiPropChild; -use crate::standard_model::{object_from_row, option_object_from_row}; -use crate::AttributePrototypeArgument; -use crate::{ - func::{ - argument::{FuncArgument, FuncArgumentError}, - binding::FuncBindingError, - binding_return_value::FuncBindingReturnValueId, - }, - impl_standard_model, pk, - schema::{RootProp, SchemaError}, - socket::{Socket, SocketError, SocketId}, - standard_model::{self, objects_from_rows}, - standard_model_accessor, standard_model_belongs_to, standard_model_many_to_many, - AttributeContextBuilderError, AttributePrototype, AttributePrototypeArgumentError, - AttributePrototypeError, AttributeReadContext, AttributeValue, AttributeValueError, - AttributeValueId, BuiltinsError, Component, ComponentError, ComponentId, DalContext, - ExternalProvider, ExternalProviderError, Func, FuncBackendResponseType, FuncBindingReturnValue, - FuncError, FuncId, HistoryEventError, InternalProvider, Prop, PropError, PropId, PropKind, - ReconciliationPrototypeError, RootPropChild, Schema, SchemaId, SocketArity, StandardModel, - StandardModelError, Tenancy, Timestamp, TransactionsError, ValidationPrototypeError, - Visibility, WorkspacePk, WsEventError, -}; - -use self::leaves::{LeafInput, LeafInputLocation, LeafKind}; - -pub mod definition; +use crate::workspace_snapshot::content_address::ContentAddress; +use 
crate::workspace_snapshot::node_weight::category_node_weight::CategoryNodeKind; +use crate::{pk, DalContext, PropId, StandardModel, Timestamp, WorkspaceSnapshot}; + +// use self::leaves::{LeafInput, LeafInputLocation, LeafKind}; + +// pub mod definition; pub mod leaves; pub mod root_prop; -const ALL_FUNCS: &str = include_str!("../queries/schema_variant/all_related_funcs.sql"); -const ALL_PROPS: &str = include_str!("../queries/schema_variant/all_props.sql"); -const FIND_ROOT_PROP: &str = include_str!("../queries/schema_variant/find_root_prop.sql"); -const FIND_LEAF_ITEM_PROP: &str = include_str!("../queries/schema_variant/find_leaf_item_prop.sql"); -const FIND_ROOT_CHILD_IMPLICIT_INTERNAL_PROVIDER: &str = - include_str!("../queries/schema_variant/find_root_child_implicit_internal_provider.sql"); -const LIST_ROOT_SI_CHILD_PROPS: &str = - include_str!("../queries/schema_variant/list_root_si_child_props.sql"); -const SECRET_DEFINING_SCHEMA_VARIANTS: &str = - include_str!("../queries/schema_variant/secret_defining_schema_variants.sql"); - -#[remain::sorted] -#[derive(Error, Debug)] -pub enum SchemaVariantError { - #[error("attribute context builder error: {0}")] - AttributeContextBuilder(#[from] AttributeContextBuilderError), - #[error("attribute prototype error: {0}")] - AttributePrototype(#[from] AttributePrototypeError), - #[error("attribute prototype argument error: {0}")] - AttributePrototypeArgument(#[from] AttributePrototypeArgumentError), - #[error("attribute value error: {0}")] - AttributeValue(#[from] AttributeValueError), - /// Not parent [`AttributeValue`](crate::AttributeValue) was found for the specified - /// [`AttributeValueId`](crate::AttributeValue). - #[error("no parent found for attribute value: {0}")] - AttributeValueDoesNotHaveParent(AttributeValueId), - /// An [`AttributeValue`](crate::AttributeValue) could not be found for the specified - /// [`AttributeReadContext`](crate::AttributeReadContext). 
- #[error("attribute value not found for attribute read context: {0:?}")] - AttributeValueNotFoundForContext(Box), - #[error(transparent)] - Builtins(#[from] Box), - #[error(transparent)] - Component(#[from] Box), - #[error(transparent)] - ExternalProvider(#[from] ExternalProviderError), - #[error("can neither provide children nor entry for primitive with name: ({0})")] - FoundChildrenAndEntryForPrimitive(String), - #[error("cannot provide children for array with name: ({0})")] - FoundChildrenForArray(String), - #[error("cannot provide children for primitive with name: ({0})")] - FoundChildrenForPrimitive(String), - #[error("cannot provide entry for object with name: ({0})")] - FoundEntryForObject(String), - #[error("cannot provide entry for primitive with name: ({0})")] - FoundEntryForPrimitive(String), - #[error("func error: {0}")] - Func(#[from] FuncError), - #[error("func argument error: {0}")] - FuncArgument(#[from] FuncArgumentError), - #[error("func binding error: {0}")] - FuncBinding(#[from] FuncBindingError), - #[error("func binding return value error: {0}")] - FuncBindingReturnValue(#[from] FuncBindingReturnValueError), - #[error("func binding return value not found {0}")] - FuncBindingReturnValueNotFound(FuncBindingReturnValueId), - #[error("history event error: {0}")] - HistoryEvent(#[from] HistoryEventError), - #[error("internal provider error: {0}")] - InternalProvider(#[from] InternalProviderError), - #[error("must provide valid schema variant, found unset schema variant id")] - InvalidSchemaVariant, - #[error("leaf function response type ({0}) must match leaf kind ({0})")] - LeafFunctionMismatch(FuncBackendResponseType, LeafKind), - #[error("leaf function ({0}) must be JsAttribute")] - LeafFunctionMustBeJsAttribute(FuncId), - #[error("link not found in doc links map for doc link ref: {0}")] - LinkNotFoundForDocLinkRef(String), - #[error("must provide children for object with name: ({0})")] - MissingChildrenForObject(String), - #[error("must provide 
entry for array with name: ({0})")] - MissingEntryForArray(String), - #[error("missing a func in attribute update: {0} not found")] - MissingFunc(String), - #[error("Schema is missing for SchemaVariant {0}")] - MissingSchema(SchemaVariantId), - #[error("cannot use doc link and doc link ref for prop definition name: ({0})")] - MultipleDocLinksProvided(String), - #[error("nats txn error: {0}")] - Nats(#[from] NatsError), - #[error("schema variant not found: {0}")] - NotFound(SchemaVariantId), - #[error("parent prop not found for prop id: {0}")] - ParentPropNotFound(PropId), - #[error("pg error: {0}")] - Pg(#[from] PgError), - #[error("prop error: {0}")] - Prop(#[from] PropError), - /// This variant indicates that a [`Prop`](crate::Prop) or [`PropId`](crate::Prop) was not - /// found. However, it does not _describe_ the attempt to locate the object in question. The - /// "json pointer" piece is purely meant to help describe the location. - #[error("prop not found corresponding to the following json pointer: {0}")] - PropNotFound(&'static str), - #[error("cannot find prop at path {1} for SchemaVariant {0} and Visibility {2:?}")] - PropNotFoundAtPath(SchemaVariantId, String, Visibility), - #[error("prop not found in cache for name ({0}) and parent prop id ({1})")] - PropNotFoundInCache(String, PropId), - #[error("reconciliation prototype: {0}")] - ReconciliationPrototype(#[from] ReconciliationPrototypeError), - #[error("schema error: {0}")] - Schema(#[from] Box), - #[error("schema variant definition error")] - SchemaVariantDefinition(#[from] SchemaVariantDefinitionError), - #[error("error serializing/deserializing json: {0}")] - SerdeJson(#[from] serde_json::Error), - #[error("socket error: {0}")] - Socket(#[from] SocketError), - #[error("standard model error: {0}")] - StandardModel(#[from] StandardModelError), - #[error("std error: {0}")] - Std(#[from] Box), - #[error("transactions error: {0}")] - Transactions(#[from] TransactionsError), - #[error("validation prototype 
error: {0}")] - ValidationPrototype(#[from] ValidationPrototypeError), - #[error("ws event error: {0}")] - WsEvent(#[from] WsEventError), -} +// const ALL_FUNCS: &str = include_str!("../queries/schema_variant/all_related_funcs.sql"); +// const ALL_PROPS: &str = include_str!("../queries/schema_variant/all_props.sql"); +// const FIND_ROOT_PROP: &str = include_str!("../queries/schema_variant/find_root_prop.sql"); +// const FIND_LEAF_ITEM_PROP: &str = include_str!("../queries/schema_variant/find_leaf_item_prop.sql"); +// const FIND_ROOT_CHILD_IMPLICIT_INTERNAL_PROVIDER: &str = +// include_str!("../queries/schema_variant/find_root_child_implicit_internal_provider.sql"); +// const LIST_ROOT_SI_CHILD_PROPS: &str = +// include_str!("../queries/schema_variant/list_root_si_child_props.sql"); +// const SECRET_DEFINING_SCHEMA_VARIANTS: &str = +// include_str!("../queries/schema_variant/secret_defining_schema_variants.sql"); -pub type SchemaVariantResult = Result; +pub const SCHEMA_VARIANT_VERSION: SchemaVariantContentDiscriminants = + SchemaVariantContentDiscriminants::V1; -pk!(SchemaVariantPk); pk!(SchemaVariantId); +#[derive(Debug, PartialEq)] +pub struct SchemaVariantGraphNode { + id: SchemaVariantId, + content_address: ContentAddress, + content: SchemaVariantContentV1, +} + +#[derive(EnumDiscriminants, Serialize, Deserialize, PartialEq)] +#[serde(tag = "version")] +pub enum SchemaVariantContent { + V1(SchemaVariantContentV1), +} + +#[derive(Debug, Clone, Deserialize, Serialize, PartialEq)] +pub struct SchemaVariantContentV1 { + #[serde(flatten)] + pub timestamp: Timestamp, + pub ui_hidden: bool, + pub name: String, + /// The [`RootProp`](crate::RootProp) for [`self`](Self). 
+ pub root_prop_id: Option, + // pub schema_variant_definition_id: Option, + pub link: Option, + pub finalized_once: bool, +} + #[derive(Deserialize, Serialize, Debug, Clone, PartialEq, Eq)] pub struct SchemaVariant { - pk: SchemaVariantPk, id: SchemaVariantId, #[serde(flatten)] - tenancy: Tenancy, - #[serde(flatten)] timestamp: Timestamp, - #[serde(flatten)] - visibility: Visibility, - ui_hidden: bool, default_color: Option, name: String, /// The [`RootProp`](crate::RootProp) for [`self`](Self). root_prop_id: Option, - schema_variant_definition_id: Option, + // schema_variant_definition_id: Option, link: Option, - // NOTE(nick): we may want to replace this with a better solution. We use this to ensure - // components are not created unless the variant has been finalized at least once. finalized_once: bool, } -impl_standard_model! { - model: SchemaVariant, - pk: SchemaVariantPk, - id: SchemaVariantId, - table_name: "schema_variants", - history_event_label_base: "schema_variant", - history_event_message_name: "Schema Variant" -} - -impl SchemaVariant { - /// Create a [`SchemaVariant`](Self) with a [`RootProp`](crate::schema::RootProp). - #[instrument(skip_all)] - pub async fn new( - ctx: &DalContext, - schema_id: SchemaId, - name: impl AsRef, - ) -> SchemaVariantResult<(Self, RootProp)> { - let name = name.as_ref(); - let row = ctx - .txns() - .await? 
- .pg() - .query_one( - "SELECT object FROM schema_variant_create_v1($1, $2, $3)", - &[ctx.tenancy(), ctx.visibility(), &name], - ) - .await?; - let mut object: SchemaVariant = standard_model::finish_create_from_row(ctx, row).await?; - let root_prop = object.create_and_set_root_prop(ctx, schema_id).await?; - object.set_schema(ctx, &schema_id).await?; - - let (identity_func, identity_func_binding, identity_func_binding_return_value) = - Func::identity_with_binding_and_return_value(ctx).await?; - - // all nodes can be turned into frames therefore, they will need a frame input socket - // the UI itself will determine if this socket is available to be connected - let (_frame_internal_provider, _input_socket) = InternalProvider::new_explicit_with_socket( - ctx, - *object.id(), - "Frame", - *identity_func.id(), - *identity_func_binding.id(), - *identity_func_binding_return_value.id(), - SocketArity::Many, - true, - ) - .await?; - - let (_output_provider, _output_socket) = ExternalProvider::new_with_socket( - ctx, - schema_id, - *object.id(), - "Frame", - None, - *identity_func.id(), - *identity_func_binding.id(), - *identity_func_binding_return_value.id(), - SocketArity::One, - true, - ) - .await?; - - Ok((object, root_prop)) - } - - pub async fn is_builtin(&self, ctx: &DalContext) -> SchemaVariantResult { - let row = ctx - .txns() - .await? - .pg() - .query_opt( - "SELECT id FROM schema_variants WHERE id = $1 and tenancy_workspace_pk = $2 LIMIT 1", - &[self.id(), &WorkspacePk::NONE], - ) - .await?; - - Ok(row.is_some()) - } - - /// This _idempotent_ function "finalizes" a [`SchemaVariant`]. - /// - /// Once a [`SchemaVariant`] has had all of its [`Props`](crate::Prop) created, there are a few - /// things that need to happen before it is usable: - /// - /// * Create the default [`AttributePrototypes`](crate::AttributePrototype) and - /// [`AttributeValues`](crate::AttributeValue). 
- /// * Create the _internally consuming_ [`InternalProviders`](crate::InternalProvider) - /// corresponding to every [`Prop`](crate::Prop) in the [`SchemaVariant`] that is not a - /// descendant of an Array or a Map. - /// - /// This method **MUST** be called once all the [`Props`](Prop) have been created for the - /// [`SchemaVariant`]. It can be called multiple times while [`Props`](Prop) are being created, - /// but it must be called once after all [`Props`](Prop) have been created. - pub async fn finalize( - &mut self, - ctx: &DalContext, - component_type: Option, - ) -> SchemaVariantResult<()> { - let total_start = std::time::Instant::now(); - - Self::create_default_prototypes_and_values(ctx, self.id).await?; - Self::create_implicit_internal_providers(ctx, self.id).await?; - if !self.finalized_once() { - self.set_finalized_once(ctx, true).await?; - } - - // Default to the standard "component" component type. - let component_type = match component_type { - Some(component_type) => component_type, - None => ComponentType::Component, - }; - - // Find props that we need to set defaults on for _all_ schema variants. - // FIXME(nick): use the enum and create an appropriate query. - let mut maybe_type_prop_id = None; - let mut maybe_protected_prop_id = None; - for root_si_child_prop in Self::list_root_si_child_props(ctx, self.id).await? { - if root_si_child_prop.name() == "type" { - maybe_type_prop_id = Some(*root_si_child_prop.id()) - } else if root_si_child_prop.name() == "protected" { - maybe_protected_prop_id = Some(*root_si_child_prop.id()) - } - } - let type_prop_id = - maybe_type_prop_id.ok_or(SchemaVariantError::PropNotFound("/root/si/type"))?; - let protected_prop_id = maybe_protected_prop_id - .ok_or(SchemaVariantError::PropNotFound("/root/si/protected"))?; - - // Set the default type of the schema variant. 
- let attribute_read_context = AttributeReadContext::default_with_prop(type_prop_id); - let attribute_value = AttributeValue::find_for_context(ctx, attribute_read_context) - .await? - .ok_or_else(|| { - SchemaVariantError::AttributeValueNotFoundForContext(attribute_read_context.into()) - })?; - let parent_attribute_value = attribute_value - .parent_attribute_value(ctx) - .await? - .ok_or_else(|| { - SchemaVariantError::AttributeValueDoesNotHaveParent(*attribute_value.id()) - })?; - let context = AttributeContextBuilder::from(attribute_read_context).to_context()?; - AttributeValue::update_for_context( - ctx, - *attribute_value.id(), - Some(*parent_attribute_value.id()), - context, - Some(serde_json::to_value(component_type)?), - None, - ) - .await?; - - // Ensure _all_ schema variants are not protected by default. - let attribute_read_context = AttributeReadContext::default_with_prop(protected_prop_id); - let attribute_value = AttributeValue::find_for_context(ctx, attribute_read_context) - .await? - .ok_or_else(|| { - SchemaVariantError::AttributeValueNotFoundForContext(attribute_read_context.into()) - })?; - let parent_attribute_value = attribute_value - .parent_attribute_value(ctx) - .await? - .ok_or_else(|| { - SchemaVariantError::AttributeValueDoesNotHaveParent(*attribute_value.id()) - })?; - let context = AttributeContextBuilder::from(attribute_read_context).to_context()?; - AttributeValue::update_for_context( - ctx, - *attribute_value.id(), - Some(*parent_attribute_value.id()), - context, - Some(serde_json::json![false]), - None, - ) - .await?; - - debug!("finalizing {:?} took {:?}", self.id, total_start.elapsed()); - Ok(()) - } - - /// Create the default [`AttributePrototypes`](crate::AttributePrototype) and - /// [`AttributeValues`](crate::AttributeValue) for the [`Props`](Prop) of the - /// [`SchemaVariant`]. 
- /// - /// This method is idempotent, and may be safely called multiple times before - /// [`SchemaVariant.finalize(ctx)`](SchemaVariant#finalize()) is called. - pub async fn create_default_prototypes_and_values( - ctx: &DalContext, - schema_variant_id: SchemaVariantId, - ) -> SchemaVariantResult<()> { - let root_prop = match Self::find_root_prop(ctx, schema_variant_id).await? { - Some(root_prop) => root_prop, - None => return Ok(()), - }; - - Ok(Prop::create_default_prototypes_and_values(ctx, *root_prop.id()).await?) - } - - /// Creates _internally consuming_ [`InternalProviders`](crate::InternalProvider) corresponding - /// to every [`Prop`](crate::Prop) in the [`SchemaVariant`] that is not a descendant of an array - /// or a map. - async fn create_implicit_internal_providers( - ctx: &DalContext, - schema_variant_id: SchemaVariantId, - ) -> SchemaVariantResult<()> { - // If no props have been created for the schema variant, there are no internal providers - // to create. - let root_prop = match Self::find_root_prop(ctx, schema_variant_id).await? { - Some(root_prop) => root_prop, - None => return Ok(()), - }; - - let mut work_queue = vec![root_prop]; - - while let Some(work) = work_queue.pop() { - let maybe_existing_implicit_internal_provider = - InternalProvider::find_for_prop(ctx, *work.id()).await?; - if maybe_existing_implicit_internal_provider.is_none() { - InternalProvider::new_implicit(ctx, *work.id(), SchemaVariantId::NONE).await?; - } - - // Only check for child props if the current prop is of kind object. 
- if work.kind() == &PropKind::Object { - let child_props = work.child_props(ctx).await?; - if !child_props.is_empty() { - work_queue.extend(child_props); - } - } - } - - Ok(()) - } - - standard_model_accessor!(default_color, Option, SchemaVariantResult); - standard_model_accessor!(ui_hidden, bool, SchemaVariantResult); - standard_model_accessor!(name, String, SchemaVariantResult); - standard_model_accessor!(root_prop_id, Option, SchemaVariantResult); - standard_model_accessor!(link, Option, SchemaVariantResult); - standard_model_accessor!(finalized_once, bool, SchemaVariantResult); - standard_model_accessor!( - schema_variant_definition_id, - Option, - SchemaVariantResult - ); - - pub async fn color(&self, ctx: &DalContext) -> SchemaVariantResult> { - if let Some(color) = self.default_color() { - return Ok(Some(color.to_owned())); - } - - let attribute_value = Component::find_si_child_attribute_value( - ctx, - ComponentId::NONE, - self.id, - SiPropChild::Color, - ) - .await - .map_err(Box::new)?; - let func_binding_return_value = - FuncBindingReturnValue::get_by_id(ctx, &attribute_value.func_binding_return_value_id()) - .await? - .ok_or_else(|| { - SchemaVariantError::FuncBindingReturnValueNotFound( - attribute_value.func_binding_return_value_id(), - ) - })?; - - let color = func_binding_return_value - .value() - .cloned() - .map(serde_json::from_value) - .transpose()?; - - if let Some(color) = color.clone() { - self.clone().set_default_color(ctx, Some(color)).await?; - } - - Ok(color) - } - - pub async fn set_color(&mut self, ctx: &DalContext, color: String) -> SchemaVariantResult<()> { - self.set_default_color(ctx, Some(color.clone())).await?; - - let attribute_value = Component::find_si_child_attribute_value( - ctx, - ComponentId::NONE, - self.id, - SiPropChild::Color, - ) - .await - .map_err(Box::new)?; - let prop = Prop::get_by_id(ctx, &attribute_value.context.prop_id()) - .await? 
- .ok_or(PropError::NotFound( - attribute_value.context.prop_id(), - *ctx.visibility(), - ))?; - prop.set_default_value(ctx, color).await?; - Ok(()) - } - - standard_model_belongs_to!( - lookup_fn: schema, - set_fn: set_schema, - unset_fn: unset_schema, - table: "schema_variant_belongs_to_schema", - model_table: "schemas", - belongs_to_id: SchemaId, - returns: Schema, - result: SchemaVariantResult, - ); - - standard_model_many_to_many!( - lookup_fn: sockets, - associate_fn: add_socket, - disassociate_fn: remove_socket, - table_name: "socket_many_to_many_schema_variants", - left_table: "sockets", - left_id: SocketId, - right_table: "schema_variants", - right_id: SchemaId, - which_table_is_this: "right", - returns: Socket, - result: SchemaVariantResult, - ); - - /// List all direct child [`Props`](crate::Prop) of the [`Prop`](crate::Prop) corresponding - /// to "/root/si". - #[instrument(skip_all)] - pub async fn list_root_si_child_props( - ctx: &DalContext, - schema_variant_id: SchemaVariantId, - ) -> SchemaVariantResult> { - let rows = ctx - .txns() - .await? - .pg() - .query( - LIST_ROOT_SI_CHILD_PROPS, - &[ctx.tenancy(), ctx.visibility(), &schema_variant_id], - ) - .await?; - Ok(objects_from_rows(rows)?) - } - - /// Find all [`Props`](crate::Prop) for a given [`SchemaVariantId`](SchemaVariant). - #[instrument(skip_all)] - pub async fn all_props( - ctx: &DalContext, - schema_variant_id: SchemaVariantId, - ) -> SchemaVariantResult> { - let rows = ctx - .txns() - .await? - .pg() - .query( - ALL_PROPS, - &[ctx.tenancy(), ctx.visibility(), &schema_variant_id], - ) - .await?; - Ok(objects_from_rows(rows)?) - } - - #[instrument(skip_all)] - pub async fn list_secret_defining(ctx: &DalContext) -> SchemaVariantResult> { - let rows = ctx - .txns() - .await? - .pg() - .query( - SECRET_DEFINING_SCHEMA_VARIANTS, - &[ctx.tenancy(), ctx.visibility()], - ) - .await?; - Ok(objects_from_rows(rows)?) 
- } - - /// Find all [`Func`](crate::Func) objects connected to this schema variant in any way. Only - /// finds funcs connected at the schema variant context, ignoring any funcs connected to - /// directly to components. Ignores any functions that have no code (these are typically - /// intrinsics) - #[instrument(skip_all)] - pub async fn all_funcs( - ctx: &DalContext, - schema_variant_id: SchemaVariantId, - ) -> SchemaVariantResult> { - let rows = ctx - .txns() - .await? - .pg() - .query( - ALL_FUNCS, - &[ctx.tenancy(), ctx.visibility(), &schema_variant_id], - ) - .await?; - - Ok(objects_from_rows(rows)?) - } - - pub async fn upsert_leaf_function( - ctx: &DalContext, - schema_variant_id: SchemaVariantId, - component_id: Option, - leaf_kind: LeafKind, - input_locations: &[LeafInputLocation], - func: &Func, - ) -> SchemaVariantResult { - let leaf_prop = - SchemaVariant::find_leaf_item_prop(ctx, schema_variant_id, leaf_kind).await?; - - let context = match component_id { - Some(component_id) => AttributeContextBuilder::new() - .set_prop_id(*leaf_prop.id()) - .set_component_id(component_id) - .to_context()?, - None => AttributeContextBuilder::new() - .set_prop_id(*leaf_prop.id()) - .to_context()?, - }; - - let key = Some(func.name().to_string()); - let mut existing_args = FuncArgument::list_for_func(ctx, *func.id()).await?; - let mut inputs = vec![]; - for location in input_locations { - let arg_name = location.arg_name(); - let arg = match existing_args.iter().find(|arg| arg.name() == arg_name) { - Some(existing_arg) => existing_arg.clone(), - None => { - FuncArgument::new(ctx, arg_name, location.arg_kind(), None, *func.id()).await? - } - }; - - inputs.push(LeafInput { - location: *location, - func_argument_id: *arg.id(), - }); - } - - for mut existing_arg in existing_args.drain(..) { - if !inputs.iter().any( - |&LeafInput { - func_argument_id, .. 
- }| func_argument_id == *existing_arg.id(), - ) { - existing_arg.delete_by_id(ctx).await?; - } - } - - Ok( - match AttributePrototype::find_for_context_and_key(ctx, context, &key) - .await? - .pop() - { - Some(existing_proto) => { - let mut apas = AttributePrototypeArgument::list_for_attribute_prototype( - ctx, - *existing_proto.id(), - ) - .await?; - - for input in &inputs { - if !apas - .iter() - .any(|apa| apa.func_argument_id() == input.func_argument_id) - { - let input_internal_provider = - Self::find_root_child_implicit_internal_provider( - ctx, - schema_variant_id, - input.location.into(), - ) - .await?; - - AttributePrototypeArgument::new_for_intra_component( - ctx, - *existing_proto.id(), - input.func_argument_id, - *input_internal_provider.id(), - ) - .await?; - } - } - - for mut apa in apas.drain(..) { - if !inputs.iter().any( - |&LeafInput { - func_argument_id, .. - }| { - func_argument_id == apa.func_argument_id() - }, - ) { - apa.delete_by_id(ctx).await?; - } - } - - existing_proto - } - None => { - let (_, new_proto) = SchemaVariant::add_leaf( - ctx, - *func.id(), - schema_variant_id, - component_id, - leaf_kind, - inputs, - ) - .await?; - - new_proto - } - }, - ) - } - - /// This method finds all the functions for a particular - /// ['LeafKind'](crate::schema::variant::leaves::LeafKind) for this SchemaVariant. For example, - /// it can find all Qualification functions for the variant. 
- pub async fn find_leaf_item_functions( - ctx: &DalContext, - schema_variant_id: SchemaVariantId, - leaf_kind: LeafKind, - ) -> SchemaVariantResult> { - let leaf_item_prop = Self::find_leaf_item_prop(ctx, schema_variant_id, leaf_kind).await?; - let backend_response_type: FuncBackendResponseType = leaf_kind.into(); - - let context = AttributeContextBuilder::new() - .set_prop_id(*leaf_item_prop.id()) - .to_context()?; - - Ok( - AttributePrototype::list_prototype_funcs_by_context_and_backend_response_type( - ctx, - context, - backend_response_type, - ) - .await?, - ) - } - - /// This method finds a [`leaf`](crate::schema::variant::leaves)'s entry - /// [`Prop`](crate::Prop) given a [`LeafKind`](crate::schema::variant::leaves::LeafKind). - pub async fn find_leaf_item_prop( - ctx: &DalContext, - schema_variant_id: SchemaVariantId, - leaf_kind: LeafKind, - ) -> SchemaVariantResult { - let (leaf_map_prop_name, leaf_item_prop_name) = leaf_kind.prop_names(); - let row = ctx - .txns() - .await? - .pg() - .query_one( - FIND_LEAF_ITEM_PROP, - &[ - ctx.tenancy(), - ctx.visibility(), - &schema_variant_id, - &leaf_map_prop_name, - &leaf_item_prop_name, - ], - ) - .await?; - Ok(object_from_row(row)?) - } - - /// Find the implicit [`InternalProvider`](crate::InternalProvider) corresponding to a provided, - /// [`direct child`](crate::RootPropChild) of [`RootProp`](crate::RootProp). - pub async fn find_root_child_implicit_internal_provider( - ctx: &DalContext, - schema_variant_id: SchemaVariantId, - root_prop_child: RootPropChild, - ) -> SchemaVariantResult { - let row = ctx - .txns() - .await? - .pg() - .query_one( - FIND_ROOT_CHILD_IMPLICIT_INTERNAL_PROVIDER, - &[ - ctx.tenancy(), - ctx.visibility(), - &schema_variant_id, - &root_prop_child.as_str(), - ], - ) - .await?; - Ok(object_from_row(row)?) - } - - /// Call [`Self::find_root_prop`] with the [`SchemaVariantId`](SchemaVariant) off - /// [`self`](SchemaVariant). 
- pub async fn root_prop(&self, ctx: &DalContext) -> SchemaVariantResult> { - Self::find_root_prop(ctx, self.id).await - } - - /// Find the [`Prop`](crate::Prop) corresponding to "/root" for a given - /// [`SchemaVariantId`](SchemaVariant). - pub async fn find_root_prop( - ctx: &DalContext, - schema_variant_id: SchemaVariantId, - ) -> SchemaVariantResult> { - let maybe_row = ctx - .txns() - .await? - .pg() - .query_opt( - FIND_ROOT_PROP, - &[ctx.tenancy(), ctx.visibility(), &schema_variant_id], - ) - .await?; - Ok(option_object_from_row(maybe_row)?) - } - - /// Find the [`SchemaVariant`] for a given [`PropId`](crate::Prop) that resides _anywhere_ in a - /// [`Prop`](crate::Prop) tree. - /// - /// For instance, if you have a [`PropId`](crate::Prop) corresponding to "/root/domain/poop" - /// and want to know what [`SchemaVariant`]'s [`Prop`](crate::Prop) tree it resides in, use this - /// method to find out. - pub async fn find_for_prop( - ctx: &DalContext, - prop_id: PropId, - ) -> SchemaVariantResult> { - // FIXME(nick): this is expensive and should be one query. Please WON'T SOMEBODY THINK OF - // THE CPU AND THE DATABASE??? OHHHHHHH THE HUMANITY!!!!!!! Oh well, anyway. - if let Some(root_prop) = Prop::find_root_prop_for_prop(ctx, prop_id).await? { - for schema_variant in Self::list(ctx).await? { - if let Some(populated_root_prop_id) = schema_variant.root_prop_id { - if *root_prop.id() == populated_root_prop_id { - return Ok(Some(schema_variant)); - } - } - } - } - Ok(None) - } - - /// Calls [`Self::find_prop_in_tree`] using the ID off of [`self`](SchemaVariant). - pub async fn find_prop(&self, ctx: &DalContext, path: &[&str]) -> SchemaVariantResult { - Self::find_prop_in_tree(ctx, self.id, path).await - } - - /// Find the [`Prop`] in a tree underneath our [`RootProp`] with a given path. 
- pub async fn find_prop_in_tree( - ctx: &DalContext, - schema_variant_id: SchemaVariantId, - path: &[&str], - ) -> SchemaVariantResult { - match Prop::find_prop_by_path(ctx, schema_variant_id, &PropPath::new(path)).await { - Ok(prop) => Ok(prop), - Err(PropError::NotFoundAtPath(path, visiblity)) => Err( - SchemaVariantError::PropNotFoundAtPath(schema_variant_id, path, visiblity), - ), - Err(err) => Err(err)?, +impl SchemaVariantGraphNode { + pub fn assemble( + id: impl Into, + content_hash: ContentHash, + content: SchemaVariantContentV1, + ) -> Self { + Self { + id: id.into(), + content_address: ContentAddress::Schema(content_hash), + content, } } } + +// impl SchemaVariant { +// pub async fn is_builtin(&self, ctx: &DalContext) -> SchemaVariantResult { +// let row = ctx +// .txns() +// .await? +// .pg() +// .query_opt( +// "SELECT id FROM schema_variants WHERE id = $1 and tenancy_workspace_pk = $2 LIMIT 1", +// &[self.id(), &WorkspacePk::NONE], +// ) +// .await?; + +// Ok(row.is_some()) +// } + +// /// This _idempotent_ function "finalizes" a [`SchemaVariant`]. +// /// +// /// Once a [`SchemaVariant`] has had all of its [`Props`](crate::Prop) created, there are a few +// /// things that need to happen before it is usable: +// /// +// /// * Create the default [`AttributePrototypes`](crate::AttributePrototype) and +// /// [`AttributeValues`](crate::AttributeValue). +// /// * Create the _internally consuming_ [`InternalProviders`](crate::InternalProvider) +// /// corresponding to every [`Prop`](crate::Prop) in the [`SchemaVariant`] that is not a +// /// descendant of an Array or a Map. +// /// +// /// This method **MUST** be called once all the [`Props`](Prop) have been created for the +// /// [`SchemaVariant`]. It can be called multiple times while [`Props`](Prop) are being created, +// /// but it must be called once after all [`Props`](Prop) have been created. 
+// pub async fn finalize( +// &mut self, +// ctx: &DalContext, +// component_type: Option, +// ) -> SchemaVariantResult<()> { +// let total_start = std::time::Instant::now(); + +// Self::create_default_prototypes_and_values(ctx, self.id).await?; +// Self::create_implicit_internal_providers(ctx, self.id).await?; +// if !self.finalized_once() { +// self.set_finalized_once(ctx, true).await?; +// } + +// // Default to the standard "component" component type. +// let component_type = match component_type { +// Some(component_type) => component_type, +// None => ComponentType::Component, +// }; + +// // Find props that we need to set defaults on for _all_ schema variants. +// // FIXME(nick): use the enum and create an appropriate query. +// let mut maybe_type_prop_id = None; +// let mut maybe_protected_prop_id = None; +// for root_si_child_prop in Self::list_root_si_child_props(ctx, self.id).await? { +// if root_si_child_prop.name() == "type" { +// maybe_type_prop_id = Some(*root_si_child_prop.id()) +// } else if root_si_child_prop.name() == "protected" { +// maybe_protected_prop_id = Some(*root_si_child_prop.id()) +// } +// } +// let type_prop_id = +// maybe_type_prop_id.ok_or(SchemaVariantError::PropNotFound("/root/si/type"))?; +// let protected_prop_id = maybe_protected_prop_id +// .ok_or(SchemaVariantError::PropNotFound("/root/si/protected"))?; + +// // Set the default type of the schema variant. +// let attribute_read_context = AttributeReadContext::default_with_prop(type_prop_id); +// let attribute_value = AttributeValue::find_for_context(ctx, attribute_read_context) +// .await? +// .ok_or_else(|| { +// SchemaVariantError::AttributeValueNotFoundForContext(attribute_read_context.into()) +// })?; +// let parent_attribute_value = attribute_value +// .parent_attribute_value(ctx) +// .await? 
+// .ok_or_else(|| { +// SchemaVariantError::AttributeValueDoesNotHaveParent(*attribute_value.id()) +// })?; +// let context = AttributeContextBuilder::from(attribute_read_context).to_context()?; +// AttributeValue::update_for_context( +// ctx, +// *attribute_value.id(), +// Some(*parent_attribute_value.id()), +// context, +// Some(serde_json::to_value(component_type)?), +// None, +// ) +// .await?; + +// // Ensure _all_ schema variants are not protected by default. +// let attribute_read_context = AttributeReadContext::default_with_prop(protected_prop_id); +// let attribute_value = AttributeValue::find_for_context(ctx, attribute_read_context) +// .await? +// .ok_or_else(|| { +// SchemaVariantError::AttributeValueNotFoundForContext(attribute_read_context.into()) +// })?; +// let parent_attribute_value = attribute_value +// .parent_attribute_value(ctx) +// .await? +// .ok_or_else(|| { +// SchemaVariantError::AttributeValueDoesNotHaveParent(*attribute_value.id()) +// })?; +// let context = AttributeContextBuilder::from(attribute_read_context).to_context()?; +// AttributeValue::update_for_context( +// ctx, +// *attribute_value.id(), +// Some(*parent_attribute_value.id()), +// context, +// Some(serde_json::json![false]), +// None, +// ) +// .await?; + +// debug!("finalizing {:?} took {:?}", self.id, total_start.elapsed()); +// Ok(()) +// } + +// /// Create the default [`AttributePrototypes`](crate::AttributePrototype) and +// /// [`AttributeValues`](crate::AttributeValue) for the [`Props`](Prop) of the +// /// [`SchemaVariant`]. +// /// +// /// This method is idempotent, and may be safely called multiple times before +// /// [`SchemaVariant.finalize(ctx)`](SchemaVariant#finalize()) is called. +// pub async fn create_default_prototypes_and_values( +// ctx: &DalContext, +// schema_variant_id: SchemaVariantId, +// ) -> SchemaVariantResult<()> { +// let root_prop = match Self::find_root_prop(ctx, schema_variant_id).await? 
{ +// Some(root_prop) => root_prop, +// None => return Ok(()), +// }; + +// Ok(Prop::create_default_prototypes_and_values(ctx, *root_prop.id()).await?) +// } + +// /// Creates _internally consuming_ [`InternalProviders`](crate::InternalProvider) corresponding +// /// to every [`Prop`](crate::Prop) in the [`SchemaVariant`] that is not a descendant of an array +// /// or a map. +// async fn create_implicit_internal_providers( +// ctx: &DalContext, +// schema_variant_id: SchemaVariantId, +// ) -> SchemaVariantResult<()> { +// // If no props have been created for the schema variant, there are no internal providers +// // to create. +// let root_prop = match Self::find_root_prop(ctx, schema_variant_id).await? { +// Some(root_prop) => root_prop, +// None => return Ok(()), +// }; + +// let mut work_queue = vec![root_prop]; + +// while let Some(work) = work_queue.pop() { +// let maybe_existing_implicit_internal_provider = +// InternalProvider::find_for_prop(ctx, *work.id()).await?; +// if maybe_existing_implicit_internal_provider.is_none() { +// InternalProvider::new_implicit(ctx, *work.id(), SchemaVariantId::NONE).await?; +// } + +// // Only check for child props if the current prop is of kind object. 
+// if work.kind() == &PropKind::Object { +// let child_props = work.child_props(ctx).await?; +// if !child_props.is_empty() { +// work_queue.extend(child_props); +// } +// } +// } + +// Ok(()) +// } + +// standard_model_accessor!(default_color, Option, SchemaVariantResult); +// standard_model_accessor!(ui_hidden, bool, SchemaVariantResult); +// standard_model_accessor!(name, String, SchemaVariantResult); +// standard_model_accessor!(root_prop_id, Option, SchemaVariantResult); +// standard_model_accessor!(link, Option, SchemaVariantResult); +// standard_model_accessor!(finalized_once, bool, SchemaVariantResult); +// standard_model_accessor!( +// schema_variant_definition_id, +// Option, +// SchemaVariantResult +// ); + +// pub async fn color(&self, ctx: &DalContext) -> SchemaVariantResult> { +// if let Some(color) = self.default_color() { +// return Ok(Some(color.to_owned())); +// } + +// let attribute_value = Component::find_si_child_attribute_value( +// ctx, +// ComponentId::NONE, +// self.id, +// SiPropChild::Color, +// ) +// .await +// .map_err(Box::new)?; +// let func_binding_return_value = +// FuncBindingReturnValue::get_by_id(ctx, &attribute_value.func_binding_return_value_id()) +// .await? 
+// .ok_or_else(|| { +// SchemaVariantError::FuncBindingReturnValueNotFound( +// attribute_value.func_binding_return_value_id(), +// ) +// })?; + +// let color = func_binding_return_value +// .value() +// .cloned() +// .map(serde_json::from_value) +// .transpose()?; + +// if let Some(color) = color.clone() { +// self.clone().set_default_color(ctx, Some(color)).await?; +// } + +// Ok(color) +// } + +// pub async fn set_color(&mut self, ctx: &DalContext, color: String) -> SchemaVariantResult<()> { +// self.set_default_color(ctx, Some(color.clone())).await?; +// +// let attribute_value = Component::find_si_child_attribute_value( +// ctx, +// ComponentId::NONE, +// self.id, +// SiPropChild::Color, +// ) +// .await +// .map_err(Box::new)?; +// let prop = Prop::get_by_id(ctx, &attribute_value.context.prop_id()) +// .await? +// .ok_or(PropError::NotFound( +// attribute_value.context.prop_id(), +// *ctx.visibility(), +// ))?; +// prop.set_default_value(ctx, color).await?; +// Ok(()) +// } + +// standard_model_belongs_to!( +// lookup_fn: schema, +// set_fn: set_schema, +// unset_fn: unset_schema, +// table: "schema_variant_belongs_to_schema", +// model_table: "schemas", +// belongs_to_id: SchemaId, +// returns: Schema, +// result: SchemaVariantResult, +// ); + +// standard_model_many_to_many!( +// lookup_fn: sockets, +// associate_fn: add_socket, +// disassociate_fn: remove_socket, +// table_name: "socket_many_to_many_schema_variants", +// left_table: "sockets", +// left_id: SocketId, +// right_table: "schema_variants", +// right_id: SchemaId, +// which_table_is_this: "right", +// returns: Socket, +// result: SchemaVariantResult, +// ); + +// /// List all direct child [`Props`](crate::Prop) of the [`Prop`](crate::Prop) corresponding +// /// to "/root/si". +// #[instrument(skip_all)] +// pub async fn list_root_si_child_props( +// ctx: &DalContext, +// schema_variant_id: SchemaVariantId, +// ) -> SchemaVariantResult> { +// let rows = ctx +// .txns() +// .await? 
+// .pg() +// .query( +// LIST_ROOT_SI_CHILD_PROPS, +// &[ctx.tenancy(), ctx.visibility(), &schema_variant_id], +// ) +// .await?; +// Ok(objects_from_rows(rows)?) +// } + +// /// Find all [`Props`](crate::Prop) for a given [`SchemaVariantId`](SchemaVariant). +// #[instrument(skip_all)] +// pub async fn all_props( +// ctx: &DalContext, +// schema_variant_id: SchemaVariantId, +// ) -> SchemaVariantResult> { +// let rows = ctx +// .txns() +// .await? +// .pg() +// .query( +// ALL_PROPS, +// &[ctx.tenancy(), ctx.visibility(), &schema_variant_id], +// ) +// .await?; +// Ok(objects_from_rows(rows)?) +// } + +// #[instrument(skip_all)] +// pub async fn list_secret_defining(ctx: &DalContext) -> SchemaVariantResult> { +// let rows = ctx +// .txns() +// .await? +// .pg() +// .query( +// SECRET_DEFINING_SCHEMA_VARIANTS, +// &[ctx.tenancy(), ctx.visibility()], +// ) +// .await?; +// Ok(objects_from_rows(rows)?) +// } + +// /// Find all [`Func`](crate::Func) objects connected to this schema variant in any way. Only +// /// finds funcs connected at the schema variant context, ignoring any funcs connected to +// /// directly to components. Ignores any functions that have no code (these are typically +// /// intrinsics) +// #[instrument(skip_all)] +// pub async fn all_funcs( +// ctx: &DalContext, +// schema_variant_id: SchemaVariantId, +// ) -> SchemaVariantResult> { +// let rows = ctx +// .txns() +// .await? +// .pg() +// .query( +// ALL_FUNCS, +// &[ctx.tenancy(), ctx.visibility(), &schema_variant_id], +// ) +// .await?; + +// Ok(objects_from_rows(rows)?) 
+// } + +// pub async fn upsert_leaf_function( +// ctx: &DalContext, +// schema_variant_id: SchemaVariantId, +// component_id: Option, +// leaf_kind: LeafKind, +// input_locations: &[LeafInputLocation], +// func: &Func, +// ) -> SchemaVariantResult { +// let leaf_prop = +// SchemaVariant::find_leaf_item_prop(ctx, schema_variant_id, leaf_kind).await?; + +// let context = match component_id { +// Some(component_id) => AttributeContextBuilder::new() +// .set_prop_id(*leaf_prop.id()) +// .set_component_id(component_id) +// .to_context()?, +// None => AttributeContextBuilder::new() +// .set_prop_id(*leaf_prop.id()) +// .to_context()?, +// }; + +// let key = Some(func.name().to_string()); +// let mut existing_args = FuncArgument::list_for_func(ctx, *func.id()).await?; +// let mut inputs = vec![]; +// for location in input_locations { +// let arg_name = location.arg_name(); +// let arg = match existing_args.iter().find(|arg| arg.name() == arg_name) { +// Some(existing_arg) => existing_arg.clone(), +// None => { +// FuncArgument::new(ctx, arg_name, location.arg_kind(), None, *func.id()).await? +// } +// }; + +// inputs.push(LeafInput { +// location: *location, +// func_argument_id: *arg.id(), +// }); +// } + +// for mut existing_arg in existing_args.drain(..) { +// if !inputs.iter().any( +// |&LeafInput { +// func_argument_id, .. +// }| func_argument_id == *existing_arg.id(), +// ) { +// existing_arg.delete_by_id(ctx).await?; +// } +// } + +// Ok( +// match AttributePrototype::find_for_context_and_key(ctx, context, &key) +// .await? 
+// .pop() +// { +// Some(existing_proto) => { +// let mut apas = AttributePrototypeArgument::list_for_attribute_prototype( +// ctx, +// *existing_proto.id(), +// ) +// .await?; + +// for input in &inputs { +// if !apas +// .iter() +// .any(|apa| apa.func_argument_id() == input.func_argument_id) +// { +// let input_internal_provider = +// Self::find_root_child_implicit_internal_provider( +// ctx, +// schema_variant_id, +// input.location.into(), +// ) +// .await?; + +// AttributePrototypeArgument::new_for_intra_component( +// ctx, +// *existing_proto.id(), +// input.func_argument_id, +// *input_internal_provider.id(), +// ) +// .await?; +// } +// } + +// for mut apa in apas.drain(..) { +// if !inputs.iter().any( +// |&LeafInput { +// func_argument_id, .. +// }| { +// func_argument_id == apa.func_argument_id() +// }, +// ) { +// apa.delete_by_id(ctx).await?; +// } +// } + +// existing_proto +// } +// None => { +// let (_, new_proto) = SchemaVariant::add_leaf( +// ctx, +// *func.id(), +// schema_variant_id, +// component_id, +// leaf_kind, +// inputs, +// ) +// .await?; + +// new_proto +// } +// }, +// ) +// } + +// /// This method finds all the functions for a particular +// /// ['LeafKind'](crate::schema::variant::leaves::LeafKind) for this SchemaVariant. For example, +// /// it can find all Qualification functions for the variant. 
+// pub async fn find_leaf_item_functions( +// ctx: &DalContext, +// schema_variant_id: SchemaVariantId, +// leaf_kind: LeafKind, +// ) -> SchemaVariantResult> { +// let leaf_item_prop = Self::find_leaf_item_prop(ctx, schema_variant_id, leaf_kind).await?; +// let backend_response_type: FuncBackendResponseType = leaf_kind.into(); + +// let context = AttributeContextBuilder::new() +// .set_prop_id(*leaf_item_prop.id()) +// .to_context()?; + +// Ok( +// AttributePrototype::list_prototype_funcs_by_context_and_backend_response_type( +// ctx, +// context, +// backend_response_type, +// ) +// .await?, +// ) +// } + +// /// This method finds a [`leaf`](crate::schema::variant::leaves)'s entry +// /// [`Prop`](crate::Prop) given a [`LeafKind`](crate::schema::variant::leaves::LeafKind). +// pub async fn find_leaf_item_prop( +// ctx: &DalContext, +// schema_variant_id: SchemaVariantId, +// leaf_kind: LeafKind, +// ) -> SchemaVariantResult { +// let (leaf_map_prop_name, leaf_item_prop_name) = leaf_kind.prop_names(); +// let row = ctx +// .txns() +// .await? +// .pg() +// .query_one( +// FIND_LEAF_ITEM_PROP, +// &[ +// ctx.tenancy(), +// ctx.visibility(), +// &schema_variant_id, +// &leaf_map_prop_name, +// &leaf_item_prop_name, +// ], +// ) +// .await?; +// Ok(object_from_row(row)?) +// } + +// /// Find the implicit [`InternalProvider`](crate::InternalProvider) corresponding to a provided, +// /// [`direct child`](crate::RootPropChild) of [`RootProp`](crate::RootProp). +// pub async fn find_root_child_implicit_internal_provider( +// ctx: &DalContext, +// schema_variant_id: SchemaVariantId, +// root_prop_child: RootPropChild, +// ) -> SchemaVariantResult { +// let row = ctx +// .txns() +// .await? +// .pg() +// .query_one( +// FIND_ROOT_CHILD_IMPLICIT_INTERNAL_PROVIDER, +// &[ +// ctx.tenancy(), +// ctx.visibility(), +// &schema_variant_id, +// &root_prop_child.as_str(), +// ], +// ) +// .await?; +// Ok(object_from_row(row)?) 
+// } + +// /// Call [`Self::find_root_prop`] with the [`SchemaVariantId`](SchemaVariant) off +// /// [`self`](SchemaVariant). +// pub async fn root_prop(&self, ctx: &DalContext) -> SchemaVariantResult> { +// Self::find_root_prop(ctx, self.id).await +// } + +// /// Find the [`Prop`](crate::Prop) corresponding to "/root" for a given +// /// [`SchemaVariantId`](SchemaVariant). +// pub async fn find_root_prop( +// ctx: &DalContext, +// schema_variant_id: SchemaVariantId, +// ) -> SchemaVariantResult> { +// let maybe_row = ctx +// .txns() +// .await? +// .pg() +// .query_opt( +// FIND_ROOT_PROP, +// &[ctx.tenancy(), ctx.visibility(), &schema_variant_id], +// ) +// .await?; +// Ok(option_object_from_row(maybe_row)?) +// } + +// /// Find the [`SchemaVariant`] for a given [`PropId`](crate::Prop) that resides _anywhere_ in a +// /// [`Prop`](crate::Prop) tree. +// /// +// /// For instance, if you have a [`PropId`](crate::Prop) corresponding to "/root/domain/poop" +// /// and want to know what [`SchemaVariant`]'s [`Prop`](crate::Prop) tree it resides in, use this +// /// method to find out. +// pub async fn find_for_prop( +// ctx: &DalContext, +// prop_id: PropId, +// ) -> SchemaVariantResult> { +// // FIXME(nick): this is expensive and should be one query. Please WON'T SOMEBODY THINK OF +// // THE CPU AND THE DATABASE??? OHHHHHHH THE HUMANITY!!!!!!! Oh well, anyway. +// if let Some(root_prop) = Prop::find_root_prop_for_prop(ctx, prop_id).await? { +// for schema_variant in Self::list(ctx).await? { +// if let Some(populated_root_prop_id) = schema_variant.root_prop_id { +// if *root_prop.id() == populated_root_prop_id { +// return Ok(Some(schema_variant)); +// } +// } +// } +// } +// Ok(None) +// } + +// /// Calls [`Self::find_prop_in_tree`] using the ID off of [`self`](SchemaVariant). 
+// pub async fn find_prop(&self, ctx: &DalContext, path: &[&str]) -> SchemaVariantResult { +// Self::find_prop_in_tree(ctx, self.id, path).await +// } + +// /// Find the [`Prop`] in a tree underneath our [`RootProp`] with a given path. +// pub async fn find_prop_in_tree( +// ctx: &DalContext, +// schema_variant_id: SchemaVariantId, +// path: &[&str], +// ) -> SchemaVariantResult { +// match Prop::find_prop_by_path(ctx, schema_variant_id, &PropPath::new(path)).await { +// Ok(prop) => Ok(prop), +// Err(PropError::NotFoundAtPath(path, visiblity)) => Err( +// SchemaVariantError::PropNotFoundAtPath(schema_variant_id, path, visiblity), +// ), +// Err(err) => Err(err)?, +// } +// } +// } diff --git a/lib/dal/src/schema/variant/definition.rs b/lib/dal/src/schema/variant/definition.rs index 0f9c1c5587..a913912200 100644 --- a/lib/dal/src/schema/variant/definition.rs +++ b/lib/dal/src/schema/variant/definition.rs @@ -8,12 +8,9 @@ use telemetry::prelude::*; use thiserror::Error; use url::ParseError; -use crate::pkg::{get_component_type, PkgError}; use crate::prop::PropPath; -use crate::schema::variant::{SchemaVariantError, SchemaVariantResult}; use crate::{ - component::ComponentKind, impl_standard_model, pk, property_editor::schema::WidgetKind, - standard_model, standard_model_accessor, Component, ComponentError, ComponentType, DalContext, + impl_standard_model, pk, standard_model, standard_model_accessor, ComponentType, DalContext, FuncId, HistoryEventError, NatsError, PgError, PropId, PropKind, Schema, SchemaId, SchemaVariant, SchemaVariantId, SocketArity, StandardModel, StandardModelError, Tenancy, Timestamp, TransactionsError, Visibility, @@ -27,8 +24,6 @@ use si_pkg::{ #[remain::sorted] #[derive(Error, Debug)] pub enum SchemaVariantDefinitionError { - #[error(transparent)] - Component(#[from] Box), #[error("Could not check for default variant: {0}")] CouldNotCheckForDefaultVariant(String), #[error("Could not get ui menu for schema: {0}")] diff --git 
a/lib/dal/src/schema/variant/leaves.rs b/lib/dal/src/schema/variant/leaves.rs index 8938fe13cf..da1e8cacac 100644 --- a/lib/dal/src/schema/variant/leaves.rs +++ b/lib/dal/src/schema/variant/leaves.rs @@ -5,16 +5,11 @@ use serde::{Deserialize, Serialize}; use strum::EnumIter; -use crate::func::argument::{FuncArgumentId, FuncArgumentKind}; -use crate::schema::variant::{SchemaVariantError, SchemaVariantResult}; -use crate::{ - AttributeContext, AttributePrototype, AttributePrototypeArgument, AttributeReadContext, - AttributeValue, AttributeValueError, ComponentId, DalContext, Func, FuncBackendKind, - FuncBackendResponseType, FuncError, FuncId, PropId, RootPropChild, SchemaVariant, - SchemaVariantId, StandardModel, -}; +use crate::{FuncBackendResponseType, StandardModel}; use si_pkg::{LeafInputLocation as PkgLeafInputLocation, LeafKind as PkgLeafKind}; +use crate::schema::variant::root_prop::RootPropChild; + /// This enum provides options for creating leaves underneath compatible subtrees of "/root" within /// a [`SchemaVariant`](crate::SchemaVariant). 
Each compatible subtree starts with a /// [`map`](crate::PropKind::Map) [`Prop`](crate::Prop) that can contain zero to many @@ -117,52 +112,52 @@ impl From for LeafInputLocation { } } -impl LeafInputLocation { - pub fn arg_name(&self) -> &'static str { - match self { - LeafInputLocation::Code => "code", - LeafInputLocation::Domain => "domain", - LeafInputLocation::Resource => "resource", - LeafInputLocation::DeletedAt => "deleted_at", - LeafInputLocation::Secrets => "secrets", - } - } +// impl LeafInputLocation { +// pub fn arg_name(&self) -> &'static str { +// match self { +// LeafInputLocation::Code => "code", +// LeafInputLocation::Domain => "domain", +// LeafInputLocation::Resource => "resource", +// LeafInputLocation::DeletedAt => "deleted_at", +// LeafInputLocation::Secrets => "secrets", +// } +// } - pub fn prop_path(&self) -> Vec<&'static str> { - vec!["root", self.arg_name()] - } +// pub fn prop_path(&self) -> Vec<&'static str> { +// vec!["root", self.arg_name()] +// } - pub fn maybe_from_arg_name(arg_name: &str) -> Option { - Some(match arg_name { - "domain" => LeafInputLocation::Domain, - "code" => LeafInputLocation::Code, - "resource" => LeafInputLocation::Resource, - "deleted_at" => LeafInputLocation::DeletedAt, - "secrets" => LeafInputLocation::Secrets, - _ => return None, - }) - } +// pub fn maybe_from_arg_name(arg_name: &str) -> Option { +// Some(match arg_name { +// "domain" => LeafInputLocation::Domain, +// "code" => LeafInputLocation::Code, +// "resource" => LeafInputLocation::Resource, +// "deleted_at" => LeafInputLocation::DeletedAt, +// "secrets" => LeafInputLocation::Secrets, +// _ => return None, +// }) +// } - pub fn arg_kind(&self) -> FuncArgumentKind { - match self { - LeafInputLocation::Code - | LeafInputLocation::Domain - | LeafInputLocation::Resource - | LeafInputLocation::Secrets => FuncArgumentKind::Object, - LeafInputLocation::DeletedAt => FuncArgumentKind::String, - } - } -} +// pub fn arg_kind(&self) -> FuncArgumentKind { +// 
match self { +// LeafInputLocation::Code +// | LeafInputLocation::Domain +// | LeafInputLocation::Resource +// | LeafInputLocation::Secrets => FuncArgumentKind::Object, +// LeafInputLocation::DeletedAt => FuncArgumentKind::String, +// } +// } +// } -/// This struct provides the metadata necessary to provide "inputs" to [`Funcs`](crate::Func) -/// on leaves. -#[derive(Clone, Copy, Debug)] -pub struct LeafInput { - /// The source location of the input. - pub location: LeafInputLocation, - /// The corresponding [`FuncArgumentId`](crate::FuncArgument) for the [`Func`](crate::Func). - pub func_argument_id: FuncArgumentId, -} +// /// This struct provides the metadata necessary to provide "inputs" to [`Funcs`](crate::Func) +// /// on leaves. +// #[derive(Clone, Copy, Debug)] +// pub struct LeafInput { +// /// The source location of the input. +// pub location: LeafInputLocation, +// /// The corresponding [`FuncArgumentId`](crate::FuncArgument) for the [`Func`](crate::Func). +// pub func_argument_id: FuncArgumentId, +// } impl LeafKind { /// Provides the names of the [`Map`](crate::PropKind::Map) and the child entry @@ -184,114 +179,114 @@ impl From for FuncBackendResponseType { } } -impl SchemaVariant { - /// Insert an [`object`](crate::PropKind::Object) entry into a "/root" subtree of - /// [`map`](crate::PropKind::Map) with a [`Func`](crate::Func) that matches the provided - /// [`LeafKind`] in order to populate the subtree entry. - /// - /// The [`PropId`](crate::Prop) for the child [`map`](crate::PropKind::Map) of "/root" - /// corresponding to the [`LeafKind`] is returned. 
- pub async fn add_leaf( - ctx: &DalContext, - func_id: FuncId, - schema_variant_id: SchemaVariantId, - component_id: Option, - leaf_kind: LeafKind, - inputs: Vec, - ) -> SchemaVariantResult<(PropId, AttributePrototype)> { - if schema_variant_id.is_none() { - return Err(SchemaVariantError::InvalidSchemaVariant); - } +// impl SchemaVariant { +// /// Insert an [`object`](crate::PropKind::Object) entry into a "/root" subtree of +// /// [`map`](crate::PropKind::Map) with a [`Func`](crate::Func) that matches the provided +// /// [`LeafKind`] in order to populate the subtree entry. +// /// +// /// The [`PropId`](crate::Prop) for the child [`map`](crate::PropKind::Map) of "/root" +// /// corresponding to the [`LeafKind`] is returned. +// pub async fn add_leaf( +// ctx: &DalContext, +// func_id: FuncId, +// schema_variant_id: SchemaVariantId, +// component_id: Option, +// leaf_kind: LeafKind, +// inputs: Vec, +// ) -> SchemaVariantResult<(PropId, AttributePrototype)> { +// if schema_variant_id.is_none() { +// return Err(SchemaVariantError::InvalidSchemaVariant); +// } - // Ensure the func matches what we need. - let func = Func::get_by_id(ctx, &func_id) - .await? - .ok_or(FuncError::NotFound(func_id))?; - if func.backend_kind() != &FuncBackendKind::JsAttribute { - return Err(SchemaVariantError::LeafFunctionMustBeJsAttribute( - *func.id(), - )); - } - if func.backend_response_type() != &leaf_kind.into() { - return Err(SchemaVariantError::LeafFunctionMismatch( - *func.backend_response_type(), - leaf_kind, - )); - } +// // Ensure the func matches what we need. +// let func = Func::get_by_id(ctx, &func_id) +// .await? 
+// .ok_or(FuncError::NotFound(func_id))?; +// if func.backend_kind() != &FuncBackendKind::JsAttribute { +// return Err(SchemaVariantError::LeafFunctionMustBeJsAttribute( +// *func.id(), +// )); +// } +// if func.backend_response_type() != &leaf_kind.into() { +// return Err(SchemaVariantError::LeafFunctionMismatch( +// *func.backend_response_type(), +// leaf_kind, +// )); +// } - // We only need to finalize once since we are adding a leaf to a known descendant of the - // root prop. - let mut schema_variant = SchemaVariant::get_by_id(ctx, &schema_variant_id) - .await? - .ok_or(SchemaVariantError::NotFound(schema_variant_id))?; - if !schema_variant.finalized_once() { - schema_variant.finalize(ctx, None).await?; - } +// // We only need to finalize once since we are adding a leaf to a known descendant of the +// // root prop. +// let mut schema_variant = SchemaVariant::get_by_id(ctx, &schema_variant_id) +// .await? +// .ok_or(SchemaVariantError::NotFound(schema_variant_id))?; +// if !schema_variant.finalized_once() { +// schema_variant.finalize(ctx, None).await?; +// } - // Assemble the values we need to insert an object into the map. - let item_prop = - SchemaVariant::find_leaf_item_prop(ctx, schema_variant_id, leaf_kind).await?; +// // Assemble the values we need to insert an object into the map. +// let item_prop = +// SchemaVariant::find_leaf_item_prop(ctx, schema_variant_id, leaf_kind).await?; - // NOTE(nick): we should consider getting the parent and the item at the same time. - let map_prop = item_prop - .parent_prop(ctx) - .await? - .ok_or_else(|| SchemaVariantError::ParentPropNotFound(*item_prop.id()))?; - let map_attribute_read_context = - AttributeReadContext::default_with_prop_and_component_id(*map_prop.id(), component_id); - let map_attribute_value = AttributeValue::find_for_context(ctx, map_attribute_read_context) - .await? 
- .ok_or(AttributeValueError::NotFoundForReadContext( - map_attribute_read_context, - ))?; - let insert_attribute_context = AttributeContext::builder() - .set_prop_id(*item_prop.id()) - .set_component_id(component_id.unwrap_or(ComponentId::NONE)) - .to_context()?; +// // NOTE(nick): we should consider getting the parent and the item at the same time. +// let map_prop = item_prop +// .parent_prop(ctx) +// .await? +// .ok_or_else(|| SchemaVariantError::ParentPropNotFound(*item_prop.id()))?; +// let map_attribute_read_context = +// AttributeReadContext::default_with_prop_and_component_id(*map_prop.id(), component_id); +// let map_attribute_value = AttributeValue::find_for_context(ctx, map_attribute_read_context) +// .await? +// .ok_or(AttributeValueError::NotFoundForReadContext( +// map_attribute_read_context, +// ))?; +// let insert_attribute_context = AttributeContext::builder() +// .set_prop_id(*item_prop.id()) +// .set_component_id(component_id.unwrap_or(ComponentId::NONE)) +// .to_context()?; - // Insert an item into the map and setup its function. The new entry is named after the func - // name since func names must be unique for a given tenancy and visibility. If that changes, - // then this will break. - let inserted_attribute_value_id = AttributeValue::insert_for_context( - ctx, - insert_attribute_context, - *map_attribute_value.id(), - Some(serde_json::json![{}]), - Some(func.name().to_string()), - ) - .await?; - let inserted_attribute_value = AttributeValue::get_by_id(ctx, &inserted_attribute_value_id) - .await? - .ok_or_else(|| { - AttributeValueError::NotFound(inserted_attribute_value_id, *ctx.visibility()) - })?; - let mut inserted_attribute_prototype = inserted_attribute_value - .attribute_prototype(ctx) - .await? - .ok_or(AttributeValueError::MissingAttributePrototype)?; - inserted_attribute_prototype - .set_func_id(ctx, func_id) - .await?; +// // Insert an item into the map and setup its function. 
The new entry is named after the func +// // name since func names must be unique for a given tenancy and visibility. If that changes, +// // then this will break. +// let inserted_attribute_value_id = AttributeValue::insert_for_context( +// ctx, +// insert_attribute_context, +// *map_attribute_value.id(), +// Some(serde_json::json![{}]), +// Some(func.name().to_string()), +// ) +// .await?; +// let inserted_attribute_value = AttributeValue::get_by_id(ctx, &inserted_attribute_value_id) +// .await? +// .ok_or_else(|| { +// AttributeValueError::NotFound(inserted_attribute_value_id, *ctx.visibility()) +// })?; +// let mut inserted_attribute_prototype = inserted_attribute_value +// .attribute_prototype(ctx) +// .await? +// .ok_or(AttributeValueError::MissingAttributePrototype)?; +// inserted_attribute_prototype +// .set_func_id(ctx, func_id) +// .await?; - for input in inputs { - let input_internal_provider = - SchemaVariant::find_root_child_implicit_internal_provider( - ctx, - schema_variant_id, - input.location.into(), - ) - .await?; - AttributePrototypeArgument::new_for_intra_component( - ctx, - *inserted_attribute_prototype.id(), - input.func_argument_id, - *input_internal_provider.id(), - ) - .await?; - } +// for input in inputs { +// let input_internal_provider = +// SchemaVariant::find_root_child_implicit_internal_provider( +// ctx, +// schema_variant_id, +// input.location.into(), +// ) +// .await?; +// AttributePrototypeArgument::new_for_intra_component( +// ctx, +// *inserted_attribute_prototype.id(), +// input.func_argument_id, +// *input_internal_provider.id(), +// ) +// .await?; +// } - // Return the prop id for the entire map so that its implicit internal provider can be - // used for intelligence functions. - Ok((*map_prop.id(), inserted_attribute_prototype)) - } -} +// // Return the prop id for the entire map so that its implicit internal provider can be +// // used for intelligence functions. 
+// Ok((*map_prop.id(), inserted_attribute_prototype)) +// } +// } diff --git a/lib/dal/src/schema/variant/root_prop.rs b/lib/dal/src/schema/variant/root_prop.rs index 06683f1735..7e9f6c496b 100644 --- a/lib/dal/src/schema/variant/root_prop.rs +++ b/lib/dal/src/schema/variant/root_prop.rs @@ -4,15 +4,7 @@ use strum::{AsRefStr, Display as EnumDisplay, EnumIter, EnumString}; use telemetry::prelude::*; -use crate::func::backend::validation::FuncBackendValidationArgs; -use crate::property_editor::schema::WidgetKind; -use crate::validation::Validation; -use crate::{ - schema::variant::{leaves::LeafKind, SchemaVariantResult}, - DalContext, Func, FuncError, Prop, PropId, PropKind, ReconciliationPrototype, - ReconciliationPrototypeContext, SchemaId, SchemaVariant, SchemaVariantId, StandardModel, - ValidationPrototype, ValidationPrototypeContext, -}; +use crate::{PropId, StandardModel}; pub mod component_type; @@ -106,466 +98,466 @@ pub struct RootProp { pub deleted_at_prop_id: PropId, } -impl SchemaVariant { - /// Create and set a [`RootProp`] for the [`SchemaVariant`]. - #[instrument(skip_all)] - pub async fn create_and_set_root_prop( - &mut self, - ctx: &DalContext, - schema_id: SchemaId, - ) -> SchemaVariantResult { - let root_prop = Prop::new(ctx, "root", PropKind::Object, None, self.id, None, None).await?; - let root_prop_id = *root_prop.id(); - self.set_root_prop_id(ctx, Some(root_prop_id)).await?; - - // FIXME(nick): we rely on ULID ordering for now, so the si prop tree creation has to come - // before the domain prop tree creation. Once index maps for objects are added, this - // can be moved back to its original location with the other prop tree creation methods. 
- let si_prop_id = Self::setup_si(ctx, root_prop_id, schema_id, self.id).await?; - - let domain_prop = Prop::new( - ctx, - "domain", - PropKind::Object, - None, - self.id, - Some(root_prop_id), - None, - ) - .await?; - - let secrets_prop_id = *Prop::new( - ctx, - "secrets", - PropKind::Object, - None, - self.id, - Some(root_prop_id), - None, - ) - .await? - .id(); - - let resource_prop_id = Self::setup_resource(ctx, root_prop_id, self.id).await?; - let resource_value_prop_id = Self::setup_resource_value(ctx, root_prop_id, self).await?; - let code_prop_id = Self::setup_code(ctx, root_prop_id, self.id).await?; - let qualification_prop_id = Self::setup_qualification(ctx, root_prop_id, self.id).await?; - let deleted_at_prop_id = Self::setup_deleted_at(ctx, root_prop_id, self.id).await?; - - // Now that the structure is set up, we can populate default - // AttributePrototypes & AttributeValues to be updated appropriately below. - SchemaVariant::create_default_prototypes_and_values(ctx, self.id).await?; - - Ok(RootProp { - prop_id: root_prop_id, - si_prop_id, - domain_prop_id: *domain_prop.id(), - resource_value_prop_id, - resource_prop_id, - secrets_prop_id, - code_prop_id, - qualification_prop_id, - deleted_at_prop_id, - }) - } - - async fn insert_leaf_props( - ctx: &DalContext, - leaf_kind: LeafKind, - root_prop_id: PropId, - schema_variant_id: SchemaVariantId, - ) -> SchemaVariantResult<(PropId, PropId)> { - let (leaf_prop_name, leaf_item_prop_name) = leaf_kind.prop_names(); - - let mut leaf_prop = Prop::new( - ctx, - leaf_prop_name, - PropKind::Map, - None, - schema_variant_id, - Some(root_prop_id), - None, - ) - .await?; - leaf_prop.set_hidden(ctx, true).await?; - - let mut leaf_item_prop = Prop::new( - ctx, - leaf_item_prop_name, - PropKind::Object, - None, - schema_variant_id, - Some(*leaf_prop.id()), - None, - ) - .await?; - leaf_item_prop.set_hidden(ctx, true).await?; - - Ok((*leaf_prop.id(), *leaf_item_prop.id())) - } - - async fn create_validation( - ctx: 
&DalContext, - prop_id: PropId, - schema_id: SchemaId, - schema_variant_id: SchemaVariantId, - validation: Validation, - ) -> SchemaVariantResult<()> { - let validation_func_name = "si:validation"; - let validation_func: Func = Func::find_by_attr(ctx, "name", &validation_func_name) - .await? - .pop() - .ok_or_else(|| FuncError::NotFoundByName(validation_func_name.to_string()))?; - let mut builder = ValidationPrototypeContext::builder(); - builder - .set_prop_id(prop_id) - .set_schema_id(schema_id) - .set_schema_variant_id(schema_variant_id); - ValidationPrototype::new( - ctx, - *validation_func.id(), - serde_json::to_value(FuncBackendValidationArgs::new(validation))?, - builder.to_context(ctx).await?, - ) - .await?; - Ok(()) - } - - async fn setup_si( - ctx: &DalContext, - root_prop_id: PropId, - schema_id: SchemaId, - schema_variant_id: SchemaVariantId, - ) -> SchemaVariantResult { - let si_prop = Prop::new( - ctx, - "si", - PropKind::Object, - None, - schema_variant_id, - Some(root_prop_id), - None, - ) - .await?; - let si_prop_id = *si_prop.id(); - let _si_name_prop = Prop::new( - ctx, - "name", - PropKind::String, - None, - schema_variant_id, - Some(si_prop_id), - None, - ) - .await?; - - // The protected prop ensures a component cannot be deleted in the configuration diagram. - let _protected_prop = Prop::new( - ctx, - "protected", - PropKind::Boolean, - None, - schema_variant_id, - Some(si_prop_id), - None, - ) - .await?; - - // The type prop controls the type of the configuration node. The default type can be - // determined by the schema variant author. The widget options correspond to the component - // type enumeration. 
- let _type_prop = Prop::new( - ctx, - "type", - PropKind::String, - Some(( - WidgetKind::Select, - Some(serde_json::json!([ - { - "label": "Component", - "value": "component", - }, - { - "label": "Configuration Frame", - "value": "configurationFrame", - }, - { - "label": "Aggregation Frame", - "value": "aggregationFrame", - }, - ])), - )), - schema_variant_id, - Some(si_prop_id), - None, - ) - .await?; - - // Override the schema variant color for nodes on the diagram. - let mut color_prop = Prop::new( - ctx, - "color", - PropKind::String, - None, - schema_variant_id, - Some(si_prop_id), - None, - ) - .await?; - color_prop.set_widget_kind(ctx, WidgetKind::Color).await?; - Self::create_validation( - ctx, - *color_prop.id(), - schema_id, - schema_variant_id, - Validation::StringIsHexColor { value: None }, - ) - .await?; - - Ok(si_prop_id) - } - - async fn setup_resource_value( - ctx: &DalContext, - root_prop_id: PropId, - schema_variant: &mut SchemaVariant, - ) -> SchemaVariantResult { - let schema_variant_id = *schema_variant.id(); - let mut resource_value_prop = Prop::new( - ctx, - "resource_value", - PropKind::Object, - None, - schema_variant_id, - Some(root_prop_id), - None, - ) - .await?; - resource_value_prop.set_hidden(ctx, true).await?; - - if let Some(reconciliation_func) = - Func::find_by_attr(ctx, "name", &"si:defaultReconciliation") - .await? 
- .pop() - { - ReconciliationPrototype::upsert( - ctx, - *reconciliation_func.id(), - "Reconciliation", - ReconciliationPrototypeContext::new(*schema_variant.id()), - ) - .await?; - } - - SchemaVariant::create_default_prototypes_and_values(ctx, *schema_variant.id()).await?; - SchemaVariant::create_implicit_internal_providers(ctx, *schema_variant.id()).await?; - - Ok(*resource_value_prop.id()) - } - - async fn setup_resource( - ctx: &DalContext, - root_prop_id: PropId, - schema_variant_id: SchemaVariantId, - ) -> SchemaVariantResult { - let mut resource_prop = Prop::new( - ctx, - "resource", - PropKind::Object, - None, - schema_variant_id, - Some(root_prop_id), - None, - ) - .await?; - resource_prop.set_hidden(ctx, true).await?; - let resource_prop_id = *resource_prop.id(); - - let mut resource_status_prop = Prop::new( - ctx, - "status", - PropKind::String, - None, - schema_variant_id, - Some(resource_prop_id), - None, - ) - .await?; - resource_status_prop.set_hidden(ctx, true).await?; - - let mut resource_message_prop = Prop::new( - ctx, - "message", - PropKind::String, - None, - schema_variant_id, - Some(resource_prop_id), - None, - ) - .await?; - resource_message_prop.set_hidden(ctx, true).await?; - - let mut resource_logs_prop = Prop::new( - ctx, - "logs", - PropKind::Array, - None, - schema_variant_id, - Some(resource_prop_id), - None, - ) - .await?; - resource_logs_prop.set_hidden(ctx, true).await?; - - let mut resource_logs_log_prop = Prop::new( - ctx, - "log", - PropKind::String, - None, - schema_variant_id, - Some(*resource_logs_prop.id()), - None, - ) - .await?; - resource_logs_log_prop.set_hidden(ctx, true).await?; - - let mut resource_payload_prop = Prop::new( - ctx, - "payload", - PropKind::String, - None, - schema_variant_id, - Some(resource_prop_id), - None, - ) - .await?; - resource_payload_prop.set_hidden(ctx, true).await?; - - let mut resource_last_synced_prop = Prop::new( - ctx, - "last_synced", - PropKind::String, - None, - schema_variant_id, - 
Some(resource_prop_id), - None, - ) - .await?; - resource_last_synced_prop.set_hidden(ctx, true).await?; - - Ok(resource_prop_id) - } - - async fn setup_code( - ctx: &DalContext, - root_prop_id: PropId, - schema_variant_id: SchemaVariantId, - ) -> SchemaVariantResult { - let (code_map_prop_id, code_map_item_prop_id) = Self::insert_leaf_props( - ctx, - LeafKind::CodeGeneration, - root_prop_id, - schema_variant_id, - ) - .await?; - - let mut child_code_prop = Prop::new( - ctx, - "code", - PropKind::String, - None, - schema_variant_id, - Some(code_map_item_prop_id), - None, - ) - .await?; - child_code_prop.set_hidden(ctx, true).await?; - - let mut child_message_prop = Prop::new( - ctx, - "message", - PropKind::String, - None, - schema_variant_id, - Some(code_map_item_prop_id), - None, - ) - .await?; - child_message_prop.set_hidden(ctx, true).await?; - - let mut child_format_prop = Prop::new( - ctx, - "format", - PropKind::String, - None, - schema_variant_id, - Some(code_map_item_prop_id), - None, - ) - .await?; - child_format_prop.set_hidden(ctx, true).await?; - - Ok(code_map_prop_id) - } - - async fn setup_qualification( - ctx: &DalContext, - root_prop_id: PropId, - schema_variant_id: SchemaVariantId, - ) -> SchemaVariantResult { - let (qualification_map_prop_id, qualification_map_item_prop_id) = Self::insert_leaf_props( - ctx, - LeafKind::Qualification, - root_prop_id, - schema_variant_id, - ) - .await?; - - let mut child_qualified_prop = Prop::new( - ctx, - "result", - PropKind::String, - None, - schema_variant_id, - Some(qualification_map_item_prop_id), - None, - ) - .await?; - child_qualified_prop.set_hidden(ctx, true).await?; - - let mut child_message_prop = Prop::new( - ctx, - "message", - PropKind::String, - None, - schema_variant_id, - Some(qualification_map_item_prop_id), - None, - ) - .await?; - child_message_prop.set_hidden(ctx, true).await?; - - Ok(qualification_map_prop_id) - } - - async fn setup_deleted_at( - ctx: &DalContext, - root_prop_id: PropId, - 
schema_variant_id: SchemaVariantId, - ) -> SchemaVariantResult { - // This is a new prop that we will use to determine if we want to run a delete workflow - let mut deleted_at = Prop::new( - ctx, - "deleted_at", - PropKind::String, - None, - schema_variant_id, - Some(root_prop_id), - None, - ) - .await?; - deleted_at.set_hidden(ctx, true).await?; - - Ok(*deleted_at.id()) - } -} +// impl SchemaVariant { +// /// Create and set a [`RootProp`] for the [`SchemaVariant`]. +// #[instrument(skip_all)] +// pub async fn create_and_set_root_prop( +// &mut self, +// ctx: &DalContext, +// schema_id: SchemaId, +// ) -> SchemaVariantResult { +// let root_prop = Prop::new(ctx, "root", PropKind::Object, None, self.id, None, None).await?; +// let root_prop_id = *root_prop.id(); +// self.set_root_prop_id(ctx, Some(root_prop_id)).await?; + +// // FIXME(nick): we rely on ULID ordering for now, so the si prop tree creation has to come +// // before the domain prop tree creation. Once index maps for objects are added, this +// // can be moved back to its original location with the other prop tree creation methods. +// let si_prop_id = Self::setup_si(ctx, root_prop_id, schema_id, self.id).await?; + +// let domain_prop = Prop::new( +// ctx, +// "domain", +// PropKind::Object, +// None, +// self.id, +// Some(root_prop_id), +// None, +// ) +// .await?; + +// let secrets_prop_id = *Prop::new( +// ctx, +// "secrets", +// PropKind::Object, +// None, +// self.id, +// Some(root_prop_id), +// None, +// ) +// .await? 
+// .id(); + +// let resource_prop_id = Self::setup_resource(ctx, root_prop_id, self.id).await?; +// let resource_value_prop_id = Self::setup_resource_value(ctx, root_prop_id, self).await?; +// let code_prop_id = Self::setup_code(ctx, root_prop_id, self.id).await?; +// let qualification_prop_id = Self::setup_qualification(ctx, root_prop_id, self.id).await?; +// let deleted_at_prop_id = Self::setup_deleted_at(ctx, root_prop_id, self.id).await?; + +// // Now that the structure is set up, we can populate default +// // AttributePrototypes & AttributeValues to be updated appropriately below. +// SchemaVariant::create_default_prototypes_and_values(ctx, self.id).await?; + +// Ok(RootProp { +// prop_id: root_prop_id, +// si_prop_id, +// domain_prop_id: *domain_prop.id(), +// resource_value_prop_id, +// resource_prop_id, +// secrets_prop_id, +// code_prop_id, +// qualification_prop_id, +// deleted_at_prop_id, +// }) +// } + +// async fn insert_leaf_props( +// ctx: &DalContext, +// leaf_kind: LeafKind, +// root_prop_id: PropId, +// schema_variant_id: SchemaVariantId, +// ) -> SchemaVariantResult<(PropId, PropId)> { +// let (leaf_prop_name, leaf_item_prop_name) = leaf_kind.prop_names(); + +// let mut leaf_prop = Prop::new( +// ctx, +// leaf_prop_name, +// PropKind::Map, +// None, +// schema_variant_id, +// Some(root_prop_id), +// None, +// ) +// .await?; +// leaf_prop.set_hidden(ctx, true).await?; + +// let mut leaf_item_prop = Prop::new( +// ctx, +// leaf_item_prop_name, +// PropKind::Object, +// None, +// schema_variant_id, +// Some(*leaf_prop.id()), +// None, +// ) +// .await?; +// leaf_item_prop.set_hidden(ctx, true).await?; + +// Ok((*leaf_prop.id(), *leaf_item_prop.id())) +// } + +// async fn create_validation( +// ctx: &DalContext, +// prop_id: PropId, +// schema_id: SchemaId, +// schema_variant_id: SchemaVariantId, +// validation: Validation, +// ) -> SchemaVariantResult<()> { +// let validation_func_name = "si:validation"; +// let validation_func: Func = 
Func::find_by_attr(ctx, "name", &validation_func_name) +// .await? +// .pop() +// .ok_or_else(|| FuncError::NotFoundByName(validation_func_name.to_string()))?; +// let mut builder = ValidationPrototypeContext::builder(); +// builder +// .set_prop_id(prop_id) +// .set_schema_id(schema_id) +// .set_schema_variant_id(schema_variant_id); +// ValidationPrototype::new( +// ctx, +// *validation_func.id(), +// serde_json::to_value(FuncBackendValidationArgs::new(validation))?, +// builder.to_context(ctx).await?, +// ) +// .await?; +// Ok(()) +// } + +// async fn setup_si( +// ctx: &DalContext, +// root_prop_id: PropId, +// schema_id: SchemaId, +// schema_variant_id: SchemaVariantId, +// ) -> SchemaVariantResult { +// let si_prop = Prop::new( +// ctx, +// "si", +// PropKind::Object, +// None, +// schema_variant_id, +// Some(root_prop_id), +// None, +// ) +// .await?; +// let si_prop_id = *si_prop.id(); +// let _si_name_prop = Prop::new( +// ctx, +// "name", +// PropKind::String, +// None, +// schema_variant_id, +// Some(si_prop_id), +// None, +// ) +// .await?; + +// // The protected prop ensures a component cannot be deleted in the configuration diagram. +// let _protected_prop = Prop::new( +// ctx, +// "protected", +// PropKind::Boolean, +// None, +// schema_variant_id, +// Some(si_prop_id), +// None, +// ) +// .await?; + +// // The type prop controls the type of the configuration node. The default type can be +// // determined by the schema variant author. The widget options correspond to the component +// // type enumeration. 
+// let _type_prop = Prop::new( +// ctx, +// "type", +// PropKind::String, +// Some(( +// WidgetKind::Select, +// Some(serde_json::json!([ +// { +// "label": "Component", +// "value": "component", +// }, +// { +// "label": "Configuration Frame", +// "value": "configurationFrame", +// }, +// { +// "label": "Aggregation Frame", +// "value": "aggregationFrame", +// }, +// ])), +// )), +// schema_variant_id, +// Some(si_prop_id), +// None, +// ) +// .await?; + +// // Override the schema variant color for nodes on the diagram. +// let mut color_prop = Prop::new( +// ctx, +// "color", +// PropKind::String, +// None, +// schema_variant_id, +// Some(si_prop_id), +// None, +// ) +// .await?; +// color_prop.set_widget_kind(ctx, WidgetKind::Color).await?; +// Self::create_validation( +// ctx, +// *color_prop.id(), +// schema_id, +// schema_variant_id, +// Validation::StringIsHexColor { value: None }, +// ) +// .await?; + +// Ok(si_prop_id) +// } + +// async fn setup_resource_value( +// ctx: &DalContext, +// root_prop_id: PropId, +// schema_variant: &mut SchemaVariant, +// ) -> SchemaVariantResult { +// let schema_variant_id = *schema_variant.id(); +// let mut resource_value_prop = Prop::new( +// ctx, +// "resource_value", +// PropKind::Object, +// None, +// schema_variant_id, +// Some(root_prop_id), +// None, +// ) +// .await?; +// resource_value_prop.set_hidden(ctx, true).await?; + +// if let Some(reconciliation_func) = +// Func::find_by_attr(ctx, "name", &"si:defaultReconciliation") +// .await? 
+// .pop() +// { +// ReconciliationPrototype::upsert( +// ctx, +// *reconciliation_func.id(), +// "Reconciliation", +// ReconciliationPrototypeContext::new(*schema_variant.id()), +// ) +// .await?; +// } + +// SchemaVariant::create_default_prototypes_and_values(ctx, *schema_variant.id()).await?; +// SchemaVariant::create_implicit_internal_providers(ctx, *schema_variant.id()).await?; + +// Ok(*resource_value_prop.id()) +// } + +// async fn setup_resource( +// ctx: &DalContext, +// root_prop_id: PropId, +// schema_variant_id: SchemaVariantId, +// ) -> SchemaVariantResult { +// let mut resource_prop = Prop::new( +// ctx, +// "resource", +// PropKind::Object, +// None, +// schema_variant_id, +// Some(root_prop_id), +// None, +// ) +// .await?; +// resource_prop.set_hidden(ctx, true).await?; +// let resource_prop_id = *resource_prop.id(); + +// let mut resource_status_prop = Prop::new( +// ctx, +// "status", +// PropKind::String, +// None, +// schema_variant_id, +// Some(resource_prop_id), +// None, +// ) +// .await?; +// resource_status_prop.set_hidden(ctx, true).await?; + +// let mut resource_message_prop = Prop::new( +// ctx, +// "message", +// PropKind::String, +// None, +// schema_variant_id, +// Some(resource_prop_id), +// None, +// ) +// .await?; +// resource_message_prop.set_hidden(ctx, true).await?; + +// let mut resource_logs_prop = Prop::new( +// ctx, +// "logs", +// PropKind::Array, +// None, +// schema_variant_id, +// Some(resource_prop_id), +// None, +// ) +// .await?; +// resource_logs_prop.set_hidden(ctx, true).await?; + +// let mut resource_logs_log_prop = Prop::new( +// ctx, +// "log", +// PropKind::String, +// None, +// schema_variant_id, +// Some(*resource_logs_prop.id()), +// None, +// ) +// .await?; +// resource_logs_log_prop.set_hidden(ctx, true).await?; + +// let mut resource_payload_prop = Prop::new( +// ctx, +// "payload", +// PropKind::String, +// None, +// schema_variant_id, +// Some(resource_prop_id), +// None, +// ) +// .await?; +// 
resource_payload_prop.set_hidden(ctx, true).await?; + +// let mut resource_last_synced_prop = Prop::new( +// ctx, +// "last_synced", +// PropKind::String, +// None, +// schema_variant_id, +// Some(resource_prop_id), +// None, +// ) +// .await?; +// resource_last_synced_prop.set_hidden(ctx, true).await?; + +// Ok(resource_prop_id) +// } + +// async fn setup_code( +// ctx: &DalContext, +// root_prop_id: PropId, +// schema_variant_id: SchemaVariantId, +// ) -> SchemaVariantResult { +// let (code_map_prop_id, code_map_item_prop_id) = Self::insert_leaf_props( +// ctx, +// LeafKind::CodeGeneration, +// root_prop_id, +// schema_variant_id, +// ) +// .await?; + +// let mut child_code_prop = Prop::new( +// ctx, +// "code", +// PropKind::String, +// None, +// schema_variant_id, +// Some(code_map_item_prop_id), +// None, +// ) +// .await?; +// child_code_prop.set_hidden(ctx, true).await?; + +// let mut child_message_prop = Prop::new( +// ctx, +// "message", +// PropKind::String, +// None, +// schema_variant_id, +// Some(code_map_item_prop_id), +// None, +// ) +// .await?; +// child_message_prop.set_hidden(ctx, true).await?; + +// let mut child_format_prop = Prop::new( +// ctx, +// "format", +// PropKind::String, +// None, +// schema_variant_id, +// Some(code_map_item_prop_id), +// None, +// ) +// .await?; +// child_format_prop.set_hidden(ctx, true).await?; + +// Ok(code_map_prop_id) +// } + +// async fn setup_qualification( +// ctx: &DalContext, +// root_prop_id: PropId, +// schema_variant_id: SchemaVariantId, +// ) -> SchemaVariantResult { +// let (qualification_map_prop_id, qualification_map_item_prop_id) = Self::insert_leaf_props( +// ctx, +// LeafKind::Qualification, +// root_prop_id, +// schema_variant_id, +// ) +// .await?; + +// let mut child_qualified_prop = Prop::new( +// ctx, +// "result", +// PropKind::String, +// None, +// schema_variant_id, +// Some(qualification_map_item_prop_id), +// None, +// ) +// .await?; +// child_qualified_prop.set_hidden(ctx, 
true).await?; + +// let mut child_message_prop = Prop::new( +// ctx, +// "message", +// PropKind::String, +// None, +// schema_variant_id, +// Some(qualification_map_item_prop_id), +// None, +// ) +// .await?; +// child_message_prop.set_hidden(ctx, true).await?; + +// Ok(qualification_map_prop_id) +// } + +// async fn setup_deleted_at( +// ctx: &DalContext, +// root_prop_id: PropId, +// schema_variant_id: SchemaVariantId, +// ) -> SchemaVariantResult { +// // This is a new prop that we will use to determine if we want to run a delete workflow +// let mut deleted_at = Prop::new( +// ctx, +// "deleted_at", +// PropKind::String, +// None, +// schema_variant_id, +// Some(root_prop_id), +// None, +// ) +// .await?; +// deleted_at.set_hidden(ctx, true).await?; + +// Ok(*deleted_at.id()) +// } +// } diff --git a/lib/dal/src/socket.rs b/lib/dal/src/socket.rs index 4ad4111dc3..be6499b9a2 100644 --- a/lib/dal/src/socket.rs +++ b/lib/dal/src/socket.rs @@ -1,52 +1,99 @@ +use content_store::ContentHash; use serde::{Deserialize, Serialize}; -use si_data_pg::PgError; -use strum::{AsRefStr, Display, EnumIter, EnumString}; + +use strum::{AsRefStr, Display, EnumDiscriminants, EnumIter, EnumString}; use telemetry::prelude::*; -use thiserror::Error; + use si_pkg::SocketSpecArity; +use crate::workspace_snapshot::content_address::ContentAddress; use crate::{ - impl_standard_model, label_list::ToLabelList, pk, standard_model, standard_model_accessor, - standard_model_belongs_to, standard_model_many_to_many, ComponentId, DalContext, DiagramKind, - ExternalProvider, ExternalProviderId, HistoryEventError, InternalProvider, InternalProviderId, - NodeId, SchemaVariant, SchemaVariantId, StandardModel, StandardModelError, Tenancy, Timestamp, - TransactionsError, Visibility, + label_list::ToLabelList, pk, StandardModel, Timestamp, }; -const FIND_BY_NAME_FOR_EDGE_KIND_AND_NODE: &str = - include_str!("queries/socket/find_by_name_for_edge_kind_and_node.sql"); -const FIND_FRAME_SOCKET_FOR_NODE: &str 
= - include_str!("queries/socket/find_frame_socket_for_node.sql"); -const LIST_FOR_COMPONENT: &str = include_str!("queries/socket/list_for_component.sql"); -const FIND_FOR_INTERNAL_PROVIDER: &str = - include_str!("queries/socket/find_for_internal_provider.sql"); -const FIND_FOR_EXTERNAL_PROVIDER: &str = - include_str!("queries/socket/find_for_external_provider.sql"); +// const FIND_BY_NAME_FOR_EDGE_KIND_AND_NODE: &str = +// include_str!("queries/socket/find_by_name_for_edge_kind_and_node.sql"); +// const FIND_FRAME_SOCKET_FOR_NODE: &str = +// include_str!("queries/socket/find_frame_socket_for_node.sql"); +// const LIST_FOR_COMPONENT: &str = include_str!("queries/socket/list_for_component.sql"); +// const FIND_FOR_INTERNAL_PROVIDER: &str = +// include_str!("queries/socket/find_for_internal_provider.sql"); +// const FIND_FOR_EXTERNAL_PROVIDER: &str = +// include_str!("queries/socket/find_for_external_provider.sql"); +pk!(SocketId); + +// TODO(nick,zack,jacob): this is temporary. Move back to "diagram.rs" later. #[remain::sorted] -#[derive(Error, Debug)] -pub enum SocketError { - #[error("history event error: {0}")] - HistoryEvent(#[from] HistoryEventError), - #[error("pg error: {0}")] - Pg(#[from] PgError), - /// Propagate a [`SchemaVariantError`](crate::SchemaVariantError) wrapped as a string. - #[error("schema variant error: {0}")] - SchemaVariant(String), - /// Could not find the [`SchemaVariant`](crate::SchemaVariant) by id. 
- #[error("schema variant not found by id: {0}")] - SchemaVariantNotFound(SchemaVariantId), - #[error("standard model error: {0}")] - StandardModel(#[from] StandardModelError), - #[error("transactions error: {0}")] - Transactions(#[from] TransactionsError), +#[derive( + AsRefStr, Clone, Copy, Debug, Deserialize, Display, EnumString, Eq, PartialEq, Serialize, +)] +#[serde(rename_all = "camelCase")] +#[strum(serialize_all = "camelCase")] +pub enum DiagramKind { + /// Represents the collection of [`Components`](crate::Component) and connections between them + /// within a [`Workspace`](crate::Workspace) + Configuration, } -pub type SocketResult = Result; +/// The mechanism for setting relationships between [`SchemaVariants`](crate::SchemaVariant) or +/// instantiations of the same [`SchemaVariant`](crate::SchemaVariant). +#[derive(Clone, Debug, Deserialize, Eq, PartialEq, Serialize)] +pub struct Socket { + id: SocketId, + #[serde(flatten)] + timestamp: Timestamp, + name: String, + human_name: Option, + kind: SocketKind, + edge_kind: SocketEdgeKind, + diagram_kind: DiagramKind, + arity: SocketArity, + required: bool, + ui_hidden: bool, +} -pk!(SocketPk); -pk!(SocketId); +#[derive(Debug, PartialEq)] +pub struct SocketGraphNode { + id: SocketId, + content_address: ContentAddress, + content: SocketContentV1, +} + +#[derive(EnumDiscriminants, Serialize, Deserialize, PartialEq)] +#[serde(tag = "version")] +pub enum SocketContent { + V1(SocketContentV1), +} + +#[derive(Debug, Clone, Deserialize, Serialize, PartialEq)] +pub struct SocketContentV1 { + #[serde(flatten)] + pub timestamp: Timestamp, + pub name: String, + pub human_name: Option, + pub kind: SocketKind, + pub edge_kind: SocketEdgeKind, + pub diagram_kind: DiagramKind, + pub arity: SocketArity, + pub required: bool, + pub ui_hidden: bool, +} + +impl SocketGraphNode { + pub fn assemble( + id: impl Into, + content_hash: ContentHash, + content: SocketContentV1, + ) -> Self { + Self { + id: id.into(), + content_address: 
ContentAddress::Socket(content_hash), + content, + } + } +} /// Dictates the kind of behavior possible for a [`Socket`](Socket). #[remain::sorted] @@ -136,229 +183,198 @@ pub enum SocketEdgeKind { impl ToLabelList for SocketEdgeKind {} -/// The mechanism for setting relationships between [`SchemaVariants`](crate::SchemaVariant) or -/// instantiations of the same [`SchemaVariant`](crate::SchemaVariant). -#[derive(Clone, Debug, Deserialize, Eq, PartialEq, Serialize)] -pub struct Socket { - pk: SocketPk, - id: SocketId, - name: String, - human_name: Option, - kind: SocketKind, - edge_kind: SocketEdgeKind, - diagram_kind: DiagramKind, - arity: SocketArity, - required: bool, - ui_hidden: bool, - #[serde(flatten)] - tenancy: Tenancy, - #[serde(flatten)] - timestamp: Timestamp, - #[serde(flatten)] - visibility: Visibility, -} - -impl_standard_model! { - model: Socket, - pk: SocketPk, - id: SocketId, - table_name: "sockets", - history_event_label_base: "socket", - history_event_message_name: "Socket" -} - -impl Socket { - pub async fn new( - ctx: &DalContext, - name: impl AsRef, - kind: SocketKind, - socket_edge_kind: &SocketEdgeKind, - arity: &SocketArity, - diagram_kind: &DiagramKind, - schema_variant_id: Option, - ) -> SocketResult { - let name = name.as_ref(); - let row = ctx - .txns() - .await? - .pg() - .query_one( - "SELECT object FROM socket_create_v1($1, $2, $3, $4, $5, $6, $7)", - &[ - ctx.tenancy(), - ctx.visibility(), - &name, - &kind.as_ref(), - &socket_edge_kind.as_ref(), - &arity.as_ref(), - &diagram_kind.as_ref(), - ], - ) - .await?; - let object: Socket = standard_model::finish_create_from_row(ctx, row).await?; +// impl Socket { +// pub async fn new( +// ctx: &DalContext, +// name: impl AsRef, +// kind: SocketKind, +// socket_edge_kind: &SocketEdgeKind, +// arity: &SocketArity, +// diagram_kind: &DiagramKind, +// schema_variant_id: Option, +// ) -> SocketResult { +// let name = name.as_ref(); +// let row = ctx +// .txns() +// .await? 
+// .pg() +// .query_one( +// "SELECT object FROM socket_create_v1($1, $2, $3, $4, $5, $6, $7)", +// &[ +// ctx.tenancy(), +// ctx.visibility(), +// &name, +// &kind.as_ref(), +// &socket_edge_kind.as_ref(), +// &arity.as_ref(), +// &diagram_kind.as_ref(), +// ], +// ) +// .await?; +// let object: Socket = standard_model::finish_create_from_row(ctx, row).await?; - if let Some(schema_variant_id) = schema_variant_id { - let schema_variant = SchemaVariant::get_by_id(ctx, &schema_variant_id) - .await - .map_err(|e| SocketError::SchemaVariant(e.to_string()))? - .ok_or(SocketError::SchemaVariantNotFound(schema_variant_id))?; - schema_variant - .add_socket(ctx, &object.id) - .await - .map_err(|e| SocketError::SchemaVariant(e.to_string()))? - } +// if let Some(schema_variant_id) = schema_variant_id { +// let schema_variant = SchemaVariant::get_by_id(ctx, &schema_variant_id) +// .await +// .map_err(|e| SocketError::SchemaVariant(e.to_string()))? +// .ok_or(SocketError::SchemaVariantNotFound(schema_variant_id))?; +// schema_variant +// .add_socket(ctx, &object.id) +// .await +// .map_err(|e| SocketError::SchemaVariant(e.to_string()))? 
+// } - Ok(object) - } +// Ok(object) +// } - standard_model_accessor!(human_name, Option, SocketResult); - standard_model_accessor!(name, String, SocketResult); - standard_model_accessor!(kind, Enum(SocketKind), SocketResult); - standard_model_accessor!(edge_kind, Enum(SocketEdgeKind), SocketResult); - standard_model_accessor!(arity, Enum(SocketArity), SocketResult); - standard_model_accessor!(diagram_kind, Enum(DiagramKind), SocketResult); - standard_model_accessor!(required, bool, SocketResult); - standard_model_accessor!(ui_hidden, bool, SocketResult); +// standard_model_accessor!(human_name, Option, SocketResult); +// standard_model_accessor!(name, String, SocketResult); +// standard_model_accessor!(kind, Enum(SocketKind), SocketResult); +// standard_model_accessor!(edge_kind, Enum(SocketEdgeKind), SocketResult); +// standard_model_accessor!(arity, Enum(SocketArity), SocketResult); +// standard_model_accessor!(diagram_kind, Enum(DiagramKind), SocketResult); +// standard_model_accessor!(required, bool, SocketResult); +// standard_model_accessor!(ui_hidden, bool, SocketResult); - standard_model_many_to_many!( - lookup_fn: types, - associate_fn: add_type, - disassociate_fn: remove_type, - disassociate_all_fn: remove_all_types, - table_name: "socket_many_to_many_schema_variants", - left_table: "sockets", - left_id: SocketId, - right_table: "schema_variants", - right_id: SchemaVariantId, - which_table_is_this: "left", - returns: SchemaVariant, - result: SocketResult, - ); +// standard_model_many_to_many!( +// lookup_fn: types, +// associate_fn: add_type, +// disassociate_fn: remove_type, +// disassociate_all_fn: remove_all_types, +// table_name: "socket_many_to_many_schema_variants", +// left_table: "sockets", +// left_id: SocketId, +// right_table: "schema_variants", +// right_id: SchemaVariantId, +// which_table_is_this: "left", +// returns: SchemaVariant, +// result: SocketResult, +// ); - standard_model_belongs_to!( - lookup_fn: internal_provider, - set_fn: 
set_internal_provider, - unset_fn: unset_internal_provider, - table: "socket_belongs_to_internal_provider", - model_table: "internal_providers", - belongs_to_id: InternalProviderId, - returns: InternalProvider, - result: SocketResult, - ); +// standard_model_belongs_to!( +// lookup_fn: internal_provider, +// set_fn: set_internal_provider, +// unset_fn: unset_internal_provider, +// table: "socket_belongs_to_internal_provider", +// model_table: "internal_providers", +// belongs_to_id: InternalProviderId, +// returns: InternalProvider, +// result: SocketResult, +// ); - standard_model_belongs_to!( - lookup_fn: external_provider, - set_fn: set_external_provider, - unset_fn: unset_external_provider, - table: "socket_belongs_to_external_provider", - model_table: "external_providers", - belongs_to_id: ExternalProviderId, - returns: ExternalProvider, - result: SocketResult, - ); +// standard_model_belongs_to!( +// lookup_fn: external_provider, +// set_fn: set_external_provider, +// unset_fn: unset_external_provider, +// table: "socket_belongs_to_external_provider", +// model_table: "external_providers", +// belongs_to_id: ExternalProviderId, +// returns: ExternalProvider, +// result: SocketResult, +// ); - /// Finds the "Frame" [`Socket`] for a given [`Node`](crate::Node) and - /// [`SocketEdgeKind`]. - #[instrument(skip_all)] - pub async fn find_frame_socket_for_node( - ctx: &DalContext, - node_id: NodeId, - socket_edge_kind: SocketEdgeKind, - ) -> SocketResult { - let row = ctx - .txns() - .await? - .pg() - .query_one( - FIND_FRAME_SOCKET_FOR_NODE, - &[ - ctx.tenancy(), - ctx.visibility(), - &node_id, - &socket_edge_kind.as_ref(), - ], - ) - .await?; - Ok(standard_model::object_from_row(row)?) - } +// /// Finds the "Frame" [`Socket`] for a given [`Node`](crate::Node) and +// /// [`SocketEdgeKind`]. 
+// #[instrument(skip_all)] +// pub async fn find_frame_socket_for_node( +// ctx: &DalContext, +// node_id: NodeId, +// socket_edge_kind: SocketEdgeKind, +// ) -> SocketResult { +// let row = ctx +// .txns() +// .await? +// .pg() +// .query_one( +// FIND_FRAME_SOCKET_FOR_NODE, +// &[ +// ctx.tenancy(), +// ctx.visibility(), +// &node_id, +// &socket_edge_kind.as_ref(), +// ], +// ) +// .await?; +// Ok(standard_model::object_from_row(row)?) +// } - #[instrument(skip_all)] - pub async fn find_for_internal_provider( - ctx: &DalContext, - internal_provider_id: InternalProviderId, - ) -> SocketResult> { - let rows = ctx - .txns() - .await? - .pg() - .query( - FIND_FOR_INTERNAL_PROVIDER, - &[ctx.tenancy(), ctx.visibility(), &internal_provider_id], - ) - .await?; - Ok(standard_model::objects_from_rows(rows)?) - } +// #[instrument(skip_all)] +// pub async fn find_for_internal_provider( +// ctx: &DalContext, +// internal_provider_id: InternalProviderId, +// ) -> SocketResult> { +// let rows = ctx +// .txns() +// .await? +// .pg() +// .query( +// FIND_FOR_INTERNAL_PROVIDER, +// &[ctx.tenancy(), ctx.visibility(), &internal_provider_id], +// ) +// .await?; +// Ok(standard_model::objects_from_rows(rows)?) +// } - #[instrument(skip_all)] - pub async fn find_for_external_provider( - ctx: &DalContext, - external_provider_id: ExternalProviderId, - ) -> SocketResult> { - let rows = ctx - .txns() - .await? - .pg() - .query( - FIND_FOR_EXTERNAL_PROVIDER, - &[ctx.tenancy(), ctx.visibility(), &external_provider_id], - ) - .await?; - Ok(standard_model::objects_from_rows(rows)?) - } +// #[instrument(skip_all)] +// pub async fn find_for_external_provider( +// ctx: &DalContext, +// external_provider_id: ExternalProviderId, +// ) -> SocketResult> { +// let rows = ctx +// .txns() +// .await? +// .pg() +// .query( +// FIND_FOR_EXTERNAL_PROVIDER, +// &[ctx.tenancy(), ctx.visibility(), &external_provider_id], +// ) +// .await?; +// Ok(standard_model::objects_from_rows(rows)?) 
+// } - /// List all [`Sockets`](Self) for the given [`ComponentId`](crate::Component). - #[instrument(skip_all)] - pub async fn list_for_component( - ctx: &DalContext, - component_id: ComponentId, - ) -> SocketResult> { - let rows = ctx - .txns() - .await? - .pg() - .query( - LIST_FOR_COMPONENT, - &[ctx.tenancy(), ctx.visibility(), &component_id], - ) - .await?; - Ok(standard_model::objects_from_rows(rows)?) - } +// /// List all [`Sockets`](Self) for the given [`ComponentId`](crate::Component). +// #[instrument(skip_all)] +// pub async fn list_for_component( +// ctx: &DalContext, +// component_id: ComponentId, +// ) -> SocketResult> { +// let rows = ctx +// .txns() +// .await? +// .pg() +// .query( +// LIST_FOR_COMPONENT, +// &[ctx.tenancy(), ctx.visibility(), &component_id], +// ) +// .await?; +// Ok(standard_model::objects_from_rows(rows)?) +// } - /// Find a [`Socket`] by a provided name for a given [`SocketEdgeKind`] and - /// a given [`NodeId`](crate::Node). - #[instrument(skip_all)] - pub async fn find_by_name_for_edge_kind_and_node( - ctx: &DalContext, - name: impl AsRef, - socket_edge_kind: SocketEdgeKind, - node_id: NodeId, - ) -> SocketResult> { - let name = name.as_ref(); - let maybe_row = ctx - .txns() - .await? - .pg() - .query_opt( - FIND_BY_NAME_FOR_EDGE_KIND_AND_NODE, - &[ - ctx.tenancy(), - ctx.visibility(), - &name, - &socket_edge_kind.as_ref(), - &node_id, - ], - ) - .await?; - Ok(standard_model::option_object_from_row(maybe_row)?) - } -} +// /// Find a [`Socket`] by a provided name for a given [`SocketEdgeKind`] and +// /// a given [`NodeId`](crate::Node). +// #[instrument(skip_all)] +// pub async fn find_by_name_for_edge_kind_and_node( +// ctx: &DalContext, +// name: impl AsRef, +// socket_edge_kind: SocketEdgeKind, +// node_id: NodeId, +// ) -> SocketResult> { +// let name = name.as_ref(); +// let maybe_row = ctx +// .txns() +// .await? 
+// .pg() +// .query_opt( +// FIND_BY_NAME_FOR_EDGE_KIND_AND_NODE, +// &[ +// ctx.tenancy(), +// ctx.visibility(), +// &name, +// &socket_edge_kind.as_ref(), +// &node_id, +// ], +// ) +// .await?; +// Ok(standard_model::option_object_from_row(maybe_row)?) +// } +// } diff --git a/lib/dal/src/status.rs b/lib/dal/src/status.rs index f5ad3c71ce..29a2f3e6cc 100644 --- a/lib/dal/src/status.rs +++ b/lib/dal/src/status.rs @@ -16,9 +16,8 @@ use tokio::sync::Mutex; use crate::{ pk, schema::variant::leaves::LeafKind, standard_model::objects_from_rows, ActorView, - AttributeValue, AttributeValueError, AttributeValueId, ChangeSetPk, Component, ComponentError, - ComponentId, ComponentStatus, DalContext, ExternalProvider, ExternalProviderError, - InternalProvider, InternalProviderError, Prop, PropError, PropId, SchemaVariant, SocketId, + AttributeValue, AttributeValueId, ChangeSetPk, Component, ComponentId, ComponentStatus, + DalContext, ExternalProvider, InternalProvider, Prop, PropId, SchemaVariant, SocketId, StandardModel, StandardModelError, Tenancy, Timestamp, UserPk, WsEvent, WsEventError, WsEventResult, WsPayload, }; diff --git a/lib/dal/src/validation.rs b/lib/dal/src/validation.rs index 5aaf1358bd..632fd93064 100644 --- a/lib/dal/src/validation.rs +++ b/lib/dal/src/validation.rs @@ -13,13 +13,10 @@ use serde::{Deserialize, Serialize}; use serde_json::Value; use thiserror::Error; -use crate::{ - func::backend::validation::FuncBackendValidationArgs, DalContext, FuncId, PropId, SchemaId, - SchemaVariantId, -}; +use crate::{FuncId}; pub mod prototype; -pub mod resolver; +// pub mod resolver; /// Struct for creating a consumable error for the frontend when a "field" fails its validation /// check. 
@@ -196,33 +193,33 @@ pub enum ValidationKind { Custom(FuncId), } -pub async fn create_validation( - ctx: &DalContext, - validation_kind: ValidationKind, - builtin_func_id: FuncId, - prop_id: PropId, - schema_id: SchemaId, - schema_variant_id: SchemaVariantId, -) -> prototype::ValidationPrototypeResult { - let (validation_func_id, validation_args) = match validation_kind { - ValidationKind::Builtin(validation) => ( - builtin_func_id, - serde_json::to_value(FuncBackendValidationArgs::new(validation))?, - ), - - ValidationKind::Custom(func_id) => (func_id, serde_json::json!(null)), - }; - let mut builder = prototype::context::ValidationPrototypeContext::builder(); - builder - .set_prop_id(prop_id) - .set_schema_id(schema_id) - .set_schema_variant_id(schema_variant_id); - - prototype::ValidationPrototype::new( - ctx, - validation_func_id, - validation_args, - builder.to_context(ctx).await?, - ) - .await -} +// pub async fn create_validation( +// ctx: &DalContext, +// validation_kind: ValidationKind, +// builtin_func_id: FuncId, +// prop_id: PropId, +// schema_id: SchemaId, +// schema_variant_id: SchemaVariantId, +// ) -> prototype::ValidationPrototypeResult { +// let (validation_func_id, validation_args) = match validation_kind { +// ValidationKind::Builtin(validation) => ( +// builtin_func_id, +// serde_json::to_value(FuncBackendValidationArgs::new(validation))?, +// ), + +// ValidationKind::Custom(func_id) => (func_id, serde_json::json!(null)), +// }; +// let mut builder = prototype::context::ValidationPrototypeContext::builder(); +// builder +// .set_prop_id(prop_id) +// .set_schema_id(schema_id) +// .set_schema_variant_id(schema_variant_id); + +// prototype::ValidationPrototype::new( +// ctx, +// validation_func_id, +// validation_args, +// builder.to_context(ctx).await?, +// ) +// .await +// } diff --git a/lib/dal/src/validation/prototype.rs b/lib/dal/src/validation/prototype.rs index 60b012fca5..9b71b6b911 100644 --- a/lib/dal/src/validation/prototype.rs +++ 
b/lib/dal/src/validation/prototype.rs @@ -1,214 +1,178 @@ +use content_store::ContentHash; use serde::{Deserialize, Serialize}; -use serde_json::Value as JsonValue; -use si_data_nats::NatsError; -use si_data_pg::PgError; + + + +use strum::EnumDiscriminants; use telemetry::prelude::*; -use thiserror::Error; -use crate::validation::prototype::context::ValidationPrototypeContextBuilder; + +use crate::workspace_snapshot::content_address::ContentAddress; use crate::{ - func::FuncId, - impl_standard_model, pk, - standard_model::{self, objects_from_rows}, - standard_model_accessor, DalContext, HistoryEventError, Prop, PropId, SchemaVariantId, - StandardModel, StandardModelError, Tenancy, Timestamp, Visibility, + func::FuncId, pk, + StandardModel, Timestamp, }; -use crate::{PropKind, SchemaId, TransactionsError, ValidationPrototypeContext}; - -pub mod context; - -#[remain::sorted] -#[derive(Error, Debug)] -pub enum ValidationPrototypeError { - #[error("prop for validation prototype context is not of primitive prop kind, found: {0:?}")] - ContextPropKindIsNotPrimitive(PropKind), - #[error("history event error: {0}")] - HistoryEvent(#[from] HistoryEventError), - #[error("nats txn error: {0}")] - Nats(#[from] NatsError), - #[error("pg error: {0}")] - Pg(#[from] PgError), - #[error("for builder {0:?}, the following fields must be set: {1:?}")] - PrerequisteFieldsUnset(ValidationPrototypeContextBuilder, Vec<&'static str>), - #[error("prop not found by id: {0}")] - PropNotFound(PropId), - #[error("error serializing/deserializing json: {0}")] - SerdeJson(#[from] serde_json::Error), - #[error("standard model error: {0}")] - StandardModelError(#[from] StandardModelError), - #[error("transactions error: {0}")] - Transactions(#[from] TransactionsError), -} -pub type ValidationPrototypeResult = Result; -const LIST_FOR_PROP: &str = include_str!("../queries/validation_prototype/list_for_prop.sql"); -const LIST_FOR_SCHEMA_VARIANT: &str = - 
include_str!("../queries/validation_prototype/list_for_schema_variant.sql"); -const LIST_FOR_FUNC: &str = include_str!("../queries/validation_prototype/list_for_func.sql"); -const FIND_FOR_CONTEXT: &str = include_str!("../queries/validation_prototype/find_for_context.sql"); +// pub mod context; + +// const LIST_FOR_PROP: &str = include_str!("../queries/validation_prototype/list_for_prop.sql"); +// const LIST_FOR_SCHEMA_VARIANT: &str = +// include_str!("../queries/validation_prototype/list_for_schema_variant.sql"); +// const LIST_FOR_FUNC: &str = include_str!("../queries/validation_prototype/list_for_func.sql"); +// const FIND_FOR_CONTEXT: &str = include_str!("../queries/validation_prototype/find_for_context.sql"); -pk!(ValidationPrototypePk); pk!(ValidationPrototypeId); // An ValidationPrototype joins a `Func` to the context in which // the component that is created with it can use to generate a ValidationResolver. #[derive(Deserialize, Serialize, Debug, Clone, PartialEq, Eq)] pub struct ValidationPrototype { - pk: ValidationPrototypePk, id: ValidationPrototypeId, + #[serde(flatten)] + timestamp: Timestamp, func_id: FuncId, args: serde_json::Value, link: Option, - prop_id: PropId, - schema_id: SchemaId, - schema_variant_id: SchemaVariantId, - #[serde(flatten)] - tenancy: Tenancy, - #[serde(flatten)] - timestamp: Timestamp, - #[serde(flatten)] - visibility: Visibility, } -impl_standard_model! 
{ - model: ValidationPrototype, - pk: ValidationPrototypePk, +#[derive(Debug, PartialEq)] +pub struct ValidationPrototypeGraphNode { id: ValidationPrototypeId, - table_name: "validation_prototypes", - history_event_label_base: "validation_prototype", - history_event_message_name: "Validation Prototype" + content_address: ContentAddress, + content: ValidationPrototypeContentV1, } -impl ValidationPrototype { - #[instrument(skip_all)] - pub async fn new( - ctx: &DalContext, - func_id: FuncId, - args: serde_json::Value, - context: ValidationPrototypeContext, - ) -> ValidationPrototypeResult { - let row = ctx - .txns() - .await? - .pg() - .query_one( - "SELECT object FROM validation_prototype_create_v1($1, $2, $3, $4, $5, $6, $7)", - &[ - ctx.tenancy(), - ctx.visibility(), - &func_id, - &args, - &context.prop_id(), - &context.schema_id(), - &context.schema_variant_id(), - ], - ) - .await?; - let object = standard_model::finish_create_from_row(ctx, row).await?; - Ok(object) - } - - standard_model_accessor!(func_id, Pk(FuncId), ValidationPrototypeResult); - standard_model_accessor!(args, Json, ValidationPrototypeResult); - standard_model_accessor!(link, Option, ValidationPrototypeResult); - standard_model_accessor!(prop_id, Pk(PropId), ValidationPrototypeResult); - standard_model_accessor!(schema_id, Pk(SchemaId), ValidationPrototypeResult); - standard_model_accessor!( - schema_variant_id, - Pk(SchemaVariantId), - ValidationPrototypeResult - ); - - pub fn context(&self) -> ValidationPrototypeContext { - ValidationPrototypeContext::new_unchecked( - self.prop_id, - self.schema_variant_id, - self.schema_id, - ) - } - - /// List all [`ValidationPrototypes`](Self) for a given [`Prop`](crate::Prop). - #[instrument(skip_all)] - pub async fn list_for_prop( - ctx: &DalContext, - prop_id: PropId, - ) -> ValidationPrototypeResult> { - let rows = ctx - .txns() - .await? 
- .pg() - .query(LIST_FOR_PROP, &[ctx.tenancy(), ctx.visibility(), &prop_id]) - .await?; - let object = objects_from_rows(rows)?; - Ok(object) - } - - /// List all [`ValidationPrototypes`](Self) for all [`Props`](crate::Prop) in a - /// [`SchemaVariant`](crate::SchemaVariant). - /// - /// _You can access the [`PropId`](crate::Prop) via the [`ValidationPrototypeContext`], if - /// needed._ - #[instrument(skip_all)] - pub async fn list_for_schema_variant( - ctx: &DalContext, - schema_variant_id: SchemaVariantId, - ) -> ValidationPrototypeResult> { - let rows = ctx - .txns() - .await? - .pg() - .query( - LIST_FOR_SCHEMA_VARIANT, - &[ctx.tenancy(), ctx.visibility(), &schema_variant_id], - ) - .await?; - let object = objects_from_rows(rows)?; - Ok(object) - } - - /// List all [`ValidationPrototypes`](Self) for a [`Func`](crate::Func) - #[instrument(skip_all)] - pub async fn list_for_func( - ctx: &DalContext, - func_id: FuncId, - ) -> ValidationPrototypeResult> { - let rows = ctx - .txns() - .await? - .pg() - .query(LIST_FOR_FUNC, &[ctx.tenancy(), ctx.visibility(), &func_id]) - .await?; - - Ok(objects_from_rows(rows)?) - } +#[derive(EnumDiscriminants, Serialize, Deserialize, PartialEq)] +#[serde(tag = "version")] +pub enum ValidationPrototypeContent { + V1(ValidationPrototypeContentV1), +} - pub async fn find_for_context( - ctx: &DalContext, - context: ValidationPrototypeContext, - ) -> ValidationPrototypeResult> { - let rows = ctx - .txns() - .await? - .pg() - .query( - FIND_FOR_CONTEXT, - &[ - ctx.tenancy(), - ctx.visibility(), - &context.prop_id(), - &context.schema_variant_id(), - &context.schema_id(), - ], - ) - .await?; - - Ok(objects_from_rows(rows)?) 
- } +#[derive(Debug, Clone, Deserialize, Serialize, PartialEq)] +pub struct ValidationPrototypeContentV1 { + #[serde(flatten)] + pub timestamp: Timestamp, + pub func_id: FuncId, + pub args: serde_json::Value, + pub link: Option, +} - pub async fn prop(&self, ctx: &DalContext) -> ValidationPrototypeResult { - Prop::get_by_id(ctx, &self.prop_id()) - .await? - .ok_or(ValidationPrototypeError::PropNotFound(self.prop_id())) +impl ValidationPrototypeGraphNode { + pub fn assemble( + id: impl Into, + content_hash: ContentHash, + content: ValidationPrototypeContentV1, + ) -> Self { + Self { + id: id.into(), + content_address: ContentAddress::ValidationPrototype(content_hash), + content, + } } } + +// impl ValidationPrototype { +// standard_model_accessor!(func_id, Pk(FuncId), ValidationPrototypeResult); +// standard_model_accessor!(args, Json, ValidationPrototypeResult); +// standard_model_accessor!(link, Option, ValidationPrototypeResult); +// standard_model_accessor!(prop_id, Pk(PropId), ValidationPrototypeResult); +// standard_model_accessor!(schema_id, Pk(SchemaId), ValidationPrototypeResult); +// standard_model_accessor!( +// schema_variant_id, +// Pk(SchemaVariantId), +// ValidationPrototypeResult +// ); + +// pub fn context(&self) -> ValidationPrototypeContext { +// ValidationPrototypeContext::new_unchecked( +// self.prop_id, +// self.schema_variant_id, +// self.schema_id, +// ) +// } + +// /// List all [`ValidationPrototypes`](Self) for a given [`Prop`](crate::Prop). +// #[instrument(skip_all)] +// pub async fn list_for_prop( +// ctx: &DalContext, +// prop_id: PropId, +// ) -> ValidationPrototypeResult> { +// let rows = ctx +// .txns() +// .await? +// .pg() +// .query(LIST_FOR_PROP, &[ctx.tenancy(), ctx.visibility(), &prop_id]) +// .await?; +// let object = objects_from_rows(rows)?; +// Ok(object) +// } + +// /// List all [`ValidationPrototypes`](Self) for all [`Props`](crate::Prop) in a +// /// [`SchemaVariant`](crate::SchemaVariant). 
+// /// +// /// _You can access the [`PropId`](crate::Prop) via the [`ValidationPrototypeContext`], if +// /// needed._ +// #[instrument(skip_all)] +// pub async fn list_for_schema_variant( +// ctx: &DalContext, +// schema_variant_id: SchemaVariantId, +// ) -> ValidationPrototypeResult> { +// let rows = ctx +// .txns() +// .await? +// .pg() +// .query( +// LIST_FOR_SCHEMA_VARIANT, +// &[ctx.tenancy(), ctx.visibility(), &schema_variant_id], +// ) +// .await?; +// let object = objects_from_rows(rows)?; +// Ok(object) +// } + +// /// List all [`ValidationPrototypes`](Self) for a [`Func`](crate::Func) +// #[instrument(skip_all)] +// pub async fn list_for_func( +// ctx: &DalContext, +// func_id: FuncId, +// ) -> ValidationPrototypeResult> { +// let rows = ctx +// .txns() +// .await? +// .pg() +// .query(LIST_FOR_FUNC, &[ctx.tenancy(), ctx.visibility(), &func_id]) +// .await?; + +// Ok(objects_from_rows(rows)?) +// } + +// pub async fn find_for_context( +// ctx: &DalContext, +// context: ValidationPrototypeContext, +// ) -> ValidationPrototypeResult> { +// let rows = ctx +// .txns() +// .await? +// .pg() +// .query( +// FIND_FOR_CONTEXT, +// &[ +// ctx.tenancy(), +// ctx.visibility(), +// &context.prop_id(), +// &context.schema_variant_id(), +// &context.schema_id(), +// ], +// ) +// .await?; + +// Ok(objects_from_rows(rows)?) +// } + +// pub async fn prop(&self, ctx: &DalContext) -> ValidationPrototypeResult { +// Prop::get_by_id(ctx, &self.prop_id()) +// .await? 
+// .ok_or(ValidationPrototypeError::PropNotFound(self.prop_id())) +// } +// } diff --git a/lib/dal/src/workspace.rs b/lib/dal/src/workspace.rs index 485edd1acf..4e5b0adef8 100644 --- a/lib/dal/src/workspace.rs +++ b/lib/dal/src/workspace.rs @@ -4,13 +4,14 @@ use si_data_nats::NatsError; use si_data_pg::{PgError, PgRow}; use telemetry::prelude::*; use thiserror::Error; +use ulid::Ulid; use crate::change_set_pointer::{ChangeSetPointer, ChangeSetPointerError, ChangeSetPointerId}; use crate::workspace_snapshot::WorkspaceSnapshotError; use crate::{ - pk, standard_model_accessor_ro, DalContext, HistoryActor, HistoryEvent, HistoryEventError, - KeyPair, KeyPairError, StandardModelError, Tenancy, Timestamp, TransactionsError, User, - UserError, UserPk, WorkspaceSnapshot, + pk, standard_model, standard_model_accessor_ro, ChangeSetPk, DalContext, HistoryActor, + HistoryEvent, HistoryEventError, KeyPair, KeyPairError, StandardModelError, Tenancy, Timestamp, + TransactionsError, User, UserError, UserPk, Visibility, WorkspaceSnapshot, }; const WORKSPACE_GET_BY_PK: &str = include_str!("queries/workspace/get_by_pk.sql"); @@ -60,7 +61,7 @@ pub struct WorkspaceSignup { pub struct Workspace { pk: WorkspacePk, name: String, - base_change_set_id: ChangeSetPointerId, + default_change_set_id: ChangeSetPointerId, #[serde(flatten)] timestamp: Timestamp, } @@ -74,7 +75,7 @@ impl TryFrom for Workspace { Ok(Self { pk: row.try_get("pk")?, name: row.try_get("name")?, - base_change_set_id: row.try_get("base_change_set_id")?, + default_change_set_id: row.try_get("default_change_set_id")?, timestamp: Timestamp::assemble(created_at, updated_at), }) } @@ -88,26 +89,31 @@ impl Workspace { /// Find or create the builtin [`Workspace`]. #[instrument(skip_all)] pub async fn builtin(ctx: &DalContext) -> WorkspaceResult { + dbg!("create builtin workspace"); // Check if the builtin already exists. if let Some(found_builtin) = Self::find_builtin(ctx).await? 
{ + dbg!("already have builtin"); return Ok(found_builtin); } // If not, create the builtin workspace with a corresponding base change set and initial // workspace snapshot. - let mut change_set = ChangeSetPointer::new(ctx, "HEAD").await?; + let name = "builtin"; + + dbg!("change set pointer new"); + let mut change_set = ChangeSetPointer::new_head(ctx).await?; let workspace_snapshot = WorkspaceSnapshot::initial(ctx, &change_set).await?; change_set .update_pointer(ctx, workspace_snapshot.id()) .await?; let head_pk = WorkspaceId::NONE; - let name = "builtin"; + let row = ctx .txns() .await? .pg() .query_one( - "INSERT INTO workspaces (pk, name, base_change_set_id) VALUES ($1, $2, $3) RETURNING *", + "INSERT INTO workspaces (pk, name, default_change_set_id) VALUES ($1, $2, $3) RETURNING *", &[&head_pk, &name, &change_set.id], ) .await?; @@ -163,13 +169,15 @@ impl Workspace { pk: WorkspacePk, name: impl AsRef, ) -> WorkspaceResult { - // Get the snapshot that the builtin workspace's base change set is pointing at. + // Get the default change set from the builtin workspace. let builtin = Self::builtin(ctx).await?; - let workspace_snapshot = - WorkspaceSnapshot::find_for_change_set(ctx, builtin.base_change_set_id).await?; - // Create a new change set and point to the aforementioned snapshot. - let mut change_set = ChangeSetPointer::new(ctx, "HEAD").await?; + // Create a new change set whose base is the default change set of the workspace. + // Point to the snapshot that the builtin's default change set is pointing to. + let mut change_set = + ChangeSetPointer::new(ctx, "HEAD", Some(builtin.default_change_set_id)).await?; + let workspace_snapshot = + WorkspaceSnapshot::find_for_change_set(ctx, builtin.default_change_set_id).await?; change_set .update_pointer(ctx, workspace_snapshot.id()) .await?; @@ -180,7 +188,7 @@ impl Workspace { .await? 
.pg() .query_one( - "INSERT INTO workspaces (pk, name, base_change_set_id) VALUES ($1, $2, $3) RETURNING *", + "INSERT INTO workspaces (pk, name, default_change_set_id) VALUES ($1, $2, $3) RETURNING *", &[&pk, &name, &change_set.id], ) .await?; @@ -188,6 +196,12 @@ impl Workspace { ctx.update_tenancy(Tenancy::new(new_workspace.pk)); + // TODO(nick,zack,jacob): convert visibility (or get rid of it?) to use our the new change set id. + ctx.update_visibility(Visibility::new( + ChangeSetPk::from(Ulid::from(change_set.id)), + None, + )); + let _history_event = HistoryEvent::new( ctx, "workspace.create".to_owned(), @@ -228,36 +242,6 @@ impl Workspace { Ok(workspace) } - pub async fn signup( - ctx: &mut DalContext, - workspace_name: impl AsRef, - user_name: impl AsRef, - user_email: impl AsRef, - ) -> WorkspaceResult { - let workspace = Workspace::new(ctx, WorkspacePk::generate(), workspace_name).await?; - let key_pair = KeyPair::new(ctx, "default").await?; - - let user = User::new( - ctx, - UserPk::generate(), - &user_name, - &user_email, - None::<&str>, - ) - .await?; - user.associate_workspace(ctx, *workspace.pk()).await?; - - ctx.update_history_actor(HistoryActor::User(user.pk())); - - ctx.import_builtins().await?; - - Ok(WorkspaceSignup { - key_pair, - user, - workspace, - }) - } - pub async fn find_by_name(ctx: &DalContext, name: &str) -> WorkspaceResult> { let maybe_row = ctx .txns() diff --git a/lib/dal/src/workspace_snapshot.rs b/lib/dal/src/workspace_snapshot.rs index ce655d7ea6..94bf377915 100644 --- a/lib/dal/src/workspace_snapshot.rs +++ b/lib/dal/src/workspace_snapshot.rs @@ -21,6 +21,7 @@ // clippy::missing_panics_doc // )] +pub mod api; pub mod conflict; pub mod content_address; pub mod edge_weight; @@ -31,7 +32,7 @@ pub mod update; pub mod vector_clock; use chrono::{DateTime, Utc}; -use content_store::ContentHash; +use content_store::{ContentHash, StoreError}; use petgraph::prelude::*; use serde::{Deserialize, Serialize}; use si_cbor::CborError; @@ -44,6 
+45,7 @@ use ulid::Ulid; use crate::change_set_pointer::{ChangeSetPointer, ChangeSetPointerError, ChangeSetPointerId}; use crate::workspace_snapshot::conflict::Conflict; use crate::workspace_snapshot::edge_weight::{EdgeWeight, EdgeWeightError, EdgeWeightKind}; +use crate::workspace_snapshot::node_weight::category_node_weight::CategoryNodeKind; use crate::workspace_snapshot::node_weight::NodeWeight; use crate::workspace_snapshot::update::Update; use crate::workspace_snapshot::vector_clock::VectorClockId; @@ -52,6 +54,7 @@ use crate::{ workspace_snapshot::{graph::WorkspaceSnapshotGraphError, node_weight::NodeWeightError}, DalContext, TransactionsError, WorkspaceSnapshotGraph, }; +use crate::{AttributePrototypeId, AttributeValueId, PropId, PropKind}; const FIND_FOR_CHANGE_SET: &str = include_str!("queries/workspace_snapshot/find_for_change_set.sql"); @@ -59,22 +62,48 @@ const FIND_FOR_CHANGE_SET: &str = #[remain::sorted] #[derive(Error, Debug)] pub enum WorkspaceSnapshotError { + #[error("attribute prototype {0} is missing a function edge")] + AttributePrototypeMissingFunction(AttributePrototypeId), + #[error("attribute value {0} missing prop edge when one was expected")] + AttributeValueMissingPropEdge(AttributeValueId), + #[error("attribute value {0} missing prototype")] + AttributeValueMissingPrototype(AttributeValueId), #[error("cbor error: {0}")] Cbor(#[from] CborError), #[error("change set pointer error: {0}")] ChangeSetPointer(#[from] ChangeSetPointerError), #[error("edge weight error: {0}")] EdgeWeight(#[from] EdgeWeightError), + #[error("cannot insert for prop kind: {0}")] + InsertionForInvalidPropKind(PropKind), + #[error("cannot find intrinsic func {0}")] + IntrinsicFuncNotFound(String), + #[error("missing content from store for id: {0}")] + MissingContentFromStore(Ulid), #[error("monotonic error: {0}")] Monotonic(#[from] ulid::MonotonicError), #[error("NodeWeight error: {0}")] NodeWeight(#[from] NodeWeightError), + #[error("NodeWeight mismatch, expected 
{0:?} to be {1}")] + NodeWeightMismatch(NodeIndex, String), #[error("si_data_pg error: {0}")] Pg(#[from] PgError), #[error("poison error: {0}")] Poison(String), + #[error("Array or map prop missing element prop: {0}")] + PropMissingElementProp(PropId), + #[error("Array or map prop has more than one child prop: {0}")] + PropMoreThanOneChild(PropId), + #[error("serde json error: {0}")] + SerdeJson(#[from] serde_json::Error), + #[error("store error: {0}")] + Store(#[from] StoreError), #[error("transactions error: {0}")] Transactions(#[from] TransactionsError), + #[error("Type mismatch, expected prop kind {0} got {1}")] + TypeMismatch(PropKind, String), + #[error("unexpected graph layout: {0}")] + UnexpectedGraphLayout(&'static str), #[error("WorkspaceSnapshotGraph error: {0}")] WorkspaceSnapshotGraph(#[from] WorkspaceSnapshotGraphError), #[error("workspace snapshot graph missing")] @@ -107,13 +136,49 @@ impl TryFrom for WorkspaceSnapshot { } } +pub(crate) fn serde_value_to_string_type(value: &serde_json::Value) -> String { + match value { + serde_json::Value::Array(_) => "array", + serde_json::Value::Bool(_) => "bool", + serde_json::Value::Null => "null", + serde_json::Value::Number(_) => "number", + serde_json::Value::Object(_) => "object", + serde_json::Value::String(_) => "string", + } + .into() +} + impl WorkspaceSnapshot { pub async fn initial( ctx: &DalContext, change_set: &ChangeSetPointer, ) -> WorkspaceSnapshotResult { - let snapshot = WorkspaceSnapshotGraph::new(change_set)?; - Self::new_inner(ctx, snapshot).await + let mut graph = WorkspaceSnapshotGraph::new(change_set)?; + + // Create the category nodes under root. + let component_node_index = + graph.add_category_node(change_set, CategoryNodeKind::Component)?; + let func_node_index = graph.add_category_node(change_set, CategoryNodeKind::Func)?; + let schema_node_index = graph.add_category_node(change_set, CategoryNodeKind::Schema)?; + + // Connect them to root. 
+ graph.add_edge( + graph.root(), + EdgeWeight::new(change_set, EdgeWeightKind::Use)?, + component_node_index, + )?; + graph.add_edge( + graph.root(), + EdgeWeight::new(change_set, EdgeWeightKind::Use)?, + func_node_index, + )?; + graph.add_edge( + graph.root(), + EdgeWeight::new(change_set, EdgeWeightKind::Use)?, + schema_node_index, + )?; + + Self::new_inner(ctx, graph).await } pub async fn write( @@ -155,7 +220,7 @@ impl WorkspaceSnapshot { &[&serialized_snapshot], ) .await?; - Ok(Self::try_from(row)?) + Self::try_from(row) } pub fn id(&self) -> WorkspaceSnapshotId { @@ -217,15 +282,6 @@ impl WorkspaceSnapshot { )?) } - pub fn remove_edge_for_update_stableish( - &mut self, - edge_index: EdgeIndex, - ) -> WorkspaceSnapshotResult<()> { - Ok(self - .working_copy()? - .remove_edge_for_update_stableish(edge_index)?) - } - pub fn get_edge_by_index_stableish( &mut self, edge_index: EdgeIndex, diff --git a/lib/dal/src/workspace_snapshot/api.rs b/lib/dal/src/workspace_snapshot/api.rs new file mode 100644 index 0000000000..db6d9c9346 --- /dev/null +++ b/lib/dal/src/workspace_snapshot/api.rs @@ -0,0 +1,107 @@ +use petgraph::stable_graph::EdgeIndex; +use petgraph::stable_graph::Edges; +use petgraph::visit::EdgeRef; +use petgraph::Directed; +use ulid::Ulid; + +use crate::workspace_snapshot::edge_weight::EdgeWeight; +use crate::workspace_snapshot::graph::Direction; +use crate::workspace_snapshot::graph::NodeIndex; +use crate::workspace_snapshot::node_weight::category_node_weight::CategoryNodeKind; +use crate::workspace_snapshot::WorkspaceSnapshotResult; +use crate::WorkspaceSnapshot; + +use super::edge_weight::EdgeWeightKindDiscriminants; + +pub mod attribute; +// pub mod component; +pub mod func; +// pub mod node; +pub mod prop; +pub mod provider; +pub mod schema; +pub mod socket; +pub mod validation; + +impl WorkspaceSnapshot { + pub fn get_category_child(&mut self, kind: CategoryNodeKind) -> WorkspaceSnapshotResult<()> { + 
//Ok(self.working_copy()?.get_category_child(kind)?) + + Ok(()) + } + + pub fn edges_directed( + &mut self, + id: Ulid, + direction: Direction, + ) -> WorkspaceSnapshotResult> { + let node_index = self.working_copy()?.get_node_index_by_id(id)?; + Ok(self.working_copy()?.edges_directed(node_index, direction)) + } + + pub fn edges_directed_by_index( + &mut self, + node_index: NodeIndex, + direction: Direction, + ) -> WorkspaceSnapshotResult> { + Ok(self.working_copy()?.edges_directed(node_index, direction)) + } + + pub fn incoming_sources_for_edge_weight_kind( + &mut self, + id: Ulid, + edge_weight_kind_discrim: EdgeWeightKindDiscriminants, + ) -> WorkspaceSnapshotResult> { + Ok(self + .edges_directed(id, Direction::Incoming)? + .filter_map(|edge_ref| { + if edge_weight_kind_discrim == edge_ref.weight().kind().into() { + Some(edge_ref.source()) + } else { + None + } + }) + .collect()) + } + + pub fn outgoing_targets_for_edge_weight_kind( + &mut self, + id: Ulid, + edge_weight_kind_discrim: EdgeWeightKindDiscriminants, + ) -> WorkspaceSnapshotResult> { + Ok(self + .edges_directed(id, Direction::Outgoing)? + .filter_map(|edge_ref| { + if edge_weight_kind_discrim == edge_ref.weight().kind().into() { + Some(edge_ref.target()) + } else { + None + } + }) + .collect()) + } + + pub fn outgoing_targets_for_edge_weight_kind_by_index( + &mut self, + node_index: NodeIndex, + edge_weight_kind_discrim: EdgeWeightKindDiscriminants, + ) -> WorkspaceSnapshotResult> { + Ok(self + .edges_directed_by_index(node_index, Direction::Outgoing)? 
+ .filter_map(|edge_ref| { + if edge_weight_kind_discrim == edge_ref.weight().kind().into() { + Some(edge_ref.target()) + } else { + None + } + }) + .collect()) + } + + pub fn remove_edge( + &mut self, + edge_index: EdgeIndex, + ) -> WorkspaceSnapshotResult> { + Ok(self.working_copy()?.remove_edge_by_index(edge_index)) + } +} diff --git a/lib/dal/src/workspace_snapshot/api/attribute.rs b/lib/dal/src/workspace_snapshot/api/attribute.rs new file mode 100644 index 0000000000..5a21cf1370 --- /dev/null +++ b/lib/dal/src/workspace_snapshot/api/attribute.rs @@ -0,0 +1,2 @@ +pub mod prototype; +pub mod value; diff --git a/lib/dal/src/workspace_snapshot/api/attribute/prototype.rs b/lib/dal/src/workspace_snapshot/api/attribute/prototype.rs new file mode 100644 index 0000000000..1c935c415b --- /dev/null +++ b/lib/dal/src/workspace_snapshot/api/attribute/prototype.rs @@ -0,0 +1,103 @@ +use content_store::{Store}; +use petgraph::prelude::*; + + +use crate::attribute::prototype::{ + AttributePrototypeContent, AttributePrototypeContentV1, +}; +use crate::change_set_pointer::ChangeSetPointer; + + + + + +use crate::workspace_snapshot::content_address::ContentAddress; +use crate::workspace_snapshot::edge_weight::{ + EdgeWeight, EdgeWeightKind, EdgeWeightKindDiscriminants, +}; +use crate::workspace_snapshot::node_weight::NodeWeight; +use crate::workspace_snapshot::{WorkspaceSnapshotError, WorkspaceSnapshotResult}; +use crate::{ + AttributePrototype, AttributePrototypeId, DalContext, FuncId, Timestamp, + WorkspaceSnapshot, +}; + +impl WorkspaceSnapshot { + // NOTE(nick,jacob,zack): all incoming edges to an attribute prototype must come from one of two places: + // - an attribute value whose lineage comes from a component + // - a prop whose lineage comes from a schema variant + // Outgoing edges from an attribute prototype are used for intra and inter component relationships. 
+ pub async fn attribute_prototype_create( + &mut self, + ctx: &DalContext, + change_set: &ChangeSetPointer, + func_id: FuncId, + ) -> WorkspaceSnapshotResult<(AttributePrototype, NodeIndex)> { + let timestamp = Timestamp::now(); + + let content = AttributePrototypeContentV1 { timestamp }; + let hash = ctx + .content_store() + .lock() + .await + .add(&AttributePrototypeContent::V1(content.clone()))?; + + let id = change_set.generate_ulid()?; + let node_weight = + NodeWeight::new_content(change_set, id, ContentAddress::AttributePrototype(hash))?; + let node_index = self.working_copy()?.add_node(node_weight)?; + + let func_node_index = self.working_copy()?.get_node_index_by_id(func_id.into())?; + self.working_copy()?.add_edge( + node_index, + EdgeWeight::new(change_set, EdgeWeightKind::Use)?, + func_node_index, + )?; + + Ok(( + AttributePrototype::assemble(AttributePrototypeId::from(id), &content), + node_index, + )) + } + + pub fn attribute_prototype_update_func( + &mut self, + change_set: &ChangeSetPointer, + attribute_prototype_id: AttributePrototypeId, + func_id: FuncId, + ) -> WorkspaceSnapshotResult<()> { + let attribute_prototype_idx = self + .working_copy()? + .get_node_index_by_id(attribute_prototype_id.into())?; + + let current_func_node_idx = self + .edges_directed(attribute_prototype_id.into(), Direction::Outgoing)? + .find(|edge_ref| edge_ref.weight().kind() == &EdgeWeightKind::Use) + .map(|edge_ref| edge_ref.target()) + .ok_or(WorkspaceSnapshotError::AttributePrototypeMissingFunction( + attribute_prototype_id, + ))?; + + self.working_copy()?.remove_edge( + change_set, + attribute_prototype_idx, + current_func_node_idx, + EdgeWeightKindDiscriminants::Use, + )?; + + // Node index changes after edge removal, so we have to fetch it again + let attribute_prototype_idx = self + .working_copy()? 
+ .get_node_index_by_id(attribute_prototype_id.into())?; + + let func_node_idx = self.working_copy()?.get_node_index_by_id(func_id.into())?; + + self.working_copy()?.add_edge( + attribute_prototype_idx, + EdgeWeight::new(change_set, EdgeWeightKind::Use)?, + func_node_idx, + )?; + + Ok(()) + } +} diff --git a/lib/dal/src/workspace_snapshot/api/attribute/value.rs b/lib/dal/src/workspace_snapshot/api/attribute/value.rs new file mode 100644 index 0000000000..c4a61e0615 --- /dev/null +++ b/lib/dal/src/workspace_snapshot/api/attribute/value.rs @@ -0,0 +1,810 @@ +use std::collections::{HashMap, VecDeque}; + +use content_store::{ContentHash, Store}; +use petgraph::prelude::*; +use ulid::Ulid; + +use crate::attribute::value::{AttributeValueContent, AttributeValueContentV1}; +use crate::change_set_pointer::ChangeSetPointer; +use crate::func::intrinsics::IntrinsicFunc; + +use crate::workspace_snapshot::content_address::ContentAddress; +use crate::workspace_snapshot::edge_weight::{ + EdgeWeight, EdgeWeightKind, EdgeWeightKindDiscriminants, +}; +use crate::workspace_snapshot::graph::WorkspaceSnapshotGraphError; +use crate::workspace_snapshot::node_weight::NodeWeight; +use crate::workspace_snapshot::{ + serde_value_to_string_type, WorkspaceSnapshotError, WorkspaceSnapshotResult, +}; +use crate::{ + AttributePrototypeId, AttributeValue, AttributeValueId, DalContext, FuncId, PropId, PropKind, + Timestamp, WorkspaceSnapshot, +}; + +// pub enum AttributeValueParent { +// // "More specific" +// Component(ComponentId), + +// // "Least specific" +// ExternalProvider(ExternalProviderId), +// InternalProvider(InternalProviderId), +// Prop(PropId), + +// // "I don't care, eventually my parent knows who they belong to" +// AttributeValue(AttributeValueId), +// } + +impl WorkspaceSnapshot { + pub async fn attribute_value_create( + &mut self, + ctx: &DalContext, + change_set: &ChangeSetPointer, + ordered: bool, + ) -> WorkspaceSnapshotResult<(AttributeValue, NodeIndex)> { + let timestamp = 
Timestamp::now(); + + let content = AttributeValueContentV1 { + timestamp, + unprocessed_value: None, + value: None, + }; + let hash = ctx + .content_store() + .lock() + .await + .add(&AttributeValueContent::V1(content.clone()))?; + + let id = change_set.generate_ulid()?; + let node_weight = + NodeWeight::new_content(change_set, id, ContentAddress::AttributeValue(hash))?; + let node_index = if ordered { + self.working_copy()? + .add_ordered_node(change_set, node_weight)? + } else { + self.working_copy()?.add_node(node_weight)? + }; + + Ok(( + AttributeValue::assemble(AttributeValueId::from(id), &content), + node_index, + )) + } + + pub async fn attribute_value_update( + &mut self, + ctx: &DalContext, + change_set: &ChangeSetPointer, + attribute_value_id: AttributeValueId, + value: Option, + ) -> WorkspaceSnapshotResult<()> { + self.attribute_value_vivify_value_and_parent_values(ctx, change_set, attribute_value_id) + .await?; + self.attribute_value_set_value(ctx, change_set, attribute_value_id, value.clone()) + .await?; + self.attribute_value_populate_nested_values(ctx, change_set, attribute_value_id, value) + .await?; + Ok(()) + } + + pub async fn attribute_value_insert( + &mut self, + ctx: &DalContext, + change_set: &ChangeSetPointer, + parent_attribute_value_id: AttributeValueId, + key: Option, + value: Option, + ) -> WorkspaceSnapshotResult<()> { + // Find the array or map prop. + let prop_index = self + .outgoing_targets_for_edge_weight_kind( + parent_attribute_value_id.into(), + EdgeWeightKindDiscriminants::Prop, + )? + .get(0) + .copied() + .ok_or(WorkspaceSnapshotError::AttributeValueMissingPropEdge( + parent_attribute_value_id, + ))?; + let prop_node_weight = match self.get_node_weight(prop_index)?.clone() { + NodeWeight::Prop(inner) => inner, + _ => { + return Err(WorkspaceSnapshotError::NodeWeightMismatch( + prop_index, + "NodeWeight::Prop".into(), + )) + } + }; + + // Ensure it actually is an array or map prop. 
+ if prop_node_weight.kind() != PropKind::Array && prop_node_weight.kind() != PropKind::Map {
+ return Err(WorkspaceSnapshotError::InsertionForInvalidPropKind(
+ prop_node_weight.kind(),
+ ));
+ }
+
+ // Find a singular child prop for the map or an array prop (i.e. the "element" or "entry" prop).
+ let prop_id = PropId::from(prop_node_weight.id());
+ let child_prop_indices = self.outgoing_targets_for_edge_weight_kind(
+ prop_node_weight.id(),
+ EdgeWeightKindDiscriminants::Use,
+ )?;
+ if child_prop_indices.len() > 1 {
+ return Err(WorkspaceSnapshotError::PropMoreThanOneChild(prop_id));
+ }
+ let element_prop_index = child_prop_indices
+ .get(0)
+ .ok_or(WorkspaceSnapshotError::PropMissingElementProp(prop_id))?
+ .to_owned();
+ let element_prop_node_weight = match self.get_node_weight(element_prop_index)?.clone() {
+ NodeWeight::Prop(inner) => inner,
+ _ => {
+ return Err(WorkspaceSnapshotError::NodeWeightMismatch(
+ element_prop_index,
+ "NodeWeight::Prop".into(),
+ ))
+ }
+ };
+
+ // Create the "element" attribute value in the array or map alongside an attribute prototype for it.
+ let (new_attribute_value_node, new_attribute_value_index) = self
+ .attribute_value_create(
+ ctx,
+ change_set,
+ matches!(
+ element_prop_node_weight.kind(),
+ PropKind::Map | PropKind::Object | PropKind::Array
+ ),
+ )
+ .await?;
+ let parent_av_node_index = self.get_node_index_by_id(parent_attribute_value_id.into())?;
+ self.working_copy()?.add_ordered_edge(
+ change_set,
+ parent_av_node_index,
+ EdgeWeight::new(change_set, EdgeWeightKind::Contain(key))?,
+ new_attribute_value_index,
+ )?;
+ self.working_copy()?.add_edge(
+ new_attribute_value_index,
+ EdgeWeight::new(change_set, EdgeWeightKind::Prop)?,
+ element_prop_index,
+ )?;
+ let func_id = self.func_find_intrinsic(IntrinsicFunc::Unset)?;
+ self.attribute_prototype_create(ctx, change_set, func_id)
+ .await?;
+
+ // The element has been created and inserted. Now, we can update it with the provided value.
+ self.attribute_value_update(ctx, change_set, new_attribute_value_node.id, value) + .await + } + + async fn attribute_value_vivify_value_and_parent_values( + &mut self, + ctx: &DalContext, + change_set: &ChangeSetPointer, + attribute_value_id: AttributeValueId, + ) -> WorkspaceSnapshotResult<()> { + // determine if the value is for a prop, or for an internal provider. if it is for an + // internal provider we want to find if it is an internal provider for a prop (since we + // want to use the function for that prop kind), or if it is an explicit internal or + // external provider (and has no prop) + + // Values on components have outgoing edges to props or outgoing edges to a provider. Values + // on a schema variant have incoming edges from props or incoming edges from providers + let mut current_attribute_value_id = Some(attribute_value_id); + + while let Some(attribute_value_id) = current_attribute_value_id { + let mut maybe_prop_node_index = None; + let mut maybe_provider_node_index = None; + + for edge_ref in self.edges_directed(attribute_value_id.into(), Outgoing)? { + if edge_ref.weight().kind() == &EdgeWeightKind::Prop { + maybe_prop_node_index = Some(edge_ref.target()); + } + + if edge_ref.weight().kind() == &EdgeWeightKind::Provider { + maybe_provider_node_index = Some(edge_ref.target()); + } + } + + if maybe_provider_node_index.is_none() || maybe_prop_node_index.is_none() { + for edge_ref in self.edges_directed(attribute_value_id.into(), Incoming)? { + if edge_ref.weight().kind() == &EdgeWeightKind::Prop { + maybe_prop_node_index = Some(edge_ref.source()); + } + + if edge_ref.weight().kind() == &EdgeWeightKind::Provider { + maybe_provider_node_index = Some(edge_ref.source()); + } + } + } + + // This should not be possible. 
+ if maybe_prop_node_index.is_some() && maybe_provider_node_index.is_some() {
+ return Err(WorkspaceSnapshotError::UnexpectedGraphLayout(
+ "found both a provider edge and a prop edge",
+ ));
+ }
+
+ // We're set on a provider, so we should look up the prop (if any)
+ if let Some(provider_node_index) = maybe_provider_node_index {
+ let provider_id = self
+ .working_copy()?
+ .get_node_weight(provider_node_index)?
+ .id();
+
+ maybe_prop_node_index = self
+ .incoming_sources_for_edge_weight_kind(
+ provider_id,
+ EdgeWeightKindDiscriminants::Prop,
+ )?
+ .get(0)
+ .copied();
+ }
+
+ let empty_value = match maybe_prop_node_index {
+ Some(prop_node_index) => {
+ match self.working_copy()?.get_node_weight(prop_node_index).map(
+ |node_weight| {
+ if let NodeWeight::Prop(inner) = node_weight {
+ Some(inner.kind())
+ } else {
+ None
+ }
+ },
+ )? {
+ Some(PropKind::Array) => Some(serde_json::json!([])),
+ Some(PropKind::Map) | Some(PropKind::Object) => Some(serde_json::json!({})),
+
+ // This means we did not get a prop node weight despite the node index coming
+ // from a prop edge
+ None => {
+ return Err(WorkspaceSnapshotError::NodeWeightMismatch(
+ prop_node_index,
+ "NodeWeight::Prop".into(),
+ ))
+ }
+ _ => None,
+ }
+ }
+ None => Some(serde_json::json!({})),
+ };
+
+ let (_, inner) = self
+ .attribute_value_get_content(ctx, attribute_value_id)
+ .await?;
+
+ // If we have a set value, we don't need to vivify
+ if inner.value.is_some() {
+ return Ok(());
+ } else {
+ self.attribute_value_set_value(ctx, change_set, attribute_value_id, empty_value)
+ .await?;
+
+ // This assumes the only incoming contain edge from an attribute value is from
+ // another attribute value
+ let maybe_parent_attribute_node_index = self
+ .incoming_sources_for_edge_weight_kind(
+ attribute_value_id.into(),
+ EdgeWeightKindDiscriminants::Contain,
+ )?
+ .get(0) + .copied(); + + if let Some(node_index) = maybe_parent_attribute_node_index { + current_attribute_value_id = Some(AttributeValueId::from( + self.get_node_weight(node_index)?.id(), + )); + } else { + current_attribute_value_id = None; + } + } + } + + Ok(()) + } + + async fn create_nested_value( + &mut self, + ctx: &DalContext, + change_set: &ChangeSetPointer, + attribute_value_id: AttributeValueId, + value: Option, + func_id: FuncId, + prop_id: PropId, + key: Option, + ) -> WorkspaceSnapshotResult { + let prop_node_index = self.get_node_index_by_id(prop_id.into())?; + let prop_kind = + if let NodeWeight::Prop(prop_inner) = self.get_node_weight(prop_node_index)? { + prop_inner.kind() + } else { + return Err(WorkspaceSnapshotError::NodeWeightMismatch( + prop_node_index, + "NodeWeight::Prop".into(), + )); + }; + + let (new_attribute_value_node, new_attribute_value_index) = + self.attribute_value_create(ctx, change_set, true).await?; + + let parent_av_node_index = self.get_node_index_by_id(attribute_value_id.into())?; + self.working_copy()?.add_ordered_edge( + change_set, + parent_av_node_index, + EdgeWeight::new(change_set, EdgeWeightKind::Contain(key))?, + new_attribute_value_index, + )?; + + self.working_copy()?.add_edge( + new_attribute_value_index, + EdgeWeight::new(change_set, EdgeWeightKind::Prop)?, + prop_node_index, + )?; + + self.attribute_prototype_create(ctx, change_set, func_id) + .await?; + + match prop_kind { + PropKind::Object | PropKind::Map => { + self.attribute_value_set_value( + ctx, + change_set, + attribute_value_id, + if value.is_some() { + Some(serde_json::json!({})) + } else { + None + }, + ) + .await?; + } + PropKind::Array => { + self.attribute_value_set_value( + ctx, + change_set, + attribute_value_id, + if value.is_some() { + Some(serde_json::json!([])) + } else { + None + }, + ) + .await?; + } + _ => { + self.attribute_value_set_value(ctx, change_set, attribute_value_id, value) + .await?; + } + } + + 
Ok(new_attribute_value_node.id) + } + + pub async fn attribute_value_populate_nested_values( + &mut self, + ctx: &DalContext, + change_set: &ChangeSetPointer, + attribute_value_id: AttributeValueId, + value: Option, + ) -> WorkspaceSnapshotResult<()> { + // Remove child attribute value edges + for attribute_value_target in self.outgoing_targets_for_edge_weight_kind( + attribute_value_id.into(), + EdgeWeightKindDiscriminants::Contain, + )? { + let current_node_index = self.get_node_index_by_id(attribute_value_id.into())?; + self.working_copy()?.remove_edge( + change_set, + current_node_index, + attribute_value_target, + EdgeWeightKindDiscriminants::Contain, + )?; + } + + let mut work_queue = VecDeque::from([(attribute_value_id, value)]); + + let unset_func_id = self.func_find_intrinsic(IntrinsicFunc::Unset)?; + + while let Some((attribute_value_id, maybe_value)) = work_queue.pop_front() { + // We're only looking for props on outgoing edges because we're assuming this will only be used for + // attribute values on components. For default values at the schema variant level, we're + // planning to add a "const arg" node that contains the default input for the function that + // sets the value on the prototype + let prop_node_index = self + .outgoing_targets_for_edge_weight_kind( + attribute_value_id.into(), + EdgeWeightKindDiscriminants::Prop, + )? + .get(0) + .copied() + .ok_or(WorkspaceSnapshotError::AttributeValueMissingPropEdge( + attribute_value_id, + ))?; + + let (prop_kind, prop_id) = + if let NodeWeight::Prop(prop_inner) = self.get_node_weight(prop_node_index)? 
{ + (prop_inner.kind(), PropId::from(prop_inner.id())) + } else { + return Err(WorkspaceSnapshotError::NodeWeightMismatch( + prop_node_index, + "NodeWeight::Prop".into(), + )); + }; + + match prop_kind { + PropKind::Object => { + let maybe_object_map = match maybe_value { + Some(serde_json::Value::Object(map)) => Some(map), + Some(value) => { + return Err(WorkspaceSnapshotError::TypeMismatch( + prop_kind, + serde_value_to_string_type(&value), + )); + } + None => None, + }; + + let child_prop_indexes = self.outgoing_targets_for_edge_weight_kind( + prop_id.into(), + EdgeWeightKindDiscriminants::Use, + )?; + + let mut prop_map = HashMap::new(); + for node_index in child_prop_indexes { + if let NodeWeight::Prop(prop_inner) = self.get_node_weight(node_index)? { + prop_map.insert( + prop_inner.name().to_string(), + (prop_inner.id(), prop_inner.kind()), + ); + } + } + + // Remove keys from our value if there is no corresponding child prop + let maybe_object_map = maybe_object_map.map(|mut map| { + map.retain(|k, _| prop_map.contains_key(k)); + map + }); + + for (key, (prop_id, prop_kind)) in prop_map.into_iter() { + let field_value = maybe_object_map + .as_ref() + .and_then(|map| map.get(&key).cloned()); + + let new_attribute_value_id = self + .create_nested_value( + ctx, + change_set, + attribute_value_id, + field_value.clone(), + unset_func_id, + PropId::from(prop_id), + None, + ) + .await?; + + match prop_kind { + PropKind::Array | PropKind::Map => { + if field_value.is_some() { + work_queue.push_back((new_attribute_value_id, field_value)); + } + } + PropKind::Object => { + work_queue.push_back((new_attribute_value_id, field_value)); + } + _ => {} + } + } + } + PropKind::Array => { + let array_items = match maybe_value { + Some(serde_json::Value::Array(array)) => { + if array.is_empty() { + continue; + } + array + } + Some(value) => { + return Err(WorkspaceSnapshotError::TypeMismatch( + prop_kind, + serde_value_to_string_type(&value), + )); + } + None => continue, + }; 
+ + // find the child element prop + let child_props = self.outgoing_targets_for_edge_weight_kind( + prop_id.into(), + EdgeWeightKindDiscriminants::Use, + )?; + + if child_props.len() > 1 { + return Err(WorkspaceSnapshotError::PropMoreThanOneChild(prop_id)); + } + + let element_prop_index = child_props + .get(0) + .ok_or(WorkspaceSnapshotError::PropMissingElementProp(prop_id))? + .to_owned(); + + let (element_prop_id, element_prop_kind) = + match self.get_node_weight(element_prop_index)? { + NodeWeight::Prop(prop_inner) => (prop_inner.id(), prop_inner.kind()), + _ => { + return Err(WorkspaceSnapshotError::NodeWeightMismatch( + element_prop_index, + "NodeWeight::Prop".into(), + )) + } + }; + + for array_item in array_items { + // TODO: should we type check the values here against the element prop? + let array_item_value = Some(array_item); + let new_attribute_value_id = self + .create_nested_value( + ctx, + change_set, + attribute_value_id, + array_item_value.clone(), + unset_func_id, + PropId::from(element_prop_id), + None, + ) + .await?; + + match element_prop_kind { + PropKind::Array | PropKind::Map => { + if array_item_value.is_some() { + work_queue + .push_back((new_attribute_value_id, array_item_value)); + } + } + PropKind::Object => { + work_queue.push_back((new_attribute_value_id, array_item_value)); + } + _ => {} + } + } + } + PropKind::Map => { + let map_map = match maybe_value { + Some(serde_json::Value::Object(map)) => { + if map.is_empty() { + continue; + } + map + } + Some(value) => { + return Err(WorkspaceSnapshotError::TypeMismatch( + prop_kind, + serde_value_to_string_type(&value), + )); + } + None => continue, + }; + + // find the child element prop + let child_props = self.outgoing_targets_for_edge_weight_kind( + prop_id.into(), + EdgeWeightKindDiscriminants::Use, + )?; + + if child_props.len() > 1 { + return Err(WorkspaceSnapshotError::PropMoreThanOneChild(prop_id)); + } + + let element_prop_index = child_props + .get(0) + 
.ok_or(WorkspaceSnapshotError::PropMissingElementProp(prop_id))? + .to_owned(); + + let (element_prop_id, element_prop_kind) = + match self.get_node_weight(element_prop_index)? { + NodeWeight::Prop(prop_inner) => (prop_inner.id(), prop_inner.kind()), + _ => { + return Err(WorkspaceSnapshotError::NodeWeightMismatch( + element_prop_index, + "NodeWeight::Prop".into(), + )) + } + }; + + for (key, value) in map_map.into_iter() { + let value = Some(value); + let new_attribute_value_id = self + .create_nested_value( + ctx, + change_set, + attribute_value_id, + value.clone(), + unset_func_id, + PropId::from(element_prop_id), + Some(key), + ) + .await?; + + match element_prop_kind { + PropKind::Array | PropKind::Map => { + if value.is_some() { + work_queue.push_back((new_attribute_value_id, value)); + } + } + PropKind::Object => { + work_queue.push_back((new_attribute_value_id, value)); + } + _ => {} + } + } + } + _ => {} + } + } + + Ok(()) + } + + async fn attribute_value_set_value( + &mut self, + ctx: &DalContext, + change_set: &ChangeSetPointer, + attribute_value_id: AttributeValueId, + value: Option, + ) -> WorkspaceSnapshotResult<()> { + let mut maybe_prop_node_index = None; + let mut maybe_prototype_node_index = None; + let mut prop_direction = Outgoing; + for edge_ref in self.edges_directed(attribute_value_id.into(), Outgoing)? { + if edge_ref.weight().kind() == &EdgeWeightKind::Prop { + maybe_prop_node_index = Some(edge_ref.target()); + prop_direction = Outgoing; + } + if edge_ref.weight().kind() == &EdgeWeightKind::Prototype { + maybe_prototype_node_index = Some(edge_ref.target()); + } + } + + let prototype_node_index = maybe_prototype_node_index.ok_or( + WorkspaceSnapshotError::AttributeValueMissingPrototype(attribute_value_id), + )?; + + let prototype_id = AttributePrototypeId::from( + self.working_copy()? + .get_node_weight(prototype_node_index)? 
+ .id(),
+ );
+
+ if maybe_prop_node_index.is_none() {
+ for edge_ref in self.edges_directed(attribute_value_id.into(), Incoming)? {
+ if edge_ref.weight().kind() == &EdgeWeightKind::Prop {
+ maybe_prop_node_index = Some(edge_ref.source());
+ prop_direction = Incoming;
+ }
+ }
+ }
+
+ let intrinsic_func = match maybe_prop_node_index {
+ Some(prop_node_index) => {
+ if let NodeWeight::Prop(prop_inner) =
+ self.working_copy()?.get_node_weight(prop_node_index)?
+ {
+ // None for the value means there is no value, so we use unset, but if it's a
+ // literal serde_json::Value::Null it means the value is set, but to null
+ if value.is_none() {
+ IntrinsicFunc::Unset
+ } else {
+ match prop_inner.kind() {
+ PropKind::Array => IntrinsicFunc::SetArray,
+ PropKind::Boolean => IntrinsicFunc::SetBoolean,
+ PropKind::Integer => IntrinsicFunc::SetInteger,
+ PropKind::Map => IntrinsicFunc::SetMap,
+ PropKind::Object => IntrinsicFunc::SetObject,
+ PropKind::String => IntrinsicFunc::SetString,
+ }
+ }
+ } else {
+ Err(WorkspaceSnapshotGraphError::NodeWeightNotFound)?
+ }
+ }
+ None => match value {
+ None | Some(serde_json::Value::Null) => IntrinsicFunc::Unset,
+ Some(serde_json::Value::Array(_)) => IntrinsicFunc::SetArray,
+ Some(serde_json::Value::Bool(_)) => IntrinsicFunc::SetBoolean,
+ Some(serde_json::Value::Number(_)) => IntrinsicFunc::SetInteger,
+ Some(serde_json::Value::Object(_)) => IntrinsicFunc::SetObject,
+ Some(serde_json::Value::String(_)) => IntrinsicFunc::SetString,
+ },
+ };
+
+ let func_id = self.func_find_intrinsic(intrinsic_func)?;
+
+ // If we have a prop, then we need to know if the edge to it was incoming or outgoing (found
+ // above). If the edge is outgoing, we need to break the link from the value to the prototype
+ // and create a new one. If the edge is incoming, we need to update the prototype directly.
+ if maybe_prop_node_index.is_some() {
+ match prop_direction {
+ Direction::Outgoing => {
+ let attribute_value_node_idx = self
+ .working_copy()?
+ .get_node_index_by_id(attribute_value_id.into())?; + + self.working_copy()?.remove_edge( + change_set, + attribute_value_node_idx, + prototype_node_index, + EdgeWeightKindDiscriminants::Use, + )?; + + self.attribute_prototype_create(ctx, change_set, func_id) + .await?; + } + Direction::Incoming => { + self.attribute_prototype_update_func(change_set, prototype_id, func_id)?; + } + } + } + + let processed = match &value { + Some(serde_json::Value::Object(_)) => Some(serde_json::json![{}]), + Some(serde_json::Value::Array(_)) => Some(serde_json::json![[]]), + value => value.to_owned(), + }; + self.attribute_value_set_real_values(ctx, change_set, attribute_value_id, processed, value) + .await?; + Ok(()) + } + + async fn attribute_value_get_content( + &mut self, + ctx: &DalContext, + attribute_value_id: AttributeValueId, + ) -> WorkspaceSnapshotResult<(ContentHash, AttributeValueContentV1)> { + let id: Ulid = attribute_value_id.into(); + let node_index = self.working_copy()?.get_node_index_by_id(id)?; + let node_weight = self.working_copy()?.get_node_weight(node_index)?; + let hash = node_weight.content_hash(); + + let content: AttributeValueContent = ctx + .content_store() + .lock() + .await + .get(&hash) + .await? + .ok_or(WorkspaceSnapshotError::MissingContentFromStore(id))?; + + // NOTE(nick,jacob,zack): if we had a v2, then there would be migration logic here. 
+ let AttributeValueContent::V1(inner) = content; + + Ok((hash, inner)) + } + + async fn attribute_value_set_real_values( + &mut self, + ctx: &DalContext, + change_set: &ChangeSetPointer, + attribute_value_id: AttributeValueId, + value: Option, + unprocessed_value: Option, + ) -> WorkspaceSnapshotResult { + let (_, inner) = self + .attribute_value_get_content(ctx, attribute_value_id) + .await?; + let mut attribute_value = AttributeValue::assemble(attribute_value_id, &inner); + + attribute_value.value = value; + attribute_value.unprocessed_value = unprocessed_value; + + let updated = AttributeValueContentV1::from(attribute_value.to_owned()); + let hash = ctx + .content_store() + .lock() + .await + .add(&AttributeValueContent::V1(updated))?; + + self.working_copy()? + .update_content(change_set, attribute_value_id.into(), hash)?; + + Ok(attribute_value) + } +} diff --git a/lib/dal/src/workspace_snapshot/api/component.rs b/lib/dal/src/workspace_snapshot/api/component.rs new file mode 100644 index 0000000000..5aab98b22d --- /dev/null +++ b/lib/dal/src/workspace_snapshot/api/component.rs @@ -0,0 +1,69 @@ +use content_store::{ContentHash, Store}; + +use crate::change_set_pointer::ChangeSetPointer; +use crate::component::ComponentKind; +use crate::component::{ComponentContent, ComponentContentV1, ComponentGraphNode}; +use crate::workspace_snapshot::content_address::ContentAddress; +use crate::workspace_snapshot::edge_weight::{EdgeWeight, EdgeWeightKind}; +use crate::workspace_snapshot::node_weight::category_node_weight::CategoryNodeKind; +use crate::workspace_snapshot::node_weight::NodeWeight; +use crate::workspace_snapshot::WorkspaceSnapshotResult; +use crate::{Component, DalContext, NodeKind, SchemaVariantId, Timestamp, WorkspaceSnapshot}; + +impl WorkspaceSnapshot { + pub async fn component_create( + &mut self, + ctx: &DalContext, + change_set: &ChangeSetPointer, + name: impl AsRef, + schema_variant_id: SchemaVariantId, + component_kind: Option, + ) -> 
WorkspaceSnapshotResult { + let name = name.as_ref(); + let timestamp = Timestamp::now(); + let ui_hidden = false; + + let content = ComponentContentV1 { + timestamp, + kind: match component_kind { + Some(provided_kind) => provided_kind, + None => ComponentKind::Standard, + }, + needs_destroy: false, + }; + let hash = ctx + .content_store() + .lock() + .await + .add(&ComponentContent::V1(content.clone()))?; + + let id = change_set.generate_ulid()?; + let node_weight = + NodeWeight::new_content(&change_set, id, ContentAddress::Component(hash))?; + let node_index = self.working_copy()?.add_node(node_weight)?; + + // Root --> Component Category --> Component (this) + let component_category_index = self + .working_copy()? + .get_category_child(CategoryNodeKind::Component)?; + self.working_copy()?.add_edge( + component_category_index, + EdgeWeight::new(change_set, EdgeWeightKind::Use)?, + node_index, + )?; + + // Component (this) --> Schema Variant + let schema_variant_index = self.get_node_index_by_id(schema_variant_id.into())?; + self.working_copy()?.add_edge( + node_index, + EdgeWeight::new(change_set, EdgeWeightKind::Use)?, + schema_variant_index, + )?; + + // Create a node. When a node is created an edge will be created from the component node index (this) to the new "node node". Totally not confusing... 
+ self.node_create(ctx, change_set, Some(NodeKind::Configuration), id.into()) + .await?; + + Ok(ComponentGraphNode::assemble(id, hash, content)) + } +} diff --git a/lib/dal/src/workspace_snapshot/api/func.rs b/lib/dal/src/workspace_snapshot/api/func.rs new file mode 100644 index 0000000000..b5367d049a --- /dev/null +++ b/lib/dal/src/workspace_snapshot/api/func.rs @@ -0,0 +1,168 @@ +use content_store::{ContentHash, Store}; + +use ulid::Ulid; + +use crate::change_set_pointer::ChangeSetPointer; +use crate::func::intrinsics::IntrinsicFunc; +use crate::func::{FuncContent, FuncContentV1, FuncGraphNode}; + +use crate::workspace_snapshot::edge_weight::{EdgeWeight, EdgeWeightKind}; +use crate::workspace_snapshot::node_weight::category_node_weight::CategoryNodeKind; +use crate::workspace_snapshot::node_weight::NodeWeight; +use crate::workspace_snapshot::{WorkspaceSnapshotError, WorkspaceSnapshotResult}; +use crate::{ + DalContext, Func, FuncBackendKind, FuncBackendResponseType, FuncId, Timestamp, + WorkspaceSnapshot, +}; + +// TODO(nick,jacob): when "updating content" to set the code, we need to do something like the following: +// code_base64 text, +// code_sha256 text GENERATED ALWAYS AS (COALESCE(ENCODE(DIGEST(code_base64, 'sha256'), 'hex'), '0')) STORE + +impl WorkspaceSnapshot { + pub async fn func_create( + &mut self, + ctx: &DalContext, + change_set: &ChangeSetPointer, + name: impl AsRef, + backend_kind: FuncBackendKind, + backend_response_type: FuncBackendResponseType, + ) -> WorkspaceSnapshotResult { + let name = name.as_ref().to_string(); + let timestamp = Timestamp::now(); + let _finalized_once = false; + + let content = FuncContentV1 { + timestamp, + name: name.clone(), + display_name: None, + description: None, + link: None, + hidden: false, + builtin: false, + backend_kind, + backend_response_type, + handler: None, + code_base64: None, + code_sha256: "".to_string(), + }; + + let hash = ctx + .content_store() + .lock() + .await + 
.add(&FuncContent::V1(content.clone()))?; + + let id = change_set.generate_ulid()?; + let node_weight = NodeWeight::new_func(change_set, id, name.clone(), hash)?; + let node_index = self.working_copy()?.add_node(node_weight)?; + + let (_, func_category_index) = self + .working_copy()? + .get_category_child(CategoryNodeKind::Func)?; + self.working_copy()?.add_edge( + func_category_index, + EdgeWeight::new(change_set, EdgeWeightKind::Use)?, + node_index, + )?; + + Ok(Func::assemble(id.into(), &content)) + } + + pub async fn func_get_content( + &mut self, + ctx: &DalContext, + func_id: FuncId, + ) -> WorkspaceSnapshotResult<(ContentHash, FuncContentV1)> { + let id: Ulid = func_id.into(); + let node_index = self.working_copy()?.get_node_index_by_id(id)?; + let node_weight = self.working_copy()?.get_node_weight(node_index)?; + let hash = node_weight.content_hash(); + + let content: FuncContent = ctx + .content_store() + .lock() + .await + .get(&hash) + .await? + .ok_or(WorkspaceSnapshotError::MissingContentFromStore(id))?; + + // NOTE(nick,jacob,zack): if we had a v2, then there would be migration logic here. + let FuncContent::V1(inner) = content; + + Ok((hash, inner)) + } + + pub async fn func_get_by_id( + &mut self, + ctx: &DalContext, + func_id: FuncId, + ) -> WorkspaceSnapshotResult { + let (_, content) = self.func_get_content(ctx, func_id).await?; + + Ok(Func::assemble(func_id, &content)) + } + + pub fn func_find_intrinsic( + &mut self, + intrinsic: IntrinsicFunc, + ) -> WorkspaceSnapshotResult { + let name = intrinsic.name(); + Ok(self + .func_find_by_name(name)? + .ok_or(WorkspaceSnapshotError::IntrinsicFuncNotFound( + name.to_owned(), + ))?) + } + + pub fn func_find_by_name( + &mut self, + name: impl AsRef, + ) -> WorkspaceSnapshotResult> { + let (_, func_category_index) = self + .working_copy()? + .get_category_child(CategoryNodeKind::Func)?; + + let func_id = self + .working_copy()? 
+ .func_find_by_name(func_category_index, name)?;
+
+ Ok(func_id.into())
+ }
+
+ pub async fn func_modify_by_id(
+ &mut self,
+ ctx: &DalContext,
+ change_set: &ChangeSetPointer,
+ id: FuncId,
+ lambda: L,
+ ) -> WorkspaceSnapshotResult
+ where
+ L: FnOnce(&mut Func) -> WorkspaceSnapshotResult<()>,
+ {
+ let (_, inner) = self.func_get_content(ctx, id).await?;
+
+
+
+ let mut func = Func::assemble(id, &inner);
+ lambda(&mut func)?;
+ let updated = FuncContentV1::from(func);
+
+
+
+ let hash = ctx
+ .content_store()
+ .lock()
+ .await
+ .add(&FuncContent::V1(updated.clone()))?;
+
+
+
+ self.working_copy()?
+ .update_content(change_set, id.into(), hash)?;
+
+
+
+ Ok(Func::assemble(id, &updated))
+ }
+}
diff --git a/lib/dal/src/workspace_snapshot/api/node.rs b/lib/dal/src/workspace_snapshot/api/node.rs
new file mode 100644
index 0000000000..8e6ff67eca
--- /dev/null
+++ b/lib/dal/src/workspace_snapshot/api/node.rs
@@ -0,0 +1,111 @@
+use content_store::{ContentHash, Store};
+use ulid::Ulid;
+
+use crate::change_set_pointer::ChangeSetPointer;
+use crate::component::ComponentKind;
+use crate::component::{ComponentContent, ComponentContentV1, ComponentGraphNode};
+use crate::node::NodeContentV1;
+use crate::node::NodeKind;
+use crate::node::{NodeContent, NodeGraphNode};
+use crate::workspace_snapshot::content_address::ContentAddress;
+use crate::workspace_snapshot::edge_weight::{EdgeWeight, EdgeWeightKind};
+use crate::workspace_snapshot::node_weight::category_node_weight::CategoryNodeKind;
+use crate::workspace_snapshot::node_weight::NodeWeight;
+use crate::workspace_snapshot::{WorkspaceSnapshotError, WorkspaceSnapshotResult};
+use crate::ComponentId;
+use crate::{Component, DalContext, Node, NodeId, SchemaVariantId, Timestamp, WorkspaceSnapshot};
+
+impl WorkspaceSnapshot {
+ pub async fn node_create(
+ &mut self,
+ ctx: &DalContext,
+
change_set: &ChangeSetPointer, + kind: Option, + component_id: ComponentId, + ) -> WorkspaceSnapshotResult { + let content = NodeContentV1 { + timestamp: Timestamp::now(), + kind: match kind { + Some(provided_kind) => provided_kind, + None => NodeKind::Configuration, + }, + ..Default::default() + }; + let hash = ctx + .content_store() + .lock() + .await + .add(&NodeContent::V1(content.clone()))?; + + let id = change_set.generate_ulid()?; + let node_weight = NodeWeight::new_content(&change_set, id, ContentAddress::Node(hash))?; + let node_index = self.working_copy()?.add_node(node_weight)?; + + // Component --> Node (this) + let component_index = self.get_node_index_by_id(component_id)?; + self.working_copy()?.add_edge( + component_index, + EdgeWeight::new(change_set, EdgeWeightKind::Use)?, + node_index, + )?; + + Ok(NodeGraphNode::assemble(id, hash, content)) + } + + async fn node_get_content( + &mut self, + ctx: &DalContext, + node_id: NodeId, + ) -> WorkspaceSnapshotResult<(ContentHash, NodeContentV1)> { + let id: Ulid = node_id.into(); + let node_index = self.working_copy()?.get_node_index_by_id(id)?; + let node_weight = self.working_copy()?.get_node_weight(node_index)?; + let hash = node_weight.content_hash(); + + let content: NodeContent = ctx + .content_store() + .lock() + .await + .get(&hash) + .await? + .ok_or(WorkspaceSnapshotError::MissingContentFromStore(id))?; + + // NOTE(nick,jacob,zack): if we had a v2, then there would be migration logic here. 
+ let inner = match content {
+ NodeContent::V1(inner) => inner,
+ };
+
+ Ok((hash, inner))
+ }
+
+ pub async fn node_set_geometry(
+ &mut self,
+ ctx: &DalContext,
+ change_set: &ChangeSetPointer,
+ node_id: NodeId,
+ x: impl AsRef,
+ y: impl AsRef,
+ width: Option>,
+ height: Option>,
+ ) -> WorkspaceSnapshotResult<()> {
+ let (_, inner) = self.node_get_content(ctx, node_id).await?;
+
+ let mut node = Node::assemble(node_id, &inner);
+ node.x = x;
+ node.y = y;
+ node.width = width;
+ node.height = height;
+ let updated = NodeContentV1::from(node);
+
+ let hash = ctx
+ .content_store()
+ .lock()
+ .await
+ .add(&NodeContent::V1(updated.clone()))?;
+
+ self.working_copy()?
+ .update_content(change_set, node_id.into(), hash)?;
+
+ Ok(())
+ }
+}
diff --git a/lib/dal/src/workspace_snapshot/api/prop.rs b/lib/dal/src/workspace_snapshot/api/prop.rs
new file mode 100644
index 0000000000..990246c979
--- /dev/null
+++ b/lib/dal/src/workspace_snapshot/api/prop.rs
@@ -0,0 +1,158 @@
+use content_store::{ContentHash, Store};
+use serde_json::Value;
+use ulid::Ulid;
+
+use crate::change_set_pointer::ChangeSetPointer;
+use crate::prop::{PropContent, PropContentV1, PropGraphNode};
+use crate::property_editor::schema::WidgetKind;
+
+
+
+use crate::workspace_snapshot::edge_weight::{EdgeWeight, EdgeWeightKind};
+use crate::workspace_snapshot::node_weight::NodeWeight;
+use crate::workspace_snapshot::{WorkspaceSnapshotError, WorkspaceSnapshotResult};
+use crate::{
+ DalContext, Prop, PropId, PropKind, SchemaVariantId, Timestamp, WorkspaceSnapshot,
+};
+
+pub enum PropParent {
+ OrderedProp(PropId),
+ Prop(PropId),
+ SchemaVariant(SchemaVariantId),
+}
+
+impl WorkspaceSnapshot {
+ /// Create a new [`Prop`]. A corresponding [`AttributePrototype`] and [`AttributeValue`] will be
+ /// created when the provided [`SchemaVariant`](crate::SchemaVariant) is
+ /// [`finalized`](crate::SchemaVariant::finalize).
+ pub async fn prop_create( + &mut self, + ctx: &DalContext, + change_set: &ChangeSetPointer, + name: impl AsRef, + kind: PropKind, + widget_kind_and_options: Option<(WidgetKind, Option)>, + prop_parent: PropParent, + ordered: bool, + ) -> WorkspaceSnapshotResult { + let timestamp = Timestamp::now(); + let name = name.as_ref(); + let (widget_kind, widget_options) = match widget_kind_and_options { + Some((kind, options)) => (kind, options), + None => (WidgetKind::from(kind), None), + }; + + let content = PropContentV1 { + timestamp, + name: name.to_string(), + kind, + widget_kind, + widget_options, + doc_link: None, + hidden: false, + refers_to_prop_id: None, + diff_func_id: None, + }; + let hash = ctx + .content_store() + .lock() + .await + .add(&PropContent::V1(content.clone()))?; + + let id = change_set.generate_ulid()?; + let node_weight = NodeWeight::new_prop(change_set, id, kind, name, hash)?; + let node_index = if ordered { + self.working_copy()? + .add_ordered_node(change_set, node_weight)? + } else { + self.working_copy()?.add_node(node_weight)? + }; + + match prop_parent { + PropParent::OrderedProp(ordered_prop_id) => { + let parent_node_index = self + .working_copy()? + .get_node_index_by_id(ordered_prop_id.into())?; + self.working_copy()?.add_ordered_edge( + change_set, + parent_node_index, + EdgeWeight::new(change_set, EdgeWeightKind::Use)?, + node_index, + )?; + } + PropParent::Prop(prop_id) => { + let parent_node_index = + self.working_copy()?.get_node_index_by_id(prop_id.into())?; + self.working_copy()?.add_edge( + parent_node_index, + EdgeWeight::new(change_set, EdgeWeightKind::Use)?, + node_index, + )?; + } + PropParent::SchemaVariant(schema_variant_id) => { + let parent_node_index = self + .working_copy()? 
+ .get_node_index_by_id(schema_variant_id.into())?; + self.working_copy()?.add_edge( + parent_node_index, + EdgeWeight::new(change_set, EdgeWeightKind::Use)?, + node_index, + )?; + } + }; + + Ok(PropGraphNode::assemble(id, hash, content)) + } + + async fn prop_get_content( + &mut self, + ctx: &DalContext, + prop_id: PropId, + ) -> WorkspaceSnapshotResult<(ContentHash, PropContentV1)> { + let id: Ulid = prop_id.into(); + let node_index = self.working_copy()?.get_node_index_by_id(id)?; + let node_weight = self.working_copy()?.get_node_weight(node_index)?; + let hash = node_weight.content_hash(); + + let content: PropContent = ctx + .content_store() + .lock() + .await + .get(&hash) + .await? + .ok_or(WorkspaceSnapshotError::MissingContentFromStore(id))?; + + // NOTE(nick,jacob,zack): if we had a v2, then there would be migration logic here. + let PropContent::V1(inner) = content; + + Ok((hash, inner)) + } + + pub async fn prop_modify_by_id( + &mut self, + ctx: &DalContext, + change_set: &ChangeSetPointer, + id: PropId, + lambda: L, + ) -> WorkspaceSnapshotResult + where + L: FnOnce(&mut Prop) -> WorkspaceSnapshotResult<()>, + { + let (_, inner) = self.prop_get_content(ctx, id).await?; + + let mut prop = Prop::assemble(id, &inner); + lambda(&mut prop)?; + let updated = PropContentV1::from(prop); + + let hash = ctx + .content_store() + .lock() + .await + .add(&PropContent::V1(updated.clone()))?; + + self.working_copy()? 
+ .update_content(change_set, id.into(), hash)?; + + Ok(PropGraphNode::assemble(id, hash, updated)) + } +} diff --git a/lib/dal/src/workspace_snapshot/api/provider.rs b/lib/dal/src/workspace_snapshot/api/provider.rs new file mode 100644 index 0000000000..04df418abc --- /dev/null +++ b/lib/dal/src/workspace_snapshot/api/provider.rs @@ -0,0 +1,2 @@ +pub mod external; +pub mod internal; diff --git a/lib/dal/src/workspace_snapshot/api/provider/external.rs b/lib/dal/src/workspace_snapshot/api/provider/external.rs new file mode 100644 index 0000000000..7b166c00b0 --- /dev/null +++ b/lib/dal/src/workspace_snapshot/api/provider/external.rs @@ -0,0 +1,86 @@ +use content_store::{Store}; + +use crate::change_set_pointer::ChangeSetPointer; +use crate::provider::external::{ + ExternalProviderContent, ExternalProviderContentV1, ExternalProviderGraphNode, +}; + + +use crate::socket::{DiagramKind, SocketEdgeKind, SocketKind}; + +use crate::workspace_snapshot::api::socket::SocketParent; +use crate::workspace_snapshot::content_address::ContentAddress; +use crate::workspace_snapshot::edge_weight::{EdgeWeight, EdgeWeightKind}; +use crate::workspace_snapshot::node_weight::NodeWeight; +use crate::workspace_snapshot::WorkspaceSnapshotResult; +use crate::{ + DalContext, FuncId, SchemaVariantId, SocketArity, Timestamp, + WorkspaceSnapshot, +}; + +impl WorkspaceSnapshot { + pub async fn external_provider_create_with_socket( + &mut self, + ctx: &DalContext, + change_set: &ChangeSetPointer, + schema_variant_id: SchemaVariantId, + name: impl AsRef, + type_definition: Option, + func_id: FuncId, + arity: SocketArity, + frame_socket: bool, + ) -> WorkspaceSnapshotResult { + let name = name.as_ref(); + let timestamp = Timestamp::now(); + + let content = ExternalProviderContentV1 { + timestamp, + schema_variant_id, + attribute_prototype_id: None, + name: name.to_string(), + type_definition, + }; + let hash = ctx + .content_store() + .lock() + .await + 
.add(&ExternalProviderContent::V1(content.clone()))?; + + let id = change_set.generate_ulid()?; + let node_weight = + NodeWeight::new_content(change_set, id, ContentAddress::ExternalProvider(hash))?; + let node_index = self.working_copy()?.add_node(node_weight)?; + + let schema_variant_node_index = self + .working_copy()? + .get_node_index_by_id(schema_variant_id.into())?; + self.working_copy()?.add_edge( + schema_variant_node_index, + EdgeWeight::new(change_set, EdgeWeightKind::Provider)?, + node_index, + )?; + + let _attribute_prototype = self + .attribute_prototype_create(ctx, change_set, func_id) + .await?; + + let _socket = self + .socket_create( + ctx, + change_set, + name, + match frame_socket { + true => SocketKind::Frame, + false => SocketKind::Provider, + }, + SocketEdgeKind::ConfigurationOutput, + arity, + DiagramKind::Configuration, + Some(schema_variant_id), + SocketParent::ExternalProvider(id.into()), + ) + .await?; + + Ok(ExternalProviderGraphNode::assemble(id, hash, content)) + } +} diff --git a/lib/dal/src/workspace_snapshot/api/provider/internal.rs b/lib/dal/src/workspace_snapshot/api/provider/internal.rs new file mode 100644 index 0000000000..693a1c2106 --- /dev/null +++ b/lib/dal/src/workspace_snapshot/api/provider/internal.rs @@ -0,0 +1,125 @@ +use content_store::Store; +use petgraph::Direction; + +use crate::change_set_pointer::ChangeSetPointer; +use crate::func::intrinsics::IntrinsicFunc; +use crate::provider::internal::{ + InternalProviderContent, InternalProviderContentV1, InternalProviderGraphNode, +}; +use crate::socket::{DiagramKind, SocketEdgeKind, SocketKind}; +use crate::workspace_snapshot::api::socket::SocketParent; +use crate::workspace_snapshot::content_address::ContentAddress; +use crate::workspace_snapshot::edge_weight::{EdgeWeight, EdgeWeightKind}; +use crate::workspace_snapshot::node_weight::{NodeWeight, PropNodeWeight}; +use crate::workspace_snapshot::WorkspaceSnapshotResult; +use crate::{DalContext, FuncId, SchemaVariantId, 
SocketArity, Timestamp, WorkspaceSnapshot}; + +impl WorkspaceSnapshot { + pub async fn internal_provider_create_implicit( + &mut self, + ctx: &DalContext, + change_set: &ChangeSetPointer, + prop: &PropNodeWeight, + ) -> WorkspaceSnapshotResult<()> { + for edgeref in self.edges_directed(prop.id(), Direction::Outgoing)? { + if edgeref.weight().kind() == &EdgeWeightKind::Provider { + // It already exists! + return Ok(()); + } + } + + let content = InternalProviderContentV1 { + timestamp: Timestamp::now(), + name: prop.name().to_string(), + inbound_type_definition: None, + outbound_type_definition: None, + }; + let hash = ctx + .content_store() + .lock() + .await + .add(&InternalProviderContent::V1(content.clone()))?; + + let id = change_set.generate_ulid()?; + let node_weight = + NodeWeight::new_content(change_set, id, ContentAddress::InternalProvider(hash))?; + let node_index = self.working_copy()?.add_node(node_weight)?; + + let prop_node_index = self.working_copy()?.get_node_index_by_id(prop.id())?; + self.working_copy()?.add_edge( + prop_node_index, + EdgeWeight::new(change_set, EdgeWeightKind::Provider)?, + node_index, + )?; + + let func_id = self.func_find_intrinsic(IntrinsicFunc::Identity)?; + let (_, _) = self + .attribute_prototype_create(ctx, change_set, func_id) + .await?; + + Ok(()) + } + + pub async fn internal_provider_create_explicit_with_socket( + &mut self, + ctx: &DalContext, + change_set: &ChangeSetPointer, + schema_variant_id: SchemaVariantId, + name: impl AsRef, + func_id: FuncId, + arity: SocketArity, + frame_socket: bool, + ) -> WorkspaceSnapshotResult { + let name = name.as_ref().to_string(); + let timestamp = Timestamp::now(); + + let content = InternalProviderContentV1 { + timestamp, + name: name.clone(), + inbound_type_definition: None, + outbound_type_definition: None, + }; + let hash = ctx + .content_store() + .lock() + .await + .add(&InternalProviderContent::V1(content.clone()))?; + + let id = change_set.generate_ulid()?; + let 
node_weight = + NodeWeight::new_content(change_set, id, ContentAddress::InternalProvider(hash))?; + let node_index = self.working_copy()?.add_node(node_weight)?; + + let schema_variant_node_index = self + .working_copy()? + .get_node_index_by_id(schema_variant_id.into())?; + self.working_copy()?.add_edge( + schema_variant_node_index, + EdgeWeight::new(change_set, EdgeWeightKind::Provider)?, + node_index, + )?; + + let _attribute_prototype = self + .attribute_prototype_create(ctx, change_set, func_id) + .await?; + + let _socket = self + .socket_create( + ctx, + change_set, + name, + match frame_socket { + true => SocketKind::Frame, + false => SocketKind::Provider, + }, + SocketEdgeKind::ConfigurationInput, + arity, + DiagramKind::Configuration, + Some(schema_variant_id), + SocketParent::ExplicitInternalProvider(id.into()), + ) + .await?; + + Ok(InternalProviderGraphNode::assemble(id, hash, content)) + } +} diff --git a/lib/dal/src/workspace_snapshot/api/schema.rs b/lib/dal/src/workspace_snapshot/api/schema.rs new file mode 100644 index 0000000000..f65bfc37c3 --- /dev/null +++ b/lib/dal/src/workspace_snapshot/api/schema.rs @@ -0,0 +1,81 @@ +use content_store::{ContentHash, Store}; +use ulid::Ulid; + +use crate::change_set_pointer::ChangeSetPointer; +use crate::schema::{ComponentKind, SchemaContent, SchemaContentV1, SchemaGraphNode}; +use crate::workspace_snapshot::content_address::ContentAddress; +use crate::workspace_snapshot::edge_weight::{EdgeWeight, EdgeWeightKind}; +use crate::workspace_snapshot::node_weight::category_node_weight::CategoryNodeKind; +use crate::workspace_snapshot::node_weight::NodeWeight; +use crate::workspace_snapshot::{WorkspaceSnapshotError, WorkspaceSnapshotResult}; +use crate::{DalContext, SchemaId, Timestamp, WorkspaceSnapshot}; + +pub mod variant; + +impl WorkspaceSnapshot { + pub async fn schema_create( + &mut self, + ctx: &DalContext, + change_set: &ChangeSetPointer, + name: impl AsRef, + component_kind: ComponentKind, + ) -> 
WorkspaceSnapshotResult { + let name = name.as_ref(); + let timestamp = Timestamp::now(); + let ui_hidden = false; + + let content = SchemaContentV1 { + timestamp, + name: name.to_string(), + ui_hidden, + default_schema_variant_id: None, + component_kind, + }; + let hash = ctx + .content_store() + .lock() + .await + .add(&SchemaContent::V1(content.clone()))?; + + let id = change_set.generate_ulid()?; + let node_weight = NodeWeight::new_content(change_set, id, ContentAddress::Schema(hash))?; + let node_index = self.working_copy()?.add_node(node_weight)?; + + let schema_category_index = self + .working_copy()? + .get_category_child(CategoryNodeKind::Schema)?; + /*self.working_copy()?.add_edge( + schema_category_index, + EdgeWeight::new(change_set, EdgeWeightKind::Use)?, + node_index, + )?;*/ + + Ok(SchemaGraphNode::assemble(id, hash, content)) + } + + pub async fn schema_get_content( + &mut self, + ctx: &DalContext, + schema_id: SchemaId, + ) -> WorkspaceSnapshotResult<(ContentHash, SchemaContentV1)> { + let id: Ulid = schema_id.into(); + let node_index = self.working_copy()?.get_node_index_by_id(id)?; + let node_weight = self.working_copy()?.get_node_weight(node_index)?; + let hash = node_weight.content_hash(); + + let content: SchemaContent = ctx + .content_store() + .lock() + .await + .get(&hash) + .await? + .ok_or(WorkspaceSnapshotError::MissingContentFromStore(id))?; + + // NOTE(nick,jacob,zack): if we had a v2, then there would be migration logic here. 
+ let inner = match content { + SchemaContent::V1(inner) => inner, + }; + + Ok((hash, inner)) + } +} diff --git a/lib/dal/src/workspace_snapshot/api/schema/variant.rs b/lib/dal/src/workspace_snapshot/api/schema/variant.rs new file mode 100644 index 0000000000..1deb51d397 --- /dev/null +++ b/lib/dal/src/workspace_snapshot/api/schema/variant.rs @@ -0,0 +1,325 @@ +use std::collections::VecDeque; + +use content_store::{ContentHash, Store}; +use petgraph::visit::EdgeRef; +use petgraph::Direction; +use ulid::Ulid; + +use crate::change_set_pointer::ChangeSetPointer; +use crate::func::intrinsics::IntrinsicFunc; +use crate::schema::variant::root_prop::RootProp; +use crate::schema::variant::{ + SchemaVariantContent, SchemaVariantContentV1, SchemaVariantGraphNode, +}; +use crate::workspace_snapshot::content_address::{ContentAddress, ContentAddressDiscriminants}; +use crate::workspace_snapshot::edge_weight::{ + EdgeWeight, EdgeWeightKind, EdgeWeightKindDiscriminants, +}; +use crate::workspace_snapshot::graph::NodeIndex; +use crate::workspace_snapshot::node_weight::category_node_weight::CategoryNodeKind; +use crate::workspace_snapshot::node_weight::{NodeWeight, PropNodeWeight}; +use crate::workspace_snapshot::{WorkspaceSnapshotError, WorkspaceSnapshotResult}; +use crate::{ + ActionKind, AttributePrototypeId, DalContext, FuncId, PropKind, Schema, SchemaId, + SchemaVariant, SchemaVariantId, SocketArity, Timestamp, WorkspaceSnapshot, +}; + +pub mod root_prop; + +impl WorkspaceSnapshot { + pub async fn schema_variant_create( + &mut self, + ctx: &DalContext, + change_set: &ChangeSetPointer, + name: impl AsRef, + schema_id: SchemaId, + ui_hidden: bool, + ) -> WorkspaceSnapshotResult<(SchemaVariantGraphNode, RootProp)> { + let name = name.as_ref(); + let timestamp = Timestamp::now(); + + let content = SchemaVariantContentV1 { + timestamp, + name: name.to_string(), + root_prop_id: None, + // schema_variant_definition_id: None, + link: None, + ui_hidden, + finalized_once: false, + }; + 
let hash = ctx + .content_store() + .lock() + .await + .add(&SchemaVariantContent::V1(content.clone()))?; + + let id = change_set.generate_ulid()?; + let node_weight = + NodeWeight::new_content(change_set, id, ContentAddress::SchemaVariant(hash))?; + let node_index = self.working_copy()?.add_node(node_weight)?; + + let schema_node_index = self + .working_copy()? + .get_node_index_by_id(schema_id.into())?; + self.working_copy()?.add_edge( + schema_node_index, + EdgeWeight::new(change_set, EdgeWeightKind::Use)?, + node_index, + )?; + + let schema_variant_id: SchemaVariantId = id.into(); + + let root_prop = self + .schema_variant_create_root_prop_tree(ctx, change_set, schema_variant_id, schema_id) + .await?; + + let func_id = self.func_find_intrinsic(IntrinsicFunc::Identity)?; + + self.internal_provider_create_explicit_with_socket( + ctx, + change_set, + schema_variant_id, + "Frame", + func_id, + SocketArity::Many, + true, + ) + .await?; + self.external_provider_create_with_socket( + ctx, + change_set, + schema_variant_id, + "Frame", + None, + func_id, + SocketArity::One, + true, + ) + .await?; + + Ok(( + SchemaVariantGraphNode::assemble(id, hash, content), + root_prop, + )) + } + + async fn schema_variant_get_root_prop( + &mut self, + schema_variant_id: SchemaVariantId, + ) -> WorkspaceSnapshotResult { + let edge_targets: Vec = self + .edges_directed(schema_variant_id.into(), Direction::Outgoing)? + .map(|edge_ref| edge_ref.target()) + .collect(); + + for index in edge_targets { + let node_weight = self.get_node_weight(index)?; + // TODO(nick): ensure that only one prop can be under a schema variant. 
+ if let NodeWeight::Prop(inner_weight) = node_weight { + if inner_weight.name() == "root" { + return Ok(inner_weight.clone()); + } + } + todo!("could not get root prop") + } + + pub async fn schema_variant_create_default_prototypes( + &mut self, + ctx: &DalContext, + change_set: &ChangeSetPointer, + schema_variant_id: SchemaVariantId, + ) -> WorkspaceSnapshotResult<()> { + let func_id = self.func_find_intrinsic(IntrinsicFunc::Unset)?; + let root_prop = self.schema_variant_get_root_prop(schema_variant_id).await?; + let mut work_queue: VecDeque = VecDeque::from(vec![root_prop]); + + while let Some(prop) = work_queue.pop_front() { + // See if an attribute prototype exists. + let mut found_attribute_prototype_id: Option = None; + let targets = self.outgoing_targets_for_edge_weight_kind( + prop.id(), + EdgeWeightKindDiscriminants::Prototype, + )?; + for target in targets { + let node_weight = self.get_node_weight(target)?; + if let Some(discriminants) = node_weight.content_address_discriminants() { + if let ContentAddressDiscriminants::AttributePrototype = discriminants { + found_attribute_prototype_id = Some(node_weight.id().into()); + break; + } + } + } + + // Create the attribute prototype and appropriate edges if they do not exist. + if found_attribute_prototype_id.is_none() { + // We did not find a prototype, so we must create one. + let (_attribute_prototype, attribute_prototype_node_index) = self + .attribute_prototype_create(ctx, change_set, func_id) + .await?; + + // New edge Prop --Prototype--> AttributePrototype. + let prop_node_index = self.get_node_index_by_id(prop.id())?; + self.add_edge( + prop_node_index, + EdgeWeight::new(change_set, EdgeWeightKind::Prototype)?, + attribute_prototype_node_index, + )?; + } + + // Push all children onto the work queue.
+ let targets = self.outgoing_targets_for_edge_weight_kind( + prop.id(), + EdgeWeightKindDiscriminants::Use, + )?; + for target in targets { + let node_weight = self.get_node_weight(target)?; + if let NodeWeight::Prop(child_prop) = node_weight { + work_queue.push_back(child_prop.to_owned()) + } + } + } + + Ok(()) + } + + pub async fn schema_variant_create_implicit_internal_providers( + &mut self, + ctx: &DalContext, + change_set: &ChangeSetPointer, + schema_variant_id: SchemaVariantId, + ) -> WorkspaceSnapshotResult<()> { + let root_prop = self.schema_variant_get_root_prop(schema_variant_id).await?; + let mut work_queue = VecDeque::new(); + work_queue.push_back(root_prop); + + while let Some(prop) = work_queue.pop_front() { + self.internal_provider_create_implicit(ctx, change_set, &prop) + .await?; + + // Only descend if we are an object. + if prop.kind() == PropKind::Object { + let targets = self.outgoing_targets_for_edge_weight_kind( + prop.id(), + EdgeWeightKindDiscriminants::Use, + )?; + for target in targets { + let node_weight = self.get_node_weight(target)?; + if let NodeWeight::Prop(child_prop) = node_weight { + work_queue.push_back(child_prop.to_owned()); + } + } + } + } + + Ok(()) + } + + pub fn action_prototype_create( + &mut self, + _ctx: &DalContext, + change_set: &ChangeSetPointer, + func_id: FuncId, + schema_variant_id: SchemaVariantId, + _kind: ActionKind, + ) -> WorkspaceSnapshotResult<()> { + let schema_variant_index = self + .working_copy()? 
+ .get_node_index_by_id(schema_variant_id.into())?; + + let func_index = self.working_copy()?.get_node_index_by_id(func_id.into())?; + + self.working_copy()?.add_edge( + schema_variant_index, + EdgeWeight::new(change_set, EdgeWeightKind::Use)?, + func_index, + )?; + + Ok(()) + } + + async fn schema_variant_get_content( + &mut self, + ctx: &DalContext, + schema_variant_id: SchemaVariantId, + ) -> WorkspaceSnapshotResult<(ContentHash, SchemaVariantContentV1)> { + let id: Ulid = schema_variant_id.into(); + let node_index = self.working_copy()?.get_node_index_by_id(id)?; + let node_weight = self.working_copy()?.get_node_weight(node_index)?; + let hash = node_weight.content_hash(); + + let content: SchemaVariantContent = ctx + .content_store() + .lock() + .await + .get(&hash) + .await? + .ok_or(WorkspaceSnapshotError::MissingContentFromStore(id))?; + + // NOTE(nick,jacob,zack): if we had a v2, then there would be migration logic here. + let inner = match content { + SchemaVariantContent::V1(inner) => inner, + }; + + Ok((hash, inner)) + } + + pub async fn schema_variant_list( + &mut self, + ctx: &DalContext, + ) -> WorkspaceSnapshotResult> { + /* + let schema_category_index = self.get_category_child(CategoryNodeKind::Schema)?; + let schema_indices = self.outgoing_targets_for_edge_weight_kind_by_index( + schema_category_index, + EdgeWeightKindDiscriminants::Use, + )?; + + // TODO(nick,zack,jacob,wendy): start here! 
+ let mut unchecked_node_weights = Vec::new(); + for schema_index in schema_indices { + unchecked_node_weights.push(self.get_node_weight(schema_index)?); + } + let mut schemas = Vec::new(); + for unchecked_node_weight in unchecked_node_weights { + if let NodeWeight::Content(content_node_weight) = unchecked_node_weight { + let (_, content) = self + .schema_get_content(ctx, content_node_weight.id().into()) + .await?; + schemas.push(Schema::assemble(content_node_weight.id().into(), &content)); + } + }*/ + + Ok(vec![]) + } + + /// This _idempotent_ function "finalizes" a [`SchemaVariant`]. + /// + /// Once a [`SchemaVariant`] has had all of its [`Props`](crate::Prop) created, there are a few + /// things that need to happen before it is usable: + /// + /// * Create the default [`AttributePrototypes`](crate::AttributePrototype) + /// * Create the _internally consuming_ [`InternalProviders`](crate::InternalProvider) + /// corresponding to every [`Prop`](crate::Prop) in the [`SchemaVariant`] that is not a + /// descendant of an Array or a Map. + /// + /// This method **MUST** be called once all the [`Props`](Prop) have been created for the + /// [`SchemaVariant`]. It can be called multiple times while [`Props`](Prop) are being created, + /// but it must be called once after all [`Props`](Prop) have been created. + pub async fn schema_variant_finalize( + &mut self, + ctx: &DalContext, + change_set: &ChangeSetPointer, + schema_variant_id: SchemaVariantId, + ) -> WorkspaceSnapshotResult<()> { + self.schema_variant_create_default_prototypes(ctx, change_set, schema_variant_id) + .await?; + self.schema_variant_create_implicit_internal_providers(ctx, change_set, schema_variant_id) + .await?; + + // TODO(nick,jacob,zack): if we are going to copy the existing system (which we likely will), we need to + // set "/root/si/type" and "/root/si/protected". 
+ + Ok(()) + } +} diff --git a/lib/dal/src/workspace_snapshot/api/schema/variant/root_prop.rs b/lib/dal/src/workspace_snapshot/api/schema/variant/root_prop.rs new file mode 100644 index 0000000000..0a83004c57 --- /dev/null +++ b/lib/dal/src/workspace_snapshot/api/schema/variant/root_prop.rs @@ -0,0 +1,591 @@ + + + +use crate::change_set_pointer::ChangeSetPointer; + +use crate::property_editor::schema::WidgetKind; +use crate::schema::variant::root_prop::RootProp; +use crate::validation::Validation; +use crate::workspace_snapshot::api::prop::PropParent; +use crate::workspace_snapshot::WorkspaceSnapshotResult; +use crate::{ + schema::variant::leaves::LeafKind, DalContext, PropId, PropKind, SchemaId, SchemaVariantId, + StandardModel, WorkspaceSnapshot, +}; + +impl WorkspaceSnapshot { + /// Create and set a [`RootProp`] for the [`SchemaVariant`]. + pub async fn schema_variant_create_root_prop_tree( + &mut self, + ctx: &DalContext, + change_set: &ChangeSetPointer, + schema_variant_id: SchemaVariantId, + _schema_id: SchemaId, + ) -> WorkspaceSnapshotResult { + let root_prop = self + .prop_create( + ctx, + change_set, + "root", + PropKind::Object, + None, + PropParent::SchemaVariant(schema_variant_id), + true, + ) + .await?; + + let si_prop_id = self + .schema_variant_root_prop_setup_si(ctx, change_set, root_prop.id()) + .await?; + + let domain_prop = self + .prop_create( + ctx, + change_set, + "domain", + PropKind::Object, + None, + PropParent::OrderedProp(root_prop.id()), + true, + ) + .await?; + + let secrets_prop = self + .prop_create( + ctx, + change_set, + "secrets", + PropKind::Object, + None, + PropParent::OrderedProp(root_prop.id()), + true, + ) + .await?; + + let resource_prop_id = self + .schema_variant_root_prop_setup_resource( + ctx, + change_set, + root_prop.id(), + schema_variant_id, + ) + .await?; + + let resource_value_prop_id = self + .schema_variant_root_prop_setup_resource_value( + ctx, + change_set, + root_prop.id(), + schema_variant_id, + ) + .await?; 
+ + let code_prop_id = self + .schema_variant_root_prop_setup_code(ctx, change_set, root_prop.id(), schema_variant_id) + .await?; + let qualification_prop_id = self + .schema_variant_root_prop_setup_qualification( + ctx, + change_set, + root_prop.id(), + schema_variant_id, + ) + .await?; + + let deleted_at_prop = self + .prop_create( + ctx, + change_set, + "deleted_at", + PropKind::String, + None, + PropParent::OrderedProp(root_prop.id()), + false, + ) + .await?; + self.prop_modify_by_id(ctx, change_set, deleted_at_prop.id(), |deleted_at_prop| { + deleted_at_prop.hidden = true; + Ok(()) + }) + .await?; + + // Now that the structure is set up, we can populate default + // AttributePrototypes to be updated appropriately below. + self.schema_variant_create_default_prototypes(ctx, change_set, schema_variant_id) + .await?; + + self.schema_variant_create_implicit_internal_providers(ctx, change_set, schema_variant_id) + .await?; + + Ok(RootProp { + prop_id: root_prop.id(), + si_prop_id, + domain_prop_id: domain_prop.id(), + resource_value_prop_id, + resource_prop_id, + secrets_prop_id: secrets_prop.id(), + code_prop_id, + qualification_prop_id, + deleted_at_prop_id: deleted_at_prop.id(), + }) + } + + async fn schema_variant_root_prop_insert_leaf_props( + &mut self, + ctx: &DalContext, + change_set: &ChangeSetPointer, + leaf_kind: LeafKind, + root_prop_id: PropId, + _schema_variant_id: SchemaVariantId, + ) -> WorkspaceSnapshotResult<(PropId, PropId)> { + let (leaf_prop_name, leaf_item_prop_name) = leaf_kind.prop_names(); + + let leaf_prop = self + .prop_create( + ctx, + change_set, + leaf_prop_name, + PropKind::Map, + None, + PropParent::OrderedProp(root_prop_id), + true, + ) + .await?; + self.prop_modify_by_id(ctx, change_set, leaf_prop.id(), |leaf_prop| { + leaf_prop.hidden = true; + Ok(()) + }) + .await?; + + let leaf_item_prop = self + .prop_create( + ctx, + change_set, + leaf_item_prop_name, + PropKind::Object, + None, + PropParent::OrderedProp(leaf_prop.id()), + 
true, + ) + .await?; + self.prop_modify_by_id(ctx, change_set, leaf_item_prop.id(), |leaf_item_prop| { + leaf_item_prop.hidden = true; + Ok(()) + }) + .await?; + + Ok((leaf_prop.id(), leaf_item_prop.id())) + } + + async fn schema_variant_root_prop_setup_si( + &mut self, + ctx: &DalContext, + change_set: &ChangeSetPointer, + root_prop_id: PropId, + ) -> WorkspaceSnapshotResult { + let si_prop = self + .prop_create( + ctx, + change_set, + "si", + PropKind::Object, + None, + PropParent::OrderedProp(root_prop_id), + true, + ) + .await?; + + let _si_name_prop = self + .prop_create( + ctx, + change_set, + "name", + PropKind::String, + None, + PropParent::OrderedProp(si_prop.id()), + false, + ) + .await?; + + // The protected prop ensures a component cannot be deleted in the configuration diagram. + let _protected_prop = self + .prop_create( + ctx, + change_set, + "protected", + PropKind::Boolean, + None, + PropParent::OrderedProp(si_prop.id()), + false, + ) + .await?; + + // The type prop controls the type of the configuration node. The default type can be + // determined by the schema variant author. The widget options correspond to the component + // type enumeration. + let _type_prop = self + .prop_create( + ctx, + change_set, + "type", + PropKind::String, + Some(( + WidgetKind::Select, + Some(serde_json::json!([ + { + "label": "Component", + "value": "component", + }, + { + "label": "Configuration Frame", + "value": "configurationFrame", + }, + { + "label": "Aggregation Frame", + "value": "aggregationFrame", + }, + ])), + )), + PropParent::OrderedProp(si_prop.id()), + false, + ) + .await?; + + // Override the schema variant color for nodes on the diagram. 
+ let color_prop = self + .prop_create( + ctx, + change_set, + "color", + PropKind::String, + Some((WidgetKind::Color, None)), + PropParent::OrderedProp(si_prop.id()), + false, + ) + .await?; + + self.validation_prototype_create_in_memory( + ctx, + change_set, + Validation::StringIsHexColor { value: None }, + color_prop.id(), + ) + .await?; + + Ok(si_prop.id()) + } + + async fn schema_variant_root_prop_setup_resource_value( + &mut self, + ctx: &DalContext, + change_set: &ChangeSetPointer, + root_prop_id: PropId, + _schema_variant_id: SchemaVariantId, + ) -> WorkspaceSnapshotResult { + let resource_value_prop = self + .prop_create( + ctx, + change_set, + "resource_value", + PropKind::Object, + None, + PropParent::OrderedProp(root_prop_id), + true, + ) + .await?; + self.prop_modify_by_id( + ctx, + change_set, + resource_value_prop.id(), + |resource_value_prop| { + resource_value_prop.hidden = true; + Ok(()) + }, + ) + .await?; + + Ok(resource_value_prop.id()) + } + + async fn schema_variant_root_prop_setup_resource( + &mut self, + ctx: &DalContext, + change_set: &ChangeSetPointer, + root_prop_id: PropId, + _schema_variant_id: SchemaVariantId, + ) -> WorkspaceSnapshotResult { + let resource_prop = self + .prop_create( + ctx, + change_set, + "resource", + PropKind::Object, + None, + PropParent::OrderedProp(root_prop_id), + false, + ) + .await?; + self.prop_modify_by_id(ctx, change_set, resource_prop.id(), |resource_prop| { + resource_prop.hidden = true; + Ok(()) + }) + .await?; + + let resource_status_prop = self + .prop_create( + ctx, + change_set, + "status", + PropKind::String, + None, + PropParent::Prop(root_prop_id), + false, + ) + .await?; + self.prop_modify_by_id( + ctx, + change_set, + resource_status_prop.id(), + |resource_status_prop| { + resource_status_prop.hidden = true; + Ok(()) + }, + ) + .await?; + + let resource_message_prop = self + .prop_create( + ctx, + change_set, + "message", + PropKind::String, + None, + PropParent::Prop(root_prop_id), + false, + 
) + .await?; + self.prop_modify_by_id( + ctx, + change_set, + resource_message_prop.id(), + |resource_message_prop| { + resource_message_prop.hidden = true; + Ok(()) + }, + ) + .await?; + + let resource_logs_prop = self + .prop_create( + ctx, + change_set, + "logs", + PropKind::Array, + None, + PropParent::Prop(root_prop_id), + true, + ) + .await?; + self.prop_modify_by_id( + ctx, + change_set, + resource_logs_prop.id(), + |resource_logs_prop| { + resource_logs_prop.hidden = true; + Ok(()) + }, + ) + .await?; + + let resource_logs_log_prop = self + .prop_create( + ctx, + change_set, + "log", + PropKind::String, + None, + PropParent::OrderedProp(root_prop_id), + false, + ) + .await?; + self.prop_modify_by_id( + ctx, + change_set, + resource_logs_log_prop.id(), + |resource_logs_log_prop| { + resource_logs_log_prop.hidden = true; + Ok(()) + }, + ) + .await?; + + let resource_payload_prop = self + .prop_create( + ctx, + change_set, + "payload", + PropKind::String, + None, + PropParent::Prop(root_prop_id), + false, + ) + .await?; + self.prop_modify_by_id( + ctx, + change_set, + resource_payload_prop.id(), + |resource_payload_prop| { + resource_payload_prop.hidden = true; + Ok(()) + }, + ) + .await?; + + let resource_last_synced_prop = self + .prop_create( + ctx, + change_set, + "resource_last_synced_prop", + PropKind::String, + None, + PropParent::Prop(root_prop_id), + false, + ) + .await?; + self.prop_modify_by_id( + ctx, + change_set, + resource_last_synced_prop.id(), + |resource_last_synced_prop| { + resource_last_synced_prop.hidden = true; + Ok(()) + }, + ) + .await?; + + Ok(resource_prop.id()) + } + + async fn schema_variant_root_prop_setup_code( + &mut self, + ctx: &DalContext, + change_set: &ChangeSetPointer, + root_prop_id: PropId, + schema_variant_id: SchemaVariantId, + ) -> WorkspaceSnapshotResult { + let (code_map_prop_id, code_map_item_prop_id) = self + .schema_variant_root_prop_insert_leaf_props( + ctx, + change_set, + LeafKind::CodeGeneration, + 
root_prop_id, + schema_variant_id, + ) + .await?; + + let child_code_prop = self + .prop_create( + ctx, + change_set, + "code", + PropKind::String, + None, + PropParent::OrderedProp(code_map_item_prop_id), + false, + ) + .await?; + self.prop_modify_by_id(ctx, change_set, child_code_prop.id(), |child_code_prop| { + child_code_prop.hidden = true; + Ok(()) + }) + .await?; + + let child_format_prop = self + .prop_create( + ctx, + change_set, + "format", + PropKind::String, + None, + PropParent::OrderedProp(code_map_item_prop_id), + false, + ) + .await?; + self.prop_modify_by_id( + ctx, + change_set, + child_format_prop.id(), + |child_format_prop| { + child_format_prop.hidden = true; + Ok(()) + }, + ) + .await?; + + Ok(code_map_prop_id) + } + + async fn schema_variant_root_prop_setup_qualification( + &mut self, + ctx: &DalContext, + change_set: &ChangeSetPointer, + root_prop_id: PropId, + schema_variant_id: SchemaVariantId, + ) -> WorkspaceSnapshotResult { + let (qualification_map_prop_id, qualification_map_item_prop_id) = self + .schema_variant_root_prop_insert_leaf_props( + ctx, + change_set, + LeafKind::Qualification, + root_prop_id, + schema_variant_id, + ) + .await?; + + let child_qualified_prop = self + .prop_create( + ctx, + change_set, + "result", + PropKind::String, + None, + PropParent::OrderedProp(qualification_map_item_prop_id), + false, + ) + .await?; + self.prop_modify_by_id( + ctx, + change_set, + child_qualified_prop.id(), + |child_qualified_prop| { + child_qualified_prop.hidden = true; + Ok(()) + }, + ) + .await?; + + let child_message_prop = self + .prop_create( + ctx, + change_set, + "message", + PropKind::String, + None, + PropParent::OrderedProp(qualification_map_item_prop_id), + false, + ) + .await?; + self.prop_modify_by_id( + ctx, + change_set, + child_message_prop.id(), + |child_message_prop| { + child_message_prop.hidden = true; + Ok(()) + }, + ) + .await?; + + Ok(qualification_map_prop_id) + } +} diff --git 
a/lib/dal/src/workspace_snapshot/api/socket.rs b/lib/dal/src/workspace_snapshot/api/socket.rs new file mode 100644 index 0000000000..3dc243da13 --- /dev/null +++ b/lib/dal/src/workspace_snapshot/api/socket.rs @@ -0,0 +1,77 @@ +use content_store::{Store}; +use ulid::Ulid; + +use crate::change_set_pointer::ChangeSetPointer; + + + +use crate::socket::{SocketContent, SocketContentV1, SocketEdgeKind, SocketGraphNode, SocketKind, DiagramKind}; + +use crate::workspace_snapshot::content_address::ContentAddress; +use crate::workspace_snapshot::edge_weight::{EdgeWeight, EdgeWeightKind}; +use crate::workspace_snapshot::node_weight::NodeWeight; +use crate::workspace_snapshot::WorkspaceSnapshotResult; +use crate::{ + DalContext, ExternalProviderId, InternalProviderId, + SchemaVariantId, SocketArity, Timestamp, WorkspaceSnapshot, +}; + +pub enum SocketParent { + ExplicitInternalProvider(InternalProviderId), + ExternalProvider(ExternalProviderId), +} + +impl WorkspaceSnapshot { + pub async fn socket_create( + &mut self, + ctx: &DalContext, + change_set: &ChangeSetPointer, + name: impl AsRef, + kind: SocketKind, + socket_edge_kind: SocketEdgeKind, + arity: SocketArity, + diagram_kind: DiagramKind, + _schema_variant_id: Option, + socket_parent: SocketParent, + ) -> WorkspaceSnapshotResult { + let name = name.as_ref(); + let timestamp = Timestamp::now(); + + let content = SocketContentV1 { + timestamp, + name: name.to_string(), + human_name: None, + kind, + edge_kind: socket_edge_kind, + diagram_kind, + arity, + required: false, + ui_hidden: false, + }; + let hash = ctx + .content_store() + .lock() + .await + .add(&SocketContent::V1(content.clone()))?; + + let id = change_set.generate_ulid()?; + let node_weight = NodeWeight::new_content(change_set, id, ContentAddress::Socket(hash))?; + let node_index = self.working_copy()?.add_node(node_weight)?; + + let parent_id: Ulid = match socket_parent { + SocketParent::ExplicitInternalProvider(explicit_internal_provider_id) => { + 
explicit_internal_provider_id.into() + } + SocketParent::ExternalProvider(external_provider_id) => external_provider_id.into(), + }; + + let parent_node_index = self.working_copy()?.get_node_index_by_id(parent_id)?; + self.working_copy()?.add_edge( + parent_node_index, + EdgeWeight::new(change_set, EdgeWeightKind::Use)?, + node_index, + )?; + + Ok(SocketGraphNode::assemble(id, hash, content)) + } +} diff --git a/lib/dal/src/workspace_snapshot/api/validation.rs b/lib/dal/src/workspace_snapshot/api/validation.rs new file mode 100644 index 0000000000..c9aa0e02cb --- /dev/null +++ b/lib/dal/src/workspace_snapshot/api/validation.rs @@ -0,0 +1 @@ +pub mod prototype; diff --git a/lib/dal/src/workspace_snapshot/api/validation/prototype.rs b/lib/dal/src/workspace_snapshot/api/validation/prototype.rs new file mode 100644 index 0000000000..94cae1fdd8 --- /dev/null +++ b/lib/dal/src/workspace_snapshot/api/validation/prototype.rs @@ -0,0 +1,71 @@ +use content_store::Store; + +use crate::change_set_pointer::ChangeSetPointer; +use crate::func::backend::validation::FuncBackendValidationArgs; +use crate::func::intrinsics::IntrinsicFunc; +use crate::validation::prototype::{ + ValidationPrototypeContent, ValidationPrototypeContentV1, ValidationPrototypeGraphNode, +}; +use crate::validation::Validation; +use crate::workspace_snapshot::content_address::ContentAddress; +use crate::workspace_snapshot::edge_weight::{EdgeWeight, EdgeWeightKind}; + +use crate::workspace_snapshot::node_weight::NodeWeight; +use crate::workspace_snapshot::WorkspaceSnapshotResult; +use crate::{DalContext, FuncId, PropId, Timestamp, WorkspaceSnapshot}; + +impl WorkspaceSnapshot { + pub async fn validation_prototype_create_in_memory( + &mut self, + ctx: &DalContext, + change_set: &ChangeSetPointer, + validation: Validation, + parent_prop_id: PropId, + ) -> WorkspaceSnapshotResult { + let func_id: FuncId = self.func_find_intrinsic(IntrinsicFunc::Validation)?; + let args = 
serde_json::to_value(FuncBackendValidationArgs::new(validation))?; + let validation_prototype_graph_node = self + .validation_prototype_create(ctx, change_set, func_id, args, parent_prop_id) + .await?; + Ok(validation_prototype_graph_node) + } + + pub async fn validation_prototype_create( + &mut self, + ctx: &DalContext, + change_set: &ChangeSetPointer, + func_id: FuncId, + args: serde_json::Value, + parent_prop_id: PropId, + ) -> WorkspaceSnapshotResult { + let timestamp = Timestamp::now(); + + let content = ValidationPrototypeContentV1 { + timestamp, + func_id, + args, + link: None, + }; + let hash = ctx + .content_store() + .lock() + .await + .add(&ValidationPrototypeContent::V1(content.clone()))?; + + let id = change_set.generate_ulid()?; + let node_weight = + NodeWeight::new_content(change_set, id, ContentAddress::ValidationPrototype(hash))?; + let node_index = self.working_copy()?.add_node(node_weight)?; + + let parent_prop_index = self + .working_copy()? + .get_node_index_by_id(parent_prop_id.into())?; + self.working_copy()?.add_edge( + parent_prop_index, + EdgeWeight::new(change_set, EdgeWeightKind::Use)?, + node_index, + )?; + + Ok(ValidationPrototypeGraphNode::assemble(id, hash, content)) + } +} diff --git a/lib/dal/src/workspace_snapshot/content_address.rs b/lib/dal/src/workspace_snapshot/content_address.rs index 4c47f6dbbc..8f1fb14399 100644 --- a/lib/dal/src/workspace_snapshot/content_address.rs +++ b/lib/dal/src/workspace_snapshot/content_address.rs @@ -1,11 +1,13 @@ use content_store::ContentHash; use serde::{Deserialize, Serialize}; +use strum::EnumDiscriminants; #[remain::sorted] -#[derive(Debug, Serialize, Deserialize, Copy, Clone, PartialEq, Eq)] +#[derive(EnumDiscriminants, Debug, Serialize, Deserialize, Copy, Clone, PartialEq, Eq)] /// The type of the object, and the content-addressable-storage address (content hash) /// of the object itself. 
pub enum ContentAddress { + ActionPrototype(ContentHash), AttributePrototype(ContentHash), AttributeValue(ContentHash), Component(ContentHash), @@ -13,26 +15,33 @@ pub enum ContentAddress { Func(ContentHash), FuncArg(ContentHash), InternalProvider(ContentHash), + Node(ContentHash), Prop(ContentHash), Root, Schema(ContentHash), SchemaVariant(ContentHash), + Socket(ContentHash), + ValidationPrototype(ContentHash), } impl ContentAddress { pub fn content_hash(&self) -> ContentHash { match self { - ContentAddress::AttributePrototype(id) => Some(*id), - ContentAddress::AttributeValue(id) => Some(*id), - ContentAddress::Component(id) => Some(*id), - ContentAddress::ExternalProvider(id) => Some(*id), - ContentAddress::FuncArg(id) => Some(*id), - ContentAddress::Func(id) => Some(*id), - ContentAddress::InternalProvider(id) => Some(*id), - ContentAddress::Prop(id) => Some(*id), ContentAddress::Root => None, - ContentAddress::Schema(id) => Some(*id), - ContentAddress::SchemaVariant(id) => Some(*id), + ContentAddress::ActionPrototype(id) + | ContentAddress::AttributePrototype(id) + | ContentAddress::AttributeValue(id) + | ContentAddress::Component(id) + | ContentAddress::ExternalProvider(id) + | ContentAddress::FuncArg(id) + | ContentAddress::Func(id) + | ContentAddress::InternalProvider(id) + | ContentAddress::Node(id) + | ContentAddress::Prop(id) + | ContentAddress::Schema(id) + | ContentAddress::SchemaVariant(id) + | ContentAddress::Socket(id) + | ContentAddress::ValidationPrototype(id) => Some(*id), } .unwrap_or_default() } diff --git a/lib/dal/src/workspace_snapshot/edge_weight.rs b/lib/dal/src/workspace_snapshot/edge_weight.rs index f052a8054b..daafa09936 100644 --- a/lib/dal/src/workspace_snapshot/edge_weight.rs +++ b/lib/dal/src/workspace_snapshot/edge_weight.rs @@ -6,6 +6,9 @@ use thiserror::Error; use crate::change_set_pointer::ChangeSetPointer; use crate::workspace_snapshot::vector_clock::{VectorClock, VectorClockError, VectorClockId}; +use crate::ActionKind; + +use 
strum::EnumDiscriminants; #[derive(Debug, Error)] pub enum EdgeWeightError { @@ -16,8 +19,10 @@ pub enum EdgeWeightError { pub type EdgeWeightResult = Result; #[remain::sorted] -#[derive(Default, Debug, Serialize, Deserialize, Clone, PartialEq, Eq, Hash)] +#[derive(Default, Debug, Serialize, Deserialize, Clone, PartialEq, Eq, Hash, EnumDiscriminants)] pub enum EdgeWeightKind { + /// A function used by a [`SchemaVariant`] to perform an action that affects its resource + ActionPrototype(ActionKind), /// An argument to a function defined by an [`AttributePrototype`][crate::AttributePrototype], /// including the name of the argument to the function. Argument(String), @@ -25,13 +30,14 @@ pub enum EdgeWeightKind { /// array/map, or a field of an object. The optional [`String`] represents the key of the entry /// in a map. Contain(Option), - /// Used when the target/destination of an edge is an [`InternalProvider`], or an - /// [`ExternalProvider`]. - DataProvider, /// Used to record the order that the elements of a container should be presented in. Ordering, + /// Used to link an attribute value to the prop that it is for. Prop, Prototype, + /// Used when the target/destination of an edge is an [`InternalProvider`], or an + /// [`ExternalProvider`]. + Provider, Proxy, /// Workspaces "use" functions, modules, schemas. Schemas "use" schema variants. /// Schema variants "use" props. Props "use" functions, and other props. 
Modules diff --git a/lib/dal/src/workspace_snapshot/graph.rs b/lib/dal/src/workspace_snapshot/graph.rs index d53ce526e8..3dd172e928 100644 --- a/lib/dal/src/workspace_snapshot/graph.rs +++ b/lib/dal/src/workspace_snapshot/graph.rs @@ -1,5 +1,7 @@ use chrono::Utc; use content_store::{ContentHash, Store, StoreError}; +use petgraph::graph::Edge; +use petgraph::stable_graph::Edges; use petgraph::{algo, prelude::*, visit::DfsEvent}; use serde::{Deserialize, Serialize}; use std::collections::{HashMap, HashSet, VecDeque}; @@ -12,13 +14,18 @@ use crate::workspace_snapshot::vector_clock::VectorClockId; use crate::workspace_snapshot::{ conflict::Conflict, content_address::ContentAddress, - edge_weight::{EdgeWeight, EdgeWeightError, EdgeWeightKind}, + edge_weight::{EdgeWeight, EdgeWeightError, EdgeWeightKind, EdgeWeightKindDiscriminants}, node_weight::{NodeWeight, NodeWeightError, OrderingNodeWeight}, update::Update, }; +use crate::FuncId; + +use crate::workspace_snapshot::node_weight::category_node_weight::CategoryNodeKind; +use crate::workspace_snapshot::node_weight::CategoryNodeWeight; /// Ensure [`NodeIndex`] is usable by external crates. 
pub use petgraph::graph::NodeIndex; +pub use petgraph::Direction; pub type LineageId = Ulid; @@ -143,6 +150,64 @@ impl WorkspaceSnapshotGraph { Ok(new_node_index) } + pub fn add_category_node( + &mut self, + change_set: &ChangeSetPointer, + kind: CategoryNodeKind, + ) -> WorkspaceSnapshotGraphResult { + let inner_weight = CategoryNodeWeight::new(change_set, kind)?; + let new_node_index = self.add_node(NodeWeight::Category(inner_weight))?; + Ok(new_node_index) + } + + pub fn get_category_child( + &mut self, + kind: CategoryNodeKind, + ) -> WorkspaceSnapshotGraphResult<(Ulid, NodeIndex)> { + for edgeref in self.graph.edges_directed(self.root(), Outgoing) { + let node_weight = self + .graph + .node_weight(edgeref.target()) + .ok_or(WorkspaceSnapshotGraphError::NodeWeightNotFound)?; + if let NodeWeight::Category(inner_weight) = node_weight { + if inner_weight.kind() == kind { + return Ok((inner_weight.id(), edgeref.target())); + } + } + } + self.dot(); + todo!("could not get category child") + } + + pub fn edges_directed( + &self, + node_index: NodeIndex, + direction: Direction, + ) -> Edges<'_, EdgeWeight, Directed, u32> { + self.graph.edges_directed(node_index, direction) + } + + pub fn func_find_by_name( + &self, + parent_node_index: NodeIndex, + name: impl AsRef, + ) -> WorkspaceSnapshotGraphResult> { + let name = name.as_ref(); + for edgeref in self.graph.edges_directed(parent_node_index, Outgoing) { + let node_weight = self + .graph + .node_weight(edgeref.target()) + .ok_or(WorkspaceSnapshotGraphError::NodeWeightNotFound)?; + if let NodeWeight::Func(inner_weight) = node_weight { + if inner_weight.name() == name { + return Ok(Some(inner_weight.id().into())); + } + } + } + + Ok(None) + } + pub fn add_ordered_edge( &mut self, change_set: &ChangeSetPointer, @@ -191,8 +256,7 @@ impl WorkspaceSnapshotGraph { Ok(new_edge_index) } - #[allow(dead_code)] - fn add_ordered_node( + pub fn add_ordered_node( &mut self, change_set: &ChangeSetPointer, node: NodeWeight, @@ -320,8 
+384,8 @@ impl WorkspaceSnapshotGraph { // determine their sorting in oldest to most recent order. let mut child_index_to_position = HashMap::new(); let mut child_indexes = Vec::new(); - let mut outgoing_edges = self.graph.edges_directed(current_node_index, Outgoing); - while let Some(edge_ref) = outgoing_edges.next() { + let outgoing_edges = self.graph.edges_directed(current_node_index, Outgoing); + for edge_ref in outgoing_edges { match edge_ref.weight().kind() { EdgeWeightKind::Contain(Some(key)) => { view_pointer @@ -639,11 +703,11 @@ impl WorkspaceSnapshotGraph { // There was at least one thing with a merkle tree hash difference, so we need // to examine further down the tree to see where the difference(s) are, and // where there are conflicts, if there are any. - return Ok(petgraph::visit::Control::Continue); + Ok(petgraph::visit::Control::Continue) } else { // Everything to be rebased is identical, so there's no need to examine the // rest of the tree looking for differences & conflicts that won't be there. - return Ok(petgraph::visit::Control::Prune); + Ok(petgraph::visit::Control::Prune) } } DfsEvent::TreeEdge(_, _) @@ -652,7 +716,7 @@ impl WorkspaceSnapshotGraph { | DfsEvent::Finish(_, _) => { // These events are all ignored, since we handle looking at edges as we encounter // the node(s) the edges are coming from (Outgoing edges). 
- return Ok(petgraph::visit::Control::Continue); + Ok(petgraph::visit::Control::Continue) } } } @@ -894,8 +958,8 @@ impl WorkspaceSnapshotGraph { }); } } - } else if let Some(onto_item_node_weight) = - onto.get_node_weight(only_onto_item_index).ok() + } else if let Ok(onto_item_node_weight) = + onto.get_node_weight(only_onto_item_index) { if let Some(root_seen_as_of) = onto_root_seen_as_of { if onto_item_node_weight @@ -1172,7 +1236,7 @@ impl WorkspaceSnapshotGraph { ) -> WorkspaceSnapshotGraphResult<()> { let seen_at = Utc::now(); for edge in self.graph.edge_weights_mut() { - edge.mark_seen_at(vector_clock_id, seen_at.clone()); + edge.mark_seen_at(vector_clock_id, seen_at); } for node in self.graph.node_weights_mut() { node.mark_seen_at(vector_clock_id, seen_at); @@ -1253,17 +1317,20 @@ impl WorkspaceSnapshotGraph { Ok(prop_node_indexes.get(0).copied()) } + pub(crate) fn remove_edge_by_index(&mut self, edge_index: EdgeIndex) -> Option { + self.graph.remove_edge(edge_index) + } + /// [`StableGraph`] guarantees the stability of [`NodeIndex`] across removals, however there /// are **NO** guarantees around the stability of [`EdgeIndex`] across removals. If /// [`Self::cleanup()`] has been called, then any [`EdgeIndex`] found before /// [`Self::cleanup()`] has run should be considered invalid. 
- #[allow(dead_code)] pub(crate) fn remove_edge( &mut self, change_set: &ChangeSetPointer, source_node_index: NodeIndex, target_node_index: NodeIndex, - edge_kind: EdgeWeightKind, + edge_kind: EdgeWeightKindDiscriminants, ) -> WorkspaceSnapshotGraphResult<()> { let mut edges_to_remove = Vec::new(); let new_source_node_index = self.copy_node_index(source_node_index)?; @@ -1273,7 +1340,7 @@ impl WorkspaceSnapshotGraph { .graph .edges_connecting(new_source_node_index, target_node_index) { - if edgeref.weight().kind() == &edge_kind { + if edge_kind == edgeref.weight().kind().into() { edges_to_remove.push(edgeref.id()); } } @@ -1324,17 +1391,6 @@ impl WorkspaceSnapshotGraph { Ok(()) } - pub(crate) fn remove_edge_for_update_stableish( - &mut self, - edge_index: EdgeIndex, - ) -> WorkspaceSnapshotGraphResult<()> { - let _ = self - .graph - .remove_edge(edge_index) - .ok_or(WorkspaceSnapshotGraphError::EdgeDoesNotExist(edge_index))?; - Ok(()) - } - pub(crate) fn get_edge_by_index_stableish( &mut self, edge_index: EdgeIndex, @@ -1457,7 +1513,7 @@ impl WorkspaceSnapshotGraph { // order specified by the ordering graph node. let explicitly_ordered_children = self .ordered_children_for_node(node_index_to_update)? - .unwrap_or_else(Vec::new); + .unwrap_or_default(); // Need to make sure the unordered neighbors are added to the hash in a stable order to // ensure the merkle tree hash is identical for identical trees. @@ -1509,11 +1565,15 @@ impl WorkspaceSnapshotGraph { EdgeWeightKind::Argument(arg_name) => hasher.update(arg_name.as_bytes()), // This is the key for an entry in a map. EdgeWeightKind::Contain(Some(key)) => hasher.update(key.as_bytes()), + // This is the kind of the action. + EdgeWeightKind::ActionPrototype(kind) => { + hasher.update(kind.to_string().as_bytes()) + } // Nothing to do, as these EdgeWeightKind do not encode extra information // in the edge itself. 
EdgeWeightKind::Contain(None) - | EdgeWeightKind::DataProvider + | EdgeWeightKind::Provider | EdgeWeightKind::Ordering | EdgeWeightKind::Prop | EdgeWeightKind::Prototype @@ -1541,13 +1601,13 @@ fn ordering_node_indexes_for_node_index( .graph .edges_directed(node_index, Outgoing) .filter_map(|edge_reference| { - if edge_reference.weight().kind() == &EdgeWeightKind::Ordering { - if matches!( + if edge_reference.weight().kind() == &EdgeWeightKind::Ordering + && matches!( snapshot.get_node_weight(edge_reference.target()), Ok(NodeWeight::Ordering(_)) - ) { - return Some(edge_reference.target()); - } + ) + { + return Some(edge_reference.target()); } None @@ -1563,13 +1623,13 @@ fn prop_node_indexes_for_node_index( .graph .edges_directed(node_index, Outgoing) .filter_map(|edge_reference| { - if edge_reference.weight().kind() == &EdgeWeightKind::Prop { - if matches!( + if edge_reference.weight().kind() == &EdgeWeightKind::Prop + && matches!( snapshot.get_node_weight(edge_reference.target()), Ok(NodeWeight::Prop(_)) - ) { - return Some(edge_reference.target()); - } + ) + { + return Some(edge_reference.target()); } None }) @@ -2740,7 +2800,7 @@ mod test { base_graph .get_node_index_by_id(component_id) .expect("Unable to get NodeIndex"), - EdgeWeightKind::Use, + EdgeWeightKindDiscriminants::Use, ) .expect("Unable to remove Component A"); @@ -3004,7 +3064,7 @@ mod test { base_graph .get_node_index_by_id(nginx_butane_component_id) .expect("Unable to get NodeIndex"), - EdgeWeightKind::Use, + EdgeWeightKindDiscriminants::Use, ) .expect("Unable to update the component"); new_graph @@ -3798,7 +3858,7 @@ mod test { .get_node_index_by_id(prop_id) .expect("Unable to get NodeIndex for prop"), ordered_prop_2_index, - EdgeWeightKind::Use, + EdgeWeightKindDiscriminants::Use, ) .expect("Unable to remove prop -> ordered_prop_2 edge"); @@ -4826,7 +4886,7 @@ mod test { .get_node_index_by_id(container_prop_id) .expect("Unable to get container NodeIndex"), ordered_prop_2_index, - 
EdgeWeightKind::Use, + EdgeWeightKindDiscriminants::Use, ) .expect("Unable to remove container prop -> prop 2 edge"); diff --git a/lib/dal/src/workspace_snapshot/node_weight.rs b/lib/dal/src/workspace_snapshot/node_weight.rs index 97d53528b8..fd31420137 100644 --- a/lib/dal/src/workspace_snapshot/node_weight.rs +++ b/lib/dal/src/workspace_snapshot/node_weight.rs @@ -5,6 +5,7 @@ use thiserror::Error; use ulid::Ulid; use crate::workspace_snapshot::vector_clock::VectorClockId; + use crate::{ change_set_pointer::{ChangeSetPointer, ChangeSetPointerError}, workspace_snapshot::{ @@ -14,11 +15,17 @@ use crate::{ PropKind, }; +pub use category_node_weight::CategoryNodeWeight; pub use content_node_weight::ContentNodeWeight; +pub use func_node_weight::FuncNodeWeight; pub use ordering_node_weight::OrderingNodeWeight; pub use prop_node_weight::PropNodeWeight; +use super::content_address::ContentAddressDiscriminants; + +pub mod category_node_weight; pub mod content_node_weight; +pub mod func_node_weight; pub mod ordering_node_weight; pub mod prop_node_weight; @@ -44,7 +51,9 @@ pub type NodeWeightResult = Result; #[derive(Debug, Serialize, Deserialize, Clone)] pub enum NodeWeight { + Category(CategoryNodeWeight), Content(ContentNodeWeight), + Func(FuncNodeWeight), Ordering(OrderingNodeWeight), Prop(PropNodeWeight), } @@ -52,17 +61,31 @@ pub enum NodeWeight { impl NodeWeight { pub fn content_hash(&self) -> ContentHash { match self { - NodeWeight::Content(content_weight) => content_weight.content_hash(), - NodeWeight::Ordering(ordering_weight) => ordering_weight.content_hash(), - NodeWeight::Prop(prop_weight) => prop_weight.content_hash(), + NodeWeight::Category(weight) => weight.content_hash(), + NodeWeight::Content(weight) => weight.content_hash(), + NodeWeight::Func(weight) => weight.content_hash(), + NodeWeight::Ordering(weight) => weight.content_hash(), + NodeWeight::Prop(weight) => weight.content_hash(), + } + } + + pub fn content_address_discriminants(&self) -> Option { + 
match self { + NodeWeight::Content(weight) => Some(weight.content_address().into()), + NodeWeight::Category(_) + | NodeWeight::Func(_) + | NodeWeight::Ordering(_) + | NodeWeight::Prop(_) => None, } } pub fn id(&self) -> Ulid { match self { - NodeWeight::Content(content_weight) => content_weight.id(), - NodeWeight::Ordering(ordering_weight) => ordering_weight.id(), - NodeWeight::Prop(prop_weight) => prop_weight.id(), + NodeWeight::Category(weight) => weight.id(), + NodeWeight::Content(weight) => weight.id(), + NodeWeight::Func(weight) => weight.id(), + NodeWeight::Ordering(weight) => weight.id(), + NodeWeight::Prop(weight) => weight.id(), } } @@ -71,33 +94,31 @@ impl NodeWeight { change_set: &ChangeSetPointer, ) -> NodeWeightResult<()> { match self { - NodeWeight::Content(content_weight) => { - content_weight.increment_vector_clock(change_set) - } - NodeWeight::Ordering(ordering_weight) => { - ordering_weight.increment_vector_clock(change_set) - } - NodeWeight::Prop(prop_weight) => prop_weight.increment_vector_clock(change_set), + NodeWeight::Category(weight) => weight.increment_vector_clock(change_set), + NodeWeight::Content(weight) => weight.increment_vector_clock(change_set), + NodeWeight::Func(weight) => weight.increment_vector_clock(change_set), + NodeWeight::Ordering(weight) => weight.increment_vector_clock(change_set), + NodeWeight::Prop(weight) => weight.increment_vector_clock(change_set), } } pub fn lineage_id(&self) -> Ulid { match self { - NodeWeight::Content(content_weight) => content_weight.lineage_id(), - NodeWeight::Ordering(ordering_weight) => ordering_weight.lineage_id(), - NodeWeight::Prop(prop_weight) => prop_weight.lineage_id(), + NodeWeight::Category(weight) => weight.lineage_id(), + NodeWeight::Content(weight) => weight.lineage_id(), + NodeWeight::Func(weight) => weight.lineage_id(), + NodeWeight::Ordering(weight) => weight.lineage_id(), + NodeWeight::Prop(weight) => weight.lineage_id(), } } pub fn mark_seen_at(&mut self, vector_clock_id: 
VectorClockId, seen_at: DateTime) { match self { - NodeWeight::Content(content_weight) => { - content_weight.mark_seen_at(vector_clock_id, seen_at) - } - NodeWeight::Ordering(ordering_weight) => { - ordering_weight.mark_seen_at(vector_clock_id, seen_at) - } - NodeWeight::Prop(prop_weight) => prop_weight.mark_seen_at(vector_clock_id, seen_at), + NodeWeight::Category(weight) => weight.mark_seen_at(vector_clock_id, seen_at), + NodeWeight::Content(weight) => weight.mark_seen_at(vector_clock_id, seen_at), + NodeWeight::Func(weight) => weight.mark_seen_at(vector_clock_id, seen_at), + NodeWeight::Ordering(weight) => weight.mark_seen_at(vector_clock_id, seen_at), + NodeWeight::Prop(weight) => weight.mark_seen_at(vector_clock_id, seen_at), } } @@ -107,16 +128,20 @@ impl NodeWeight { other: &NodeWeight, ) -> NodeWeightResult<()> { match (self, other) { - ( - NodeWeight::Content(self_content_weight), - NodeWeight::Content(other_content_weight), - ) => self_content_weight.merge_clocks(change_set, other_content_weight), - ( - NodeWeight::Ordering(self_ordering_weight), - NodeWeight::Ordering(other_ordering_weight), - ) => self_ordering_weight.merge_clocks(change_set, other_ordering_weight), - (NodeWeight::Prop(self_prop_weight), NodeWeight::Prop(other_prop_weight)) => { - self_prop_weight.merge_clocks(change_set, other_prop_weight) + (NodeWeight::Category(self_weight), NodeWeight::Category(other_weight)) => { + self_weight.merge_clocks(change_set, other_weight) + } + (NodeWeight::Content(self_weight), NodeWeight::Content(other_weight)) => { + self_weight.merge_clocks(change_set, other_weight) + } + (NodeWeight::Func(self_weight), NodeWeight::Func(other_weight)) => { + self_weight.merge_clocks(change_set, other_weight) + } + (NodeWeight::Ordering(self_weight), NodeWeight::Ordering(other_weight)) => { + self_weight.merge_clocks(change_set, other_weight) + } + (NodeWeight::Prop(self_weight), NodeWeight::Prop(other_weight)) => { + self_weight.merge_clocks(change_set, other_weight) 
} _ => Err(NodeWeightError::IncompatibleNodeWeightVariants), } @@ -124,59 +149,43 @@ impl NodeWeight { pub fn merkle_tree_hash(&self) -> ContentHash { match self { - NodeWeight::Content(content_weight) => content_weight.merkle_tree_hash(), - NodeWeight::Ordering(ordering_weight) => ordering_weight.merkle_tree_hash(), - NodeWeight::Prop(prop_weight) => prop_weight.merkle_tree_hash(), + NodeWeight::Category(weight) => weight.merkle_tree_hash(), + NodeWeight::Content(weight) => weight.merkle_tree_hash(), + NodeWeight::Func(weight) => weight.merkle_tree_hash(), + NodeWeight::Ordering(weight) => weight.merkle_tree_hash(), + NodeWeight::Prop(weight) => weight.merkle_tree_hash(), } } - pub fn new_content( - change_set: &ChangeSetPointer, - content_id: Ulid, - kind: ContentAddress, - ) -> NodeWeightResult { - Ok(NodeWeight::Content(ContentNodeWeight::new( - change_set, content_id, kind, - )?)) - } - pub fn new_content_hash(&mut self, content_hash: ContentHash) -> NodeWeightResult<()> { match self { - NodeWeight::Content(content_weight) => content_weight.new_content_hash(content_hash), + NodeWeight::Category(_) => Err(NodeWeightError::CannotSetContentHashOnKind), + NodeWeight::Content(weight) => weight.new_content_hash(content_hash), + NodeWeight::Func(weight) => weight.new_content_hash(content_hash), NodeWeight::Ordering(_) => Err(NodeWeightError::CannotSetContentHashOnKind), - NodeWeight::Prop(prop_weight) => prop_weight.new_content_hash(content_hash), + NodeWeight::Prop(weight) => weight.new_content_hash(content_hash), } } - pub fn new_prop( - change_set: &ChangeSetPointer, - prop_id: Ulid, - prop_kind: PropKind, - name: impl AsRef, - content_hash: ContentHash, - ) -> NodeWeightResult { - Ok(NodeWeight::Prop(PropNodeWeight::new( - change_set, - prop_id, - ContentAddress::Prop(content_hash), - prop_kind, - name.as_ref().to_string(), - )?)) - } - pub fn new_with_incremented_vector_clock( &self, change_set: &ChangeSetPointer, ) -> NodeWeightResult { let new_weight = match 
self { - NodeWeight::Content(content_weight) => { - NodeWeight::Content(content_weight.new_with_incremented_vector_clock(change_set)?) + NodeWeight::Category(weight) => { + NodeWeight::Category(weight.new_with_incremented_vector_clock(change_set)?) } - NodeWeight::Ordering(ordering_weight) => { - NodeWeight::Ordering(ordering_weight.new_with_incremented_vector_clock(change_set)?) + NodeWeight::Content(weight) => { + NodeWeight::Content(weight.new_with_incremented_vector_clock(change_set)?) } - NodeWeight::Prop(prop_weight) => { - NodeWeight::Prop(prop_weight.new_with_incremented_vector_clock(change_set)?) + NodeWeight::Func(weight) => { + NodeWeight::Func(weight.new_with_incremented_vector_clock(change_set)?) + } + NodeWeight::Ordering(weight) => { + NodeWeight::Ordering(weight.new_with_incremented_vector_clock(change_set)?) + } + NodeWeight::Prop(weight) => { + NodeWeight::Prop(weight.new_with_incremented_vector_clock(change_set)?) } }; @@ -185,17 +194,21 @@ impl NodeWeight { pub fn node_hash(&self) -> ContentHash { match self { - NodeWeight::Content(content_weight) => content_weight.node_hash(), - NodeWeight::Ordering(ordering_weight) => ordering_weight.node_hash(), - NodeWeight::Prop(prop_weight) => prop_weight.node_hash(), + NodeWeight::Category(weight) => weight.node_hash(), + NodeWeight::Content(weight) => weight.node_hash(), + NodeWeight::Func(weight) => weight.node_hash(), + NodeWeight::Ordering(weight) => weight.node_hash(), + NodeWeight::Prop(weight) => weight.node_hash(), } } pub fn set_merkle_tree_hash(&mut self, new_hash: ContentHash) { match self { - NodeWeight::Content(content_weight) => content_weight.set_merkle_tree_hash(new_hash), - NodeWeight::Ordering(ordering_weight) => ordering_weight.set_merkle_tree_hash(new_hash), - NodeWeight::Prop(prop_weight) => prop_weight.set_merkle_tree_hash(new_hash), + NodeWeight::Category(weight) => weight.set_merkle_tree_hash(new_hash), + NodeWeight::Content(weight) => weight.set_merkle_tree_hash(new_hash), + 
NodeWeight::Func(weight) => weight.set_merkle_tree_hash(new_hash), + NodeWeight::Ordering(weight) => weight.set_merkle_tree_hash(new_hash), + NodeWeight::Prop(weight) => weight.set_merkle_tree_hash(new_hash), } } @@ -205,9 +218,11 @@ impl NodeWeight { order: Vec, ) -> NodeWeightResult<()> { match self { - NodeWeight::Content(_) => Err(NodeWeightError::CannotSetOrderOnKind), NodeWeight::Ordering(ordering_weight) => ordering_weight.set_order(change_set, order), - NodeWeight::Prop(_) => Err(NodeWeightError::CannotSetOrderOnKind), + NodeWeight::Category(_) + | NodeWeight::Content(_) + | NodeWeight::Func(_) + | NodeWeight::Prop(_) => Err(NodeWeightError::CannotSetOrderOnKind), } } @@ -217,39 +232,93 @@ impl NodeWeight { new_val: DateTime, ) { match self { - NodeWeight::Content(content_weight) => { - content_weight.set_vector_clock_recently_seen_to(change_set, new_val) + NodeWeight::Category(weight) => { + weight.set_vector_clock_recently_seen_to(change_set, new_val) + } + NodeWeight::Content(weight) => { + weight.set_vector_clock_recently_seen_to(change_set, new_val) } - NodeWeight::Ordering(ordering_weight) => { - ordering_weight.set_vector_clock_recently_seen_to(change_set, new_val) + NodeWeight::Func(weight) => { + weight.set_vector_clock_recently_seen_to(change_set, new_val) } - NodeWeight::Prop(prop_weight) => { - prop_weight.set_vector_clock_recently_seen_to(change_set, new_val) + NodeWeight::Ordering(weight) => { + weight.set_vector_clock_recently_seen_to(change_set, new_val) + } + NodeWeight::Prop(weight) => { + weight.set_vector_clock_recently_seen_to(change_set, new_val) } } } pub fn vector_clock_first_seen(&self) -> &VectorClock { match self { - NodeWeight::Content(content_weight) => content_weight.vector_clock_first_seen(), - NodeWeight::Ordering(ordering_weight) => ordering_weight.vector_clock_first_seen(), - NodeWeight::Prop(prop_weight) => prop_weight.vector_clock_first_seen(), + NodeWeight::Category(weight) => weight.vector_clock_first_seen(), + 
NodeWeight::Content(weight) => weight.vector_clock_first_seen(), + NodeWeight::Func(weight) => weight.vector_clock_first_seen(), + NodeWeight::Ordering(weight) => weight.vector_clock_first_seen(), + NodeWeight::Prop(weight) => weight.vector_clock_first_seen(), } } pub fn vector_clock_recently_seen(&self) -> &VectorClock { match self { - NodeWeight::Content(content_weight) => content_weight.vector_clock_recently_seen(), - NodeWeight::Ordering(ordering_weight) => ordering_weight.vector_clock_recently_seen(), - NodeWeight::Prop(prop_weight) => prop_weight.vector_clock_recently_seen(), + NodeWeight::Category(weight) => weight.vector_clock_recently_seen(), + NodeWeight::Content(weight) => weight.vector_clock_recently_seen(), + NodeWeight::Func(weight) => weight.vector_clock_recently_seen(), + NodeWeight::Ordering(weight) => weight.vector_clock_recently_seen(), + NodeWeight::Prop(weight) => weight.vector_clock_recently_seen(), } } pub fn vector_clock_write(&self) -> &VectorClock { match self { - NodeWeight::Content(content_weight) => content_weight.vector_clock_write(), - NodeWeight::Ordering(ordering_weight) => ordering_weight.vector_clock_write(), - NodeWeight::Prop(prop_weight) => prop_weight.vector_clock_write(), + NodeWeight::Category(weight) => weight.vector_clock_write(), + NodeWeight::Content(weight) => weight.vector_clock_write(), + NodeWeight::Func(weight) => weight.vector_clock_write(), + NodeWeight::Ordering(weight) => weight.vector_clock_write(), + NodeWeight::Prop(weight) => weight.vector_clock_write(), } } + + // NOTE(nick): individual node weight funcs below. 
+ + pub fn new_content( + change_set: &ChangeSetPointer, + content_id: Ulid, + kind: ContentAddress, + ) -> NodeWeightResult { + Ok(NodeWeight::Content(ContentNodeWeight::new( + change_set, content_id, kind, + )?)) + } + + pub fn new_prop( + change_set: &ChangeSetPointer, + prop_id: Ulid, + prop_kind: PropKind, + name: impl AsRef, + content_hash: ContentHash, + ) -> NodeWeightResult { + Ok(NodeWeight::Prop(PropNodeWeight::new( + change_set, + prop_id, + ContentAddress::Prop(content_hash), + prop_kind, + name.as_ref().to_string(), + )?)) + } + + pub fn new_func( + change_set: &ChangeSetPointer, + func_id: Ulid, + name: impl AsRef, + content_hash: ContentHash, + ) -> NodeWeightResult { + Ok(NodeWeight::Func(FuncNodeWeight::new( + change_set, + func_id, + ContentAddress::Func(content_hash), + name.as_ref().to_string(), + )?)) + } } diff --git a/lib/dal/src/workspace_snapshot/node_weight/category_node_weight.rs b/lib/dal/src/workspace_snapshot/node_weight/category_node_weight.rs new file mode 100644 index 0000000000..322a1aed11 --- /dev/null +++ b/lib/dal/src/workspace_snapshot/node_weight/category_node_weight.rs @@ -0,0 +1,165 @@ +use chrono::{DateTime, Utc}; +use content_store::ContentHash; +use serde::{Deserialize, Serialize}; +use ulid::Ulid; + +use crate::change_set_pointer::ChangeSetPointer; +use crate::workspace_snapshot::vector_clock::VectorClockId; +use crate::workspace_snapshot::{node_weight::NodeWeightResult, vector_clock::VectorClock}; + +#[derive(Clone, Copy, Serialize, Deserialize, PartialEq, Eq)] +pub enum CategoryNodeKind { + Component, + Func, + Schema, +} + +#[derive(Clone, Serialize, Deserialize)] +pub struct CategoryNodeWeight { + id: Ulid, + lineage_id: Ulid, + kind: CategoryNodeKind, + content_hash: ContentHash, + merkle_tree_hash: ContentHash, + vector_clock_first_seen: VectorClock, + vector_clock_recently_seen: VectorClock, + vector_clock_write: VectorClock, +} + +impl CategoryNodeWeight { + pub fn content_hash(&self) -> ContentHash { + 
self.content_hash + } + + pub fn id(&self) -> Ulid { + self.id + } + + pub fn kind(&self) -> CategoryNodeKind { + self.kind + } + + pub fn increment_seen_vector_clock( + &mut self, + change_set: &ChangeSetPointer, + ) -> NodeWeightResult<()> { + self.vector_clock_first_seen + .inc(change_set.vector_clock_id())?; + + Ok(()) + } + + pub fn increment_vector_clock( + &mut self, + change_set: &ChangeSetPointer, + ) -> NodeWeightResult<()> { + self.vector_clock_write + .inc(change_set.vector_clock_id()) + .map_err(Into::into) + } + + pub fn lineage_id(&self) -> Ulid { + self.lineage_id + } + + pub fn mark_seen_at(&mut self, vector_clock_id: VectorClockId, seen_at: DateTime) { + self.vector_clock_recently_seen + .inc_to(vector_clock_id, seen_at); + if self + .vector_clock_first_seen + .entry_for(vector_clock_id) + .is_none() + { + self.vector_clock_first_seen + .inc_to(vector_clock_id, seen_at); + } + } + + pub fn merge_clocks( + &mut self, + change_set: &ChangeSetPointer, + other: &CategoryNodeWeight, + ) -> NodeWeightResult<()> { + self.vector_clock_write + .merge(change_set.vector_clock_id(), other.vector_clock_write())?; + self.vector_clock_first_seen.merge( + change_set.vector_clock_id(), + other.vector_clock_first_seen(), + )?; + + Ok(()) + } + + pub fn merkle_tree_hash(&self) -> ContentHash { + self.merkle_tree_hash + } + + pub fn new(change_set: &ChangeSetPointer, kind: CategoryNodeKind) -> NodeWeightResult { + Ok(Self { + id: change_set.generate_ulid()?, + lineage_id: change_set.generate_ulid()?, + kind, + vector_clock_write: VectorClock::new(change_set.vector_clock_id())?, + vector_clock_first_seen: VectorClock::new(change_set.vector_clock_id())?, + content_hash: Default::default(), + merkle_tree_hash: Default::default(), + vector_clock_recently_seen: Default::default(), + }) + } + + pub fn new_with_incremented_vector_clock( + &self, + change_set: &ChangeSetPointer, + ) -> NodeWeightResult { + let mut new_ordering_weight = self.clone(); + 
new_ordering_weight.increment_vector_clock(change_set)?; + + Ok(new_ordering_weight) + } + + pub fn node_hash(&self) -> ContentHash { + self.content_hash() + } + + pub fn set_merkle_tree_hash(&mut self, new_hash: ContentHash) { + self.merkle_tree_hash = new_hash; + } + + pub fn set_vector_clock_recently_seen_to( + &mut self, + change_set: &ChangeSetPointer, + new_val: DateTime, + ) { + self.vector_clock_recently_seen + .inc_to(change_set.vector_clock_id(), new_val); + } + + pub fn vector_clock_first_seen(&self) -> &VectorClock { + &self.vector_clock_first_seen + } + + pub fn vector_clock_recently_seen(&self) -> &VectorClock { + &self.vector_clock_recently_seen + } + + pub fn vector_clock_write(&self) -> &VectorClock { + &self.vector_clock_write + } +} + +impl std::fmt::Debug for CategoryNodeWeight { + fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { + f.debug_struct("CategoryNodeWeight") + .field("id", &self.id.to_string()) + .field("lineage_id", &self.lineage_id.to_string()) + .field("content_hash", &self.content_hash) + .field("merkle_tree_hash", &self.merkle_tree_hash) + .field("vector_clock_first_seen", &self.vector_clock_first_seen) + .field( + "vector_clock_recently_seen", + &self.vector_clock_recently_seen, + ) + .field("vector_clock_write", &self.vector_clock_write) + .finish() + } +} diff --git a/lib/dal/src/workspace_snapshot/node_weight/content_node_weight.rs b/lib/dal/src/workspace_snapshot/node_weight/content_node_weight.rs index 61a22bda24..00d24fddb7 100644 --- a/lib/dal/src/workspace_snapshot/node_weight/content_node_weight.rs +++ b/lib/dal/src/workspace_snapshot/node_weight/content_node_weight.rs @@ -120,6 +120,7 @@ impl ContentNodeWeight { pub fn new_content_hash(&mut self, content_hash: ContentHash) -> NodeWeightResult<()> { let new_address = match &self.content_address { + ContentAddress::ActionPrototype(_) => ContentAddress::ActionPrototype(content_hash), ContentAddress::AttributePrototype(_) => { 
ContentAddress::AttributePrototype(content_hash) } @@ -129,6 +130,7 @@ impl ContentNodeWeight { ContentAddress::FuncArg(_) => ContentAddress::FuncArg(content_hash), ContentAddress::Func(_) => ContentAddress::Func(content_hash), ContentAddress::InternalProvider(_) => ContentAddress::InternalProvider(content_hash), + ContentAddress::Node(_) => ContentAddress::Node(content_hash), ContentAddress::Prop(_) => { return Err(NodeWeightError::InvalidContentAddressForWeightKind( "Prop".to_string(), @@ -138,6 +140,10 @@ impl ContentNodeWeight { ContentAddress::Root => return Err(NodeWeightError::CannotUpdateRootNodeContentHash), ContentAddress::Schema(_) => ContentAddress::Schema(content_hash), ContentAddress::SchemaVariant(_) => ContentAddress::SchemaVariant(content_hash), + ContentAddress::Socket(_) => ContentAddress::Socket(content_hash), + ContentAddress::ValidationPrototype(_) => { + ContentAddress::ValidationPrototype(content_hash) + } }; self.content_address = new_address; diff --git a/lib/dal/src/workspace_snapshot/node_weight/func_node_weight.rs b/lib/dal/src/workspace_snapshot/node_weight/func_node_weight.rs new file mode 100644 index 0000000000..590d3dac53 --- /dev/null +++ b/lib/dal/src/workspace_snapshot/node_weight/func_node_weight.rs @@ -0,0 +1,266 @@ +use chrono::{DateTime, Utc}; +use content_store::ContentHash; +use serde::{Deserialize, Serialize}; +use ulid::Ulid; + +use crate::workspace_snapshot::vector_clock::VectorClockId; +use crate::{ + change_set_pointer::ChangeSetPointer, + workspace_snapshot::{ + content_address::ContentAddress, + graph::LineageId, + node_weight::{NodeWeightError, NodeWeightResult}, + vector_clock::VectorClock, + }, +}; + +#[derive(Clone, Serialize, Deserialize)] +pub struct FuncNodeWeight { + id: Ulid, + lineage_id: LineageId, + content_address: ContentAddress, + merkle_tree_hash: ContentHash, + vector_clock_first_seen: VectorClock, + vector_clock_recently_seen: VectorClock, + vector_clock_write: VectorClock, + name: String, +} + 
+impl FuncNodeWeight { + pub fn new( + change_set: &ChangeSetPointer, + id: Ulid, + content_address: ContentAddress, + name: String, + ) -> NodeWeightResult { + Ok(Self { + id, + lineage_id: change_set.generate_ulid()?, + content_address, + merkle_tree_hash: ContentHash::default(), + name, + vector_clock_first_seen: VectorClock::new(change_set.vector_clock_id())?, + vector_clock_recently_seen: VectorClock::new(change_set.vector_clock_id())?, + vector_clock_write: VectorClock::new(change_set.vector_clock_id())?, + }) + } + + pub fn content_address(&self) -> ContentAddress { + self.content_address + } + + pub fn content_hash(&self) -> ContentHash { + self.content_address.content_hash() + } + + pub fn id(&self) -> Ulid { + self.id + } + + pub fn increment_vector_clock( + &mut self, + change_set: &ChangeSetPointer, + ) -> NodeWeightResult<()> { + self.vector_clock_write.inc(change_set.vector_clock_id())?; + self.vector_clock_recently_seen + .inc(change_set.vector_clock_id())?; + + Ok(()) + } + + pub fn lineage_id(&self) -> Ulid { + self.lineage_id + } + + pub fn mark_seen_at(&mut self, vector_clock_id: VectorClockId, seen_at: DateTime) { + self.vector_clock_recently_seen + .inc_to(vector_clock_id, seen_at); + if self + .vector_clock_first_seen + .entry_for(vector_clock_id) + .is_none() + { + self.vector_clock_first_seen + .inc_to(vector_clock_id, seen_at); + } + } + + pub fn merge_clocks( + &mut self, + change_set: &ChangeSetPointer, + other: &Self, + ) -> NodeWeightResult<()> { + self.vector_clock_write + .merge(change_set.vector_clock_id(), &other.vector_clock_write)?; + self.vector_clock_first_seen + .merge(change_set.vector_clock_id(), &other.vector_clock_first_seen)?; + self.vector_clock_recently_seen.merge( + change_set.vector_clock_id(), + &other.vector_clock_recently_seen, + )?; + + Ok(()) + } + + pub fn merkle_tree_hash(&self) -> ContentHash { + self.merkle_tree_hash + } + + pub fn name(&self) -> &str { + &self.name + } + + pub fn new_content_hash(&mut self, 
content_hash: ContentHash) -> NodeWeightResult<()> { + let new_address = match &self.content_address { + ContentAddress::ActionPrototype(_) => { + return Err(NodeWeightError::InvalidContentAddressForWeightKind( + "ActionPrototype".to_string(), + "Func".to_string(), + )); + } + ContentAddress::AttributePrototype(_) => { + return Err(NodeWeightError::InvalidContentAddressForWeightKind( + "AttributePrototype".to_string(), + "Func".to_string(), + )); + } + ContentAddress::AttributeValue(_) => { + return Err(NodeWeightError::InvalidContentAddressForWeightKind( + "AttributeValue".to_string(), + "Func".to_string(), + )); + } + ContentAddress::Component(_) => { + return Err(NodeWeightError::InvalidContentAddressForWeightKind( + "Component".to_string(), + "Func".to_string(), + )); + } + ContentAddress::ExternalProvider(_) => { + return Err(NodeWeightError::InvalidContentAddressForWeightKind( + "ExternalProvider".to_string(), + "Func".to_string(), + )); + } + ContentAddress::Func(_) => ContentAddress::Func(content_hash), + ContentAddress::FuncArg(_) => { + return Err(NodeWeightError::InvalidContentAddressForWeightKind( + "FuncArg".to_string(), + "Func".to_string(), + )); + } + ContentAddress::InternalProvider(_) => { + return Err(NodeWeightError::InvalidContentAddressForWeightKind( + "InternalProvider".to_string(), + "Func".to_string(), + )); + } + ContentAddress::Node(_) => { + return Err(NodeWeightError::InvalidContentAddressForWeightKind( + "Node".to_string(), + "Func".to_string(), + )); + } + ContentAddress::Prop(_) => { + return Err(NodeWeightError::InvalidContentAddressForWeightKind( + "Prop".to_string(), + "Func".to_string(), + )); + } + ContentAddress::Root => { + return Err(NodeWeightError::InvalidContentAddressForWeightKind( + "Root".to_string(), + "Func".to_string(), + )); + } + ContentAddress::Schema(_) => { + return Err(NodeWeightError::InvalidContentAddressForWeightKind( + "Schema".to_string(), + "Func".to_string(), + )); + } + ContentAddress::SchemaVariant(_) 
=> { + return Err(NodeWeightError::InvalidContentAddressForWeightKind( + "SchemaVariant".to_string(), + "Func".to_string(), + )); + } + ContentAddress::Socket(_) => { + return Err(NodeWeightError::InvalidContentAddressForWeightKind( + "Socket".to_string(), + "Func".to_string(), + )); + } + ContentAddress::ValidationPrototype(_) => { + return Err(NodeWeightError::InvalidContentAddressForWeightKind( + "ValidationPrototype".to_string(), + "Func".to_string(), + )); + } + }; + + self.content_address = new_address; + + Ok(()) + } + + pub fn new_with_incremented_vector_clock( + &self, + change_set: &ChangeSetPointer, + ) -> NodeWeightResult { + let mut new_node_weight = self.clone(); + new_node_weight.increment_vector_clock(change_set)?; + + Ok(new_node_weight) + } + + pub fn node_hash(&self) -> ContentHash { + ContentHash::from(&serde_json::json![{ + "content_address": self.content_address, + "name": self.name, + }]) + } + + pub fn set_merkle_tree_hash(&mut self, new_hash: ContentHash) { + self.merkle_tree_hash = new_hash; + } + + pub fn set_vector_clock_recently_seen_to( + &mut self, + change_set: &ChangeSetPointer, + new_val: DateTime, + ) { + self.vector_clock_recently_seen + .inc_to(change_set.vector_clock_id(), new_val); + } + + pub fn vector_clock_first_seen(&self) -> &VectorClock { + &self.vector_clock_first_seen + } + + pub fn vector_clock_recently_seen(&self) -> &VectorClock { + &self.vector_clock_recently_seen + } + + pub fn vector_clock_write(&self) -> &VectorClock { + &self.vector_clock_write + } +} + +impl std::fmt::Debug for FuncNodeWeight { + fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { + f.debug_struct("FuncNodeWeight") + .field("id", &self.id().to_string()) + .field("lineage_id", &self.lineage_id.to_string()) + .field("name", &self.name) + .field("content_hash", &self.content_hash()) + .field("merkle_tree_hash", &self.merkle_tree_hash) + .field("vector_clock_first_seen", &self.vector_clock_first_seen) + .field( + 
"vector_clock_recently_seen", + &self.vector_clock_recently_seen, + ) + .field("vector_clock_write", &self.vector_clock_write) + .finish() + } +} diff --git a/lib/dal/src/workspace_snapshot/node_weight/prop_node_weight.rs b/lib/dal/src/workspace_snapshot/node_weight/prop_node_weight.rs index 665ed5e164..54578e343a 100644 --- a/lib/dal/src/workspace_snapshot/node_weight/prop_node_weight.rs +++ b/lib/dal/src/workspace_snapshot/node_weight/prop_node_weight.rs @@ -49,6 +49,10 @@ impl PropNodeWeight { }) } + pub fn kind(&self) -> PropKind { + self.kind + } + pub fn content_address(&self) -> ContentAddress { self.content_address } @@ -116,6 +120,12 @@ impl PropNodeWeight { pub fn new_content_hash(&mut self, content_hash: ContentHash) -> NodeWeightResult<()> { let new_address = match &self.content_address { + ContentAddress::ActionPrototype(_) => { + return Err(NodeWeightError::InvalidContentAddressForWeightKind( + "ActionPrototype".to_string(), + "Prop".to_string(), + )); + } ContentAddress::AttributePrototype(_) => { return Err(NodeWeightError::InvalidContentAddressForWeightKind( "AttributePrototype".to_string(), @@ -158,6 +168,12 @@ impl PropNodeWeight { "Prop".to_string(), )); } + ContentAddress::Node(_) => { + return Err(NodeWeightError::InvalidContentAddressForWeightKind( + "Node".to_string(), + "Prop".to_string(), + )); + } ContentAddress::Prop(_) => ContentAddress::Prop(content_hash), ContentAddress::Root => { return Err(NodeWeightError::InvalidContentAddressForWeightKind( @@ -177,6 +193,18 @@ impl PropNodeWeight { "Prop".to_string(), )); } + ContentAddress::Socket(_) => { + return Err(NodeWeightError::InvalidContentAddressForWeightKind( + "Socket".to_string(), + "Prop".to_string(), + )); + } + ContentAddress::ValidationPrototype(_) => { + return Err(NodeWeightError::InvalidContentAddressForWeightKind( + "ValidationPrototype".to_string(), + "Prop".to_string(), + )); + } }; self.content_address = new_address; diff --git a/lib/dal/src/ws_event.rs 
b/lib/dal/src/ws_event.rs index 26c095d6e1..2f04fed039 100644 --- a/lib/dal/src/ws_event.rs +++ b/lib/dal/src/ws_event.rs @@ -4,20 +4,9 @@ use si_data_pg::PgError; use thiserror::Error; use crate::change_set::{ChangeSetActorPayload, ChangeSetMergeVotePayload}; -use crate::component::ComponentCreatedPayload; -use crate::pkg::{ - ImportWorkspaceVotePayload, ModuleImportedPayload, WorkspaceActorPayload, - WorkspaceExportPayload, WorkspaceImportApprovalActorPayload, WorkspaceImportPayload, -}; +use crate::user::{CursorPayload, OnlinePayload}; use crate::{ - component::{code::CodeGeneratedPayload, resource::ResourceRefreshedPayload}, - fix::{batch::FixBatchReturn, FixReturn}, - func::binding::LogLinePayload, - qualification::QualificationCheckPayload, - status::StatusMessage, - user::{CursorPayload, OnlinePayload}, - AttributeValueId, ChangeSetPk, ComponentId, DalContext, PropId, SchemaPk, SocketId, - StandardModelError, TransactionsError, WorkspacePk, + ChangeSetPk, DalContext, PropId, SocketId, StandardModelError, TransactionsError, WorkspacePk, }; #[remain::sorted] @@ -54,23 +43,23 @@ pub enum WsPayload { ChangeSetCreated(ChangeSetPk), ChangeSetMergeVote(ChangeSetMergeVotePayload), ChangeSetWritten(ChangeSetPk), - CheckedQualifications(QualificationCheckPayload), - CodeGenerated(CodeGeneratedPayload), - ComponentCreated(ComponentCreatedPayload), + // CheckedQualifications(QualificationCheckPayload), + // CodeGenerated(CodeGeneratedPayload), + // ComponentCreated(ComponentCreatedPayload), Cursor(CursorPayload), - FixBatchReturn(FixBatchReturn), - FixReturn(FixReturn), - ImportWorkspaceVote(ImportWorkspaceVotePayload), - LogLine(LogLinePayload), - ModuleImported(ModuleImportedPayload), + // FixBatchReturn(FixBatchReturn), + // FixReturn(FixReturn), + // ImportWorkspaceVote(ImportWorkspaceVotePayload), + // LogLine(LogLinePayload), + // ModuleImported(ModuleImportedPayload), Online(OnlinePayload), - ResourceRefreshed(ResourceRefreshedPayload), - SchemaCreated(SchemaPk), 
- StatusUpdate(StatusMessage), - WorkspaceExported(WorkspaceExportPayload), - WorkspaceImportBeginApprovalProcess(WorkspaceImportApprovalActorPayload), - WorkspaceImportCancelApprovalProcess(WorkspaceActorPayload), - WorkspaceImported(WorkspaceImportPayload), + // ResourceRefreshed(ResourceRefreshedPayload), + // SchemaCreated(SchemaPk), + // StatusUpdate(StatusMessage), + // WorkspaceExported(WorkspaceExportPayload), + // WorkspaceImportBeginApprovalProcess(WorkspaceImportApprovalActorPayload), + // WorkspaceImportCancelApprovalProcess(WorkspaceActorPayload), + // WorkspaceImported(WorkspaceImportPayload), } #[remain::sorted] @@ -85,27 +74,27 @@ pub enum StatusValueKind { Qualification, } -#[derive(Deserialize, Serialize, Debug, Clone, Copy, Eq, Hash, PartialEq)] -#[serde(rename_all = "camelCase")] -pub struct AttributeValueStatusUpdate { - value_id: AttributeValueId, - component_id: ComponentId, - value_kind: StatusValueKind, -} +// #[derive(Deserialize, Serialize, Debug, Clone, Copy, Eq, Hash, PartialEq)] +// #[serde(rename_all = "camelCase")] +// pub struct AttributeValueStatusUpdate { +// value_id: AttributeValueId, +// component_id: ComponentId, +// value_kind: StatusValueKind, +// } -impl AttributeValueStatusUpdate { - pub fn new( - value_id: AttributeValueId, - component_id: ComponentId, - value_kind: StatusValueKind, - ) -> Self { - Self { - value_id, - component_id, - value_kind, - } - } -} +// impl AttributeValueStatusUpdate { +// pub fn new( +// value_id: AttributeValueId, +// component_id: ComponentId, +// value_kind: StatusValueKind, +// ) -> Self { +// Self { +// value_id, +// component_id, +// value_kind, +// } +// } +// } #[derive(Deserialize, Serialize, Debug, Clone, Eq, PartialEq)] pub struct WsEvent { diff --git a/lib/dal/tests/integration.rs b/lib/dal/tests/integration.rs index 0ad4379906..a602d02f8d 100644 --- a/lib/dal/tests/integration.rs +++ b/lib/dal/tests/integration.rs @@ -1,3 +1,3 @@ const TEST_PG_DBNAME: &str = "si_test_dal"; -mod 
integration_test; +// mod integration_test; diff --git a/lib/pinga-server/src/server.rs b/lib/pinga-server/src/server.rs index d306101b29..131c58355a 100644 --- a/lib/pinga-server/src/server.rs +++ b/lib/pinga-server/src/server.rs @@ -3,11 +3,10 @@ use std::{io, path::Path, sync::Arc}; use dal::{ job::{ consumer::{JobConsumer, JobConsumerError, JobInfo}, - definition::{FixesJob, RefreshJob}, producer::BlockingJobError, }, - DalContext, DalContextBuilder, DependentValuesUpdate, InitializationError, JobFailure, - JobFailureError, JobQueueProcessor, NatsProcessor, ServicesContext, TransactionsError, + DalContext, DalContextBuilder, InitializationError, JobFailure, JobFailureError, + JobQueueProcessor, NatsProcessor, ServicesContext, TransactionsError, }; use futures::{FutureExt, Stream, StreamExt}; use nats_subscriber::{Request, SubscriberError}; @@ -477,27 +476,26 @@ async fn execute_job( tracing::Span::current().record("job_info.blocking", job_info.blocking); } - let job = - match job_info.kind.as_str() { - stringify!(DependentValuesUpdate) => { - Box::new(DependentValuesUpdate::try_from(job_info.clone())?) - as Box - } - stringify!(FixesJob) => Box::new(FixesJob::try_from(job_info.clone())?) - as Box, - stringify!(RefreshJob) => Box::new(RefreshJob::try_from(job_info.clone())?) - as Box, - kind => return Err(ServerError::UnknownJobKind(kind.to_owned())), - }; - - info!("Processing job"); - - if let Err(err) = job.run_job(ctx_builder.clone()).await { - // The missing part is this, should we execute subsequent jobs if the one they depend on fail or not? - record_job_failure(ctx_builder, job, err).await?; - } - - info!("Finished processing job"); + // let job = match job_info.kind.as_str() { + // stringify!(DependentValuesUpdate) => { + // Box::new(DependentValuesUpdate::try_from(job_info.clone())?) + // as Box + // } + // stringify!(FixesJob) => Box::new(FixesJob::try_from(job_info.clone())?) 
+ // as Box, + // stringify!(RefreshJob) => Box::new(RefreshJob::try_from(job_info.clone())?) + // as Box, + // kind => return Err(ServerError::UnknownJobKind(kind.to_owned())), + // }; + + // info!("Processing job"); + + // if let Err(err) = job.run_job(ctx_builder.clone()).await { + // // The missing part is this, should we execute subsequent jobs if the one they depend on fail or not? + // record_job_failure(ctx_builder, job, err).await?; + // } + + // info!("Finished processing job"); Ok(()) } diff --git a/lib/rebaser-server/src/server/change_set_loop.rs b/lib/rebaser-server/src/server/change_set_loop.rs index dc74b3c564..8ffdc01b43 100644 --- a/lib/rebaser-server/src/server/change_set_loop.rs +++ b/lib/rebaser-server/src/server/change_set_loop.rs @@ -23,6 +23,8 @@ enum ChangeSetLoopError { DestinationNotUpdatedWhenImportingSubgraph, #[error("missing change set message \"reply_to\" field")] MissingChangeSetMessageReplyTo, + #[error("missing change set pointer")] + MissingChangeSetPointer(ChangeSetPointerId), #[error("missing workspace snapshot for change set ({0}) (the change set likely isn't pointing at a workspace snapshot)")] MissingWorkspaceSnapshotForChangeSet(ChangeSetPointerId), #[error("rabbit error: {0}")] @@ -120,7 +122,11 @@ async fn process_delivery( // Gather everything we need to detect conflicts and updates from the inbound message. let mut to_rebase_change_set = - ChangeSetPointer::find(ctx, message.to_rebase_change_set_id.into()).await?; + ChangeSetPointer::find(ctx, message.to_rebase_change_set_id.into()) + .await? 
+ .ok_or(ChangeSetLoopError::MissingChangeSetPointer( + message.to_rebase_change_set_id.into(), + ))?; let to_rebase_workspace_snapshot_id = to_rebase_change_set.workspace_snapshot_id.ok_or( ChangeSetLoopError::MissingWorkspaceSnapshotForChangeSet(to_rebase_change_set.id), )?; @@ -209,7 +215,8 @@ async fn perform_updates_and_write_out_and_update_pointer( to_rebase_workspace_snapshot.add_edge(source, edge_weight.clone(), destination)?; } Update::RemoveEdge(edge) => { - to_rebase_workspace_snapshot.remove_edge_for_update_stableish(*edge)?; + // TODO(nick): debug log or handle whether or not the edge was deleted. + let _ = to_rebase_workspace_snapshot.remove_edge(*edge)?; } Update::ReplaceSubgraph { onto, to_rebase } => { let to_rebase = *updated.get(to_rebase).unwrap_or(to_rebase); diff --git a/lib/rebaser-server/src/server/management_loop.rs b/lib/rebaser-server/src/server/management_loop.rs index c756fff21d..3898090a12 100644 --- a/lib/rebaser-server/src/server/management_loop.rs +++ b/lib/rebaser-server/src/server/management_loop.rs @@ -22,7 +22,7 @@ pub(crate) async fn management_loop_infallible_wrapper( veritech: veritech_client::Client, job_processor: Box, symmetric_crypto_service: SymmetricCryptoService, - encryption_key: Arc, + encryption_key: Arc, shutdown_watch_rx: watch::Receiver<()>, ) { if let Err(err) = management_loop( @@ -48,7 +48,7 @@ async fn management_loop( veritech: veritech_client::Client, job_processor: Box, symmetric_crypto_service: SymmetricCryptoService, - encryption_key: Arc, + encryption_key: Arc, _shutdown_watch_rx: watch::Receiver<()>, ) -> ServerResult<()> { let services_context = ServicesContext::new( diff --git a/lib/sdf-server/src/server/routes.rs b/lib/sdf-server/src/server/routes.rs index 3cf956fdbc..11fcf1b076 100644 --- a/lib/sdf-server/src/server/routes.rs +++ b/lib/sdf-server/src/server/routes.rs @@ -26,31 +26,31 @@ pub fn routes(state: AppState) -> Router { "/api/change_set", crate::server::service::change_set::routes(), ) - 
.nest( - "/api/component", - crate::server::service::component::routes(), - ) - .nest("/api/fix", crate::server::service::fix::routes()) - .nest("/api/func", crate::server::service::func::routes()) - .nest("/api/pkg", crate::server::service::pkg::routes()) - .nest("/api/provider", crate::server::service::provider::routes()) - .nest( - "/api/qualification", - crate::server::service::qualification::routes(), - ) - .nest("/api/schema", crate::server::service::schema::routes()) - .nest("/api/diagram", crate::server::service::diagram::routes()) - .nest("/api/secret", crate::server::service::secret::routes()) - .nest("/api/session", crate::server::service::session::routes()) - .nest("/api/status", crate::server::service::status::routes()) - .nest( - "/api/variant_def", - crate::server::service::variant_definition::routes(), - ) - .nest("/api/ws", crate::server::service::ws::routes()); + .nest("/api/session", crate::server::service::session::routes()); + // .nest( + // "/api/component", + // crate::server::service::component::routes(), + // ) + // .nest("/api/fix", crate::server::service::fix::routes()) + // .nest("/api/func", crate::server::service::func::routes()) + // .nest("/api/pkg", crate::server::service::pkg::routes()) + // .nest("/api/provider", crate::server::service::provider::routes()) + // .nest( + // "/api/qualification", + // crate::server::service::qualification::routes(), + // ) + // .nest("/api/schema", crate::server::service::schema::routes()) + // .nest("/api/diagram", crate::server::service::diagram::routes()) + // .nest("/api/secret", crate::server::service::secret::routes()) + // .nest("/api/status", crate::server::service::status::routes()) + // .nest( + // "/api/variant_def", + // crate::server::service::variant_definition::routes(), + // ) + // .nest("/api/ws", crate::server::service::ws::routes()); // Load dev routes if we are in dev mode (decided by "opt-level" at the moment). 
- router = dev_routes(router); + // router = dev_routes(router); router.with_state(state) } @@ -59,11 +59,11 @@ async fn system_status_route() -> Json { Json(json!({ "ok": true })) } -#[cfg(debug_assertions)] -pub fn dev_routes(mut router: Router) -> Router { - router = router.nest("/api/dev", crate::server::service::dev::routes()); - router -} +// #[cfg(debug_assertions)] +// pub fn dev_routes(mut router: Router) -> Router { +// router = router.nest("/api/dev", crate::server::service::dev::routes()); +// router +// } #[cfg(not(debug_assertions))] pub fn dev_routes(router: Router) -> Router { diff --git a/lib/sdf-server/src/server/server.rs b/lib/sdf-server/src/server/server.rs index 68a9a8dfd6..6b1ee2768d 100644 --- a/lib/sdf-server/src/server/server.rs +++ b/lib/sdf-server/src/server/server.rs @@ -16,13 +16,11 @@ use tokio::{ use tower_http::trace::{DefaultMakeSpan, TraceLayer}; use ulid::Ulid; -use dal::pkg::{import_pkg_from_pkg, ImportOptions, PkgError}; -use dal::tasks::{StatusReceiver, StatusReceiverError}; +use dal::ServicesContext; use dal::{ builtins, BuiltinsError, DalContext, JwtPublicSigningKey, Tenancy, TransactionsError, Workspace, WorkspaceError, }; -use dal::{tasks::ResourceScheduler, ServicesContext}; use module_index_client::types::BuiltinsDetailsResponse; use module_index_client::{IndexClient, ModuleDetailsResponse}; use si_crypto::{ @@ -74,8 +72,6 @@ pub enum ServerError { Pg(#[from] PgError), #[error(transparent)] PgPool(#[from] Box), - #[error(transparent)] - Pkg(#[from] PkgError), #[error("failed to install package")] PkgInstall, #[error(transparent)] @@ -85,8 +81,6 @@ pub enum ServerError { #[error(transparent)] SiPkg(#[from] SiPkgError), #[error(transparent)] - StatusReceiver(#[from] StatusReceiverError), - #[error(transparent)] SymmetricCryptoService(#[from] SymmetricCryptoError), #[error("transactions error: {0}")] Transactions(#[from] TransactionsError), @@ -230,23 +224,23 @@ impl Server<(), ()> { Ok(()) } - /// Start the basic resource 
refresh scheduler - pub async fn start_resource_refresh_scheduler( - services_context: ServicesContext, - shutdown_broadcast_rx: broadcast::Receiver<()>, - ) { - ResourceScheduler::new(services_context).start(shutdown_broadcast_rx); - } - - pub async fn start_status_updater( - services_context: ServicesContext, - shutdown_broadcast_rx: broadcast::Receiver<()>, - ) -> Result<()> { - StatusReceiver::new(services_context) - .await? - .start(shutdown_broadcast_rx); - Ok(()) - } + // /// Start the basic resource refresh scheduler + // pub async fn start_resource_refresh_scheduler( + // services_context: ServicesContext, + // shutdown_broadcast_rx: broadcast::Receiver<()>, + // ) { + // ResourceScheduler::new(services_context).start(shutdown_broadcast_rx); + // } + + // pub async fn start_status_updater( + // services_context: ServicesContext, + // shutdown_broadcast_rx: broadcast::Receiver<()>, + // ) -> Result<()> { + // StatusReceiver::new(services_context) + // .await? + // .start(shutdown_broadcast_rx); + // Ok(()) + // } #[instrument(name = "sdf.init.create_pg_pool", skip_all)] pub async fn create_pg_pool(pg_pool_config: &PgPoolConfig) -> Result { @@ -318,8 +312,8 @@ pub async fn migrate_builtins_from_module_index(services_context: &ServicesConte info!("migrating intrinsic functions"); builtins::func::migrate_intrinsics(&ctx).await?; - info!("migrating builtin functions"); - builtins::func::migrate(&ctx).await?; + // info!("migrating builtin functions"); + // builtins::func::migrate(&ctx).await?; let module_index_url = services_context .module_index_url() @@ -376,10 +370,10 @@ async fn install_builtins( let (pkg_name, res) = res?; match res { Ok(pkg) => { - if let Err(err) = import_pkg_from_pkg( + if let Err(err) = dal::pkg::import_pkg_from_pkg( &ctx, &pkg, - Some(ImportOptions { + Some(dal::pkg::ImportOptions { schemas: None, skip_import_funcs: None, no_record: false, @@ -394,8 +388,8 @@ async fn install_builtins( count += 1; println!( - "Pkg {pkg_name} Install 
finished successfully. {count} of {total} installed.", - ); + "Pkg {pkg_name} Install finished successfully. {count} of {total} installed.", + ); } } Err(err) => { diff --git a/lib/sdf-server/src/server/service.rs b/lib/sdf-server/src/server/service.rs index 14852669dc..c9e6e4e65a 100644 --- a/lib/sdf-server/src/server/service.rs +++ b/lib/sdf-server/src/server/service.rs @@ -1,18 +1,18 @@ pub mod change_set; -pub mod component; +// pub mod component; pub mod diagram; -pub mod fix; -pub mod func; -pub mod pkg; -pub mod provider; -pub mod qualification; -pub mod schema; -pub mod secret; +// pub mod fix; +// pub mod func; +// pub mod pkg; +// pub mod provider; +// pub mod qualification; +// pub mod schema; +// pub mod secret; pub mod session; -pub mod status; -pub mod variant_definition; +// pub mod status; +// pub mod variant_definition; pub mod ws; -/// A module containing dev routes for local development only. -#[cfg(debug_assertions)] -pub mod dev; +// /// A module containing dev routes for local development only. 
+// #[cfg(debug_assertions)] +// pub mod dev; diff --git a/lib/sdf-server/src/server/service/change_set.rs b/lib/sdf-server/src/server/service/change_set.rs index bfe07d1066..cee524caa8 100644 --- a/lib/sdf-server/src/server/service/change_set.rs +++ b/lib/sdf-server/src/server/service/change_set.rs @@ -1,53 +1,51 @@ use axum::{ http::StatusCode, response::{IntoResponse, Response}, - routing::{get, post}, + routing::get, Json, Router, }; use dal::{ - change_status::ChangeStatusError, ActionError, ActionId, ChangeSetError as DalChangeSetError, - ComponentError as DalComponentError, FixError, StandardModelError, TransactionsError, - UserError, UserPk, WsEventError, + ChangeSetError as DalChangeSetError, StandardModelError, TransactionsError, UserError, UserPk, + WsEventError, }; use module_index_client::IndexClientError; use telemetry::prelude::*; use thiserror::Error; -use crate::{server::state::AppState, service::pkg::PkgError}; +use crate::server::state::AppState; -pub mod abandon_change_set; -pub mod add_action; -pub mod apply_change_set; -mod begin_approval_process; -pub mod create_change_set; -pub mod get_change_set; -pub mod get_stats; +// pub mod abandon_change_set; +// pub mod add_action; +// pub mod apply_change_set; +// mod begin_approval_process; +// pub mod create_change_set; +// pub mod get_change_set; +// pub mod get_stats; pub mod list_open_change_sets; -mod merge_vote; -pub mod remove_action; -pub mod update_selected_change_set; +// pub mod remove_action; +// pub mod update_selected_change_set; #[remain::sorted] #[derive(Debug, Error)] pub enum ChangeSetError { - #[error(transparent)] - Action(#[from] ActionError), - #[error("action {0} not found")] - ActionNotFound(ActionId), + // #[error(transparent)] + // Action(#[from] ActionError), + // #[error("action {0} not found")] + // ActionNotFound(ActionId), #[error(transparent)] ChangeSet(#[from] DalChangeSetError), #[error("change set not found")] ChangeSetNotFound, - #[error(transparent)] - 
ChangeStatusError(#[from] ChangeStatusError), - #[error(transparent)] - Component(#[from] DalComponentError), + // #[error(transparent)] + // ChangeStatusError(#[from] ChangeStatusError), + // #[error(transparent)] + // Component(#[from] DalComponentError), #[error(transparent)] ContextError(#[from] TransactionsError), - #[error(transparent)] - DalPkg(#[from] dal::pkg::PkgError), - #[error(transparent)] - Fix(#[from] FixError), + // #[error(transparent)] + // DalPkg(#[from] dal::pkg::PkgError), + // #[error(transparent)] + // Fix(#[from] FixError), #[error(transparent)] IndexClient(#[from] IndexClientError), #[error("invalid user {0}")] @@ -58,8 +56,8 @@ pub enum ChangeSetError { Nats(#[from] si_data_nats::NatsError), #[error(transparent)] Pg(#[from] si_data_pg::PgError), - #[error(transparent)] - PkgService(#[from] PkgError), + // #[error(transparent)] + // PkgService(#[from] PkgError), #[error(transparent)] StandardModel(#[from] StandardModelError), #[error(transparent)] @@ -88,38 +86,37 @@ impl IntoResponse for ChangeSetError { } pub fn routes() -> Router { - Router::new() - .route( - "/list_open_change_sets", - get(list_open_change_sets::list_open_change_sets), - ) - .route("/remove_action", post(remove_action::remove_action)) - .route("/add_action", post(add_action::add_action)) - .route( - "/create_change_set", - post(create_change_set::create_change_set), - ) - .route("/get_change_set", get(get_change_set::get_change_set)) - .route("/get_stats", get(get_stats::get_stats)) - .route( - "/apply_change_set", - post(apply_change_set::apply_change_set), - ) - .route( - "/abandon_change_set", - post(abandon_change_set::abandon_change_set), - ) - .route( - "/update_selected_change_set", - post(update_selected_change_set::update_selected_change_set), - ) - .route( - "/begin_approval_process", - post(begin_approval_process::begin_approval_process), - ) - .route( - "/cancel_approval_process", - post(begin_approval_process::cancel_approval_process), - ) - 
.route("/merge_vote", post(merge_vote::merge_vote)) + Router::new().route( + "/list_open_change_sets", + get(list_open_change_sets::list_open_change_sets), + ) + // .route("/remove_action", post(remove_action::remove_action)) + // .route("/add_action", post(add_action::add_action)) + // .route( + // "/create_change_set", + // post(create_change_set::create_change_set), + // ) + // .route("/get_change_set", get(get_change_set::get_change_set)) + // .route("/get_stats", get(get_stats::get_stats)) + // .route( + // "/apply_change_set", + // post(apply_change_set::apply_change_set), + // ) + // .route( + // "/abandon_change_set", + // post(abandon_change_set::abandon_change_set), + // ) + // .route( + // "/update_selected_change_set", + // post(update_selected_change_set::update_selected_change_set), + // ) + // .route( + // "/begin_approval_process", + // post(begin_approval_process::begin_approval_process), + // ) + // .route( + // "/cancel_approval_process", + // post(begin_approval_process::cancel_approval_process), + // ) + // .route("/merge_vote", post(merge_vote::merge_vote)) } diff --git a/lib/sdf-server/src/server/service/change_set/list_open_change_sets.rs b/lib/sdf-server/src/server/service/change_set/list_open_change_sets.rs index 6b9289be3b..5b8470a3c1 100644 --- a/lib/sdf-server/src/server/service/change_set/list_open_change_sets.rs +++ b/lib/sdf-server/src/server/service/change_set/list_open_change_sets.rs @@ -1,16 +1,15 @@ use super::ChangeSetResult; use crate::server::extract::{AccessBuilder, HandlerContext}; use axum::Json; -use dal::{ - history_event, ActionId, ActionKind, ActionPrototypeId, ActorView, ChangeSet, ChangeSetPk, - ChangeSetStatus, ComponentId, Func, StandardModel, Visibility, -}; +use dal::{ActionPrototypeId, ChangeSet, ChangeSetPk, ChangeSetStatus, ComponentId}; use serde::{Deserialize, Serialize}; +use ulid::Ulid; #[derive(Deserialize, Serialize, Debug, PartialEq, Eq)] #[serde(rename_all = "camelCase")] pub struct ActionView { - pub 
id: ActionId, + // FIXME(nick,zack,jacob): drop ActionId since it does not exist yet for the graph switchover. + pub id: Ulid, pub action_prototype_id: ActionPrototypeId, pub name: String, pub component_id: ComponentId, @@ -36,51 +35,10 @@ pub async fn list_open_change_sets( let list = ChangeSet::list_open(&ctx).await?; let mut view = Vec::with_capacity(list.len()); for cs in list { - let ctx = - ctx.clone_with_new_visibility(Visibility::new(cs.pk, ctx.visibility().deleted_at)); - let a = cs.actions(&ctx).await?; - let mut actions = Vec::with_capacity(a.len()); - for action in a { - let mut display_name = None; - let prototype = action.prototype(&ctx).await?; - let func_details = Func::get_by_id(&ctx, &prototype.func_id()).await?; - if let Some(func) = func_details { - if func.display_name().is_some() { - display_name = func.display_name().map(|dname| dname.to_string()); - } - } + // let ctx = + // ctx.clone_with_new_visibility(Visibility::new(cs.pk, ctx.visibility().deleted_at)); - let mut actor_email: Option = None; - { - if let Some(created_at_user) = action.creation_user_id() { - let history_actor = history_event::HistoryActor::User(*created_at_user); - let actor = ActorView::from_history_actor(&ctx, history_actor).await?; - match actor { - ActorView::System { label } => actor_email = Some(label), - ActorView::User { label, email, .. 
} => { - if let Some(em) = email { - actor_email = Some(em) - } else { - actor_email = Some(label) - } - } - }; - } - } - - actions.push(ActionView { - id: *action.id(), - action_prototype_id: *prototype.id(), - name: display_name.unwrap_or_else(|| match prototype.kind() { - ActionKind::Create => "create".to_owned(), - ActionKind::Delete => "delete".to_owned(), - ActionKind::Other => "other".to_owned(), - ActionKind::Refresh => "refresh".to_owned(), - }), - component_id: *action.component_id(), - actor: actor_email, - }); - } + let mut actions = Vec::new(); view.push(ChangeSetView { pk: cs.pk, diff --git a/lib/sdf-server/src/server/service/diagram.rs b/lib/sdf-server/src/server/service/diagram.rs index 8905080cb1..1ad2e7268f 100644 --- a/lib/sdf-server/src/server/service/diagram.rs +++ b/lib/sdf-server/src/server/service/diagram.rs @@ -1,66 +1,43 @@ use axum::http::StatusCode; use axum::response::{IntoResponse, Response}; -use axum::routing::{get, post}; +use axum::routing::get; use axum::Json; use axum::Router; -use dal::provider::external::ExternalProviderError as DalExternalProviderError; -use dal::socket::{SocketError, SocketId}; +use dal::socket::SocketId; +use dal::workspace_snapshot::WorkspaceSnapshotError; +use dal::WsEventError; use dal::{ - node::NodeId, schema::variant::SchemaVariantError, ActionError, ActionPrototypeError, - AttributeValueError, ChangeSetError, ComponentError, ComponentType, - DiagramError as DalDiagramError, EdgeError, InternalProviderError, NodeError, NodeKind, - NodeMenuError, SchemaError as DalSchemaError, SchemaVariantId, StandardModelError, - TransactionsError, + node::NodeId, ChangeSetError, NodeKind, SchemaVariantId, StandardModelError, TransactionsError, }; -use dal::{AttributeReadContext, WsEventError}; use thiserror::Error; use crate::server::state::AppState; -use crate::service::schema::SchemaError; -mod connect_component_to_frame; -pub mod create_connection; -pub mod create_node; -pub mod delete_component; -pub mod 
delete_connection; -pub mod get_diagram; -pub mod get_node_add_menu; +// mod connect_component_to_frame; +// pub mod create_connection; +// pub mod create_node; +// pub mod delete_component; +// pub mod delete_connection; +// pub mod get_diagram; +// pub mod get_node_add_menu; pub mod list_schema_variants; -mod restore_component; -pub mod restore_connection; -pub mod set_node_position; +// mod restore_component; +// pub mod restore_connection; +// pub mod set_node_position; #[remain::sorted] #[derive(Debug, Error)] pub enum DiagramError { - #[error("action error: {0}")] - ActionError(#[from] ActionError), - #[error("action prototype error: {0}")] - ActionPrototype(#[from] ActionPrototypeError), - #[error("attribute value error: {0}")] - AttributeValue(#[from] AttributeValueError), - #[error("attribute value not found for context: {0:?}")] - AttributeValueNotFoundForContext(AttributeReadContext), #[error("changeset error: {0}")] ChangeSet(#[from] ChangeSetError), #[error("change set not found")] ChangeSetNotFound, - #[error("component error: {0}")] - Component(#[from] ComponentError), #[error("component not found")] ComponentNotFound, #[error(transparent)] ContextTransaction(#[from] TransactionsError), - #[error("dal schema error: {0}")] - DalSchema(#[from] DalSchemaError), - #[error("dal diagram error: {0}")] - DiagramError(#[from] DalDiagramError), - #[error(transparent)] - Edge(#[from] EdgeError), #[error("edge not found")] EdgeNotFound, - #[error("external provider error: {0}")] - ExternalProvider(#[from] DalExternalProviderError), #[error("external provider not found for socket id: {0}")] ExternalProviderNotFoundForSocket(SocketId), #[error("frame internal provider not found for schema variant id: {0}")] @@ -69,12 +46,8 @@ pub enum DiagramError { FrameSocketNotFound(SchemaVariantId), #[error("invalid header name {0}")] Hyper(#[from] hyper::http::Error), - #[error(transparent)] - InternalProvider(#[from] InternalProviderError), #[error("internal provider not 
found for socket id: {0}")] InternalProviderNotFoundForSocket(SocketId), - #[error("invalid component type ({0:?}) for frame")] - InvalidComponentTypeForFrame(ComponentType), #[error("invalid parent node kind {0:?}")] InvalidParentNode(NodeKind), #[error("invalid request")] @@ -83,10 +56,6 @@ pub enum DiagramError { InvalidSystem, #[error(transparent)] Nats(#[from] si_data_nats::NatsError), - #[error("node error: {0}")] - Node(#[from] NodeError), - #[error("node menu error: {0}")] - NodeMenu(#[from] NodeMenuError), #[error("node not found: {0}")] NodeNotFound(NodeId), #[error("not authorized")] @@ -97,22 +66,18 @@ pub enum DiagramError { Pg(#[from] si_data_pg::PgError), #[error(transparent)] PgPool(#[from] si_data_pg::PgPoolError), - #[error("schema error: {0}")] - Schema(#[from] SchemaError), #[error("schema not found")] SchemaNotFound, - #[error("schema variant error: {0}")] - SchemaVariant(#[from] SchemaVariantError), #[error("schema variant not found")] SchemaVariantNotFound, #[error("serde error: {0}")] Serde(#[from] serde_json::Error), - #[error("socket error: {0}")] - Socket(#[from] SocketError), #[error("socket not found")] SocketNotFound, #[error(transparent)] StandardModel(#[from] StandardModelError), + #[error(transparent)] + WorkspaceSnaphot(#[from] WorkspaceSnapshotError), #[error("ws event error: {0}")] WsEvent(#[from] WsEventError), } @@ -136,48 +101,48 @@ impl IntoResponse for DiagramError { pub fn routes() -> Router { Router::new() - .route("/get_diagram", get(get_diagram::get_diagram)) - .route( - "/get_node_add_menu", - post(get_node_add_menu::get_node_add_menu), - ) - .route("/create_node", post(create_node::create_node)) - .route( - "/set_node_position", - post(set_node_position::set_node_position), - ) - .route( - "/create_connection", - post(create_connection::create_connection), - ) - .route( - "/delete_connection", - post(delete_connection::delete_connection), - ) - .route( - "/restore_connection", - 
post(restore_connection::restore_connection), - ) - .route( - "/delete_component", - post(delete_component::delete_component), - ) - .route( - "/delete_components", - post(delete_component::delete_components), - ) - .route( - "/restore_component", - post(restore_component::restore_component), - ) - .route( - "/restore_components", - post(restore_component::restore_components), - ) - .route( - "/connect_component_to_frame", - post(connect_component_to_frame::connect_component_to_frame), - ) + // .route("/get_diagram", get(get_diagram::get_diagram)) + // .route( + // "/get_node_add_menu", + // post(get_node_add_menu::get_node_add_menu), + // ) + // .route("/create_node", post(create_node::create_node)) + // .route( + // "/set_node_position", + // post(set_node_position::set_node_position), + // ) + // .route( + // "/create_connection", + // post(create_connection::create_connection), + // ) + // .route( + // "/delete_connection", + // post(delete_connection::delete_connection), + // ) + // .route( + // "/restore_connection", + // post(restore_connection::restore_connection), + // ) + // .route( + // "/delete_component", + // post(delete_component::delete_component), + // ) + // .route( + // "/delete_components", + // post(delete_component::delete_components), + // ) + // .route( + // "/restore_component", + // post(restore_component::restore_component), + // ) + // .route( + // "/restore_components", + // post(restore_component::restore_components), + // ) + // .route( + // "/connect_component_to_frame", + // post(connect_component_to_frame::connect_component_to_frame), + // ) .route( "/list_schema_variants", get(list_schema_variants::list_schema_variants), diff --git a/lib/sdf-server/src/server/service/diagram/list_schema_variants.rs b/lib/sdf-server/src/server/service/diagram/list_schema_variants.rs index d1b81aa40c..4934a9f75f 100644 --- a/lib/sdf-server/src/server/service/diagram/list_schema_variants.rs +++ 
b/lib/sdf-server/src/server/service/diagram/list_schema_variants.rs @@ -1,12 +1,12 @@ use axum::extract::{Json, Query}; +use dal::socket::DiagramKind; use dal::{ - socket::{SocketEdgeKind, SocketId}, - DiagramKind, ExternalProvider, ExternalProviderId, InternalProvider, InternalProviderId, + socket::SocketId, ExternalProvider, ExternalProviderId, InternalProvider, InternalProviderId, SchemaId, SchemaVariant, SchemaVariantId, StandardModel, Visibility, }; use serde::{Deserialize, Serialize}; -use super::{DiagramError, DiagramResult}; +use super::DiagramResult; use crate::server::extract::{AccessBuilder, HandlerContext}; #[derive(Deserialize, Serialize, Debug)] @@ -71,82 +71,84 @@ pub async fn list_schema_variants( ) -> DiagramResult> { let ctx = builder.build(request_ctx.build(request.visibility)).await?; - let variants = SchemaVariant::list(&ctx).await?; - let external_provider_by_socket = ExternalProvider::by_socket(&ctx).await?; - let internal_provider_by_socket = InternalProvider::by_socket(&ctx).await?; - - let mut variants_view = Vec::with_capacity(variants.len()); - for variant in variants { - if variant.ui_hidden() { - continue; - } - - let schema = variant - .schema(&ctx) - .await? 
- .ok_or(DiagramError::SchemaNotFound)?; - - if schema.ui_hidden() { - continue; - } - let mut input_sockets = Vec::new(); - let mut output_sockets = Vec::new(); - - let sockets = variant.sockets(&ctx).await?; - - for socket in sockets { - match socket.edge_kind() { - SocketEdgeKind::ConfigurationOutput => { - let provider = - external_provider_by_socket - .get(socket.id()) - .ok_or_else(|| { - DiagramError::ExternalProviderNotFoundForSocket(*socket.id()) - })?; - output_sockets.push(OutputSocketView { - id: *socket.id(), - name: socket.name().to_owned(), - diagram_kind: *socket.diagram_kind(), - provider: OutputProviderView { - id: *provider.id(), - ty: socket.name().to_owned(), - }, - }) - } - SocketEdgeKind::ConfigurationInput => { - let provider = - internal_provider_by_socket - .get(socket.id()) - .ok_or_else(|| { - DiagramError::InternalProviderNotFoundForSocket(*socket.id()) - })?; - input_sockets.push(InputSocketView { - id: *socket.id(), - name: socket.name().to_owned(), - diagram_kind: *socket.diagram_kind(), - provider: InputProviderView { - id: *provider.id(), - ty: socket.name().to_owned(), - }, - }) - } - } - } - - variants_view.push(SchemaVariantView { - id: *variant.id(), - builtin: variant.is_builtin(&ctx).await?, - name: variant.name().to_owned(), - schema_id: *schema.id(), - schema_name: schema.name().to_owned(), - input_sockets, - color: variant - .color(&ctx) - .await? - .unwrap_or_else(|| "00b0bc".to_owned()), - output_sockets, - }); - } + // let variants = SchemaVariant::list(&ctx).await?; + // let external_provider_by_socket = ExternalProvider::by_socket(&ctx).await?; + // let internal_provider_by_socket = InternalProvider::by_socket(&ctx).await?; + + let variants_view: Vec = vec![]; + + // let mut variants_view = Vec::with_capacity(variants.len()); + // for variant in variants { + // if variant.ui_hidden() { + // continue; + // } + + // let schema = variant + // .schema(&ctx) + // .await? 
+ // .ok_or(DiagramError::SchemaNotFound)?; + + // if schema.ui_hidden() { + // continue; + // } + // let mut input_sockets = Vec::new(); + // let mut output_sockets = Vec::new(); + + // let sockets = variant.sockets(&ctx).await?; + + // for socket in sockets { + // match socket.edge_kind() { + // SocketEdgeKind::ConfigurationOutput => { + // let provider = + // external_provider_by_socket + // .get(socket.id()) + // .ok_or_else(|| { + // DiagramError::ExternalProviderNotFoundForSocket(*socket.id()) + // })?; + // output_sockets.push(OutputSocketView { + // id: *socket.id(), + // name: socket.name().to_owned(), + // diagram_kind: *socket.diagram_kind(), + // provider: OutputProviderView { + // id: *provider.id(), + // ty: socket.name().to_owned(), + // }, + // }) + // } + // SocketEdgeKind::ConfigurationInput => { + // let provider = + // internal_provider_by_socket + // .get(socket.id()) + // .ok_or_else(|| { + // DiagramError::InternalProviderNotFoundForSocket(*socket.id()) + // })?; + // input_sockets.push(InputSocketView { + // id: *socket.id(), + // name: socket.name().to_owned(), + // diagram_kind: *socket.diagram_kind(), + // provider: InputProviderView { + // id: *provider.id(), + // ty: socket.name().to_owned(), + // }, + // }) + // } + // } + // } + + // variants_view.push(SchemaVariantView { + // id: *variant.id(), + // builtin: variant.is_builtin(&ctx).await?, + // name: variant.name().to_owned(), + // schema_id: *schema.id(), + // schema_name: schema.name().to_owned(), + // color: variant + // .color(&ctx) + // .await? 
+ // .unwrap_or_else(|| "00b0bc".to_owned()), + // input_sockets, + // output_sockets, + // }); + // } Ok(Json(variants_view)) } diff --git a/lib/sdf-server/src/server/service/signup.rs b/lib/sdf-server/src/server/service/signup.rs deleted file mode 100644 index ff9804f7e1..0000000000 --- a/lib/sdf-server/src/server/service/signup.rs +++ /dev/null @@ -1,64 +0,0 @@ -use axum::{ - http::StatusCode, - response::{IntoResponse, Response}, - routing::post, - Json, Router, -}; -use thiserror::Error; - -use dal::{ - ComponentError, NodeError, SchemaError, StandardModelError, TransactionsError, WorkspaceError, -}; - -pub mod create_account; - -#[allow(clippy::large_enum_variant)] -#[remain::sorted] -#[derive(Debug, Error)] -pub enum SignupError { - #[error("component error: {0}")] - Component(#[from] ComponentError), - #[error(transparent)] - ContextTransaction(#[from] TransactionsError), - #[error("invalid signup secret")] - InvalidSignupSecret, - #[error(transparent)] - Nats(#[from] si_data_nats::NatsError), - #[error("Node error: {0}")] - Node(#[from] NodeError), - #[error(transparent)] - Pg(#[from] si_data_pg::PgError), - #[error("Schema error: {0}")] - Schema(#[from] SchemaError), - #[error("StandardModel error: {0}")] - StandardModel(#[from] StandardModelError), - #[error(transparent)] - Workspace(#[from] WorkspaceError), -} - -pub type SignupResult = std::result::Result; - -impl IntoResponse for SignupError { - fn into_response(self) -> Response { - let (status, error_message) = match self { - SignupError::InvalidSignupSecret => { - (StatusCode::BAD_REQUEST, "signup failed".to_string()) - } - err => (StatusCode::INTERNAL_SERVER_ERROR, err.to_string()), - }; - - let body = Json(serde_json::json!({ - "error": { - "message": error_message, - "code": 42, - "statusCode": status.as_u16(), - }, - })); - - (status, body).into_response() - } -} - -pub fn routes() -> Router { - Router::new().route("/create_account", post(create_account::create_account)) -} diff --git 
a/lib/sdf-server/src/server/service/signup/create_account.rs b/lib/sdf-server/src/server/service/signup/create_account.rs deleted file mode 100644 index b11d85ad30..0000000000 --- a/lib/sdf-server/src/server/service/signup/create_account.rs +++ /dev/null @@ -1,53 +0,0 @@ -use axum::Json; -use serde::{Deserialize, Serialize}; - -use dal::Workspace; -use telemetry::prelude::*; - -use crate::{ - server::extract::{HandlerContext, SignupSecret}, - service::signup::SignupError, -}; - -use super::SignupResult; - -#[derive(Debug, Serialize, Deserialize)] -#[serde(rename_all = "camelCase")] -pub struct CreateAccountRequest { - pub workspace_name: String, - pub user_name: String, - pub user_email: String, - pub user_password: String, - pub signup_secret: String, -} - -#[derive(Debug, Serialize, Deserialize)] -#[serde(rename_all = "camelCase")] -pub struct CreateAccountResponse { - pub success: bool, -} - -pub async fn create_account( - HandlerContext(builder): HandlerContext, - SignupSecret(signup_secret): SignupSecret, - Json(request): Json, -) -> SignupResult> { - if signup_secret.as_str() != request.signup_secret.as_str() { - warn!("invalid signup secret provided when signing up new workspace"); - return Err(SignupError::InvalidSignupSecret); - } - - let mut ctx = builder.build_default().await?; - - let _nw = Workspace::signup( - &mut ctx, - &request.workspace_name, - &request.user_name, - &request.user_email, - ) - .await?; - - ctx.commit().await?; - - Ok(Json(CreateAccountResponse { success: true })) -} diff --git a/lib/sdf-server/tests/api.rs b/lib/sdf-server/tests/api.rs index 43a8731c94..80a44898d7 100644 --- a/lib/sdf-server/tests/api.rs +++ b/lib/sdf-server/tests/api.rs @@ -2,4 +2,4 @@ const TEST_PG_DBNAME: &str = "si_test_sdf_server"; -mod service_tests; +// mod service_tests; diff --git a/third-party/rust/BUCK b/third-party/rust/BUCK index ccc10de0b9..a77fa7fb59 100644 --- a/third-party/rust/BUCK +++ b/third-party/rust/BUCK @@ -63,7 +63,7 @@ cargo.rust_library( 
crate_root = "addr2line-0.21.0.crate/src/lib.rs", edition = "2018", visibility = [], - deps = [":gimli-0.28.1"], + deps = [":gimli-0.28.0"], ) http_archive( @@ -84,18 +84,18 @@ cargo.rust_library( ) http_archive( - name = "ahash-0.7.7.crate", - sha256 = "5a824f2aa7e75a0c98c5a504fceb80649e9c35265d44525b5f94de4771a395cd", - strip_prefix = "ahash-0.7.7", - urls = ["https://crates.io/api/v1/crates/ahash/0.7.7/download"], + name = "ahash-0.7.6.crate", + sha256 = "fcb51a0695d8f838b1ee009b3fbf66bda078cd64590202a864a8f3e8c4315c47", + strip_prefix = "ahash-0.7.6", + urls = ["https://crates.io/api/v1/crates/ahash/0.7.6/download"], visibility = [], ) cargo.rust_library( - name = "ahash-0.7.7", - srcs = [":ahash-0.7.7.crate"], + name = "ahash-0.7.6", + srcs = [":ahash-0.7.6.crate"], crate = "ahash", - crate_root = "ahash-0.7.7.crate/src/lib.rs", + crate_root = "ahash-0.7.6.crate/src/lib.rs", edition = "2018", features = [ "default", @@ -104,38 +104,38 @@ cargo.rust_library( platform = { "linux-arm64": dict( deps = [ - ":getrandom-0.2.11", - ":once_cell-1.19.0", + ":getrandom-0.2.10", + ":once_cell-1.18.0", ], ), "linux-x86_64": dict( deps = [ - ":getrandom-0.2.11", - ":once_cell-1.19.0", + ":getrandom-0.2.10", + ":once_cell-1.18.0", ], ), "macos-arm64": dict( deps = [ - ":getrandom-0.2.11", - ":once_cell-1.19.0", + ":getrandom-0.2.10", + ":once_cell-1.18.0", ], ), "macos-x86_64": dict( deps = [ - ":getrandom-0.2.11", - ":once_cell-1.19.0", + ":getrandom-0.2.10", + ":once_cell-1.18.0", ], ), "windows-gnu": dict( deps = [ - ":getrandom-0.2.11", - ":once_cell-1.19.0", + ":getrandom-0.2.10", + ":once_cell-1.18.0", ], ), "windows-msvc": dict( deps = [ - ":getrandom-0.2.11", - ":once_cell-1.19.0", + ":getrandom-0.2.10", + ":once_cell-1.18.0", ], ), }, @@ -144,60 +144,59 @@ cargo.rust_library( ) http_archive( - name = "ahash-0.8.6.crate", - sha256 = "91429305e9f0a25f6205c5b8e0d2db09e0708a7a6df0f42212bb56c32c8ac97a", - strip_prefix = "ahash-0.8.6", - urls = 
["https://crates.io/api/v1/crates/ahash/0.8.6/download"], + name = "ahash-0.8.3.crate", + sha256 = "2c99f64d1e06488f620f932677e24bc6e2897582980441ae90a671415bd7ec2f", + strip_prefix = "ahash-0.8.3", + urls = ["https://crates.io/api/v1/crates/ahash/0.8.3/download"], visibility = [], ) cargo.rust_library( - name = "ahash-0.8.6", - srcs = [":ahash-0.8.6.crate"], + name = "ahash-0.8.3", + srcs = [":ahash-0.8.3.crate"], crate = "ahash", - crate_root = "ahash-0.8.6.crate/src/lib.rs", + crate_root = "ahash-0.8.3.crate/src/lib.rs", edition = "2018", platform = { "linux-arm64": dict( - deps = [":once_cell-1.19.0"], + deps = [":once_cell-1.18.0"], ), "linux-x86_64": dict( - deps = [":once_cell-1.19.0"], + deps = [":once_cell-1.18.0"], ), "macos-arm64": dict( - deps = [":once_cell-1.19.0"], + deps = [":once_cell-1.18.0"], ), "macos-x86_64": dict( - deps = [":once_cell-1.19.0"], + deps = [":once_cell-1.18.0"], ), "windows-gnu": dict( - deps = [":once_cell-1.19.0"], + deps = [":once_cell-1.18.0"], ), "windows-msvc": dict( - deps = [":once_cell-1.19.0"], + deps = [":once_cell-1.18.0"], ), }, visibility = [], deps = [ ":cfg-if-1.0.0", ":version_check-0.9.4", - ":zerocopy-0.7.30", ], ) http_archive( - name = "aho-corasick-1.1.2.crate", - sha256 = "b2969dcb958b36655471fc61f7e416fa76033bdd4bfed0678d8fee1e2d07a1f0", - strip_prefix = "aho-corasick-1.1.2", - urls = ["https://crates.io/api/v1/crates/aho-corasick/1.1.2/download"], + name = "aho-corasick-1.0.4.crate", + sha256 = "6748e8def348ed4d14996fa801f4122cd763fff530258cdc03f64b25f89d3a5a", + strip_prefix = "aho-corasick-1.0.4", + urls = ["https://crates.io/api/v1/crates/aho-corasick/1.0.4/download"], visibility = [], ) cargo.rust_library( - name = "aho-corasick-1.1.2", - srcs = [":aho-corasick-1.1.2.crate"], + name = "aho-corasick-1.0.4", + srcs = [":aho-corasick-1.0.4.crate"], crate = "aho_corasick", - crate_root = "aho-corasick-1.1.2.crate/src/lib.rs", + crate_root = "aho-corasick-1.0.4.crate/src/lib.rs", edition = "2021", 
features = [ "default", @@ -205,7 +204,7 @@ cargo.rust_library( "std", ], visibility = [], - deps = [":memchr-2.6.4"], + deps = [":memchr-2.5.0"], ) http_archive( @@ -248,18 +247,18 @@ cargo.rust_library( ) http_archive( - name = "anstream-0.6.5.crate", - sha256 = "d664a92ecae85fd0a7392615844904654d1d5f5514837f471ddef4a057aba1b6", - strip_prefix = "anstream-0.6.5", - urls = ["https://crates.io/api/v1/crates/anstream/0.6.5/download"], + name = "anstream-0.3.2.crate", + sha256 = "0ca84f3628370c59db74ee214b3263d58f9aadd9b4fe7e711fd87dc452b7f163", + strip_prefix = "anstream-0.3.2", + urls = ["https://crates.io/api/v1/crates/anstream/0.3.2/download"], visibility = [], ) cargo.rust_library( - name = "anstream-0.6.5", - srcs = [":anstream-0.6.5.crate"], + name = "anstream-0.3.2", + srcs = [":anstream-0.3.2.crate"], crate = "anstream", - crate_root = "anstream-0.6.5.crate/src/lib.rs", + crate_root = "anstream-0.3.2.crate/src/lib.rs", edition = "2021", features = [ "auto", @@ -268,35 +267,36 @@ cargo.rust_library( ], platform = { "windows-gnu": dict( - deps = [":anstyle-wincon-3.0.2"], + deps = [":anstyle-wincon-1.0.2"], ), "windows-msvc": dict( - deps = [":anstyle-wincon-3.0.2"], + deps = [":anstyle-wincon-1.0.2"], ), }, visibility = [], deps = [ - ":anstyle-1.0.4", - ":anstyle-parse-0.2.3", - ":anstyle-query-1.0.2", + ":anstyle-1.0.2", + ":anstyle-parse-0.2.1", + ":anstyle-query-1.0.0", ":colorchoice-1.0.0", + ":is-terminal-0.4.9", ":utf8parse-0.2.1", ], ) http_archive( - name = "anstyle-1.0.4.crate", - sha256 = "7079075b41f533b8c61d2a4d073c4676e1f8b249ff94a393b0595db304e0dd87", - strip_prefix = "anstyle-1.0.4", - urls = ["https://crates.io/api/v1/crates/anstyle/1.0.4/download"], + name = "anstyle-1.0.2.crate", + sha256 = "15c4c2c83f81532e5845a733998b6971faca23490340a418e9b72a3ec9de12ea", + strip_prefix = "anstyle-1.0.2", + urls = ["https://crates.io/api/v1/crates/anstyle/1.0.2/download"], visibility = [], ) cargo.rust_library( - name = "anstyle-1.0.4", - srcs = 
[":anstyle-1.0.4.crate"], + name = "anstyle-1.0.2", + srcs = [":anstyle-1.0.2.crate"], crate = "anstyle", - crate_root = "anstyle-1.0.4.crate/src/lib.rs", + crate_root = "anstyle-1.0.2.crate/src/lib.rs", edition = "2021", features = [ "default", @@ -306,18 +306,18 @@ cargo.rust_library( ) http_archive( - name = "anstyle-parse-0.2.3.crate", - sha256 = "c75ac65da39e5fe5ab759307499ddad880d724eed2f6ce5b5e8a26f4f387928c", - strip_prefix = "anstyle-parse-0.2.3", - urls = ["https://crates.io/api/v1/crates/anstyle-parse/0.2.3/download"], + name = "anstyle-parse-0.2.1.crate", + sha256 = "938874ff5980b03a87c5524b3ae5b59cf99b1d6bc836848df7bc5ada9643c333", + strip_prefix = "anstyle-parse-0.2.1", + urls = ["https://crates.io/api/v1/crates/anstyle-parse/0.2.1/download"], visibility = [], ) cargo.rust_library( - name = "anstyle-parse-0.2.3", - srcs = [":anstyle-parse-0.2.3.crate"], + name = "anstyle-parse-0.2.1", + srcs = [":anstyle-parse-0.2.1.crate"], crate = "anstyle_parse", - crate_root = "anstyle-parse-0.2.3.crate/src/lib.rs", + crate_root = "anstyle-parse-0.2.1.crate/src/lib.rs", edition = "2021", features = [ "default", @@ -328,54 +328,54 @@ cargo.rust_library( ) http_archive( - name = "anstyle-query-1.0.2.crate", - sha256 = "e28923312444cdd728e4738b3f9c9cac739500909bb3d3c94b43551b16517648", - strip_prefix = "anstyle-query-1.0.2", - urls = ["https://crates.io/api/v1/crates/anstyle-query/1.0.2/download"], + name = "anstyle-query-1.0.0.crate", + sha256 = "5ca11d4be1bab0c8bc8734a9aa7bf4ee8316d462a08c6ac5052f888fef5b494b", + strip_prefix = "anstyle-query-1.0.0", + urls = ["https://crates.io/api/v1/crates/anstyle-query/1.0.0/download"], visibility = [], ) cargo.rust_library( - name = "anstyle-query-1.0.2", - srcs = [":anstyle-query-1.0.2.crate"], + name = "anstyle-query-1.0.0", + srcs = [":anstyle-query-1.0.0.crate"], crate = "anstyle_query", - crate_root = "anstyle-query-1.0.2.crate/src/lib.rs", + crate_root = "anstyle-query-1.0.0.crate/src/lib.rs", edition = "2021", platform 
= { "windows-gnu": dict( - deps = [":windows-sys-0.52.0"], + deps = [":windows-sys-0.48.0"], ), "windows-msvc": dict( - deps = [":windows-sys-0.52.0"], + deps = [":windows-sys-0.48.0"], ), }, visibility = [], ) http_archive( - name = "anstyle-wincon-3.0.2.crate", - sha256 = "1cd54b81ec8d6180e24654d0b371ad22fc3dd083b6ff8ba325b72e00c87660a7", - strip_prefix = "anstyle-wincon-3.0.2", - urls = ["https://crates.io/api/v1/crates/anstyle-wincon/3.0.2/download"], + name = "anstyle-wincon-1.0.2.crate", + sha256 = "c677ab05e09154296dd37acecd46420c17b9713e8366facafa8fc0885167cf4c", + strip_prefix = "anstyle-wincon-1.0.2", + urls = ["https://crates.io/api/v1/crates/anstyle-wincon/1.0.2/download"], visibility = [], ) cargo.rust_library( - name = "anstyle-wincon-3.0.2", - srcs = [":anstyle-wincon-3.0.2.crate"], + name = "anstyle-wincon-1.0.2", + srcs = [":anstyle-wincon-1.0.2.crate"], crate = "anstyle_wincon", - crate_root = "anstyle-wincon-3.0.2.crate/src/lib.rs", + crate_root = "anstyle-wincon-1.0.2.crate/src/lib.rs", edition = "2021", platform = { "windows-gnu": dict( - deps = [":windows-sys-0.52.0"], + deps = [":windows-sys-0.48.0"], ), "windows-msvc": dict( - deps = [":windows-sys-0.52.0"], + deps = [":windows-sys-0.48.0"], ), }, visibility = [], - deps = [":anstyle-1.0.4"], + deps = [":anstyle-1.0.2"], ) http_archive( @@ -399,6 +399,23 @@ cargo.rust_library( visibility = [], ) +http_archive( + name = "array-init-2.1.0.crate", + sha256 = "3d62b7694a562cdf5a74227903507c56ab2cc8bdd1f781ed5cb4cf9c9f810bfc", + strip_prefix = "array-init-2.1.0", + urls = ["https://crates.io/api/v1/crates/array-init/2.1.0/download"], + visibility = [], +) + +cargo.rust_library( + name = "array-init-2.1.0", + srcs = [":array-init-2.1.0.crate"], + crate = "array_init", + crate_root = "array-init-2.1.0.crate/src/lib.rs", + edition = "2018", + visibility = [], +) + http_archive( name = "arrayref-0.3.7.crate", sha256 = "6b4930d2cb77ce62f89ee5d5289b4ac049559b1c45539271f5ed4fdc7db34545", @@ -467,61 
+484,61 @@ cargo.rust_library( }, visibility = [], deps = [ - ":base64-0.21.5", - ":bytes-1.5.0", - ":futures-0.3.29", - ":http-0.2.11", - ":itoa-1.0.10", - ":memchr-2.6.4", - ":nkeys-0.3.2", + ":base64-0.21.2", + ":bytes-1.4.0", + ":futures-0.3.28", + ":http-0.2.9", + ":itoa-1.0.9", + ":memchr-2.5.0", + ":nkeys-0.3.1", ":nuid-0.3.2", - ":once_cell-1.19.0", + ":once_cell-1.18.0", ":rand-0.8.5", - ":regex-1.10.2", + ":regex-1.9.3", ":ring-0.16.20", ":rustls-native-certs-0.6.3", - ":rustls-pemfile-1.0.4", - ":rustls-webpki-0.101.7", - ":serde-1.0.193", - ":serde_json-1.0.108", + ":rustls-pemfile-1.0.3", + ":rustls-webpki-0.101.4", + ":serde-1.0.186", + ":serde_json-1.0.105", ":serde_nanos-0.1.3", - ":serde_repr-0.1.17", - ":thiserror-1.0.50", - ":time-0.3.30", - ":tokio-1.35.0", + ":serde_repr-0.1.16", + ":thiserror-1.0.47", + ":time-0.3.27", + ":tokio-1.32.0", ":tokio-retry-0.3.0", ":tokio-rustls-0.24.1", - ":tracing-0.1.40", - ":url-2.5.0", + ":tracing-0.1.37", + ":url-2.4.0", ], ) alias( name = "async-recursion", - actual = ":async-recursion-1.0.5", + actual = ":async-recursion-1.0.4", visibility = ["PUBLIC"], ) http_archive( - name = "async-recursion-1.0.5.crate", - sha256 = "5fd55a5ba1179988837d24ab4c7cc8ed6efdeff578ede0416b4225a5fca35bd0", - strip_prefix = "async-recursion-1.0.5", - urls = ["https://crates.io/api/v1/crates/async-recursion/1.0.5/download"], + name = "async-recursion-1.0.4.crate", + sha256 = "0e97ce7de6cf12de5d7226c73f5ba9811622f4db3a5b91b55c53e987e5f91cba", + strip_prefix = "async-recursion-1.0.4", + urls = ["https://crates.io/api/v1/crates/async-recursion/1.0.4/download"], visibility = [], ) cargo.rust_library( - name = "async-recursion-1.0.5", - srcs = [":async-recursion-1.0.5.crate"], + name = "async-recursion-1.0.4", + srcs = [":async-recursion-1.0.4.crate"], crate = "async_recursion", - crate_root = "async-recursion-1.0.5.crate/src/lib.rs", + crate_root = "async-recursion-1.0.4.crate/src/lib.rs", edition = "2018", proc_macro = True, 
visibility = [], deps = [ - ":proc-macro2-1.0.70", + ":proc-macro2-1.0.66", ":quote-1.0.33", - ":syn-2.0.40", + ":syn-2.0.29", ], ) @@ -543,7 +560,7 @@ cargo.rust_library( deps = [ ":async-stream-impl-0.3.5", ":futures-core-0.3.29", - ":pin-project-lite-0.2.13", + ":pin-project-lite-0.2.12", ], ) @@ -564,38 +581,38 @@ cargo.rust_library( proc_macro = True, visibility = [], deps = [ - ":proc-macro2-1.0.70", + ":proc-macro2-1.0.66", ":quote-1.0.33", - ":syn-2.0.40", + ":syn-2.0.29", ], ) alias( name = "async-trait", - actual = ":async-trait-0.1.74", + actual = ":async-trait-0.1.73", visibility = ["PUBLIC"], ) http_archive( - name = "async-trait-0.1.74.crate", - sha256 = "a66537f1bb974b254c98ed142ff995236e81b9d0fe4db0575f46612cb15eb0f9", - strip_prefix = "async-trait-0.1.74", - urls = ["https://crates.io/api/v1/crates/async-trait/0.1.74/download"], + name = "async-trait-0.1.73.crate", + sha256 = "bc00ceb34980c03614e35a3a4e218276a0a824e911d07651cd0d858a51e8c0f0", + strip_prefix = "async-trait-0.1.73", + urls = ["https://crates.io/api/v1/crates/async-trait/0.1.73/download"], visibility = [], ) cargo.rust_library( - name = "async-trait-0.1.74", - srcs = [":async-trait-0.1.74.crate"], + name = "async-trait-0.1.73", + srcs = [":async-trait-0.1.73.crate"], crate = "async_trait", - crate_root = "async-trait-0.1.74.crate/src/lib.rs", + crate_root = "async-trait-0.1.73.crate/src/lib.rs", edition = "2021", proc_macro = True, visibility = [], deps = [ - ":proc-macro2-1.0.70", + ":proc-macro2-1.0.66", ":quote-1.0.33", - ":syn-2.0.40", + ":syn-2.0.29", ], ) @@ -616,11 +633,11 @@ cargo.rust_library( features = ["default"], visibility = [], deps = [ - ":bytes-1.5.0", + ":bytes-1.4.0", ":futures-sink-0.3.29", ":futures-util-0.3.29", - ":memchr-2.6.4", - ":pin-project-lite-0.2.13", + ":memchr-2.5.0", + ":pin-project-lite-0.2.12", ], ) @@ -639,7 +656,7 @@ cargo.rust_library( crate_root = "atoi-1.0.0.crate/src/lib.rs", edition = "2021", visibility = [], - deps = [":num-traits-0.2.17"], 
+ deps = [":num-traits-0.2.16"], ) http_archive( @@ -695,16 +712,16 @@ cargo.rust_library( "webpki-roots", ], named_deps = { - "rustls_opt_dep": ":rustls-0.20.9", + "rustls_opt_dep": ":rustls-0.20.8", }, visibility = [], deps = [ - ":http-0.2.11", + ":http-0.2.9", ":log-0.4.20", - ":serde-1.0.193", - ":serde_json-1.0.108", - ":url-2.5.0", - ":webpki-0.22.4", + ":serde-1.0.186", + ":serde_json-1.0.105", + ":url-2.4.0", + ":webpki-0.22.0", ":webpki-roots-0.22.6", ], ) @@ -752,29 +769,29 @@ cargo.rust_library( ":log-0.4.20", ":quick-xml-0.26.0", ":rust-ini-0.18.0", - ":serde-1.0.193", - ":thiserror-1.0.50", - ":time-0.3.30", - ":url-2.5.0", + ":serde-1.0.186", + ":thiserror-1.0.47", + ":time-0.3.27", + ":url-2.4.0", ], ) http_archive( - name = "aws-region-0.25.4.crate", - sha256 = "42fed2b9fca70f2908268d057a607f2a906f47edbf856ea8587de9038d264e22", - strip_prefix = "aws-region-0.25.4", - urls = ["https://crates.io/api/v1/crates/aws-region/0.25.4/download"], + name = "aws-region-0.25.3.crate", + sha256 = "056557a61427d0e5ba29dd931031c8ffed4ee7a550e7cd55692a9d8deb0a9dba", + strip_prefix = "aws-region-0.25.3", + urls = ["https://crates.io/api/v1/crates/aws-region/0.25.3/download"], visibility = [], ) cargo.rust_library( - name = "aws-region-0.25.4", - srcs = [":aws-region-0.25.4.crate"], + name = "aws-region-0.25.3", + srcs = [":aws-region-0.25.3.crate"], crate = "awsregion", - crate_root = "aws-region-0.25.4.crate/src/lib.rs", + crate_root = "aws-region-0.25.3.crate/src/lib.rs", edition = "2018", visibility = [], - deps = [":thiserror-1.0.50"], + deps = [":thiserror-1.0.47"], ) alias( @@ -813,31 +830,31 @@ cargo.rust_library( ], visibility = [], deps = [ - ":async-trait-0.1.74", + ":async-trait-0.1.73", ":axum-core-0.3.4", ":axum-macros-0.3.8", - ":base64-0.21.5", + ":base64-0.21.2", ":bitflags-1.3.2", - ":bytes-1.5.0", + ":bytes-1.4.0", ":futures-util-0.3.29", - ":http-0.2.11", - ":http-body-0.4.6", + ":http-0.2.9", + ":http-body-0.4.5", ":hyper-0.14.27", - 
":itoa-1.0.10", - ":matchit-0.7.3", - ":memchr-2.6.4", + ":itoa-1.0.9", + ":matchit-0.7.2", + ":memchr-2.5.0", ":mime-0.3.17", ":multer-2.1.0", - ":percent-encoding-2.3.1", - ":pin-project-lite-0.2.13", - ":serde-1.0.193", - ":serde_json-1.0.108", + ":percent-encoding-2.3.0", + ":pin-project-lite-0.2.12", + ":serde-1.0.186", + ":serde_json-1.0.105", ":serde_path_to_error-0.1.14", ":serde_urlencoded-0.7.1", - ":sha1-0.10.6", + ":sha1-0.10.5", ":sync_wrapper-0.1.2", - ":tokio-1.35.0", - ":tokio-tungstenite-0.20.1", + ":tokio-1.32.0", + ":tokio-tungstenite-0.20.0", ":tower-0.4.13", ":tower-layer-0.3.2", ":tower-service-0.3.2", @@ -860,11 +877,11 @@ cargo.rust_library( edition = "2021", visibility = [], deps = [ - ":async-trait-0.1.74", - ":bytes-1.5.0", + ":async-trait-0.1.73", + ":bytes-1.4.0", ":futures-util-0.3.29", - ":http-0.2.11", - ":http-body-0.4.6", + ":http-0.2.9", + ":http-body-0.4.5", ":mime-0.3.17", ":tower-layer-0.3.2", ":tower-service-0.3.2", @@ -890,9 +907,9 @@ cargo.rust_library( visibility = [], deps = [ ":heck-0.4.1", - ":proc-macro2-1.0.70", + ":proc-macro2-1.0.66", ":quote-1.0.33", - ":syn-2.0.40", + ":syn-2.0.29", ], ) @@ -919,41 +936,41 @@ cargo.rust_library( "linux-arm64": dict( deps = [ ":addr2line-0.21.0", - ":libc-0.2.151", + ":libc-0.2.147", ":miniz_oxide-0.7.1", - ":object-0.32.1", + ":object-0.32.0", ], ), "linux-x86_64": dict( deps = [ ":addr2line-0.21.0", - ":libc-0.2.151", + ":libc-0.2.147", ":miniz_oxide-0.7.1", - ":object-0.32.1", + ":object-0.32.0", ], ), "macos-arm64": dict( deps = [ ":addr2line-0.21.0", - ":libc-0.2.151", + ":libc-0.2.147", ":miniz_oxide-0.7.1", - ":object-0.32.1", + ":object-0.32.0", ], ), "macos-x86_64": dict( deps = [ ":addr2line-0.21.0", - ":libc-0.2.151", + ":libc-0.2.147", ":miniz_oxide-0.7.1", - ":object-0.32.1", + ":object-0.32.0", ], ), "windows-gnu": dict( deps = [ ":addr2line-0.21.0", - ":libc-0.2.151", + ":libc-0.2.147", ":miniz_oxide-0.7.1", - ":object-0.32.1", + ":object-0.32.0", ], ), }, @@ -983,7 
+1000,7 @@ cargo.rust_library( deps = [ ":heck-0.3.3", ":proc-macro-error-1.0.4", - ":proc-macro2-1.0.70", + ":proc-macro2-1.0.66", ":quote-1.0.33", ":syn-1.0.109", ], @@ -1031,24 +1048,24 @@ cargo.rust_library( alias( name = "base64", - actual = ":base64-0.21.5", + actual = ":base64-0.21.2", visibility = ["PUBLIC"], ) http_archive( - name = "base64-0.21.5.crate", - sha256 = "35636a1494ede3b646cc98f74f8e62c773a38a659ebc777a2cf26b9b74171df9", - strip_prefix = "base64-0.21.5", - urls = ["https://crates.io/api/v1/crates/base64/0.21.5/download"], + name = "base64-0.21.2.crate", + sha256 = "604178f6c5c21f02dc555784810edfb88d34ac2c73b2eae109655649ee73ce3d", + strip_prefix = "base64-0.21.2", + urls = ["https://crates.io/api/v1/crates/base64/0.21.2/download"], visibility = [], ) cargo.rust_library( - name = "base64-0.21.5", - srcs = [":base64-0.21.5.crate"], + name = "base64-0.21.2", + srcs = [":base64-0.21.2.crate"], crate = "base64", - crate_root = "base64-0.21.5.crate/src/lib.rs", - edition = "2018", + crate_root = "base64-0.21.2.crate/src/lib.rs", + edition = "2021", features = [ "alloc", "default", @@ -1093,7 +1110,7 @@ cargo.rust_library( deps = [ ":num-bigint-0.4.4", ":num-integer-0.1.45", - ":num-traits-0.2.17", + ":num-traits-0.2.16", ], ) @@ -1133,18 +1150,18 @@ cargo.rust_library( ) http_archive( - name = "bitflags-2.4.1.crate", - sha256 = "327762f6e5a765692301e5bb513e0d9fef63be86bbc14528052b1cd3e6f03e07", - strip_prefix = "bitflags-2.4.1", - urls = ["https://crates.io/api/v1/crates/bitflags/2.4.1/download"], + name = "bitflags-2.4.0.crate", + sha256 = "b4682ae6287fcf752ecaabbfcc7b6f9b72aa33933dc23a554d853aea8eea8635", + strip_prefix = "bitflags-2.4.0", + urls = ["https://crates.io/api/v1/crates/bitflags/2.4.0/download"], visibility = [], ) cargo.rust_library( - name = "bitflags-2.4.1", - srcs = [":bitflags-2.4.1.crate"], + name = "bitflags-2.4.0", + srcs = [":bitflags-2.4.0.crate"], crate = "bitflags", - crate_root = "bitflags-2.4.1.crate/src/lib.rs", + 
crate_root = "bitflags-2.4.0.crate/src/lib.rs", edition = "2021", features = ["std"], visibility = [], @@ -1176,14 +1193,14 @@ cargo.rust_library( alias( name = "blake3", - actual = ":blake3-1.5.0", + actual = ":blake3-1.4.1", visibility = ["PUBLIC"], ) http_archive( - name = "blake3-1.5.0.crate", - sha256 = "0231f06152bf547e9c2b5194f247cd97aacf6dcd8b15d8e5ec0663f64580da87", - strip_prefix = "blake3-1.5.0", + name = "blake3-1.4.1.crate", + sha256 = "199c42ab6972d92c9f8995f086273d25c42fc0f7b2a1fcefba465c1352d25ba5", + strip_prefix = "blake3-1.4.1", sub_targets = [ "c/blake3.c", "c/blake3.h", @@ -1204,24 +1221,25 @@ http_archive( "c/blake3_sse41_x86-64_windows_gnu.S", "c/blake3_sse41_x86-64_windows_msvc.asm", ], - urls = ["https://crates.io/api/v1/crates/blake3/1.5.0/download"], + urls = ["https://crates.io/api/v1/crates/blake3/1.4.1/download"], visibility = [], ) cargo.rust_library( - name = "blake3-1.5.0", - srcs = [":blake3-1.5.0.crate"], + name = "blake3-1.4.1", + srcs = [":blake3-1.4.1.crate"], crate = "blake3", - crate_root = "blake3-1.5.0.crate/src/lib.rs", + crate_root = "blake3-1.4.1.crate/src/lib.rs", edition = "2021", features = [ "default", + "digest", "std", ], platform = { "linux-arm64": dict( rustc_flags = ["--cfg=blake3_neon"], - deps = [":blake3-1.5.0-simd_neon-aarch64"], + deps = [":blake3-1.4.1-simd_neon-aarch64"], ), "linux-x86_64": dict( rustc_flags = [ @@ -1230,11 +1248,11 @@ cargo.rust_library( "--cfg=blake3_sse2_ffi", "--cfg=blake3_sse41_ffi", ], - deps = [":blake3-1.5.0-simd_x86_unix"], + deps = [":blake3-1.4.1-simd_x86_unix"], ), "macos-arm64": dict( rustc_flags = ["--cfg=blake3_neon"], - deps = [":blake3-1.5.0-simd_neon-aarch64"], + deps = [":blake3-1.4.1-simd_neon-aarch64"], ), "macos-x86_64": dict( rustc_flags = [ @@ -1243,7 +1261,7 @@ cargo.rust_library( "--cfg=blake3_sse2_ffi", "--cfg=blake3_sse41_ffi", ], - deps = [":blake3-1.5.0-simd_x86_unix"], + deps = [":blake3-1.4.1-simd_x86_unix"], ), "windows-gnu": dict( rustc_flags = [ @@ 
-1252,7 +1270,7 @@ cargo.rust_library( "--cfg=blake3_sse2_ffi", "--cfg=blake3_sse41_ffi", ], - deps = [":blake3-1.5.0-simd_x86_windows_gnu"], + deps = [":blake3-1.4.1-simd_x86_windows_gnu"], ), "windows-msvc": dict( rustc_flags = [ @@ -1261,7 +1279,7 @@ cargo.rust_library( "--cfg=blake3_sse2_ffi", "--cfg=blake3_sse41_ffi", ], - deps = [":blake3-1.5.0-simd_x86_windows_msvc"], + deps = [":blake3-1.4.1-simd_x86_windows_msvc"], ), }, visibility = [], @@ -1270,26 +1288,27 @@ cargo.rust_library( ":arrayvec-0.7.4", ":cfg-if-1.0.0", ":constant_time_eq-0.3.0", + ":digest-0.10.7", ], ) cxx_library( - name = "blake3-1.5.0-simd_neon-aarch64", - srcs = [":blake3-1.5.0.crate[c/blake3_neon.c]"], + name = "blake3-1.4.1-simd_neon-aarch64", + srcs = [":blake3-1.4.1.crate[c/blake3_neon.c]"], headers = [ - ":blake3-1.5.0.crate[c/blake3.h]", - ":blake3-1.5.0.crate[c/blake3_impl.h]", + ":blake3-1.4.1.crate[c/blake3.h]", + ":blake3-1.4.1.crate[c/blake3_impl.h]", ], preferred_linkage = "static", visibility = [], ) cxx_library( - name = "blake3-1.5.0-simd_neon-armv7", - srcs = [":blake3-1.5.0.crate[c/blake3_neon.c]"], + name = "blake3-1.4.1-simd_neon-armv7", + srcs = [":blake3-1.4.1.crate[c/blake3_neon.c]"], headers = [ - ":blake3-1.5.0.crate[c/blake3.h]", - ":blake3-1.5.0.crate[c/blake3_impl.h]", + ":blake3-1.4.1.crate[c/blake3.h]", + ":blake3-1.4.1.crate[c/blake3_impl.h]", ], compiler_flags = [ "-mfpu=neon-vfpv4", @@ -1300,19 +1319,19 @@ cxx_library( ) cxx_library( - name = "blake3-1.5.0-simd_x86_unix", + name = "blake3-1.4.1-simd_x86_unix", srcs = [ - ":blake3-1.5.0.crate[c/blake3.c]", - ":blake3-1.5.0.crate[c/blake3_avx2_x86-64_unix.S]", - ":blake3-1.5.0.crate[c/blake3_avx512_x86-64_unix.S]", - ":blake3-1.5.0.crate[c/blake3_dispatch.c]", - ":blake3-1.5.0.crate[c/blake3_portable.c]", - ":blake3-1.5.0.crate[c/blake3_sse2_x86-64_unix.S]", - ":blake3-1.5.0.crate[c/blake3_sse41_x86-64_unix.S]", + ":blake3-1.4.1.crate[c/blake3.c]", + ":blake3-1.4.1.crate[c/blake3_avx2_x86-64_unix.S]", + 
":blake3-1.4.1.crate[c/blake3_avx512_x86-64_unix.S]", + ":blake3-1.4.1.crate[c/blake3_dispatch.c]", + ":blake3-1.4.1.crate[c/blake3_portable.c]", + ":blake3-1.4.1.crate[c/blake3_sse2_x86-64_unix.S]", + ":blake3-1.4.1.crate[c/blake3_sse41_x86-64_unix.S]", ], headers = [ - ":blake3-1.5.0.crate[c/blake3.h]", - ":blake3-1.5.0.crate[c/blake3_impl.h]", + ":blake3-1.4.1.crate[c/blake3.h]", + ":blake3-1.4.1.crate[c/blake3_impl.h]", ], compatible_with = [ "prelude//os/constraints:linux", @@ -1327,19 +1346,19 @@ cxx_library( ) cxx_library( - name = "blake3-1.5.0-simd_x86_windows_gnu", + name = "blake3-1.4.1-simd_x86_windows_gnu", srcs = [ - ":blake3-1.5.0.crate[c/blake3.c]", - ":blake3-1.5.0.crate[c/blake3_avx2_x86-64_windows_gnu.S]", - ":blake3-1.5.0.crate[c/blake3_avx512_x86-64_windows_gnu.S]", - ":blake3-1.5.0.crate[c/blake3_dispatch.c]", - ":blake3-1.5.0.crate[c/blake3_portable.c]", - ":blake3-1.5.0.crate[c/blake3_sse2_x86-64_windows_gnu.S]", - ":blake3-1.5.0.crate[c/blake3_sse41_x86-64_windows_gnu.S]", + ":blake3-1.4.1.crate[c/blake3.c]", + ":blake3-1.4.1.crate[c/blake3_avx2_x86-64_windows_gnu.S]", + ":blake3-1.4.1.crate[c/blake3_avx512_x86-64_windows_gnu.S]", + ":blake3-1.4.1.crate[c/blake3_dispatch.c]", + ":blake3-1.4.1.crate[c/blake3_portable.c]", + ":blake3-1.4.1.crate[c/blake3_sse2_x86-64_windows_gnu.S]", + ":blake3-1.4.1.crate[c/blake3_sse41_x86-64_windows_gnu.S]", ], headers = [ - ":blake3-1.5.0.crate[c/blake3.h]", - ":blake3-1.5.0.crate[c/blake3_impl.h]", + ":blake3-1.4.1.crate[c/blake3.h]", + ":blake3-1.4.1.crate[c/blake3_impl.h]", ], compatible_with = ["prelude//os/constraints:windows"], compiler_flags = [ @@ -1351,19 +1370,19 @@ cxx_library( ) cxx_library( - name = "blake3-1.5.0-simd_x86_windows_msvc", + name = "blake3-1.4.1-simd_x86_windows_msvc", srcs = [ - ":blake3-1.5.0.crate[c/blake3.c]", - ":blake3-1.5.0.crate[c/blake3_avx2_x86-64_windows_msvc.asm]", - ":blake3-1.5.0.crate[c/blake3_avx512_x86-64_windows_msvc.asm]", - 
":blake3-1.5.0.crate[c/blake3_dispatch.c]", - ":blake3-1.5.0.crate[c/blake3_portable.c]", - ":blake3-1.5.0.crate[c/blake3_sse2_x86-64_windows_msvc.asm]", - ":blake3-1.5.0.crate[c/blake3_sse41_x86-64_windows_msvc.asm]", + ":blake3-1.4.1.crate[c/blake3.c]", + ":blake3-1.4.1.crate[c/blake3_avx2_x86-64_windows_msvc.asm]", + ":blake3-1.4.1.crate[c/blake3_avx512_x86-64_windows_msvc.asm]", + ":blake3-1.4.1.crate[c/blake3_dispatch.c]", + ":blake3-1.4.1.crate[c/blake3_portable.c]", + ":blake3-1.4.1.crate[c/blake3_sse2_x86-64_windows_msvc.asm]", + ":blake3-1.4.1.crate[c/blake3_sse41_x86-64_windows_msvc.asm]", ], headers = [ - ":blake3-1.5.0.crate[c/blake3.h]", - ":blake3-1.5.0.crate[c/blake3_impl.h]", + ":blake3-1.4.1.crate[c/blake3.h]", + ":blake3-1.4.1.crate[c/blake3_impl.h]", ], compatible_with = ["prelude//os/constraints:windows"], preferred_linkage = "static", @@ -1448,25 +1467,25 @@ cargo.rust_library( }, visibility = [], deps = [ - ":base64-0.21.5", + ":base64-0.21.2", ":bollard-stubs-1.43.0-rc.2", - ":bytes-1.5.0", + ":bytes-1.4.0", ":futures-core-0.3.29", ":futures-util-0.3.29", ":hex-0.4.3", - ":http-0.2.11", + ":http-0.2.9", ":hyper-0.14.27", ":log-0.4.20", - ":pin-project-lite-0.2.13", - ":serde-1.0.193", - ":serde_derive-1.0.193", - ":serde_json-1.0.108", - ":serde_repr-0.1.17", + ":pin-project-lite-0.2.12", + ":serde-1.0.186", + ":serde_derive-1.0.186", + ":serde_json-1.0.105", + ":serde_repr-0.1.16", ":serde_urlencoded-0.7.1", - ":thiserror-1.0.50", - ":tokio-1.35.0", - ":tokio-util-0.7.10", - ":url-2.5.0", + ":thiserror-1.0.47", + ":tokio-1.32.0", + ":tokio-util-0.7.8", + ":url-2.4.0", ], ) @@ -1486,90 +1505,127 @@ cargo.rust_library( edition = "2021", visibility = [], deps = [ - ":serde-1.0.193", - ":serde_repr-0.1.17", - ":serde_with-3.4.0", + ":serde-1.0.186", + ":serde_repr-0.1.16", + ":serde_with-3.3.0", ], ) http_archive( - name = "borsh-1.2.1.crate", - sha256 = "9897ef0f1bd2362169de6d7e436ea2237dc1085d7d1e4db75f4be34d86f309d1", - strip_prefix = 
"borsh-1.2.1", - urls = ["https://crates.io/api/v1/crates/borsh/1.2.1/download"], + name = "borsh-0.10.3.crate", + sha256 = "4114279215a005bc675e386011e594e1d9b800918cea18fcadadcce864a2046b", + strip_prefix = "borsh-0.10.3", + urls = ["https://crates.io/api/v1/crates/borsh/0.10.3/download"], visibility = [], ) cargo.rust_library( - name = "borsh-1.2.1", - srcs = [":borsh-1.2.1.crate"], + name = "borsh-0.10.3", + srcs = [":borsh-0.10.3.crate"], crate = "borsh", - crate_root = "borsh-1.2.1.crate/src/lib.rs", + crate_root = "borsh-0.10.3.crate/src/lib.rs", edition = "2018", env = { - "CARGO_MANIFEST_DIR": "borsh-1.2.1.crate", + "CARGO_MANIFEST_DIR": "borsh-0.10.3.crate", "CARGO_PKG_AUTHORS": "Near Inc ", "CARGO_PKG_DESCRIPTION": "Binary Object Representation Serializer for Hashing\n", "CARGO_PKG_NAME": "borsh", "CARGO_PKG_REPOSITORY": "https://github.com/near/borsh-rs", - "CARGO_PKG_VERSION": "1.2.1", - "CARGO_PKG_VERSION_MAJOR": "1", - "CARGO_PKG_VERSION_MINOR": "2", - "CARGO_PKG_VERSION_PATCH": "1", + "CARGO_PKG_VERSION": "0.10.3", + "CARGO_PKG_VERSION_MAJOR": "0", + "CARGO_PKG_VERSION_MINOR": "10", + "CARGO_PKG_VERSION_PATCH": "3", }, - features = [ - "borsh-derive", - "derive", - "std", - "unstable__schema", - ], + features = ["std"], visibility = [], - deps = [":borsh-derive-1.2.1"], + deps = [ + ":borsh-derive-0.10.3", + ":hashbrown-0.13.2", + ], ) http_archive( - name = "borsh-derive-1.2.1.crate", - sha256 = "478b41ff04256c5c8330f3dfdaaae2a5cc976a8e75088bafa4625b0d0208de8c", - strip_prefix = "borsh-derive-1.2.1", - urls = ["https://crates.io/api/v1/crates/borsh-derive/1.2.1/download"], + name = "borsh-derive-0.10.3.crate", + sha256 = "0754613691538d51f329cce9af41d7b7ca150bc973056f1156611489475f54f7", + strip_prefix = "borsh-derive-0.10.3", + urls = ["https://crates.io/api/v1/crates/borsh-derive/0.10.3/download"], visibility = [], ) cargo.rust_library( - name = "borsh-derive-1.2.1", - srcs = [":borsh-derive-1.2.1.crate"], + name = "borsh-derive-0.10.3", + srcs = 
[":borsh-derive-0.10.3.crate"], crate = "borsh_derive", - crate_root = "borsh-derive-1.2.1.crate/src/lib.rs", + crate_root = "borsh-derive-0.10.3.crate/src/lib.rs", edition = "2018", - features = [ - "default", - "schema", - ], proc_macro = True, visibility = [], deps = [ - ":once_cell-1.19.0", - ":proc-macro-crate-2.0.0", - ":proc-macro2-1.0.70", + ":borsh-derive-internal-0.10.3", + ":borsh-schema-derive-internal-0.10.3", + ":proc-macro-crate-0.1.5", + ":proc-macro2-1.0.66", + ":syn-1.0.109", + ], +) + +http_archive( + name = "borsh-derive-internal-0.10.3.crate", + sha256 = "afb438156919598d2c7bad7e1c0adf3d26ed3840dbc010db1a882a65583ca2fb", + strip_prefix = "borsh-derive-internal-0.10.3", + urls = ["https://crates.io/api/v1/crates/borsh-derive-internal/0.10.3/download"], + visibility = [], +) + +cargo.rust_library( + name = "borsh-derive-internal-0.10.3", + srcs = [":borsh-derive-internal-0.10.3.crate"], + crate = "borsh_derive_internal", + crate_root = "borsh-derive-internal-0.10.3.crate/src/lib.rs", + edition = "2018", + visibility = [], + deps = [ + ":proc-macro2-1.0.66", + ":quote-1.0.33", + ":syn-1.0.109", + ], +) + +http_archive( + name = "borsh-schema-derive-internal-0.10.3.crate", + sha256 = "634205cc43f74a1b9046ef87c4540ebda95696ec0f315024860cad7c5b0f5ccd", + strip_prefix = "borsh-schema-derive-internal-0.10.3", + urls = ["https://crates.io/api/v1/crates/borsh-schema-derive-internal/0.10.3/download"], + visibility = [], +) + +cargo.rust_library( + name = "borsh-schema-derive-internal-0.10.3", + srcs = [":borsh-schema-derive-internal-0.10.3.crate"], + crate = "borsh_schema_derive_internal", + crate_root = "borsh-schema-derive-internal-0.10.3.crate/src/lib.rs", + edition = "2018", + visibility = [], + deps = [ + ":proc-macro2-1.0.66", ":quote-1.0.33", - ":syn-2.0.40", - ":syn_derive-0.1.8", + ":syn-1.0.109", ], ) http_archive( - name = "bstr-1.8.0.crate", - sha256 = "542f33a8835a0884b006a0c3df3dadd99c0c3f296ed26c2fdc8028e01ad6230c", - strip_prefix = 
"bstr-1.8.0", - urls = ["https://crates.io/api/v1/crates/bstr/1.8.0/download"], + name = "bstr-1.6.0.crate", + sha256 = "6798148dccfbff0fae41c7574d2fa8f1ef3492fba0face179de5d8d447d67b05", + strip_prefix = "bstr-1.6.0", + urls = ["https://crates.io/api/v1/crates/bstr/1.6.0/download"], visibility = [], ) cargo.rust_library( - name = "bstr-1.8.0", - srcs = [":bstr-1.8.0.crate"], + name = "bstr-1.6.0", + srcs = [":bstr-1.6.0.crate"], crate = "bstr", - crate_root = "bstr-1.8.0.crate/src/lib.rs", + crate_root = "bstr-1.6.0.crate/src/lib.rs", edition = "2021", features = [ "alloc", @@ -1577,8 +1633,8 @@ cargo.rust_library( ], visibility = [], deps = [ - ":memchr-2.6.4", - ":serde-1.0.193", + ":memchr-2.5.0", + ":serde-1.0.186", ], ) @@ -1629,26 +1685,26 @@ cargo.rust_library( proc_macro = True, visibility = [], deps = [ - ":proc-macro2-1.0.70", + ":proc-macro2-1.0.66", ":quote-1.0.33", ":syn-1.0.109", ], ) http_archive( - name = "byteorder-1.5.0.crate", - sha256 = "1fd0f2584146f6f2ef48085050886acf353beff7305ebd1ae69500e27c67f64b", - strip_prefix = "byteorder-1.5.0", - urls = ["https://crates.io/api/v1/crates/byteorder/1.5.0/download"], + name = "byteorder-1.4.3.crate", + sha256 = "14c189c53d098945499cdfa7ecc63567cf3886b3332b312a5b4585d8d3a6a610", + strip_prefix = "byteorder-1.4.3", + urls = ["https://crates.io/api/v1/crates/byteorder/1.4.3/download"], visibility = [], ) cargo.rust_library( - name = "byteorder-1.5.0", - srcs = [":byteorder-1.5.0.crate"], + name = "byteorder-1.4.3", + srcs = [":byteorder-1.4.3.crate"], crate = "byteorder", - crate_root = "byteorder-1.5.0.crate/src/lib.rs", - edition = "2021", + crate_root = "byteorder-1.4.3.crate/src/lib.rs", + edition = "2018", features = [ "default", "i128", @@ -1680,23 +1736,23 @@ cargo.rust_library( alias( name = "bytes", - actual = ":bytes-1.5.0", + actual = ":bytes-1.4.0", visibility = ["PUBLIC"], ) http_archive( - name = "bytes-1.5.0.crate", - sha256 = 
"a2bd12c1caf447e69cd4528f47f94d203fd2582878ecb9e9465484c4148a8223", - strip_prefix = "bytes-1.5.0", - urls = ["https://crates.io/api/v1/crates/bytes/1.5.0/download"], + name = "bytes-1.4.0.crate", + sha256 = "89b2fd2a0dcf38d7971e2194b6b6eebab45ae01067456a7fd93d5547a61b70be", + strip_prefix = "bytes-1.4.0", + urls = ["https://crates.io/api/v1/crates/bytes/1.4.0/download"], visibility = [], ) cargo.rust_library( - name = "bytes-1.5.0", - srcs = [":bytes-1.5.0.crate"], + name = "bytes-1.4.0", + srcs = [":bytes-1.4.0.crate"], crate = "bytes", - crate_root = "bytes-1.5.0.crate/src/lib.rs", + crate_root = "bytes-1.4.0.crate/src/lib.rs", edition = "2018", features = [ "default", @@ -1704,7 +1760,7 @@ cargo.rust_library( "std", ], visibility = [], - deps = [":serde-1.0.193"], + deps = [":serde-1.0.186"], ) http_archive( @@ -1723,16 +1779,16 @@ cargo.rust_library( edition = "2018", platform = { "linux-arm64": dict( - deps = [":libc-0.2.151"], + deps = [":libc-0.2.147"], ), "linux-x86_64": dict( - deps = [":libc-0.2.151"], + deps = [":libc-0.2.147"], ), "macos-arm64": dict( - deps = [":libc-0.2.151"], + deps = [":libc-0.2.147"], ), "macos-x86_64": dict( - deps = [":libc-0.2.151"], + deps = [":libc-0.2.147"], ), }, visibility = [], @@ -1757,27 +1813,26 @@ cargo.rust_library( alias( name = "chrono", - actual = ":chrono-0.4.31", + actual = ":chrono-0.4.26", visibility = ["PUBLIC"], ) http_archive( - name = "chrono-0.4.31.crate", - sha256 = "7f2c685bad3eb3d45a01354cedb7d5faa66194d1d58ba6e267a8de788f79db38", - strip_prefix = "chrono-0.4.31", - urls = ["https://crates.io/api/v1/crates/chrono/0.4.31/download"], + name = "chrono-0.4.26.crate", + sha256 = "ec837a71355b28f6556dbd569b37b3f363091c0bd4b2e735674521b4c5fd9bc5", + strip_prefix = "chrono-0.4.26", + urls = ["https://crates.io/api/v1/crates/chrono/0.4.26/download"], visibility = [], ) cargo.rust_library( - name = "chrono-0.4.31", - srcs = [":chrono-0.4.31.crate"], + name = "chrono-0.4.26", + srcs = [":chrono-0.4.26.crate"], 
crate = "chrono", - crate_root = "chrono-0.4.31.crate/src/lib.rs", + crate_root = "chrono-0.4.26.crate/src/lib.rs", edition = "2021", features = [ "alloc", - "android-tzdata", "clock", "default", "iana-time-zone", @@ -1785,35 +1840,36 @@ cargo.rust_library( "oldtime", "serde", "std", + "time", "wasm-bindgen", "wasmbind", "winapi", - "windows-targets", ], platform = { "linux-arm64": dict( - deps = [":iana-time-zone-0.1.58"], + deps = [":iana-time-zone-0.1.57"], ), "linux-x86_64": dict( - deps = [":iana-time-zone-0.1.58"], + deps = [":iana-time-zone-0.1.57"], ), "macos-arm64": dict( - deps = [":iana-time-zone-0.1.58"], + deps = [":iana-time-zone-0.1.57"], ), "macos-x86_64": dict( - deps = [":iana-time-zone-0.1.58"], + deps = [":iana-time-zone-0.1.57"], ), "windows-gnu": dict( - deps = [":windows-targets-0.48.5"], + deps = [":winapi-0.3.9"], ), "windows-msvc": dict( - deps = [":windows-targets-0.48.5"], + deps = [":winapi-0.3.9"], ), }, visibility = [], deps = [ - ":num-traits-0.2.17", - ":serde-1.0.193", + ":num-traits-0.2.16", + ":serde-1.0.186", + ":time-0.1.45", ], ) @@ -1845,7 +1901,7 @@ cargo.rust_library( deps = [ ":ciborium-io-0.2.1", ":ciborium-ll-0.2.1", - ":serde-1.0.193", + ":serde-1.0.186", ], ) @@ -1893,23 +1949,23 @@ cargo.rust_library( alias( name = "clap", - actual = ":clap-4.4.11", + actual = ":clap-4.3.24", visibility = ["PUBLIC"], ) http_archive( - name = "clap-4.4.11.crate", - sha256 = "bfaff671f6b22ca62406885ece523383b9b64022e341e53e009a62ebc47a45f2", - strip_prefix = "clap-4.4.11", - urls = ["https://crates.io/api/v1/crates/clap/4.4.11/download"], + name = "clap-4.3.24.crate", + sha256 = "fb690e81c7840c0d7aade59f242ea3b41b9bc27bcd5997890e7702ae4b32e487", + strip_prefix = "clap-4.3.24", + urls = ["https://crates.io/api/v1/crates/clap/4.3.24/download"], visibility = [], ) cargo.rust_library( - name = "clap-4.4.11", - srcs = [":clap-4.4.11.crate"], + name = "clap-4.3.24", + srcs = [":clap-4.3.24.crate"], crate = "clap", - crate_root = 
"clap-4.4.11.crate/src/lib.rs", + crate_root = "clap-4.3.24.crate/src/lib.rs", edition = "2021", features = [ "color", @@ -1925,24 +1981,25 @@ cargo.rust_library( ], visibility = [], deps = [ - ":clap_builder-4.4.11", - ":clap_derive-4.4.7", + ":clap_builder-4.3.24", + ":clap_derive-4.3.12", + ":once_cell-1.18.0", ], ) http_archive( - name = "clap_builder-4.4.11.crate", - sha256 = "a216b506622bb1d316cd51328dce24e07bdff4a6128a47c7e7fad11878d5adbb", - strip_prefix = "clap_builder-4.4.11", - urls = ["https://crates.io/api/v1/crates/clap_builder/4.4.11/download"], + name = "clap_builder-4.3.24.crate", + sha256 = "5ed2e96bc16d8d740f6f48d663eddf4b8a0983e79210fd55479b7bcd0a69860e", + strip_prefix = "clap_builder-4.3.24", + urls = ["https://crates.io/api/v1/crates/clap_builder/4.3.24/download"], visibility = [], ) cargo.rust_library( - name = "clap_builder-4.4.11", - srcs = [":clap_builder-4.4.11.crate"], + name = "clap_builder-4.3.24", + srcs = [":clap_builder-4.3.24.crate"], crate = "clap_builder", - crate_root = "clap_builder-4.4.11.crate/src/lib.rs", + crate_root = "clap_builder-4.3.24.crate/src/lib.rs", edition = "2021", features = [ "color", @@ -1956,92 +2013,109 @@ cargo.rust_library( ], visibility = [], deps = [ - ":anstream-0.6.5", - ":anstyle-1.0.4", - ":clap_lex-0.6.0", + ":anstream-0.3.2", + ":anstyle-1.0.2", + ":clap_lex-0.5.0", ":strsim-0.10.0", - ":terminal_size-0.3.0", + ":terminal_size-0.2.6", ], ) http_archive( - name = "clap_derive-4.4.7.crate", - sha256 = "cf9804afaaf59a91e75b022a30fb7229a7901f60c755489cc61c9b423b836442", - strip_prefix = "clap_derive-4.4.7", - urls = ["https://crates.io/api/v1/crates/clap_derive/4.4.7/download"], + name = "clap_derive-4.3.12.crate", + sha256 = "54a9bb5758fc5dfe728d1019941681eccaf0cf8a4189b692a0ee2f2ecf90a050", + strip_prefix = "clap_derive-4.3.12", + urls = ["https://crates.io/api/v1/crates/clap_derive/4.3.12/download"], visibility = [], ) cargo.rust_library( - name = "clap_derive-4.4.7", - srcs = 
[":clap_derive-4.4.7.crate"], + name = "clap_derive-4.3.12", + srcs = [":clap_derive-4.3.12.crate"], crate = "clap_derive", - crate_root = "clap_derive-4.4.7.crate/src/lib.rs", + crate_root = "clap_derive-4.3.12.crate/src/lib.rs", edition = "2021", features = ["default"], proc_macro = True, visibility = [], deps = [ ":heck-0.4.1", - ":proc-macro2-1.0.70", + ":proc-macro2-1.0.66", ":quote-1.0.33", - ":syn-2.0.40", + ":syn-2.0.29", ], ) http_archive( - name = "clap_lex-0.6.0.crate", - sha256 = "702fc72eb24e5a1e48ce58027a675bc24edd52096d5397d4aea7c6dd9eca0bd1", - strip_prefix = "clap_lex-0.6.0", - urls = ["https://crates.io/api/v1/crates/clap_lex/0.6.0/download"], + name = "clap_lex-0.5.0.crate", + sha256 = "2da6da31387c7e4ef160ffab6d5e7f00c42626fe39aea70a7b0f1773f7dd6c1b", + strip_prefix = "clap_lex-0.5.0", + urls = ["https://crates.io/api/v1/crates/clap_lex/0.5.0/download"], visibility = [], ) cargo.rust_library( - name = "clap_lex-0.6.0", - srcs = [":clap_lex-0.6.0.crate"], + name = "clap_lex-0.5.0", + srcs = [":clap_lex-0.5.0.crate"], crate = "clap_lex", - crate_root = "clap_lex-0.6.0.crate/src/lib.rs", + crate_root = "clap_lex-0.5.0.crate/src/lib.rs", edition = "2021", visibility = [], ) http_archive( - name = "coarsetime-0.1.33.crate", - sha256 = "71367d3385c716342014ad17e3d19f7788ae514885a1f4c24f500260fb365e1a", - strip_prefix = "coarsetime-0.1.33", - urls = ["https://crates.io/api/v1/crates/coarsetime/0.1.33/download"], + name = "coarsetime-0.1.23.crate", + sha256 = "a90d114103adbc625300f346d4d09dfb4ab1c4a8df6868435dd903392ecf4354", + strip_prefix = "coarsetime-0.1.23", + urls = ["https://crates.io/api/v1/crates/coarsetime/0.1.23/download"], visibility = [], ) cargo.rust_library( - name = "coarsetime-0.1.33", - srcs = [":coarsetime-0.1.33.crate"], + name = "coarsetime-0.1.23", + srcs = [":coarsetime-0.1.23.crate"], crate = "coarsetime", - crate_root = "coarsetime-0.1.33.crate/src/lib.rs", + crate_root = "coarsetime-0.1.23.crate/src/lib.rs", edition = "2018", 
platform = { "linux-arm64": dict( - deps = [":libc-0.2.151"], + deps = [":libc-0.2.147"], ), "linux-x86_64": dict( - deps = [":libc-0.2.151"], + deps = [":libc-0.2.147"], ), "macos-arm64": dict( - deps = [":libc-0.2.151"], + deps = [":libc-0.2.147"], ), "macos-x86_64": dict( - deps = [":libc-0.2.151"], + deps = [":libc-0.2.147"], ), "windows-gnu": dict( - deps = [":libc-0.2.151"], + deps = [":libc-0.2.147"], ), "windows-msvc": dict( - deps = [":libc-0.2.151"], + deps = [":libc-0.2.147"], ), }, visibility = [], - deps = [":once_cell-1.19.0"], + deps = [":once_cell-1.18.0"], +) + +http_archive( + name = "cobs-0.2.3.crate", + sha256 = "67ba02a97a2bd10f4b59b25c7973101c79642302776489e030cd13cdab09ed15", + strip_prefix = "cobs-0.2.3", + urls = ["https://crates.io/api/v1/crates/cobs/0.2.3/download"], + visibility = [], +) + +cargo.rust_library( + name = "cobs-0.2.3", + srcs = [":cobs-0.2.3.crate"], + crate = "cobs", + crate_root = "cobs-0.2.3.crate/src/lib.rs", + edition = "2018", + visibility = [], ) alias( @@ -2074,34 +2148,34 @@ cargo.rust_library( visibility = [], deps = [ ":backtrace-0.3.69", - ":color-spantrace-0.2.1", - ":eyre-0.6.10", + ":color-spantrace-0.2.0", + ":eyre-0.6.8", ":indenter-0.3.3", - ":once_cell-1.19.0", + ":once_cell-1.18.0", ":owo-colors-3.5.0", ":tracing-error-0.2.0", ], ) http_archive( - name = "color-spantrace-0.2.1.crate", - sha256 = "cd6be1b2a7e382e2b98b43b2adcca6bb0e465af0bdd38123873ae61eb17a72c2", - strip_prefix = "color-spantrace-0.2.1", - urls = ["https://crates.io/api/v1/crates/color-spantrace/0.2.1/download"], + name = "color-spantrace-0.2.0.crate", + sha256 = "1ba75b3d9449ecdccb27ecbc479fdc0b87fa2dd43d2f8298f9bf0e59aacc8dce", + strip_prefix = "color-spantrace-0.2.0", + urls = ["https://crates.io/api/v1/crates/color-spantrace/0.2.0/download"], visibility = [], ) cargo.rust_library( - name = "color-spantrace-0.2.1", - srcs = [":color-spantrace-0.2.1.crate"], + name = "color-spantrace-0.2.0", + srcs = [":color-spantrace-0.2.0.crate"], 
crate = "color_spantrace", - crate_root = "color-spantrace-0.2.1.crate/src/lib.rs", + crate_root = "color-spantrace-0.2.0.crate/src/lib.rs", edition = "2018", visibility = [], deps = [ - ":once_cell-1.19.0", + ":once_cell-1.18.0", ":owo-colors-3.5.0", - ":tracing-core-0.1.32", + ":tracing-core-0.1.31", ":tracing-error-0.2.0", ], ) @@ -2125,23 +2199,23 @@ cargo.rust_library( alias( name = "colored", - actual = ":colored-2.1.0", + actual = ":colored-2.0.4", visibility = ["PUBLIC"], ) http_archive( - name = "colored-2.1.0.crate", - sha256 = "cbf2150cce219b664a8a70df7a1f933836724b503f8a413af9365b4dcc4d90b8", - strip_prefix = "colored-2.1.0", - urls = ["https://crates.io/api/v1/crates/colored/2.1.0/download"], + name = "colored-2.0.4.crate", + sha256 = "2674ec482fbc38012cf31e6c42ba0177b431a0cb6f15fe40efa5aab1bda516f6", + strip_prefix = "colored-2.0.4", + urls = ["https://crates.io/api/v1/crates/colored/2.0.4/download"], visibility = [], ) cargo.rust_library( - name = "colored-2.1.0", - srcs = [":colored-2.1.0.crate"], + name = "colored-2.0.4", + srcs = [":colored-2.0.4.crate"], crate = "colored", - crate_root = "colored-2.1.0.crate/src/lib.rs", + crate_root = "colored-2.0.4.crate/src/lib.rs", edition = "2021", platform = { "windows-gnu": dict( @@ -2152,28 +2226,31 @@ cargo.rust_library( ), }, visibility = [], - deps = [":lazy_static-1.4.0"], + deps = [ + ":is-terminal-0.4.9", + ":lazy_static-1.4.0", + ], ) alias( name = "comfy-table", - actual = ":comfy-table-7.1.0", + actual = ":comfy-table-7.0.1", visibility = ["PUBLIC"], ) http_archive( - name = "comfy-table-7.1.0.crate", - sha256 = "7c64043d6c7b7a4c58e39e7efccfdea7b93d885a795d0c054a69dbbf4dd52686", - strip_prefix = "comfy-table-7.1.0", - urls = ["https://crates.io/api/v1/crates/comfy-table/7.1.0/download"], + name = "comfy-table-7.0.1.crate", + sha256 = "9ab77dbd8adecaf3f0db40581631b995f312a8a5ae3aa9993188bb8f23d83a5b", + strip_prefix = "comfy-table-7.0.1", + urls = 
["https://crates.io/api/v1/crates/comfy-table/7.0.1/download"], visibility = [], ) cargo.rust_library( - name = "comfy-table-7.1.0", - srcs = [":comfy-table-7.1.0.crate"], + name = "comfy-table-7.0.1", + srcs = [":comfy-table-7.0.1.crate"], crate = "comfy_table", - crate_root = "comfy-table-7.1.0.crate/src/lib.rs", + crate_root = "comfy-table-7.0.1.crate/src/lib.rs", edition = "2021", features = [ "console", @@ -2182,63 +2259,44 @@ cargo.rust_library( "default", "tty", ], - platform = { - "linux-arm64": dict( - deps = [":crossterm-0.27.0"], - ), - "linux-x86_64": dict( - deps = [":crossterm-0.27.0"], - ), - "macos-arm64": dict( - deps = [":crossterm-0.27.0"], - ), - "macos-x86_64": dict( - deps = [":crossterm-0.27.0"], - ), - "windows-gnu": dict( - deps = [":crossterm-0.27.0"], - ), - "windows-msvc": dict( - deps = [":crossterm-0.27.0"], - ), - }, visibility = [], deps = [ ":console-0.15.7", - ":strum-0.25.0", - ":strum_macros-0.25.3", - ":unicode-width-0.1.11", + ":crossterm-0.26.1", + ":strum-0.24.1", + ":strum_macros-0.24.3", + ":unicode-width-0.1.10", ], ) alias( name = "config", - actual = ":config-0.13.4", + actual = ":config-0.13.3", visibility = ["PUBLIC"], ) http_archive( - name = "config-0.13.4.crate", - sha256 = "23738e11972c7643e4ec947840fc463b6a571afcd3e735bdfce7d03c7a784aca", - strip_prefix = "config-0.13.4", - urls = ["https://crates.io/api/v1/crates/config/0.13.4/download"], + name = "config-0.13.3.crate", + sha256 = "d379af7f68bfc21714c6c7dea883544201741d2ce8274bb12fa54f89507f52a7", + strip_prefix = "config-0.13.3", + urls = ["https://crates.io/api/v1/crates/config/0.13.3/download"], visibility = [], ) cargo.rust_library( - name = "config-0.13.4", - srcs = [":config-0.13.4.crate"], + name = "config-0.13.3", + srcs = [":config-0.13.3.crate"], crate = "config", - crate_root = "config-0.13.4.crate/src/lib.rs", + crate_root = "config-0.13.3.crate/src/lib.rs", edition = "2018", features = ["toml"], visibility = [], deps = [ - ":async-trait-0.1.74", + 
":async-trait-0.1.73", ":lazy_static-1.4.0", ":nom-7.1.3", ":pathdiff-0.2.1", - ":serde-1.0.193", + ":serde-1.0.186", ":toml-0.5.11", ], ) @@ -2285,8 +2343,8 @@ cargo.rust_library( visibility = [], deps = [ ":lazy_static-1.4.0", - ":libc-0.2.151", - ":unicode-width-0.1.11", + ":libc-0.2.147", + ":unicode-width-0.1.10", ], ) @@ -2375,21 +2433,21 @@ cargo.rust_library( }, visibility = [], deps = [ - ":chrono-0.4.31", - ":flate2-1.0.28", + ":chrono-0.4.26", + ":flate2-1.0.27", ":futures-util-0.3.29", - ":http-0.2.11", + ":http-0.2.9", ":hyper-0.14.27", ":log-0.4.20", ":mime-0.3.17", ":paste-1.0.14", ":pin-project-1.1.3", - ":serde-1.0.193", - ":serde_json-1.0.108", + ":serde-1.0.186", + ":serde_json-1.0.105", ":tar-0.4.40", - ":thiserror-1.0.50", - ":tokio-1.35.0", - ":url-2.5.0", + ":thiserror-1.0.47", + ":tokio-1.32.0", + ":url-2.4.0", ], ) @@ -2427,21 +2485,21 @@ cargo.rust_library( }, visibility = [], deps = [ - ":chrono-0.4.31", - ":flate2-1.0.28", + ":chrono-0.4.26", + ":flate2-1.0.27", ":futures-util-0.3.29", - ":http-0.2.11", + ":http-0.2.9", ":hyper-0.14.27", ":log-0.4.20", ":mime-0.3.17", ":paste-1.0.14", ":pin-project-1.1.3", - ":serde-1.0.193", - ":serde_json-1.0.108", + ":serde-1.0.186", + ":serde_json-1.0.105", ":tar-0.4.40", - ":thiserror-1.0.50", - ":tokio-1.35.0", - ":url-2.5.0", + ":thiserror-1.0.47", + ":tokio-1.32.0", + ":url-2.4.0", ], ) @@ -2487,71 +2545,63 @@ cargo.rust_library( ) http_archive( - name = "core-foundation-0.9.4.crate", - sha256 = "91e195e091a93c46f7102ec7818a2aa394e1e1771c3ab4825963fa03e45afb8f", - strip_prefix = "core-foundation-0.9.4", - urls = ["https://crates.io/api/v1/crates/core-foundation/0.9.4/download"], + name = "core-foundation-0.9.3.crate", + sha256 = "194a7a9e6de53fa55116934067c844d9d749312f75c6f6d0980e8c252f8c2146", + strip_prefix = "core-foundation-0.9.3", + urls = ["https://crates.io/api/v1/crates/core-foundation/0.9.3/download"], visibility = [], ) cargo.rust_library( - name = "core-foundation-0.9.4", - srcs = 
[":core-foundation-0.9.4.crate"], + name = "core-foundation-0.9.3", + srcs = [":core-foundation-0.9.3.crate"], crate = "core_foundation", - crate_root = "core-foundation-0.9.4.crate/src/lib.rs", - edition = "2018", - features = [ - "default", - "link", - ], + crate_root = "core-foundation-0.9.3.crate/src/lib.rs", + edition = "2015", visibility = [], deps = [ - ":core-foundation-sys-0.8.6", - ":libc-0.2.151", + ":core-foundation-sys-0.8.4", + ":libc-0.2.147", ], ) http_archive( - name = "core-foundation-sys-0.8.6.crate", - sha256 = "06ea2b9bc92be3c2baa9334a323ebca2d6f074ff852cd1d7b11064035cd3868f", - strip_prefix = "core-foundation-sys-0.8.6", - urls = ["https://crates.io/api/v1/crates/core-foundation-sys/0.8.6/download"], + name = "core-foundation-sys-0.8.4.crate", + sha256 = "e496a50fda8aacccc86d7529e2c1e0892dbd0f898a6b5645b5561b89c3210efa", + strip_prefix = "core-foundation-sys-0.8.4", + urls = ["https://crates.io/api/v1/crates/core-foundation-sys/0.8.4/download"], visibility = [], ) cargo.rust_library( - name = "core-foundation-sys-0.8.6", - srcs = [":core-foundation-sys-0.8.6.crate"], + name = "core-foundation-sys-0.8.4", + srcs = [":core-foundation-sys-0.8.4.crate"], crate = "core_foundation_sys", - crate_root = "core-foundation-sys-0.8.6.crate/src/lib.rs", - edition = "2018", - features = [ - "default", - "link", - ], + crate_root = "core-foundation-sys-0.8.4.crate/src/lib.rs", + edition = "2015", visibility = [], ) http_archive( - name = "cpufeatures-0.2.11.crate", - sha256 = "ce420fe07aecd3e67c5f910618fe65e94158f6dcc0adf44e00d69ce2bdfe0fd0", - strip_prefix = "cpufeatures-0.2.11", - urls = ["https://crates.io/api/v1/crates/cpufeatures/0.2.11/download"], + name = "cpufeatures-0.2.9.crate", + sha256 = "a17b76ff3a4162b0b27f354a0c87015ddad39d35f9c0c36607a3bdd175dde1f1", + strip_prefix = "cpufeatures-0.2.9", + urls = ["https://crates.io/api/v1/crates/cpufeatures/0.2.9/download"], visibility = [], ) cargo.rust_library( - name = "cpufeatures-0.2.11", - srcs = 
[":cpufeatures-0.2.11.crate"], + name = "cpufeatures-0.2.9", + srcs = [":cpufeatures-0.2.9.crate"], crate = "cpufeatures", - crate_root = "cpufeatures-0.2.11.crate/src/lib.rs", + crate_root = "cpufeatures-0.2.9.crate/src/lib.rs", edition = "2018", platform = { "linux-arm64": dict( - deps = [":libc-0.2.151"], + deps = [":libc-0.2.147"], ), "macos-arm64": dict( - deps = [":libc-0.2.151"], + deps = [":libc-0.2.147"], ), }, visibility = [], @@ -2611,87 +2661,6 @@ cargo.rust_library( ], ) -http_archive( - name = "crossbeam-deque-0.8.3.crate", - sha256 = "ce6fd6f855243022dcecf8702fef0c297d4338e226845fe067f6341ad9fa0cef", - strip_prefix = "crossbeam-deque-0.8.3", - urls = ["https://crates.io/api/v1/crates/crossbeam-deque/0.8.3/download"], - visibility = [], -) - -cargo.rust_library( - name = "crossbeam-deque-0.8.3", - srcs = [":crossbeam-deque-0.8.3.crate"], - crate = "crossbeam_deque", - crate_root = "crossbeam-deque-0.8.3.crate/src/lib.rs", - edition = "2018", - features = [ - "crossbeam-epoch", - "crossbeam-utils", - "default", - "std", - ], - visibility = [], - deps = [ - ":cfg-if-1.0.0", - ":crossbeam-epoch-0.9.15", - ":crossbeam-utils-0.8.16", - ], -) - -http_archive( - name = "crossbeam-epoch-0.9.15.crate", - sha256 = "ae211234986c545741a7dc064309f67ee1e5ad243d0e48335adc0484d960bcc7", - strip_prefix = "crossbeam-epoch-0.9.15", - urls = ["https://crates.io/api/v1/crates/crossbeam-epoch/0.9.15/download"], - visibility = [], -) - -cargo.rust_library( - name = "crossbeam-epoch-0.9.15", - srcs = [":crossbeam-epoch-0.9.15.crate"], - crate = "crossbeam_epoch", - crate_root = "crossbeam-epoch-0.9.15.crate/src/lib.rs", - edition = "2018", - features = [ - "alloc", - "std", - ], - rustc_flags = ["@$(location :crossbeam-epoch-0.9.15-build-script-run[rustc_flags])"], - visibility = [], - deps = [ - ":cfg-if-1.0.0", - ":crossbeam-utils-0.8.16", - ":memoffset-0.9.0", - ":scopeguard-1.2.0", - ], -) - -cargo.rust_binary( - name = "crossbeam-epoch-0.9.15-build-script-build", - srcs 
= [":crossbeam-epoch-0.9.15.crate"], - crate = "build_script_build", - crate_root = "crossbeam-epoch-0.9.15.crate/build.rs", - edition = "2018", - features = [ - "alloc", - "std", - ], - visibility = [], - deps = [":autocfg-1.1.0"], -) - -buildscript_run( - name = "crossbeam-epoch-0.9.15-build-script-run", - package_name = "crossbeam-epoch", - buildscript_rule = ":crossbeam-epoch-0.9.15-build-script-build", - features = [ - "alloc", - "std", - ], - version = "0.9.15", -) - http_archive( name = "crossbeam-queue-0.3.8.crate", sha256 = "d1cfb3ea8a53f37c40dea2c7bedcbd88bdfae54f5e2175d6ecaff1c988353add", @@ -2769,32 +2738,32 @@ cargo.rust_library( platform = { "linux-arm64": dict( deps = [ - ":libc-0.2.151", - ":mio-0.8.10", + ":libc-0.2.147", + ":mio-0.8.8", ":signal-hook-0.3.17", ":signal-hook-mio-0.2.3", ], ), "linux-x86_64": dict( deps = [ - ":libc-0.2.151", - ":mio-0.8.10", + ":libc-0.2.147", + ":mio-0.8.8", ":signal-hook-0.3.17", ":signal-hook-mio-0.2.3", ], ), "macos-arm64": dict( deps = [ - ":libc-0.2.151", - ":mio-0.8.10", + ":libc-0.2.147", + ":mio-0.8.8", ":signal-hook-0.3.17", ":signal-hook-mio-0.2.3", ], ), "macos-x86_64": dict( deps = [ - ":libc-0.2.151", - ":mio-0.8.10", + ":libc-0.2.147", + ":mio-0.8.8", ":signal-hook-0.3.17", ":signal-hook-mio-0.2.3", ], @@ -2820,32 +2789,51 @@ cargo.rust_library( ) http_archive( - name = "crossterm-0.27.0.crate", - sha256 = "f476fe445d41c9e991fd07515a6f463074b782242ccf4a5b7b1d1012e70824df", - strip_prefix = "crossterm-0.27.0", - urls = ["https://crates.io/api/v1/crates/crossterm/0.27.0/download"], + name = "crossterm-0.26.1.crate", + sha256 = "a84cda67535339806297f1b331d6dd6320470d2a0fe65381e79ee9e156dd3d13", + strip_prefix = "crossterm-0.26.1", + urls = ["https://crates.io/api/v1/crates/crossterm/0.26.1/download"], visibility = [], ) cargo.rust_library( - name = "crossterm-0.27.0", - srcs = [":crossterm-0.27.0.crate"], + name = "crossterm-0.26.1", + srcs = [":crossterm-0.26.1.crate"], crate = "crossterm", - crate_root 
= "crossterm-0.27.0.crate/src/lib.rs", + crate_root = "crossterm-0.26.1.crate/src/lib.rs", edition = "2021", - features = ["windows"], platform = { "linux-arm64": dict( - deps = [":libc-0.2.151"], + deps = [ + ":libc-0.2.147", + ":mio-0.8.8", + ":signal-hook-0.3.17", + ":signal-hook-mio-0.2.3", + ], ), "linux-x86_64": dict( - deps = [":libc-0.2.151"], + deps = [ + ":libc-0.2.147", + ":mio-0.8.8", + ":signal-hook-0.3.17", + ":signal-hook-mio-0.2.3", + ], ), "macos-arm64": dict( - deps = [":libc-0.2.151"], + deps = [ + ":libc-0.2.147", + ":mio-0.8.8", + ":signal-hook-0.3.17", + ":signal-hook-mio-0.2.3", + ], ), "macos-x86_64": dict( - deps = [":libc-0.2.151"], + deps = [ + ":libc-0.2.147", + ":mio-0.8.8", + ":signal-hook-0.3.17", + ":signal-hook-mio-0.2.3", + ], ), "windows-gnu": dict( deps = [ @@ -2862,7 +2850,7 @@ cargo.rust_library( }, visibility = [], deps = [ - ":bitflags-2.4.1", + ":bitflags-1.3.2", ":parking_lot-0.12.1", ], ) @@ -2893,18 +2881,18 @@ cargo.rust_library( ) http_archive( - name = "crypto-bigint-0.5.5.crate", - sha256 = "0dc92fb57ca44df6db8059111ab3af99a63d5d0f8375d9972e319a379c6bab76", - strip_prefix = "crypto-bigint-0.5.5", - urls = ["https://crates.io/api/v1/crates/crypto-bigint/0.5.5/download"], + name = "crypto-bigint-0.5.2.crate", + sha256 = "cf4c2f4e1afd912bc40bfd6fed5d9dc1f288e0ba01bfcc835cc5bc3eb13efe15", + strip_prefix = "crypto-bigint-0.5.2", + urls = ["https://crates.io/api/v1/crates/crypto-bigint/0.5.2/download"], visibility = [], ) cargo.rust_library( - name = "crypto-bigint-0.5.5", - srcs = [":crypto-bigint-0.5.5.crate"], + name = "crypto-bigint-0.5.2", + srcs = [":crypto-bigint-0.5.2.crate"], crate = "crypto_bigint", - crate_root = "crypto-bigint-0.5.5.crate/src/lib.rs", + crate_root = "crypto-bigint-0.5.2.crate/src/lib.rs", edition = "2021", features = [ "generic-array", @@ -2916,7 +2904,7 @@ cargo.rust_library( ":generic-array-0.14.7", ":rand_core-0.6.4", ":subtle-2.5.0", - ":zeroize-1.7.0", + ":zeroize-1.6.0", ], ) @@ -2938,7 
+2926,7 @@ cargo.rust_library( visibility = [], deps = [ ":generic-array-0.14.7", - ":typenum-1.17.0", + ":typenum-1.16.0", ], ) @@ -2991,128 +2979,11 @@ cargo.rust_library( features = ["u64_backend"], visibility = [], deps = [ - ":byteorder-1.5.0", + ":byteorder-1.4.3", ":digest-0.9.0", ":rand_core-0.5.1", ":subtle-2.5.0", - ":zeroize-1.7.0", - ], -) - -http_archive( - name = "curve25519-dalek-4.1.1.crate", - sha256 = "e89b8c6a2e4b1f45971ad09761aafb85514a84744b67a95e32c3cc1352d1f65c", - strip_prefix = "curve25519-dalek-4.1.1", - urls = ["https://crates.io/api/v1/crates/curve25519-dalek/4.1.1/download"], - visibility = [], -) - -cargo.rust_library( - name = "curve25519-dalek-4.1.1", - srcs = [":curve25519-dalek-4.1.1.crate"], - crate = "curve25519_dalek", - crate_root = "curve25519-dalek-4.1.1.crate/src/lib.rs", - edition = "2021", - env = { - "CARGO_MANIFEST_DIR": "curve25519-dalek-4.1.1.crate", - "CARGO_PKG_AUTHORS": "Isis Lovecruft :Henry de Valence ", - "CARGO_PKG_DESCRIPTION": "A pure-Rust implementation of group operations on ristretto255 and Curve25519", - "CARGO_PKG_NAME": "curve25519-dalek", - "CARGO_PKG_REPOSITORY": "https://github.com/dalek-cryptography/curve25519-dalek/tree/main/curve25519-dalek", - "CARGO_PKG_VERSION": "4.1.1", - "CARGO_PKG_VERSION_MAJOR": "4", - "CARGO_PKG_VERSION_MINOR": "1", - "CARGO_PKG_VERSION_PATCH": "1", - }, - features = ["digest"], - platform = { - "linux-x86_64": dict( - deps = [ - ":cpufeatures-0.2.11", - ":curve25519-dalek-derive-0.1.1", - ], - ), - "macos-x86_64": dict( - deps = [ - ":cpufeatures-0.2.11", - ":curve25519-dalek-derive-0.1.1", - ], - ), - "windows-gnu": dict( - deps = [ - ":cpufeatures-0.2.11", - ":curve25519-dalek-derive-0.1.1", - ], - ), - "windows-msvc": dict( - deps = [ - ":cpufeatures-0.2.11", - ":curve25519-dalek-derive-0.1.1", - ], - ), - }, - rustc_flags = ["@$(location :curve25519-dalek-4.1.1-build-script-run[rustc_flags])"], - visibility = [], - deps = [ - ":cfg-if-1.0.0", - ":digest-0.10.7", - 
":subtle-2.5.0", - ], -) - -cargo.rust_binary( - name = "curve25519-dalek-4.1.1-build-script-build", - srcs = [":curve25519-dalek-4.1.1.crate"], - crate = "build_script_build", - crate_root = "curve25519-dalek-4.1.1.crate/build.rs", - edition = "2021", - env = { - "CARGO_MANIFEST_DIR": "curve25519-dalek-4.1.1.crate", - "CARGO_PKG_AUTHORS": "Isis Lovecruft :Henry de Valence ", - "CARGO_PKG_DESCRIPTION": "A pure-Rust implementation of group operations on ristretto255 and Curve25519", - "CARGO_PKG_NAME": "curve25519-dalek", - "CARGO_PKG_REPOSITORY": "https://github.com/dalek-cryptography/curve25519-dalek/tree/main/curve25519-dalek", - "CARGO_PKG_VERSION": "4.1.1", - "CARGO_PKG_VERSION_MAJOR": "4", - "CARGO_PKG_VERSION_MINOR": "1", - "CARGO_PKG_VERSION_PATCH": "1", - }, - features = ["digest"], - visibility = [], - deps = [ - ":platforms-3.2.0", - ":rustc_version-0.4.0", - ], -) - -buildscript_run( - name = "curve25519-dalek-4.1.1-build-script-run", - package_name = "curve25519-dalek", - buildscript_rule = ":curve25519-dalek-4.1.1-build-script-build", - features = ["digest"], - version = "4.1.1", -) - -http_archive( - name = "curve25519-dalek-derive-0.1.1.crate", - sha256 = "f46882e17999c6cc590af592290432be3bce0428cb0d5f8b6715e4dc7b383eb3", - strip_prefix = "curve25519-dalek-derive-0.1.1", - urls = ["https://crates.io/api/v1/crates/curve25519-dalek-derive/0.1.1/download"], - visibility = [], -) - -cargo.rust_library( - name = "curve25519-dalek-derive-0.1.1", - srcs = [":curve25519-dalek-derive-0.1.1.crate"], - crate = "curve25519_dalek_derive", - crate_root = "curve25519-dalek-derive-0.1.1.crate/src/lib.rs", - edition = "2021", - proc_macro = True, - visibility = [], - deps = [ - ":proc-macro2-1.0.70", - ":quote-1.0.33", - ":syn-2.0.40", + ":zeroize-1.6.0", ], ) @@ -3188,7 +3059,7 @@ cargo.rust_library( deps = [ ":fnv-1.0.7", ":ident_case-1.0.1", - ":proc-macro2-1.0.70", + ":proc-macro2-1.0.66", ":quote-1.0.33", ":strsim-0.10.0", ":syn-1.0.109", @@ -3217,10 +3088,10 @@ 
cargo.rust_library( deps = [ ":fnv-1.0.7", ":ident_case-1.0.1", - ":proc-macro2-1.0.70", + ":proc-macro2-1.0.66", ":quote-1.0.33", ":strsim-0.10.0", - ":syn-2.0.40", + ":syn-2.0.29", ], ) @@ -3266,7 +3137,7 @@ cargo.rust_library( deps = [ ":darling_core-0.20.3", ":quote-1.0.33", - ":syn-2.0.40", + ":syn-2.0.29", ], ) @@ -3289,47 +3160,47 @@ cargo.rust_library( ":anyhow-1.0.75", ":html-escape-0.2.13", ":nom-7.1.3", - ":ordered-float-2.10.1", + ":ordered-float-2.10.0", ], ) http_archive( - name = "dashmap-5.5.3.crate", - sha256 = "978747c1d849a7d2ee5e8adc0159961c48fb7e5db2f06af6723b80123bb53856", - strip_prefix = "dashmap-5.5.3", - urls = ["https://crates.io/api/v1/crates/dashmap/5.5.3/download"], + name = "dashmap-5.5.1.crate", + sha256 = "edd72493923899c6f10c641bdbdeddc7183d6396641d99c1a0d1597f37f92e28", + strip_prefix = "dashmap-5.5.1", + urls = ["https://crates.io/api/v1/crates/dashmap/5.5.1/download"], visibility = [], ) cargo.rust_library( - name = "dashmap-5.5.3", - srcs = [":dashmap-5.5.3.crate"], + name = "dashmap-5.5.1", + srcs = [":dashmap-5.5.1.crate"], crate = "dashmap", - crate_root = "dashmap-5.5.3.crate/src/lib.rs", + crate_root = "dashmap-5.5.1.crate/src/lib.rs", edition = "2018", visibility = [], deps = [ ":cfg-if-1.0.0", - ":hashbrown-0.14.3", - ":lock_api-0.4.11", - ":once_cell-1.19.0", - ":parking_lot_core-0.9.9", + ":hashbrown-0.14.0", + ":lock_api-0.4.10", + ":once_cell-1.18.0", + ":parking_lot_core-0.9.8", ], ) http_archive( - name = "data-encoding-2.5.0.crate", - sha256 = "7e962a19be5cfc3f3bf6dd8f61eb50107f356ad6270fbb3ed41476571db78be5", - strip_prefix = "data-encoding-2.5.0", - urls = ["https://crates.io/api/v1/crates/data-encoding/2.5.0/download"], + name = "data-encoding-2.4.0.crate", + sha256 = "c2e66c9d817f1720209181c316d28635c050fa304f9c79e47a520882661b7308", + strip_prefix = "data-encoding-2.4.0", + urls = ["https://crates.io/api/v1/crates/data-encoding/2.4.0/download"], visibility = [], ) cargo.rust_library( - name = 
"data-encoding-2.5.0", - srcs = [":data-encoding-2.5.0.crate"], + name = "data-encoding-2.4.0", + srcs = [":data-encoding-2.4.0.crate"], crate = "data_encoding", - crate_root = "data-encoding-2.5.0.crate/src/lib.rs", + crate_root = "data-encoding-2.4.0.crate/src/lib.rs", edition = "2018", features = [ "alloc", @@ -3368,11 +3239,11 @@ cargo.rust_library( ], visibility = [], deps = [ - ":async-trait-0.1.74", - ":deadpool-runtime-0.1.3", + ":async-trait-0.1.73", + ":deadpool-runtime-0.1.2", ":num_cpus-1.16.0", ":retain_mut-0.1.9", - ":tokio-1.35.0", + ":tokio-1.32.0", ], ) @@ -3404,28 +3275,28 @@ cargo.rust_library( deps = [ ":deadpool-0.9.5", ":log-0.4.20", - ":tokio-1.35.0", - ":tokio-postgres-0.7.10", + ":tokio-1.32.0", + ":tokio-postgres-0.7.9", ], ) http_archive( - name = "deadpool-runtime-0.1.3.crate", - sha256 = "63dfa964fe2a66f3fde91fc70b267fe193d822c7e603e2a675a49a7f46ad3f49", - strip_prefix = "deadpool-runtime-0.1.3", - urls = ["https://crates.io/api/v1/crates/deadpool-runtime/0.1.3/download"], + name = "deadpool-runtime-0.1.2.crate", + sha256 = "eaa37046cc0f6c3cc6090fbdbf73ef0b8ef4cfcc37f6befc0020f63e8cf121e1", + strip_prefix = "deadpool-runtime-0.1.2", + urls = ["https://crates.io/api/v1/crates/deadpool-runtime/0.1.2/download"], visibility = [], ) cargo.rust_library( - name = "deadpool-runtime-0.1.3", - srcs = [":deadpool-runtime-0.1.3.crate"], + name = "deadpool-runtime-0.1.2", + srcs = [":deadpool-runtime-0.1.2.crate"], crate = "deadpool_runtime", - crate_root = "deadpool-runtime-0.1.3.crate/src/lib.rs", + crate_root = "deadpool-runtime-0.1.2.crate/src/lib.rs", edition = "2018", features = ["tokio_1"], named_deps = { - "tokio_1": ":tokio-1.35.0", + "tokio_1": ":tokio-1.32.0", }, visibility = [], ) @@ -3481,7 +3352,7 @@ cargo.rust_library( deps = [ ":const-oid-0.9.5", ":pem-rfc7468-0.6.0", - ":zeroize-1.7.0", + ":zeroize-1.6.0", ], ) @@ -3510,35 +3381,31 @@ cargo.rust_library( deps = [ ":const-oid-0.9.5", ":pem-rfc7468-0.7.0", - ":zeroize-1.7.0", + 
":zeroize-1.6.0", ], ) http_archive( - name = "deranged-0.3.10.crate", - sha256 = "8eb30d70a07a3b04884d2677f06bec33509dc67ca60d92949e5535352d3191dc", - strip_prefix = "deranged-0.3.10", - urls = ["https://crates.io/api/v1/crates/deranged/0.3.10/download"], + name = "deranged-0.3.8.crate", + sha256 = "f2696e8a945f658fd14dc3b87242e6b80cd0f36ff04ea560fa39082368847946", + strip_prefix = "deranged-0.3.8", + urls = ["https://crates.io/api/v1/crates/deranged/0.3.8/download"], visibility = [], ) cargo.rust_library( - name = "deranged-0.3.10", - srcs = [":deranged-0.3.10.crate"], + name = "deranged-0.3.8", + srcs = [":deranged-0.3.8.crate"], crate = "deranged", - crate_root = "deranged-0.3.10.crate/src/lib.rs", + crate_root = "deranged-0.3.8.crate/src/lib.rs", edition = "2021", features = [ "alloc", - "powerfmt", "serde", "std", ], visibility = [], - deps = [ - ":powerfmt-0.2.0", - ":serde-1.0.193", - ], + deps = [":serde-1.0.186"], ) alias( @@ -3586,7 +3453,7 @@ cargo.rust_library( visibility = [], deps = [ ":darling-0.14.4", - ":proc-macro2-1.0.70", + ":proc-macro2-1.0.66", ":quote-1.0.33", ":syn-1.0.109", ], @@ -3666,7 +3533,7 @@ cargo.rust_library( visibility = [], deps = [ ":convert_case-0.4.0", - ":proc-macro2-1.0.70", + ":proc-macro2-1.0.66", ":quote-1.0.33", ":syn-1.0.109", ], @@ -3805,16 +3672,16 @@ cargo.rust_library( edition = "2015", platform = { "linux-arm64": dict( - deps = [":libc-0.2.151"], + deps = [":libc-0.2.147"], ), "linux-x86_64": dict( - deps = [":libc-0.2.151"], + deps = [":libc-0.2.147"], ), "macos-arm64": dict( - deps = [":libc-0.2.151"], + deps = [":libc-0.2.147"], ), "macos-x86_64": dict( - deps = [":libc-0.2.151"], + deps = [":libc-0.2.147"], ), "windows-gnu": dict( deps = [":winapi-0.3.9"], @@ -3842,16 +3709,16 @@ cargo.rust_library( edition = "2015", platform = { "linux-arm64": dict( - deps = [":libc-0.2.151"], + deps = [":libc-0.2.147"], ), "linux-x86_64": dict( - deps = [":libc-0.2.151"], + deps = [":libc-0.2.147"], ), "macos-arm64": dict( - 
deps = [":libc-0.2.151"], + deps = [":libc-0.2.147"], ), "macos-x86_64": dict( - deps = [":libc-0.2.151"], + deps = [":libc-0.2.147"], ), "windows-gnu": dict( deps = [":windows-sys-0.48.0"], @@ -3901,21 +3768,21 @@ cargo.rust_library( deps = [ ":asynchronous-codec-0.6.2", ":base64-0.13.1", - ":byteorder-1.5.0", - ":bytes-1.5.0", - ":chrono-0.4.31", + ":byteorder-1.4.3", + ":bytes-1.4.0", + ":chrono-0.4.26", ":containers-api-0.9.0", ":docker-api-stubs-0.6.0", ":futures-util-0.3.29", - ":http-0.2.11", + ":http-0.2.9", ":hyper-0.14.27", ":log-0.4.20", ":paste-1.0.14", - ":serde-1.0.193", - ":serde_json-1.0.108", + ":serde-1.0.186", + ":serde_json-1.0.105", ":tar-0.4.40", - ":thiserror-1.0.50", - ":url-2.5.0", + ":thiserror-1.0.47", + ":url-2.4.0", ], ) @@ -3927,9 +3794,9 @@ cargo.rust_library( edition = "2021", visibility = [], deps = [ - ":chrono-0.4.31", - ":serde-1.0.193", - ":serde_json-1.0.108", + ":chrono-0.4.26", + ":serde-1.0.186", + ":serde_json-1.0.105", ":serde_with-2.3.3", ], ) @@ -3953,40 +3820,40 @@ cargo.rust_library( alias( name = "dyn-clone", - actual = ":dyn-clone-1.0.16", + actual = ":dyn-clone-1.0.13", visibility = ["PUBLIC"], ) http_archive( - name = "dyn-clone-1.0.16.crate", - sha256 = "545b22097d44f8a9581187cdf93de7a71e4722bf51200cfaba810865b49a495d", - strip_prefix = "dyn-clone-1.0.16", - urls = ["https://crates.io/api/v1/crates/dyn-clone/1.0.16/download"], + name = "dyn-clone-1.0.13.crate", + sha256 = "bbfc4744c1b8f2a09adc0e55242f60b1af195d88596bd8700be74418c056c555", + strip_prefix = "dyn-clone-1.0.13", + urls = ["https://crates.io/api/v1/crates/dyn-clone/1.0.13/download"], visibility = [], ) cargo.rust_library( - name = "dyn-clone-1.0.16", - srcs = [":dyn-clone-1.0.16.crate"], + name = "dyn-clone-1.0.13", + srcs = [":dyn-clone-1.0.13.crate"], crate = "dyn_clone", - crate_root = "dyn-clone-1.0.16.crate/src/lib.rs", + crate_root = "dyn-clone-1.0.13.crate/src/lib.rs", edition = "2018", visibility = [], ) http_archive( - name = 
"ecdsa-0.16.9.crate", - sha256 = "ee27f32b5c5292967d2d4a9d7f1e0b0aed2c15daded5a60300e4abb9d8020bca", - strip_prefix = "ecdsa-0.16.9", - urls = ["https://crates.io/api/v1/crates/ecdsa/0.16.9/download"], + name = "ecdsa-0.16.8.crate", + sha256 = "a4b1e0c257a9e9f25f90ff76d7a68360ed497ee519c8e428d1825ef0000799d4", + strip_prefix = "ecdsa-0.16.8", + urls = ["https://crates.io/api/v1/crates/ecdsa/0.16.8/download"], visibility = [], ) cargo.rust_library( - name = "ecdsa-0.16.9", - srcs = [":ecdsa-0.16.9.crate"], + name = "ecdsa-0.16.8", + srcs = [":ecdsa-0.16.8.crate"], crate = "ecdsa", - crate_root = "ecdsa-0.16.9.crate/src/lib.rs", + crate_root = "ecdsa-0.16.8.crate/src/lib.rs", edition = "2021", features = [ "alloc", @@ -4006,10 +3873,10 @@ cargo.rust_library( deps = [ ":der-0.7.8", ":digest-0.10.7", - ":elliptic-curve-0.13.8", + ":elliptic-curve-0.13.5", ":rfc6979-0.4.0", - ":signature-2.2.0", - ":spki-0.7.3", + ":signature-2.1.0", + ":spki-0.7.2", ], ) @@ -4031,24 +3898,6 @@ cargo.rust_library( deps = [":signature-1.6.4"], ) -http_archive( - name = "ed25519-2.2.3.crate", - sha256 = "115531babc129696a58c64a4fef0a8bf9e9698629fb97e9e40767d235cfbcd53", - strip_prefix = "ed25519-2.2.3", - urls = ["https://crates.io/api/v1/crates/ed25519/2.2.3/download"], - visibility = [], -) - -cargo.rust_library( - name = "ed25519-2.2.3", - srcs = [":ed25519-2.2.3.crate"], - crate = "ed25519", - crate_root = "ed25519-2.2.3.crate/src/lib.rs", - edition = "2021", - visibility = [], - deps = [":signature-2.2.0"], -) - http_archive( name = "ed25519-compact-2.0.4.crate", sha256 = "6a3d382e8464107391c8706b4c14b087808ecb909f6c15c34114bc42e53a9e4c", @@ -4075,7 +3924,7 @@ cargo.rust_library( visibility = [], deps = [ ":ct-codecs-1.1.1", - ":getrandom-0.2.11", + ":getrandom-0.2.10", ], ) @@ -4099,51 +3948,23 @@ cargo.rust_library( ":curve25519-dalek-3.2.0", ":ed25519-1.5.3", ":sha2-0.9.9", - ":zeroize-1.7.0", + ":zeroize-1.6.0", ], ) http_archive( - name = "ed25519-dalek-2.1.0.crate", - sha256 = 
"1f628eaec48bfd21b865dc2950cfa014450c01d2fa2b69a86c2fd5844ec523c0", - strip_prefix = "ed25519-dalek-2.1.0", - urls = ["https://crates.io/api/v1/crates/ed25519-dalek/2.1.0/download"], + name = "educe-0.4.22.crate", + sha256 = "079044df30bb07de7d846d41a184c4b00e66ebdac93ee459253474f3a47e50ae", + strip_prefix = "educe-0.4.22", + urls = ["https://crates.io/api/v1/crates/educe/0.4.22/download"], visibility = [], ) cargo.rust_library( - name = "ed25519-dalek-2.1.0", - srcs = [":ed25519-dalek-2.1.0.crate"], - crate = "ed25519_dalek", - crate_root = "ed25519-dalek-2.1.0.crate/src/lib.rs", - edition = "2021", - features = [ - "digest", - "signature", - ], - visibility = [], - deps = [ - ":curve25519-dalek-4.1.1", - ":ed25519-2.2.3", - ":sha2-0.10.8", - ":signature-2.2.0", - ":subtle-2.5.0", - ], -) - -http_archive( - name = "educe-0.4.23.crate", - sha256 = "0f0042ff8246a363dbe77d2ceedb073339e85a804b9a47636c6e016a9a32c05f", - strip_prefix = "educe-0.4.23", - urls = ["https://crates.io/api/v1/crates/educe/0.4.23/download"], - visibility = [], -) - -cargo.rust_library( - name = "educe-0.4.23", - srcs = [":educe-0.4.23.crate"], - crate = "educe", - crate_root = "educe-0.4.23.crate/src/lib.rs", + name = "educe-0.4.22", + srcs = [":educe-0.4.22.crate"], + crate = "educe", + crate_root = "educe-0.4.22.crate/src/lib.rs", edition = "2021", features = [ "Debug", @@ -4152,8 +3973,8 @@ cargo.rust_library( proc_macro = True, visibility = [], deps = [ - ":enum-ordinalize-3.1.15", - ":proc-macro2-1.0.70", + ":enum-ordinalize-3.1.13", + ":proc-macro2-1.0.66", ":quote-1.0.33", ":syn-1.0.109", ], @@ -4181,18 +4002,18 @@ cargo.rust_library( ) http_archive( - name = "elliptic-curve-0.13.8.crate", - sha256 = "b5e6043086bf7973472e0c7dff2142ea0b680d30e18d9cc40f267efbf222bd47", - strip_prefix = "elliptic-curve-0.13.8", - urls = ["https://crates.io/api/v1/crates/elliptic-curve/0.13.8/download"], + name = "elliptic-curve-0.13.5.crate", + sha256 = 
"968405c8fdc9b3bf4df0a6638858cc0b52462836ab6b1c87377785dd09cf1c0b", + strip_prefix = "elliptic-curve-0.13.5", + urls = ["https://crates.io/api/v1/crates/elliptic-curve/0.13.5/download"], visibility = [], ) cargo.rust_library( - name = "elliptic-curve-0.13.8", - srcs = [":elliptic-curve-0.13.8.crate"], + name = "elliptic-curve-0.13.5", + srcs = [":elliptic-curve-0.13.5.crate"], crate = "elliptic_curve", - crate_root = "elliptic-curve-0.13.8.crate/src/lib.rs", + crate_root = "elliptic-curve-0.13.5.crate/src/lib.rs", edition = "2021", features = [ "alloc", @@ -4210,7 +4031,7 @@ cargo.rust_library( visibility = [], deps = [ ":base16ct-0.2.0", - ":crypto-bigint-0.5.5", + ":crypto-bigint-0.5.2", ":digest-0.10.7", ":ff-0.13.0", ":generic-array-0.14.7", @@ -4221,10 +4042,28 @@ cargo.rust_library( ":rand_core-0.6.4", ":sec1-0.7.3", ":subtle-2.5.0", - ":zeroize-1.7.0", + ":zeroize-1.6.0", ], ) +http_archive( + name = "embedded-io-0.4.0.crate", + sha256 = "ef1a6892d9eef45c8fa6b9e0086428a2cca8491aca8f787c534a3d6d0bcb3ced", + strip_prefix = "embedded-io-0.4.0", + urls = ["https://crates.io/api/v1/crates/embedded-io/0.4.0/download"], + visibility = [], +) + +cargo.rust_library( + name = "embedded-io-0.4.0", + srcs = [":embedded-io-0.4.0.crate"], + crate = "embedded_io", + crate_root = "embedded-io-0.4.0.crate/src/lib.rs", + edition = "2021", + features = ["alloc"], + visibility = [], +) + http_archive( name = "encode_unicode-0.3.6.crate", sha256 = "a357d28ed41a50f9c765dbfe56cbc04a64e53e5fc58ba79fbc34c10ef3df831f", @@ -4269,27 +4108,27 @@ cargo.rust_library( ) http_archive( - name = "enum-ordinalize-3.1.15.crate", - sha256 = "1bf1fa3f06bbff1ea5b1a9c7b14aa992a39657db60a2759457328d7e058f49ee", - strip_prefix = "enum-ordinalize-3.1.15", - urls = ["https://crates.io/api/v1/crates/enum-ordinalize/3.1.15/download"], + name = "enum-ordinalize-3.1.13.crate", + sha256 = "e4f76552f53cefc9a7f64987c3701b99d982f7690606fd67de1d09712fbf52f1", + strip_prefix = "enum-ordinalize-3.1.13", + urls = 
["https://crates.io/api/v1/crates/enum-ordinalize/3.1.13/download"], visibility = [], ) cargo.rust_library( - name = "enum-ordinalize-3.1.15", - srcs = [":enum-ordinalize-3.1.15.crate"], + name = "enum-ordinalize-3.1.13", + srcs = [":enum-ordinalize-3.1.13.crate"], crate = "enum_ordinalize", - crate_root = "enum-ordinalize-3.1.15.crate/src/lib.rs", + crate_root = "enum-ordinalize-3.1.13.crate/src/lib.rs", edition = "2021", proc_macro = True, visibility = [], deps = [ ":num-bigint-0.4.4", - ":num-traits-0.2.17", - ":proc-macro2-1.0.70", + ":num-traits-0.2.16", + ":proc-macro2-1.0.66", ":quote-1.0.33", - ":syn-2.0.40", + ":syn-2.0.29", ], ) @@ -4311,38 +4150,37 @@ cargo.rust_library( ) http_archive( - name = "errno-0.3.8.crate", - sha256 = "a258e46cdc063eb8519c00b9fc845fc47bcfca4130e2f08e88665ceda8474245", - strip_prefix = "errno-0.3.8", - urls = ["https://crates.io/api/v1/crates/errno/0.3.8/download"], + name = "errno-0.3.2.crate", + sha256 = "6b30f669a7961ef1631673d2766cc92f52d64f7ef354d4fe0ddfd30ed52f0f4f", + strip_prefix = "errno-0.3.2", + urls = ["https://crates.io/api/v1/crates/errno/0.3.2/download"], visibility = [], ) cargo.rust_library( - name = "errno-0.3.8", - srcs = [":errno-0.3.8.crate"], + name = "errno-0.3.2", + srcs = [":errno-0.3.2.crate"], crate = "errno", - crate_root = "errno-0.3.8.crate/src/lib.rs", + crate_root = "errno-0.3.2.crate/src/lib.rs", edition = "2018", - features = ["std"], platform = { "linux-arm64": dict( - deps = [":libc-0.2.151"], + deps = [":libc-0.2.147"], ), "linux-x86_64": dict( - deps = [":libc-0.2.151"], + deps = [":libc-0.2.147"], ), "macos-arm64": dict( - deps = [":libc-0.2.151"], + deps = [":libc-0.2.147"], ), "macos-x86_64": dict( - deps = [":libc-0.2.151"], + deps = [":libc-0.2.147"], ), "windows-gnu": dict( - deps = [":windows-sys-0.52.0"], + deps = [":windows-sys-0.48.0"], ), "windows-msvc": dict( - deps = [":windows-sys-0.52.0"], + deps = [":windows-sys-0.48.0"], ), }, visibility = [], @@ -4366,18 +4204,18 @@ 
cargo.rust_library( ) http_archive( - name = "eyre-0.6.10.crate", - sha256 = "8bbb8258be8305fb0237d7b295f47bb24ff1b136a535f473baf40e70468515aa", - strip_prefix = "eyre-0.6.10", - urls = ["https://crates.io/api/v1/crates/eyre/0.6.10/download"], + name = "eyre-0.6.8.crate", + sha256 = "4c2b6b5a29c02cdc822728b7d7b8ae1bab3e3b05d44522770ddd49722eeac7eb", + strip_prefix = "eyre-0.6.8", + urls = ["https://crates.io/api/v1/crates/eyre/0.6.8/download"], visibility = [], ) cargo.rust_library( - name = "eyre-0.6.10", - srcs = [":eyre-0.6.10.crate"], + name = "eyre-0.6.8", + srcs = [":eyre-0.6.8.crate"], crate = "eyre", - crate_root = "eyre-0.6.10.crate/src/lib.rs", + crate_root = "eyre-0.6.8.crate/src/lib.rs", edition = "2018", features = [ "auto-install", @@ -4387,7 +4225,7 @@ cargo.rust_library( visibility = [], deps = [ ":indenter-0.3.3", - ":once_cell-1.19.0", + ":once_cell-1.18.0", ], ) @@ -4430,18 +4268,18 @@ cargo.rust_library( ) http_archive( - name = "fastrand-2.0.1.crate", - sha256 = "25cbce373ec4653f1a01a31e8a5e5ec0c622dc27ff9c4e6606eefef5cbbed4a5", - strip_prefix = "fastrand-2.0.1", - urls = ["https://crates.io/api/v1/crates/fastrand/2.0.1/download"], + name = "fastrand-2.0.0.crate", + sha256 = "6999dc1837253364c2ebb0704ba97994bd874e8f195d665c50b7548f6ea92764", + strip_prefix = "fastrand-2.0.0", + urls = ["https://crates.io/api/v1/crates/fastrand/2.0.0/download"], visibility = [], ) cargo.rust_library( - name = "fastrand-2.0.1", - srcs = [":fastrand-2.0.1.crate"], + name = "fastrand-2.0.0", + srcs = [":fastrand-2.0.0.crate"], crate = "fastrand", - crate_root = "fastrand-2.0.1.crate/src/lib.rs", + crate_root = "fastrand-2.0.0.crate/src/lib.rs", edition = "2018", features = [ "alloc", @@ -4474,65 +4312,43 @@ cargo.rust_library( ) http_archive( - name = "filetime-0.2.23.crate", - sha256 = "1ee447700ac8aa0b2f2bd7bc4462ad686ba06baa6727ac149a2d6277f0d240fd", - strip_prefix = "filetime-0.2.23", - urls = ["https://crates.io/api/v1/crates/filetime/0.2.23/download"], + name 
= "filetime-0.2.22.crate", + sha256 = "d4029edd3e734da6fe05b6cd7bd2960760a616bd2ddd0d59a0124746d6272af0", + strip_prefix = "filetime-0.2.22", + urls = ["https://crates.io/api/v1/crates/filetime/0.2.22/download"], visibility = [], ) cargo.rust_library( - name = "filetime-0.2.23", - srcs = [":filetime-0.2.23.crate"], + name = "filetime-0.2.22", + srcs = [":filetime-0.2.22.crate"], crate = "filetime", - crate_root = "filetime-0.2.23.crate/src/lib.rs", + crate_root = "filetime-0.2.22.crate/src/lib.rs", edition = "2018", platform = { "linux-arm64": dict( - deps = [":libc-0.2.151"], + deps = [":libc-0.2.147"], ), "linux-x86_64": dict( - deps = [":libc-0.2.151"], + deps = [":libc-0.2.147"], ), "macos-arm64": dict( - deps = [":libc-0.2.151"], + deps = [":libc-0.2.147"], ), "macos-x86_64": dict( - deps = [":libc-0.2.151"], + deps = [":libc-0.2.147"], ), "windows-gnu": dict( - deps = [":windows-sys-0.52.0"], + deps = [":windows-sys-0.48.0"], ), "windows-msvc": dict( - deps = [":windows-sys-0.52.0"], + deps = [":windows-sys-0.48.0"], ), }, visibility = [], deps = [":cfg-if-1.0.0"], ) -http_archive( - name = "finl_unicode-1.2.0.crate", - sha256 = "8fcfdc7a0362c9f4444381a9e697c79d435fe65b52a37466fc2c1184cee9edc6", - strip_prefix = "finl_unicode-1.2.0", - urls = ["https://crates.io/api/v1/crates/finl_unicode/1.2.0/download"], - visibility = [], -) - -cargo.rust_library( - name = "finl_unicode-1.2.0", - srcs = [":finl_unicode-1.2.0.crate"], - crate = "finl_unicode", - crate_root = "finl_unicode-1.2.0.crate/src/lib.rs", - edition = "2021", - features = [ - "categories", - "default", - "grapheme_clusters", - ], - visibility = [], -) - http_archive( name = "fixedbitset-0.4.2.crate", sha256 = "0ce7134b9999ecaf8bcd65542e436736ef32ddca1b3e06094cb6ec5755203b80", @@ -4552,23 +4368,23 @@ cargo.rust_library( alias( name = "flate2", - actual = ":flate2-1.0.28", + actual = ":flate2-1.0.27", visibility = ["PUBLIC"], ) http_archive( - name = "flate2-1.0.28.crate", - sha256 = 
"46303f565772937ffe1d394a4fac6f411c6013172fadde9dcdb1e147a086940e", - strip_prefix = "flate2-1.0.28", - urls = ["https://crates.io/api/v1/crates/flate2/1.0.28/download"], + name = "flate2-1.0.27.crate", + sha256 = "c6c98ee8095e9d1dcbf2fcc6d95acccb90d1c81db1e44725c6a984b1dbdfb010", + strip_prefix = "flate2-1.0.27", + urls = ["https://crates.io/api/v1/crates/flate2/1.0.27/download"], visibility = [], ) cargo.rust_library( - name = "flate2-1.0.28", - srcs = [":flate2-1.0.28.crate"], + name = "flate2-1.0.27", + srcs = [":flate2-1.0.27.crate"], crate = "flate2", - crate_root = "flate2-1.0.28.crate/src/lib.rs", + crate_root = "flate2-1.0.27.crate/src/lib.rs", edition = "2018", features = [ "any_impl", @@ -4605,18 +4421,18 @@ cargo.rust_library( ) http_archive( - name = "form_urlencoded-1.2.1.crate", - sha256 = "e13624c2627564efccf4934284bdd98cbaa14e79b0b5a141218e507b3a823456", - strip_prefix = "form_urlencoded-1.2.1", - urls = ["https://crates.io/api/v1/crates/form_urlencoded/1.2.1/download"], + name = "form_urlencoded-1.2.0.crate", + sha256 = "a62bc1cf6f830c2ec14a513a9fb124d0a213a629668a4186f329db21fe045652", + strip_prefix = "form_urlencoded-1.2.0", + urls = ["https://crates.io/api/v1/crates/form_urlencoded/1.2.0/download"], visibility = [], ) cargo.rust_library( - name = "form_urlencoded-1.2.1", - srcs = [":form_urlencoded-1.2.1.crate"], + name = "form_urlencoded-1.2.0", + srcs = [":form_urlencoded-1.2.0.crate"], crate = "form_urlencoded", - crate_root = "form_urlencoded-1.2.1.crate/src/lib.rs", + crate_root = "form_urlencoded-1.2.0.crate/src/lib.rs", edition = "2018", features = [ "alloc", @@ -4624,7 +4440,7 @@ cargo.rust_library( "std", ], visibility = [], - deps = [":percent-encoding-2.3.1"], + deps = [":percent-encoding-2.3.0"], ) http_archive( @@ -4646,23 +4462,23 @@ cargo.rust_library( alias( name = "futures", - actual = ":futures-0.3.29", + actual = ":futures-0.3.28", visibility = ["PUBLIC"], ) http_archive( - name = "futures-0.3.29.crate", - sha256 = 
"da0290714b38af9b4a7b094b8a37086d1b4e61f2df9122c3cad2577669145335", - strip_prefix = "futures-0.3.29", - urls = ["https://crates.io/api/v1/crates/futures/0.3.29/download"], + name = "futures-0.3.28.crate", + sha256 = "23342abe12aba583913b2e62f22225ff9c950774065e4bfb61a19cd9770fec40", + strip_prefix = "futures-0.3.28", + urls = ["https://crates.io/api/v1/crates/futures/0.3.28/download"], visibility = [], ) cargo.rust_library( - name = "futures-0.3.29", - srcs = [":futures-0.3.29.crate"], + name = "futures-0.3.28", + srcs = [":futures-0.3.28.crate"], crate = "futures", - crate_root = "futures-0.3.29.crate/src/lib.rs", + crate_root = "futures-0.3.28.crate/src/lib.rs", edition = "2018", features = [ "alloc", @@ -4676,7 +4492,7 @@ cargo.rust_library( deps = [ ":futures-channel-0.3.29", ":futures-core-0.3.29", - ":futures-executor-0.3.29", + ":futures-executor-0.3.28", ":futures-io-0.3.29", ":futures-sink-0.3.29", ":futures-task-0.3.29", @@ -4757,18 +4573,18 @@ cargo.rust_library( ) http_archive( - name = "futures-executor-0.3.29.crate", - sha256 = "0f4fb8693db0cf099eadcca0efe2a5a22e4550f98ed16aba6c48700da29597bc", - strip_prefix = "futures-executor-0.3.29", - urls = ["https://crates.io/api/v1/crates/futures-executor/0.3.29/download"], + name = "futures-executor-0.3.28.crate", + sha256 = "ccecee823288125bd88b4d7f565c9e58e41858e47ab72e8ea2d64e93624386e0", + strip_prefix = "futures-executor-0.3.28", + urls = ["https://crates.io/api/v1/crates/futures-executor/0.3.28/download"], visibility = [], ) cargo.rust_library( - name = "futures-executor-0.3.29", - srcs = [":futures-executor-0.3.29.crate"], + name = "futures-executor-0.3.28", + srcs = [":futures-executor-0.3.28.crate"], crate = "futures_executor", - crate_root = "futures-executor-0.3.29.crate/src/lib.rs", + crate_root = "futures-executor-0.3.28.crate/src/lib.rs", edition = "2018", features = [ "default", @@ -4805,7 +4621,7 @@ cargo.rust_library( visibility = [], deps = [ ":futures-core-0.3.29", - ":lock_api-0.4.11", + 
":lock_api-0.4.10", ":parking_lot-0.11.2", ], ) @@ -4866,10 +4682,10 @@ cargo.rust_library( ":fastrand-1.9.0", ":futures-core-0.3.29", ":futures-io-0.3.29", - ":memchr-2.6.4", - ":parking-2.2.0", - ":pin-project-lite-0.2.13", - ":waker-fn-1.1.1", + ":memchr-2.5.0", + ":parking-2.1.0", + ":pin-project-lite-0.2.12", + ":waker-fn-1.1.0", ], ) @@ -4890,9 +4706,9 @@ cargo.rust_library( proc_macro = True, visibility = [], deps = [ - ":proc-macro2-1.0.70", + ":proc-macro2-1.0.66", ":quote-1.0.33", - ":syn-2.0.40", + ":syn-2.0.29", ], ) @@ -4999,8 +4815,8 @@ cargo.rust_library( ":futures-macro-0.3.29", ":futures-sink-0.3.29", ":futures-task-0.3.29", - ":memchr-2.6.4", - ":pin-project-lite-0.2.13", + ":memchr-2.5.0", + ":pin-project-lite-0.2.12", ":pin-utils-0.1.0", ":slab-0.4.9", ], @@ -5024,8 +4840,8 @@ cargo.rust_library( visibility = [], deps = [ ":bytes-0.5.6", - ":futures-0.3.29", - ":memchr-2.6.4", + ":futures-0.3.28", + ":memchr-2.5.0", ":pin-project-0.4.30", ], ) @@ -5050,8 +4866,8 @@ cargo.rust_library( ], visibility = [], deps = [ - ":typenum-1.17.0", - ":zeroize-1.7.0", + ":typenum-1.16.0", + ":zeroize-1.6.0", ], ) @@ -5077,16 +4893,16 @@ cargo.rust_library( ], platform = { "linux-arm64": dict( - deps = [":libc-0.2.151"], + deps = [":libc-0.2.147"], ), "linux-x86_64": dict( - deps = [":libc-0.2.151"], + deps = [":libc-0.2.147"], ), "macos-arm64": dict( - deps = [":libc-0.2.151"], + deps = [":libc-0.2.147"], ), "macos-x86_64": dict( - deps = [":libc-0.2.151"], + deps = [":libc-0.2.147"], ), }, visibility = [], @@ -5094,18 +4910,18 @@ cargo.rust_library( ) http_archive( - name = "getrandom-0.2.11.crate", - sha256 = "fe9006bed769170c11f845cf00c7c1e9092aeb3f268e007c3e760ac68008070f", - strip_prefix = "getrandom-0.2.11", - urls = ["https://crates.io/api/v1/crates/getrandom/0.2.11/download"], + name = "getrandom-0.2.10.crate", + sha256 = "be4136b2a15dd319360be1c07d9933517ccf0be8f16bf62a3bee4f0d618df427", + strip_prefix = "getrandom-0.2.10", + urls = 
["https://crates.io/api/v1/crates/getrandom/0.2.10/download"], visibility = [], ) cargo.rust_library( - name = "getrandom-0.2.11", - srcs = [":getrandom-0.2.11.crate"], + name = "getrandom-0.2.10", + srcs = [":getrandom-0.2.10.crate"], crate = "getrandom", - crate_root = "getrandom-0.2.11.crate/src/lib.rs", + crate_root = "getrandom-0.2.10.crate/src/lib.rs", edition = "2018", features = [ "custom", @@ -5113,16 +4929,16 @@ cargo.rust_library( ], platform = { "linux-arm64": dict( - deps = [":libc-0.2.151"], + deps = [":libc-0.2.147"], ), "linux-x86_64": dict( - deps = [":libc-0.2.151"], + deps = [":libc-0.2.147"], ), "macos-arm64": dict( - deps = [":libc-0.2.151"], + deps = [":libc-0.2.147"], ), "macos-x86_64": dict( - deps = [":libc-0.2.151"], + deps = [":libc-0.2.147"], ), }, visibility = [], @@ -5130,18 +4946,18 @@ cargo.rust_library( ) http_archive( - name = "gimli-0.28.1.crate", - sha256 = "4271d37baee1b8c7e4b708028c57d816cf9d2434acb33a549475f78c181f6253", - strip_prefix = "gimli-0.28.1", - urls = ["https://crates.io/api/v1/crates/gimli/0.28.1/download"], + name = "gimli-0.28.0.crate", + sha256 = "6fb8d784f27acf97159b40fc4db5ecd8aa23b9ad5ef69cdd136d3bc80665f0c0", + strip_prefix = "gimli-0.28.0", + urls = ["https://crates.io/api/v1/crates/gimli/0.28.0/download"], visibility = [], ) cargo.rust_library( - name = "gimli-0.28.1", - srcs = [":gimli-0.28.1.crate"], + name = "gimli-0.28.0", + srcs = [":gimli-0.28.0.crate"], crate = "gimli", - crate_root = "gimli-0.28.1.crate/src/lib.rs", + crate_root = "gimli-0.28.0.crate/src/lib.rs", edition = "2018", features = [ "read", @@ -5151,30 +4967,30 @@ cargo.rust_library( ) http_archive( - name = "globset-0.4.14.crate", - sha256 = "57da3b9b5b85bd66f31093f8c408b90a74431672542466497dcbdfdc02034be1", - strip_prefix = "globset-0.4.14", - urls = ["https://crates.io/api/v1/crates/globset/0.4.14/download"], + name = "globset-0.4.13.crate", + sha256 = "759c97c1e17c55525b57192c06a267cda0ac5210b222d6b82189a2338fa1c13d", + strip_prefix 
= "globset-0.4.13", + urls = ["https://crates.io/api/v1/crates/globset/0.4.13/download"], visibility = [], ) cargo.rust_library( - name = "globset-0.4.14", - srcs = [":globset-0.4.14.crate"], + name = "globset-0.4.13", + srcs = [":globset-0.4.13.crate"], crate = "globset", - crate_root = "globset-0.4.14.crate/src/lib.rs", - edition = "2021", + crate_root = "globset-0.4.13.crate/src/lib.rs", + edition = "2018", features = [ "default", "log", ], visibility = [], deps = [ - ":aho-corasick-1.1.2", - ":bstr-1.8.0", + ":aho-corasick-1.0.4", + ":bstr-1.6.0", + ":fnv-1.0.7", ":log-0.4.20", - ":regex-automata-0.4.3", - ":regex-syntax-0.8.2", + ":regex-1.9.3", ], ) @@ -5202,32 +5018,32 @@ cargo.rust_library( ) http_archive( - name = "h2-0.3.22.crate", - sha256 = "4d6250322ef6e60f93f9a2162799302cd6f68f79f6e5d85c8c16f14d1d958178", - strip_prefix = "h2-0.3.22", - urls = ["https://crates.io/api/v1/crates/h2/0.3.22/download"], + name = "h2-0.3.21.crate", + sha256 = "91fc23aa11be92976ef4729127f1a74adf36d8436f7816b185d18df956790833", + strip_prefix = "h2-0.3.21", + urls = ["https://crates.io/api/v1/crates/h2/0.3.21/download"], visibility = [], ) cargo.rust_library( - name = "h2-0.3.22", - srcs = [":h2-0.3.22.crate"], + name = "h2-0.3.21", + srcs = [":h2-0.3.21.crate"], crate = "h2", - crate_root = "h2-0.3.22.crate/src/lib.rs", + crate_root = "h2-0.3.21.crate/src/lib.rs", edition = "2018", visibility = [], deps = [ - ":bytes-1.5.0", + ":bytes-1.4.0", ":fnv-1.0.7", ":futures-core-0.3.29", ":futures-sink-0.3.29", ":futures-util-0.3.29", - ":http-0.2.11", - ":indexmap-2.1.0", + ":http-0.2.9", + ":indexmap-1.9.3", ":slab-0.4.9", - ":tokio-1.35.0", - ":tokio-util-0.7.10", - ":tracing-0.1.40", + ":tokio-1.32.0", + ":tokio-util-0.7.8", + ":tracing-0.1.37", ], ) @@ -5248,6 +5064,24 @@ cargo.rust_library( visibility = [], ) +http_archive( + name = "hash32-0.2.1.crate", + sha256 = "b0c35f58762feb77d74ebe43bdbc3210f09be9fe6742234d573bacc26ed92b67", + strip_prefix = "hash32-0.2.1", + urls = 
["https://crates.io/api/v1/crates/hash32/0.2.1/download"], + visibility = [], +) + +cargo.rust_library( + name = "hash32-0.2.1", + srcs = [":hash32-0.2.1.crate"], + crate = "hash32", + crate_root = "hash32-0.2.1.crate/src/lib.rs", + edition = "2015", + visibility = [], + deps = [":byteorder-1.4.3"], +) + http_archive( name = "hashbrown-0.12.3.crate", sha256 = "8a9ee70c43aaf417c914396645a0fa852624801b24ebb7ae78fe8272889ac888", @@ -5269,22 +5103,45 @@ cargo.rust_library( "raw", ], visibility = [], - deps = [":ahash-0.7.7"], + deps = [":ahash-0.7.6"], +) + +http_archive( + name = "hashbrown-0.13.2.crate", + sha256 = "43a3c133739dddd0d2990f9a4bdf8eb4b21ef50e4851ca85ab661199821d510e", + strip_prefix = "hashbrown-0.13.2", + urls = ["https://crates.io/api/v1/crates/hashbrown/0.13.2/download"], + visibility = [], +) + +cargo.rust_library( + name = "hashbrown-0.13.2", + srcs = [":hashbrown-0.13.2.crate"], + crate = "hashbrown", + crate_root = "hashbrown-0.13.2.crate/src/lib.rs", + edition = "2021", + features = [ + "ahash", + "default", + "inline-more", + ], + visibility = [], + deps = [":ahash-0.8.3"], ) http_archive( - name = "hashbrown-0.14.3.crate", - sha256 = "290f1a1d9242c78d09ce40a5e87e7554ee637af1351968159f4952f028f75604", - strip_prefix = "hashbrown-0.14.3", - urls = ["https://crates.io/api/v1/crates/hashbrown/0.14.3/download"], + name = "hashbrown-0.14.0.crate", + sha256 = "2c6201b9ff9fd90a5a3bac2e56a830d0caa509576f0e503818ee82c181b3437a", + strip_prefix = "hashbrown-0.14.0", + urls = ["https://crates.io/api/v1/crates/hashbrown/0.14.0/download"], visibility = [], ) cargo.rust_library( - name = "hashbrown-0.14.3", - srcs = [":hashbrown-0.14.3.crate"], + name = "hashbrown-0.14.0", + srcs = [":hashbrown-0.14.0.crate"], crate = "hashbrown", - crate_root = "hashbrown-0.14.3.crate/src/lib.rs", + crate_root = "hashbrown-0.14.0.crate/src/lib.rs", edition = "2021", features = [ "ahash", @@ -5295,27 +5152,68 @@ cargo.rust_library( ], visibility = [], deps = [ - 
":ahash-0.8.6", + ":ahash-0.8.3", ":allocator-api2-0.2.16", ], ) http_archive( - name = "hashlink-0.8.4.crate", - sha256 = "e8094feaf31ff591f651a2664fb9cfd92bba7a60ce3197265e9482ebe753c8f7", - strip_prefix = "hashlink-0.8.4", - urls = ["https://crates.io/api/v1/crates/hashlink/0.8.4/download"], + name = "hashlink-0.8.3.crate", + sha256 = "312f66718a2d7789ffef4f4b7b213138ed9f1eb3aa1d0d82fc99f88fb3ffd26f", + strip_prefix = "hashlink-0.8.3", + urls = ["https://crates.io/api/v1/crates/hashlink/0.8.3/download"], visibility = [], ) cargo.rust_library( - name = "hashlink-0.8.4", - srcs = [":hashlink-0.8.4.crate"], + name = "hashlink-0.8.3", + srcs = [":hashlink-0.8.3.crate"], crate = "hashlink", - crate_root = "hashlink-0.8.4.crate/src/lib.rs", + crate_root = "hashlink-0.8.3.crate/src/lib.rs", + edition = "2018", + visibility = [], + deps = [":hashbrown-0.14.0"], +) + +http_archive( + name = "heapless-0.7.17.crate", + sha256 = "cdc6457c0eb62c71aac4bc17216026d8410337c4126773b9c5daba343f17964f", + strip_prefix = "heapless-0.7.17", + urls = ["https://crates.io/api/v1/crates/heapless/0.7.17/download"], + visibility = [], +) + +cargo.rust_library( + name = "heapless-0.7.17", + srcs = [":heapless-0.7.17.crate"], + crate = "heapless", + crate_root = "heapless-0.7.17.crate/src/lib.rs", edition = "2018", + features = [ + "atomic-polyfill", + "cas", + "serde", + ], + platform = { + "linux-x86_64": dict( + deps = [":spin-0.9.8"], + ), + "macos-x86_64": dict( + deps = [":spin-0.9.8"], + ), + "windows-gnu": dict( + deps = [":spin-0.9.8"], + ), + "windows-msvc": dict( + deps = [":spin-0.9.8"], + ), + }, visibility = [], - deps = [":hashbrown-0.14.3"], + deps = [ + ":hash32-0.2.1", + ":serde-1.0.186", + ":stable_deref_trait-1.2.0", + ], ) http_archive( @@ -5493,31 +5391,6 @@ cargo.rust_library( visibility = [], ) -http_archive( - name = "home-0.5.5.crate", - sha256 = "5444c27eef6923071f7ebcc33e3444508466a76f7a2b93da00ed6e19f30c1ddb", - strip_prefix = "home-0.5.5", - urls = 
["https://crates.io/api/v1/crates/home/0.5.5/download"], - visibility = [], -) - -cargo.rust_library( - name = "home-0.5.5", - srcs = [":home-0.5.5.crate"], - crate = "home", - crate_root = "home-0.5.5.crate/src/lib.rs", - edition = "2018", - platform = { - "windows-gnu": dict( - deps = [":windows-sys-0.48.0"], - ), - "windows-msvc": dict( - deps = [":windows-sys-0.48.0"], - ), - }, - visibility = [], -) - http_archive( name = "html-escape-0.2.13.crate", sha256 = "6d1ad449764d627e22bfd7cd5e8868264fc9236e07c752972b4080cd351cb476", @@ -5537,56 +5410,56 @@ cargo.rust_library( "std", ], visibility = [], - deps = [":utf8-width-0.1.7"], + deps = [":utf8-width-0.1.6"], ) alias( name = "http", - actual = ":http-0.2.11", + actual = ":http-0.2.9", visibility = ["PUBLIC"], ) http_archive( - name = "http-0.2.11.crate", - sha256 = "8947b1a6fad4393052c7ba1f4cd97bed3e953a95c79c92ad9b051a04611d9fbb", - strip_prefix = "http-0.2.11", - urls = ["https://crates.io/api/v1/crates/http/0.2.11/download"], + name = "http-0.2.9.crate", + sha256 = "bd6effc99afb63425aff9b05836f029929e345a6148a14b7ecd5ab67af944482", + strip_prefix = "http-0.2.9", + urls = ["https://crates.io/api/v1/crates/http/0.2.9/download"], visibility = [], ) cargo.rust_library( - name = "http-0.2.11", - srcs = [":http-0.2.11.crate"], + name = "http-0.2.9", + srcs = [":http-0.2.9.crate"], crate = "http", - crate_root = "http-0.2.11.crate/src/lib.rs", + crate_root = "http-0.2.9.crate/src/lib.rs", edition = "2018", visibility = [], deps = [ - ":bytes-1.5.0", + ":bytes-1.4.0", ":fnv-1.0.7", - ":itoa-1.0.10", + ":itoa-1.0.9", ], ) http_archive( - name = "http-body-0.4.6.crate", - sha256 = "7ceab25649e9960c0311ea418d17bee82c0dcec1bd053b5f9a66e265a693bed2", - strip_prefix = "http-body-0.4.6", - urls = ["https://crates.io/api/v1/crates/http-body/0.4.6/download"], + name = "http-body-0.4.5.crate", + sha256 = "d5f38f16d184e36f2408a55281cd658ecbd3ca05cce6d6510a176eca393e26d1", + strip_prefix = "http-body-0.4.5", + urls = 
["https://crates.io/api/v1/crates/http-body/0.4.5/download"], visibility = [], ) cargo.rust_library( - name = "http-body-0.4.6", - srcs = [":http-body-0.4.6.crate"], + name = "http-body-0.4.5", + srcs = [":http-body-0.4.5.crate"], crate = "http_body", - crate_root = "http-body-0.4.6.crate/src/lib.rs", + crate_root = "http-body-0.4.5.crate/src/lib.rs", edition = "2018", visibility = [], deps = [ - ":bytes-1.5.0", - ":http-0.2.11", - ":pin-project-lite-0.2.13", + ":bytes-1.4.0", + ":http-0.2.9", + ":pin-project-lite-0.2.12", ], ) @@ -5680,46 +5553,46 @@ cargo.rust_library( ], visibility = [], deps = [ - ":bytes-1.5.0", + ":bytes-1.4.0", ":futures-channel-0.3.29", ":futures-core-0.3.29", ":futures-util-0.3.29", - ":h2-0.3.22", - ":http-0.2.11", - ":http-body-0.4.6", + ":h2-0.3.21", + ":http-0.2.9", + ":http-body-0.4.5", ":httparse-1.8.0", ":httpdate-1.0.3", - ":itoa-1.0.10", - ":pin-project-lite-0.2.13", - ":socket2-0.4.10", - ":tokio-1.35.0", + ":itoa-1.0.9", + ":pin-project-lite-0.2.12", + ":socket2-0.4.9", + ":tokio-1.32.0", ":tower-service-0.3.2", - ":tracing-0.1.40", + ":tracing-0.1.37", ":want-0.3.1", ], ) http_archive( - name = "hyper-rustls-0.24.2.crate", - sha256 = "ec3efd23720e2049821a693cbc7e65ea87c72f1c58ff2f9522ff332b1491e590", - strip_prefix = "hyper-rustls-0.24.2", - urls = ["https://crates.io/api/v1/crates/hyper-rustls/0.24.2/download"], + name = "hyper-rustls-0.24.1.crate", + sha256 = "8d78e1e73ec14cf7375674f74d7dde185c8206fd9dea6fb6295e8a98098aaa97", + strip_prefix = "hyper-rustls-0.24.1", + urls = ["https://crates.io/api/v1/crates/hyper-rustls/0.24.1/download"], visibility = [], ) cargo.rust_library( - name = "hyper-rustls-0.24.2", - srcs = [":hyper-rustls-0.24.2.crate"], + name = "hyper-rustls-0.24.1", + srcs = [":hyper-rustls-0.24.1.crate"], crate = "hyper_rustls", - crate_root = "hyper-rustls-0.24.2.crate/src/lib.rs", + crate_root = "hyper-rustls-0.24.1.crate/src/lib.rs", edition = "2021", visibility = [], deps = [ ":futures-util-0.3.29", - 
":http-0.2.11", + ":http-0.2.9", ":hyper-0.14.27", - ":rustls-0.21.10", - ":tokio-1.35.0", + ":rustls-0.21.6", + ":tokio-1.32.0", ":tokio-rustls-0.24.1", ], ) @@ -5741,8 +5614,8 @@ cargo.rust_library( visibility = [], deps = [ ":hyper-0.14.27", - ":pin-project-lite-0.2.13", - ":tokio-1.35.0", + ":pin-project-lite-0.2.12", + ":tokio-1.32.0", ":tokio-io-timeout-1.2.0", ], ) @@ -5770,37 +5643,37 @@ cargo.rust_library( ":hex-0.4.3", ":hyper-0.14.27", ":pin-project-1.1.3", - ":tokio-1.35.0", + ":tokio-1.32.0", ], ) http_archive( - name = "iana-time-zone-0.1.58.crate", - sha256 = "8326b86b6cff230b97d0d312a6c40a60726df3332e721f72a1b035f451663b20", - strip_prefix = "iana-time-zone-0.1.58", - urls = ["https://crates.io/api/v1/crates/iana-time-zone/0.1.58/download"], + name = "iana-time-zone-0.1.57.crate", + sha256 = "2fad5b825842d2b38bd206f3e81d6957625fd7f0a361e345c30e01a0ae2dd613", + strip_prefix = "iana-time-zone-0.1.57", + urls = ["https://crates.io/api/v1/crates/iana-time-zone/0.1.57/download"], visibility = [], ) cargo.rust_library( - name = "iana-time-zone-0.1.58", - srcs = [":iana-time-zone-0.1.58.crate"], + name = "iana-time-zone-0.1.57", + srcs = [":iana-time-zone-0.1.57.crate"], crate = "iana_time_zone", - crate_root = "iana-time-zone-0.1.58.crate/src/lib.rs", + crate_root = "iana-time-zone-0.1.57.crate/src/lib.rs", edition = "2018", features = ["fallback"], platform = { "macos-arm64": dict( - deps = [":core-foundation-sys-0.8.6"], + deps = [":core-foundation-sys-0.8.4"], ), "macos-x86_64": dict( - deps = [":core-foundation-sys-0.8.6"], + deps = [":core-foundation-sys-0.8.4"], ), "windows-gnu": dict( - deps = [":windows-core-0.51.1"], + deps = [":windows-0.48.0"], ), "windows-msvc": dict( - deps = [":windows-core-0.51.1"], + deps = [":windows-0.48.0"], ), }, visibility = [], @@ -5824,18 +5697,18 @@ cargo.rust_library( ) http_archive( - name = "idna-0.5.0.crate", - sha256 = "634d9b1461af396cad843f47fdba5597a4f9e6ddd4bfb6ff5d85028c25cb12f6", - strip_prefix = 
"idna-0.5.0", - urls = ["https://crates.io/api/v1/crates/idna/0.5.0/download"], + name = "idna-0.4.0.crate", + sha256 = "7d20d6b07bfbc108882d88ed8e37d39636dcc260e15e30c45e6ba089610b917c", + strip_prefix = "idna-0.4.0", + urls = ["https://crates.io/api/v1/crates/idna/0.4.0/download"], visibility = [], ) cargo.rust_library( - name = "idna-0.5.0", - srcs = [":idna-0.5.0.crate"], + name = "idna-0.4.0", + srcs = [":idna-0.4.0.crate"], crate = "idna", - crate_root = "idna-0.5.0.crate/src/lib.rs", + crate_root = "idna-0.4.0.crate/src/lib.rs", edition = "2018", features = [ "alloc", @@ -5844,7 +5717,7 @@ cargo.rust_library( ], visibility = [], deps = [ - ":unicode-bidi-0.3.14", + ":unicode-bidi-0.3.13", ":unicode-normalization-0.1.22", ], ) @@ -5872,47 +5745,48 @@ cargo.rust_library( proc_macro = True, visibility = [], deps = [ - ":ignore-0.4.21", - ":proc-macro2-1.0.70", + ":ignore-0.4.20", + ":proc-macro2-1.0.66", ":quote-1.0.33", - ":serde-1.0.193", + ":serde-1.0.186", ":syn-1.0.109", - ":toml-0.7.8", + ":toml-0.7.6", ":unicode-xid-0.2.4", ], ) http_archive( - name = "ignore-0.4.21.crate", - sha256 = "747ad1b4ae841a78e8aba0d63adbfbeaea26b517b63705d47856b73015d27060", - strip_prefix = "ignore-0.4.21", - urls = ["https://crates.io/api/v1/crates/ignore/0.4.21/download"], + name = "ignore-0.4.20.crate", + sha256 = "dbe7873dab538a9a44ad79ede1faf5f30d49f9a5c883ddbab48bce81b64b7492", + strip_prefix = "ignore-0.4.20", + urls = ["https://crates.io/api/v1/crates/ignore/0.4.20/download"], visibility = [], ) cargo.rust_library( - name = "ignore-0.4.21", - srcs = [":ignore-0.4.21.crate"], + name = "ignore-0.4.20", + srcs = [":ignore-0.4.20.crate"], crate = "ignore", - crate_root = "ignore-0.4.21.crate/src/lib.rs", - edition = "2021", + crate_root = "ignore-0.4.20.crate/src/lib.rs", + edition = "2018", platform = { "windows-gnu": dict( - deps = [":winapi-util-0.1.6"], + deps = [":winapi-util-0.1.5"], ), "windows-msvc": dict( - deps = [":winapi-util-0.1.6"], + deps = 
[":winapi-util-0.1.5"], ), }, visibility = [], deps = [ - ":crossbeam-deque-0.8.3", - ":globset-0.4.14", + ":globset-0.4.13", + ":lazy_static-1.4.0", ":log-0.4.20", - ":memchr-2.6.4", - ":regex-automata-0.4.3", + ":memchr-2.5.0", + ":regex-1.9.3", ":same-file-1.0.6", - ":walkdir-2.4.0", + ":thread_local-1.1.7", + ":walkdir-2.3.3", ], ) @@ -5957,23 +5831,23 @@ cargo.rust_library( visibility = [], deps = [ ":hashbrown-0.12.3", - ":serde-1.0.193", + ":serde-1.0.186", ], ) http_archive( - name = "indexmap-2.1.0.crate", - sha256 = "d530e1a18b1cb4c484e6e34556a0d948706958449fca0cab753d649f2bce3d1f", - strip_prefix = "indexmap-2.1.0", - urls = ["https://crates.io/api/v1/crates/indexmap/2.1.0/download"], + name = "indexmap-2.0.0.crate", + sha256 = "d5477fe2230a79769d8dc68e0eabf5437907c0457a5614a9e8dddb67f65eb65d", + strip_prefix = "indexmap-2.0.0", + urls = ["https://crates.io/api/v1/crates/indexmap/2.0.0/download"], visibility = [], ) cargo.rust_library( - name = "indexmap-2.1.0", - srcs = [":indexmap-2.1.0.crate"], + name = "indexmap-2.0.0", + srcs = [":indexmap-2.0.0.crate"], crate = "indexmap", - crate_root = "indexmap-2.1.0.crate/src/lib.rs", + crate_root = "indexmap-2.0.0.crate/src/lib.rs", edition = "2021", features = [ "default", @@ -5984,31 +5858,31 @@ cargo.rust_library( visibility = [], deps = [ ":equivalent-1.0.1", - ":hashbrown-0.14.3", - ":serde-1.0.193", + ":hashbrown-0.14.0", + ":serde-1.0.186", ], ) alias( name = "indicatif", - actual = ":indicatif-0.17.7", + actual = ":indicatif-0.17.6", visibility = ["PUBLIC"], ) http_archive( - name = "indicatif-0.17.7.crate", - sha256 = "fb28741c9db9a713d93deb3bb9515c20788cef5815265bee4980e87bde7e0f25", - strip_prefix = "indicatif-0.17.7", - urls = ["https://crates.io/api/v1/crates/indicatif/0.17.7/download"], + name = "indicatif-0.17.6.crate", + sha256 = "0b297dc40733f23a0e52728a58fa9489a5b7638a324932de16b41adc3ef80730", + strip_prefix = "indicatif-0.17.6", + urls = 
["https://crates.io/api/v1/crates/indicatif/0.17.6/download"], visibility = [], ) cargo.rust_library( - name = "indicatif-0.17.7", - srcs = [":indicatif-0.17.7.crate"], + name = "indicatif-0.17.6", + srcs = [":indicatif-0.17.6.crate"], crate = "indicatif", - crate_root = "indicatif-0.17.7.crate/src/lib.rs", - edition = "2021", + crate_root = "indicatif-0.17.6.crate/src/lib.rs", + edition = "2018", features = [ "default", "unicode-width", @@ -6017,30 +5891,30 @@ cargo.rust_library( deps = [ ":console-0.15.7", ":number_prefix-0.4.0", - ":portable-atomic-1.6.0", - ":unicode-width-0.1.11", + ":portable-atomic-1.4.2", + ":unicode-width-0.1.10", ], ) alias( name = "indoc", - actual = ":indoc-2.0.4", + actual = ":indoc-2.0.3", visibility = ["PUBLIC"], ) http_archive( - name = "indoc-2.0.4.crate", - sha256 = "1e186cfbae8084e513daff4240b4797e342f988cecda4fb6c939150f96315fd8", - strip_prefix = "indoc-2.0.4", - urls = ["https://crates.io/api/v1/crates/indoc/2.0.4/download"], + name = "indoc-2.0.3.crate", + sha256 = "2c785eefb63ebd0e33416dfcb8d6da0bf27ce752843a45632a67bf10d4d4b5c4", + strip_prefix = "indoc-2.0.3", + urls = ["https://crates.io/api/v1/crates/indoc/2.0.3/download"], visibility = [], ) cargo.rust_library( - name = "indoc-2.0.4", - srcs = [":indoc-2.0.4.crate"], + name = "indoc-2.0.3", + srcs = [":indoc-2.0.3.crate"], crate = "indoc", - crate_root = "indoc-2.0.4.crate/src/lib.rs", + crate_root = "indoc-2.0.3.crate/src/lib.rs", edition = "2021", proc_macro = True, visibility = [], @@ -6075,12 +5949,12 @@ cargo.rust_library( deps = [ ":bitflags-1.3.2", ":crossterm-0.25.0", - ":dyn-clone-1.0.16", + ":dyn-clone-1.0.13", ":lazy_static-1.4.0", ":newline-converter-0.2.2", - ":thiserror-1.0.50", + ":thiserror-1.0.47", ":unicode-segmentation-1.10.1", - ":unicode-width-0.1.11", + ":unicode-width-0.1.10", ], ) @@ -6103,18 +5977,61 @@ cargo.rust_library( ) http_archive( - name = "ipnet-2.9.0.crate", - sha256 = "8f518f335dce6725a761382244631d86cf0ccb2863413590b31338feb467f9c3", 
- strip_prefix = "ipnet-2.9.0", - urls = ["https://crates.io/api/v1/crates/ipnet/2.9.0/download"], + name = "io-lifetimes-1.0.11.crate", + sha256 = "eae7b9aee968036d54dce06cebaefd919e4472e753296daccd6d344e3e2df0c2", + strip_prefix = "io-lifetimes-1.0.11", + urls = ["https://crates.io/api/v1/crates/io-lifetimes/1.0.11/download"], + visibility = [], +) + +cargo.rust_library( + name = "io-lifetimes-1.0.11", + srcs = [":io-lifetimes-1.0.11.crate"], + crate = "io_lifetimes", + crate_root = "io-lifetimes-1.0.11.crate/src/lib.rs", + edition = "2018", + features = [ + "close", + "hermit-abi", + "libc", + "windows-sys", + ], + platform = { + "linux-arm64": dict( + deps = [":libc-0.2.147"], + ), + "linux-x86_64": dict( + deps = [":libc-0.2.147"], + ), + "macos-arm64": dict( + deps = [":libc-0.2.147"], + ), + "macos-x86_64": dict( + deps = [":libc-0.2.147"], + ), + "windows-gnu": dict( + deps = [":windows-sys-0.48.0"], + ), + "windows-msvc": dict( + deps = [":windows-sys-0.48.0"], + ), + }, + visibility = [], +) + +http_archive( + name = "ipnet-2.8.0.crate", + sha256 = "28b29a3cd74f0f4598934efe3aeba42bae0eb4680554128851ebbecb02af14e6", + strip_prefix = "ipnet-2.8.0", + urls = ["https://crates.io/api/v1/crates/ipnet/2.8.0/download"], visibility = [], ) cargo.rust_library( - name = "ipnet-2.9.0", - srcs = [":ipnet-2.9.0.crate"], + name = "ipnet-2.8.0", + srcs = [":ipnet-2.8.0.crate"], crate = "ipnet", - crate_root = "ipnet-2.9.0.crate/src/lib.rs", + crate_root = "ipnet-2.8.0.crate/src/lib.rs", edition = "2018", features = [ "default", @@ -6138,7 +6055,44 @@ cargo.rust_library( crate_root = "is-docker-0.2.0.crate/src/lib.rs", edition = "2015", visibility = [], - deps = [":once_cell-1.19.0"], + deps = [":once_cell-1.18.0"], +) + +http_archive( + name = "is-terminal-0.4.9.crate", + sha256 = "cb0889898416213fab133e1d33a0e5858a48177452750691bde3666d0fdbaf8b", + strip_prefix = "is-terminal-0.4.9", + urls = ["https://crates.io/api/v1/crates/is-terminal/0.4.9/download"], + visibility = 
[], +) + +cargo.rust_library( + name = "is-terminal-0.4.9", + srcs = [":is-terminal-0.4.9.crate"], + crate = "is_terminal", + crate_root = "is-terminal-0.4.9.crate/src/lib.rs", + edition = "2018", + platform = { + "linux-arm64": dict( + deps = [":rustix-0.38.8"], + ), + "linux-x86_64": dict( + deps = [":rustix-0.38.8"], + ), + "macos-arm64": dict( + deps = [":rustix-0.38.8"], + ), + "macos-x86_64": dict( + deps = [":rustix-0.38.8"], + ), + "windows-gnu": dict( + deps = [":windows-sys-0.48.0"], + ), + "windows-msvc": dict( + deps = [":windows-sys-0.48.0"], + ), + }, + visibility = [], ) http_archive( @@ -6158,7 +6112,7 @@ cargo.rust_library( visibility = [], deps = [ ":is-docker-0.2.0", - ":once_cell-1.19.0", + ":once_cell-1.18.0", ], ) @@ -6192,101 +6146,78 @@ cargo.rust_library( ) http_archive( - name = "itertools-0.12.0.crate", - sha256 = "25db6b064527c5d482d0423354fcd07a89a2dfe07b67892e62411946db7f07b0", - strip_prefix = "itertools-0.12.0", - urls = ["https://crates.io/api/v1/crates/itertools/0.12.0/download"], - visibility = [], -) - -cargo.rust_library( - name = "itertools-0.12.0", - srcs = [":itertools-0.12.0.crate"], - crate = "itertools", - crate_root = "itertools-0.12.0.crate/src/lib.rs", - edition = "2018", - features = [ - "default", - "use_alloc", - "use_std", - ], - visibility = [], - deps = [":either-1.9.0"], -) - -http_archive( - name = "itoa-1.0.10.crate", - sha256 = "b1a46d1a171d865aa5f83f92695765caa047a9b4cbae2cbf37dbd613a793fd4c", - strip_prefix = "itoa-1.0.10", - urls = ["https://crates.io/api/v1/crates/itoa/1.0.10/download"], + name = "itoa-1.0.9.crate", + sha256 = "af150ab688ff2122fcef229be89cb50dd66af9e01a4ff320cc137eecc9bacc38", + strip_prefix = "itoa-1.0.9", + urls = ["https://crates.io/api/v1/crates/itoa/1.0.9/download"], visibility = [], ) cargo.rust_library( - name = "itoa-1.0.10", - srcs = [":itoa-1.0.10.crate"], + name = "itoa-1.0.9", + srcs = [":itoa-1.0.9.crate"], crate = "itoa", - crate_root = "itoa-1.0.10.crate/src/lib.rs", + 
crate_root = "itoa-1.0.9.crate/src/lib.rs", edition = "2018", visibility = [], ) alias( name = "jwt-simple", - actual = ":jwt-simple-0.11.9", + actual = ":jwt-simple-0.11.6", visibility = ["PUBLIC"], ) http_archive( - name = "jwt-simple-0.11.9.crate", - sha256 = "357892bb32159d763abdea50733fadcb9a8e1c319a9aa77592db8555d05af83e", - strip_prefix = "jwt-simple-0.11.9", - urls = ["https://crates.io/api/v1/crates/jwt-simple/0.11.9/download"], + name = "jwt-simple-0.11.6.crate", + sha256 = "733741e7bcd1532b56c9ba6c698c069f274f3782ad956f0d2c7f31650cedaa1b", + strip_prefix = "jwt-simple-0.11.6", + urls = ["https://crates.io/api/v1/crates/jwt-simple/0.11.6/download"], visibility = [], ) cargo.rust_library( - name = "jwt-simple-0.11.9", - srcs = [":jwt-simple-0.11.9.crate"], + name = "jwt-simple-0.11.6", + srcs = [":jwt-simple-0.11.6.crate"], crate = "jwt_simple", - crate_root = "jwt-simple-0.11.9.crate/src/lib.rs", + crate_root = "jwt-simple-0.11.6.crate/src/lib.rs", edition = "2018", visibility = [], deps = [ ":anyhow-1.0.75", ":binstring-0.1.1", - ":coarsetime-0.1.33", + ":coarsetime-0.1.23", ":ct-codecs-1.1.1", ":ed25519-compact-2.0.4", ":hmac-sha1-compact-1.1.4", ":hmac-sha256-1.1.7", ":hmac-sha512-1.1.5", - ":k256-0.13.2", + ":k256-0.13.1", ":p256-0.13.2", ":p384-0.13.0", ":rand-0.8.5", ":rsa-0.7.2", - ":serde-1.0.193", - ":serde_json-1.0.108", + ":serde-1.0.186", + ":serde_json-1.0.105", ":spki-0.6.0", - ":thiserror-1.0.50", - ":zeroize-1.7.0", + ":thiserror-1.0.47", + ":zeroize-1.6.0", ], ) http_archive( - name = "k256-0.13.2.crate", - sha256 = "3f01b677d82ef7a676aa37e099defd83a28e15687112cafdd112d60236b6115b", - strip_prefix = "k256-0.13.2", - urls = ["https://crates.io/api/v1/crates/k256/0.13.2/download"], + name = "k256-0.13.1.crate", + sha256 = "cadb76004ed8e97623117f3df85b17aaa6626ab0b0831e6573f104df16cd1bcc", + strip_prefix = "k256-0.13.1", + urls = ["https://crates.io/api/v1/crates/k256/0.13.1/download"], visibility = [], ) cargo.rust_library( - name = 
"k256-0.13.2", - srcs = [":k256-0.13.2.crate"], + name = "k256-0.13.1", + srcs = [":k256-0.13.1.crate"], crate = "k256", - crate_root = "k256-0.13.2.crate/src/lib.rs", + crate_root = "k256-0.13.1.crate/src/lib.rs", edition = "2021", features = [ "alloc", @@ -6306,15 +6237,15 @@ cargo.rust_library( "std", ], named_deps = { - "ecdsa_core": ":ecdsa-0.16.9", + "ecdsa_core": ":ecdsa-0.16.8", }, visibility = [], deps = [ ":cfg-if-1.0.0", - ":elliptic-curve-0.13.8", - ":once_cell-1.19.0", - ":sha2-0.10.8", - ":signature-2.2.0", + ":elliptic-curve-0.13.5", + ":once_cell-1.18.0", + ":sha2-0.10.7", + ":signature-2.1.0", ], ) @@ -6361,37 +6292,37 @@ cargo.rust_library( crate_root = "lib0-0.16.10.crate/src/lib.rs", edition = "2018", visibility = [], - deps = [":thiserror-1.0.50"], + deps = [":thiserror-1.0.47"], ) http_archive( - name = "libc-0.2.151.crate", - sha256 = "302d7ab3130588088d277783b1e2d2e10c9e9e4a16dd9050e6ec93fb3e7048f4", - strip_prefix = "libc-0.2.151", - urls = ["https://crates.io/api/v1/crates/libc/0.2.151/download"], + name = "libc-0.2.147.crate", + sha256 = "b4668fb0ea861c1df094127ac5f1da3409a82116a4ba74fca2e58ef927159bb3", + strip_prefix = "libc-0.2.147", + urls = ["https://crates.io/api/v1/crates/libc/0.2.147/download"], visibility = [], ) cargo.rust_library( - name = "libc-0.2.151", - srcs = [":libc-0.2.151.crate"], + name = "libc-0.2.147", + srcs = [":libc-0.2.147.crate"], crate = "libc", - crate_root = "libc-0.2.151.crate/src/lib.rs", + crate_root = "libc-0.2.147.crate/src/lib.rs", edition = "2015", features = [ "default", "extra_traits", "std", ], - rustc_flags = ["@$(location :libc-0.2.151-build-script-run[rustc_flags])"], + rustc_flags = ["@$(location :libc-0.2.147-build-script-run[rustc_flags])"], visibility = [], ) cargo.rust_binary( - name = "libc-0.2.151-build-script-build", - srcs = [":libc-0.2.151.crate"], + name = "libc-0.2.147-build-script-build", + srcs = [":libc-0.2.147.crate"], crate = "build_script_build", - crate_root = 
"libc-0.2.151.crate/build.rs", + crate_root = "libc-0.2.147.crate/build.rs", edition = "2015", features = [ "default", @@ -6402,30 +6333,30 @@ cargo.rust_binary( ) buildscript_run( - name = "libc-0.2.151-build-script-run", + name = "libc-0.2.147-build-script-run", package_name = "libc", - buildscript_rule = ":libc-0.2.151-build-script-build", + buildscript_rule = ":libc-0.2.147-build-script-build", features = [ "default", "extra_traits", "std", ], - version = "0.2.151", + version = "0.2.147", ) http_archive( - name = "libm-0.2.8.crate", - sha256 = "4ec2a862134d2a7d32d7983ddcdd1c4923530833c9f2ea1a44fc5fa473989058", - strip_prefix = "libm-0.2.8", - urls = ["https://crates.io/api/v1/crates/libm/0.2.8/download"], + name = "libm-0.2.7.crate", + sha256 = "f7012b1bbb0719e1097c47611d3898568c546d597c2e74d66f6087edd5233ff4", + strip_prefix = "libm-0.2.7", + urls = ["https://crates.io/api/v1/crates/libm/0.2.7/download"], visibility = [], ) cargo.rust_library( - name = "libm-0.2.8", - srcs = [":libm-0.2.8.crate"], + name = "libm-0.2.7", + srcs = [":libm-0.2.7.crate"], crate = "libm", - crate_root = "libm-0.2.8.crate/src/lib.rs", + crate_root = "libm-0.2.7.crate/src/lib.rs", edition = "2018", features = ["default"], preferred_linkage = "static", @@ -6684,7 +6615,7 @@ cargo.rust_library( edition = "2015", visibility = [], deps = [ - ":libc-0.2.151", + ":libc-0.2.147", ":libsodium-sys-0.2.7-libsodium", ], ) @@ -6940,43 +6871,64 @@ cxx_library( ) http_archive( - name = "linux-raw-sys-0.4.12.crate", - sha256 = "c4cd1a83af159aa67994778be9070f0ae1bd732942279cabb14f86f986a21456", - strip_prefix = "linux-raw-sys-0.4.12", - urls = ["https://crates.io/api/v1/crates/linux-raw-sys/0.4.12/download"], + name = "linux-raw-sys-0.3.8.crate", + sha256 = "ef53942eb7bf7ff43a617b3e2c1c4a5ecf5944a7c1bc12d7ee39bbb15e5c1519", + strip_prefix = "linux-raw-sys-0.3.8", + urls = ["https://crates.io/api/v1/crates/linux-raw-sys/0.3.8/download"], + visibility = [], +) + +cargo.rust_library( + name = 
"linux-raw-sys-0.3.8", + srcs = [":linux-raw-sys-0.3.8.crate"], + crate = "linux_raw_sys", + crate_root = "linux-raw-sys-0.3.8.crate/src/lib.rs", + edition = "2018", + features = [ + "errno", + "general", + "ioctl", + "no_std", + ], + visibility = [], +) + +http_archive( + name = "linux-raw-sys-0.4.5.crate", + sha256 = "57bcfdad1b858c2db7c38303a6d2ad4dfaf5eb53dfeb0910128b2c26d6158503", + strip_prefix = "linux-raw-sys-0.4.5", + urls = ["https://crates.io/api/v1/crates/linux-raw-sys/0.4.5/download"], visibility = [], ) cargo.rust_library( - name = "linux-raw-sys-0.4.12", - srcs = [":linux-raw-sys-0.4.12.crate"], + name = "linux-raw-sys-0.4.5", + srcs = [":linux-raw-sys-0.4.5.crate"], crate = "linux_raw_sys", - crate_root = "linux-raw-sys-0.4.12.crate/src/lib.rs", + crate_root = "linux-raw-sys-0.4.5.crate/src/lib.rs", edition = "2021", features = [ - "elf", "errno", "general", "ioctl", "no_std", - "std", ], visibility = [], ) http_archive( - name = "lock_api-0.4.11.crate", - sha256 = "3c168f8615b12bc01f9c17e2eb0cc07dcae1940121185446edc3744920e8ef45", - strip_prefix = "lock_api-0.4.11", - urls = ["https://crates.io/api/v1/crates/lock_api/0.4.11/download"], + name = "lock_api-0.4.10.crate", + sha256 = "c1cc9717a20b1bb222f333e6a92fd32f7d8a18ddc5a3191a11af45dcbf4dcd16", + strip_prefix = "lock_api-0.4.10", + urls = ["https://crates.io/api/v1/crates/lock_api/0.4.10/download"], visibility = [], ) cargo.rust_library( - name = "lock_api-0.4.11", - srcs = [":lock_api-0.4.11.crate"], + name = "lock_api-0.4.10", + srcs = [":lock_api-0.4.10.crate"], crate = "lock_api", - crate_root = "lock_api-0.4.11.crate/src/lib.rs", + crate_root = "lock_api-0.4.10.crate/src/lib.rs", edition = "2018", features = [ "atomic_usize", @@ -7023,18 +6975,18 @@ cargo.rust_library( ) http_archive( - name = "matchit-0.7.3.crate", - sha256 = "0e7465ac9959cc2b1404e8e2367b43684a6d13790fe23056cc8c6c5a6b7bcb94", - strip_prefix = "matchit-0.7.3", - urls = 
["https://crates.io/api/v1/crates/matchit/0.7.3/download"], + name = "matchit-0.7.2.crate", + sha256 = "ed1202b2a6f884ae56f04cff409ab315c5ce26b5e58d7412e484f01fd52f52ef", + strip_prefix = "matchit-0.7.2", + urls = ["https://crates.io/api/v1/crates/matchit/0.7.2/download"], visibility = [], ) cargo.rust_library( - name = "matchit-0.7.3", - srcs = [":matchit-0.7.3.crate"], + name = "matchit-0.7.2", + srcs = [":matchit-0.7.2.crate"], crate = "matchit", - crate_root = "matchit-0.7.3.crate/src/lib.rs", + crate_root = "matchit-0.7.2.crate/src/lib.rs", edition = "2021", features = ["default"], visibility = [], @@ -7058,35 +7010,32 @@ cargo.rust_library( proc_macro = True, visibility = [], deps = [ - ":proc-macro2-1.0.70", + ":proc-macro2-1.0.66", ":quote-1.0.33", ":syn-1.0.109", ], ) http_archive( - name = "md-5-0.10.6.crate", - sha256 = "d89e7ee0cfbedfc4da3340218492196241d89eefb6dab27de5df917a6d2e78cf", - strip_prefix = "md-5-0.10.6", - urls = ["https://crates.io/api/v1/crates/md-5/0.10.6/download"], + name = "md-5-0.10.5.crate", + sha256 = "6365506850d44bff6e2fbcb5176cf63650e48bd45ef2fe2665ae1570e0f4b9ca", + strip_prefix = "md-5-0.10.5", + urls = ["https://crates.io/api/v1/crates/md-5/0.10.5/download"], visibility = [], ) cargo.rust_library( - name = "md-5-0.10.6", - srcs = [":md-5-0.10.6.crate"], + name = "md-5-0.10.5", + srcs = [":md-5-0.10.5.crate"], crate = "md5", - crate_root = "md-5-0.10.6.crate/src/lib.rs", + crate_root = "md-5-0.10.5.crate/src/lib.rs", edition = "2018", features = [ "default", "std", ], visibility = [], - deps = [ - ":cfg-if-1.0.0", - ":digest-0.10.7", - ], + deps = [":digest-0.10.7"], ) http_archive( @@ -7111,21 +7060,20 @@ cargo.rust_library( ) http_archive( - name = "memchr-2.6.4.crate", - sha256 = "f665ee40bc4a3c5590afb1e9677db74a508659dfd71e126420da8274909a0167", - strip_prefix = "memchr-2.6.4", - urls = ["https://crates.io/api/v1/crates/memchr/2.6.4/download"], + name = "memchr-2.5.0.crate", + sha256 = 
"2dffe52ecf27772e601905b7522cb4ef790d2cc203488bbd0e2fe85fcb74566d", + strip_prefix = "memchr-2.5.0", + urls = ["https://crates.io/api/v1/crates/memchr/2.5.0/download"], visibility = [], ) cargo.rust_library( - name = "memchr-2.6.4", - srcs = [":memchr-2.6.4.crate"], + name = "memchr-2.5.0", + srcs = [":memchr-2.5.0.crate"], crate = "memchr", - crate_root = "memchr-2.6.4.crate/src/lib.rs", - edition = "2021", + crate_root = "memchr-2.5.0.crate/src/lib.rs", + edition = "2018", features = [ - "alloc", "default", "std", ], @@ -7149,16 +7097,16 @@ cargo.rust_library( features = ["stable_deref_trait"], platform = { "linux-arm64": dict( - deps = [":libc-0.2.151"], + deps = [":libc-0.2.147"], ), "linux-x86_64": dict( - deps = [":libc-0.2.151"], + deps = [":libc-0.2.147"], ), "macos-arm64": dict( - deps = [":libc-0.2.151"], + deps = [":libc-0.2.147"], ), "macos-x86_64": dict( - deps = [":libc-0.2.151"], + deps = [":libc-0.2.147"], ), }, visibility = [], @@ -7201,24 +7149,6 @@ cargo.rust_library( visibility = [], ) -http_archive( - name = "memoffset-0.9.0.crate", - sha256 = "5a634b1c61a95585bd15607c6ab0c4e5b226e695ff2800ba0cdccddf208c406c", - strip_prefix = "memoffset-0.9.0", - urls = ["https://crates.io/api/v1/crates/memoffset/0.9.0/download"], - visibility = [], -) - -cargo.rust_library( - name = "memoffset-0.9.0", - srcs = [":memoffset-0.9.0.crate"], - crate = "memoffset", - crate_root = "memoffset-0.9.0.crate/src/lib.rs", - edition = "2015", - features = ["default"], - visibility = [], -) - http_archive( name = "mime-0.3.17.crate", sha256 = "6877bb514081ee2a7ff5ef9de3281f14a4dd4bceac4c09388074a6b5df8a139a", @@ -7315,18 +7245,18 @@ cargo.rust_library( ) http_archive( - name = "mio-0.8.10.crate", - sha256 = "8f3d0b296e374a4e6f3c7b0a1f5a51d748a0d34c85e7dc48fc3fa9a87657fe09", - strip_prefix = "mio-0.8.10", - urls = ["https://crates.io/api/v1/crates/mio/0.8.10/download"], + name = "mio-0.8.8.crate", + sha256 = 
"927a765cd3fc26206e66b296465fa9d3e5ab003e651c1b3c060e7956d96b19d2", + strip_prefix = "mio-0.8.8", + urls = ["https://crates.io/api/v1/crates/mio/0.8.8/download"], visibility = [], ) cargo.rust_library( - name = "mio-0.8.10", - srcs = [":mio-0.8.10.crate"], + name = "mio-0.8.8", + srcs = [":mio-0.8.8.crate"], crate = "mio", - crate_root = "mio-0.8.10.crate/src/lib.rs", + crate_root = "mio-0.8.8.crate/src/lib.rs", edition = "2018", features = [ "default", @@ -7337,16 +7267,16 @@ cargo.rust_library( ], platform = { "linux-arm64": dict( - deps = [":libc-0.2.151"], + deps = [":libc-0.2.147"], ), "linux-x86_64": dict( - deps = [":libc-0.2.151"], + deps = [":libc-0.2.147"], ), "macos-arm64": dict( - deps = [":libc-0.2.151"], + deps = [":libc-0.2.147"], ), "macos-x86_64": dict( - deps = [":libc-0.2.151"], + deps = [":libc-0.2.147"], ), "windows-gnu": dict( deps = [":windows-sys-0.48.0"], @@ -7376,13 +7306,13 @@ cargo.rust_library( features = ["default"], visibility = [], deps = [ - ":bytes-1.5.0", + ":bytes-1.4.0", ":encoding_rs-0.8.33", ":futures-util-0.3.29", - ":http-0.2.11", + ":http-0.2.9", ":httparse-1.8.0", ":log-0.4.20", - ":memchr-2.6.4", + ":memchr-2.5.0", ":mime-0.3.17", ":spin-0.9.8", ":version_check-0.9.4", @@ -7562,29 +7492,29 @@ cargo.rust_library( deps = [ ":bitflags-1.3.2", ":cfg-if-1.0.0", - ":libc-0.2.151", + ":libc-0.2.147", ], ) alias( name = "nix", - actual = ":nix-0.26.4", + actual = ":nix-0.26.2", visibility = ["PUBLIC"], ) http_archive( - name = "nix-0.26.4.crate", - sha256 = "598beaf3cc6fdd9a5dfb1630c2800c7acd31df7aaf0f565796fba2b53ca1af1b", - strip_prefix = "nix-0.26.4", - urls = ["https://crates.io/api/v1/crates/nix/0.26.4/download"], + name = "nix-0.26.2.crate", + sha256 = "bfdda3d196821d6af13126e40375cdf7da646a96114af134d5f417a9a1dc8e1a", + strip_prefix = "nix-0.26.2", + urls = ["https://crates.io/api/v1/crates/nix/0.26.2/download"], visibility = [], ) cargo.rust_library( - name = "nix-0.26.4", - srcs = [":nix-0.26.4.crate"], + name = 
"nix-0.26.2", + srcs = [":nix-0.26.2.crate"], crate = "nix", - crate_root = "nix-0.26.4.crate/src/lib.rs", + crate_root = "nix-0.26.2.crate/src/lib.rs", edition = "2018", features = [ "acct", @@ -7647,8 +7577,9 @@ cargo.rust_library( deps = [ ":bitflags-1.3.2", ":cfg-if-1.0.0", - ":libc-0.2.151", + ":libc-0.2.147", ":pin-utils-0.1.0", + ":static_assertions-1.1.0", ], ) @@ -7674,8 +7605,8 @@ cargo.rust_library( edition = "2021", visibility = [], deps = [ - ":byteorder-1.5.0", - ":data-encoding-2.5.0", + ":byteorder-1.4.3", + ":data-encoding-2.4.0", ":ed25519-dalek-1.0.1", ":log-0.4.20", ":rand-0.8.5", @@ -7684,28 +7615,28 @@ cargo.rust_library( ) http_archive( - name = "nkeys-0.3.2.crate", - sha256 = "aad178aad32087b19042ee36dfd450b73f5f934fbfb058b59b198684dfec4c47", - strip_prefix = "nkeys-0.3.2", - urls = ["https://crates.io/api/v1/crates/nkeys/0.3.2/download"], + name = "nkeys-0.3.1.crate", + sha256 = "3e9261eb915c785ea65708bc45ef43507ea46914e1a73f1412d1a38aba967c8e", + strip_prefix = "nkeys-0.3.1", + urls = ["https://crates.io/api/v1/crates/nkeys/0.3.1/download"], visibility = [], ) cargo.rust_library( - name = "nkeys-0.3.2", - srcs = [":nkeys-0.3.2.crate"], + name = "nkeys-0.3.1", + srcs = [":nkeys-0.3.1.crate"], crate = "nkeys", - crate_root = "nkeys-0.3.2.crate/src/lib.rs", + crate_root = "nkeys-0.3.1.crate/src/lib.rs", edition = "2021", visibility = [], deps = [ - ":byteorder-1.5.0", - ":data-encoding-2.5.0", - ":ed25519-2.2.3", - ":ed25519-dalek-2.1.0", + ":byteorder-1.4.3", + ":data-encoding-2.4.0", + ":ed25519-1.5.3", + ":ed25519-dalek-1.0.1", ":log-0.4.20", ":rand-0.8.5", - ":signatory-0.27.1", + ":signatory-0.23.2", ], ) @@ -7730,7 +7661,7 @@ cargo.rust_library( ], visibility = [], deps = [ - ":memchr-2.6.4", + ":memchr-2.5.0", ":minimal-lexical-0.2.1", ], ) @@ -7807,7 +7738,7 @@ cargo.rust_library( visibility = [], deps = [ ":num-integer-0.1.45", - ":num-traits-0.2.17", + ":num-traits-0.2.16", ], ) @@ -7860,15 +7791,15 @@ cargo.rust_library( 
rustc_flags = ["@$(location :num-bigint-dig-0.8.4-build-script-run[rustc_flags])"], visibility = [], deps = [ - ":byteorder-1.5.0", + ":byteorder-1.4.3", ":lazy_static-1.4.0", - ":libm-0.2.8", + ":libm-0.2.7", ":num-integer-0.1.45", ":num-iter-0.1.43", - ":num-traits-0.2.17", + ":num-traits-0.2.16", ":rand-0.8.5", - ":smallvec-1.11.2", - ":zeroize-1.7.0", + ":smallvec-1.11.0", + ":zeroize-1.6.0", ], ) @@ -7923,7 +7854,7 @@ cargo.rust_library( ], rustc_flags = ["@$(location :num-integer-0.1.45-build-script-run[rustc_flags])"], visibility = [], - deps = [":num-traits-0.2.17"], + deps = [":num-traits-0.2.16"], ) cargo.rust_binary( @@ -7971,7 +7902,7 @@ cargo.rust_library( visibility = [], deps = [ ":num-integer-0.1.45", - ":num-traits-0.2.17", + ":num-traits-0.2.16", ], ) @@ -7993,18 +7924,18 @@ buildscript_run( ) http_archive( - name = "num-traits-0.2.17.crate", - sha256 = "39e3200413f237f41ab11ad6d161bc7239c84dcb631773ccd7de3dfe4b5c267c", - strip_prefix = "num-traits-0.2.17", - urls = ["https://crates.io/api/v1/crates/num-traits/0.2.17/download"], + name = "num-traits-0.2.16.crate", + sha256 = "f30b0abd723be7e2ffca1272140fac1a2f084c77ec3e123c192b66af1ee9e6c2", + strip_prefix = "num-traits-0.2.16", + urls = ["https://crates.io/api/v1/crates/num-traits/0.2.16/download"], visibility = [], ) cargo.rust_library( - name = "num-traits-0.2.17", - srcs = [":num-traits-0.2.17.crate"], + name = "num-traits-0.2.16", + srcs = [":num-traits-0.2.16.crate"], crate = "num_traits", - crate_root = "num-traits-0.2.17.crate/src/lib.rs", + crate_root = "num-traits-0.2.16.crate/src/lib.rs", edition = "2018", features = [ "default", @@ -8012,16 +7943,16 @@ cargo.rust_library( "libm", "std", ], - rustc_flags = ["@$(location :num-traits-0.2.17-build-script-run[rustc_flags])"], + rustc_flags = ["@$(location :num-traits-0.2.16-build-script-run[rustc_flags])"], visibility = [], - deps = [":libm-0.2.8"], + deps = [":libm-0.2.7"], ) cargo.rust_binary( - name = 
"num-traits-0.2.17-build-script-build", - srcs = [":num-traits-0.2.17.crate"], + name = "num-traits-0.2.16-build-script-build", + srcs = [":num-traits-0.2.16.crate"], crate = "build_script_build", - crate_root = "num-traits-0.2.17.crate/build.rs", + crate_root = "num-traits-0.2.16.crate/build.rs", edition = "2018", features = [ "default", @@ -8034,16 +7965,16 @@ cargo.rust_binary( ) buildscript_run( - name = "num-traits-0.2.17-build-script-run", + name = "num-traits-0.2.16-build-script-run", package_name = "num-traits", - buildscript_rule = ":num-traits-0.2.17-build-script-build", + buildscript_rule = ":num-traits-0.2.16-build-script-build", features = [ "default", "i128", "libm", "std", ], - version = "0.2.17", + version = "0.2.16", ) alias( @@ -8068,16 +7999,16 @@ cargo.rust_library( edition = "2015", platform = { "linux-arm64": dict( - deps = [":libc-0.2.151"], + deps = [":libc-0.2.147"], ), "linux-x86_64": dict( - deps = [":libc-0.2.151"], + deps = [":libc-0.2.147"], ), "macos-arm64": dict( - deps = [":libc-0.2.151"], + deps = [":libc-0.2.147"], ), "macos-x86_64": dict( - deps = [":libc-0.2.151"], + deps = [":libc-0.2.147"], ), }, visibility = [], @@ -8127,9 +8058,9 @@ cargo.rust_library( visibility = [], deps = [ ":proc-macro-crate-1.3.1", - ":proc-macro2-1.0.70", + ":proc-macro2-1.0.66", ":quote-1.0.33", - ":syn-2.0.40", + ":syn-2.0.29", ], ) @@ -8155,18 +8086,18 @@ cargo.rust_library( ) http_archive( - name = "object-0.32.1.crate", - sha256 = "9cf5f9dd3933bd50a9e1f149ec995f39ae2c496d31fd772c1fd45ebc27e902b0", - strip_prefix = "object-0.32.1", - urls = ["https://crates.io/api/v1/crates/object/0.32.1/download"], + name = "object-0.32.0.crate", + sha256 = "77ac5bbd07aea88c60a577a1ce218075ffd59208b2d7ca97adf9bfc5aeb21ebe", + strip_prefix = "object-0.32.0", + urls = ["https://crates.io/api/v1/crates/object/0.32.0/download"], visibility = [], ) cargo.rust_library( - name = "object-0.32.1", - srcs = [":object-0.32.1.crate"], + name = "object-0.32.0", + srcs = 
[":object-0.32.0.crate"], crate = "object", - crate_root = "object-0.32.1.crate/src/lib.rs", + crate_root = "object-0.32.0.crate/src/lib.rs", edition = "2018", features = [ "archive", @@ -8178,28 +8109,28 @@ cargo.rust_library( "unaligned", ], visibility = [], - deps = [":memchr-2.6.4"], + deps = [":memchr-2.5.0"], ) alias( name = "once_cell", - actual = ":once_cell-1.19.0", + actual = ":once_cell-1.18.0", visibility = ["PUBLIC"], ) http_archive( - name = "once_cell-1.19.0.crate", - sha256 = "3fdb12b2476b595f9358c5161aa467c2438859caa136dec86c26fdd2efe17b92", - strip_prefix = "once_cell-1.19.0", - urls = ["https://crates.io/api/v1/crates/once_cell/1.19.0/download"], + name = "once_cell-1.18.0.crate", + sha256 = "dd8b5dd2ae5ed71462c540258bedcb51965123ad7e7ccf4b9a8cafaa4a63576d", + strip_prefix = "once_cell-1.18.0", + urls = ["https://crates.io/api/v1/crates/once_cell/1.18.0/download"], visibility = [], ) cargo.rust_library( - name = "once_cell-1.19.0", - srcs = [":once_cell-1.19.0.crate"], + name = "once_cell-1.18.0", + srcs = [":once_cell-1.18.0.crate"], crate = "once_cell", - crate_root = "once_cell-1.19.0.crate/src/lib.rs", + crate_root = "once_cell-1.18.0.crate/src/lib.rs", edition = "2021", features = [ "alloc", @@ -8230,48 +8161,48 @@ cargo.rust_library( alias( name = "open", - actual = ":open-5.0.1", + actual = ":open-5.0.0", visibility = ["PUBLIC"], ) http_archive( - name = "open-5.0.1.crate", - sha256 = "90878fb664448b54c4e592455ad02831e23a3f7e157374a8b95654731aac7349", - strip_prefix = "open-5.0.1", - urls = ["https://crates.io/api/v1/crates/open/5.0.1/download"], + name = "open-5.0.0.crate", + sha256 = "cfabf1927dce4d6fdf563d63328a0a506101ced3ec780ca2135747336c98cef8", + strip_prefix = "open-5.0.0", + urls = ["https://crates.io/api/v1/crates/open/5.0.0/download"], visibility = [], ) cargo.rust_library( - name = "open-5.0.1", - srcs = [":open-5.0.1.crate"], + name = "open-5.0.0", + srcs = [":open-5.0.0.crate"], crate = "open", - crate_root = 
"open-5.0.1.crate/src/lib.rs", + crate_root = "open-5.0.0.crate/src/lib.rs", edition = "2018", platform = { "linux-arm64": dict( deps = [ ":is-wsl-0.4.0", - ":libc-0.2.151", + ":libc-0.2.147", ":pathdiff-0.2.1", ], ), "linux-x86_64": dict( deps = [ ":is-wsl-0.4.0", - ":libc-0.2.151", + ":libc-0.2.147", ":pathdiff-0.2.1", ], ), "macos-arm64": dict( deps = [ - ":libc-0.2.151", + ":libc-0.2.147", ":pathdiff-0.2.1", ], ), "macos-x86_64": dict( deps = [ - ":libc-0.2.151", + ":libc-0.2.147", ":pathdiff-0.2.1", ], ), @@ -8371,15 +8302,15 @@ cargo.rust_library( ], visibility = [], deps = [ - ":async-trait-0.1.74", - ":futures-0.3.29", + ":async-trait-0.1.73", + ":futures-0.3.28", ":futures-util-0.3.29", - ":http-0.2.11", + ":http-0.2.9", ":opentelemetry-0.18.0", ":opentelemetry-proto-0.1.0", ":prost-0.11.9", - ":thiserror-1.0.50", - ":tokio-1.35.0", + ":thiserror-1.0.47", + ":tokio-1.32.0", ":tonic-0.8.3", ], ) @@ -8411,7 +8342,7 @@ cargo.rust_library( ], visibility = [], deps = [ - ":futures-0.3.29", + ":futures-0.3.28", ":futures-util-0.3.29", ":opentelemetry-0.18.0", ":prost-0.11.9", @@ -8503,9 +8434,9 @@ cargo.rust_library( ":futures-channel-0.3.29", ":futures-util-0.3.29", ":indexmap-1.9.3", - ":once_cell-1.19.0", - ":pin-project-lite-0.2.13", - ":thiserror-1.0.50", + ":once_cell-1.18.0", + ":pin-project-lite-0.2.12", + ":thiserror-1.0.47", ], ) @@ -8550,19 +8481,19 @@ cargo.rust_library( ], visibility = [], deps = [ - ":async-trait-0.1.74", + ":async-trait-0.1.73", ":crossbeam-channel-0.5.8", - ":dashmap-5.5.3", + ":dashmap-5.5.1", ":fnv-1.0.7", ":futures-channel-0.3.29", - ":futures-executor-0.3.29", + ":futures-executor-0.3.28", ":futures-util-0.3.29", - ":once_cell-1.19.0", + ":once_cell-1.18.0", ":opentelemetry_api-0.18.0", - ":percent-encoding-2.3.1", + ":percent-encoding-2.3.0", ":rand-0.8.5", - ":thiserror-1.0.50", - ":tokio-1.35.0", + ":thiserror-1.0.47", + ":tokio-1.32.0", ":tokio-stream-0.1.14", ], ) @@ -8585,25 +8516,25 @@ cargo.rust_library( ) 
http_archive( - name = "ordered-float-2.10.1.crate", - sha256 = "68f19d67e5a2795c94e73e0bb1cc1a7edeb2e28efd39e2e1c9b7a40c1108b11c", - strip_prefix = "ordered-float-2.10.1", - urls = ["https://crates.io/api/v1/crates/ordered-float/2.10.1/download"], + name = "ordered-float-2.10.0.crate", + sha256 = "7940cf2ca942593318d07fcf2596cdca60a85c9e7fab408a5e21a4f9dcd40d87", + strip_prefix = "ordered-float-2.10.0", + urls = ["https://crates.io/api/v1/crates/ordered-float/2.10.0/download"], visibility = [], ) cargo.rust_library( - name = "ordered-float-2.10.1", - srcs = [":ordered-float-2.10.1.crate"], + name = "ordered-float-2.10.0", + srcs = [":ordered-float-2.10.0.crate"], crate = "ordered_float", - crate_root = "ordered-float-2.10.1.crate/src/lib.rs", + crate_root = "ordered-float-2.10.0.crate/src/lib.rs", edition = "2018", features = [ "default", "std", ], visibility = [], - deps = [":num-traits-0.2.17"], + deps = [":num-traits-0.2.16"], ) http_archive( @@ -8625,7 +8556,7 @@ cargo.rust_library( "std", ], visibility = [], - deps = [":num-traits-0.2.17"], + deps = [":num-traits-0.2.16"], ) http_archive( @@ -8700,7 +8631,7 @@ cargo.rust_library( deps = [ ":Inflector-0.11.4", ":proc-macro-error-1.0.4", - ":proc-macro2-1.0.70", + ":proc-macro2-1.0.66", ":quote-1.0.33", ":syn-1.0.109", ], @@ -8768,13 +8699,13 @@ cargo.rust_library( "std", ], named_deps = { - "ecdsa_core": ":ecdsa-0.16.9", + "ecdsa_core": ":ecdsa-0.16.8", }, visibility = [], deps = [ - ":elliptic-curve-0.13.8", - ":primeorder-0.13.6", - ":sha2-0.10.8", + ":elliptic-curve-0.13.5", + ":primeorder-0.13.2", + ":sha2-0.10.7", ], ) @@ -8807,29 +8738,29 @@ cargo.rust_library( "std", ], named_deps = { - "ecdsa_core": ":ecdsa-0.16.9", + "ecdsa_core": ":ecdsa-0.16.8", }, visibility = [], deps = [ - ":elliptic-curve-0.13.8", - ":primeorder-0.13.6", - ":sha2-0.10.8", + ":elliptic-curve-0.13.5", + ":primeorder-0.13.2", + ":sha2-0.10.7", ], ) http_archive( - name = "parking-2.2.0.crate", - sha256 = 
"bb813b8af86854136c6922af0598d719255ecb2179515e6e7730d468f05c9cae", - strip_prefix = "parking-2.2.0", - urls = ["https://crates.io/api/v1/crates/parking/2.2.0/download"], + name = "parking-2.1.0.crate", + sha256 = "14f2252c834a40ed9bb5422029649578e63aa341ac401f74e719dd1afda8394e", + strip_prefix = "parking-2.1.0", + urls = ["https://crates.io/api/v1/crates/parking/2.1.0/download"], visibility = [], ) cargo.rust_library( - name = "parking-2.2.0", - srcs = [":parking-2.2.0.crate"], + name = "parking-2.1.0", + srcs = [":parking-2.1.0.crate"], crate = "parking", - crate_root = "parking-2.2.0.crate/src/lib.rs", + crate_root = "parking-2.1.0.crate/src/lib.rs", edition = "2018", visibility = [], ) @@ -8852,7 +8783,7 @@ cargo.rust_library( visibility = [], deps = [ ":instant-0.1.12", - ":lock_api-0.4.11", + ":lock_api-0.4.10", ":parking_lot_core-0.8.6", ], ) @@ -8874,8 +8805,8 @@ cargo.rust_library( features = ["default"], visibility = [], deps = [ - ":lock_api-0.4.11", - ":parking_lot_core-0.9.9", + ":lock_api-0.4.10", + ":parking_lot_core-0.9.8", ], ) @@ -8895,16 +8826,16 @@ cargo.rust_library( edition = "2018", platform = { "linux-arm64": dict( - deps = [":libc-0.2.151"], + deps = [":libc-0.2.147"], ), "linux-x86_64": dict( - deps = [":libc-0.2.151"], + deps = [":libc-0.2.147"], ), "macos-arm64": dict( - deps = [":libc-0.2.151"], + deps = [":libc-0.2.147"], ), "macos-x86_64": dict( - deps = [":libc-0.2.151"], + deps = [":libc-0.2.147"], ), "windows-gnu": dict( deps = [":winapi-0.3.9"], @@ -8917,36 +8848,36 @@ cargo.rust_library( deps = [ ":cfg-if-1.0.0", ":instant-0.1.12", - ":smallvec-1.11.2", + ":smallvec-1.11.0", ], ) http_archive( - name = "parking_lot_core-0.9.9.crate", - sha256 = "4c42a9226546d68acdd9c0a280d17ce19bfe27a46bf68784e4066115788d008e", - strip_prefix = "parking_lot_core-0.9.9", - urls = ["https://crates.io/api/v1/crates/parking_lot_core/0.9.9/download"], + name = "parking_lot_core-0.9.8.crate", + sha256 = 
"93f00c865fe7cabf650081affecd3871070f26767e7b2070a3ffae14c654b447", + strip_prefix = "parking_lot_core-0.9.8", + urls = ["https://crates.io/api/v1/crates/parking_lot_core/0.9.8/download"], visibility = [], ) cargo.rust_library( - name = "parking_lot_core-0.9.9", - srcs = [":parking_lot_core-0.9.9.crate"], + name = "parking_lot_core-0.9.8", + srcs = [":parking_lot_core-0.9.8.crate"], crate = "parking_lot_core", - crate_root = "parking_lot_core-0.9.9.crate/src/lib.rs", + crate_root = "parking_lot_core-0.9.8.crate/src/lib.rs", edition = "2018", platform = { "linux-arm64": dict( - deps = [":libc-0.2.151"], + deps = [":libc-0.2.147"], ), "linux-x86_64": dict( - deps = [":libc-0.2.151"], + deps = [":libc-0.2.147"], ), "macos-arm64": dict( - deps = [":libc-0.2.151"], + deps = [":libc-0.2.147"], ), "macos-x86_64": dict( - deps = [":libc-0.2.151"], + deps = [":libc-0.2.147"], ), "windows-gnu": dict( deps = [":windows-targets-0.48.5"], @@ -8958,7 +8889,7 @@ cargo.rust_library( visibility = [], deps = [ ":cfg-if-1.0.0", - ":smallvec-1.11.2", + ":smallvec-1.11.0", ], ) @@ -9067,18 +8998,18 @@ cargo.rust_library( ) http_archive( - name = "percent-encoding-2.3.1.crate", - sha256 = "e3148f5046208a5d56bcfc03053e3ca6334e51da8dfb19b6cdc8b306fae3283e", - strip_prefix = "percent-encoding-2.3.1", - urls = ["https://crates.io/api/v1/crates/percent-encoding/2.3.1/download"], + name = "percent-encoding-2.3.0.crate", + sha256 = "9b2a4787296e9989611394c33f193f676704af1686e70b8f8033ab5ba9a35a94", + strip_prefix = "percent-encoding-2.3.0", + urls = ["https://crates.io/api/v1/crates/percent-encoding/2.3.0/download"], visibility = [], ) cargo.rust_library( - name = "percent-encoding-2.3.1", - srcs = [":percent-encoding-2.3.1.crate"], + name = "percent-encoding-2.3.0", + srcs = [":percent-encoding-2.3.0.crate"], crate = "percent_encoding", - crate_root = "percent-encoding-2.3.1.crate/src/lib.rs", + crate_root = "percent-encoding-2.3.0.crate/src/lib.rs", edition = "2018", features = [ "alloc", @@ 
-9120,9 +9051,9 @@ cargo.rust_library( visibility = [], deps = [ ":fixedbitset-0.4.2", - ":indexmap-2.1.0", - ":serde-1.0.193", - ":serde_derive-1.0.193", + ":indexmap-2.0.0", + ":serde-1.0.186", + ":serde_derive-1.0.186", ], ) @@ -9220,7 +9151,7 @@ cargo.rust_library( proc_macro = True, visibility = [], deps = [ - ":proc-macro2-1.0.70", + ":proc-macro2-1.0.66", ":quote-1.0.33", ":syn-1.0.109", ], @@ -9243,31 +9174,31 @@ cargo.rust_library( proc_macro = True, visibility = [], deps = [ - ":proc-macro2-1.0.70", + ":proc-macro2-1.0.66", ":quote-1.0.33", - ":syn-2.0.40", + ":syn-2.0.29", ], ) alias( name = "pin-project-lite", - actual = ":pin-project-lite-0.2.13", + actual = ":pin-project-lite-0.2.12", visibility = ["PUBLIC"], ) http_archive( - name = "pin-project-lite-0.2.13.crate", - sha256 = "8afb450f006bf6385ca15ef45d71d2288452bc3683ce2e2cacc0d18e4be60b58", - strip_prefix = "pin-project-lite-0.2.13", - urls = ["https://crates.io/api/v1/crates/pin-project-lite/0.2.13/download"], + name = "pin-project-lite-0.2.12.crate", + sha256 = "12cc1b0bf1727a77a54b6654e7b5f1af8604923edc8b81885f8ec92f9e3f0a05", + strip_prefix = "pin-project-lite-0.2.12", + urls = ["https://crates.io/api/v1/crates/pin-project-lite/0.2.12/download"], visibility = [], ) cargo.rust_library( - name = "pin-project-lite-0.2.13", - srcs = [":pin-project-lite-0.2.13.crate"], + name = "pin-project-lite-0.2.12", + srcs = [":pin-project-lite-0.2.12.crate"], crate = "pin_project_lite", - crate_root = "pin-project-lite-0.2.13.crate/src/lib.rs", + crate_root = "pin-project-lite-0.2.12.crate/src/lib.rs", edition = "2018", visibility = [], ) @@ -9315,7 +9246,7 @@ cargo.rust_library( ":der-0.6.1", ":pkcs8-0.9.0", ":spki-0.6.0", - ":zeroize-1.7.0", + ":zeroize-1.6.0", ], ) @@ -9341,7 +9272,7 @@ cargo.rust_library( visibility = [], deps = [ ":der-0.7.8", - ":spki-0.7.3", + ":spki-0.7.2", ], ) @@ -9371,7 +9302,7 @@ cargo.rust_library( ":der-0.4.5", ":pem-rfc7468-0.2.3", ":spki-0.4.1", - ":zeroize-1.7.0", + 
":zeroize-1.6.0", ], ) @@ -9418,27 +9349,6 @@ cargo.rust_library( visibility = [], ) -http_archive( - name = "platforms-3.2.0.crate", - sha256 = "14e6ab3f592e6fb464fc9712d8d6e6912de6473954635fd76a589d832cffcbb0", - strip_prefix = "platforms-3.2.0", - urls = ["https://crates.io/api/v1/crates/platforms/3.2.0/download"], - visibility = [], -) - -cargo.rust_library( - name = "platforms-3.2.0", - srcs = [":platforms-3.2.0.crate"], - crate = "platforms", - crate_root = "platforms-3.2.0.crate/src/lib.rs", - edition = "2018", - features = [ - "default", - "std", - ], - visibility = [], -) - alias( name = "podman-api", actual = ":podman-api-0.10.0", @@ -9466,22 +9376,22 @@ cargo.rust_library( visibility = [], deps = [ ":base64-0.13.1", - ":byteorder-1.5.0", - ":bytes-1.5.0", - ":chrono-0.4.31", + ":byteorder-1.4.3", + ":bytes-1.4.0", + ":chrono-0.4.26", ":containers-api-0.8.0", - ":flate2-1.0.28", + ":flate2-1.0.27", ":futures-util-0.3.29", ":futures_codec-0.4.1", ":log-0.4.20", ":paste-1.0.14", ":podman-api-stubs-0.9.0", - ":serde-1.0.193", - ":serde_json-1.0.108", + ":serde-1.0.186", + ":serde_json-1.0.105", ":tar-0.4.40", - ":thiserror-1.0.50", - ":tokio-1.35.0", - ":url-2.5.0", + ":thiserror-1.0.47", + ":tokio-1.32.0", + ":url-2.4.0", ], ) @@ -9501,61 +9411,61 @@ cargo.rust_library( edition = "2021", visibility = [], deps = [ - ":chrono-0.4.31", - ":serde-1.0.193", - ":serde_json-1.0.108", + ":chrono-0.4.26", + ":serde-1.0.186", + ":serde_json-1.0.105", ], ) http_archive( - name = "portable-atomic-1.6.0.crate", - sha256 = "7170ef9988bc169ba16dd36a7fa041e5c4cbeb6a35b76d4c03daded371eae7c0", - strip_prefix = "portable-atomic-1.6.0", - urls = ["https://crates.io/api/v1/crates/portable-atomic/1.6.0/download"], + name = "portable-atomic-1.4.2.crate", + sha256 = "f32154ba0af3a075eefa1eda8bb414ee928f62303a54ea85b8d6638ff1a6ee9e", + strip_prefix = "portable-atomic-1.4.2", + urls = ["https://crates.io/api/v1/crates/portable-atomic/1.4.2/download"], visibility = [], ) 
cargo.rust_library( - name = "portable-atomic-1.6.0", - srcs = [":portable-atomic-1.6.0.crate"], + name = "portable-atomic-1.4.2", + srcs = [":portable-atomic-1.4.2.crate"], crate = "portable_atomic", - crate_root = "portable-atomic-1.6.0.crate/src/lib.rs", + crate_root = "portable-atomic-1.4.2.crate/src/lib.rs", edition = "2018", env = { - "CARGO_MANIFEST_DIR": "portable-atomic-1.6.0.crate", + "CARGO_MANIFEST_DIR": "portable-atomic-1.4.2.crate", "CARGO_PKG_AUTHORS": "", "CARGO_PKG_DESCRIPTION": "Portable atomic types including support for 128-bit atomics, atomic float, etc.\n", "CARGO_PKG_NAME": "portable-atomic", "CARGO_PKG_REPOSITORY": "https://github.com/taiki-e/portable-atomic", - "CARGO_PKG_VERSION": "1.6.0", + "CARGO_PKG_VERSION": "1.4.2", "CARGO_PKG_VERSION_MAJOR": "1", - "CARGO_PKG_VERSION_MINOR": "6", - "CARGO_PKG_VERSION_PATCH": "0", + "CARGO_PKG_VERSION_MINOR": "4", + "CARGO_PKG_VERSION_PATCH": "2", }, features = [ "default", "fallback", ], - rustc_flags = ["@$(location :portable-atomic-1.6.0-build-script-run[rustc_flags])"], + rustc_flags = ["@$(location :portable-atomic-1.4.2-build-script-run[rustc_flags])"], visibility = [], ) cargo.rust_binary( - name = "portable-atomic-1.6.0-build-script-build", - srcs = [":portable-atomic-1.6.0.crate"], + name = "portable-atomic-1.4.2-build-script-build", + srcs = [":portable-atomic-1.4.2.crate"], crate = "build_script_build", - crate_root = "portable-atomic-1.6.0.crate/build.rs", + crate_root = "portable-atomic-1.4.2.crate/build.rs", edition = "2018", env = { - "CARGO_MANIFEST_DIR": "portable-atomic-1.6.0.crate", + "CARGO_MANIFEST_DIR": "portable-atomic-1.4.2.crate", "CARGO_PKG_AUTHORS": "", "CARGO_PKG_DESCRIPTION": "Portable atomic types including support for 128-bit atomics, atomic float, etc.\n", "CARGO_PKG_NAME": "portable-atomic", "CARGO_PKG_REPOSITORY": "https://github.com/taiki-e/portable-atomic", - "CARGO_PKG_VERSION": "1.6.0", + "CARGO_PKG_VERSION": "1.4.2", "CARGO_PKG_VERSION_MAJOR": "1", - 
"CARGO_PKG_VERSION_MINOR": "6", - "CARGO_PKG_VERSION_PATCH": "0", + "CARGO_PKG_VERSION_MINOR": "4", + "CARGO_PKG_VERSION_PATCH": "2", }, features = [ "default", @@ -9565,37 +9475,74 @@ cargo.rust_binary( ) buildscript_run( - name = "portable-atomic-1.6.0-build-script-run", + name = "portable-atomic-1.4.2-build-script-run", package_name = "portable-atomic", - buildscript_rule = ":portable-atomic-1.6.0-build-script-build", + buildscript_rule = ":portable-atomic-1.4.2-build-script-build", features = [ "default", "fallback", ], - version = "1.6.0", + version = "1.4.2", +) + +alias( + name = "postcard", + actual = ":postcard-1.0.8", + visibility = ["PUBLIC"], ) http_archive( - name = "postgres-derive-0.4.5.crate", - sha256 = "83145eba741b050ef981a9a1838c843fa7665e154383325aa8b440ae703180a2", - strip_prefix = "postgres-derive-0.4.5", - urls = ["https://crates.io/api/v1/crates/postgres-derive/0.4.5/download"], + name = "postcard-1.0.8.crate", + sha256 = "a55c51ee6c0db07e68448e336cf8ea4131a620edefebf9893e759b2d793420f8", + strip_prefix = "postcard-1.0.8", + urls = ["https://crates.io/api/v1/crates/postcard/1.0.8/download"], visibility = [], ) cargo.rust_library( - name = "postgres-derive-0.4.5", - srcs = [":postgres-derive-0.4.5.crate"], - crate = "postgres_derive", - crate_root = "postgres-derive-0.4.5.crate/src/lib.rs", + name = "postcard-1.0.8", + srcs = [":postcard-1.0.8.crate"], + crate = "postcard", + crate_root = "postcard-1.0.8.crate/src/lib.rs", + edition = "2018", + features = [ + "alloc", + "default", + "embedded-io", + "heapless", + "heapless-cas", + "use-std", + ], + visibility = [], + deps = [ + ":cobs-0.2.3", + ":embedded-io-0.4.0", + ":heapless-0.7.17", + ":serde-1.0.186", + ], +) + +http_archive( + name = "postgres-derive-0.4.5.crate", + sha256 = "83145eba741b050ef981a9a1838c843fa7665e154383325aa8b440ae703180a2", + strip_prefix = "postgres-derive-0.4.5", + urls = ["https://crates.io/api/v1/crates/postgres-derive/0.4.5/download"], + visibility = [], +) + 
+cargo.rust_library( + name = "postgres-derive-0.4.5", + srcs = [":postgres-derive-0.4.5.crate"], + crate = "postgres_derive", + crate_root = "postgres-derive-0.4.5.crate/src/lib.rs", edition = "2018", proc_macro = True, visibility = [], deps = [ ":heck-0.4.1", - ":proc-macro2-1.0.70", + ":proc-macro2-1.0.66", ":quote-1.0.33", - ":syn-2.0.40", + ":syn-2.0.29", ], ) @@ -9616,16 +9563,16 @@ cargo.rust_library( features = ["default"], visibility = [], deps = [ - ":base64-0.21.5", - ":byteorder-1.5.0", - ":bytes-1.5.0", + ":base64-0.21.2", + ":byteorder-1.4.3", + ":bytes-1.4.0", ":fallible-iterator-0.2.0", ":hmac-0.12.1", - ":md-5-0.10.6", - ":memchr-2.6.4", + ":md-5-0.10.5", + ":memchr-2.5.0", ":rand-0.8.5", - ":sha2-0.10.8", - ":stringprep-0.1.4", + ":sha2-0.10.7", + ":stringprep-0.1.3", ], ) @@ -9650,6 +9597,8 @@ cargo.rust_library( crate_root = "postgres-types-0.2.6.crate/src/lib.rs", edition = "2018", features = [ + "array-impls", + "array-init", "chrono-04", "derive", "postgres-derive", @@ -9659,36 +9608,20 @@ cargo.rust_library( "with-serde_json-1", ], named_deps = { - "chrono_04": ":chrono-0.4.31", - "serde_1": ":serde-1.0.193", - "serde_json_1": ":serde_json-1.0.108", + "chrono_04": ":chrono-0.4.26", + "serde_1": ":serde-1.0.186", + "serde_json_1": ":serde_json-1.0.105", }, visibility = [], deps = [ - ":bytes-1.5.0", + ":array-init-2.1.0", + ":bytes-1.4.0", ":fallible-iterator-0.2.0", ":postgres-derive-0.4.5", ":postgres-protocol-0.6.6", ], ) -http_archive( - name = "powerfmt-0.2.0.crate", - sha256 = "439ee305def115ba05938db6eb1644ff94165c5ab5e9420d1c1bcedbba909391", - strip_prefix = "powerfmt-0.2.0", - urls = ["https://crates.io/api/v1/crates/powerfmt/0.2.0/download"], - visibility = [], -) - -cargo.rust_library( - name = "powerfmt-0.2.0", - srcs = [":powerfmt-0.2.0.crate"], - crate = "powerfmt", - crate_root = "powerfmt-0.2.0.crate/src/lib.rs", - edition = "2021", - visibility = [], -) - http_archive( name = "ppv-lite86-0.2.17.crate", sha256 = 
"5b40af805b3121feab8a3c29f04d8ad262fa8e0561883e7653e024ae4479e6de", @@ -9789,66 +9722,66 @@ cargo.rust_library( }, visibility = [], deps = [ - ":proc-macro2-1.0.70", + ":proc-macro2-1.0.66", ":syn-1.0.109", ], ) http_archive( - name = "primeorder-0.13.6.crate", - sha256 = "353e1ca18966c16d9deb1c69278edbc5f194139612772bd9537af60ac231e1e6", - strip_prefix = "primeorder-0.13.6", - urls = ["https://crates.io/api/v1/crates/primeorder/0.13.6/download"], + name = "primeorder-0.13.2.crate", + sha256 = "3c2fcef82c0ec6eefcc179b978446c399b3cdf73c392c35604e399eee6df1ee3", + strip_prefix = "primeorder-0.13.2", + urls = ["https://crates.io/api/v1/crates/primeorder/0.13.2/download"], visibility = [], ) cargo.rust_library( - name = "primeorder-0.13.6", - srcs = [":primeorder-0.13.6.crate"], + name = "primeorder-0.13.2", + srcs = [":primeorder-0.13.2.crate"], crate = "primeorder", - crate_root = "primeorder-0.13.6.crate/src/lib.rs", + crate_root = "primeorder-0.13.2.crate/src/lib.rs", edition = "2021", visibility = [], - deps = [":elliptic-curve-0.13.8"], + deps = [":elliptic-curve-0.13.5"], ) http_archive( - name = "proc-macro-crate-1.3.1.crate", - sha256 = "7f4c021e1093a56626774e81216a4ce732a735e5bad4868a03f3ed65ca0c3919", - strip_prefix = "proc-macro-crate-1.3.1", - urls = ["https://crates.io/api/v1/crates/proc-macro-crate/1.3.1/download"], + name = "proc-macro-crate-0.1.5.crate", + sha256 = "1d6ea3c4595b96363c13943497db34af4460fb474a95c43f4446ad341b8c9785", + strip_prefix = "proc-macro-crate-0.1.5", + urls = ["https://crates.io/api/v1/crates/proc-macro-crate/0.1.5/download"], visibility = [], ) cargo.rust_library( - name = "proc-macro-crate-1.3.1", - srcs = [":proc-macro-crate-1.3.1.crate"], + name = "proc-macro-crate-0.1.5", + srcs = [":proc-macro-crate-0.1.5.crate"], crate = "proc_macro_crate", - crate_root = "proc-macro-crate-1.3.1.crate/src/lib.rs", - edition = "2021", + crate_root = "proc-macro-crate-0.1.5.crate/src/lib.rs", + edition = "2018", visibility = [], - deps = [ 
- ":once_cell-1.19.0", - ":toml_edit-0.19.15", - ], + deps = [":toml-0.5.11"], ) http_archive( - name = "proc-macro-crate-2.0.0.crate", - sha256 = "7e8366a6159044a37876a2b9817124296703c586a5c92e2c53751fa06d8d43e8", - strip_prefix = "proc-macro-crate-2.0.0", - urls = ["https://crates.io/api/v1/crates/proc-macro-crate/2.0.0/download"], + name = "proc-macro-crate-1.3.1.crate", + sha256 = "7f4c021e1093a56626774e81216a4ce732a735e5bad4868a03f3ed65ca0c3919", + strip_prefix = "proc-macro-crate-1.3.1", + urls = ["https://crates.io/api/v1/crates/proc-macro-crate/1.3.1/download"], visibility = [], ) cargo.rust_library( - name = "proc-macro-crate-2.0.0", - srcs = [":proc-macro-crate-2.0.0.crate"], + name = "proc-macro-crate-1.3.1", + srcs = [":proc-macro-crate-1.3.1.crate"], crate = "proc_macro_crate", - crate_root = "proc-macro-crate-2.0.0.crate/src/lib.rs", + crate_root = "proc-macro-crate-1.3.1.crate/src/lib.rs", edition = "2021", visibility = [], - deps = [":toml_edit-0.20.7"], + deps = [ + ":once_cell-1.18.0", + ":toml_edit-0.19.14", + ], ) http_archive( @@ -9874,7 +9807,7 @@ cargo.rust_library( visibility = [], deps = [ ":proc-macro-error-attr-1.0.4", - ":proc-macro2-1.0.70", + ":proc-macro2-1.0.66", ":quote-1.0.33", ":syn-1.0.109", ], @@ -9924,45 +9857,45 @@ cargo.rust_library( proc_macro = True, visibility = [], deps = [ - ":proc-macro2-1.0.70", + ":proc-macro2-1.0.66", ":quote-1.0.33", ], ) alias( name = "proc-macro2", - actual = ":proc-macro2-1.0.70", + actual = ":proc-macro2-1.0.66", visibility = ["PUBLIC"], ) http_archive( - name = "proc-macro2-1.0.70.crate", - sha256 = "39278fbbf5fb4f646ce651690877f89d1c5811a3d4acb27700c1cb3cdb78fd3b", - strip_prefix = "proc-macro2-1.0.70", - urls = ["https://crates.io/api/v1/crates/proc-macro2/1.0.70/download"], + name = "proc-macro2-1.0.66.crate", + sha256 = "18fb31db3f9bddb2ea821cde30a9f70117e3f119938b5ee630b7403aa6e2ead9", + strip_prefix = "proc-macro2-1.0.66", + urls = 
["https://crates.io/api/v1/crates/proc-macro2/1.0.66/download"], visibility = [], ) cargo.rust_library( - name = "proc-macro2-1.0.70", - srcs = [":proc-macro2-1.0.70.crate"], + name = "proc-macro2-1.0.66", + srcs = [":proc-macro2-1.0.66.crate"], crate = "proc_macro2", - crate_root = "proc-macro2-1.0.70.crate/src/lib.rs", + crate_root = "proc-macro2-1.0.66.crate/src/lib.rs", edition = "2021", features = [ "default", "proc-macro", ], - rustc_flags = ["@$(location :proc-macro2-1.0.70-build-script-run[rustc_flags])"], + rustc_flags = ["@$(location :proc-macro2-1.0.66-build-script-run[rustc_flags])"], visibility = [], - deps = [":unicode-ident-1.0.12"], + deps = [":unicode-ident-1.0.11"], ) cargo.rust_binary( - name = "proc-macro2-1.0.70-build-script-build", - srcs = [":proc-macro2-1.0.70.crate"], + name = "proc-macro2-1.0.66-build-script-build", + srcs = [":proc-macro2-1.0.66.crate"], crate = "build_script_build", - crate_root = "proc-macro2-1.0.70.crate/build.rs", + crate_root = "proc-macro2-1.0.66.crate/build.rs", edition = "2021", features = [ "default", @@ -9972,14 +9905,14 @@ cargo.rust_binary( ) buildscript_run( - name = "proc-macro2-1.0.70-build-script-run", + name = "proc-macro2-1.0.66-build-script-run", package_name = "proc-macro2", - buildscript_rule = ":proc-macro2-1.0.70-build-script-build", + buildscript_rule = ":proc-macro2-1.0.66-build-script-build", features = [ "default", "proc-macro", ], - version = "1.0.70", + version = "1.0.66", ) http_archive( @@ -10003,7 +9936,7 @@ cargo.rust_library( ], visibility = [], deps = [ - ":bytes-1.5.0", + ":bytes-1.4.0", ":prost-derive-0.11.9", ], ) @@ -10030,7 +9963,7 @@ cargo.rust_library( ], visibility = [], deps = [ - ":bytes-1.5.0", + ":bytes-1.4.0", ":heck-0.4.1", ":itertools-0.10.5", ":lazy_static-1.4.0", @@ -10040,10 +9973,10 @@ cargo.rust_library( ":prettyplease-0.1.25", ":prost-0.11.9", ":prost-types-0.11.9", - ":regex-1.10.2", + ":regex-1.9.3", ":syn-1.0.109", - ":tempfile-3.8.1", - ":which-4.4.2", + 
":tempfile-3.8.0", + ":which-4.4.0", ], ) @@ -10066,7 +9999,7 @@ cargo.rust_library( deps = [ ":anyhow-1.0.75", ":itertools-0.10.5", - ":proc-macro2-1.0.70", + ":proc-macro2-1.0.66", ":quote-1.0.33", ":syn-1.0.109", ], @@ -10126,7 +10059,7 @@ cargo.rust_library( proc_macro = True, visibility = [], deps = [ - ":proc-macro2-1.0.70", + ":proc-macro2-1.0.66", ":quote-1.0.33", ":syn-1.0.109", ], @@ -10153,8 +10086,8 @@ cargo.rust_library( ], visibility = [], deps = [ - ":memchr-2.6.4", - ":serde-1.0.193", + ":memchr-2.5.0", + ":serde-1.0.186", ], ) @@ -10183,7 +10116,7 @@ cargo.rust_library( "proc-macro", ], visibility = [], - deps = [":proc-macro2-1.0.70"], + deps = [":proc-macro2-1.0.66"], ) alias( @@ -10208,21 +10141,21 @@ cargo.rust_library( edition = "2018", visibility = [], deps = [ - ":async-trait-0.1.74", - ":bytes-1.5.0", - ":dashmap-5.5.3", - ":futures-0.3.29", + ":async-trait-0.1.73", + ":bytes-1.4.0", + ":dashmap-5.5.1", + ":futures-0.3.28", ":pin-project-1.1.3", ":rabbitmq-stream-protocol-0.3.0", ":rand-0.8.5", - ":rustls-pemfile-1.0.4", - ":thiserror-1.0.50", - ":tokio-1.35.0", + ":rustls-pemfile-1.0.3", + ":thiserror-1.0.47", + ":tokio-1.32.0", ":tokio-rustls-0.24.1", ":tokio-stream-0.1.14", - ":tokio-util-0.7.10", - ":tracing-0.1.40", - ":url-2.5.0", + ":tokio-util-0.7.8", + ":tracing-0.1.37", + ":url-2.4.0", ], ) @@ -10242,12 +10175,12 @@ cargo.rust_library( edition = "2018", visibility = [], deps = [ - ":byteorder-1.5.0", - ":chrono-0.4.31", + ":byteorder-1.4.3", + ":chrono-0.4.26", ":derive_more-0.99.17", ":num_enum-0.6.1", ":ordered-float-3.9.2", - ":uuid-1.6.1", + ":uuid-1.4.1", ], ) @@ -10297,25 +10230,25 @@ cargo.rust_library( platform = { "linux-arm64": dict( deps = [ - ":libc-0.2.151", + ":libc-0.2.147", ":rand_chacha-0.2.2", ], ), "linux-x86_64": dict( deps = [ - ":libc-0.2.151", + ":libc-0.2.147", ":rand_chacha-0.2.2", ], ), "macos-arm64": dict( deps = [ - ":libc-0.2.151", + ":libc-0.2.147", ":rand_chacha-0.2.2", ], ), "macos-x86_64": dict( 
deps = [ - ":libc-0.2.151", + ":libc-0.2.147", ":rand_chacha-0.2.2", ], ), @@ -10362,16 +10295,16 @@ cargo.rust_library( ], platform = { "linux-arm64": dict( - deps = [":libc-0.2.151"], + deps = [":libc-0.2.147"], ), "linux-x86_64": dict( - deps = [":libc-0.2.151"], + deps = [":libc-0.2.147"], ), "macos-arm64": dict( - deps = [":libc-0.2.151"], + deps = [":libc-0.2.147"], ), "macos-x86_64": dict( - deps = [":libc-0.2.151"], + deps = [":libc-0.2.147"], ), }, visibility = [], @@ -10468,28 +10401,28 @@ cargo.rust_library( "std", ], visibility = [], - deps = [":getrandom-0.2.11"], + deps = [":getrandom-0.2.10"], ) alias( name = "refinery", - actual = ":refinery-0.8.11", + actual = ":refinery-0.8.10", visibility = ["PUBLIC"], ) http_archive( - name = "refinery-0.8.11.crate", - sha256 = "529664dbccc0a296947615c997a857912d72d1c44be1fafb7bae54ecfa7a8c24", - strip_prefix = "refinery-0.8.11", - urls = ["https://crates.io/api/v1/crates/refinery/0.8.11/download"], + name = "refinery-0.8.10.crate", + sha256 = "cdb0436d0dd7bd8d4fce1e828751fa79742b08e35f27cfea7546f8a322b5ef24", + strip_prefix = "refinery-0.8.10", + urls = ["https://crates.io/api/v1/crates/refinery/0.8.10/download"], visibility = [], ) cargo.rust_library( - name = "refinery-0.8.11", - srcs = [":refinery-0.8.11.crate"], + name = "refinery-0.8.10", + srcs = [":refinery-0.8.10.crate"], crate = "refinery", - crate_root = "refinery-0.8.11.crate/src/lib.rs", + crate_root = "refinery-0.8.10.crate/src/lib.rs", edition = "2018", features = [ "default", @@ -10497,24 +10430,24 @@ cargo.rust_library( ], visibility = [], deps = [ - ":refinery-core-0.8.11", - ":refinery-macros-0.8.11", + ":refinery-core-0.8.10", + ":refinery-macros-0.8.10", ], ) http_archive( - name = "refinery-core-0.8.11.crate", - sha256 = "e895cb870cf06e92318cbbeb701f274d022d5ca87a16fa8244e291cd035ef954", - strip_prefix = "refinery-core-0.8.11", - urls = ["https://crates.io/api/v1/crates/refinery-core/0.8.11/download"], + name = "refinery-core-0.8.10.crate", 
+ sha256 = "19206547cd047e8f4dfa6b20c30d3ecaf24be05841b6aa0aa926a47a3d0662bb", + strip_prefix = "refinery-core-0.8.10", + urls = ["https://crates.io/api/v1/crates/refinery-core/0.8.10/download"], visibility = [], ) cargo.rust_library( - name = "refinery-core-0.8.11", - srcs = [":refinery-core-0.8.11.crate"], + name = "refinery-core-0.8.10", + srcs = [":refinery-core-0.8.10.crate"], crate = "refinery_core", - crate_root = "refinery-core-0.8.11.crate/src/lib.rs", + crate_root = "refinery-core-0.8.10.crate/src/lib.rs", edition = "2018", features = [ "default", @@ -10523,67 +10456,67 @@ cargo.rust_library( ], visibility = [], deps = [ - ":async-trait-0.1.74", + ":async-trait-0.1.73", ":cfg-if-1.0.0", ":lazy_static-1.4.0", ":log-0.4.20", - ":regex-1.10.2", - ":serde-1.0.193", - ":siphasher-1.0.0", - ":thiserror-1.0.50", - ":time-0.3.30", - ":tokio-1.35.0", - ":tokio-postgres-0.7.10", - ":toml-0.7.8", - ":url-2.5.0", - ":walkdir-2.4.0", + ":regex-1.9.3", + ":serde-1.0.186", + ":siphasher-0.3.11", + ":thiserror-1.0.47", + ":time-0.3.27", + ":tokio-1.32.0", + ":tokio-postgres-0.7.9", + ":toml-0.7.6", + ":url-2.4.0", + ":walkdir-2.3.3", ], ) http_archive( - name = "refinery-macros-0.8.11.crate", - sha256 = "123e8b80f8010c3ae38330c81e76938fc7adf6cdbfbaad20295bb8c22718b4f1", - strip_prefix = "refinery-macros-0.8.11", - urls = ["https://crates.io/api/v1/crates/refinery-macros/0.8.11/download"], + name = "refinery-macros-0.8.10.crate", + sha256 = "d94d4b9241859ba19eaa5c04c86e782eb3aa0aae2c5868e0cfa90c856e58a174", + strip_prefix = "refinery-macros-0.8.10", + urls = ["https://crates.io/api/v1/crates/refinery-macros/0.8.10/download"], visibility = [], ) cargo.rust_library( - name = "refinery-macros-0.8.11", - srcs = [":refinery-macros-0.8.11.crate"], + name = "refinery-macros-0.8.10", + srcs = [":refinery-macros-0.8.10.crate"], crate = "refinery_macros", - crate_root = "refinery-macros-0.8.11.crate/src/lib.rs", + crate_root = "refinery-macros-0.8.10.crate/src/lib.rs", edition = 
"2018", proc_macro = True, visibility = [], deps = [ - ":proc-macro2-1.0.70", + ":proc-macro2-1.0.66", ":quote-1.0.33", - ":refinery-core-0.8.11", - ":regex-1.10.2", - ":syn-2.0.40", + ":refinery-core-0.8.10", + ":regex-1.9.3", + ":syn-2.0.29", ], ) alias( name = "regex", - actual = ":regex-1.10.2", + actual = ":regex-1.9.3", visibility = ["PUBLIC"], ) http_archive( - name = "regex-1.10.2.crate", - sha256 = "380b951a9c5e80ddfd6136919eef32310721aa4aacd4889a8d39124b026ab343", - strip_prefix = "regex-1.10.2", - urls = ["https://crates.io/api/v1/crates/regex/1.10.2/download"], + name = "regex-1.9.3.crate", + sha256 = "81bc1d4caf89fac26a70747fe603c130093b53c773888797a6329091246d651a", + strip_prefix = "regex-1.9.3", + urls = ["https://crates.io/api/v1/crates/regex/1.9.3/download"], visibility = [], ) cargo.rust_library( - name = "regex-1.10.2", - srcs = [":regex-1.10.2.crate"], + name = "regex-1.9.3", + srcs = [":regex-1.9.3.crate"], crate = "regex", - crate_root = "regex-1.10.2.crate/src/lib.rs", + crate_root = "regex-1.9.3.crate/src/lib.rs", edition = "2021", features = [ "default", @@ -10606,10 +10539,10 @@ cargo.rust_library( ], visibility = [], deps = [ - ":aho-corasick-1.1.2", - ":memchr-2.6.4", - ":regex-automata-0.4.3", - ":regex-syntax-0.8.2", + ":aho-corasick-1.0.4", + ":memchr-2.5.0", + ":regex-automata-0.3.6", + ":regex-syntax-0.7.4", ], ) @@ -10637,29 +10570,27 @@ cargo.rust_library( ) http_archive( - name = "regex-automata-0.4.3.crate", - sha256 = "5f804c7828047e88b2d32e2d7fe5a105da8ee3264f01902f796c8e067dc2483f", - strip_prefix = "regex-automata-0.4.3", - urls = ["https://crates.io/api/v1/crates/regex-automata/0.4.3/download"], + name = "regex-automata-0.3.6.crate", + sha256 = "fed1ceff11a1dddaee50c9dc8e4938bd106e9d89ae372f192311e7da498e3b69", + strip_prefix = "regex-automata-0.3.6", + urls = ["https://crates.io/api/v1/crates/regex-automata/0.3.6/download"], visibility = [], ) cargo.rust_library( - name = "regex-automata-0.4.3", - srcs = 
[":regex-automata-0.4.3.crate"], + name = "regex-automata-0.3.6", + srcs = [":regex-automata-0.3.6.crate"], crate = "regex_automata", - crate_root = "regex-automata-0.4.3.crate/src/lib.rs", + crate_root = "regex-automata-0.3.6.crate/src/lib.rs", edition = "2021", features = [ "alloc", "dfa-onepass", "hybrid", "meta", - "nfa", "nfa-backtrack", "nfa-pikevm", "nfa-thompson", - "perf", "perf-inline", "perf-literal", "perf-literal-multisubstring", @@ -10678,9 +10609,9 @@ cargo.rust_library( ], visibility = [], deps = [ - ":aho-corasick-1.1.2", - ":memchr-2.6.4", - ":regex-syntax-0.8.2", + ":aho-corasick-1.0.4", + ":memchr-2.5.0", + ":regex-syntax-0.7.4", ], ) @@ -10713,18 +10644,18 @@ cargo.rust_library( ) http_archive( - name = "regex-syntax-0.8.2.crate", - sha256 = "c08c74e62047bb2de4ff487b251e4a92e24f48745648451635cec7d591162d9f", - strip_prefix = "regex-syntax-0.8.2", - urls = ["https://crates.io/api/v1/crates/regex-syntax/0.8.2/download"], + name = "regex-syntax-0.7.4.crate", + sha256 = "e5ea92a5b6195c6ef2a0295ea818b312502c6fc94dde986c5553242e18fd4ce2", + strip_prefix = "regex-syntax-0.7.4", + urls = ["https://crates.io/api/v1/crates/regex-syntax/0.7.4/download"], visibility = [], ) cargo.rust_library( - name = "regex-syntax-0.8.2", - srcs = [":regex-syntax-0.8.2.crate"], + name = "regex-syntax-0.7.4", + srcs = [":regex-syntax-0.7.4.crate"], crate = "regex_syntax", - crate_root = "regex-syntax-0.8.2.crate/src/lib.rs", + crate_root = "regex-syntax-0.7.4.crate/src/lib.rs", edition = "2021", features = [ "default", @@ -10764,25 +10695,25 @@ cargo.rust_library( proc_macro = True, visibility = [], deps = [ - ":proc-macro2-1.0.70", + ":proc-macro2-1.0.66", ":quote-1.0.33", - ":syn-2.0.40", + ":syn-2.0.29", ], ) http_archive( - name = "rend-0.4.1.crate", - sha256 = "a2571463863a6bd50c32f94402933f03457a3fbaf697a707c5be741e459f08fd", - strip_prefix = "rend-0.4.1", - urls = ["https://crates.io/api/v1/crates/rend/0.4.1/download"], + name = "rend-0.4.0.crate", + sha256 = 
"581008d2099240d37fb08d77ad713bcaec2c4d89d50b5b21a8bb1996bbab68ab", + strip_prefix = "rend-0.4.0", + urls = ["https://crates.io/api/v1/crates/rend/0.4.0/download"], visibility = [], ) cargo.rust_library( - name = "rend-0.4.1", - srcs = [":rend-0.4.1.crate"], + name = "rend-0.4.0", + srcs = [":rend-0.4.0.crate"], crate = "rend", - crate_root = "rend-0.4.1.crate/src/lib.rs", + crate_root = "rend-0.4.0.crate/src/lib.rs", edition = "2018", features = [ "bytecheck", @@ -10794,23 +10725,23 @@ cargo.rust_library( alias( name = "reqwest", - actual = ":reqwest-0.11.22", + actual = ":reqwest-0.11.20", visibility = ["PUBLIC"], ) http_archive( - name = "reqwest-0.11.22.crate", - sha256 = "046cd98826c46c2ac8ddecae268eb5c2e58628688a5fc7a2643704a73faba95b", - strip_prefix = "reqwest-0.11.22", - urls = ["https://crates.io/api/v1/crates/reqwest/0.11.22/download"], + name = "reqwest-0.11.20.crate", + sha256 = "3e9ad3fe7488d7e34558a2033d45a0c90b72d97b4f80705666fea71472e2e6a1", + strip_prefix = "reqwest-0.11.20", + urls = ["https://crates.io/api/v1/crates/reqwest/0.11.20/download"], visibility = [], ) cargo.rust_library( - name = "reqwest-0.11.22", - srcs = [":reqwest-0.11.22.crate"], + name = "reqwest-0.11.20", + srcs = [":reqwest-0.11.20.crate"], crate = "reqwest", - crate_root = "reqwest-0.11.22.crate/src/lib.rs", + crate_root = "reqwest-0.11.20.crate/src/lib.rs", edition = "2018", features = [ "__rustls", @@ -10834,147 +10765,145 @@ cargo.rust_library( "linux-arm64": dict( deps = [ ":encoding_rs-0.8.33", - ":h2-0.3.22", - ":http-body-0.4.6", + ":h2-0.3.21", + ":http-body-0.4.5", ":hyper-0.14.27", - ":hyper-rustls-0.24.2", - ":ipnet-2.9.0", + ":hyper-rustls-0.24.1", + ":ipnet-2.8.0", ":log-0.4.20", ":mime-0.3.17", - ":once_cell-1.19.0", - ":percent-encoding-2.3.1", - ":pin-project-lite-0.2.13", - ":rustls-0.21.10", - ":rustls-pemfile-1.0.4", - ":tokio-1.35.0", + ":once_cell-1.18.0", + ":percent-encoding-2.3.0", + ":pin-project-lite-0.2.12", + ":rustls-0.21.6", + 
":rustls-pemfile-1.0.3", + ":tokio-1.32.0", ":tokio-rustls-0.24.1", - ":tokio-util-0.7.10", - ":webpki-roots-0.25.3", + ":tokio-util-0.7.8", + ":webpki-roots-0.25.2", ], ), "linux-x86_64": dict( deps = [ ":encoding_rs-0.8.33", - ":h2-0.3.22", - ":http-body-0.4.6", + ":h2-0.3.21", + ":http-body-0.4.5", ":hyper-0.14.27", - ":hyper-rustls-0.24.2", - ":ipnet-2.9.0", + ":hyper-rustls-0.24.1", + ":ipnet-2.8.0", ":log-0.4.20", ":mime-0.3.17", - ":once_cell-1.19.0", - ":percent-encoding-2.3.1", - ":pin-project-lite-0.2.13", - ":rustls-0.21.10", - ":rustls-pemfile-1.0.4", - ":tokio-1.35.0", + ":once_cell-1.18.0", + ":percent-encoding-2.3.0", + ":pin-project-lite-0.2.12", + ":rustls-0.21.6", + ":rustls-pemfile-1.0.3", + ":tokio-1.32.0", ":tokio-rustls-0.24.1", - ":tokio-util-0.7.10", - ":webpki-roots-0.25.3", + ":tokio-util-0.7.8", + ":webpki-roots-0.25.2", ], ), "macos-arm64": dict( deps = [ ":encoding_rs-0.8.33", - ":h2-0.3.22", - ":http-body-0.4.6", + ":h2-0.3.21", + ":http-body-0.4.5", ":hyper-0.14.27", - ":hyper-rustls-0.24.2", - ":ipnet-2.9.0", + ":hyper-rustls-0.24.1", + ":ipnet-2.8.0", ":log-0.4.20", ":mime-0.3.17", - ":once_cell-1.19.0", - ":percent-encoding-2.3.1", - ":pin-project-lite-0.2.13", - ":rustls-0.21.10", - ":rustls-pemfile-1.0.4", - ":system-configuration-0.5.1", - ":tokio-1.35.0", + ":once_cell-1.18.0", + ":percent-encoding-2.3.0", + ":pin-project-lite-0.2.12", + ":rustls-0.21.6", + ":rustls-pemfile-1.0.3", + ":tokio-1.32.0", ":tokio-rustls-0.24.1", - ":tokio-util-0.7.10", - ":webpki-roots-0.25.3", + ":tokio-util-0.7.8", + ":webpki-roots-0.25.2", ], ), "macos-x86_64": dict( deps = [ ":encoding_rs-0.8.33", - ":h2-0.3.22", - ":http-body-0.4.6", + ":h2-0.3.21", + ":http-body-0.4.5", ":hyper-0.14.27", - ":hyper-rustls-0.24.2", - ":ipnet-2.9.0", + ":hyper-rustls-0.24.1", + ":ipnet-2.8.0", ":log-0.4.20", ":mime-0.3.17", - ":once_cell-1.19.0", - ":percent-encoding-2.3.1", - ":pin-project-lite-0.2.13", - ":rustls-0.21.10", - ":rustls-pemfile-1.0.4", - 
":system-configuration-0.5.1", - ":tokio-1.35.0", + ":once_cell-1.18.0", + ":percent-encoding-2.3.0", + ":pin-project-lite-0.2.12", + ":rustls-0.21.6", + ":rustls-pemfile-1.0.3", + ":tokio-1.32.0", ":tokio-rustls-0.24.1", - ":tokio-util-0.7.10", - ":webpki-roots-0.25.3", + ":tokio-util-0.7.8", + ":webpki-roots-0.25.2", ], ), "windows-gnu": dict( deps = [ ":encoding_rs-0.8.33", - ":h2-0.3.22", - ":http-body-0.4.6", + ":h2-0.3.21", + ":http-body-0.4.5", ":hyper-0.14.27", - ":hyper-rustls-0.24.2", - ":ipnet-2.9.0", + ":hyper-rustls-0.24.1", + ":ipnet-2.8.0", ":log-0.4.20", ":mime-0.3.17", - ":once_cell-1.19.0", - ":percent-encoding-2.3.1", - ":pin-project-lite-0.2.13", - ":rustls-0.21.10", - ":rustls-pemfile-1.0.4", - ":tokio-1.35.0", + ":once_cell-1.18.0", + ":percent-encoding-2.3.0", + ":pin-project-lite-0.2.12", + ":rustls-0.21.6", + ":rustls-pemfile-1.0.3", + ":tokio-1.32.0", ":tokio-rustls-0.24.1", - ":tokio-util-0.7.10", - ":webpki-roots-0.25.3", + ":tokio-util-0.7.8", + ":webpki-roots-0.25.2", ":winreg-0.50.0", ], ), "windows-msvc": dict( deps = [ ":encoding_rs-0.8.33", - ":h2-0.3.22", - ":http-body-0.4.6", + ":h2-0.3.21", + ":http-body-0.4.5", ":hyper-0.14.27", - ":hyper-rustls-0.24.2", - ":ipnet-2.9.0", + ":hyper-rustls-0.24.1", + ":ipnet-2.8.0", ":log-0.4.20", ":mime-0.3.17", - ":once_cell-1.19.0", - ":percent-encoding-2.3.1", - ":pin-project-lite-0.2.13", - ":rustls-0.21.10", - ":rustls-pemfile-1.0.4", - ":tokio-1.35.0", + ":once_cell-1.18.0", + ":percent-encoding-2.3.0", + ":pin-project-lite-0.2.12", + ":rustls-0.21.6", + ":rustls-pemfile-1.0.3", + ":tokio-1.32.0", ":tokio-rustls-0.24.1", - ":tokio-util-0.7.10", - ":webpki-roots-0.25.3", + ":tokio-util-0.7.8", + ":webpki-roots-0.25.2", ":winreg-0.50.0", ], ), }, visibility = [], deps = [ - ":base64-0.21.5", - ":bytes-1.5.0", + ":base64-0.21.2", + ":bytes-1.4.0", ":futures-core-0.3.29", ":futures-util-0.3.29", - ":http-0.2.11", + ":http-0.2.9", ":mime_guess-2.0.4", - ":serde-1.0.193", - 
":serde_json-1.0.108", + ":serde-1.0.186", + ":serde_json-1.0.105", ":serde_urlencoded-0.7.1", ":tower-service-0.3.2", - ":url-2.5.0", + ":url-2.4.0", ], ) @@ -11121,6 +11050,17 @@ cargo.rust_library( crate = "ring", crate_root = "ring-0.16.20.crate/src/lib.rs", edition = "2018", + env = { + "CARGO_MANIFEST_DIR": "ring-0.16.20.crate", + "CARGO_PKG_AUTHORS": "Brian Smith ", + "CARGO_PKG_DESCRIPTION": "Safe, fast, small crypto using Rust.", + "CARGO_PKG_NAME": "ring", + "CARGO_PKG_REPOSITORY": "https://github.com/briansmith/ring", + "CARGO_PKG_VERSION": "0.16.20", + "CARGO_PKG_VERSION_MAJOR": "0", + "CARGO_PKG_VERSION_MINOR": "16", + "CARGO_PKG_VERSION_PATCH": "20", + }, features = [ "alloc", "default", @@ -11130,16 +11070,16 @@ cargo.rust_library( platform = { "linux-arm64": dict( deps = [ - ":libc-0.2.151", - ":once_cell-1.19.0", + ":libc-0.2.147", + ":once_cell-1.18.0", ":ring-0.16.20-ring-c-asm-elf-aarch64", ":spin-0.5.2", ], ), "linux-x86_64": dict( deps = [ - ":libc-0.2.151", - ":once_cell-1.19.0", + ":libc-0.2.147", + ":once_cell-1.18.0", ":ring-0.16.20-ring-c-asm-elf-x86_84", ":spin-0.5.2", ], @@ -11548,511 +11488,6 @@ cxx_library( visibility = [], ) -http_archive( - name = "ring-0.17.7.crate", - sha256 = "688c63d65483050968b2a8937f7995f443e27041a0f7700aa59b0822aedebb74", - strip_prefix = "ring-0.17.7", - sub_targets = [ - "crypto/constant_time_test.c", - "crypto/cpu_intel.c", - "crypto/crypto.c", - "crypto/curve25519/curve25519.c", - "crypto/curve25519/curve25519_64_adx.c", - "crypto/curve25519/curve25519_tables.h", - "crypto/curve25519/internal.h", - "crypto/fipsmodule/aes/aes_nohw.c", - "crypto/fipsmodule/bn/internal.h", - "crypto/fipsmodule/bn/montgomery.c", - "crypto/fipsmodule/bn/montgomery_inv.c", - "crypto/fipsmodule/ec/ecp_nistz.c", - "crypto/fipsmodule/ec/ecp_nistz.h", - "crypto/fipsmodule/ec/ecp_nistz384.h", - "crypto/fipsmodule/ec/ecp_nistz384.inl", - "crypto/fipsmodule/ec/gfp_p256.c", - "crypto/fipsmodule/ec/gfp_p384.c", - 
"crypto/fipsmodule/ec/p256.c", - "crypto/fipsmodule/ec/p256-nistz.c", - "crypto/fipsmodule/ec/p256-nistz.h", - "crypto/fipsmodule/ec/p256-nistz-table.h", - "crypto/fipsmodule/ec/p256_shared.h", - "crypto/fipsmodule/ec/p256_table.h", - "crypto/fipsmodule/ec/util.h", - "crypto/internal.h", - "crypto/limbs/limbs.c", - "crypto/limbs/limbs.h", - "crypto/limbs/limbs.inl", - "crypto/mem.c", - "crypto/poly1305/internal.h", - "crypto/poly1305/poly1305.c", - "crypto/poly1305/poly1305_arm.c", - "crypto/poly1305/poly1305_vec.c", - "include/ring-core/aes.h", - "include/ring-core/arm_arch.h", - "include/ring-core/asm_base.h", - "include/ring-core/base.h", - "include/ring-core/check.h", - "include/ring-core/mem.h", - "include/ring-core/poly1305.h", - "include/ring-core/target.h", - "include/ring-core/type_check.h", - "pregenerated/aesni-gcm-x86_64-elf.S", - "pregenerated/aesni-gcm-x86_64-macosx.S", - "pregenerated/aesni-x86_64-elf.S", - "pregenerated/aesni-x86_64-macosx.S", - "pregenerated/aesv8-armx-ios64.S", - "pregenerated/aesv8-armx-linux64.S", - "pregenerated/armv8-mont-ios64.S", - "pregenerated/armv8-mont-linux64.S", - "pregenerated/chacha-armv8-ios64.S", - "pregenerated/chacha-armv8-linux64.S", - "pregenerated/chacha-x86_64-elf.S", - "pregenerated/chacha-x86_64-macosx.S", - "pregenerated/chacha20_poly1305_armv8-ios64.S", - "pregenerated/chacha20_poly1305_armv8-linux64.S", - "pregenerated/chacha20_poly1305_x86_64-elf.S", - "pregenerated/chacha20_poly1305_x86_64-macosx.S", - "pregenerated/ghash-neon-armv8-ios64.S", - "pregenerated/ghash-neon-armv8-linux64.S", - "pregenerated/ghash-x86_64-elf.S", - "pregenerated/ghash-x86_64-macosx.S", - "pregenerated/ghashv8-armx-ios64.S", - "pregenerated/ghashv8-armx-linux64.S", - "pregenerated/p256-armv8-asm-ios64.S", - "pregenerated/p256-armv8-asm-linux64.S", - "pregenerated/p256-x86_64-asm-elf.S", - "pregenerated/p256-x86_64-asm-macosx.S", - "pregenerated/sha256-armv8-ios64.S", - "pregenerated/sha256-armv8-linux64.S", - 
"pregenerated/sha256-x86_64-elf.S", - "pregenerated/sha256-x86_64-macosx.S", - "pregenerated/sha512-armv8-ios64.S", - "pregenerated/sha512-armv8-linux64.S", - "pregenerated/sha512-x86_64-elf.S", - "pregenerated/sha512-x86_64-macosx.S", - "pregenerated/vpaes-armv8-ios64.S", - "pregenerated/vpaes-armv8-linux64.S", - "pregenerated/vpaes-x86_64-elf.S", - "pregenerated/vpaes-x86_64-macosx.S", - "pregenerated/x86_64-mont-elf.S", - "pregenerated/x86_64-mont-macosx.S", - "pregenerated/x86_64-mont5-elf.S", - "pregenerated/x86_64-mont5-macosx.S", - "third_party/fiat/curve25519_32.h", - "third_party/fiat/curve25519_64.h", - "third_party/fiat/curve25519_64_adx.h", - "third_party/fiat/curve25519_64_msvc.h", - "third_party/fiat/p256_32.h", - "third_party/fiat/p256_64.h", - "third_party/fiat/p256_64_msvc.h", - ], - urls = ["https://crates.io/api/v1/crates/ring/0.17.7/download"], - visibility = [], -) - -cargo.rust_library( - name = "ring-0.17.7", - srcs = [":ring-0.17.7.crate"], - crate = "ring", - crate_root = "ring-0.17.7.crate/src/lib.rs", - edition = "2021", - features = [ - "alloc", - "default", - "dev_urandom_fallback", - ], - platform = { - "linux-arm64": dict( - deps = [ - ":libc-0.2.151", - ":ring-0.17.7-ring-c-asm-elf-aarch64", - ":spin-0.9.8", - ], - ), - "linux-x86_64": dict( - deps = [ - ":ring-0.17.7-ring-c-asm-elf-x86_84", - ":spin-0.9.8", - ], - ), - "macos-arm64": dict( - deps = [ - ":ring-0.17.7-ring-c-asm-macos-arm64", - ":spin-0.9.8", - ], - ), - "macos-x86_64": dict( - deps = [ - ":ring-0.17.7-ring-c-asm-macos-x86_64", - ":spin-0.9.8", - ], - ), - "windows-gnu": dict( - deps = [ - ":ring-0.17.7-ring-c-win-x86_84", - ":spin-0.9.8", - ], - ), - "windows-msvc": dict( - deps = [ - ":ring-0.17.7-ring-c-win-x86_84", - ":spin-0.9.8", - ], - ), - }, - visibility = [], - deps = [ - ":getrandom-0.2.11", - ":untrusted-0.9.0", - ], -) - -cxx_library( - name = "ring-0.17.7-ring-c-asm-elf-aarch64", - srcs = [ - ":ring-0.17.7.crate[crypto/constant_time_test.c]", - 
":ring-0.17.7.crate[crypto/cpu_intel.c]", - ":ring-0.17.7.crate[crypto/crypto.c]", - ":ring-0.17.7.crate[crypto/curve25519/curve25519.c]", - ":ring-0.17.7.crate[crypto/curve25519/curve25519_64_adx.c]", - ":ring-0.17.7.crate[crypto/fipsmodule/aes/aes_nohw.c]", - ":ring-0.17.7.crate[crypto/fipsmodule/bn/montgomery.c]", - ":ring-0.17.7.crate[crypto/fipsmodule/bn/montgomery_inv.c]", - ":ring-0.17.7.crate[crypto/fipsmodule/ec/ecp_nistz.c]", - ":ring-0.17.7.crate[crypto/fipsmodule/ec/gfp_p256.c]", - ":ring-0.17.7.crate[crypto/fipsmodule/ec/gfp_p384.c]", - ":ring-0.17.7.crate[crypto/fipsmodule/ec/p256.c]", - ":ring-0.17.7.crate[crypto/fipsmodule/ec/p256-nistz.c]", - ":ring-0.17.7.crate[crypto/limbs/limbs.c]", - ":ring-0.17.7.crate[crypto/mem.c]", - ":ring-0.17.7.crate[crypto/poly1305/poly1305.c]", - ":ring-0.17.7.crate[crypto/poly1305/poly1305_arm.c]", - ":ring-0.17.7.crate[crypto/poly1305/poly1305_vec.c]", - ":ring-0.17.7.crate[pregenerated/aesv8-armx-linux64.S]", - ":ring-0.17.7.crate[pregenerated/armv8-mont-linux64.S]", - ":ring-0.17.7.crate[pregenerated/chacha-armv8-linux64.S]", - ":ring-0.17.7.crate[pregenerated/chacha20_poly1305_armv8-linux64.S]", - ":ring-0.17.7.crate[pregenerated/ghash-neon-armv8-linux64.S]", - ":ring-0.17.7.crate[pregenerated/ghashv8-armx-linux64.S]", - ":ring-0.17.7.crate[pregenerated/p256-armv8-asm-linux64.S]", - ":ring-0.17.7.crate[pregenerated/sha256-armv8-linux64.S]", - ":ring-0.17.7.crate[pregenerated/sha512-armv8-linux64.S]", - ":ring-0.17.7.crate[pregenerated/vpaes-armv8-linux64.S]", - ], - headers = [ - ":ring-0.17.7.crate[crypto/curve25519/curve25519_tables.h]", - ":ring-0.17.7.crate[crypto/curve25519/internal.h]", - ":ring-0.17.7.crate[crypto/fipsmodule/bn/internal.h]", - ":ring-0.17.7.crate[crypto/fipsmodule/ec/ecp_nistz.h]", - ":ring-0.17.7.crate[crypto/fipsmodule/ec/ecp_nistz384.h]", - ":ring-0.17.7.crate[crypto/fipsmodule/ec/ecp_nistz384.inl]", - ":ring-0.17.7.crate[crypto/fipsmodule/ec/p256-nistz.h]", - 
":ring-0.17.7.crate[crypto/fipsmodule/ec/p256-nistz-table.h]", - ":ring-0.17.7.crate[crypto/fipsmodule/ec/p256_shared.h]", - ":ring-0.17.7.crate[crypto/fipsmodule/ec/p256_table.h]", - ":ring-0.17.7.crate[crypto/fipsmodule/ec/util.h]", - ":ring-0.17.7.crate[crypto/internal.h]", - ":ring-0.17.7.crate[crypto/limbs/limbs.h]", - ":ring-0.17.7.crate[crypto/limbs/limbs.inl]", - ":ring-0.17.7.crate[crypto/poly1305/internal.h]", - ":ring-0.17.7.crate[include/ring-core/aes.h]", - ":ring-0.17.7.crate[include/ring-core/arm_arch.h]", - ":ring-0.17.7.crate[include/ring-core/asm_base.h]", - ":ring-0.17.7.crate[include/ring-core/base.h]", - ":ring-0.17.7.crate[include/ring-core/check.h]", - ":ring-0.17.7.crate[include/ring-core/mem.h]", - ":ring-0.17.7.crate[include/ring-core/poly1305.h]", - ":ring-0.17.7.crate[include/ring-core/target.h]", - ":ring-0.17.7.crate[include/ring-core/type_check.h]", - ":ring-0.17.7.crate[third_party/fiat/curve25519_32.h]", - ":ring-0.17.7.crate[third_party/fiat/curve25519_64.h]", - ":ring-0.17.7.crate[third_party/fiat/curve25519_64_adx.h]", - ":ring-0.17.7.crate[third_party/fiat/curve25519_64_msvc.h]", - ":ring-0.17.7.crate[third_party/fiat/p256_32.h]", - ":ring-0.17.7.crate[third_party/fiat/p256_64.h]", - ":ring-0.17.7.crate[third_party/fiat/p256_64_msvc.h]", - ], - compiler_flags = ["-Wno-error"], - preferred_linkage = "static", - preprocessor_flags = ["-I$(location :ring-0.17.7.crate)/include"], - visibility = [], -) - -cxx_library( - name = "ring-0.17.7-ring-c-asm-elf-x86_84", - srcs = [ - ":ring-0.17.7.crate[crypto/constant_time_test.c]", - ":ring-0.17.7.crate[crypto/cpu_intel.c]", - ":ring-0.17.7.crate[crypto/crypto.c]", - ":ring-0.17.7.crate[crypto/curve25519/curve25519.c]", - ":ring-0.17.7.crate[crypto/curve25519/curve25519_64_adx.c]", - ":ring-0.17.7.crate[crypto/fipsmodule/aes/aes_nohw.c]", - ":ring-0.17.7.crate[crypto/fipsmodule/bn/montgomery.c]", - ":ring-0.17.7.crate[crypto/fipsmodule/bn/montgomery_inv.c]", - 
":ring-0.17.7.crate[crypto/fipsmodule/ec/ecp_nistz.c]", - ":ring-0.17.7.crate[crypto/fipsmodule/ec/gfp_p256.c]", - ":ring-0.17.7.crate[crypto/fipsmodule/ec/gfp_p384.c]", - ":ring-0.17.7.crate[crypto/fipsmodule/ec/p256.c]", - ":ring-0.17.7.crate[crypto/fipsmodule/ec/p256-nistz.c]", - ":ring-0.17.7.crate[crypto/limbs/limbs.c]", - ":ring-0.17.7.crate[crypto/mem.c]", - ":ring-0.17.7.crate[crypto/poly1305/poly1305.c]", - ":ring-0.17.7.crate[crypto/poly1305/poly1305_arm.c]", - ":ring-0.17.7.crate[crypto/poly1305/poly1305_vec.c]", - ":ring-0.17.7.crate[pregenerated/aesni-gcm-x86_64-elf.S]", - ":ring-0.17.7.crate[pregenerated/aesni-x86_64-elf.S]", - ":ring-0.17.7.crate[pregenerated/chacha-x86_64-elf.S]", - ":ring-0.17.7.crate[pregenerated/chacha20_poly1305_x86_64-elf.S]", - ":ring-0.17.7.crate[pregenerated/ghash-x86_64-elf.S]", - ":ring-0.17.7.crate[pregenerated/p256-x86_64-asm-elf.S]", - ":ring-0.17.7.crate[pregenerated/sha256-x86_64-elf.S]", - ":ring-0.17.7.crate[pregenerated/sha512-x86_64-elf.S]", - ":ring-0.17.7.crate[pregenerated/vpaes-x86_64-elf.S]", - ":ring-0.17.7.crate[pregenerated/x86_64-mont-elf.S]", - ":ring-0.17.7.crate[pregenerated/x86_64-mont5-elf.S]", - ], - headers = [ - ":ring-0.17.7.crate[crypto/curve25519/curve25519_tables.h]", - ":ring-0.17.7.crate[crypto/curve25519/internal.h]", - ":ring-0.17.7.crate[crypto/fipsmodule/bn/internal.h]", - ":ring-0.17.7.crate[crypto/fipsmodule/ec/ecp_nistz.h]", - ":ring-0.17.7.crate[crypto/fipsmodule/ec/ecp_nistz384.h]", - ":ring-0.17.7.crate[crypto/fipsmodule/ec/ecp_nistz384.inl]", - ":ring-0.17.7.crate[crypto/fipsmodule/ec/p256-nistz.h]", - ":ring-0.17.7.crate[crypto/fipsmodule/ec/p256-nistz-table.h]", - ":ring-0.17.7.crate[crypto/fipsmodule/ec/p256_shared.h]", - ":ring-0.17.7.crate[crypto/fipsmodule/ec/p256_table.h]", - ":ring-0.17.7.crate[crypto/fipsmodule/ec/util.h]", - ":ring-0.17.7.crate[crypto/internal.h]", - ":ring-0.17.7.crate[crypto/limbs/limbs.h]", - ":ring-0.17.7.crate[crypto/limbs/limbs.inl]", - 
":ring-0.17.7.crate[crypto/poly1305/internal.h]", - ":ring-0.17.7.crate[include/ring-core/aes.h]", - ":ring-0.17.7.crate[include/ring-core/arm_arch.h]", - ":ring-0.17.7.crate[include/ring-core/asm_base.h]", - ":ring-0.17.7.crate[include/ring-core/base.h]", - ":ring-0.17.7.crate[include/ring-core/check.h]", - ":ring-0.17.7.crate[include/ring-core/mem.h]", - ":ring-0.17.7.crate[include/ring-core/poly1305.h]", - ":ring-0.17.7.crate[include/ring-core/target.h]", - ":ring-0.17.7.crate[include/ring-core/type_check.h]", - ":ring-0.17.7.crate[third_party/fiat/curve25519_32.h]", - ":ring-0.17.7.crate[third_party/fiat/curve25519_64.h]", - ":ring-0.17.7.crate[third_party/fiat/curve25519_64_adx.h]", - ":ring-0.17.7.crate[third_party/fiat/curve25519_64_msvc.h]", - ":ring-0.17.7.crate[third_party/fiat/p256_32.h]", - ":ring-0.17.7.crate[third_party/fiat/p256_64.h]", - ":ring-0.17.7.crate[third_party/fiat/p256_64_msvc.h]", - ], - compiler_flags = ["-Wno-error"], - preferred_linkage = "static", - preprocessor_flags = ["-I$(location :ring-0.17.7.crate)/include"], - visibility = [], -) - -cxx_library( - name = "ring-0.17.7-ring-c-asm-macos-arm64", - srcs = [ - ":ring-0.17.7.crate[crypto/constant_time_test.c]", - ":ring-0.17.7.crate[crypto/cpu_intel.c]", - ":ring-0.17.7.crate[crypto/crypto.c]", - ":ring-0.17.7.crate[crypto/curve25519/curve25519.c]", - ":ring-0.17.7.crate[crypto/curve25519/curve25519_64_adx.c]", - ":ring-0.17.7.crate[crypto/fipsmodule/aes/aes_nohw.c]", - ":ring-0.17.7.crate[crypto/fipsmodule/bn/montgomery.c]", - ":ring-0.17.7.crate[crypto/fipsmodule/bn/montgomery_inv.c]", - ":ring-0.17.7.crate[crypto/fipsmodule/ec/ecp_nistz.c]", - ":ring-0.17.7.crate[crypto/fipsmodule/ec/gfp_p256.c]", - ":ring-0.17.7.crate[crypto/fipsmodule/ec/gfp_p384.c]", - ":ring-0.17.7.crate[crypto/fipsmodule/ec/p256.c]", - ":ring-0.17.7.crate[crypto/fipsmodule/ec/p256-nistz.c]", - ":ring-0.17.7.crate[crypto/limbs/limbs.c]", - ":ring-0.17.7.crate[crypto/mem.c]", - 
":ring-0.17.7.crate[crypto/poly1305/poly1305.c]", - ":ring-0.17.7.crate[crypto/poly1305/poly1305_arm.c]", - ":ring-0.17.7.crate[crypto/poly1305/poly1305_vec.c]", - ":ring-0.17.7.crate[pregenerated/aesv8-armx-ios64.S]", - ":ring-0.17.7.crate[pregenerated/armv8-mont-ios64.S]", - ":ring-0.17.7.crate[pregenerated/chacha-armv8-ios64.S]", - ":ring-0.17.7.crate[pregenerated/chacha20_poly1305_armv8-ios64.S]", - ":ring-0.17.7.crate[pregenerated/ghash-neon-armv8-ios64.S]", - ":ring-0.17.7.crate[pregenerated/ghashv8-armx-ios64.S]", - ":ring-0.17.7.crate[pregenerated/p256-armv8-asm-ios64.S]", - ":ring-0.17.7.crate[pregenerated/sha256-armv8-ios64.S]", - ":ring-0.17.7.crate[pregenerated/sha512-armv8-ios64.S]", - ":ring-0.17.7.crate[pregenerated/vpaes-armv8-ios64.S]", - ], - headers = [ - ":ring-0.17.7.crate[crypto/curve25519/curve25519_tables.h]", - ":ring-0.17.7.crate[crypto/curve25519/internal.h]", - ":ring-0.17.7.crate[crypto/fipsmodule/bn/internal.h]", - ":ring-0.17.7.crate[crypto/fipsmodule/ec/ecp_nistz.h]", - ":ring-0.17.7.crate[crypto/fipsmodule/ec/ecp_nistz384.h]", - ":ring-0.17.7.crate[crypto/fipsmodule/ec/ecp_nistz384.inl]", - ":ring-0.17.7.crate[crypto/fipsmodule/ec/p256-nistz.h]", - ":ring-0.17.7.crate[crypto/fipsmodule/ec/p256-nistz-table.h]", - ":ring-0.17.7.crate[crypto/fipsmodule/ec/p256_shared.h]", - ":ring-0.17.7.crate[crypto/fipsmodule/ec/p256_table.h]", - ":ring-0.17.7.crate[crypto/fipsmodule/ec/util.h]", - ":ring-0.17.7.crate[crypto/internal.h]", - ":ring-0.17.7.crate[crypto/limbs/limbs.h]", - ":ring-0.17.7.crate[crypto/limbs/limbs.inl]", - ":ring-0.17.7.crate[crypto/poly1305/internal.h]", - ":ring-0.17.7.crate[include/ring-core/aes.h]", - ":ring-0.17.7.crate[include/ring-core/arm_arch.h]", - ":ring-0.17.7.crate[include/ring-core/asm_base.h]", - ":ring-0.17.7.crate[include/ring-core/base.h]", - ":ring-0.17.7.crate[include/ring-core/check.h]", - ":ring-0.17.7.crate[include/ring-core/mem.h]", - ":ring-0.17.7.crate[include/ring-core/poly1305.h]", - 
":ring-0.17.7.crate[include/ring-core/target.h]", - ":ring-0.17.7.crate[include/ring-core/type_check.h]", - ":ring-0.17.7.crate[third_party/fiat/curve25519_32.h]", - ":ring-0.17.7.crate[third_party/fiat/curve25519_64.h]", - ":ring-0.17.7.crate[third_party/fiat/curve25519_64_adx.h]", - ":ring-0.17.7.crate[third_party/fiat/curve25519_64_msvc.h]", - ":ring-0.17.7.crate[third_party/fiat/p256_32.h]", - ":ring-0.17.7.crate[third_party/fiat/p256_64.h]", - ":ring-0.17.7.crate[third_party/fiat/p256_64_msvc.h]", - ], - compiler_flags = ["-Wno-error"], - preferred_linkage = "static", - preprocessor_flags = ["-I$(location :ring-0.17.7.crate)/include"], - visibility = [], -) - -cxx_library( - name = "ring-0.17.7-ring-c-asm-macos-x86_64", - srcs = [ - ":ring-0.17.7.crate[crypto/constant_time_test.c]", - ":ring-0.17.7.crate[crypto/cpu_intel.c]", - ":ring-0.17.7.crate[crypto/crypto.c]", - ":ring-0.17.7.crate[crypto/curve25519/curve25519.c]", - ":ring-0.17.7.crate[crypto/curve25519/curve25519_64_adx.c]", - ":ring-0.17.7.crate[crypto/fipsmodule/aes/aes_nohw.c]", - ":ring-0.17.7.crate[crypto/fipsmodule/bn/montgomery.c]", - ":ring-0.17.7.crate[crypto/fipsmodule/bn/montgomery_inv.c]", - ":ring-0.17.7.crate[crypto/fipsmodule/ec/ecp_nistz.c]", - ":ring-0.17.7.crate[crypto/fipsmodule/ec/gfp_p256.c]", - ":ring-0.17.7.crate[crypto/fipsmodule/ec/gfp_p384.c]", - ":ring-0.17.7.crate[crypto/fipsmodule/ec/p256.c]", - ":ring-0.17.7.crate[crypto/fipsmodule/ec/p256-nistz.c]", - ":ring-0.17.7.crate[crypto/limbs/limbs.c]", - ":ring-0.17.7.crate[crypto/mem.c]", - ":ring-0.17.7.crate[crypto/poly1305/poly1305.c]", - ":ring-0.17.7.crate[crypto/poly1305/poly1305_arm.c]", - ":ring-0.17.7.crate[crypto/poly1305/poly1305_vec.c]", - ":ring-0.17.7.crate[pregenerated/aesni-gcm-x86_64-macosx.S]", - ":ring-0.17.7.crate[pregenerated/aesni-x86_64-macosx.S]", - ":ring-0.17.7.crate[pregenerated/chacha-x86_64-macosx.S]", - ":ring-0.17.7.crate[pregenerated/chacha20_poly1305_x86_64-macosx.S]", - 
":ring-0.17.7.crate[pregenerated/ghash-x86_64-macosx.S]", - ":ring-0.17.7.crate[pregenerated/p256-x86_64-asm-macosx.S]", - ":ring-0.17.7.crate[pregenerated/sha256-x86_64-macosx.S]", - ":ring-0.17.7.crate[pregenerated/sha512-x86_64-macosx.S]", - ":ring-0.17.7.crate[pregenerated/vpaes-x86_64-macosx.S]", - ":ring-0.17.7.crate[pregenerated/x86_64-mont-macosx.S]", - ":ring-0.17.7.crate[pregenerated/x86_64-mont5-macosx.S]", - ], - headers = [ - ":ring-0.17.7.crate[crypto/curve25519/curve25519_tables.h]", - ":ring-0.17.7.crate[crypto/curve25519/internal.h]", - ":ring-0.17.7.crate[crypto/fipsmodule/bn/internal.h]", - ":ring-0.17.7.crate[crypto/fipsmodule/ec/ecp_nistz.h]", - ":ring-0.17.7.crate[crypto/fipsmodule/ec/ecp_nistz384.h]", - ":ring-0.17.7.crate[crypto/fipsmodule/ec/ecp_nistz384.inl]", - ":ring-0.17.7.crate[crypto/fipsmodule/ec/p256-nistz.h]", - ":ring-0.17.7.crate[crypto/fipsmodule/ec/p256-nistz-table.h]", - ":ring-0.17.7.crate[crypto/fipsmodule/ec/p256_shared.h]", - ":ring-0.17.7.crate[crypto/fipsmodule/ec/p256_table.h]", - ":ring-0.17.7.crate[crypto/fipsmodule/ec/util.h]", - ":ring-0.17.7.crate[crypto/internal.h]", - ":ring-0.17.7.crate[crypto/limbs/limbs.h]", - ":ring-0.17.7.crate[crypto/limbs/limbs.inl]", - ":ring-0.17.7.crate[crypto/poly1305/internal.h]", - ":ring-0.17.7.crate[include/ring-core/aes.h]", - ":ring-0.17.7.crate[include/ring-core/arm_arch.h]", - ":ring-0.17.7.crate[include/ring-core/asm_base.h]", - ":ring-0.17.7.crate[include/ring-core/base.h]", - ":ring-0.17.7.crate[include/ring-core/check.h]", - ":ring-0.17.7.crate[include/ring-core/mem.h]", - ":ring-0.17.7.crate[include/ring-core/poly1305.h]", - ":ring-0.17.7.crate[include/ring-core/target.h]", - ":ring-0.17.7.crate[include/ring-core/type_check.h]", - ":ring-0.17.7.crate[third_party/fiat/curve25519_32.h]", - ":ring-0.17.7.crate[third_party/fiat/curve25519_64.h]", - ":ring-0.17.7.crate[third_party/fiat/curve25519_64_adx.h]", - ":ring-0.17.7.crate[third_party/fiat/curve25519_64_msvc.h]", - 
":ring-0.17.7.crate[third_party/fiat/p256_32.h]", - ":ring-0.17.7.crate[third_party/fiat/p256_64.h]", - ":ring-0.17.7.crate[third_party/fiat/p256_64_msvc.h]", - ], - compiler_flags = ["-Wno-error"], - preferred_linkage = "static", - preprocessor_flags = ["-I$(location :ring-0.17.7.crate)/include"], - visibility = [], -) - -cxx_library( - name = "ring-0.17.7-ring-c-win-x86_84", - srcs = [ - ":ring-0.17.7.crate[crypto/constant_time_test.c]", - ":ring-0.17.7.crate[crypto/cpu_intel.c]", - ":ring-0.17.7.crate[crypto/crypto.c]", - ":ring-0.17.7.crate[crypto/curve25519/curve25519.c]", - ":ring-0.17.7.crate[crypto/curve25519/curve25519_64_adx.c]", - ":ring-0.17.7.crate[crypto/fipsmodule/aes/aes_nohw.c]", - ":ring-0.17.7.crate[crypto/fipsmodule/bn/montgomery.c]", - ":ring-0.17.7.crate[crypto/fipsmodule/bn/montgomery_inv.c]", - ":ring-0.17.7.crate[crypto/fipsmodule/ec/ecp_nistz.c]", - ":ring-0.17.7.crate[crypto/fipsmodule/ec/gfp_p256.c]", - ":ring-0.17.7.crate[crypto/fipsmodule/ec/gfp_p384.c]", - ":ring-0.17.7.crate[crypto/fipsmodule/ec/p256.c]", - ":ring-0.17.7.crate[crypto/fipsmodule/ec/p256-nistz.c]", - ":ring-0.17.7.crate[crypto/limbs/limbs.c]", - ":ring-0.17.7.crate[crypto/mem.c]", - ":ring-0.17.7.crate[crypto/poly1305/poly1305.c]", - ":ring-0.17.7.crate[crypto/poly1305/poly1305_arm.c]", - ":ring-0.17.7.crate[crypto/poly1305/poly1305_vec.c]", - ], - headers = [ - ":ring-0.17.7.crate[crypto/curve25519/curve25519_tables.h]", - ":ring-0.17.7.crate[crypto/curve25519/internal.h]", - ":ring-0.17.7.crate[crypto/fipsmodule/bn/internal.h]", - ":ring-0.17.7.crate[crypto/fipsmodule/ec/ecp_nistz.h]", - ":ring-0.17.7.crate[crypto/fipsmodule/ec/ecp_nistz384.h]", - ":ring-0.17.7.crate[crypto/fipsmodule/ec/ecp_nistz384.inl]", - ":ring-0.17.7.crate[crypto/fipsmodule/ec/p256-nistz.h]", - ":ring-0.17.7.crate[crypto/fipsmodule/ec/p256-nistz-table.h]", - ":ring-0.17.7.crate[crypto/fipsmodule/ec/p256_shared.h]", - ":ring-0.17.7.crate[crypto/fipsmodule/ec/p256_table.h]", - 
":ring-0.17.7.crate[crypto/fipsmodule/ec/util.h]", - ":ring-0.17.7.crate[crypto/internal.h]", - ":ring-0.17.7.crate[crypto/limbs/limbs.h]", - ":ring-0.17.7.crate[crypto/limbs/limbs.inl]", - ":ring-0.17.7.crate[crypto/poly1305/internal.h]", - ":ring-0.17.7.crate[include/ring-core/aes.h]", - ":ring-0.17.7.crate[include/ring-core/arm_arch.h]", - ":ring-0.17.7.crate[include/ring-core/asm_base.h]", - ":ring-0.17.7.crate[include/ring-core/base.h]", - ":ring-0.17.7.crate[include/ring-core/check.h]", - ":ring-0.17.7.crate[include/ring-core/mem.h]", - ":ring-0.17.7.crate[include/ring-core/poly1305.h]", - ":ring-0.17.7.crate[include/ring-core/target.h]", - ":ring-0.17.7.crate[include/ring-core/type_check.h]", - ":ring-0.17.7.crate[third_party/fiat/curve25519_32.h]", - ":ring-0.17.7.crate[third_party/fiat/curve25519_64.h]", - ":ring-0.17.7.crate[third_party/fiat/curve25519_64_adx.h]", - ":ring-0.17.7.crate[third_party/fiat/curve25519_64_msvc.h]", - ":ring-0.17.7.crate[third_party/fiat/p256_32.h]", - ":ring-0.17.7.crate[third_party/fiat/p256_64.h]", - ":ring-0.17.7.crate[third_party/fiat/p256_64_msvc.h]", - ], - compiler_flags = ["-Wno-error"], - preferred_linkage = "static", - preprocessor_flags = ["-I$(location :ring-0.17.7.crate)/include"], - visibility = [], -) - http_archive( name = "rkyv-0.7.42.crate", sha256 = "0200c8230b013893c0b2d6213d6ec64ed2b9be2e0e016682b7224ff82cff5c58", @@ -12079,11 +11514,11 @@ cargo.rust_library( ":bytecheck-0.6.11", ":hashbrown-0.12.3", ":ptr_meta-0.1.4", - ":rend-0.4.1", + ":rend-0.4.0", ":rkyv_derive-0.7.42", ":seahash-4.1.0", ":tinyvec-1.6.0", - ":uuid-1.6.1", + ":uuid-1.4.1", ], ) @@ -12105,7 +11540,7 @@ cargo.rust_library( proc_macro = True, visibility = [], deps = [ - ":proc-macro2-1.0.70", + ":proc-macro2-1.0.66", ":quote-1.0.33", ":syn-1.0.109", ], @@ -12135,18 +11570,18 @@ cargo.rust_library( }, visibility = [], deps = [ - ":byteorder-1.5.0", + ":byteorder-1.4.3", ":digest-0.10.7", ":num-integer-0.1.45", ":num-iter-0.1.43", - 
":num-traits-0.2.17", + ":num-traits-0.2.16", ":pkcs1-0.4.1", ":pkcs8-0.9.0", ":rand_core-0.6.4", ":signature-1.6.4", - ":smallvec-1.11.2", + ":smallvec-1.11.0", ":subtle-2.5.0", - ":zeroize-1.7.0", + ":zeroize-1.6.0", ], ) @@ -12202,49 +11637,49 @@ cargo.rust_library( ], visibility = [], deps = [ - ":async-trait-0.1.74", + ":async-trait-0.1.73", ":aws-creds-0.34.1", - ":aws-region-0.25.4", + ":aws-region-0.25.3", ":base64-0.13.1", - ":bytes-1.5.0", + ":bytes-1.4.0", ":cfg-if-1.0.0", - ":futures-0.3.29", + ":futures-0.3.28", ":hex-0.4.3", ":hmac-0.12.1", - ":http-0.2.11", + ":http-0.2.9", ":log-0.4.20", ":maybe-async-0.2.7", ":md5-0.7.0", - ":percent-encoding-2.3.1", + ":percent-encoding-2.3.0", ":quick-xml-0.26.0", - ":reqwest-0.11.22", - ":serde-1.0.193", - ":serde_derive-1.0.193", - ":sha2-0.10.8", - ":thiserror-1.0.50", - ":time-0.3.30", - ":tokio-1.35.0", + ":reqwest-0.11.20", + ":serde-1.0.186", + ":serde_derive-1.0.186", + ":sha2-0.10.7", + ":thiserror-1.0.47", + ":time-0.3.27", + ":tokio-1.32.0", ":tokio-stream-0.1.14", - ":url-2.5.0", + ":url-2.4.0", ], ) http_archive( - name = "rust_decimal-1.33.1.crate", - sha256 = "06676aec5ccb8fc1da723cc8c0f9a46549f21ebb8753d3915c6c41db1e7f1dc4", - strip_prefix = "rust_decimal-1.33.1", - urls = ["https://crates.io/api/v1/crates/rust_decimal/1.33.1/download"], + name = "rust_decimal-1.32.0.crate", + sha256 = "a4c4216490d5a413bc6d10fa4742bd7d4955941d062c0ef873141d6b0e7b30fd", + strip_prefix = "rust_decimal-1.32.0", + urls = ["https://crates.io/api/v1/crates/rust_decimal/1.32.0/download"], visibility = [], ) cargo.rust_library( - name = "rust_decimal-1.33.1", - srcs = [":rust_decimal-1.33.1.crate"], + name = "rust_decimal-1.32.0", + srcs = [":rust_decimal-1.32.0.crate"], crate = "rust_decimal", - crate_root = "rust_decimal-1.33.1.crate/src/lib.rs", + crate_root = "rust_decimal-1.32.0.crate/src/lib.rs", edition = "2021", env = { - "OUT_DIR": "$(location :rust_decimal-1.33.1-build-script-run[out_dir])", + "OUT_DIR": 
"$(location :rust_decimal-1.32.0-build-script-run[out_dir])", }, features = [ "default", @@ -12254,21 +11689,21 @@ cargo.rust_library( visibility = [], deps = [ ":arrayvec-0.7.4", - ":borsh-1.2.1", - ":bytes-1.5.0", - ":num-traits-0.2.17", + ":borsh-0.10.3", + ":bytes-1.4.0", + ":num-traits-0.2.16", ":rand-0.8.5", ":rkyv-0.7.42", - ":serde-1.0.193", - ":serde_json-1.0.108", + ":serde-1.0.186", + ":serde_json-1.0.105", ], ) cargo.rust_binary( - name = "rust_decimal-1.33.1-build-script-build", - srcs = [":rust_decimal-1.33.1.crate"], + name = "rust_decimal-1.32.0-build-script-build", + srcs = [":rust_decimal-1.32.0.crate"], crate = "build_script_build", - crate_root = "rust_decimal-1.33.1.crate/build.rs", + crate_root = "rust_decimal-1.32.0.crate/build.rs", edition = "2021", features = [ "default", @@ -12279,15 +11714,15 @@ cargo.rust_binary( ) buildscript_run( - name = "rust_decimal-1.33.1-build-script-run", + name = "rust_decimal-1.32.0-build-script-run", package_name = "rust_decimal", - buildscript_rule = ":rust_decimal-1.33.1-build-script-build", + buildscript_rule = ":rust_decimal-1.32.0-build-script-build", features = [ "default", "serde", "std", ], - version = "1.33.1", + version = "1.32.0", ) http_archive( @@ -12308,39 +11743,120 @@ cargo.rust_library( ) http_archive( - name = "rustc_version-0.4.0.crate", - sha256 = "bfa0f585226d2e68097d4f95d113b15b83a82e819ab25717ec0590d9584ef366", - strip_prefix = "rustc_version-0.4.0", - urls = ["https://crates.io/api/v1/crates/rustc_version/0.4.0/download"], + name = "rustix-0.37.23.crate", + sha256 = "4d69718bf81c6127a49dc64e44a742e8bb9213c0ff8869a22c308f84c1d4ab06", + strip_prefix = "rustix-0.37.23", + urls = ["https://crates.io/api/v1/crates/rustix/0.37.23/download"], visibility = [], ) cargo.rust_library( - name = "rustc_version-0.4.0", - srcs = [":rustc_version-0.4.0.crate"], - crate = "rustc_version", - crate_root = "rustc_version-0.4.0.crate/src/lib.rs", + name = "rustix-0.37.23", + srcs = 
[":rustix-0.37.23.crate"], + crate = "rustix", + crate_root = "rustix-0.37.23.crate/src/lib.rs", edition = "2018", + features = [ + "default", + "io-lifetimes", + "libc", + "std", + "termios", + "use-libc-auxv", + ], + platform = { + "linux-arm64": dict( + deps = [ + ":libc-0.2.147", + ":linux-raw-sys-0.3.8", + ], + ), + "linux-x86_64": dict( + deps = [ + ":libc-0.2.147", + ":linux-raw-sys-0.3.8", + ], + ), + "macos-arm64": dict( + named_deps = { + "libc_errno": ":errno-0.3.2", + }, + deps = [":libc-0.2.147"], + ), + "macos-x86_64": dict( + named_deps = { + "libc_errno": ":errno-0.3.2", + }, + deps = [":libc-0.2.147"], + ), + "windows-gnu": dict( + named_deps = { + "libc_errno": ":errno-0.3.2", + }, + deps = [":windows-sys-0.48.0"], + ), + "windows-msvc": dict( + named_deps = { + "libc_errno": ":errno-0.3.2", + }, + deps = [":windows-sys-0.48.0"], + ), + }, + rustc_flags = ["@$(location :rustix-0.37.23-build-script-run[rustc_flags])"], visibility = [], - deps = [":semver-1.0.20"], + deps = [ + ":bitflags-1.3.2", + ":io-lifetimes-1.0.11", + ], +) + +cargo.rust_binary( + name = "rustix-0.37.23-build-script-build", + srcs = [":rustix-0.37.23.crate"], + crate = "build_script_build", + crate_root = "rustix-0.37.23.crate/build.rs", + edition = "2018", + features = [ + "default", + "io-lifetimes", + "libc", + "std", + "termios", + "use-libc-auxv", + ], + visibility = [], +) + +buildscript_run( + name = "rustix-0.37.23-build-script-run", + package_name = "rustix", + buildscript_rule = ":rustix-0.37.23-build-script-build", + features = [ + "default", + "io-lifetimes", + "libc", + "std", + "termios", + "use-libc-auxv", + ], + version = "0.37.23", ) http_archive( - name = "rustix-0.38.28.crate", - sha256 = "72e572a5e8ca657d7366229cdde4bd14c4eb5499a9573d4d366fe1b599daa316", - strip_prefix = "rustix-0.38.28", - urls = ["https://crates.io/api/v1/crates/rustix/0.38.28/download"], + name = "rustix-0.38.8.crate", + sha256 = 
"19ed4fa021d81c8392ce04db050a3da9a60299050b7ae1cf482d862b54a7218f", + strip_prefix = "rustix-0.38.8", + urls = ["https://crates.io/api/v1/crates/rustix/0.38.8/download"], visibility = [], ) cargo.rust_library( - name = "rustix-0.38.28", - srcs = [":rustix-0.38.28.crate"], + name = "rustix-0.38.8", + srcs = [":rustix-0.38.8.crate"], crate = "rustix", - crate_root = "rustix-0.38.28.crate/src/lib.rs", + crate_root = "rustix-0.38.8.crate/src/lib.rs", edition = "2021", features = [ - "alloc", "default", "fs", "std", @@ -12349,61 +11865,48 @@ cargo.rust_library( ], platform = { "linux-arm64": dict( - named_deps = { - "libc_errno": ":errno-0.3.8", - }, - deps = [ - ":libc-0.2.151", - ":linux-raw-sys-0.4.12", - ], + deps = [":linux-raw-sys-0.4.5"], ), "linux-x86_64": dict( - named_deps = { - "libc_errno": ":errno-0.3.8", - }, - deps = [ - ":libc-0.2.151", - ":linux-raw-sys-0.4.12", - ], + deps = [":linux-raw-sys-0.4.5"], ), "macos-arm64": dict( named_deps = { - "libc_errno": ":errno-0.3.8", + "libc_errno": ":errno-0.3.2", }, - deps = [":libc-0.2.151"], + deps = [":libc-0.2.147"], ), "macos-x86_64": dict( named_deps = { - "libc_errno": ":errno-0.3.8", + "libc_errno": ":errno-0.3.2", }, - deps = [":libc-0.2.151"], + deps = [":libc-0.2.147"], ), "windows-gnu": dict( named_deps = { - "libc_errno": ":errno-0.3.8", + "libc_errno": ":errno-0.3.2", }, - deps = [":windows-sys-0.52.0"], + deps = [":windows-sys-0.48.0"], ), "windows-msvc": dict( named_deps = { - "libc_errno": ":errno-0.3.8", + "libc_errno": ":errno-0.3.2", }, - deps = [":windows-sys-0.52.0"], + deps = [":windows-sys-0.48.0"], ), }, - rustc_flags = ["@$(location :rustix-0.38.28-build-script-run[rustc_flags])"], + rustc_flags = ["@$(location :rustix-0.38.8-build-script-run[rustc_flags])"], visibility = [], - deps = [":bitflags-2.4.1"], + deps = [":bitflags-2.4.0"], ) cargo.rust_binary( - name = "rustix-0.38.28-build-script-build", - srcs = [":rustix-0.38.28.crate"], + name = "rustix-0.38.8-build-script-build", + srcs = 
[":rustix-0.38.8.crate"], crate = "build_script_build", - crate_root = "rustix-0.38.28.crate/build.rs", + crate_root = "rustix-0.38.8.crate/build.rs", edition = "2021", features = [ - "alloc", "default", "fs", "std", @@ -12414,33 +11917,32 @@ cargo.rust_binary( ) buildscript_run( - name = "rustix-0.38.28-build-script-run", + name = "rustix-0.38.8-build-script-run", package_name = "rustix", - buildscript_rule = ":rustix-0.38.28-build-script-build", + buildscript_rule = ":rustix-0.38.8-build-script-build", features = [ - "alloc", "default", "fs", "std", "termios", "use-libc-auxv", ], - version = "0.38.28", + version = "0.38.8", ) http_archive( - name = "rustls-0.20.9.crate", - sha256 = "1b80e3dec595989ea8510028f30c408a4630db12c9cbb8de34203b89d6577e99", - strip_prefix = "rustls-0.20.9", - urls = ["https://crates.io/api/v1/crates/rustls/0.20.9/download"], + name = "rustls-0.20.8.crate", + sha256 = "fff78fc74d175294f4e83b28343315ffcfb114b156f0185e9741cb5570f50e2f", + strip_prefix = "rustls-0.20.8", + urls = ["https://crates.io/api/v1/crates/rustls/0.20.8/download"], visibility = [], ) cargo.rust_library( - name = "rustls-0.20.9", - srcs = [":rustls-0.20.9.crate"], + name = "rustls-0.20.8", + srcs = [":rustls-0.20.8.crate"], crate = "rustls", - crate_root = "rustls-0.20.9.crate/src/lib.rs", + crate_root = "rustls-0.20.8.crate/src/lib.rs", edition = "2018", features = [ "dangerous_configuration", @@ -12453,30 +11955,30 @@ cargo.rust_library( deps = [ ":log-0.4.20", ":ring-0.16.20", - ":sct-0.7.1", - ":webpki-0.22.4", + ":sct-0.7.0", + ":webpki-0.22.0", ], ) alias( name = "rustls", - actual = ":rustls-0.21.10", + actual = ":rustls-0.21.6", visibility = ["PUBLIC"], ) http_archive( - name = "rustls-0.21.10.crate", - sha256 = "f9d5a6813c0759e4609cd494e8e725babae6a2ca7b62a5536a13daaec6fcb7ba", - strip_prefix = "rustls-0.21.10", - urls = ["https://crates.io/api/v1/crates/rustls/0.21.10/download"], + name = "rustls-0.21.6.crate", + sha256 = 
"1d1feddffcfcc0b33f5c6ce9a29e341e4cd59c3f78e7ee45f4a40c038b1d6cbb", + strip_prefix = "rustls-0.21.6", + urls = ["https://crates.io/api/v1/crates/rustls/0.21.6/download"], visibility = [], ) cargo.rust_library( - name = "rustls-0.21.10", - srcs = [":rustls-0.21.10.crate"], + name = "rustls-0.21.6", + srcs = [":rustls-0.21.6.crate"], crate = "rustls", - crate_root = "rustls-0.21.10.crate/src/lib.rs", + crate_root = "rustls-0.21.6.crate/src/lib.rs", edition = "2021", features = [ "dangerous_configuration", @@ -12488,9 +11990,9 @@ cargo.rust_library( visibility = [], deps = [ ":log-0.4.20", - ":ring-0.17.7", - ":rustls-webpki-0.101.7", - ":sct-0.7.1", + ":ring-0.16.20", + ":rustls-webpki-0.101.4", + ":sct-0.7.0", ], ) @@ -12529,40 +12031,40 @@ cargo.rust_library( ), }, visibility = [], - deps = [":rustls-pemfile-1.0.4"], + deps = [":rustls-pemfile-1.0.3"], ) http_archive( - name = "rustls-pemfile-1.0.4.crate", - sha256 = "1c74cae0a4cf6ccbbf5f359f08efdf8ee7e1dc532573bf0db71968cb56b1448c", - strip_prefix = "rustls-pemfile-1.0.4", - urls = ["https://crates.io/api/v1/crates/rustls-pemfile/1.0.4/download"], + name = "rustls-pemfile-1.0.3.crate", + sha256 = "2d3987094b1d07b653b7dfdc3f70ce9a1da9c51ac18c1b06b662e4f9a0e9f4b2", + strip_prefix = "rustls-pemfile-1.0.3", + urls = ["https://crates.io/api/v1/crates/rustls-pemfile/1.0.3/download"], visibility = [], ) cargo.rust_library( - name = "rustls-pemfile-1.0.4", - srcs = [":rustls-pemfile-1.0.4.crate"], + name = "rustls-pemfile-1.0.3", + srcs = [":rustls-pemfile-1.0.3.crate"], crate = "rustls_pemfile", - crate_root = "rustls-pemfile-1.0.4.crate/src/lib.rs", + crate_root = "rustls-pemfile-1.0.3.crate/src/lib.rs", edition = "2018", visibility = [], - deps = [":base64-0.21.5"], + deps = [":base64-0.21.2"], ) http_archive( - name = "rustls-webpki-0.101.7.crate", - sha256 = "8b6275d1ee7a1cd780b64aca7726599a1dbc893b1e64144529e55c3c2f745765", - strip_prefix = "rustls-webpki-0.101.7", - urls = 
["https://crates.io/api/v1/crates/rustls-webpki/0.101.7/download"], + name = "rustls-webpki-0.101.4.crate", + sha256 = "7d93931baf2d282fff8d3a532bbfd7653f734643161b87e3e01e59a04439bf0d", + strip_prefix = "rustls-webpki-0.101.4", + urls = ["https://crates.io/api/v1/crates/rustls-webpki/0.101.4/download"], visibility = [], ) cargo.rust_library( - name = "rustls-webpki-0.101.7", - srcs = [":rustls-webpki-0.101.7.crate"], + name = "rustls-webpki-0.101.4", + srcs = [":rustls-webpki-0.101.4.crate"], crate = "webpki", - crate_root = "rustls-webpki-0.101.7.crate/src/lib.rs", + crate_root = "rustls-webpki-0.101.4.crate/src/lib.rs", edition = "2021", features = [ "alloc", @@ -12571,8 +12073,8 @@ cargo.rust_library( ], visibility = [], deps = [ - ":ring-0.17.7", - ":untrusted-0.9.0", + ":ring-0.16.20", + ":untrusted-0.7.1", ], ) @@ -12614,18 +12116,18 @@ buildscript_run( ) http_archive( - name = "ryu-1.0.16.crate", - sha256 = "f98d2aa92eebf49b69786be48e4477826b256916e84a57ff2a4f21923b48eb4c", - strip_prefix = "ryu-1.0.16", - urls = ["https://crates.io/api/v1/crates/ryu/1.0.16/download"], + name = "ryu-1.0.15.crate", + sha256 = "1ad4cc8da4ef723ed60bced201181d83791ad433213d8c24efffda1eec85d741", + strip_prefix = "ryu-1.0.15", + urls = ["https://crates.io/api/v1/crates/ryu/1.0.15/download"], visibility = [], ) cargo.rust_library( - name = "ryu-1.0.16", - srcs = [":ryu-1.0.16.crate"], + name = "ryu-1.0.15", + srcs = [":ryu-1.0.15.crate"], crate = "ryu", - crate_root = "ryu-1.0.16.crate/src/lib.rs", + crate_root = "ryu-1.0.15.crate/src/lib.rs", edition = "2018", visibility = [], ) @@ -12646,10 +12148,10 @@ cargo.rust_library( edition = "2018", platform = { "windows-gnu": dict( - deps = [":winapi-util-0.1.6"], + deps = [":winapi-util-0.1.5"], ), "windows-msvc": dict( - deps = [":winapi-util-0.1.6"], + deps = [":winapi-util-0.1.5"], ), }, visibility = [], @@ -12691,23 +12193,23 @@ cargo.rust_library( ) http_archive( - name = "sct-0.7.1.crate", - sha256 = 
"da046153aa2352493d6cb7da4b6e5c0c057d8a1d0a9aa8560baffdd945acd414", - strip_prefix = "sct-0.7.1", - urls = ["https://crates.io/api/v1/crates/sct/0.7.1/download"], + name = "sct-0.7.0.crate", + sha256 = "d53dcdb7c9f8158937a7981b48accfd39a43af418591a5d008c7b22b5e1b7ca4", + strip_prefix = "sct-0.7.0", + urls = ["https://crates.io/api/v1/crates/sct/0.7.0/download"], visibility = [], ) cargo.rust_library( - name = "sct-0.7.1", - srcs = [":sct-0.7.1.crate"], + name = "sct-0.7.0", + srcs = [":sct-0.7.0.crate"], crate = "sct", - crate_root = "sct-0.7.1.crate/src/lib.rs", - edition = "2021", + crate_root = "sct-0.7.0.crate/src/lib.rs", + edition = "2018", visibility = [], deps = [ - ":ring-0.17.7", - ":untrusted-0.9.0", + ":ring-0.16.20", + ":untrusted-0.7.1", ], ) @@ -12758,25 +12260,25 @@ cargo.rust_library( visibility = [], deps = [ ":async-stream-0.3.5", - ":async-trait-0.1.74", + ":async-trait-0.1.73", ":bigdecimal-0.3.1", - ":chrono-0.4.31", - ":futures-0.3.29", + ":chrono-0.4.26", + ":futures-0.3.28", ":log-0.4.20", ":ouroboros-0.15.6", - ":rust_decimal-1.33.1", + ":rust_decimal-1.32.0", ":sea-orm-macros-0.11.3", ":sea-query-0.28.5", ":sea-query-binder-0.3.1", ":sea-strum-0.23.0", - ":serde-1.0.193", - ":serde_json-1.0.108", + ":serde-1.0.186", + ":serde_json-1.0.105", ":sqlx-0.6.3", - ":thiserror-1.0.50", - ":time-0.3.30", - ":tracing-0.1.40", - ":url-2.5.0", - ":uuid-1.6.1", + ":thiserror-1.0.47", + ":time-0.3.27", + ":tracing-0.1.37", + ":url-2.4.0", + ":uuid-1.4.1", ], ) @@ -12799,7 +12301,7 @@ cargo.rust_library( deps = [ ":bae-0.1.7", ":heck-0.3.3", - ":proc-macro2-1.0.70", + ":proc-macro2-1.0.66", ":quote-1.0.33", ":syn-1.0.109", ], @@ -12843,12 +12345,12 @@ cargo.rust_library( visibility = [], deps = [ ":bigdecimal-0.3.1", - ":chrono-0.4.31", - ":rust_decimal-1.33.1", + ":chrono-0.4.26", + ":rust_decimal-1.32.0", ":sea-query-derive-0.3.0", - ":serde_json-1.0.108", - ":time-0.3.30", - ":uuid-1.6.1", + ":serde_json-1.0.105", + ":time-0.3.27", + ":uuid-1.4.1", 
], ) @@ -12886,13 +12388,13 @@ cargo.rust_library( visibility = [], deps = [ ":bigdecimal-0.3.1", - ":chrono-0.4.31", - ":rust_decimal-1.33.1", + ":chrono-0.4.26", + ":rust_decimal-1.32.0", ":sea-query-0.28.5", - ":serde_json-1.0.108", + ":serde_json-1.0.105", ":sqlx-0.6.3", - ":time-0.3.30", - ":uuid-1.6.1", + ":time-0.3.27", + ":uuid-1.4.1", ], ) @@ -12914,10 +12416,10 @@ cargo.rust_library( visibility = [], deps = [ ":heck-0.4.1", - ":proc-macro2-1.0.70", + ":proc-macro2-1.0.66", ":quote-1.0.33", ":syn-1.0.109", - ":thiserror-1.0.50", + ":thiserror-1.0.47", ], ) @@ -12963,7 +12465,7 @@ cargo.rust_library( visibility = [], deps = [ ":heck-0.3.3", - ":proc-macro2-1.0.70", + ":proc-macro2-1.0.66", ":quote-1.0.33", ":rustversion-1.0.14", ":syn-1.0.109", @@ -13020,7 +12522,7 @@ cargo.rust_library( ":generic-array-0.14.7", ":pkcs8-0.10.2", ":subtle-2.5.0", - ":zeroize-1.7.0", + ":zeroize-1.6.0", ], ) @@ -13045,9 +12547,9 @@ cargo.rust_library( visibility = [], deps = [ ":bitflags-1.3.2", - ":core-foundation-0.9.4", - ":core-foundation-sys-0.8.6", - ":libc-0.2.151", + ":core-foundation-0.9.3", + ":core-foundation-sys-0.8.4", + ":libc-0.2.147", ":security-framework-sys-2.9.1", ], ) @@ -13069,8 +12571,8 @@ cargo.rust_library( features = ["OSX_10_9"], visibility = [], deps = [ - ":core-foundation-sys-0.8.6", - ":libc-0.2.151", + ":core-foundation-sys-0.8.4", + ":libc-0.2.147", ], ) @@ -13101,96 +12603,28 @@ cargo.rust_library( ), }, visibility = [], - deps = [":tempfile-3.8.1"], -) - -http_archive( - name = "semver-1.0.20.crate", - sha256 = "836fa6a3e1e547f9a2c4040802ec865b5d85f4014efe00555d7090a3dcaa1090", - strip_prefix = "semver-1.0.20", - urls = ["https://crates.io/api/v1/crates/semver/1.0.20/download"], - visibility = [], -) - -cargo.rust_library( - name = "semver-1.0.20", - srcs = [":semver-1.0.20.crate"], - crate = "semver", - crate_root = "semver-1.0.20.crate/src/lib.rs", - edition = "2018", - env = { - "CARGO_MANIFEST_DIR": "semver-1.0.20.crate", - 
"CARGO_PKG_AUTHORS": "David Tolnay ", - "CARGO_PKG_DESCRIPTION": "Parser and evaluator for Cargo's flavor of Semantic Versioning", - "CARGO_PKG_NAME": "semver", - "CARGO_PKG_REPOSITORY": "https://github.com/dtolnay/semver", - "CARGO_PKG_VERSION": "1.0.20", - "CARGO_PKG_VERSION_MAJOR": "1", - "CARGO_PKG_VERSION_MINOR": "0", - "CARGO_PKG_VERSION_PATCH": "20", - }, - features = [ - "default", - "std", - ], - rustc_flags = ["@$(location :semver-1.0.20-build-script-run[rustc_flags])"], - visibility = [], -) - -cargo.rust_binary( - name = "semver-1.0.20-build-script-build", - srcs = [":semver-1.0.20.crate"], - crate = "build_script_build", - crate_root = "semver-1.0.20.crate/build.rs", - edition = "2018", - env = { - "CARGO_MANIFEST_DIR": "semver-1.0.20.crate", - "CARGO_PKG_AUTHORS": "David Tolnay ", - "CARGO_PKG_DESCRIPTION": "Parser and evaluator for Cargo's flavor of Semantic Versioning", - "CARGO_PKG_NAME": "semver", - "CARGO_PKG_REPOSITORY": "https://github.com/dtolnay/semver", - "CARGO_PKG_VERSION": "1.0.20", - "CARGO_PKG_VERSION_MAJOR": "1", - "CARGO_PKG_VERSION_MINOR": "0", - "CARGO_PKG_VERSION_PATCH": "20", - }, - features = [ - "default", - "std", - ], - visibility = [], -) - -buildscript_run( - name = "semver-1.0.20-build-script-run", - package_name = "semver", - buildscript_rule = ":semver-1.0.20-build-script-build", - features = [ - "default", - "std", - ], - version = "1.0.20", + deps = [":tempfile-3.8.0"], ) alias( name = "serde", - actual = ":serde-1.0.193", + actual = ":serde-1.0.186", visibility = ["PUBLIC"], ) http_archive( - name = "serde-1.0.193.crate", - sha256 = "25dd9975e68d0cb5aa1120c288333fc98731bd1dd12f561e468ea4728c042b89", - strip_prefix = "serde-1.0.193", - urls = ["https://crates.io/api/v1/crates/serde/1.0.193/download"], + name = "serde-1.0.186.crate", + sha256 = "9f5db24220c009de9bd45e69fb2938f4b6d2df856aa9304ce377b3180f83b7c1", + strip_prefix = "serde-1.0.186", + urls = ["https://crates.io/api/v1/crates/serde/1.0.186/download"], 
visibility = [], ) cargo.rust_library( - name = "serde-1.0.193", - srcs = [":serde-1.0.193.crate"], + name = "serde-1.0.186", + srcs = [":serde-1.0.186.crate"], crate = "serde", - crate_root = "serde-1.0.193.crate/src/lib.rs", + crate_root = "serde-1.0.186.crate/src/lib.rs", edition = "2018", features = [ "alloc", @@ -13201,28 +12635,28 @@ cargo.rust_library( "std", ], visibility = [], - deps = [":serde_derive-1.0.193"], + deps = [":serde_derive-1.0.186"], ) alias( name = "serde-aux", - actual = ":serde-aux-4.3.1", + actual = ":serde-aux-4.2.0", visibility = ["PUBLIC"], ) http_archive( - name = "serde-aux-4.3.1.crate", - sha256 = "184eba62ebddb71658697c8b08822edee89970bf318c5362189f0de27f85b498", - strip_prefix = "serde-aux-4.3.1", - urls = ["https://crates.io/api/v1/crates/serde-aux/4.3.1/download"], + name = "serde-aux-4.2.0.crate", + sha256 = "c3dfe1b7eb6f9dcf011bd6fad169cdeaae75eda0d61b1a99a3f015b41b0cae39", + strip_prefix = "serde-aux-4.2.0", + urls = ["https://crates.io/api/v1/crates/serde-aux/4.2.0/download"], visibility = [], ) cargo.rust_library( - name = "serde-aux-4.3.1", - srcs = [":serde-aux-4.3.1.crate"], + name = "serde-aux-4.2.0", + srcs = [":serde-aux-4.2.0.crate"], crate = "serde_aux", - crate_root = "serde-aux-4.3.1.crate/src/lib.rs", + crate_root = "serde-aux-4.2.0.crate/src/lib.rs", edition = "2021", features = [ "chrono", @@ -13230,55 +12664,55 @@ cargo.rust_library( ], visibility = [], deps = [ - ":chrono-0.4.31", - ":serde-1.0.193", - ":serde_json-1.0.108", + ":chrono-0.4.26", + ":serde-1.0.186", + ":serde_json-1.0.105", ], ) http_archive( - name = "serde_derive-1.0.193.crate", - sha256 = "43576ca501357b9b071ac53cdc7da8ef0cbd9493d8df094cd821777ea6e894d3", - strip_prefix = "serde_derive-1.0.193", - urls = ["https://crates.io/api/v1/crates/serde_derive/1.0.193/download"], + name = "serde_derive-1.0.186.crate", + sha256 = "5ad697f7e0b65af4983a4ce8f56ed5b357e8d3c36651bf6a7e13639c17b8e670", + strip_prefix = "serde_derive-1.0.186", + urls = 
["https://crates.io/api/v1/crates/serde_derive/1.0.186/download"], visibility = [], ) cargo.rust_library( - name = "serde_derive-1.0.193", - srcs = [":serde_derive-1.0.193.crate"], + name = "serde_derive-1.0.186", + srcs = [":serde_derive-1.0.186.crate"], crate = "serde_derive", - crate_root = "serde_derive-1.0.193.crate/src/lib.rs", + crate_root = "serde_derive-1.0.186.crate/src/lib.rs", edition = "2015", features = ["default"], proc_macro = True, visibility = [], deps = [ - ":proc-macro2-1.0.70", + ":proc-macro2-1.0.66", ":quote-1.0.33", - ":syn-2.0.40", + ":syn-2.0.29", ], ) alias( name = "serde_json", - actual = ":serde_json-1.0.108", + actual = ":serde_json-1.0.105", visibility = ["PUBLIC"], ) http_archive( - name = "serde_json-1.0.108.crate", - sha256 = "3d1c7e3eac408d115102c4c24ad393e0821bb3a5df4d506a80f85f7a742a526b", - strip_prefix = "serde_json-1.0.108", - urls = ["https://crates.io/api/v1/crates/serde_json/1.0.108/download"], + name = "serde_json-1.0.105.crate", + sha256 = "693151e1ac27563d6dbcec9dee9fbd5da8539b20fa14ad3752b2e6d363ace360", + strip_prefix = "serde_json-1.0.105", + urls = ["https://crates.io/api/v1/crates/serde_json/1.0.105/download"], visibility = [], ) cargo.rust_library( - name = "serde_json-1.0.108", - srcs = [":serde_json-1.0.108.crate"], + name = "serde_json-1.0.105", + srcs = [":serde_json-1.0.105.crate"], crate = "serde_json", - crate_root = "serde_json-1.0.108.crate/src/lib.rs", + crate_root = "serde_json-1.0.105.crate/src/lib.rs", edition = "2021", features = [ "alloc", @@ -13290,10 +12724,10 @@ cargo.rust_library( ], visibility = [], deps = [ - ":indexmap-2.1.0", - ":itoa-1.0.10", - ":ryu-1.0.16", - ":serde-1.0.193", + ":indexmap-2.0.0", + ":itoa-1.0.9", + ":ryu-1.0.15", + ":serde-1.0.186", ], ) @@ -13312,7 +12746,7 @@ cargo.rust_library( crate_root = "serde_nanos-0.1.3.crate/src/lib.rs", edition = "2018", visibility = [], - deps = [":serde-1.0.193"], + deps = [":serde-1.0.186"], ) http_archive( @@ -13331,51 +12765,51 @@ 
cargo.rust_library( edition = "2021", visibility = [], deps = [ - ":itoa-1.0.10", - ":serde-1.0.193", + ":itoa-1.0.9", + ":serde-1.0.186", ], ) http_archive( - name = "serde_repr-0.1.17.crate", - sha256 = "3081f5ffbb02284dda55132aa26daecedd7372a42417bbbab6f14ab7d6bb9145", - strip_prefix = "serde_repr-0.1.17", - urls = ["https://crates.io/api/v1/crates/serde_repr/0.1.17/download"], + name = "serde_repr-0.1.16.crate", + sha256 = "8725e1dfadb3a50f7e5ce0b1a540466f6ed3fe7a0fca2ac2b8b831d31316bd00", + strip_prefix = "serde_repr-0.1.16", + urls = ["https://crates.io/api/v1/crates/serde_repr/0.1.16/download"], visibility = [], ) cargo.rust_library( - name = "serde_repr-0.1.17", - srcs = [":serde_repr-0.1.17.crate"], + name = "serde_repr-0.1.16", + srcs = [":serde_repr-0.1.16.crate"], crate = "serde_repr", - crate_root = "serde_repr-0.1.17.crate/src/lib.rs", + crate_root = "serde_repr-0.1.16.crate/src/lib.rs", edition = "2021", proc_macro = True, visibility = [], deps = [ - ":proc-macro2-1.0.70", + ":proc-macro2-1.0.66", ":quote-1.0.33", - ":syn-2.0.40", + ":syn-2.0.29", ], ) http_archive( - name = "serde_spanned-0.6.4.crate", - sha256 = "12022b835073e5b11e90a14f86838ceb1c8fb0325b72416845c487ac0fa95e80", - strip_prefix = "serde_spanned-0.6.4", - urls = ["https://crates.io/api/v1/crates/serde_spanned/0.6.4/download"], + name = "serde_spanned-0.6.3.crate", + sha256 = "96426c9936fd7a0124915f9185ea1d20aa9445cc9821142f0a73bc9207a2e186", + strip_prefix = "serde_spanned-0.6.3", + urls = ["https://crates.io/api/v1/crates/serde_spanned/0.6.3/download"], visibility = [], ) cargo.rust_library( - name = "serde_spanned-0.6.4", - srcs = [":serde_spanned-0.6.4.crate"], + name = "serde_spanned-0.6.3", + srcs = [":serde_spanned-0.6.3.crate"], crate = "serde_spanned", - crate_root = "serde_spanned-0.6.4.crate/src/lib.rs", + crate_root = "serde_spanned-0.6.3.crate/src/lib.rs", edition = "2021", features = ["serde"], visibility = [], - deps = [":serde-1.0.193"], + deps = [":serde-1.0.186"], ) 
alias( @@ -13400,8 +12834,8 @@ cargo.rust_library( edition = "2018", visibility = [], deps = [ - ":serde-1.0.193", - ":url-2.5.0", + ":serde-1.0.186", + ":url-2.4.0", ], ) @@ -13421,10 +12855,10 @@ cargo.rust_library( edition = "2018", visibility = [], deps = [ - ":form_urlencoded-1.2.1", - ":itoa-1.0.10", - ":ryu-1.0.16", - ":serde-1.0.193", + ":form_urlencoded-1.2.0", + ":itoa-1.0.9", + ":ryu-1.0.15", + ":serde-1.0.186", ], ) @@ -13449,39 +12883,39 @@ cargo.rust_library( "std", ], named_deps = { - "chrono_0_4": ":chrono-0.4.31", + "chrono_0_4": ":chrono-0.4.26", "indexmap_1": ":indexmap-1.9.3", - "time_0_3": ":time-0.3.30", + "time_0_3": ":time-0.3.27", }, visibility = [], deps = [ ":base64-0.13.1", ":hex-0.4.3", - ":serde-1.0.193", - ":serde_json-1.0.108", + ":serde-1.0.186", + ":serde_json-1.0.105", ":serde_with_macros-2.3.3", ], ) alias( name = "serde_with", - actual = ":serde_with-3.4.0", + actual = ":serde_with-3.3.0", visibility = ["PUBLIC"], ) http_archive( - name = "serde_with-3.4.0.crate", - sha256 = "64cd236ccc1b7a29e7e2739f27c0b2dd199804abc4290e32f59f3b68d6405c23", - strip_prefix = "serde_with-3.4.0", - urls = ["https://crates.io/api/v1/crates/serde_with/3.4.0/download"], + name = "serde_with-3.3.0.crate", + sha256 = "1ca3b16a3d82c4088f343b7480a93550b3eabe1a358569c2dfe38bbcead07237", + strip_prefix = "serde_with-3.3.0", + urls = ["https://crates.io/api/v1/crates/serde_with/3.3.0/download"], visibility = [], ) cargo.rust_library( - name = "serde_with-3.4.0", - srcs = [":serde_with-3.4.0.crate"], + name = "serde_with-3.3.0", + srcs = [":serde_with-3.3.0.crate"], crate = "serde_with", - crate_root = "serde_with-3.4.0.crate/src/lib.rs", + crate_root = "serde_with-3.3.0.crate/src/lib.rs", edition = "2021", features = [ "alloc", @@ -13490,18 +12924,18 @@ cargo.rust_library( "std", ], named_deps = { - "chrono_0_4": ":chrono-0.4.31", + "chrono_0_4": ":chrono-0.4.26", "indexmap_1": ":indexmap-1.9.3", - "indexmap_2": ":indexmap-2.1.0", - "time_0_3": 
":time-0.3.30", + "indexmap_2": ":indexmap-2.0.0", + "time_0_3": ":time-0.3.27", }, visibility = [], deps = [ - ":base64-0.21.5", + ":base64-0.21.2", ":hex-0.4.3", - ":serde-1.0.193", - ":serde_json-1.0.108", - ":serde_with_macros-3.4.0", + ":serde-1.0.186", + ":serde_json-1.0.105", + ":serde_with_macros-3.3.0", ], ) @@ -13523,79 +12957,79 @@ cargo.rust_library( visibility = [], deps = [ ":darling-0.20.3", - ":proc-macro2-1.0.70", + ":proc-macro2-1.0.66", ":quote-1.0.33", - ":syn-2.0.40", + ":syn-2.0.29", ], ) http_archive( - name = "serde_with_macros-3.4.0.crate", - sha256 = "93634eb5f75a2323b16de4748022ac4297f9e76b6dced2be287a099f41b5e788", - strip_prefix = "serde_with_macros-3.4.0", - urls = ["https://crates.io/api/v1/crates/serde_with_macros/3.4.0/download"], + name = "serde_with_macros-3.3.0.crate", + sha256 = "2e6be15c453eb305019bfa438b1593c731f36a289a7853f7707ee29e870b3b3c", + strip_prefix = "serde_with_macros-3.3.0", + urls = ["https://crates.io/api/v1/crates/serde_with_macros/3.3.0/download"], visibility = [], ) cargo.rust_library( - name = "serde_with_macros-3.4.0", - srcs = [":serde_with_macros-3.4.0.crate"], + name = "serde_with_macros-3.3.0", + srcs = [":serde_with_macros-3.3.0.crate"], crate = "serde_with_macros", - crate_root = "serde_with_macros-3.4.0.crate/src/lib.rs", + crate_root = "serde_with_macros-3.3.0.crate/src/lib.rs", edition = "2021", proc_macro = True, visibility = [], deps = [ ":darling-0.20.3", - ":proc-macro2-1.0.70", + ":proc-macro2-1.0.66", ":quote-1.0.33", - ":syn-2.0.40", + ":syn-2.0.29", ], ) alias( name = "serde_yaml", - actual = ":serde_yaml-0.9.27", + actual = ":serde_yaml-0.9.25", visibility = ["PUBLIC"], ) http_archive( - name = "serde_yaml-0.9.27.crate", - sha256 = "3cc7a1570e38322cfe4154732e5110f887ea57e22b76f4bfd32b5bdd3368666c", - strip_prefix = "serde_yaml-0.9.27", - urls = ["https://crates.io/api/v1/crates/serde_yaml/0.9.27/download"], + name = "serde_yaml-0.9.25.crate", + sha256 = 
"1a49e178e4452f45cb61d0cd8cebc1b0fafd3e41929e996cef79aa3aca91f574", + strip_prefix = "serde_yaml-0.9.25", + urls = ["https://crates.io/api/v1/crates/serde_yaml/0.9.25/download"], visibility = [], ) cargo.rust_library( - name = "serde_yaml-0.9.27", - srcs = [":serde_yaml-0.9.27.crate"], + name = "serde_yaml-0.9.25", + srcs = [":serde_yaml-0.9.25.crate"], crate = "serde_yaml", - crate_root = "serde_yaml-0.9.27.crate/src/lib.rs", + crate_root = "serde_yaml-0.9.25.crate/src/lib.rs", edition = "2021", visibility = [], deps = [ - ":indexmap-2.1.0", - ":itoa-1.0.10", - ":ryu-1.0.16", - ":serde-1.0.193", + ":indexmap-2.0.0", + ":itoa-1.0.9", + ":ryu-1.0.15", + ":serde-1.0.186", ":unsafe-libyaml-0.2.9", ], ) http_archive( - name = "sha1-0.10.6.crate", - sha256 = "e3bf829a2d51ab4a5ddf1352d8470c140cadc8301b2ae1789db023f01cedd6ba", - strip_prefix = "sha1-0.10.6", - urls = ["https://crates.io/api/v1/crates/sha1/0.10.6/download"], + name = "sha1-0.10.5.crate", + sha256 = "f04293dc80c3993519f2d7f6f511707ee7094fe0c6d3406feb330cdb3540eba3", + strip_prefix = "sha1-0.10.5", + urls = ["https://crates.io/api/v1/crates/sha1/0.10.5/download"], visibility = [], ) cargo.rust_library( - name = "sha1-0.10.6", - srcs = [":sha1-0.10.6.crate"], + name = "sha1-0.10.5", + srcs = [":sha1-0.10.5.crate"], crate = "sha1", - crate_root = "sha1-0.10.6.crate/src/lib.rs", + crate_root = "sha1-0.10.5.crate/src/lib.rs", edition = "2018", features = [ "default", @@ -13603,22 +13037,22 @@ cargo.rust_library( ], platform = { "linux-arm64": dict( - deps = [":cpufeatures-0.2.11"], + deps = [":cpufeatures-0.2.9"], ), "linux-x86_64": dict( - deps = [":cpufeatures-0.2.11"], + deps = [":cpufeatures-0.2.9"], ), "macos-arm64": dict( - deps = [":cpufeatures-0.2.11"], + deps = [":cpufeatures-0.2.9"], ), "macos-x86_64": dict( - deps = [":cpufeatures-0.2.11"], + deps = [":cpufeatures-0.2.9"], ), "windows-gnu": dict( - deps = [":cpufeatures-0.2.11"], + deps = [":cpufeatures-0.2.9"], ), "windows-msvc": dict( - deps = 
[":cpufeatures-0.2.11"], + deps = [":cpufeatures-0.2.9"], ), }, visibility = [], @@ -13629,18 +13063,18 @@ cargo.rust_library( ) http_archive( - name = "sha2-0.10.8.crate", - sha256 = "793db75ad2bcafc3ffa7c68b215fee268f537982cd901d132f89c6343f3a3dc8", - strip_prefix = "sha2-0.10.8", - urls = ["https://crates.io/api/v1/crates/sha2/0.10.8/download"], + name = "sha2-0.10.7.crate", + sha256 = "479fb9d862239e610720565ca91403019f2f00410f1864c5aa7479b950a76ed8", + strip_prefix = "sha2-0.10.7", + urls = ["https://crates.io/api/v1/crates/sha2/0.10.7/download"], visibility = [], ) cargo.rust_library( - name = "sha2-0.10.8", - srcs = [":sha2-0.10.8.crate"], + name = "sha2-0.10.7", + srcs = [":sha2-0.10.7.crate"], crate = "sha2", - crate_root = "sha2-0.10.8.crate/src/lib.rs", + crate_root = "sha2-0.10.7.crate/src/lib.rs", edition = "2018", features = [ "default", @@ -13648,22 +13082,22 @@ cargo.rust_library( ], platform = { "linux-arm64": dict( - deps = [":cpufeatures-0.2.11"], + deps = [":cpufeatures-0.2.9"], ), "linux-x86_64": dict( - deps = [":cpufeatures-0.2.11"], + deps = [":cpufeatures-0.2.9"], ), "macos-arm64": dict( - deps = [":cpufeatures-0.2.11"], + deps = [":cpufeatures-0.2.9"], ), "macos-x86_64": dict( - deps = [":cpufeatures-0.2.11"], + deps = [":cpufeatures-0.2.9"], ), "windows-gnu": dict( - deps = [":cpufeatures-0.2.11"], + deps = [":cpufeatures-0.2.9"], ), "windows-msvc": dict( - deps = [":cpufeatures-0.2.11"], + deps = [":cpufeatures-0.2.9"], ), }, visibility = [], @@ -13689,22 +13123,22 @@ cargo.rust_library( edition = "2018", platform = { "linux-arm64": dict( - deps = [":cpufeatures-0.2.11"], + deps = [":cpufeatures-0.2.9"], ), "linux-x86_64": dict( - deps = [":cpufeatures-0.2.11"], + deps = [":cpufeatures-0.2.9"], ), "macos-arm64": dict( - deps = [":cpufeatures-0.2.11"], + deps = [":cpufeatures-0.2.9"], ), "macos-x86_64": dict( - deps = [":cpufeatures-0.2.11"], + deps = [":cpufeatures-0.2.9"], ), "windows-gnu": dict( - deps = [":cpufeatures-0.2.11"], + deps 
= [":cpufeatures-0.2.9"], ), "windows-msvc": dict( - deps = [":cpufeatures-0.2.11"], + deps = [":cpufeatures-0.2.9"], ), }, visibility = [], @@ -13717,18 +13151,18 @@ cargo.rust_library( ) http_archive( - name = "sharded-slab-0.1.7.crate", - sha256 = "f40ca3c46823713e0d4209592e8d6e826aa57e928f09752619fc696c499637f6", - strip_prefix = "sharded-slab-0.1.7", - urls = ["https://crates.io/api/v1/crates/sharded-slab/0.1.7/download"], + name = "sharded-slab-0.1.4.crate", + sha256 = "900fba806f70c630b0a382d0d825e17a0f19fcd059a2ade1ff237bcddf446b31", + strip_prefix = "sharded-slab-0.1.4", + urls = ["https://crates.io/api/v1/crates/sharded-slab/0.1.4/download"], visibility = [], ) cargo.rust_library( - name = "sharded-slab-0.1.7", - srcs = [":sharded-slab-0.1.7.crate"], + name = "sharded-slab-0.1.4", + srcs = [":sharded-slab-0.1.4.crate"], crate = "sharded_slab", - crate_root = "sharded-slab-0.1.7.crate/src/lib.rs", + crate_root = "sharded-slab-0.1.4.crate/src/lib.rs", edition = "2018", visibility = [], deps = [":lazy_static-1.4.0"], @@ -13755,7 +13189,7 @@ cargo.rust_library( ], visibility = [], deps = [ - ":libc-0.2.151", + ":libc-0.2.147", ":signal-hook-registry-1.4.1", ], ) @@ -13779,11 +13213,11 @@ cargo.rust_library( "support-v0_8", ], named_deps = { - "mio_0_8": ":mio-0.8.10", + "mio_0_8": ":mio-0.8.8", }, visibility = [], deps = [ - ":libc-0.2.151", + ":libc-0.2.147", ":signal-hook-0.3.17", ], ) @@ -13803,7 +13237,7 @@ cargo.rust_library( crate_root = "signal-hook-registry-1.4.1.crate/src/lib.rs", edition = "2015", visibility = [], - deps = [":libc-0.2.151"], + deps = [":libc-0.2.147"], ) http_archive( @@ -13829,34 +13263,7 @@ cargo.rust_library( ":pkcs8-0.7.6", ":rand_core-0.6.4", ":signature-1.6.4", - ":zeroize-1.7.0", - ], -) - -http_archive( - name = "signatory-0.27.1.crate", - sha256 = "c1e303f8205714074f6068773f0e29527e0453937fe837c9717d066635b65f31", - strip_prefix = "signatory-0.27.1", - urls = ["https://crates.io/api/v1/crates/signatory/0.27.1/download"], - 
visibility = [], -) - -cargo.rust_library( - name = "signatory-0.27.1", - srcs = [":signatory-0.27.1.crate"], - crate = "signatory", - crate_root = "signatory-0.27.1.crate/src/lib.rs", - edition = "2021", - features = [ - "default", - "std", - ], - visibility = [], - deps = [ - ":pkcs8-0.10.2", - ":rand_core-0.6.4", - ":signature-2.2.0", - ":zeroize-1.7.0", + ":zeroize-1.6.0", ], ) @@ -13890,18 +13297,18 @@ cargo.rust_library( ) http_archive( - name = "signature-2.2.0.crate", - sha256 = "77549399552de45a898a580c1b41d445bf730df867cc44e6c0233bbc4b8329de", - strip_prefix = "signature-2.2.0", - urls = ["https://crates.io/api/v1/crates/signature/2.2.0/download"], + name = "signature-2.1.0.crate", + sha256 = "5e1788eed21689f9cf370582dfc467ef36ed9c707f073528ddafa8d83e3b8500", + strip_prefix = "signature-2.1.0", + urls = ["https://crates.io/api/v1/crates/signature/2.1.0/download"], visibility = [], ) cargo.rust_library( - name = "signature-2.2.0", - srcs = [":signature-2.2.0.crate"], + name = "signature-2.1.0", + srcs = [":signature-2.1.0.crate"], crate = "signature", - crate_root = "signature-2.2.0.crate/src/lib.rs", + crate_root = "signature-2.1.0.crate/src/lib.rs", edition = "2021", features = [ "alloc", @@ -13955,27 +13362,6 @@ cargo.rust_library( visibility = [], ) -http_archive( - name = "siphasher-1.0.0.crate", - sha256 = "54ac45299ccbd390721be55b412d41931911f654fa99e2cb8bfb57184b2061fe", - strip_prefix = "siphasher-1.0.0", - urls = ["https://crates.io/api/v1/crates/siphasher/1.0.0/download"], - visibility = [], -) - -cargo.rust_library( - name = "siphasher-1.0.0", - srcs = [":siphasher-1.0.0.crate"], - crate = "siphasher", - crate_root = "siphasher-1.0.0.crate/src/lib.rs", - edition = "2018", - features = [ - "default", - "std", - ], - visibility = [], -) - http_archive( name = "slab-0.4.9.crate", sha256 = "8f92a496fb766b417c996b9c5e57daf2f7ad3b0bebe1ccfca4856390e3d3bb67", @@ -14013,41 +13399,22 @@ cargo.rust_library( edition = "2018", features = ["union"], 
visibility = [], - deps = [":smallvec-1.11.2"], + deps = [":smallvec-1.11.0"], ) http_archive( - name = "smallstr-0.3.0.crate", - sha256 = "63b1aefdf380735ff8ded0b15f31aab05daf1f70216c01c02a12926badd1df9d", - strip_prefix = "smallstr-0.3.0", - urls = ["https://crates.io/api/v1/crates/smallstr/0.3.0/download"], + name = "smallvec-1.11.0.crate", + sha256 = "62bb4feee49fdd9f707ef802e22365a35de4b7b299de4763d44bfea899442ff9", + strip_prefix = "smallvec-1.11.0", + urls = ["https://crates.io/api/v1/crates/smallvec/1.11.0/download"], visibility = [], ) cargo.rust_library( - name = "smallstr-0.3.0", - srcs = [":smallstr-0.3.0.crate"], - crate = "smallstr", - crate_root = "smallstr-0.3.0.crate/src/lib.rs", - edition = "2018", - features = ["union"], - visibility = [], - deps = [":smallvec-1.11.2"], -) - -http_archive( - name = "smallvec-1.11.2.crate", - sha256 = "4dccd0940a2dcdf68d092b8cbab7dc0ad8fa938bf95787e1b916b0e3d0e8e970", - strip_prefix = "smallvec-1.11.2", - urls = ["https://crates.io/api/v1/crates/smallvec/1.11.2/download"], - visibility = [], -) - -cargo.rust_library( - name = "smallvec-1.11.2", - srcs = [":smallvec-1.11.2.crate"], + name = "smallvec-1.11.0", + srcs = [":smallvec-1.11.0.crate"], crate = "smallvec", - crate_root = "smallvec-1.11.2.crate/src/lib.rs", + crate_root = "smallvec-1.11.0.crate/src/lib.rs", edition = "2018", features = [ "const_generics", @@ -14058,32 +13425,32 @@ cargo.rust_library( ) http_archive( - name = "socket2-0.4.10.crate", - sha256 = "9f7916fc008ca5542385b89a3d3ce689953c143e9304a9bf8beec1de48994c0d", - strip_prefix = "socket2-0.4.10", - urls = ["https://crates.io/api/v1/crates/socket2/0.4.10/download"], + name = "socket2-0.4.9.crate", + sha256 = "64a4a911eed85daf18834cfaa86a79b7d266ff93ff5ba14005426219480ed662", + strip_prefix = "socket2-0.4.9", + urls = ["https://crates.io/api/v1/crates/socket2/0.4.9/download"], visibility = [], ) cargo.rust_library( - name = "socket2-0.4.10", - srcs = [":socket2-0.4.10.crate"], + name = 
"socket2-0.4.9", + srcs = [":socket2-0.4.9.crate"], crate = "socket2", - crate_root = "socket2-0.4.10.crate/src/lib.rs", + crate_root = "socket2-0.4.9.crate/src/lib.rs", edition = "2018", features = ["all"], platform = { "linux-arm64": dict( - deps = [":libc-0.2.151"], + deps = [":libc-0.2.147"], ), "linux-x86_64": dict( - deps = [":libc-0.2.151"], + deps = [":libc-0.2.147"], ), "macos-arm64": dict( - deps = [":libc-0.2.151"], + deps = [":libc-0.2.147"], ), "macos-x86_64": dict( - deps = [":libc-0.2.151"], + deps = [":libc-0.2.147"], ), "windows-gnu": dict( deps = [":winapi-0.3.9"], @@ -14096,32 +13463,32 @@ cargo.rust_library( ) http_archive( - name = "socket2-0.5.5.crate", - sha256 = "7b5fac59a5cb5dd637972e5fca70daf0523c9067fcdc4842f053dae04a18f8e9", - strip_prefix = "socket2-0.5.5", - urls = ["https://crates.io/api/v1/crates/socket2/0.5.5/download"], + name = "socket2-0.5.3.crate", + sha256 = "2538b18701741680e0322a2302176d3253a35388e2e62f172f64f4f16605f877", + strip_prefix = "socket2-0.5.3", + urls = ["https://crates.io/api/v1/crates/socket2/0.5.3/download"], visibility = [], ) cargo.rust_library( - name = "socket2-0.5.5", - srcs = [":socket2-0.5.5.crate"], + name = "socket2-0.5.3", + srcs = [":socket2-0.5.3.crate"], crate = "socket2", - crate_root = "socket2-0.5.5.crate/src/lib.rs", + crate_root = "socket2-0.5.3.crate/src/lib.rs", edition = "2021", features = ["all"], platform = { "linux-arm64": dict( - deps = [":libc-0.2.151"], + deps = [":libc-0.2.147"], ), "linux-x86_64": dict( - deps = [":libc-0.2.151"], + deps = [":libc-0.2.147"], ), "macos-arm64": dict( - deps = [":libc-0.2.151"], + deps = [":libc-0.2.147"], ), "macos-x86_64": dict( - deps = [":libc-0.2.151"], + deps = [":libc-0.2.147"], ), "windows-gnu": dict( deps = [":windows-sys-0.48.0"], @@ -14161,9 +13528,9 @@ cargo.rust_library( visibility = [], deps = [ ":ed25519-1.5.3", - ":libc-0.2.151", + ":libc-0.2.147", ":libsodium-sys-0.2.7", - ":serde-1.0.193", + ":serde-1.0.186", ], ) @@ -14199,10 
+13566,19 @@ cargo.rust_library( crate_root = "spin-0.9.8.crate/src/lib.rs", edition = "2015", features = [ + "barrier", + "default", + "lazy", + "lock_api", + "lock_api_crate", "mutex", "once", + "rwlock", "spin_mutex", ], + named_deps = { + "lock_api_crate": ":lock_api-0.4.10", + }, visibility = [], ) @@ -14252,18 +13628,18 @@ cargo.rust_library( ) http_archive( - name = "spki-0.7.3.crate", - sha256 = "d91ed6c858b01f942cd56b37a94b3e0a1798290327d1236e4d9cf4eaca44d29d", - strip_prefix = "spki-0.7.3", - urls = ["https://crates.io/api/v1/crates/spki/0.7.3/download"], + name = "spki-0.7.2.crate", + sha256 = "9d1e996ef02c474957d681f1b05213dfb0abab947b446a62d37770b23500184a", + strip_prefix = "spki-0.7.2", + urls = ["https://crates.io/api/v1/crates/spki/0.7.2/download"], visibility = [], ) cargo.rust_library( - name = "spki-0.7.3", - srcs = [":spki-0.7.3.crate"], + name = "spki-0.7.2", + srcs = [":spki-0.7.2.crate"], crate = "spki", - crate_root = "spki-0.7.3.crate/src/lib.rs", + crate_root = "spki-0.7.2.crate/src/lib.rs", edition = "2021", features = [ "alloc", @@ -14278,22 +13654,22 @@ cargo.rust_library( ) http_archive( - name = "sqlformat-0.2.3.crate", - sha256 = "ce81b7bd7c4493975347ef60d8c7e8b742d4694f4c49f93e0a12ea263938176c", - strip_prefix = "sqlformat-0.2.3", - urls = ["https://crates.io/api/v1/crates/sqlformat/0.2.3/download"], + name = "sqlformat-0.2.1.crate", + sha256 = "0c12bc9199d1db8234678b7051747c07f517cdcf019262d1847b94ec8b1aee3e", + strip_prefix = "sqlformat-0.2.1", + urls = ["https://crates.io/api/v1/crates/sqlformat/0.2.1/download"], visibility = [], ) cargo.rust_library( - name = "sqlformat-0.2.3", - srcs = [":sqlformat-0.2.3.crate"], + name = "sqlformat-0.2.1", + srcs = [":sqlformat-0.2.1.crate"], crate = "sqlformat", - crate_root = "sqlformat-0.2.3.crate/src/lib.rs", + crate_root = "sqlformat-0.2.1.crate/src/lib.rs", edition = "2021", visibility = [], deps = [ - ":itertools-0.12.0", + ":itertools-0.10.5", ":nom-7.1.3", 
":unicode_categories-0.1.1", ], @@ -14382,13 +13758,13 @@ cargo.rust_library( }, visibility = [], deps = [ - ":ahash-0.7.7", + ":ahash-0.7.6", ":atoi-1.0.0", ":base64-0.13.1", ":bitflags-1.3.2", - ":byteorder-1.5.0", - ":bytes-1.5.0", - ":chrono-0.4.31", + ":byteorder-1.4.3", + ":bytes-1.4.0", + ":chrono-0.4.26", ":crossbeam-queue-0.3.8", ":dirs-4.0.0", ":dotenvy-0.15.7", @@ -14398,37 +13774,37 @@ cargo.rust_library( ":futures-core-0.3.29", ":futures-intrusive-0.4.2", ":futures-util-0.3.29", - ":hashlink-0.8.4", + ":hashlink-0.8.3", ":hex-0.4.3", ":hkdf-0.12.3", ":hmac-0.12.1", ":indexmap-1.9.3", - ":itoa-1.0.10", - ":libc-0.2.151", + ":itoa-1.0.9", + ":libc-0.2.147", ":log-0.4.20", - ":md-5-0.10.6", - ":memchr-2.6.4", + ":md-5-0.10.5", + ":memchr-2.5.0", ":num-bigint-0.4.4", - ":once_cell-1.19.0", + ":once_cell-1.18.0", ":paste-1.0.14", - ":percent-encoding-2.3.1", + ":percent-encoding-2.3.0", ":rand-0.8.5", - ":rust_decimal-1.33.1", - ":rustls-0.20.9", - ":rustls-pemfile-1.0.4", - ":serde-1.0.193", - ":serde_json-1.0.108", - ":sha1-0.10.6", - ":sha2-0.10.8", - ":smallvec-1.11.2", - ":sqlformat-0.2.3", + ":rust_decimal-1.32.0", + ":rustls-0.20.8", + ":rustls-pemfile-1.0.3", + ":serde-1.0.186", + ":serde_json-1.0.105", + ":sha1-0.10.5", + ":sha2-0.10.7", + ":smallvec-1.11.0", + ":sqlformat-0.2.1", ":sqlx-rt-0.6.3", - ":stringprep-0.1.4", - ":thiserror-1.0.50", - ":time-0.3.30", + ":stringprep-0.1.3", + ":thiserror-1.0.47", + ":time-0.3.27", ":tokio-stream-0.1.14", - ":url-2.5.0", - ":uuid-1.6.1", + ":url-2.4.0", + ":uuid-1.4.1", ":webpki-roots-0.22.6", ":whoami-1.4.1", ], @@ -14466,14 +13842,14 @@ cargo.rust_library( ":dotenvy-0.15.7", ":either-1.9.0", ":heck-0.4.1", - ":once_cell-1.19.0", - ":proc-macro2-1.0.70", + ":once_cell-1.18.0", + ":proc-macro2-1.0.66", ":quote-1.0.33", - ":serde_json-1.0.108", + ":serde_json-1.0.105", ":sqlx-core-0.6.3", ":sqlx-rt-0.6.3", ":syn-1.0.109", - ":url-2.5.0", + ":url-2.4.0", ], ) @@ -14501,8 +13877,8 @@ cargo.rust_library( ], 
visibility = [], deps = [ - ":once_cell-1.19.0", - ":tokio-1.35.0", + ":once_cell-1.18.0", + ":tokio-1.32.0", ":tokio-rustls-0.23.4", ], ) @@ -14529,6 +13905,23 @@ cargo.rust_library( visibility = [], ) +http_archive( + name = "static_assertions-1.1.0.crate", + sha256 = "a2eb9349b6444b326872e140eb1cf5e7c522154d69e7a0ffb0fb81c06b37543f", + strip_prefix = "static_assertions-1.1.0", + urls = ["https://crates.io/api/v1/crates/static_assertions/1.1.0/download"], + visibility = [], +) + +cargo.rust_library( + name = "static_assertions-1.1.0", + srcs = [":static_assertions-1.1.0.crate"], + crate = "static_assertions", + crate_root = "static_assertions-1.1.0.crate/src/lib.rs", + edition = "2015", + visibility = [], +) + alias( name = "stream-cancel", actual = ":stream-cancel-0.8.1", @@ -14553,28 +13946,27 @@ cargo.rust_library( deps = [ ":futures-core-0.3.29", ":pin-project-1.1.3", - ":tokio-1.35.0", + ":tokio-1.32.0", ], ) http_archive( - name = "stringprep-0.1.4.crate", - sha256 = "bb41d74e231a107a1b4ee36bd1214b11285b77768d2e3824aedafa988fd36ee6", - strip_prefix = "stringprep-0.1.4", - urls = ["https://crates.io/api/v1/crates/stringprep/0.1.4/download"], + name = "stringprep-0.1.3.crate", + sha256 = "db3737bde7edce97102e0e2b15365bf7a20bfdb5f60f4f9e8d7004258a51a8da", + strip_prefix = "stringprep-0.1.3", + urls = ["https://crates.io/api/v1/crates/stringprep/0.1.3/download"], visibility = [], ) cargo.rust_library( - name = "stringprep-0.1.4", - srcs = [":stringprep-0.1.4.crate"], + name = "stringprep-0.1.3", + srcs = [":stringprep-0.1.3.crate"], crate = "stringprep", - crate_root = "stringprep-0.1.4.crate/src/lib.rs", + crate_root = "stringprep-0.1.3.crate/src/lib.rs", edition = "2015", visibility = [], deps = [ - ":finl_unicode-1.2.0", - ":unicode-bidi-0.3.14", + ":unicode-bidi-0.3.13", ":unicode-normalization-0.1.22", ], ) @@ -14626,27 +14018,6 @@ cargo.rust_library( deps = [":strum_macros-0.24.3"], ) -http_archive( - name = "strum-0.25.0.crate", - sha256 = 
"290d54ea6f91c969195bdbcd7442c8c2a2ba87da8bf60a7ee86a235d4bc1e125", - strip_prefix = "strum-0.25.0", - urls = ["https://crates.io/api/v1/crates/strum/0.25.0/download"], - visibility = [], -) - -cargo.rust_library( - name = "strum-0.25.0", - srcs = [":strum-0.25.0.crate"], - crate = "strum", - crate_root = "strum-0.25.0.crate/src/lib.rs", - edition = "2018", - features = [ - "default", - "std", - ], - visibility = [], -) - http_archive( name = "strum_macros-0.24.3.crate", sha256 = "1e385be0d24f186b4ce2f9982191e7101bb737312ad61c1f2f984f34bcf85d59", @@ -14665,38 +14036,13 @@ cargo.rust_library( visibility = [], deps = [ ":heck-0.4.1", - ":proc-macro2-1.0.70", + ":proc-macro2-1.0.66", ":quote-1.0.33", ":rustversion-1.0.14", ":syn-1.0.109", ], ) -http_archive( - name = "strum_macros-0.25.3.crate", - sha256 = "23dc1fa9ac9c169a78ba62f0b841814b7abae11bdd047b9c58f893439e309ea0", - strip_prefix = "strum_macros-0.25.3", - urls = ["https://crates.io/api/v1/crates/strum_macros/0.25.3/download"], - visibility = [], -) - -cargo.rust_library( - name = "strum_macros-0.25.3", - srcs = [":strum_macros-0.25.3.crate"], - crate = "strum_macros", - crate_root = "strum_macros-0.25.3.crate/src/lib.rs", - edition = "2018", - proc_macro = True, - visibility = [], - deps = [ - ":heck-0.4.1", - ":proc-macro2-1.0.70", - ":quote-1.0.33", - ":rustversion-1.0.14", - ":syn-2.0.40", - ], -) - http_archive( name = "subtle-2.5.0.crate", sha256 = "81cdd64d312baedb58e21336b31bc043b77e01cc99033ce76ef539f78e965ebc", @@ -14729,46 +14075,6 @@ cargo.rust_library( crate = "syn", crate_root = "syn-1.0.109.crate/src/lib.rs", edition = "2018", - features = [ - "clone-impls", - "default", - "derive", - "extra-traits", - "full", - "parsing", - "printing", - "proc-macro", - "quote", - "visit-mut", - ], - visibility = [], - deps = [ - ":proc-macro2-1.0.70", - ":quote-1.0.33", - ":unicode-ident-1.0.12", - ], -) - -alias( - name = "syn", - actual = ":syn-2.0.40", - visibility = ["PUBLIC"], -) - -http_archive( - name = 
"syn-2.0.40.crate", - sha256 = "13fa70a4ee923979ffb522cacce59d34421ebdea5625e1073c4326ef9d2dd42e", - strip_prefix = "syn-2.0.40", - urls = ["https://crates.io/api/v1/crates/syn/2.0.40/download"], - visibility = [], -) - -cargo.rust_library( - name = "syn-2.0.40", - srcs = [":syn-2.0.40.crate"], - crate = "syn", - crate_root = "syn-2.0.40.crate/src/lib.rs", - edition = "2021", features = [ "clone-impls", "default", @@ -14780,42 +14086,54 @@ cargo.rust_library( "printing", "proc-macro", "quote", - "visit", "visit-mut", ], visibility = [], deps = [ - ":proc-macro2-1.0.70", + ":proc-macro2-1.0.66", ":quote-1.0.33", - ":unicode-ident-1.0.12", + ":unicode-ident-1.0.11", ], ) +alias( + name = "syn", + actual = ":syn-2.0.29", + visibility = ["PUBLIC"], +) + http_archive( - name = "syn_derive-0.1.8.crate", - sha256 = "1329189c02ff984e9736652b1631330da25eaa6bc639089ed4915d25446cbe7b", - strip_prefix = "syn_derive-0.1.8", - urls = ["https://crates.io/api/v1/crates/syn_derive/0.1.8/download"], + name = "syn-2.0.29.crate", + sha256 = "c324c494eba9d92503e6f1ef2e6df781e78f6a7705a0202d9801b198807d518a", + strip_prefix = "syn-2.0.29", + urls = ["https://crates.io/api/v1/crates/syn/2.0.29/download"], visibility = [], ) cargo.rust_library( - name = "syn_derive-0.1.8", - srcs = [":syn_derive-0.1.8.crate"], - crate = "syn_derive", - crate_root = "syn_derive-0.1.8.crate/src/lib.rs", + name = "syn-2.0.29", + srcs = [":syn-2.0.29.crate"], + crate = "syn", + crate_root = "syn-2.0.29.crate/src/lib.rs", edition = "2021", features = [ + "clone-impls", "default", + "derive", + "extra-traits", "full", + "parsing", + "printing", + "proc-macro", + "quote", + "visit", + "visit-mut", ], - proc_macro = True, visibility = [], deps = [ - ":proc-macro-error-1.0.4", - ":proc-macro2-1.0.70", + ":proc-macro2-1.0.66", ":quote-1.0.33", - ":syn-2.0.40", + ":unicode-ident-1.0.11", ], ) @@ -14836,88 +14154,6 @@ cargo.rust_library( visibility = [], ) -http_archive( - name = "system-configuration-0.5.1.crate", - 
sha256 = "ba3a3adc5c275d719af8cb4272ea1c4a6d668a777f37e115f6d11ddbc1c8e0e7", - strip_prefix = "system-configuration-0.5.1", - urls = ["https://crates.io/api/v1/crates/system-configuration/0.5.1/download"], - visibility = [], -) - -cargo.rust_library( - name = "system-configuration-0.5.1", - srcs = [":system-configuration-0.5.1.crate"], - crate = "system_configuration", - crate_root = "system-configuration-0.5.1.crate/src/lib.rs", - edition = "2021", - visibility = [], - deps = [ - ":bitflags-1.3.2", - ":core-foundation-0.9.4", - ":system-configuration-sys-0.5.0", - ], -) - -http_archive( - name = "system-configuration-sys-0.5.0.crate", - sha256 = "a75fb188eb626b924683e3b95e3a48e63551fcfb51949de2f06a9d91dbee93c9", - strip_prefix = "system-configuration-sys-0.5.0", - urls = ["https://crates.io/api/v1/crates/system-configuration-sys/0.5.0/download"], - visibility = [], -) - -cargo.rust_library( - name = "system-configuration-sys-0.5.0", - srcs = [":system-configuration-sys-0.5.0.crate"], - crate = "system_configuration_sys", - crate_root = "system-configuration-sys-0.5.0.crate/src/lib.rs", - edition = "2021", - env = { - "CARGO_MANIFEST_DIR": "system-configuration-sys-0.5.0.crate", - "CARGO_PKG_AUTHORS": "Mullvad VPN", - "CARGO_PKG_DESCRIPTION": "Low level bindings to SystemConfiguration framework for macOS", - "CARGO_PKG_NAME": "system-configuration-sys", - "CARGO_PKG_REPOSITORY": "https://github.com/mullvad/system-configuration-rs", - "CARGO_PKG_VERSION": "0.5.0", - "CARGO_PKG_VERSION_MAJOR": "0", - "CARGO_PKG_VERSION_MINOR": "5", - "CARGO_PKG_VERSION_PATCH": "0", - }, - rustc_flags = ["@$(location :system-configuration-sys-0.5.0-build-script-run[rustc_flags])"], - visibility = [], - deps = [ - ":core-foundation-sys-0.8.6", - ":libc-0.2.151", - ], -) - -cargo.rust_binary( - name = "system-configuration-sys-0.5.0-build-script-build", - srcs = [":system-configuration-sys-0.5.0.crate"], - crate = "build_script_build", - crate_root = 
"system-configuration-sys-0.5.0.crate/build.rs", - edition = "2021", - env = { - "CARGO_MANIFEST_DIR": "system-configuration-sys-0.5.0.crate", - "CARGO_PKG_AUTHORS": "Mullvad VPN", - "CARGO_PKG_DESCRIPTION": "Low level bindings to SystemConfiguration framework for macOS", - "CARGO_PKG_NAME": "system-configuration-sys", - "CARGO_PKG_REPOSITORY": "https://github.com/mullvad/system-configuration-rs", - "CARGO_PKG_VERSION": "0.5.0", - "CARGO_PKG_VERSION_MAJOR": "0", - "CARGO_PKG_VERSION_MINOR": "5", - "CARGO_PKG_VERSION_PATCH": "0", - }, - visibility = [], -) - -buildscript_run( - name = "system-configuration-sys-0.5.0-build-script-run", - package_name = "system-configuration-sys", - buildscript_rule = ":system-configuration-sys-0.5.0-build-script-build", - version = "0.5.0", -) - http_archive( name = "tap-1.0.1.crate", sha256 = "55937e1799185b12863d447f42597ed69d9928686b8d88a1df17376a097d8369", @@ -14962,31 +14198,31 @@ cargo.rust_library( platform = { "linux-arm64": dict( deps = [ - ":libc-0.2.151", - ":xattr-1.1.2", + ":libc-0.2.147", + ":xattr-1.0.1", ], ), "linux-x86_64": dict( deps = [ - ":libc-0.2.151", - ":xattr-1.1.2", + ":libc-0.2.147", + ":xattr-1.0.1", ], ), "macos-arm64": dict( deps = [ - ":libc-0.2.151", - ":xattr-1.1.2", + ":libc-0.2.147", + ":xattr-1.0.1", ], ), "macos-x86_64": dict( deps = [ - ":libc-0.2.151", - ":xattr-1.1.2", + ":libc-0.2.147", + ":xattr-1.0.1", ], ), }, visibility = [], - deps = [":filetime-0.2.23"], + deps = [":filetime-0.2.22"], ) http_archive( @@ -15009,36 +14245,36 @@ cargo.rust_library( alias( name = "tempfile", - actual = ":tempfile-3.8.1", + actual = ":tempfile-3.8.0", visibility = ["PUBLIC"], ) http_archive( - name = "tempfile-3.8.1.crate", - sha256 = "7ef1adac450ad7f4b3c28589471ade84f25f731a7a0fe30d71dfa9f60fd808e5", - strip_prefix = "tempfile-3.8.1", - urls = ["https://crates.io/api/v1/crates/tempfile/3.8.1/download"], + name = "tempfile-3.8.0.crate", + sha256 = 
"cb94d2f3cc536af71caac6b6fcebf65860b347e7ce0cc9ebe8f70d3e521054ef", + strip_prefix = "tempfile-3.8.0", + urls = ["https://crates.io/api/v1/crates/tempfile/3.8.0/download"], visibility = [], ) cargo.rust_library( - name = "tempfile-3.8.1", - srcs = [":tempfile-3.8.1.crate"], + name = "tempfile-3.8.0", + srcs = [":tempfile-3.8.0.crate"], crate = "tempfile", - crate_root = "tempfile-3.8.1.crate/src/lib.rs", + crate_root = "tempfile-3.8.0.crate/src/lib.rs", edition = "2018", platform = { "linux-arm64": dict( - deps = [":rustix-0.38.28"], + deps = [":rustix-0.38.8"], ), "linux-x86_64": dict( - deps = [":rustix-0.38.28"], + deps = [":rustix-0.38.8"], ), "macos-arm64": dict( - deps = [":rustix-0.38.28"], + deps = [":rustix-0.38.8"], ), "macos-x86_64": dict( - deps = [":rustix-0.38.28"], + deps = [":rustix-0.38.8"], ), "windows-gnu": dict( deps = [":windows-sys-0.48.0"], @@ -15050,36 +14286,36 @@ cargo.rust_library( visibility = [], deps = [ ":cfg-if-1.0.0", - ":fastrand-2.0.1", + ":fastrand-2.0.0", ], ) http_archive( - name = "terminal_size-0.3.0.crate", - sha256 = "21bebf2b7c9e0a515f6e0f8c51dc0f8e4696391e6f1ff30379559f8365fb0df7", - strip_prefix = "terminal_size-0.3.0", - urls = ["https://crates.io/api/v1/crates/terminal_size/0.3.0/download"], + name = "terminal_size-0.2.6.crate", + sha256 = "8e6bf6f19e9f8ed8d4048dc22981458ebcf406d67e94cd422e5ecd73d63b3237", + strip_prefix = "terminal_size-0.2.6", + urls = ["https://crates.io/api/v1/crates/terminal_size/0.2.6/download"], visibility = [], ) cargo.rust_library( - name = "terminal_size-0.3.0", - srcs = [":terminal_size-0.3.0.crate"], + name = "terminal_size-0.2.6", + srcs = [":terminal_size-0.2.6.crate"], crate = "terminal_size", - crate_root = "terminal_size-0.3.0.crate/src/lib.rs", - edition = "2021", + crate_root = "terminal_size-0.2.6.crate/src/lib.rs", + edition = "2018", platform = { "linux-arm64": dict( - deps = [":rustix-0.38.28"], + deps = [":rustix-0.37.23"], ), "linux-x86_64": dict( - deps = [":rustix-0.38.28"], 
+ deps = [":rustix-0.37.23"], ), "macos-arm64": dict( - deps = [":rustix-0.38.28"], + deps = [":rustix-0.37.23"], ), "macos-x86_64": dict( - deps = [":rustix-0.38.28"], + deps = [":rustix-0.37.23"], ), "windows-gnu": dict( deps = [":windows-sys-0.48.0"], @@ -15093,56 +14329,31 @@ cargo.rust_library( alias( name = "test-log", - actual = ":test-log-0.2.14", + actual = ":test-log-0.2.12", visibility = ["PUBLIC"], ) http_archive( - name = "test-log-0.2.14.crate", - sha256 = "6159ab4116165c99fc88cce31f99fa2c9dbe08d3691cb38da02fc3b45f357d2b", - strip_prefix = "test-log-0.2.14", - urls = ["https://crates.io/api/v1/crates/test-log/0.2.14/download"], + name = "test-log-0.2.12.crate", + sha256 = "d9601d162c1d77e62c1ea0bc8116cd1caf143ce3af947536c3c9052a1677fe0c", + strip_prefix = "test-log-0.2.12", + urls = ["https://crates.io/api/v1/crates/test-log/0.2.12/download"], visibility = [], ) cargo.rust_library( - name = "test-log-0.2.14", - srcs = [":test-log-0.2.14.crate"], + name = "test-log-0.2.12", + srcs = [":test-log-0.2.12.crate"], crate = "test_log", - crate_root = "test-log-0.2.14.crate/src/lib.rs", - edition = "2021", - features = [ - "trace", - "tracing-subscriber", - ], - visibility = [], - deps = [ - ":test-log-macros-0.2.14", - ":tracing-subscriber-0.3.18", - ], -) - -http_archive( - name = "test-log-macros-0.2.14.crate", - sha256 = "7ba277e77219e9eea169e8508942db1bf5d8a41ff2db9b20aab5a5aadc9fa25d", - strip_prefix = "test-log-macros-0.2.14", - urls = ["https://crates.io/api/v1/crates/test-log-macros/0.2.14/download"], - visibility = [], -) - -cargo.rust_library( - name = "test-log-macros-0.2.14", - srcs = [":test-log-macros-0.2.14.crate"], - crate = "test_log_macros", - crate_root = "test-log-macros-0.2.14.crate/src/lib.rs", - edition = "2021", + crate_root = "test-log-0.2.12.crate/src/lib.rs", + edition = "2018", features = ["trace"], proc_macro = True, visibility = [], deps = [ - ":proc-macro2-1.0.70", + ":proc-macro2-1.0.66", ":quote-1.0.33", - ":syn-2.0.40", + 
":syn-1.0.109", ], ) @@ -15155,20 +14366,20 @@ cargo.rust_binary( visibility = [], deps = [ ":async-nats-0.31.0", - ":async-recursion-1.0.5", - ":async-trait-0.1.74", + ":async-recursion-1.0.4", + ":async-trait-0.1.73", ":axum-0.6.20", - ":base64-0.21.5", - ":blake3-1.5.0", + ":base64-0.21.2", + ":blake3-1.4.1", ":bollard-0.15.0", - ":bytes-1.5.0", - ":chrono-0.4.31", + ":bytes-1.4.0", + ":chrono-0.4.26", ":ciborium-0.2.1", - ":clap-4.4.11", + ":clap-4.3.24", ":color-eyre-0.6.2", - ":colored-2.1.0", - ":comfy-table-7.1.0", - ":config-0.13.4", + ":colored-2.0.4", + ":comfy-table-7.0.1", + ":config-0.13.3", ":console-0.15.7", ":convert_case-0.6.0", ":crossbeam-channel-0.5.8", @@ -15179,27 +14390,27 @@ cargo.rust_binary( ":diff-0.1.13", ":directories-5.0.1", ":docker-api-0.14.0", - ":dyn-clone-1.0.16", - ":flate2-1.0.28", - ":futures-0.3.29", + ":dyn-clone-1.0.13", + ":flate2-1.0.27", + ":futures-0.3.28", ":futures-lite-1.13.0", ":hex-0.4.3", - ":http-0.2.11", + ":http-0.2.9", ":hyper-0.14.27", ":hyperlocal-0.8.0", ":iftree-1.0.4", - ":indicatif-0.17.7", - ":indoc-2.0.4", + ":indicatif-0.17.6", + ":indoc-2.0.3", ":inquire-0.6.2", ":itertools-0.10.5", - ":jwt-simple-0.11.9", + ":jwt-simple-0.11.6", ":lazy_static-1.4.0", ":names-0.14.0", - ":nix-0.26.4", + ":nix-0.26.2", ":nkeys-0.2.0", ":num_cpus-1.16.0", - ":once_cell-1.19.0", - ":open-5.0.1", + ":once_cell-1.18.0", + ":open-5.0.0", ":opentelemetry-0.18.0", ":opentelemetry-otlp-0.11.0", ":opentelemetry-semantic-conventions-0.10.0", @@ -15207,53 +14418,54 @@ cargo.rust_binary( ":paste-1.0.14", ":pathdiff-0.2.1", ":petgraph-0.6.4", - ":pin-project-lite-0.2.13", + ":pin-project-lite-0.2.12", ":podman-api-0.10.0", + ":postcard-1.0.8", ":postgres-types-0.2.6", ":pretty_assertions_sorted-1.2.3", - ":proc-macro2-1.0.70", + ":proc-macro2-1.0.66", ":quote-1.0.33", ":rabbitmq-stream-client-0.3.0", ":rand-0.8.5", - ":refinery-0.8.11", - ":regex-1.10.2", + ":refinery-0.8.10", + ":regex-1.9.3", ":remain-0.2.11", - 
":reqwest-0.11.22", + ":reqwest-0.11.20", ":rust-s3-0.33.0", - ":rustls-0.21.10", + ":rustls-0.21.6", ":sea-orm-0.11.3", ":self-replace-1.3.5", - ":serde-1.0.193", - ":serde-aux-4.3.1", - ":serde_json-1.0.108", + ":serde-1.0.186", + ":serde-aux-4.2.0", + ":serde_json-1.0.105", ":serde_url_params-0.2.1", - ":serde_with-3.4.0", - ":serde_yaml-0.9.27", + ":serde_with-3.3.0", + ":serde_yaml-0.9.25", ":sodiumoxide-0.2.7", ":stream-cancel-0.8.1", ":strum-0.24.1", - ":syn-2.0.40", + ":syn-2.0.29", ":tar-0.4.40", - ":tempfile-3.8.1", - ":test-log-0.2.14", - ":thiserror-1.0.50", - ":tokio-1.35.0", - ":tokio-postgres-0.7.10", + ":tempfile-3.8.0", + ":test-log-0.2.12", + ":thiserror-1.0.47", + ":tokio-1.32.0", + ":tokio-postgres-0.7.9", ":tokio-serde-0.8.0", ":tokio-stream-0.1.14", ":tokio-test-0.4.3", ":tokio-tungstenite-0.18.0", - ":tokio-util-0.7.10", + ":tokio-util-0.7.8", ":tokio-vsock-0.4.0", - ":toml-0.7.8", + ":toml-0.7.6", ":tower-0.4.13", - ":tower-http-0.4.4", - ":tracing-0.1.40", + ":tower-http-0.4.3", + ":tracing-0.1.37", ":tracing-opentelemetry-0.18.0", - ":tracing-subscriber-0.3.18", - ":ulid-1.1.0", - ":url-2.5.0", - ":uuid-1.6.1", + ":tracing-subscriber-0.3.17", + ":ulid-1.0.0", + ":url-2.4.0", + ":uuid-1.4.1", ":vfs-0.9.0", ":vfs-tar-0.4.0", ":y-sync-0.3.1", @@ -15263,48 +14475,48 @@ cargo.rust_binary( alias( name = "thiserror", - actual = ":thiserror-1.0.50", + actual = ":thiserror-1.0.47", visibility = ["PUBLIC"], ) http_archive( - name = "thiserror-1.0.50.crate", - sha256 = "f9a7210f5c9a7156bb50aa36aed4c95afb51df0df00713949448cf9e97d382d2", - strip_prefix = "thiserror-1.0.50", - urls = ["https://crates.io/api/v1/crates/thiserror/1.0.50/download"], + name = "thiserror-1.0.47.crate", + sha256 = "97a802ec30afc17eee47b2855fc72e0c4cd62be9b4efe6591edde0ec5bd68d8f", + strip_prefix = "thiserror-1.0.47", + urls = ["https://crates.io/api/v1/crates/thiserror/1.0.47/download"], visibility = [], ) cargo.rust_library( - name = "thiserror-1.0.50", - srcs = 
[":thiserror-1.0.50.crate"], + name = "thiserror-1.0.47", + srcs = [":thiserror-1.0.47.crate"], crate = "thiserror", - crate_root = "thiserror-1.0.50.crate/src/lib.rs", + crate_root = "thiserror-1.0.47.crate/src/lib.rs", edition = "2021", visibility = [], - deps = [":thiserror-impl-1.0.50"], + deps = [":thiserror-impl-1.0.47"], ) http_archive( - name = "thiserror-impl-1.0.50.crate", - sha256 = "266b2e40bc00e5a6c09c3584011e08b06f123c00362c92b975ba9843aaaa14b8", - strip_prefix = "thiserror-impl-1.0.50", - urls = ["https://crates.io/api/v1/crates/thiserror-impl/1.0.50/download"], + name = "thiserror-impl-1.0.47.crate", + sha256 = "6bb623b56e39ab7dcd4b1b98bb6c8f8d907ed255b18de254088016b27a8ee19b", + strip_prefix = "thiserror-impl-1.0.47", + urls = ["https://crates.io/api/v1/crates/thiserror-impl/1.0.47/download"], visibility = [], ) cargo.rust_library( - name = "thiserror-impl-1.0.50", - srcs = [":thiserror-impl-1.0.50.crate"], + name = "thiserror-impl-1.0.47", + srcs = [":thiserror-impl-1.0.47.crate"], crate = "thiserror_impl", - crate_root = "thiserror-impl-1.0.50.crate/src/lib.rs", + crate_root = "thiserror-impl-1.0.47.crate/src/lib.rs", edition = "2021", proc_macro = True, visibility = [], deps = [ - ":proc-macro2-1.0.70", + ":proc-macro2-1.0.66", ":quote-1.0.33", - ":syn-2.0.40", + ":syn-2.0.29", ], ) @@ -15325,23 +14537,49 @@ cargo.rust_library( visibility = [], deps = [ ":cfg-if-1.0.0", - ":once_cell-1.19.0", + ":once_cell-1.18.0", ], ) http_archive( - name = "time-0.3.30.crate", - sha256 = "c4a34ab300f2dee6e562c10a046fc05e358b29f9bf92277f30c3c8d82275f6f5", - strip_prefix = "time-0.3.30", - urls = ["https://crates.io/api/v1/crates/time/0.3.30/download"], + name = "time-0.1.45.crate", + sha256 = "1b797afad3f312d1c66a56d11d0316f916356d11bd158fbc6ca6389ff6bf805a", + strip_prefix = "time-0.1.45", + urls = ["https://crates.io/api/v1/crates/time/0.1.45/download"], + visibility = [], +) + +cargo.rust_library( + name = "time-0.1.45", + srcs = [":time-0.1.45.crate"], + 
crate = "time", + crate_root = "time-0.1.45.crate/src/lib.rs", + edition = "2015", + platform = { + "windows-gnu": dict( + deps = [":winapi-0.3.9"], + ), + "windows-msvc": dict( + deps = [":winapi-0.3.9"], + ), + }, + visibility = [], + deps = [":libc-0.2.147"], +) + +http_archive( + name = "time-0.3.27.crate", + sha256 = "0bb39ee79a6d8de55f48f2293a830e040392f1c5f16e336bdd1788cd0aadce07", + strip_prefix = "time-0.3.27", + urls = ["https://crates.io/api/v1/crates/time/0.3.27/download"], visibility = [], ) cargo.rust_library( - name = "time-0.3.30", - srcs = [":time-0.3.30.crate"], + name = "time-0.3.27", + srcs = [":time-0.3.27.crate"], crate = "time", - crate_root = "time-0.3.30.crate/src/lib.rs", + crate_root = "time-0.3.27.crate/src/lib.rs", edition = "2021", features = [ "alloc", @@ -15355,45 +14593,44 @@ cargo.rust_library( ], visibility = [], deps = [ - ":deranged-0.3.10", - ":itoa-1.0.10", - ":powerfmt-0.2.0", - ":serde-1.0.193", - ":time-core-0.1.2", - ":time-macros-0.2.15", + ":deranged-0.3.8", + ":itoa-1.0.9", + ":serde-1.0.186", + ":time-core-0.1.1", + ":time-macros-0.2.13", ], ) http_archive( - name = "time-core-0.1.2.crate", - sha256 = "ef927ca75afb808a4d64dd374f00a2adf8d0fcff8e7b184af886c3c87ec4a3f3", - strip_prefix = "time-core-0.1.2", - urls = ["https://crates.io/api/v1/crates/time-core/0.1.2/download"], + name = "time-core-0.1.1.crate", + sha256 = "7300fbefb4dadc1af235a9cef3737cea692a9d97e1b9cbcd4ebdae6f8868e6fb", + strip_prefix = "time-core-0.1.1", + urls = ["https://crates.io/api/v1/crates/time-core/0.1.1/download"], visibility = [], ) cargo.rust_library( - name = "time-core-0.1.2", - srcs = [":time-core-0.1.2.crate"], + name = "time-core-0.1.1", + srcs = [":time-core-0.1.1.crate"], crate = "time_core", - crate_root = "time-core-0.1.2.crate/src/lib.rs", + crate_root = "time-core-0.1.1.crate/src/lib.rs", edition = "2021", visibility = [], ) http_archive( - name = "time-macros-0.2.15.crate", - sha256 = 
"4ad70d68dba9e1f8aceda7aa6711965dfec1cac869f311a51bd08b3a2ccbce20", - strip_prefix = "time-macros-0.2.15", - urls = ["https://crates.io/api/v1/crates/time-macros/0.2.15/download"], + name = "time-macros-0.2.13.crate", + sha256 = "733d258752e9303d392b94b75230d07b0b9c489350c69b851fc6c065fde3e8f9", + strip_prefix = "time-macros-0.2.13", + urls = ["https://crates.io/api/v1/crates/time-macros/0.2.13/download"], visibility = [], ) cargo.rust_library( - name = "time-macros-0.2.15", - srcs = [":time-macros-0.2.15.crate"], + name = "time-macros-0.2.13", + srcs = [":time-macros-0.2.13.crate"], crate = "time_macros", - crate_root = "time-macros-0.2.15.crate/src/lib.rs", + crate_root = "time-macros-0.2.13.crate/src/lib.rs", edition = "2021", features = [ "formatting", @@ -15402,7 +14639,7 @@ cargo.rust_library( ], proc_macro = True, visibility = [], - deps = [":time-core-0.1.2"], + deps = [":time-core-0.1.1"], ) http_archive( @@ -15447,23 +14684,23 @@ cargo.rust_library( alias( name = "tokio", - actual = ":tokio-1.35.0", + actual = ":tokio-1.32.0", visibility = ["PUBLIC"], ) http_archive( - name = "tokio-1.35.0.crate", - sha256 = "841d45b238a16291a4e1584e61820b8ae57d696cc5015c459c229ccc6990cc1c", - strip_prefix = "tokio-1.35.0", - urls = ["https://crates.io/api/v1/crates/tokio/1.35.0/download"], + name = "tokio-1.32.0.crate", + sha256 = "17ed6077ed6cd6c74735e21f37eb16dc3935f96878b1fe961074089cc80893f9", + strip_prefix = "tokio-1.32.0", + urls = ["https://crates.io/api/v1/crates/tokio/1.32.0/download"], visibility = [], ) cargo.rust_library( - name = "tokio-1.35.0", - srcs = [":tokio-1.35.0.crate"], + name = "tokio-1.32.0", + srcs = [":tokio-1.32.0.crate"], crate = "tokio", - crate_root = "tokio-1.35.0.crate/src/lib.rs", + crate_root = "tokio-1.32.0.crate/src/lib.rs", edition = "2021", features = [ "bytes", @@ -15494,41 +14731,41 @@ cargo.rust_library( platform = { "linux-arm64": dict( deps = [ - ":libc-0.2.151", + ":libc-0.2.147", ":signal-hook-registry-1.4.1", - 
":socket2-0.5.5", + ":socket2-0.5.3", ], ), "linux-x86_64": dict( deps = [ - ":libc-0.2.151", + ":libc-0.2.147", ":signal-hook-registry-1.4.1", - ":socket2-0.5.5", + ":socket2-0.5.3", ], ), "macos-arm64": dict( deps = [ - ":libc-0.2.151", + ":libc-0.2.147", ":signal-hook-registry-1.4.1", - ":socket2-0.5.5", + ":socket2-0.5.3", ], ), "macos-x86_64": dict( deps = [ - ":libc-0.2.151", + ":libc-0.2.147", ":signal-hook-registry-1.4.1", - ":socket2-0.5.5", + ":socket2-0.5.3", ], ), "windows-gnu": dict( deps = [ - ":socket2-0.5.5", + ":socket2-0.5.3", ":windows-sys-0.48.0", ], ), "windows-msvc": dict( deps = [ - ":socket2-0.5.5", + ":socket2-0.5.3", ":windows-sys-0.48.0", ], ), @@ -15539,12 +14776,12 @@ cargo.rust_library( ], visibility = [], deps = [ - ":bytes-1.5.0", - ":mio-0.8.10", + ":bytes-1.4.0", + ":mio-0.8.8", ":num_cpus-1.16.0", ":parking_lot-0.12.1", - ":pin-project-lite-0.2.13", - ":tokio-macros-2.2.0", + ":pin-project-lite-0.2.12", + ":tokio-macros-2.1.0", ":tracing", ], ) @@ -15565,55 +14802,56 @@ cargo.rust_library( edition = "2018", visibility = [], deps = [ - ":pin-project-lite-0.2.13", - ":tokio-1.35.0", + ":pin-project-lite-0.2.12", + ":tokio-1.32.0", ], ) http_archive( - name = "tokio-macros-2.2.0.crate", - sha256 = "5b8a1e28f2deaa14e508979454cb3a223b10b938b45af148bc0986de36f1923b", - strip_prefix = "tokio-macros-2.2.0", - urls = ["https://crates.io/api/v1/crates/tokio-macros/2.2.0/download"], + name = "tokio-macros-2.1.0.crate", + sha256 = "630bdcf245f78637c13ec01ffae6187cca34625e8c63150d424b59e55af2675e", + strip_prefix = "tokio-macros-2.1.0", + urls = ["https://crates.io/api/v1/crates/tokio-macros/2.1.0/download"], visibility = [], ) cargo.rust_library( - name = "tokio-macros-2.2.0", - srcs = [":tokio-macros-2.2.0.crate"], + name = "tokio-macros-2.1.0", + srcs = [":tokio-macros-2.1.0.crate"], crate = "tokio_macros", - crate_root = "tokio-macros-2.2.0.crate/src/lib.rs", - edition = "2021", + crate_root = "tokio-macros-2.1.0.crate/src/lib.rs", + 
edition = "2018", proc_macro = True, visibility = [], deps = [ - ":proc-macro2-1.0.70", + ":proc-macro2-1.0.66", ":quote-1.0.33", - ":syn-2.0.40", + ":syn-2.0.29", ], ) alias( name = "tokio-postgres", - actual = ":tokio-postgres-0.7.10", + actual = ":tokio-postgres-0.7.9", visibility = ["PUBLIC"], ) http_archive( - name = "tokio-postgres-0.7.10.crate", - sha256 = "d340244b32d920260ae7448cb72b6e238bddc3d4f7603394e7dd46ed8e48f5b8", - strip_prefix = "tokio-postgres-0.7.10", - urls = ["https://crates.io/api/v1/crates/tokio-postgres/0.7.10/download"], + name = "tokio-postgres-0.7.9.crate", + sha256 = "000387915083ea6406ee44b50ca74813aba799fe682a7689e382bf9e13b74ce9", + strip_prefix = "tokio-postgres-0.7.9", + urls = ["https://crates.io/api/v1/crates/tokio-postgres/0.7.9/download"], visibility = [], ) cargo.rust_library( - name = "tokio-postgres-0.7.10", - srcs = [":tokio-postgres-0.7.10.crate"], + name = "tokio-postgres-0.7.9", + srcs = [":tokio-postgres-0.7.9.crate"], crate = "tokio_postgres", - crate_root = "tokio-postgres-0.7.10.crate/src/lib.rs", + crate_root = "tokio-postgres-0.7.9.crate/src/lib.rs", edition = "2018", features = [ + "array-impls", "default", "runtime", "with-chrono-0_4", @@ -15621,42 +14859,42 @@ cargo.rust_library( ], platform = { "linux-arm64": dict( - deps = [":socket2-0.5.5"], + deps = [":socket2-0.5.3"], ), "linux-x86_64": dict( - deps = [":socket2-0.5.5"], + deps = [":socket2-0.5.3"], ), "macos-arm64": dict( - deps = [":socket2-0.5.5"], + deps = [":socket2-0.5.3"], ), "macos-x86_64": dict( - deps = [":socket2-0.5.5"], + deps = [":socket2-0.5.3"], ), "windows-gnu": dict( - deps = [":socket2-0.5.5"], + deps = [":socket2-0.5.3"], ), "windows-msvc": dict( - deps = [":socket2-0.5.5"], + deps = [":socket2-0.5.3"], ), }, visibility = [], deps = [ - ":async-trait-0.1.74", - ":byteorder-1.5.0", - ":bytes-1.5.0", + ":async-trait-0.1.73", + ":byteorder-1.4.3", + ":bytes-1.4.0", ":fallible-iterator-0.2.0", ":futures-channel-0.3.29", 
":futures-util-0.3.29", ":log-0.4.20", ":parking_lot-0.12.1", - ":percent-encoding-2.3.1", + ":percent-encoding-2.3.0", ":phf-0.11.2", - ":pin-project-lite-0.2.13", + ":pin-project-lite-0.2.12", ":postgres-protocol-0.6.6", ":postgres-types-0.2.6", ":rand-0.8.5", - ":tokio-1.35.0", - ":tokio-util-0.7.10", + ":tokio-1.32.0", + ":tokio-util-0.7.8", ":whoami-1.4.1", ], ) @@ -15679,7 +14917,7 @@ cargo.rust_library( deps = [ ":pin-project-1.1.3", ":rand-0.8.5", - ":tokio-1.35.0", + ":tokio-1.32.0", ], ) @@ -15704,9 +14942,9 @@ cargo.rust_library( ], visibility = [], deps = [ - ":rustls-0.20.9", - ":tokio-1.35.0", - ":webpki-0.22.4", + ":rustls-0.20.8", + ":tokio-1.32.0", + ":webpki-0.22.0", ], ) @@ -15731,8 +14969,8 @@ cargo.rust_library( ], visibility = [], deps = [ - ":rustls-0.21.10", - ":tokio-1.35.0", + ":rustls-0.21.6", + ":tokio-1.32.0", ], ) @@ -15764,13 +15002,13 @@ cargo.rust_library( ], visibility = [], deps = [ - ":bytes-1.5.0", - ":educe-0.4.23", + ":bytes-1.4.0", + ":educe-0.4.22", ":futures-core-0.3.29", ":futures-sink-0.3.29", ":pin-project-1.1.3", - ":serde-1.0.193", - ":serde_json-1.0.108", + ":serde-1.0.186", + ":serde_json-1.0.105", ], ) @@ -15802,8 +15040,8 @@ cargo.rust_library( visibility = [], deps = [ ":futures-core-0.3.29", - ":pin-project-lite-0.2.13", - ":tokio-1.35.0", + ":pin-project-lite-0.2.12", + ":tokio-1.32.0", ], ) @@ -15830,9 +15068,9 @@ cargo.rust_library( visibility = [], deps = [ ":async-stream-0.3.5", - ":bytes-1.5.0", + ":bytes-1.4.0", ":futures-core-0.3.29", - ":tokio-1.35.0", + ":tokio-1.32.0", ":tokio-stream-0.1.14", ], ) @@ -15867,24 +15105,24 @@ cargo.rust_library( deps = [ ":futures-util-0.3.29", ":log-0.4.20", - ":tokio-1.35.0", + ":tokio-1.32.0", ":tungstenite-0.18.0", ], ) http_archive( - name = "tokio-tungstenite-0.20.1.crate", - sha256 = "212d5dcb2a1ce06d81107c3d0ffa3121fe974b73f068c8282cb1c32328113b6c", - strip_prefix = "tokio-tungstenite-0.20.1", - urls = 
["https://crates.io/api/v1/crates/tokio-tungstenite/0.20.1/download"], + name = "tokio-tungstenite-0.20.0.crate", + sha256 = "2b2dbec703c26b00d74844519606ef15d09a7d6857860f84ad223dec002ddea2", + strip_prefix = "tokio-tungstenite-0.20.0", + urls = ["https://crates.io/api/v1/crates/tokio-tungstenite/0.20.0/download"], visibility = [], ) cargo.rust_library( - name = "tokio-tungstenite-0.20.1", - srcs = [":tokio-tungstenite-0.20.1.crate"], + name = "tokio-tungstenite-0.20.0", + srcs = [":tokio-tungstenite-0.20.0.crate"], crate = "tokio_tungstenite", - crate_root = "tokio-tungstenite-0.20.1.crate/src/lib.rs", + crate_root = "tokio-tungstenite-0.20.0.crate/src/lib.rs", edition = "2018", features = [ "connect", @@ -15896,30 +15134,30 @@ cargo.rust_library( deps = [ ":futures-util-0.3.29", ":log-0.4.20", - ":tokio-1.35.0", - ":tungstenite-0.20.1", + ":tokio-1.32.0", + ":tungstenite-0.20.0", ], ) alias( name = "tokio-util", - actual = ":tokio-util-0.7.10", + actual = ":tokio-util-0.7.8", visibility = ["PUBLIC"], ) http_archive( - name = "tokio-util-0.7.10.crate", - sha256 = "5419f34732d9eb6ee4c3578b7989078579b7f039cbbb9ca2c4da015749371e15", - strip_prefix = "tokio-util-0.7.10", - urls = ["https://crates.io/api/v1/crates/tokio-util/0.7.10/download"], + name = "tokio-util-0.7.8.crate", + sha256 = "806fe8c2c87eccc8b3267cbae29ed3ab2d0bd37fca70ab622e46aaa9375ddb7d", + strip_prefix = "tokio-util-0.7.8", + urls = ["https://crates.io/api/v1/crates/tokio-util/0.7.8/download"], visibility = [], ) cargo.rust_library( - name = "tokio-util-0.7.10", - srcs = [":tokio-util-0.7.10.crate"], + name = "tokio-util-0.7.8", + srcs = [":tokio-util-0.7.8.crate"], crate = "tokio_util", - crate_root = "tokio-util-0.7.10.crate/src/lib.rs", + crate_root = "tokio-util-0.7.8.crate/src/lib.rs", edition = "2021", features = [ "codec", @@ -15929,12 +15167,12 @@ cargo.rust_library( ], visibility = [], deps = [ - ":bytes-1.5.0", + ":bytes-1.4.0", ":futures-core-0.3.29", ":futures-sink-0.3.29", - 
":pin-project-lite-0.2.13", - ":tokio-1.35.0", - ":tracing-0.1.40", + ":pin-project-lite-0.2.12", + ":tokio-1.32.0", + ":tracing-0.1.37", ], ) @@ -15960,10 +15198,10 @@ cargo.rust_library( edition = "2018", visibility = [], deps = [ - ":bytes-1.5.0", - ":futures-0.3.29", - ":libc-0.2.151", - ":tokio-1.35.0", + ":bytes-1.4.0", + ":futures-0.3.28", + ":libc-0.2.147", + ":tokio-1.32.0", ":vsock-0.3.0", ], ) @@ -15984,28 +15222,28 @@ cargo.rust_library( edition = "2018", features = ["default"], visibility = [], - deps = [":serde-1.0.193"], + deps = [":serde-1.0.186"], ) alias( name = "toml", - actual = ":toml-0.7.8", + actual = ":toml-0.7.6", visibility = ["PUBLIC"], ) http_archive( - name = "toml-0.7.8.crate", - sha256 = "dd79e69d3b627db300ff956027cc6c3798cef26d22526befdfcd12feeb6d2257", - strip_prefix = "toml-0.7.8", - urls = ["https://crates.io/api/v1/crates/toml/0.7.8/download"], + name = "toml-0.7.6.crate", + sha256 = "c17e963a819c331dcacd7ab957d80bc2b9a9c1e71c804826d2f283dd65306542", + strip_prefix = "toml-0.7.6", + urls = ["https://crates.io/api/v1/crates/toml/0.7.6/download"], visibility = [], ) cargo.rust_library( - name = "toml-0.7.8", - srcs = [":toml-0.7.8.crate"], + name = "toml-0.7.6", + srcs = [":toml-0.7.6.crate"], crate = "toml", - crate_root = "toml-0.7.8.crate/src/lib.rs", + crate_root = "toml-0.7.6.crate/src/lib.rs", edition = "2021", features = [ "default", @@ -16014,45 +15252,45 @@ cargo.rust_library( ], visibility = [], deps = [ - ":serde-1.0.193", - ":serde_spanned-0.6.4", - ":toml_datetime-0.6.5", - ":toml_edit-0.19.15", + ":serde-1.0.186", + ":serde_spanned-0.6.3", + ":toml_datetime-0.6.3", + ":toml_edit-0.19.14", ], ) http_archive( - name = "toml_datetime-0.6.5.crate", - sha256 = "3550f4e9685620ac18a50ed434eb3aec30db8ba93b0287467bca5826ea25baf1", - strip_prefix = "toml_datetime-0.6.5", - urls = ["https://crates.io/api/v1/crates/toml_datetime/0.6.5/download"], + name = "toml_datetime-0.6.3.crate", + sha256 = 
"7cda73e2f1397b1262d6dfdcef8aafae14d1de7748d66822d3bfeeb6d03e5e4b", + strip_prefix = "toml_datetime-0.6.3", + urls = ["https://crates.io/api/v1/crates/toml_datetime/0.6.3/download"], visibility = [], ) cargo.rust_library( - name = "toml_datetime-0.6.5", - srcs = [":toml_datetime-0.6.5.crate"], + name = "toml_datetime-0.6.3", + srcs = [":toml_datetime-0.6.3.crate"], crate = "toml_datetime", - crate_root = "toml_datetime-0.6.5.crate/src/lib.rs", + crate_root = "toml_datetime-0.6.3.crate/src/lib.rs", edition = "2021", features = ["serde"], visibility = [], - deps = [":serde-1.0.193"], + deps = [":serde-1.0.186"], ) http_archive( - name = "toml_edit-0.19.15.crate", - sha256 = "1b5bb770da30e5cbfde35a2d7b9b8a2c4b8ef89548a7a6aeab5c9a576e3e7421", - strip_prefix = "toml_edit-0.19.15", - urls = ["https://crates.io/api/v1/crates/toml_edit/0.19.15/download"], + name = "toml_edit-0.19.14.crate", + sha256 = "f8123f27e969974a3dfba720fdb560be359f57b44302d280ba72e76a74480e8a", + strip_prefix = "toml_edit-0.19.14", + urls = ["https://crates.io/api/v1/crates/toml_edit/0.19.14/download"], visibility = [], ) cargo.rust_library( - name = "toml_edit-0.19.15", - srcs = [":toml_edit-0.19.15.crate"], + name = "toml_edit-0.19.14", + srcs = [":toml_edit-0.19.14.crate"], crate = "toml_edit", - crate_root = "toml_edit-0.19.15.crate/src/lib.rs", + crate_root = "toml_edit-0.19.14.crate/src/lib.rs", edition = "2021", features = [ "default", @@ -16060,34 +15298,11 @@ cargo.rust_library( ], visibility = [], deps = [ - ":indexmap-2.1.0", - ":serde-1.0.193", - ":serde_spanned-0.6.4", - ":toml_datetime-0.6.5", - ":winnow-0.5.28", - ], -) - -http_archive( - name = "toml_edit-0.20.7.crate", - sha256 = "70f427fce4d84c72b5b732388bf4a9f4531b53f74e2887e3ecb2481f68f66d81", - strip_prefix = "toml_edit-0.20.7", - urls = ["https://crates.io/api/v1/crates/toml_edit/0.20.7/download"], - visibility = [], -) - -cargo.rust_library( - name = "toml_edit-0.20.7", - srcs = [":toml_edit-0.20.7.crate"], - crate = 
"toml_edit", - crate_root = "toml_edit-0.20.7.crate/src/lib.rs", - edition = "2021", - features = ["default"], - visibility = [], - deps = [ - ":indexmap-2.1.0", - ":toml_datetime-0.6.5", - ":winnow-0.5.28", + ":indexmap-2.0.0", + ":serde-1.0.186", + ":serde_spanned-0.6.3", + ":toml_datetime-0.6.3", + ":winnow-0.5.15", ], ) @@ -16139,27 +15354,27 @@ cargo.rust_library( visibility = [], deps = [ ":async-stream-0.3.5", - ":async-trait-0.1.74", + ":async-trait-0.1.73", ":axum-0.6.20", ":base64-0.13.1", - ":bytes-1.5.0", + ":bytes-1.4.0", ":futures-core-0.3.29", ":futures-util-0.3.29", - ":h2-0.3.22", - ":http-0.2.11", - ":http-body-0.4.6", + ":h2-0.3.21", + ":http-0.2.9", + ":http-body-0.4.5", ":hyper-0.14.27", ":hyper-timeout-0.4.1", - ":percent-encoding-2.3.1", + ":percent-encoding-2.3.0", ":pin-project-1.1.3", ":prost-derive-0.11.9", - ":tokio-1.35.0", + ":tokio-1.32.0", ":tokio-stream-0.1.14", - ":tokio-util-0.7.10", + ":tokio-util-0.7.8", ":tower-0.4.13", ":tower-layer-0.3.2", ":tower-service-0.3.2", - ":tracing-0.1.40", + ":tracing-0.1.37", ":tracing-futures-0.2.5", ], ) @@ -16187,7 +15402,7 @@ cargo.rust_library( visibility = [], deps = [ ":prettyplease-0.1.25", - ":proc-macro2-1.0.70", + ":proc-macro2-1.0.66", ":prost-build-0.11.9", ":quote-1.0.33", ":syn-1.0.109", @@ -16244,36 +15459,36 @@ cargo.rust_library( ":futures-util-0.3.29", ":indexmap-1.9.3", ":pin-project-1.1.3", - ":pin-project-lite-0.2.13", + ":pin-project-lite-0.2.12", ":rand-0.8.5", ":slab-0.4.9", - ":tokio-1.35.0", - ":tokio-util-0.7.10", + ":tokio-1.32.0", + ":tokio-util-0.7.8", ":tower-layer-0.3.2", ":tower-service-0.3.2", - ":tracing-0.1.40", + ":tracing-0.1.37", ], ) alias( name = "tower-http", - actual = ":tower-http-0.4.4", + actual = ":tower-http-0.4.3", visibility = ["PUBLIC"], ) http_archive( - name = "tower-http-0.4.4.crate", - sha256 = "61c5bb1d698276a2443e5ecfabc1008bf15a36c12e6a7176e7bf089ea9131140", - strip_prefix = "tower-http-0.4.4", - urls = 
["https://crates.io/api/v1/crates/tower-http/0.4.4/download"], + name = "tower-http-0.4.3.crate", + sha256 = "55ae70283aba8d2a8b411c695c437fe25b8b5e44e23e780662002fc72fb47a82", + strip_prefix = "tower-http-0.4.3", + urls = ["https://crates.io/api/v1/crates/tower-http/0.4.3/download"], visibility = [], ) cargo.rust_library( - name = "tower-http-0.4.4", - srcs = [":tower-http-0.4.4.crate"], + name = "tower-http-0.4.3", + srcs = [":tower-http-0.4.3.crate"], crate = "tower_http", - crate_root = "tower-http-0.4.4.crate/src/lib.rs", + crate_root = "tower-http-0.4.3.crate/src/lib.rs", edition = "2018", features = [ "cors", @@ -16283,17 +15498,17 @@ cargo.rust_library( ], visibility = [], deps = [ - ":bitflags-2.4.1", - ":bytes-1.5.0", + ":bitflags-2.4.0", + ":bytes-1.4.0", ":futures-core-0.3.29", ":futures-util-0.3.29", - ":http-0.2.11", - ":http-body-0.4.6", + ":http-0.2.9", + ":http-body-0.4.5", ":http-range-header-0.3.1", - ":pin-project-lite-0.2.13", + ":pin-project-lite-0.2.12", ":tower-layer-0.3.2", ":tower-service-0.3.2", - ":tracing-0.1.40", + ":tracing-0.1.37", ], ) @@ -16333,23 +15548,23 @@ cargo.rust_library( alias( name = "tracing", - actual = ":tracing-0.1.40", + actual = ":tracing-0.1.37", visibility = ["PUBLIC"], ) http_archive( - name = "tracing-0.1.40.crate", - sha256 = "c3523ab5a71916ccf420eebdf5521fcef02141234bbc0b8a49f2fdc4544364ef", - strip_prefix = "tracing-0.1.40", - urls = ["https://crates.io/api/v1/crates/tracing/0.1.40/download"], + name = "tracing-0.1.37.crate", + sha256 = "8ce8c33a8d48bd45d624a6e523445fd21ec13d3653cd51f681abf67418f54eb8", + strip_prefix = "tracing-0.1.37", + urls = ["https://crates.io/api/v1/crates/tracing/0.1.37/download"], visibility = [], ) cargo.rust_library( - name = "tracing-0.1.40", - srcs = [":tracing-0.1.40.crate"], + name = "tracing-0.1.37", + srcs = [":tracing-0.1.37.crate"], crate = "tracing", - crate_root = "tracing-0.1.40.crate/src/lib.rs", + crate_root = "tracing-0.1.37.crate/src/lib.rs", edition = "2018", 
features = [ "attributes", @@ -16360,49 +15575,50 @@ cargo.rust_library( ], visibility = [], deps = [ + ":cfg-if-1.0.0", ":log-0.4.20", - ":pin-project-lite-0.2.13", - ":tracing-attributes-0.1.27", - ":tracing-core-0.1.32", + ":pin-project-lite-0.2.12", + ":tracing-attributes-0.1.26", + ":tracing-core-0.1.31", ], ) http_archive( - name = "tracing-attributes-0.1.27.crate", - sha256 = "34704c8d6ebcbc939824180af020566b01a7c01f80641264eba0999f6c2b6be7", - strip_prefix = "tracing-attributes-0.1.27", - urls = ["https://crates.io/api/v1/crates/tracing-attributes/0.1.27/download"], + name = "tracing-attributes-0.1.26.crate", + sha256 = "5f4f31f56159e98206da9efd823404b79b6ef3143b4a7ab76e67b1751b25a4ab", + strip_prefix = "tracing-attributes-0.1.26", + urls = ["https://crates.io/api/v1/crates/tracing-attributes/0.1.26/download"], visibility = [], ) cargo.rust_library( - name = "tracing-attributes-0.1.27", - srcs = [":tracing-attributes-0.1.27.crate"], + name = "tracing-attributes-0.1.26", + srcs = [":tracing-attributes-0.1.26.crate"], crate = "tracing_attributes", - crate_root = "tracing-attributes-0.1.27.crate/src/lib.rs", + crate_root = "tracing-attributes-0.1.26.crate/src/lib.rs", edition = "2018", proc_macro = True, visibility = [], deps = [ - ":proc-macro2-1.0.70", + ":proc-macro2-1.0.66", ":quote-1.0.33", - ":syn-2.0.40", + ":syn-2.0.29", ], ) http_archive( - name = "tracing-core-0.1.32.crate", - sha256 = "c06d3da6113f116aaee68e4d601191614c9053067f9ab7f6edbcb161237daa54", - strip_prefix = "tracing-core-0.1.32", - urls = ["https://crates.io/api/v1/crates/tracing-core/0.1.32/download"], + name = "tracing-core-0.1.31.crate", + sha256 = "0955b8137a1df6f1a2e9a37d8a6656291ff0297c1a97c24e0d8425fe2312f79a", + strip_prefix = "tracing-core-0.1.31", + urls = ["https://crates.io/api/v1/crates/tracing-core/0.1.31/download"], visibility = [], ) cargo.rust_library( - name = "tracing-core-0.1.32", - srcs = [":tracing-core-0.1.32.crate"], + name = "tracing-core-0.1.31", + srcs = 
[":tracing-core-0.1.31.crate"], crate = "tracing_core", - crate_root = "tracing-core-0.1.32.crate/src/lib.rs", + crate_root = "tracing-core-0.1.31.crate/src/lib.rs", edition = "2018", features = [ "default", @@ -16411,7 +15627,7 @@ cargo.rust_library( "valuable", ], visibility = [], - deps = [":once_cell-1.19.0"], + deps = [":once_cell-1.18.0"], ) http_archive( @@ -16434,8 +15650,8 @@ cargo.rust_library( ], visibility = [], deps = [ - ":tracing-0.1.40", - ":tracing-subscriber-0.3.18", + ":tracing-0.1.37", + ":tracing-subscriber-0.3.17", ], ) @@ -16462,45 +15678,23 @@ cargo.rust_library( visibility = [], deps = [ ":pin-project-1.1.3", - ":tracing-0.1.40", - ], -) - -http_archive( - name = "tracing-log-0.1.4.crate", - sha256 = "f751112709b4e791d8ce53e32c4ed2d353565a795ce84da2285393f41557bdf2", - strip_prefix = "tracing-log-0.1.4", - urls = ["https://crates.io/api/v1/crates/tracing-log/0.1.4/download"], - visibility = [], -) - -cargo.rust_library( - name = "tracing-log-0.1.4", - srcs = [":tracing-log-0.1.4.crate"], - crate = "tracing_log", - crate_root = "tracing-log-0.1.4.crate/src/lib.rs", - edition = "2018", - visibility = [], - deps = [ - ":log-0.4.20", - ":once_cell-1.19.0", - ":tracing-core-0.1.32", + ":tracing-0.1.37", ], ) http_archive( - name = "tracing-log-0.2.0.crate", - sha256 = "ee855f1f400bd0e5c02d150ae5de3840039a3f54b025156404e34c23c03f47c3", - strip_prefix = "tracing-log-0.2.0", - urls = ["https://crates.io/api/v1/crates/tracing-log/0.2.0/download"], + name = "tracing-log-0.1.3.crate", + sha256 = "78ddad33d2d10b1ed7eb9d1f518a5674713876e97e5bb9b7345a7984fbb4f922", + strip_prefix = "tracing-log-0.1.3", + urls = ["https://crates.io/api/v1/crates/tracing-log/0.1.3/download"], visibility = [], ) cargo.rust_library( - name = "tracing-log-0.2.0", - srcs = [":tracing-log-0.2.0.crate"], + name = "tracing-log-0.1.3", + srcs = [":tracing-log-0.1.3.crate"], crate = "tracing_log", - crate_root = "tracing-log-0.2.0.crate/src/lib.rs", + crate_root = 
"tracing-log-0.1.3.crate/src/lib.rs", edition = "2018", features = [ "log-tracer", @@ -16508,9 +15702,9 @@ cargo.rust_library( ], visibility = [], deps = [ + ":lazy_static-1.4.0", ":log-0.4.20", - ":once_cell-1.19.0", - ":tracing-core-0.1.32", + ":tracing-core-0.1.31", ], ) @@ -16552,34 +15746,34 @@ cargo.rust_library( ], visibility = [], deps = [ - ":once_cell-1.19.0", + ":once_cell-1.18.0", ":opentelemetry-0.18.0", - ":tracing-0.1.40", - ":tracing-core-0.1.32", - ":tracing-log-0.1.4", - ":tracing-subscriber-0.3.18", + ":tracing-0.1.37", + ":tracing-core-0.1.31", + ":tracing-log-0.1.3", + ":tracing-subscriber-0.3.17", ], ) alias( name = "tracing-subscriber", - actual = ":tracing-subscriber-0.3.18", + actual = ":tracing-subscriber-0.3.17", visibility = ["PUBLIC"], ) http_archive( - name = "tracing-subscriber-0.3.18.crate", - sha256 = "ad0f048c97dbd9faa9b7df56362b8ebcaa52adb06b498c050d2f4e32f90a7a8b", - strip_prefix = "tracing-subscriber-0.3.18", - urls = ["https://crates.io/api/v1/crates/tracing-subscriber/0.3.18/download"], + name = "tracing-subscriber-0.3.17.crate", + sha256 = "30a651bc37f915e81f087d86e62a18eec5f79550c7faff886f7090b4ea757c77", + strip_prefix = "tracing-subscriber-0.3.17", + urls = ["https://crates.io/api/v1/crates/tracing-subscriber/0.3.17/download"], visibility = [], ) cargo.rust_library( - name = "tracing-subscriber-0.3.18", - srcs = [":tracing-subscriber-0.3.18.crate"], + name = "tracing-subscriber-0.3.17", + srcs = [":tracing-subscriber-0.3.17.crate"], crate = "tracing_subscriber", - crate_root = "tracing-subscriber-0.3.18.crate/src/lib.rs", + crate_root = "tracing-subscriber-0.3.17.crate/src/lib.rs", edition = "2018", features = [ "alloc", @@ -16603,30 +15797,30 @@ cargo.rust_library( deps = [ ":matchers-0.1.0", ":nu-ansi-term-0.46.0", - ":once_cell-1.19.0", - ":regex-1.10.2", - ":sharded-slab-0.1.7", - ":smallvec-1.11.2", + ":once_cell-1.18.0", + ":regex-1.9.3", + ":sharded-slab-0.1.4", + ":smallvec-1.11.0", ":thread_local-1.1.7", - 
":tracing-0.1.40", - ":tracing-core-0.1.32", - ":tracing-log-0.2.0", + ":tracing-0.1.37", + ":tracing-core-0.1.31", + ":tracing-log-0.1.3", ], ) http_archive( - name = "try-lock-0.2.5.crate", - sha256 = "e421abadd41a4225275504ea4d6566923418b7f05506fbc9c0fe86ba7396114b", - strip_prefix = "try-lock-0.2.5", - urls = ["https://crates.io/api/v1/crates/try-lock/0.2.5/download"], + name = "try-lock-0.2.4.crate", + sha256 = "3528ecfd12c466c6f163363caf2d02a71161dd5e1cc6ae7b34207ea2d42d81ed", + strip_prefix = "try-lock-0.2.4", + urls = ["https://crates.io/api/v1/crates/try-lock/0.2.4/download"], visibility = [], ) cargo.rust_library( - name = "try-lock-0.2.5", - srcs = [":try-lock-0.2.5.crate"], + name = "try-lock-0.2.4", + srcs = [":try-lock-0.2.4.crate"], crate = "try_lock", - crate_root = "try-lock-0.2.5.crate/src/lib.rs", + crate_root = "try-lock-0.2.4.crate/src/lib.rs", edition = "2015", visibility = [], ) @@ -16656,32 +15850,32 @@ cargo.rust_library( visibility = [], deps = [ ":base64-0.13.1", - ":byteorder-1.5.0", - ":bytes-1.5.0", - ":http-0.2.11", + ":byteorder-1.4.3", + ":bytes-1.4.0", + ":http-0.2.9", ":httparse-1.8.0", ":log-0.4.20", ":rand-0.8.5", - ":sha1-0.10.6", - ":thiserror-1.0.50", - ":url-2.5.0", + ":sha1-0.10.5", + ":thiserror-1.0.47", + ":url-2.4.0", ":utf-8-0.7.6", ], ) http_archive( - name = "tungstenite-0.20.1.crate", - sha256 = "9e3dac10fd62eaf6617d3a904ae222845979aec67c615d1c842b4002c7666fb9", - strip_prefix = "tungstenite-0.20.1", - urls = ["https://crates.io/api/v1/crates/tungstenite/0.20.1/download"], + name = "tungstenite-0.20.0.crate", + sha256 = "e862a1c4128df0112ab625f55cd5c934bcb4312ba80b39ae4b4835a3fd58e649", + strip_prefix = "tungstenite-0.20.0", + urls = ["https://crates.io/api/v1/crates/tungstenite/0.20.0/download"], visibility = [], ) cargo.rust_library( - name = "tungstenite-0.20.1", - srcs = [":tungstenite-0.20.1.crate"], + name = "tungstenite-0.20.0", + srcs = [":tungstenite-0.20.0.crate"], crate = "tungstenite", - crate_root = 
"tungstenite-0.20.1.crate/src/lib.rs", + crate_root = "tungstenite-0.20.0.crate/src/lib.rs", edition = "2018", features = [ "data-encoding", @@ -16693,78 +15887,78 @@ cargo.rust_library( ], visibility = [], deps = [ - ":byteorder-1.5.0", - ":bytes-1.5.0", - ":data-encoding-2.5.0", - ":http-0.2.11", + ":byteorder-1.4.3", + ":bytes-1.4.0", + ":data-encoding-2.4.0", + ":http-0.2.9", ":httparse-1.8.0", ":log-0.4.20", ":rand-0.8.5", - ":sha1-0.10.6", - ":thiserror-1.0.50", - ":url-2.5.0", + ":sha1-0.10.5", + ":thiserror-1.0.47", + ":url-2.4.0", ":utf-8-0.7.6", ], ) http_archive( - name = "typenum-1.17.0.crate", - sha256 = "42ff0bf0c66b8238c6f3b578df37d0b7848e55df8577b3f74f92a69acceeb825", - strip_prefix = "typenum-1.17.0", - urls = ["https://crates.io/api/v1/crates/typenum/1.17.0/download"], + name = "typenum-1.16.0.crate", + sha256 = "497961ef93d974e23eb6f433eb5fe1b7930b659f06d12dec6fc44a8f554c0bba", + strip_prefix = "typenum-1.16.0", + urls = ["https://crates.io/api/v1/crates/typenum/1.16.0/download"], visibility = [], ) cargo.rust_library( - name = "typenum-1.17.0", - srcs = [":typenum-1.17.0.crate"], + name = "typenum-1.16.0", + srcs = [":typenum-1.16.0.crate"], crate = "typenum", - crate_root = "typenum-1.17.0.crate/src/lib.rs", + crate_root = "typenum-1.16.0.crate/src/lib.rs", edition = "2018", env = { - "OUT_DIR": "$(location :typenum-1.17.0-build-script-main-run[out_dir])", + "OUT_DIR": "$(location :typenum-1.16.0-build-script-main-run[out_dir])", }, features = ["force_unix_path_separator"], visibility = [], ) cargo.rust_binary( - name = "typenum-1.17.0-build-script-main", - srcs = [":typenum-1.17.0.crate"], + name = "typenum-1.16.0-build-script-main", + srcs = [":typenum-1.16.0.crate"], crate = "build_script_main", - crate_root = "typenum-1.17.0.crate/build/main.rs", + crate_root = "typenum-1.16.0.crate/build/main.rs", edition = "2018", features = ["force_unix_path_separator"], visibility = [], ) buildscript_run( - name = "typenum-1.17.0-build-script-main-run", 
+ name = "typenum-1.16.0-build-script-main-run", package_name = "typenum", - buildscript_rule = ":typenum-1.17.0-build-script-main", + buildscript_rule = ":typenum-1.16.0-build-script-main", features = ["force_unix_path_separator"], - version = "1.17.0", + version = "1.16.0", ) alias( name = "ulid", - actual = ":ulid-1.1.0", + actual = ":ulid-1.0.0", visibility = ["PUBLIC"], ) http_archive( - name = "ulid-1.1.0.crate", - sha256 = "7e37c4b6cbcc59a8dcd09a6429fbc7890286bcbb79215cea7b38a3c4c0921d93", - strip_prefix = "ulid-1.1.0", - urls = ["https://crates.io/api/v1/crates/ulid/1.1.0/download"], + name = "ulid-1.0.0.crate", + sha256 = "13a3aaa69b04e5b66cc27309710a569ea23593612387d67daaf102e73aa974fd", + strip_prefix = "ulid-1.0.0", + urls = ["https://crates.io/api/v1/crates/ulid/1.0.0/download"], visibility = [], ) cargo.rust_library( - name = "ulid-1.1.0", - srcs = [":ulid-1.1.0.crate"], + name = "ulid-1.0.0", + srcs = [":ulid-1.0.0.crate"], crate = "ulid", - crate_root = "ulid-1.1.0.crate/src/lib.rs", + crate_root = "ulid-1.0.0.crate/src/lib.rs", edition = "2018", features = [ "default", @@ -16775,7 +15969,7 @@ cargo.rust_library( visibility = [], deps = [ ":rand-0.8.5", - ":serde-1.0.193", + ":serde-1.0.186", ], ) @@ -16816,18 +16010,18 @@ buildscript_run( ) http_archive( - name = "unicode-bidi-0.3.14.crate", - sha256 = "6f2528f27a9eb2b21e69c95319b30bd0efd85d09c379741b0f78ea1d86be2416", - strip_prefix = "unicode-bidi-0.3.14", - urls = ["https://crates.io/api/v1/crates/unicode-bidi/0.3.14/download"], + name = "unicode-bidi-0.3.13.crate", + sha256 = "92888ba5573ff080736b3648696b70cafad7d250551175acbaa4e0385b3e1460", + strip_prefix = "unicode-bidi-0.3.13", + urls = ["https://crates.io/api/v1/crates/unicode-bidi/0.3.13/download"], visibility = [], ) cargo.rust_library( - name = "unicode-bidi-0.3.14", - srcs = [":unicode-bidi-0.3.14.crate"], + name = "unicode-bidi-0.3.13", + srcs = [":unicode-bidi-0.3.13.crate"], crate = "unicode_bidi", - crate_root = 
"unicode-bidi-0.3.14.crate/src/lib.rs", + crate_root = "unicode-bidi-0.3.13.crate/src/lib.rs", edition = "2018", features = [ "default", @@ -16838,18 +16032,18 @@ cargo.rust_library( ) http_archive( - name = "unicode-ident-1.0.12.crate", - sha256 = "3354b9ac3fae1ff6755cb6db53683adb661634f67557942dea4facebec0fee4b", - strip_prefix = "unicode-ident-1.0.12", - urls = ["https://crates.io/api/v1/crates/unicode-ident/1.0.12/download"], + name = "unicode-ident-1.0.11.crate", + sha256 = "301abaae475aa91687eb82514b328ab47a211a533026cb25fc3e519b86adfc3c", + strip_prefix = "unicode-ident-1.0.11", + urls = ["https://crates.io/api/v1/crates/unicode-ident/1.0.11/download"], visibility = [], ) cargo.rust_library( - name = "unicode-ident-1.0.12", - srcs = [":unicode-ident-1.0.12.crate"], + name = "unicode-ident-1.0.11", + srcs = [":unicode-ident-1.0.11.crate"], crate = "unicode_ident", - crate_root = "unicode-ident-1.0.12.crate/src/lib.rs", + crate_root = "unicode-ident-1.0.11.crate/src/lib.rs", edition = "2018", visibility = [], ) @@ -16894,18 +16088,18 @@ cargo.rust_library( ) http_archive( - name = "unicode-width-0.1.11.crate", - sha256 = "e51733f11c9c4f72aa0c160008246859e340b00807569a0da0e7a1079b27ba85", - strip_prefix = "unicode-width-0.1.11", - urls = ["https://crates.io/api/v1/crates/unicode-width/0.1.11/download"], + name = "unicode-width-0.1.10.crate", + sha256 = "c0edd1e5b14653f783770bce4a4dabb4a5108a5370a5f5d8cfe8710c361f6c8b", + strip_prefix = "unicode-width-0.1.10", + urls = ["https://crates.io/api/v1/crates/unicode-width/0.1.10/download"], visibility = [], ) cargo.rust_library( - name = "unicode-width-0.1.11", - srcs = [":unicode-width-0.1.11.crate"], + name = "unicode-width-0.1.10", + srcs = [":unicode-width-0.1.10.crate"], crate = "unicode_width", - crate_root = "unicode-width-0.1.11.crate/src/lib.rs", + crate_root = "unicode-width-0.1.10.crate/src/lib.rs", edition = "2015", features = ["default"], visibility = [], @@ -16980,42 +16174,25 @@ cargo.rust_library( 
visibility = [], ) -http_archive( - name = "untrusted-0.9.0.crate", - sha256 = "8ecb6da28b8a351d773b68d5825ac39017e680750f980f3a1a85cd8dd28a47c1", - strip_prefix = "untrusted-0.9.0", - urls = ["https://crates.io/api/v1/crates/untrusted/0.9.0/download"], - visibility = [], -) - -cargo.rust_library( - name = "untrusted-0.9.0", - srcs = [":untrusted-0.9.0.crate"], - crate = "untrusted", - crate_root = "untrusted-0.9.0.crate/src/lib.rs", - edition = "2018", - visibility = [], -) - alias( name = "url", - actual = ":url-2.5.0", + actual = ":url-2.4.0", visibility = ["PUBLIC"], ) http_archive( - name = "url-2.5.0.crate", - sha256 = "31e6302e3bb753d46e83516cae55ae196fc0c309407cf11ab35cc51a4c2a4633", - strip_prefix = "url-2.5.0", - urls = ["https://crates.io/api/v1/crates/url/2.5.0/download"], + name = "url-2.4.0.crate", + sha256 = "50bff7831e19200a85b17131d085c25d7811bc4e186efdaf54bbd132994a88cb", + strip_prefix = "url-2.4.0", + urls = ["https://crates.io/api/v1/crates/url/2.4.0/download"], visibility = [], ) cargo.rust_library( - name = "url-2.5.0", - srcs = [":url-2.5.0.crate"], + name = "url-2.4.0", + srcs = [":url-2.4.0.crate"], crate = "url", - crate_root = "url-2.5.0.crate/src/lib.rs", + crate_root = "url-2.4.0.crate/src/lib.rs", edition = "2018", features = [ "default", @@ -17023,10 +16200,10 @@ cargo.rust_library( ], visibility = [], deps = [ - ":form_urlencoded-1.2.1", - ":idna-0.5.0", - ":percent-encoding-2.3.1", - ":serde-1.0.193", + ":form_urlencoded-1.2.0", + ":idna-0.4.0", + ":percent-encoding-2.3.0", + ":serde-1.0.186", ], ) @@ -17048,18 +16225,18 @@ cargo.rust_library( ) http_archive( - name = "utf8-width-0.1.7.crate", - sha256 = "86bd8d4e895da8537e5315b8254664e6b769c4ff3db18321b297a1e7004392e3", - strip_prefix = "utf8-width-0.1.7", - urls = ["https://crates.io/api/v1/crates/utf8-width/0.1.7/download"], + name = "utf8-width-0.1.6.crate", + sha256 = "5190c9442dcdaf0ddd50f37420417d219ae5261bbf5db120d0f9bab996c9cba1", + strip_prefix = "utf8-width-0.1.6", + 
urls = ["https://crates.io/api/v1/crates/utf8-width/0.1.6/download"], visibility = [], ) cargo.rust_library( - name = "utf8-width-0.1.7", - srcs = [":utf8-width-0.1.7.crate"], + name = "utf8-width-0.1.6", + srcs = [":utf8-width-0.1.6.crate"], crate = "utf8_width", - crate_root = "utf8-width-0.1.7.crate/src/lib.rs", + crate_root = "utf8-width-0.1.6.crate/src/lib.rs", edition = "2021", visibility = [], ) @@ -17084,23 +16261,23 @@ cargo.rust_library( alias( name = "uuid", - actual = ":uuid-1.6.1", + actual = ":uuid-1.4.1", visibility = ["PUBLIC"], ) http_archive( - name = "uuid-1.6.1.crate", - sha256 = "5e395fcf16a7a3d8127ec99782007af141946b4795001f876d54fb0d55978560", - strip_prefix = "uuid-1.6.1", - urls = ["https://crates.io/api/v1/crates/uuid/1.6.1/download"], + name = "uuid-1.4.1.crate", + sha256 = "79daa5ed5740825c40b389c5e50312b9c86df53fccd33f281df655642b43869d", + strip_prefix = "uuid-1.4.1", + urls = ["https://crates.io/api/v1/crates/uuid/1.4.1/download"], visibility = [], ) cargo.rust_library( - name = "uuid-1.6.1", - srcs = [":uuid-1.6.1.crate"], + name = "uuid-1.4.1", + srcs = [":uuid-1.4.1.crate"], crate = "uuid", - crate_root = "uuid-1.6.1.crate/src/lib.rs", + crate_root = "uuid-1.4.1.crate/src/lib.rs", edition = "2018", features = [ "default", @@ -17112,8 +16289,8 @@ cargo.rust_library( ], visibility = [], deps = [ - ":getrandom-0.2.11", - ":serde-1.0.193", + ":getrandom-0.2.10", + ":serde-1.0.186", ], ) @@ -17206,48 +16383,48 @@ cargo.rust_library( edition = "2018", visibility = [], deps = [ - ":libc-0.2.151", + ":libc-0.2.147", ":nix-0.24.3", ], ) http_archive( - name = "waker-fn-1.1.1.crate", - sha256 = "f3c4517f54858c779bbcbf228f4fca63d121bf85fbecb2dc578cdf4a39395690", - strip_prefix = "waker-fn-1.1.1", - urls = ["https://crates.io/api/v1/crates/waker-fn/1.1.1/download"], + name = "waker-fn-1.1.0.crate", + sha256 = "9d5b2c62b4012a3e1eca5a7e077d13b3bf498c4073e33ccd58626607748ceeca", + strip_prefix = "waker-fn-1.1.0", + urls = 
["https://crates.io/api/v1/crates/waker-fn/1.1.0/download"], visibility = [], ) cargo.rust_library( - name = "waker-fn-1.1.1", - srcs = [":waker-fn-1.1.1.crate"], + name = "waker-fn-1.1.0", + srcs = [":waker-fn-1.1.0.crate"], crate = "waker_fn", - crate_root = "waker-fn-1.1.1.crate/src/lib.rs", + crate_root = "waker-fn-1.1.0.crate/src/lib.rs", edition = "2018", visibility = [], ) http_archive( - name = "walkdir-2.4.0.crate", - sha256 = "d71d857dc86794ca4c280d616f7da00d2dbfd8cd788846559a6813e6aa4b54ee", - strip_prefix = "walkdir-2.4.0", - urls = ["https://crates.io/api/v1/crates/walkdir/2.4.0/download"], + name = "walkdir-2.3.3.crate", + sha256 = "36df944cda56c7d8d8b7496af378e6b16de9284591917d307c9b4d313c44e698", + strip_prefix = "walkdir-2.3.3", + urls = ["https://crates.io/api/v1/crates/walkdir/2.3.3/download"], visibility = [], ) cargo.rust_library( - name = "walkdir-2.4.0", - srcs = [":walkdir-2.4.0.crate"], + name = "walkdir-2.3.3", + srcs = [":walkdir-2.3.3.crate"], crate = "walkdir", - crate_root = "walkdir-2.4.0.crate/src/lib.rs", + crate_root = "walkdir-2.3.3.crate/src/lib.rs", edition = "2018", platform = { "windows-gnu": dict( - deps = [":winapi-util-0.1.6"], + deps = [":winapi-util-0.1.5"], ), "windows-msvc": dict( - deps = [":winapi-util-0.1.6"], + deps = [":winapi-util-0.1.5"], ), }, visibility = [], @@ -17269,22 +16446,22 @@ cargo.rust_library( crate_root = "want-0.3.1.crate/src/lib.rs", edition = "2018", visibility = [], - deps = [":try-lock-0.2.5"], + deps = [":try-lock-0.2.4"], ) http_archive( - name = "webpki-0.22.4.crate", - sha256 = "ed63aea5ce73d0ff405984102c42de94fc55a6b75765d621c65262469b3c9b53", - strip_prefix = "webpki-0.22.4", - urls = ["https://crates.io/api/v1/crates/webpki/0.22.4/download"], + name = "webpki-0.22.0.crate", + sha256 = "f095d78192e208183081cc07bc5515ef55216397af48b873e5edcd72637fa1bd", + strip_prefix = "webpki-0.22.0", + urls = ["https://crates.io/api/v1/crates/webpki/0.22.0/download"], visibility = [], ) 
cargo.rust_library( - name = "webpki-0.22.4", - srcs = [":webpki-0.22.4.crate"], + name = "webpki-0.22.0", + srcs = [":webpki-0.22.0.crate"], crate = "webpki", - crate_root = "webpki-0.22.4.crate/src/lib.rs", + crate_root = "webpki-0.22.0.crate/src/lib.rs", edition = "2018", features = [ "alloc", @@ -17292,8 +16469,8 @@ cargo.rust_library( ], visibility = [], deps = [ - ":ring-0.17.7", - ":untrusted-0.9.0", + ":ring-0.16.20", + ":untrusted-0.7.1", ], ) @@ -17312,70 +16489,52 @@ cargo.rust_library( crate_root = "webpki-roots-0.22.6.crate/src/lib.rs", edition = "2018", visibility = [], - deps = [":webpki-0.22.4"], + deps = [":webpki-0.22.0"], ) http_archive( - name = "webpki-roots-0.25.3.crate", - sha256 = "1778a42e8b3b90bff8d0f5032bf22250792889a5cdc752aa0020c84abe3aaf10", - strip_prefix = "webpki-roots-0.25.3", - urls = ["https://crates.io/api/v1/crates/webpki-roots/0.25.3/download"], + name = "webpki-roots-0.25.2.crate", + sha256 = "14247bb57be4f377dfb94c72830b8ce8fc6beac03cf4bf7b9732eadd414123fc", + strip_prefix = "webpki-roots-0.25.2", + urls = ["https://crates.io/api/v1/crates/webpki-roots/0.25.2/download"], visibility = [], ) cargo.rust_library( - name = "webpki-roots-0.25.3", - srcs = [":webpki-roots-0.25.3.crate"], + name = "webpki-roots-0.25.2", + srcs = [":webpki-roots-0.25.2.crate"], crate = "webpki_roots", - crate_root = "webpki-roots-0.25.3.crate/src/lib.rs", + crate_root = "webpki-roots-0.25.2.crate/src/lib.rs", edition = "2018", visibility = [], ) http_archive( - name = "which-4.4.2.crate", - sha256 = "87ba24419a2078cd2b0f2ede2691b6c66d8e47836da3b6db8265ebad47afbfc7", - strip_prefix = "which-4.4.2", - urls = ["https://crates.io/api/v1/crates/which/4.4.2/download"], + name = "which-4.4.0.crate", + sha256 = "2441c784c52b289a054b7201fc93253e288f094e2f4be9058343127c4226a269", + strip_prefix = "which-4.4.0", + urls = ["https://crates.io/api/v1/crates/which/4.4.0/download"], visibility = [], ) cargo.rust_library( - name = "which-4.4.2", - srcs = 
[":which-4.4.2.crate"], + name = "which-4.4.0", + srcs = [":which-4.4.0.crate"], crate = "which", - crate_root = "which-4.4.2.crate/src/lib.rs", - edition = "2021", + crate_root = "which-4.4.0.crate/src/lib.rs", + edition = "2018", platform = { - "linux-arm64": dict( - deps = [":home-0.5.5"], - ), - "linux-x86_64": dict( - deps = [":home-0.5.5"], - ), - "macos-arm64": dict( - deps = [":home-0.5.5"], - ), - "macos-x86_64": dict( - deps = [":home-0.5.5"], - ), "windows-gnu": dict( - deps = [ - ":home-0.5.5", - ":once_cell-1.19.0", - ], + deps = [":once_cell-1.18.0"], ), "windows-msvc": dict( - deps = [ - ":home-0.5.5", - ":once_cell-1.19.0", - ], + deps = [":once_cell-1.18.0"], ), }, visibility = [], deps = [ ":either-1.9.0", - ":rustix-0.38.28", + ":libc-0.2.147", ], ) @@ -17423,15 +16582,19 @@ cargo.rust_library( "handleapi", "impl-default", "knownfolders", + "minwinbase", "minwindef", + "ntdef", "ntsecapi", "ntstatus", "objbase", "processenv", + "profileapi", "shlobj", "std", "synchapi", "sysinfoapi", + "timezoneapi", "winbase", "wincon", "winerror", @@ -17466,15 +16629,19 @@ cargo.rust_binary( "handleapi", "impl-default", "knownfolders", + "minwinbase", "minwindef", + "ntdef", "ntsecapi", "ntstatus", "objbase", "processenv", + "profileapi", "shlobj", "std", "synchapi", "sysinfoapi", + "timezoneapi", "winbase", "wincon", "winerror", @@ -17498,15 +16665,19 @@ buildscript_run( "handleapi", "impl-default", "knownfolders", + "minwinbase", "minwindef", + "ntdef", "ntsecapi", "ntstatus", "objbase", "processenv", + "profileapi", "shlobj", "std", "synchapi", "sysinfoapi", + "timezoneapi", "winbase", "wincon", "winerror", @@ -17520,19 +16691,19 @@ buildscript_run( ) http_archive( - name = "winapi-util-0.1.6.crate", - sha256 = "f29e6f9198ba0d26b4c9f07dbe6f9ed633e1f3d5b8b414090084349e46a52596", - strip_prefix = "winapi-util-0.1.6", - urls = ["https://crates.io/api/v1/crates/winapi-util/0.1.6/download"], + name = "winapi-util-0.1.5.crate", + sha256 = 
"70ec6ce85bb158151cae5e5c87f95a8e97d2c0c4b001223f33a334e3ce5de178", + strip_prefix = "winapi-util-0.1.5", + urls = ["https://crates.io/api/v1/crates/winapi-util/0.1.5/download"], visibility = [], ) cargo.rust_library( - name = "winapi-util-0.1.6", - srcs = [":winapi-util-0.1.6.crate"], + name = "winapi-util-0.1.5", + srcs = [":winapi-util-0.1.5.crate"], crate = "winapi_util", - crate_root = "winapi-util-0.1.6.crate/src/lib.rs", - edition = "2021", + crate_root = "winapi-util-0.1.5.crate/src/lib.rs", + edition = "2018", platform = { "windows-gnu": dict( deps = [":winapi-0.3.9"], @@ -17592,20 +16763,23 @@ third_party_rust_prebuilt_cxx_library( ) http_archive( - name = "windows-core-0.51.1.crate", - sha256 = "f1f8cf84f35d2db49a46868f947758c7a1138116f7fac3bc844f43ade1292e64", - strip_prefix = "windows-core-0.51.1", - urls = ["https://crates.io/api/v1/crates/windows-core/0.51.1/download"], + name = "windows-0.48.0.crate", + sha256 = "e686886bc078bc1b0b600cac0147aadb815089b6e4da64016cbd754b6342700f", + strip_prefix = "windows-0.48.0", + urls = ["https://crates.io/api/v1/crates/windows/0.48.0/download"], visibility = [], ) cargo.rust_library( - name = "windows-core-0.51.1", - srcs = [":windows-core-0.51.1.crate"], - crate = "windows_core", - crate_root = "windows-core-0.51.1.crate/src/lib.rs", - edition = "2021", - features = ["default"], + name = "windows-0.48.0", + srcs = [":windows-0.48.0.crate"], + crate = "windows", + crate_root = "windows-0.48.0.crate/src/lib.rs", + edition = "2018", + features = [ + "Globalization", + "default", + ], visibility = [], deps = [":windows-targets-0.48.5"], ) @@ -17677,6 +16851,8 @@ cargo.rust_library( "Win32", "Win32_Foundation", "Win32_Globalization", + "Win32_NetworkManagement", + "Win32_NetworkManagement_IpHelper", "Win32_Networking", "Win32_Networking_WinSock", "Win32_Security", @@ -17709,40 +16885,6 @@ cargo.rust_library( deps = [":windows-targets-0.48.5"], ) -http_archive( - name = "windows-sys-0.52.0.crate", - sha256 = 
"282be5f36a8ce781fad8c8ae18fa3f9beff57ec1b52cb3de0789201425d9a33d", - strip_prefix = "windows-sys-0.52.0", - urls = ["https://crates.io/api/v1/crates/windows-sys/0.52.0/download"], - visibility = [], -) - -cargo.rust_library( - name = "windows-sys-0.52.0", - srcs = [":windows-sys-0.52.0.crate"], - crate = "windows_sys", - crate_root = "windows-sys-0.52.0.crate/src/lib.rs", - edition = "2021", - features = [ - "Win32", - "Win32_Foundation", - "Win32_NetworkManagement", - "Win32_NetworkManagement_IpHelper", - "Win32_Networking", - "Win32_Networking_WinSock", - "Win32_Storage", - "Win32_Storage_FileSystem", - "Win32_System", - "Win32_System_Console", - "Win32_System_Diagnostics", - "Win32_System_Diagnostics_Debug", - "Win32_System_Threading", - "default", - ], - visibility = [], - deps = [":windows-targets-0.52.0"], -) - http_archive( name = "windows-targets-0.42.2.crate", sha256 = "8e5180c00cd44c9b1c88adb3693291f1cd93605ded80c250a75d472756b4d071", @@ -17796,34 +16938,6 @@ cargo.rust_library( visibility = [], ) -http_archive( - name = "windows-targets-0.52.0.crate", - sha256 = "8a18201040b24831fbb9e4eb208f8892e1f50a37feb53cc7ff887feb8f50e7cd", - strip_prefix = "windows-targets-0.52.0", - urls = ["https://crates.io/api/v1/crates/windows-targets/0.52.0/download"], - visibility = [], -) - -cargo.rust_library( - name = "windows-targets-0.52.0", - srcs = [":windows-targets-0.52.0.crate"], - crate = "windows_targets", - crate_root = "windows-targets-0.52.0.crate/src/lib.rs", - edition = "2021", - platform = { - "linux-x86_64": dict( - deps = [":windows_x86_64_gnu-0.52.0"], - ), - "windows-gnu": dict( - deps = [":windows_x86_64_gnu-0.52.0"], - ), - "windows-msvc": dict( - deps = [":windows_x86_64_msvc-0.52.0"], - ), - }, - visibility = [], -) - http_archive( name = "windows_x86_64_gnu-0.42.2.crate", sha256 = "8de912b8b8feb55c064867cf047dda097f92d51efad5b491dfb98f6bbb70cb36", @@ -17873,23 +16987,6 @@ cargo.rust_library( visibility = [], ) -http_archive( - name = 
"windows_x86_64_gnu-0.52.0.crate", - sha256 = "3d6fa32db2bc4a2f5abeacf2b69f7992cd09dca97498da74a151a3132c26befd", - strip_prefix = "windows_x86_64_gnu-0.52.0", - urls = ["https://crates.io/api/v1/crates/windows_x86_64_gnu/0.52.0/download"], - visibility = [], -) - -cargo.rust_library( - name = "windows_x86_64_gnu-0.52.0", - srcs = [":windows_x86_64_gnu-0.52.0.crate"], - crate = "windows_x86_64_gnu", - crate_root = "windows_x86_64_gnu-0.52.0.crate/src/lib.rs", - edition = "2021", - visibility = [], -) - http_archive( name = "windows_x86_64_msvc-0.42.2.crate", sha256 = "9aec5da331524158c6d1a4ac0ab1541149c0b9505fde06423b02f5ef0106b9f0", @@ -17940,35 +17037,18 @@ cargo.rust_library( ) http_archive( - name = "windows_x86_64_msvc-0.52.0.crate", - sha256 = "dff9641d1cd4be8d1a070daf9e3773c5f67e78b4d9d42263020c057706765c04", - strip_prefix = "windows_x86_64_msvc-0.52.0", - urls = ["https://crates.io/api/v1/crates/windows_x86_64_msvc/0.52.0/download"], - visibility = [], -) - -cargo.rust_library( - name = "windows_x86_64_msvc-0.52.0", - srcs = [":windows_x86_64_msvc-0.52.0.crate"], - crate = "windows_x86_64_msvc", - crate_root = "windows_x86_64_msvc-0.52.0.crate/src/lib.rs", - edition = "2021", - visibility = [], -) - -http_archive( - name = "winnow-0.5.28.crate", - sha256 = "6c830786f7720c2fd27a1a0e27a709dbd3c4d009b56d098fc742d4f4eab91fe2", - strip_prefix = "winnow-0.5.28", - urls = ["https://crates.io/api/v1/crates/winnow/0.5.28/download"], + name = "winnow-0.5.15.crate", + sha256 = "7c2e3184b9c4e92ad5167ca73039d0c42476302ab603e2fec4487511f38ccefc", + strip_prefix = "winnow-0.5.15", + urls = ["https://crates.io/api/v1/crates/winnow/0.5.15/download"], visibility = [], ) cargo.rust_library( - name = "winnow-0.5.28", - srcs = [":winnow-0.5.28.crate"], + name = "winnow-0.5.15", + srcs = [":winnow-0.5.15.crate"], crate = "winnow", - crate_root = "winnow-0.5.28.crate/src/lib.rs", + crate_root = "winnow-0.5.15.crate/src/lib.rs", edition = "2021", features = [ "alloc", @@ -17976,7 
+17056,7 @@ cargo.rust_library( "std", ], visibility = [], - deps = [":memchr-2.6.4"], + deps = [":memchr-2.5.0"], ) http_archive( @@ -18019,39 +17099,25 @@ cargo.rust_library( ) http_archive( - name = "xattr-1.1.2.crate", - sha256 = "d367426ae76bdfce3d8eaea6e94422afd6def7d46f9c89e2980309115b3c2c41", - strip_prefix = "xattr-1.1.2", - urls = ["https://crates.io/api/v1/crates/xattr/1.1.2/download"], + name = "xattr-1.0.1.crate", + sha256 = "f4686009f71ff3e5c4dbcf1a282d0a44db3f021ba69350cd42086b3e5f1c6985", + strip_prefix = "xattr-1.0.1", + urls = ["https://crates.io/api/v1/crates/xattr/1.0.1/download"], visibility = [], ) cargo.rust_library( - name = "xattr-1.1.2", - srcs = [":xattr-1.1.2.crate"], + name = "xattr-1.0.1", + srcs = [":xattr-1.0.1.crate"], crate = "xattr", - crate_root = "xattr-1.1.2.crate/src/lib.rs", + crate_root = "xattr-1.0.1.crate/src/lib.rs", edition = "2021", features = [ "default", "unsupported", ], - platform = { - "linux-arm64": dict( - deps = [":linux-raw-sys-0.4.12"], - ), - "linux-x86_64": dict( - deps = [":linux-raw-sys-0.4.12"], - ), - "macos-arm64": dict( - deps = [":libc-0.2.151"], - ), - "macos-x86_64": dict( - deps = [":libc-0.2.151"], - ), - }, visibility = [], - deps = [":rustix-0.38.28"], + deps = [":libc-0.2.147"], ) alias( @@ -18079,9 +17145,9 @@ cargo.rust_library( deps = [ ":futures-util-0.3.29", ":lib0-0.16.10", - ":thiserror-1.0.50", - ":tokio-1.35.0", - ":yrs-0.17.2", + ":thiserror-1.0.47", + ":tokio-1.32.0", + ":yrs-0.16.10", ], ) @@ -18128,68 +17194,24 @@ cargo.rust_library( ":lib0-0.16.10", ":rand-0.7.3", ":smallstr-0.2.0", - ":smallvec-1.11.2", - ":thiserror-1.0.50", - ], -) - -http_archive( - name = "yrs-0.17.2.crate", - sha256 = "68aea14c6c33f2edd8a5ff9415360cfa5b98d90cce30c5ee3be59a8419fb15a9", - strip_prefix = "yrs-0.17.2", - urls = ["https://crates.io/api/v1/crates/yrs/0.17.2/download"], - visibility = [], -) - -cargo.rust_library( - name = "yrs-0.17.2", - srcs = [":yrs-0.17.2.crate"], - crate = "yrs", - crate_root 
= "yrs-0.17.2.crate/src/lib.rs", - edition = "2018", - visibility = [], - deps = [ - ":atomic_refcell-0.1.13", - ":rand-0.7.3", - ":serde-1.0.193", - ":serde_json-1.0.108", - ":smallstr-0.3.0", - ":smallvec-1.11.2", - ":thiserror-1.0.50", + ":smallvec-1.11.0", + ":thiserror-1.0.47", ], ) http_archive( - name = "zerocopy-0.7.30.crate", - sha256 = "306dca4455518f1f31635ec308b6b3e4eb1b11758cefafc782827d0aa7acb5c7", - strip_prefix = "zerocopy-0.7.30", - urls = ["https://crates.io/api/v1/crates/zerocopy/0.7.30/download"], - visibility = [], -) - -cargo.rust_library( - name = "zerocopy-0.7.30", - srcs = [":zerocopy-0.7.30.crate"], - crate = "zerocopy", - crate_root = "zerocopy-0.7.30.crate/src/lib.rs", - edition = "2018", - features = ["simd"], - visibility = [], -) - -http_archive( - name = "zeroize-1.7.0.crate", - sha256 = "525b4ec142c6b68a2d10f01f7bbf6755599ca3f81ea53b8431b7dd348f5fdb2d", - strip_prefix = "zeroize-1.7.0", - urls = ["https://crates.io/api/v1/crates/zeroize/1.7.0/download"], + name = "zeroize-1.6.0.crate", + sha256 = "2a0956f1ba7c7909bfb66c2e9e4124ab6f6482560f6628b5aaeba39207c9aad9", + strip_prefix = "zeroize-1.6.0", + urls = ["https://crates.io/api/v1/crates/zeroize/1.6.0/download"], visibility = [], ) cargo.rust_library( - name = "zeroize-1.7.0", - srcs = [":zeroize-1.7.0.crate"], + name = "zeroize-1.6.0", + srcs = [":zeroize-1.6.0.crate"], crate = "zeroize", - crate_root = "zeroize-1.7.0.crate/src/lib.rs", + crate_root = "zeroize-1.6.0.crate/src/lib.rs", edition = "2021", features = [ "alloc", @@ -18217,8 +17239,8 @@ cargo.rust_library( proc_macro = True, visibility = [], deps = [ - ":proc-macro2-1.0.70", + ":proc-macro2-1.0.66", ":quote-1.0.33", - ":syn-2.0.40", + ":syn-2.0.29", ], ) diff --git a/third-party/rust/Cargo.lock b/third-party/rust/Cargo.lock index ce8c5121b6..c9291e4644 100644 --- a/third-party/rust/Cargo.lock +++ b/third-party/rust/Cargo.lock @@ -25,32 +25,31 @@ checksum = 
"f26201604c87b1e01bd3d98f8d5d9a8fcbb815e8cedb41ffccbeb4bf593a35fe" [[package]] name = "ahash" -version = "0.7.7" +version = "0.7.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5a824f2aa7e75a0c98c5a504fceb80649e9c35265d44525b5f94de4771a395cd" +checksum = "fcb51a0695d8f838b1ee009b3fbf66bda078cd64590202a864a8f3e8c4315c47" dependencies = [ - "getrandom 0.2.11", + "getrandom 0.2.10", "once_cell", "version_check", ] [[package]] name = "ahash" -version = "0.8.6" +version = "0.8.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "91429305e9f0a25f6205c5b8e0d2db09e0708a7a6df0f42212bb56c32c8ac97a" +checksum = "2c99f64d1e06488f620f932677e24bc6e2897582980441ae90a671415bd7ec2f" dependencies = [ "cfg-if", "once_cell", "version_check", - "zerocopy", ] [[package]] name = "aho-corasick" -version = "1.1.2" +version = "1.0.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b2969dcb958b36655471fc61f7e416fa76033bdd4bfed0678d8fee1e2d07a1f0" +checksum = "6748e8def348ed4d14996fa801f4122cd763fff530258cdc03f64b25f89d3a5a" dependencies = [ "memchr", ] @@ -84,50 +83,51 @@ dependencies = [ [[package]] name = "anstream" -version = "0.6.5" +version = "0.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d664a92ecae85fd0a7392615844904654d1d5f5514837f471ddef4a057aba1b6" +checksum = "0ca84f3628370c59db74ee214b3263d58f9aadd9b4fe7e711fd87dc452b7f163" dependencies = [ "anstyle", "anstyle-parse", "anstyle-query", "anstyle-wincon", "colorchoice", + "is-terminal", "utf8parse", ] [[package]] name = "anstyle" -version = "1.0.4" +version = "1.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7079075b41f533b8c61d2a4d073c4676e1f8b249ff94a393b0595db304e0dd87" +checksum = "15c4c2c83f81532e5845a733998b6971faca23490340a418e9b72a3ec9de12ea" [[package]] name = "anstyle-parse" -version = "0.2.3" +version = "0.2.1" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "c75ac65da39e5fe5ab759307499ddad880d724eed2f6ce5b5e8a26f4f387928c" +checksum = "938874ff5980b03a87c5524b3ae5b59cf99b1d6bc836848df7bc5ada9643c333" dependencies = [ "utf8parse", ] [[package]] name = "anstyle-query" -version = "1.0.2" +version = "1.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e28923312444cdd728e4738b3f9c9cac739500909bb3d3c94b43551b16517648" +checksum = "5ca11d4be1bab0c8bc8734a9aa7bf4ee8316d462a08c6ac5052f888fef5b494b" dependencies = [ - "windows-sys 0.52.0", + "windows-sys 0.48.0", ] [[package]] name = "anstyle-wincon" -version = "3.0.2" +version = "1.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1cd54b81ec8d6180e24654d0b371ad22fc3dd083b6ff8ba325b72e00c87660a7" +checksum = "c677ab05e09154296dd37acecd46420c17b9713e8366facafa8fc0885167cf4c" dependencies = [ "anstyle", - "windows-sys 0.52.0", + "windows-sys 0.48.0", ] [[package]] @@ -136,6 +136,12 @@ version = "1.0.75" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a4668cab20f66d8d020e1fbc0ebe47217433c1b6c8f2040faf858554e394ace6" +[[package]] +name = "array-init" +version = "2.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3d62b7694a562cdf5a74227903507c56ab2cc8bdd1f781ed5cb4cf9c9f810bfc" + [[package]] name = "arrayref" version = "0.3.7" @@ -154,18 +160,18 @@ version = "0.31.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8257238e2a3629ee5618502a75d1b91f8017c24638c75349fc8d2d80cf1f7c4c" dependencies = [ - "base64 0.21.5", - "bytes 1.5.0", + "base64 0.21.2", + "bytes 1.4.0", "futures", "http", "itoa", "memchr", - "nkeys 0.3.2", + "nkeys 0.3.1", "nuid", "once_cell", "rand 0.8.5", "regex", - "ring 0.16.20", + "ring", "rustls-native-certs", "rustls-pemfile", "rustls-webpki", @@ -174,7 +180,7 @@ dependencies = [ "serde_nanos", "serde_repr", "thiserror", - "time", + "time 
0.3.27", "tokio", "tokio-retry", "tokio-rustls 0.24.1", @@ -184,13 +190,13 @@ dependencies = [ [[package]] name = "async-recursion" -version = "1.0.5" +version = "1.0.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5fd55a5ba1179988837d24ab4c7cc8ed6efdeff578ede0416b4225a5fca35bd0" +checksum = "0e97ce7de6cf12de5d7226c73f5ba9811622f4db3a5b91b55c53e987e5f91cba" dependencies = [ "proc-macro2", "quote", - "syn 2.0.40", + "syn 2.0.29", ] [[package]] @@ -212,18 +218,18 @@ checksum = "16e62a023e7c117e27523144c5d2459f4397fcc3cab0085af8e2224f643a0193" dependencies = [ "proc-macro2", "quote", - "syn 2.0.40", + "syn 2.0.29", ] [[package]] name = "async-trait" -version = "0.1.74" +version = "0.1.73" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a66537f1bb974b254c98ed142ff995236e81b9d0fe4db0575f46612cb15eb0f9" +checksum = "bc00ceb34980c03614e35a3a4e218276a0a824e911d07651cd0d858a51e8c0f0" dependencies = [ "proc-macro2", "quote", - "syn 2.0.40", + "syn 2.0.29", ] [[package]] @@ -232,7 +238,7 @@ version = "0.6.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4057f2c32adbb2fc158e22fb38433c8e9bbf76b75a4732c7c0cbaf695fb65568" dependencies = [ - "bytes 1.5.0", + "bytes 1.4.0", "futures-sink", "futures-util", "memchr", @@ -248,6 +254,15 @@ dependencies = [ "num-traits", ] +[[package]] +name = "atomic-polyfill" +version = "1.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8cf2bce30dfe09ef0bfaef228b9d414faaf7e563035494d7fe092dba54b300f4" +dependencies = [ + "critical-section", +] + [[package]] name = "atomic_refcell" version = "0.1.13" @@ -262,7 +277,7 @@ checksum = "1fcf00bc6d5abb29b5f97e3c61a90b6d3caa12f3faf897d4a3e3607c050a35a7" dependencies = [ "http", "log", - "rustls 0.20.9", + "rustls 0.20.8", "serde", "serde_json", "url", @@ -289,15 +304,15 @@ dependencies = [ "rust-ini", "serde", "thiserror", - "time", + "time 0.3.27", "url", ] [[package]] name = 
"aws-region" -version = "0.25.4" +version = "0.25.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "42fed2b9fca70f2908268d057a607f2a906f47edbf856ea8587de9038d264e22" +checksum = "056557a61427d0e5ba29dd931031c8ffed4ee7a550e7cd55692a9d8deb0a9dba" dependencies = [ "thiserror", ] @@ -311,9 +326,9 @@ dependencies = [ "async-trait", "axum-core", "axum-macros", - "base64 0.21.5", + "base64 0.21.2", "bitflags 1.3.2", - "bytes 1.5.0", + "bytes 1.4.0", "futures-util", "http", "http-body", @@ -333,7 +348,7 @@ dependencies = [ "sha1", "sync_wrapper", "tokio", - "tokio-tungstenite 0.20.1", + "tokio-tungstenite 0.20.0", "tower", "tower-layer", "tower-service", @@ -346,7 +361,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "759fa577a247914fd3f7f76d62972792636412fbfd634cd452f6a385a74d2d2c" dependencies = [ "async-trait", - "bytes 1.5.0", + "bytes 1.4.0", "futures-util", "http", "http-body", @@ -365,7 +380,7 @@ dependencies = [ "heck 0.4.1", "proc-macro2", "quote", - "syn 2.0.40", + "syn 2.0.29", ] [[package]] @@ -410,9 +425,9 @@ checksum = "9e1b586273c5702936fe7b7d6896644d8be71e6314cfe09d3167c95f712589e8" [[package]] name = "base64" -version = "0.21.5" +version = "0.21.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "35636a1494ede3b646cc98f74f8e62c773a38a659ebc777a2cf26b9b74171df9" +checksum = "604178f6c5c21f02dc555784810edfb88d34ac2c73b2eae109655649ee73ce3d" [[package]] name = "base64ct" @@ -445,9 +460,9 @@ checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a" [[package]] name = "bitflags" -version = "2.4.1" +version = "2.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "327762f6e5a765692301e5bb513e0d9fef63be86bbc14528052b1cd3e6f03e07" +checksum = "b4682ae6287fcf752ecaabbfcc7b6f9b72aa33933dc23a554d853aea8eea8635" [[package]] name = "bitvec" @@ -463,15 +478,16 @@ dependencies = [ [[package]] name = "blake3" -version = "1.5.0" 
+version = "1.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0231f06152bf547e9c2b5194f247cd97aacf6dcd8b15d8e5ec0663f64580da87" +checksum = "199c42ab6972d92c9f8995f086273d25c42fc0f7b2a1fcefba465c1352d25ba5" dependencies = [ "arrayref", "arrayvec", "cc", "cfg-if", "constant_time_eq", + "digest 0.10.7", ] [[package]] @@ -498,9 +514,9 @@ version = "0.15.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f03db470b3c0213c47e978da93200259a1eb4dae2e5512cba9955e2b540a6fc6" dependencies = [ - "base64 0.21.5", + "base64 0.21.2", "bollard-stubs", - "bytes 1.5.0", + "bytes 1.4.0", "futures-core", "futures-util", "hex", @@ -529,38 +545,59 @@ checksum = "b58071e8fd9ec1e930efd28e3a90c1251015872a2ce49f81f36421b86466932e" dependencies = [ "serde", "serde_repr", - "serde_with 3.4.0", + "serde_with 3.3.0", ] [[package]] name = "borsh" -version = "1.2.1" +version = "0.10.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9897ef0f1bd2362169de6d7e436ea2237dc1085d7d1e4db75f4be34d86f309d1" +checksum = "4114279215a005bc675e386011e594e1d9b800918cea18fcadadcce864a2046b" dependencies = [ "borsh-derive", - "cfg_aliases", + "hashbrown 0.13.2", ] [[package]] name = "borsh-derive" -version = "1.2.1" +version = "0.10.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "478b41ff04256c5c8330f3dfdaaae2a5cc976a8e75088bafa4625b0d0208de8c" +checksum = "0754613691538d51f329cce9af41d7b7ca150bc973056f1156611489475f54f7" +dependencies = [ + "borsh-derive-internal", + "borsh-schema-derive-internal", + "proc-macro-crate 0.1.5", + "proc-macro2", + "syn 1.0.109", +] + +[[package]] +name = "borsh-derive-internal" +version = "0.10.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "afb438156919598d2c7bad7e1c0adf3d26ed3840dbc010db1a882a65583ca2fb" +dependencies = [ + "proc-macro2", + "quote", + "syn 1.0.109", +] + +[[package]] +name = "borsh-schema-derive-internal" 
+version = "0.10.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "634205cc43f74a1b9046ef87c4540ebda95696ec0f315024860cad7c5b0f5ccd" dependencies = [ - "once_cell", - "proc-macro-crate 2.0.0", "proc-macro2", "quote", - "syn 2.0.40", - "syn_derive", + "syn 1.0.109", ] [[package]] name = "bstr" -version = "1.8.0" +version = "1.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "542f33a8835a0884b006a0c3df3dadd99c0c3f296ed26c2fdc8028e01ad6230c" +checksum = "6798148dccfbff0fae41c7574d2fa8f1ef3492fba0face179de5d8d447d67b05" dependencies = [ "memchr", "serde", @@ -568,9 +605,9 @@ dependencies = [ [[package]] name = "bumpalo" -version = "3.14.0" +version = "3.13.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7f30e7476521f6f8af1a1c4c0b8cc94f0bee37d91763d0ca2665f299b6cd8aec" +checksum = "a3e2c3daef883ecc1b5d58c15adae93470a91d425f3532ba1695849656af3fc1" [[package]] name = "bytecheck" @@ -596,9 +633,9 @@ dependencies = [ [[package]] name = "byteorder" -version = "1.5.0" +version = "1.4.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1fd0f2584146f6f2ef48085050886acf353beff7305ebd1ae69500e27c67f64b" +checksum = "14c189c53d098945499cdfa7ecc63567cf3886b3332b312a5b4585d8d3a6a610" [[package]] name = "bytes" @@ -608,9 +645,9 @@ checksum = "0e4cec68f03f32e44924783795810fa50a7035d8c8ebe78580ad7e6c703fba38" [[package]] name = "bytes" -version = "1.5.0" +version = "1.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a2bd12c1caf447e69cd4528f47f94d203fd2582878ecb9e9465484c4148a8223" +checksum = "89b2fd2a0dcf38d7971e2194b6b6eebab45ae01067456a7fd93d5547a61b70be" dependencies = [ "serde", ] @@ -630,25 +667,20 @@ version = "1.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" -[[package]] -name = "cfg_aliases" -version = "0.1.1" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "fd16c4719339c4530435d38e511904438d07cce7950afa3718a84ac36c10e89e" - [[package]] name = "chrono" -version = "0.4.31" +version = "0.4.26" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7f2c685bad3eb3d45a01354cedb7d5faa66194d1d58ba6e267a8de788f79db38" +checksum = "ec837a71355b28f6556dbd569b37b3f363091c0bd4b2e735674521b4c5fd9bc5" dependencies = [ "android-tzdata", "iana-time-zone", "js-sys", "num-traits", "serde", + "time 0.1.45", "wasm-bindgen", - "windows-targets 0.48.5", + "winapi", ] [[package]] @@ -680,19 +712,20 @@ dependencies = [ [[package]] name = "clap" -version = "4.4.11" +version = "4.3.24" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bfaff671f6b22ca62406885ece523383b9b64022e341e53e009a62ebc47a45f2" +checksum = "fb690e81c7840c0d7aade59f242ea3b41b9bc27bcd5997890e7702ae4b32e487" dependencies = [ "clap_builder", "clap_derive", + "once_cell", ] [[package]] name = "clap_builder" -version = "4.4.11" +version = "4.3.24" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a216b506622bb1d316cd51328dce24e07bdff4a6128a47c7e7fad11878d5adbb" +checksum = "5ed2e96bc16d8d740f6f48d663eddf4b8a0983e79210fd55479b7bcd0a69860e" dependencies = [ "anstream", "anstyle", @@ -703,27 +736,27 @@ dependencies = [ [[package]] name = "clap_derive" -version = "4.4.7" +version = "4.3.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cf9804afaaf59a91e75b022a30fb7229a7901f60c755489cc61c9b423b836442" +checksum = "54a9bb5758fc5dfe728d1019941681eccaf0cf8a4189b692a0ee2f2ecf90a050" dependencies = [ "heck 0.4.1", "proc-macro2", "quote", - "syn 2.0.40", + "syn 2.0.29", ] [[package]] name = "clap_lex" -version = "0.6.0" +version = "0.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "702fc72eb24e5a1e48ce58027a675bc24edd52096d5397d4aea7c6dd9eca0bd1" +checksum = 
"2da6da31387c7e4ef160ffab6d5e7f00c42626fe39aea70a7b0f1773f7dd6c1b" [[package]] name = "coarsetime" -version = "0.1.33" +version = "0.1.23" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "71367d3385c716342014ad17e3d19f7788ae514885a1f4c24f500260fb365e1a" +checksum = "a90d114103adbc625300f346d4d09dfb4ab1c4a8df6868435dd903392ecf4354" dependencies = [ "libc", "once_cell", @@ -731,6 +764,12 @@ dependencies = [ "wasm-bindgen", ] +[[package]] +name = "cobs" +version = "0.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "67ba02a97a2bd10f4b59b25c7973101c79642302776489e030cd13cdab09ed15" + [[package]] name = "color-eyre" version = "0.6.2" @@ -748,9 +787,9 @@ dependencies = [ [[package]] name = "color-spantrace" -version = "0.2.1" +version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cd6be1b2a7e382e2b98b43b2adcca6bb0e465af0bdd38123873ae61eb17a72c2" +checksum = "1ba75b3d9449ecdccb27ecbc479fdc0b87fa2dd43d2f8298f9bf0e59aacc8dce" dependencies = [ "once_cell", "owo-colors", @@ -766,32 +805,33 @@ checksum = "acbf1af155f9b9ef647e42cdc158db4b64a1b61f743629225fde6f3e0be2a7c7" [[package]] name = "colored" -version = "2.1.0" +version = "2.0.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cbf2150cce219b664a8a70df7a1f933836724b503f8a413af9365b4dcc4d90b8" +checksum = "2674ec482fbc38012cf31e6c42ba0177b431a0cb6f15fe40efa5aab1bda516f6" dependencies = [ + "is-terminal", "lazy_static", "windows-sys 0.48.0", ] [[package]] name = "comfy-table" -version = "7.1.0" +version = "7.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7c64043d6c7b7a4c58e39e7efccfdea7b93d885a795d0c054a69dbbf4dd52686" +checksum = "9ab77dbd8adecaf3f0db40581631b995f312a8a5ae3aa9993188bb8f23d83a5b" dependencies = [ "console", - "crossterm 0.27.0", - "strum 0.25.0", - "strum_macros 0.25.3", + "crossterm 0.26.1", + "strum", + "strum_macros", "unicode-width", ] 
[[package]] name = "config" -version = "0.13.4" +version = "0.13.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "23738e11972c7643e4ec947840fc463b6a571afcd3e735bdfce7d03c7a784aca" +checksum = "d379af7f68bfc21714c6c7dea883544201741d2ce8274bb12fa54f89507f52a7" dependencies = [ "async-trait", "lazy_static", @@ -897,9 +937,9 @@ dependencies = [ [[package]] name = "core-foundation" -version = "0.9.4" +version = "0.9.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "91e195e091a93c46f7102ec7818a2aa394e1e1771c3ab4825963fa03e45afb8f" +checksum = "194a7a9e6de53fa55116934067c844d9d749312f75c6f6d0980e8c252f8c2146" dependencies = [ "core-foundation-sys", "libc", @@ -907,15 +947,15 @@ dependencies = [ [[package]] name = "core-foundation-sys" -version = "0.8.6" +version = "0.8.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "06ea2b9bc92be3c2baa9334a323ebca2d6f074ff852cd1d7b11064035cd3868f" +checksum = "e496a50fda8aacccc86d7529e2c1e0892dbd0f898a6b5645b5561b89c3210efa" [[package]] name = "cpufeatures" -version = "0.2.11" +version = "0.2.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ce420fe07aecd3e67c5f910618fe65e94158f6dcc0adf44e00d69ce2bdfe0fd0" +checksum = "a17b76ff3a4162b0b27f354a0c87015ddad39d35f9c0c36607a3bdd175dde1f1" dependencies = [ "libc", ] @@ -930,37 +970,19 @@ dependencies = [ ] [[package]] -name = "crossbeam-channel" -version = "0.5.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a33c2bf77f2df06183c3aa30d1e96c0695a313d4f9c453cc3762a6db39f99200" -dependencies = [ - "cfg-if", - "crossbeam-utils", -] - -[[package]] -name = "crossbeam-deque" -version = "0.8.3" +name = "critical-section" +version = "1.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ce6fd6f855243022dcecf8702fef0c297d4338e226845fe067f6341ad9fa0cef" -dependencies = [ - "cfg-if", - "crossbeam-epoch", - 
"crossbeam-utils", -] +checksum = "7059fff8937831a9ae6f0fe4d658ffabf58f2ca96aa9dec1c889f936f705f216" [[package]] -name = "crossbeam-epoch" -version = "0.9.15" +name = "crossbeam-channel" +version = "0.5.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ae211234986c545741a7dc064309f67ee1e5ad243d0e48335adc0484d960bcc7" +checksum = "a33c2bf77f2df06183c3aa30d1e96c0695a313d4f9c453cc3762a6db39f99200" dependencies = [ - "autocfg", "cfg-if", "crossbeam-utils", - "memoffset 0.9.0", - "scopeguard", ] [[package]] @@ -1000,14 +1022,17 @@ dependencies = [ [[package]] name = "crossterm" -version = "0.27.0" +version = "0.26.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f476fe445d41c9e991fd07515a6f463074b782242ccf4a5b7b1d1012e70824df" +checksum = "a84cda67535339806297f1b331d6dd6320470d2a0fe65381e79ee9e156dd3d13" dependencies = [ - "bitflags 2.4.1", + "bitflags 1.3.2", "crossterm_winapi", "libc", + "mio", "parking_lot 0.12.1", + "signal-hook", + "signal-hook-mio", "winapi", ] @@ -1022,9 +1047,9 @@ dependencies = [ [[package]] name = "crypto-bigint" -version = "0.5.5" +version = "0.5.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0dc92fb57ca44df6db8059111ab3af99a63d5d0f8375d9972e319a379c6bab76" +checksum = "cf4c2f4e1afd912bc40bfd6fed5d9dc1f288e0ba01bfcc835cc5bc3eb13efe15" dependencies = [ "generic-array", "rand_core 0.6.4", @@ -1061,33 +1086,6 @@ dependencies = [ "zeroize", ] -[[package]] -name = "curve25519-dalek" -version = "4.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e89b8c6a2e4b1f45971ad09761aafb85514a84744b67a95e32c3cc1352d1f65c" -dependencies = [ - "cfg-if", - "cpufeatures", - "curve25519-dalek-derive", - "digest 0.10.7", - "fiat-crypto", - "platforms", - "rustc_version", - "subtle", -] - -[[package]] -name = "curve25519-dalek-derive" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"f46882e17999c6cc590af592290432be3bce0428cb0d5f8b6715e4dc7b383eb3" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.40", -] - [[package]] name = "darling" version = "0.14.4" @@ -1133,7 +1131,7 @@ dependencies = [ "proc-macro2", "quote", "strsim", - "syn 2.0.40", + "syn 2.0.29", ] [[package]] @@ -1155,7 +1153,7 @@ checksum = "836a9bbc7ad63342d6d6e7b815ccab164bc77a2d95d84bc3117a8c0d5c98e2d5" dependencies = [ "darling_core 0.20.3", "quote", - "syn 2.0.40", + "syn 2.0.29", ] [[package]] @@ -1167,27 +1165,27 @@ dependencies = [ "anyhow", "html-escape", "nom", - "ordered-float 2.10.1", + "ordered-float 2.10.0", ] [[package]] name = "dashmap" -version = "5.5.3" +version = "5.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "978747c1d849a7d2ee5e8adc0159961c48fb7e5db2f06af6723b80123bb53856" +checksum = "edd72493923899c6f10c641bdbdeddc7183d6396641d99c1a0d1597f37f92e28" dependencies = [ "cfg-if", - "hashbrown 0.14.3", + "hashbrown 0.14.0", "lock_api", "once_cell", - "parking_lot_core 0.9.9", + "parking_lot_core 0.9.8", ] [[package]] name = "data-encoding" -version = "2.5.0" +version = "2.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7e962a19be5cfc3f3bf6dd8f61eb50107f356ad6270fbb3ed41476571db78be5" +checksum = "c2e66c9d817f1720209181c316d28635c050fa304f9c79e47a520882661b7308" [[package]] name = "deadpool" @@ -1216,9 +1214,9 @@ dependencies = [ [[package]] name = "deadpool-runtime" -version = "0.1.3" +version = "0.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "63dfa964fe2a66f3fde91fc70b267fe193d822c7e603e2a675a49a7f46ad3f49" +checksum = "eaa37046cc0f6c3cc6090fbdbf73ef0b8ef4cfcc37f6befc0020f63e8cf121e1" dependencies = [ "tokio", ] @@ -1256,11 +1254,10 @@ dependencies = [ [[package]] name = "deranged" -version = "0.3.10" +version = "0.3.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"8eb30d70a07a3b04884d2677f06bec33509dc67ca60d92949e5535352d3191dc" +checksum = "f2696e8a945f658fd14dc3b87242e6b80cd0f36ff04ea560fa39082368847946" dependencies = [ - "powerfmt", "serde", ] @@ -1390,7 +1387,7 @@ dependencies = [ "asynchronous-codec", "base64 0.13.1", "byteorder", - "bytes 1.5.0", + "bytes 1.4.0", "chrono", "containers-api 0.9.0", "docker-api-stubs", @@ -1425,22 +1422,22 @@ checksum = "1aaf95b3e5c8f23aa320147307562d361db0ae0d51242340f558153b4eb2439b" [[package]] name = "dyn-clone" -version = "1.0.16" +version = "1.0.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "545b22097d44f8a9581187cdf93de7a71e4722bf51200cfaba810865b49a495d" +checksum = "bbfc4744c1b8f2a09adc0e55242f60b1af195d88596bd8700be74418c056c555" [[package]] name = "ecdsa" -version = "0.16.9" +version = "0.16.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ee27f32b5c5292967d2d4a9d7f1e0b0aed2c15daded5a60300e4abb9d8020bca" +checksum = "a4b1e0c257a9e9f25f90ff76d7a68360ed497ee519c8e428d1825ef0000799d4" dependencies = [ "der 0.7.8", "digest 0.10.7", "elliptic-curve", "rfc6979", - "signature 2.2.0", - "spki 0.7.3", + "signature 2.1.0", + "spki 0.7.2", ] [[package]] @@ -1452,15 +1449,6 @@ dependencies = [ "signature 1.6.4", ] -[[package]] -name = "ed25519" -version = "2.2.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "115531babc129696a58c64a4fef0a8bf9e9698629fb97e9e40767d235cfbcd53" -dependencies = [ - "signature 2.2.0", -] - [[package]] name = "ed25519-compact" version = "2.0.4" @@ -1468,7 +1456,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6a3d382e8464107391c8706b4c14b087808ecb909f6c15c34114bc42e53a9e4c" dependencies = [ "ct-codecs", - "getrandom 0.2.11", + "getrandom 0.2.10", ] [[package]] @@ -1477,30 +1465,17 @@ version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"c762bae6dcaf24c4c84667b8579785430908723d5c889f469d76a41d59cc7a9d" dependencies = [ - "curve25519-dalek 3.2.0", - "ed25519 1.5.3", + "curve25519-dalek", + "ed25519", "sha2 0.9.9", "zeroize", ] -[[package]] -name = "ed25519-dalek" -version = "2.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1f628eaec48bfd21b865dc2950cfa014450c01d2fa2b69a86c2fd5844ec523c0" -dependencies = [ - "curve25519-dalek 4.1.1", - "ed25519 2.2.3", - "sha2 0.10.8", - "signature 2.2.0", - "subtle", -] - [[package]] name = "educe" -version = "0.4.23" +version = "0.4.22" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0f0042ff8246a363dbe77d2ceedb073339e85a804b9a47636c6e016a9a32c05f" +checksum = "079044df30bb07de7d846d41a184c4b00e66ebdac93ee459253474f3a47e50ae" dependencies = [ "enum-ordinalize", "proc-macro2", @@ -1516,9 +1491,9 @@ checksum = "a26ae43d7bcc3b814de94796a5e736d4029efb0ee900c12e2d54c993ad1a1e07" [[package]] name = "elliptic-curve" -version = "0.13.8" +version = "0.13.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b5e6043086bf7973472e0c7dff2142ea0b680d30e18d9cc40f267efbf222bd47" +checksum = "968405c8fdc9b3bf4df0a6638858cc0b52462836ab6b1c87377785dd09cf1c0b" dependencies = [ "base16ct", "crypto-bigint", @@ -1535,6 +1510,12 @@ dependencies = [ "zeroize", ] +[[package]] +name = "embedded-io" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ef1a6892d9eef45c8fa6b9e0086428a2cca8491aca8f787c534a3d6d0bcb3ced" + [[package]] name = "encode_unicode" version = "0.3.6" @@ -1552,15 +1533,15 @@ dependencies = [ [[package]] name = "enum-ordinalize" -version = "3.1.15" +version = "3.1.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1bf1fa3f06bbff1ea5b1a9c7b14aa992a39657db60a2759457328d7e058f49ee" +checksum = "e4f76552f53cefc9a7f64987c3701b99d982f7690606fd67de1d09712fbf52f1" dependencies = [ "num-bigint", "num-traits", 
"proc-macro2", "quote", - "syn 2.0.40", + "syn 2.0.29", ] [[package]] @@ -1571,12 +1552,23 @@ checksum = "5443807d6dff69373d433ab9ef5378ad8df50ca6298caf15de6e52e24aaf54d5" [[package]] name = "errno" -version = "0.3.8" +version = "0.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a258e46cdc063eb8519c00b9fc845fc47bcfca4130e2f08e88665ceda8474245" +checksum = "6b30f669a7961ef1631673d2766cc92f52d64f7ef354d4fe0ddfd30ed52f0f4f" dependencies = [ + "errno-dragonfly", + "libc", + "windows-sys 0.48.0", +] + +[[package]] +name = "errno-dragonfly" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "aa68f1b12764fab894d2755d2518754e71b4fd80ecfb822714a1206c2aab39bf" +dependencies = [ + "cc", "libc", - "windows-sys 0.52.0", ] [[package]] @@ -1587,9 +1579,9 @@ checksum = "0206175f82b8d6bf6652ff7d71a1e27fd2e4efde587fd368662814d6ec1d9ce0" [[package]] name = "eyre" -version = "0.6.10" +version = "0.6.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8bbb8258be8305fb0237d7b295f47bb24ff1b136a535f473baf40e70468515aa" +checksum = "4c2b6b5a29c02cdc822728b7d7b8ae1bab3e3b05d44522770ddd49722eeac7eb" dependencies = [ "indenter", "once_cell", @@ -1612,9 +1604,9 @@ dependencies = [ [[package]] name = "fastrand" -version = "2.0.1" +version = "2.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "25cbce373ec4653f1a01a31e8a5e5ec0c622dc27ff9c4e6606eefef5cbbed4a5" +checksum = "6999dc1837253364c2ebb0704ba97994bd874e8f195d665c50b7548f6ea92764" [[package]] name = "ff" @@ -1626,30 +1618,18 @@ dependencies = [ "subtle", ] -[[package]] -name = "fiat-crypto" -version = "0.2.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "27573eac26f4dd11e2b1916c3fe1baa56407c83c71a773a8ba17ec0bca03b6b7" - [[package]] name = "filetime" -version = "0.2.23" +version = "0.2.22" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"1ee447700ac8aa0b2f2bd7bc4462ad686ba06baa6727ac149a2d6277f0d240fd" +checksum = "d4029edd3e734da6fe05b6cd7bd2960760a616bd2ddd0d59a0124746d6272af0" dependencies = [ "cfg-if", "libc", - "redox_syscall 0.4.1", - "windows-sys 0.52.0", + "redox_syscall 0.3.5", + "windows-sys 0.48.0", ] -[[package]] -name = "finl_unicode" -version = "1.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8fcfdc7a0362c9f4444381a9e697c79d435fe65b52a37466fc2c1184cee9edc6" - [[package]] name = "fixedbitset" version = "0.4.2" @@ -1658,9 +1638,9 @@ checksum = "0ce7134b9999ecaf8bcd65542e436736ef32ddca1b3e06094cb6ec5755203b80" [[package]] name = "flate2" -version = "1.0.28" +version = "1.0.27" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "46303f565772937ffe1d394a4fac6f411c6013172fadde9dcdb1e147a086940e" +checksum = "c6c98ee8095e9d1dcbf2fcc6d95acccb90d1c81db1e44725c6a984b1dbdfb010" dependencies = [ "crc32fast", "miniz_oxide", @@ -1674,9 +1654,9 @@ checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1" [[package]] name = "form_urlencoded" -version = "1.2.1" +version = "1.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e13624c2627564efccf4934284bdd98cbaa14e79b0b5a141218e507b3a823456" +checksum = "a62bc1cf6f830c2ec14a513a9fb124d0a213a629668a4186f329db21fe045652" dependencies = [ "percent-encoding", ] @@ -1689,9 +1669,9 @@ checksum = "e6d5a32815ae3f33302d95fdcb2ce17862f8c65363dcfd29360480ba1001fc9c" [[package]] name = "futures" -version = "0.3.29" +version = "0.3.28" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "da0290714b38af9b4a7b094b8a37086d1b4e61f2df9122c3cad2577669145335" +checksum = "23342abe12aba583913b2e62f22225ff9c950774065e4bfb61a19cd9770fec40" dependencies = [ "futures-channel", "futures-core", @@ -1720,9 +1700,9 @@ checksum = "eb1d22c66e66d9d72e1758f0bd7d4fd0bee04cad842ee34587d68c07e45d088c" [[package]] name = "futures-executor" -version = 
"0.3.29" +version = "0.3.28" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0f4fb8693db0cf099eadcca0efe2a5a22e4550f98ed16aba6c48700da29597bc" +checksum = "ccecee823288125bd88b4d7f565c9e58e41858e47ab72e8ea2d64e93624386e0" dependencies = [ "futures-core", "futures-task", @@ -1769,7 +1749,7 @@ checksum = "53b153fd91e4b0147f4aced87be237c98248656bb01050b96bf3ee89220a8ddb" dependencies = [ "proc-macro2", "quote", - "syn 2.0.40", + "syn 2.0.29", ] [[package]] @@ -1840,9 +1820,9 @@ dependencies = [ [[package]] name = "getrandom" -version = "0.2.11" +version = "0.2.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fe9006bed769170c11f845cf00c7c1e9092aeb3f268e007c3e760ac68008070f" +checksum = "be4136b2a15dd319360be1c07d9933517ccf0be8f16bf62a3bee4f0d618df427" dependencies = [ "cfg-if", "libc", @@ -1851,21 +1831,21 @@ dependencies = [ [[package]] name = "gimli" -version = "0.28.1" +version = "0.28.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4271d37baee1b8c7e4b708028c57d816cf9d2434acb33a549475f78c181f6253" +checksum = "6fb8d784f27acf97159b40fc4db5ecd8aa23b9ad5ef69cdd136d3bc80665f0c0" [[package]] name = "globset" -version = "0.4.14" +version = "0.4.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "57da3b9b5b85bd66f31093f8c408b90a74431672542466497dcbdfdc02034be1" +checksum = "759c97c1e17c55525b57192c06a267cda0ac5210b222d6b82189a2338fa1c13d" dependencies = [ "aho-corasick", "bstr", + "fnv", "log", - "regex-automata 0.4.3", - "regex-syntax 0.8.2", + "regex", ] [[package]] @@ -1881,17 +1861,17 @@ dependencies = [ [[package]] name = "h2" -version = "0.3.22" +version = "0.3.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4d6250322ef6e60f93f9a2162799302cd6f68f79f6e5d85c8c16f14d1d958178" +checksum = "91fc23aa11be92976ef4729127f1a74adf36d8436f7816b185d18df956790833" dependencies = [ - "bytes 1.5.0", + "bytes 1.4.0", "fnv", 
"futures-core", "futures-sink", "futures-util", "http", - "indexmap 2.1.0", + "indexmap 1.9.3", "slab", "tokio", "tokio-util", @@ -1904,32 +1884,64 @@ version = "1.8.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "eabb4a44450da02c90444cf74558da904edde8fb4e9035a9a6a4e15445af0bd7" +[[package]] +name = "hash32" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b0c35f58762feb77d74ebe43bdbc3210f09be9fe6742234d573bacc26ed92b67" +dependencies = [ + "byteorder", +] + [[package]] name = "hashbrown" version = "0.12.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8a9ee70c43aaf417c914396645a0fa852624801b24ebb7ae78fe8272889ac888" dependencies = [ - "ahash 0.7.7", + "ahash 0.7.6", +] + +[[package]] +name = "hashbrown" +version = "0.13.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "43a3c133739dddd0d2990f9a4bdf8eb4b21ef50e4851ca85ab661199821d510e" +dependencies = [ + "ahash 0.8.3", ] [[package]] name = "hashbrown" -version = "0.14.3" +version = "0.14.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "290f1a1d9242c78d09ce40a5e87e7554ee637af1351968159f4952f028f75604" +checksum = "2c6201b9ff9fd90a5a3bac2e56a830d0caa509576f0e503818ee82c181b3437a" dependencies = [ - "ahash 0.8.6", + "ahash 0.8.3", "allocator-api2", ] [[package]] name = "hashlink" -version = "0.8.4" +version = "0.8.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "312f66718a2d7789ffef4f4b7b213138ed9f1eb3aa1d0d82fc99f88fb3ffd26f" +dependencies = [ + "hashbrown 0.14.0", +] + +[[package]] +name = "heapless" +version = "0.7.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e8094feaf31ff591f651a2664fb9cfd92bba7a60ce3197265e9482ebe753c8f7" +checksum = "cdc6457c0eb62c71aac4bc17216026d8410337c4126773b9c5daba343f17964f" dependencies = [ - "hashbrown 0.14.3", + "atomic-polyfill", + "hash32", + 
"rustc_version", + "serde", + "spin 0.9.8", + "stable_deref_trait", ] [[package]] @@ -1952,9 +1964,9 @@ dependencies = [ [[package]] name = "hermit-abi" -version = "0.3.3" +version = "0.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d77f7ec81a6d05a3abb01ab6eb7590f6083d08449fe5a1c8b1e620283546ccb7" +checksum = "443144c8cdadd93ebf52ddb4056d257f5b52c04d3c804e657d19eb73fc33668b" [[package]] name = "hex" @@ -2004,15 +2016,6 @@ dependencies = [ "digest 0.10.7", ] -[[package]] -name = "home" -version = "0.5.5" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5444c27eef6923071f7ebcc33e3444508466a76f7a2b93da00ed6e19f30c1ddb" -dependencies = [ - "windows-sys 0.48.0", -] - [[package]] name = "html-escape" version = "0.2.13" @@ -2024,22 +2027,22 @@ dependencies = [ [[package]] name = "http" -version = "0.2.11" +version = "0.2.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8947b1a6fad4393052c7ba1f4cd97bed3e953a95c79c92ad9b051a04611d9fbb" +checksum = "bd6effc99afb63425aff9b05836f029929e345a6148a14b7ecd5ab67af944482" dependencies = [ - "bytes 1.5.0", + "bytes 1.4.0", "fnv", "itoa", ] [[package]] name = "http-body" -version = "0.4.6" +version = "0.4.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7ceab25649e9960c0311ea418d17bee82c0dcec1bd053b5f9a66e265a693bed2" +checksum = "d5f38f16d184e36f2408a55281cd658ecbd3ca05cce6d6510a176eca393e26d1" dependencies = [ - "bytes 1.5.0", + "bytes 1.4.0", "http", "pin-project-lite", ] @@ -2068,7 +2071,7 @@ version = "0.14.27" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ffb1cfd654a8219eaef89881fdb3bb3b1cdc5fa75ded05d6933b2b382e395468" dependencies = [ - "bytes 1.5.0", + "bytes 1.4.0", "futures-channel", "futures-core", "futures-util", @@ -2079,7 +2082,7 @@ dependencies = [ "httpdate", "itoa", "pin-project-lite", - "socket2 0.4.10", + "socket2 0.4.9", "tokio", "tower-service", 
"tracing", @@ -2088,14 +2091,14 @@ dependencies = [ [[package]] name = "hyper-rustls" -version = "0.24.2" +version = "0.24.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ec3efd23720e2049821a693cbc7e65ea87c72f1c58ff2f9522ff332b1491e590" +checksum = "8d78e1e73ec14cf7375674f74d7dde185c8206fd9dea6fb6295e8a98098aaa97" dependencies = [ "futures-util", "http", "hyper", - "rustls 0.21.10", + "rustls 0.21.6", "tokio", "tokio-rustls 0.24.1", ] @@ -2126,16 +2129,16 @@ dependencies = [ [[package]] name = "iana-time-zone" -version = "0.1.58" +version = "0.1.57" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8326b86b6cff230b97d0d312a6c40a60726df3332e721f72a1b035f451663b20" +checksum = "2fad5b825842d2b38bd206f3e81d6957625fd7f0a361e345c30e01a0ae2dd613" dependencies = [ "android_system_properties", "core-foundation-sys", "iana-time-zone-haiku", "js-sys", "wasm-bindgen", - "windows-core", + "windows", ] [[package]] @@ -2155,9 +2158,9 @@ checksum = "b9e0384b61958566e926dc50660321d12159025e767c18e043daf26b70104c39" [[package]] name = "idna" -version = "0.5.0" +version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "634d9b1461af396cad843f47fdba5597a4f9e6ddd4bfb6ff5d85028c25cb12f6" +checksum = "7d20d6b07bfbc108882d88ed8e37d39636dcc260e15e30c45e6ba089610b917c" dependencies = [ "unicode-bidi", "unicode-normalization", @@ -2174,22 +2177,23 @@ dependencies = [ "quote", "serde", "syn 1.0.109", - "toml 0.7.8", + "toml 0.7.6", "unicode-xid", ] [[package]] name = "ignore" -version = "0.4.21" +version = "0.4.20" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "747ad1b4ae841a78e8aba0d63adbfbeaea26b517b63705d47856b73015d27060" +checksum = "dbe7873dab538a9a44ad79ede1faf5f30d49f9a5c883ddbab48bce81b64b7492" dependencies = [ - "crossbeam-deque", "globset", + "lazy_static", "log", "memchr", - "regex-automata 0.4.3", + "regex", "same-file", + "thread_local", "walkdir", 
"winapi-util", ] @@ -2213,20 +2217,20 @@ dependencies = [ [[package]] name = "indexmap" -version = "2.1.0" +version = "2.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d530e1a18b1cb4c484e6e34556a0d948706958449fca0cab753d649f2bce3d1f" +checksum = "d5477fe2230a79769d8dc68e0eabf5437907c0457a5614a9e8dddb67f65eb65d" dependencies = [ "equivalent", - "hashbrown 0.14.3", + "hashbrown 0.14.0", "serde", ] [[package]] name = "indicatif" -version = "0.17.7" +version = "0.17.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fb28741c9db9a713d93deb3bb9515c20788cef5815265bee4980e87bde7e0f25" +checksum = "0b297dc40733f23a0e52728a58fa9489a5b7638a324932de16b41adc3ef80730" dependencies = [ "console", "instant", @@ -2237,9 +2241,9 @@ dependencies = [ [[package]] name = "indoc" -version = "2.0.4" +version = "2.0.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1e186cfbae8084e513daff4240b4797e342f988cecda4fb6c939150f96315fd8" +checksum = "2c785eefb63ebd0e33416dfcb8d6da0bf27ce752843a45632a67bf10d4d4b5c4" [[package]] name = "inquire" @@ -2266,11 +2270,22 @@ dependencies = [ "cfg-if", ] +[[package]] +name = "io-lifetimes" +version = "1.0.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "eae7b9aee968036d54dce06cebaefd919e4472e753296daccd6d344e3e2df0c2" +dependencies = [ + "hermit-abi", + "libc", + "windows-sys 0.48.0", +] + [[package]] name = "ipnet" -version = "2.9.0" +version = "2.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8f518f335dce6725a761382244631d86cf0ccb2863413590b31338feb467f9c3" +checksum = "28b29a3cd74f0f4598934efe3aeba42bae0eb4680554128851ebbecb02af14e6" [[package]] name = "is-docker" @@ -2281,6 +2296,17 @@ dependencies = [ "once_cell", ] +[[package]] +name = "is-terminal" +version = "0.4.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"cb0889898416213fab133e1d33a0e5858a48177452750691bde3666d0fdbaf8b" +dependencies = [ + "hermit-abi", + "rustix 0.38.8", + "windows-sys 0.48.0", +] + [[package]] name = "is-wsl" version = "0.4.0" @@ -2300,35 +2326,26 @@ dependencies = [ "either", ] -[[package]] -name = "itertools" -version = "0.12.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "25db6b064527c5d482d0423354fcd07a89a2dfe07b67892e62411946db7f07b0" -dependencies = [ - "either", -] - [[package]] name = "itoa" -version = "1.0.10" +version = "1.0.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b1a46d1a171d865aa5f83f92695765caa047a9b4cbae2cbf37dbd613a793fd4c" +checksum = "af150ab688ff2122fcef229be89cb50dd66af9e01a4ff320cc137eecc9bacc38" [[package]] name = "js-sys" -version = "0.3.66" +version = "0.3.64" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cee9c64da59eae3b50095c18d3e74f8b73c0b86d2792824ff01bbce68ba229ca" +checksum = "c5f195fe497f702db0f318b07fdd68edb16955aed830df8363d837542f8f935a" dependencies = [ "wasm-bindgen", ] [[package]] name = "jwt-simple" -version = "0.11.9" +version = "0.11.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "357892bb32159d763abdea50733fadcb9a8e1c319a9aa77592db8555d05af83e" +checksum = "733741e7bcd1532b56c9ba6c698c069f274f3782ad956f0d2c7f31650cedaa1b" dependencies = [ "anyhow", "binstring", @@ -2352,16 +2369,16 @@ dependencies = [ [[package]] name = "k256" -version = "0.13.2" +version = "0.13.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3f01b677d82ef7a676aa37e099defd83a28e15687112cafdd112d60236b6115b" +checksum = "cadb76004ed8e97623117f3df85b17aaa6626ab0b0831e6573f104df16cd1bcc" dependencies = [ "cfg-if", "ecdsa", "elliptic-curve", "once_cell", - "sha2 0.10.8", - "signature 2.2.0", + "sha2 0.10.7", + "signature 2.1.0", ] [[package]] @@ -2384,26 +2401,15 @@ dependencies = [ [[package]] name = "libc" -version = 
"0.2.151" +version = "0.2.147" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "302d7ab3130588088d277783b1e2d2e10c9e9e4a16dd9050e6ec93fb3e7048f4" +checksum = "b4668fb0ea861c1df094127ac5f1da3409a82116a4ba74fca2e58ef927159bb3" [[package]] name = "libm" -version = "0.2.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4ec2a862134d2a7d32d7983ddcdd1c4923530833c9f2ea1a44fc5fa473989058" - -[[package]] -name = "libredox" -version = "0.0.1" +version = "0.2.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "85c833ca1e66078851dba29046874e38f08b2c883700aa29a03ddd3b23814ee8" -dependencies = [ - "bitflags 2.4.1", - "libc", - "redox_syscall 0.4.1", -] +checksum = "f7012b1bbb0719e1097c47611d3898568c546d597c2e74d66f6087edd5233ff4" [[package]] name = "libsodium-sys" @@ -2419,15 +2425,21 @@ dependencies = [ [[package]] name = "linux-raw-sys" -version = "0.4.12" +version = "0.3.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c4cd1a83af159aa67994778be9070f0ae1bd732942279cabb14f86f986a21456" +checksum = "ef53942eb7bf7ff43a617b3e2c1c4a5ecf5944a7c1bc12d7ee39bbb15e5c1519" [[package]] -name = "lock_api" -version = "0.4.11" +name = "linux-raw-sys" +version = "0.4.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3c168f8615b12bc01f9c17e2eb0cc07dcae1940121185446edc3744920e8ef45" +checksum = "57bcfdad1b858c2db7c38303a6d2ad4dfaf5eb53dfeb0910128b2c26d6158503" + +[[package]] +name = "lock_api" +version = "0.4.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c1cc9717a20b1bb222f333e6a92fd32f7d8a18ddc5a3191a11af45dcbf4dcd16" dependencies = [ "autocfg", "scopeguard", @@ -2450,9 +2462,9 @@ dependencies = [ [[package]] name = "matchit" -version = "0.7.3" +version = "0.7.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0e7465ac9959cc2b1404e8e2367b43684a6d13790fe23056cc8c6c5a6b7bcb94" 
+checksum = "ed1202b2a6f884ae56f04cff409ab315c5ce26b5e58d7412e484f01fd52f52ef" [[package]] name = "maybe-async" @@ -2467,11 +2479,10 @@ dependencies = [ [[package]] name = "md-5" -version = "0.10.6" +version = "0.10.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d89e7ee0cfbedfc4da3340218492196241d89eefb6dab27de5df917a6d2e78cf" +checksum = "6365506850d44bff6e2fbcb5176cf63650e48bd45ef2fe2665ae1570e0f4b9ca" dependencies = [ - "cfg-if", "digest 0.10.7", ] @@ -2483,9 +2494,9 @@ checksum = "490cc448043f947bae3cbee9c203358d62dbee0db12107a74be5c30ccfd09771" [[package]] name = "memchr" -version = "2.6.4" +version = "2.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f665ee40bc4a3c5590afb1e9677db74a508659dfd71e126420da8274909a0167" +checksum = "2dffe52ecf27772e601905b7522cb4ef790d2cc203488bbd0e2fe85fcb74566d" [[package]] name = "memmap2" @@ -2515,15 +2526,6 @@ dependencies = [ "autocfg", ] -[[package]] -name = "memoffset" -version = "0.9.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5a634b1c61a95585bd15607c6ab0c4e5b226e695ff2800ba0cdccddf208c406c" -dependencies = [ - "autocfg", -] - [[package]] name = "mime" version = "0.3.17" @@ -2557,9 +2559,9 @@ dependencies = [ [[package]] name = "mio" -version = "0.8.10" +version = "0.8.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8f3d0b296e374a4e6f3c7b0a1f5a51d748a0d34c85e7dc48fc3fa9a87657fe09" +checksum = "927a765cd3fc26206e66b296465fa9d3e5ab003e651c1b3c060e7956d96b19d2" dependencies = [ "libc", "log", @@ -2573,7 +2575,7 @@ version = "2.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "01acbdc23469fd8fe07ab135923371d5f5a422fbf9c522158677c8eb15bc51c2" dependencies = [ - "bytes 1.5.0", + "bytes 1.4.0", "encoding_rs", "futures-util", "http", @@ -2623,15 +2625,16 @@ dependencies = [ [[package]] name = "nix" -version = "0.26.4" +version = "0.26.2" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "598beaf3cc6fdd9a5dfb1630c2800c7acd31df7aaf0f565796fba2b53ca1af1b" +checksum = "bfdda3d196821d6af13126e40375cdf7da646a96114af134d5f417a9a1dc8e1a" dependencies = [ "bitflags 1.3.2", "cfg-if", "libc", "memoffset 0.7.1", "pin-utils", + "static_assertions", ] [[package]] @@ -2642,27 +2645,27 @@ checksum = "0e66a7cd1358277b2a6f77078e70aea7315ff2f20db969cc61153103ec162594" dependencies = [ "byteorder", "data-encoding", - "ed25519-dalek 1.0.1", - "getrandom 0.2.11", + "ed25519-dalek", + "getrandom 0.2.10", "log", "rand 0.8.5", - "signatory 0.23.2", + "signatory", ] [[package]] name = "nkeys" -version = "0.3.2" +version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "aad178aad32087b19042ee36dfd450b73f5f934fbfb058b59b198684dfec4c47" +checksum = "3e9261eb915c785ea65708bc45ef43507ea46914e1a73f1412d1a38aba967c8e" dependencies = [ "byteorder", "data-encoding", - "ed25519 2.2.3", - "ed25519-dalek 2.1.0", - "getrandom 0.2.11", + "ed25519", + "ed25519-dalek", + "getrandom 0.2.10", "log", "rand 0.8.5", - "signatory 0.27.1", + "signatory", ] [[package]] @@ -2746,9 +2749,9 @@ dependencies = [ [[package]] name = "num-traits" -version = "0.2.17" +version = "0.2.16" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "39e3200413f237f41ab11ad6d161bc7239c84dcb631773ccd7de3dfe4b5c267c" +checksum = "f30b0abd723be7e2ffca1272140fac1a2f084c77ec3e123c192b66af1ee9e6c2" dependencies = [ "autocfg", "libm", @@ -2782,7 +2785,7 @@ dependencies = [ "proc-macro-crate 1.3.1", "proc-macro2", "quote", - "syn 2.0.40", + "syn 2.0.29", ] [[package]] @@ -2793,18 +2796,18 @@ checksum = "830b246a0e5f20af87141b25c173cd1b609bd7779a4617d6ec582abaf90870f3" [[package]] name = "object" -version = "0.32.1" +version = "0.32.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9cf5f9dd3933bd50a9e1f149ec995f39ae2c496d31fd772c1fd45ebc27e902b0" +checksum = 
"77ac5bbd07aea88c60a577a1ce218075ffd59208b2d7ca97adf9bfc5aeb21ebe" dependencies = [ "memchr", ] [[package]] name = "once_cell" -version = "1.19.0" +version = "1.18.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3fdb12b2476b595f9358c5161aa467c2438859caa136dec86c26fdd2efe17b92" +checksum = "dd8b5dd2ae5ed71462c540258bedcb51965123ad7e7ccf4b9a8cafaa4a63576d" [[package]] name = "opaque-debug" @@ -2814,9 +2817,9 @@ checksum = "624a8340c38c1b80fd549087862da4ba43e08858af025b236e509b6649fc13d5" [[package]] name = "open" -version = "5.0.1" +version = "5.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "90878fb664448b54c4e592455ad02831e23a3f7e157374a8b95654731aac7349" +checksum = "cfabf1927dce4d6fdf563d63328a0a506101ced3ec780ca2135747336c98cef8" dependencies = [ "is-wsl", "libc", @@ -2926,9 +2929,9 @@ checksum = "04744f49eae99ab78e0d5c0b603ab218f515ea8cfe5a456d7629ad883a3b6e7d" [[package]] name = "ordered-float" -version = "2.10.1" +version = "2.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "68f19d67e5a2795c94e73e0bb1cc1a7edeb2e28efd39e2e1c9b7a40c1108b11c" +checksum = "7940cf2ca942593318d07fcf2596cdca60a85c9e7fab408a5e21a4f9dcd40d87" dependencies = [ "num-traits", ] @@ -2996,7 +2999,7 @@ dependencies = [ "ecdsa", "elliptic-curve", "primeorder", - "sha2 0.10.8", + "sha2 0.10.7", ] [[package]] @@ -3008,14 +3011,14 @@ dependencies = [ "ecdsa", "elliptic-curve", "primeorder", - "sha2 0.10.8", + "sha2 0.10.7", ] [[package]] name = "parking" -version = "2.2.0" +version = "2.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bb813b8af86854136c6922af0598d719255ecb2179515e6e7730d468f05c9cae" +checksum = "14f2252c834a40ed9bb5422029649578e63aa341ac401f74e719dd1afda8394e" [[package]] name = "parking_lot" @@ -3035,7 +3038,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"3742b2c103b9f06bc9fff0a37ff4912935851bee6d36f3c02bcc755bcfec228f" dependencies = [ "lock_api", - "parking_lot_core 0.9.9", + "parking_lot_core 0.9.8", ] [[package]] @@ -3054,13 +3057,13 @@ dependencies = [ [[package]] name = "parking_lot_core" -version = "0.9.9" +version = "0.9.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4c42a9226546d68acdd9c0a280d17ce19bfe27a46bf68784e4066115788d008e" +checksum = "93f00c865fe7cabf650081affecd3871070f26767e7b2070a3ffae14c654b447" dependencies = [ "cfg-if", "libc", - "redox_syscall 0.4.1", + "redox_syscall 0.3.5", "smallvec", "windows-targets 0.48.5", ] @@ -3106,9 +3109,9 @@ dependencies = [ [[package]] name = "percent-encoding" -version = "2.3.1" +version = "2.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e3148f5046208a5d56bcfc03053e3ca6334e51da8dfb19b6cdc8b306fae3283e" +checksum = "9b2a4787296e9989611394c33f193f676704af1686e70b8f8033ab5ba9a35a94" [[package]] name = "petgraph" @@ -3117,7 +3120,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e1d3afd2628e69da2be385eb6f2fd57c8ac7977ceeff6dc166ff1657b0e386a9" dependencies = [ "fixedbitset", - "indexmap 2.1.0", + "indexmap 2.0.0", "serde", "serde_derive", ] @@ -3137,7 +3140,7 @@ version = "0.11.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "90fcb95eef784c2ac79119d1dd819e162b5da872ce6f3c3abe1e8ca1c082f72b" dependencies = [ - "siphasher 0.3.11", + "siphasher", ] [[package]] @@ -3177,14 +3180,14 @@ checksum = "4359fd9c9171ec6e8c62926d6faaf553a8dc3f64e1507e76da7911b4f6a04405" dependencies = [ "proc-macro2", "quote", - "syn 2.0.40", + "syn 2.0.29", ] [[package]] name = "pin-project-lite" -version = "0.2.13" +version = "0.2.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8afb450f006bf6385ca15ef45d71d2288452bc3683ce2e2cacc0d18e4be60b58" +checksum = "12cc1b0bf1727a77a54b6654e7b5f1af8604923edc8b81885f8ec92f9e3f0a05" 
[[package]] name = "pin-utils" @@ -3233,7 +3236,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f950b2377845cebe5cf8b5165cb3cc1a5e0fa5cfa3e1f7f55707d8fd82e0a7b7" dependencies = [ "der 0.7.8", - "spki 0.7.3", + "spki 0.7.2", ] [[package]] @@ -3242,12 +3245,6 @@ version = "0.3.27" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "26072860ba924cbfa98ea39c8c19b4dd6a4a25423dbdf219c1eca91aa0cf6964" -[[package]] -name = "platforms" -version = "3.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "14e6ab3f592e6fb464fc9712d8d6e6912de6473954635fd76a589d832cffcbb0" - [[package]] name = "podman-api" version = "0.10.0" @@ -3256,7 +3253,7 @@ checksum = "4d0ade207138f12695cb4be3b590283f1cf764c5c4909f39966c4b4b0dba7c1e" dependencies = [ "base64 0.13.1", "byteorder", - "bytes 1.5.0", + "bytes 1.4.0", "chrono", "containers-api 0.8.0", "flate2", @@ -3286,9 +3283,21 @@ dependencies = [ [[package]] name = "portable-atomic" -version = "1.6.0" +version = "1.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f32154ba0af3a075eefa1eda8bb414ee928f62303a54ea85b8d6638ff1a6ee9e" + +[[package]] +name = "postcard" +version = "1.0.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7170ef9988bc169ba16dd36a7fa041e5c4cbeb6a35b76d4c03daded371eae7c0" +checksum = "a55c51ee6c0db07e68448e336cf8ea4131a620edefebf9893e759b2d793420f8" +dependencies = [ + "cobs", + "embedded-io", + "heapless", + "serde", +] [[package]] name = "postgres-derive" @@ -3299,7 +3308,7 @@ dependencies = [ "heck 0.4.1", "proc-macro2", "quote", - "syn 2.0.40", + "syn 2.0.29", ] [[package]] @@ -3308,15 +3317,15 @@ version = "0.6.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "49b6c5ef183cd3ab4ba005f1ca64c21e8bd97ce4699cfea9e8d9a2c4958ca520" dependencies = [ - "base64 0.21.5", + "base64 0.21.2", "byteorder", - "bytes 1.5.0", + "bytes 1.4.0", 
"fallible-iterator", "hmac", "md-5", "memchr", "rand 0.8.5", - "sha2 0.10.8", + "sha2 0.10.7", "stringprep", ] @@ -3326,7 +3335,8 @@ version = "0.2.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8d2234cdee9408b523530a9b6d2d6b373d1db34f6a8e51dc03ded1828d7fb67c" dependencies = [ - "bytes 1.5.0", + "array-init", + "bytes 1.4.0", "chrono", "fallible-iterator", "postgres-derive", @@ -3335,12 +3345,6 @@ dependencies = [ "serde_json", ] -[[package]] -name = "powerfmt" -version = "0.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "439ee305def115ba05938db6eb1644ff94165c5ab5e9420d1c1bcedbba909391" - [[package]] name = "ppv-lite86" version = "0.2.17" @@ -3379,30 +3383,30 @@ dependencies = [ [[package]] name = "primeorder" -version = "0.13.6" +version = "0.13.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "353e1ca18966c16d9deb1c69278edbc5f194139612772bd9537af60ac231e1e6" +checksum = "3c2fcef82c0ec6eefcc179b978446c399b3cdf73c392c35604e399eee6df1ee3" dependencies = [ "elliptic-curve", ] [[package]] name = "proc-macro-crate" -version = "1.3.1" +version = "0.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7f4c021e1093a56626774e81216a4ce732a735e5bad4868a03f3ed65ca0c3919" +checksum = "1d6ea3c4595b96363c13943497db34af4460fb474a95c43f4446ad341b8c9785" dependencies = [ - "once_cell", - "toml_edit 0.19.15", + "toml 0.5.11", ] [[package]] name = "proc-macro-crate" -version = "2.0.0" +version = "1.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7e8366a6159044a37876a2b9817124296703c586a5c92e2c53751fa06d8d43e8" +checksum = "7f4c021e1093a56626774e81216a4ce732a735e5bad4868a03f3ed65ca0c3919" dependencies = [ - "toml_edit 0.20.7", + "once_cell", + "toml_edit", ] [[package]] @@ -3431,9 +3435,9 @@ dependencies = [ [[package]] name = "proc-macro2" -version = "1.0.70" +version = "1.0.66" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "39278fbbf5fb4f646ce651690877f89d1c5811a3d4acb27700c1cb3cdb78fd3b" +checksum = "18fb31db3f9bddb2ea821cde30a9f70117e3f119938b5ee630b7403aa6e2ead9" dependencies = [ "unicode-ident", ] @@ -3444,7 +3448,7 @@ version = "0.11.9" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0b82eaa1d779e9a4bc1c3217db8ffbeabaae1dca241bf70183242128d48681cd" dependencies = [ - "bytes 1.5.0", + "bytes 1.4.0", "prost-derive", ] @@ -3454,9 +3458,9 @@ version = "0.11.9" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "119533552c9a7ffacc21e099c24a0ac8bb19c2a2a3f363de84cd9b844feab270" dependencies = [ - "bytes 1.5.0", + "bytes 1.4.0", "heck 0.4.1", - "itertools 0.10.5", + "itertools", "lazy_static", "log", "multimap", @@ -3477,7 +3481,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e5d2d8d10f3c6ded6da8b05b5fb3b8a5082514344d56c9f871412d29b4e075b4" dependencies = [ "anyhow", - "itertools 0.10.5", + "itertools", "proc-macro2", "quote", "syn 1.0.109", @@ -3538,7 +3542,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "735d6cecec44dc27382b962309c05de47d40727ff9898a501ec8fadec76be9a8" dependencies = [ "async-trait", - "bytes 1.5.0", + "bytes 1.4.0", "dashmap", "futures", "pin-project 1.1.3", @@ -3633,7 +3637,7 @@ version = "0.6.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c" dependencies = [ - "getrandom 0.2.11", + "getrandom 0.2.10", ] [[package]] @@ -3656,29 +3660,29 @@ dependencies = [ [[package]] name = "redox_syscall" -version = "0.4.1" +version = "0.3.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4722d768eff46b75989dd134e5c353f0d6296e5aaa3132e776cbdb56be7731aa" +checksum = "567664f262709473930a4bf9e51bf2ebf3348f2e748ccc50dea20646858f8f29" dependencies = [ "bitflags 1.3.2", ] 
[[package]] name = "redox_users" -version = "0.4.4" +version = "0.4.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a18479200779601e498ada4e8c1e1f50e3ee19deb0259c25825a98b5603b2cb4" +checksum = "b033d837a7cf162d7993aded9304e30a83213c648b6e389db233191f891e5c2b" dependencies = [ - "getrandom 0.2.11", - "libredox", + "getrandom 0.2.10", + "redox_syscall 0.2.16", "thiserror", ] [[package]] name = "refinery" -version = "0.8.11" +version = "0.8.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "529664dbccc0a296947615c997a857912d72d1c44be1fafb7bae54ecfa7a8c24" +checksum = "cdb0436d0dd7bd8d4fce1e828751fa79742b08e35f27cfea7546f8a322b5ef24" dependencies = [ "refinery-core", "refinery-macros", @@ -3686,9 +3690,9 @@ dependencies = [ [[package]] name = "refinery-core" -version = "0.8.11" +version = "0.8.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e895cb870cf06e92318cbbeb701f274d022d5ca87a16fa8244e291cd035ef954" +checksum = "19206547cd047e8f4dfa6b20c30d3ecaf24be05841b6aa0aa926a47a3d0662bb" dependencies = [ "async-trait", "cfg-if", @@ -3696,39 +3700,39 @@ dependencies = [ "log", "regex", "serde", - "siphasher 1.0.0", + "siphasher", "thiserror", - "time", + "time 0.3.27", "tokio", "tokio-postgres", - "toml 0.7.8", + "toml 0.7.6", "url", "walkdir", ] [[package]] name = "refinery-macros" -version = "0.8.11" +version = "0.8.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "123e8b80f8010c3ae38330c81e76938fc7adf6cdbfbaad20295bb8c22718b4f1" +checksum = "d94d4b9241859ba19eaa5c04c86e782eb3aa0aae2c5868e0cfa90c856e58a174" dependencies = [ "proc-macro2", "quote", "refinery-core", "regex", - "syn 2.0.40", + "syn 2.0.29", ] [[package]] name = "regex" -version = "1.10.2" +version = "1.9.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "380b951a9c5e80ddfd6136919eef32310721aa4aacd4889a8d39124b026ab343" +checksum = 
"81bc1d4caf89fac26a70747fe603c130093b53c773888797a6329091246d651a" dependencies = [ "aho-corasick", "memchr", - "regex-automata 0.4.3", - "regex-syntax 0.8.2", + "regex-automata 0.3.6", + "regex-syntax 0.7.4", ] [[package]] @@ -3742,13 +3746,13 @@ dependencies = [ [[package]] name = "regex-automata" -version = "0.4.3" +version = "0.3.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5f804c7828047e88b2d32e2d7fe5a105da8ee3264f01902f796c8e067dc2483f" +checksum = "fed1ceff11a1dddaee50c9dc8e4938bd106e9d89ae372f192311e7da498e3b69" dependencies = [ "aho-corasick", "memchr", - "regex-syntax 0.8.2", + "regex-syntax 0.7.4", ] [[package]] @@ -3759,9 +3763,9 @@ checksum = "f162c6dd7b008981e4d40210aca20b4bd0f9b60ca9271061b07f78537722f2e1" [[package]] name = "regex-syntax" -version = "0.8.2" +version = "0.7.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c08c74e62047bb2de4ff487b251e4a92e24f48745648451635cec7d591162d9f" +checksum = "e5ea92a5b6195c6ef2a0295ea818b312502c6fc94dde986c5553242e18fd4ce2" [[package]] name = "remain" @@ -3771,26 +3775,26 @@ checksum = "bce3a7139d2ee67d07538ee5dba997364fbc243e7e7143e96eb830c74bfaa082" dependencies = [ "proc-macro2", "quote", - "syn 2.0.40", + "syn 2.0.29", ] [[package]] name = "rend" -version = "0.4.1" +version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a2571463863a6bd50c32f94402933f03457a3fbaf697a707c5be741e459f08fd" +checksum = "581008d2099240d37fb08d77ad713bcaec2c4d89d50b5b21a8bb1996bbab68ab" dependencies = [ "bytecheck", ] [[package]] name = "reqwest" -version = "0.11.22" +version = "0.11.20" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "046cd98826c46c2ac8ddecae268eb5c2e58628688a5fc7a2643704a73faba95b" +checksum = "3e9ad3fe7488d7e34558a2033d45a0c90b72d97b4f80705666fea71472e2e6a1" dependencies = [ - "base64 0.21.5", - "bytes 1.5.0", + "base64 0.21.2", + "bytes 1.4.0", "encoding_rs", "futures-core", 
"futures-util", @@ -3807,12 +3811,11 @@ dependencies = [ "once_cell", "percent-encoding", "pin-project-lite", - "rustls 0.21.10", + "rustls 0.21.6", "rustls-pemfile", "serde", "serde_json", "serde_urlencoded", - "system-configuration", "tokio", "tokio-rustls 0.24.1", "tokio-util", @@ -3822,7 +3825,7 @@ dependencies = [ "wasm-bindgen-futures", "wasm-streams", "web-sys", - "webpki-roots 0.25.3", + "webpki-roots 0.25.2", "winreg", ] @@ -3852,25 +3855,11 @@ dependencies = [ "libc", "once_cell", "spin 0.5.2", - "untrusted 0.7.1", + "untrusted", "web-sys", "winapi", ] -[[package]] -name = "ring" -version = "0.17.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "688c63d65483050968b2a8937f7995f443e27041a0f7700aa59b0822aedebb74" -dependencies = [ - "cc", - "getrandom 0.2.11", - "libc", - "spin 0.9.8", - "untrusted 0.9.0", - "windows-sys 0.48.0", -] - [[package]] name = "rkyv" version = "0.7.42" @@ -3940,7 +3929,7 @@ dependencies = [ "aws-creds", "aws-region", "base64 0.13.1", - "bytes 1.5.0", + "bytes 1.4.0", "cfg-if", "futures", "hex", @@ -3954,9 +3943,9 @@ dependencies = [ "reqwest", "serde", "serde_derive", - "sha2 0.10.8", + "sha2 0.10.7", "thiserror", - "time", + "time 0.3.27", "tokio", "tokio-stream", "url", @@ -3964,13 +3953,13 @@ dependencies = [ [[package]] name = "rust_decimal" -version = "1.33.1" +version = "1.32.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "06676aec5ccb8fc1da723cc8c0f9a46549f21ebb8753d3915c6c41db1e7f1dc4" +checksum = "a4c4216490d5a413bc6d10fa4742bd7d4955941d062c0ef873141d6b0e7b30fd" dependencies = [ "arrayvec", "borsh", - "bytes 1.5.0", + "bytes 1.4.0", "num-traits", "rand 0.8.5", "rkyv", @@ -3995,37 +3984,51 @@ dependencies = [ [[package]] name = "rustix" -version = "0.38.28" +version = "0.37.23" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "72e572a5e8ca657d7366229cdde4bd14c4eb5499a9573d4d366fe1b599daa316" +checksum = 
"4d69718bf81c6127a49dc64e44a742e8bb9213c0ff8869a22c308f84c1d4ab06" dependencies = [ - "bitflags 2.4.1", + "bitflags 1.3.2", "errno", + "io-lifetimes", "libc", - "linux-raw-sys", - "windows-sys 0.52.0", + "linux-raw-sys 0.3.8", + "windows-sys 0.48.0", +] + +[[package]] +name = "rustix" +version = "0.38.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "19ed4fa021d81c8392ce04db050a3da9a60299050b7ae1cf482d862b54a7218f" +dependencies = [ + "bitflags 2.4.0", + "errno", + "libc", + "linux-raw-sys 0.4.5", + "windows-sys 0.48.0", ] [[package]] name = "rustls" -version = "0.20.9" +version = "0.20.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1b80e3dec595989ea8510028f30c408a4630db12c9cbb8de34203b89d6577e99" +checksum = "fff78fc74d175294f4e83b28343315ffcfb114b156f0185e9741cb5570f50e2f" dependencies = [ "log", - "ring 0.16.20", + "ring", "sct", "webpki", ] [[package]] name = "rustls" -version = "0.21.10" +version = "0.21.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f9d5a6813c0759e4609cd494e8e725babae6a2ca7b62a5536a13daaec6fcb7ba" +checksum = "1d1feddffcfcc0b33f5c6ce9a29e341e4cd59c3f78e7ee45f4a40c038b1d6cbb" dependencies = [ "log", - "ring 0.17.7", + "ring", "rustls-webpki", "sct", ] @@ -4044,21 +4047,21 @@ dependencies = [ [[package]] name = "rustls-pemfile" -version = "1.0.4" +version = "1.0.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1c74cae0a4cf6ccbbf5f359f08efdf8ee7e1dc532573bf0db71968cb56b1448c" +checksum = "2d3987094b1d07b653b7dfdc3f70ce9a1da9c51ac18c1b06b662e4f9a0e9f4b2" dependencies = [ - "base64 0.21.5", + "base64 0.21.2", ] [[package]] name = "rustls-webpki" -version = "0.101.7" +version = "0.101.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8b6275d1ee7a1cd780b64aca7726599a1dbc893b1e64144529e55c3c2f745765" +checksum = "7d93931baf2d282fff8d3a532bbfd7653f734643161b87e3e01e59a04439bf0d" dependencies 
= [ - "ring 0.17.7", - "untrusted 0.9.0", + "ring", + "untrusted", ] [[package]] @@ -4069,9 +4072,9 @@ checksum = "7ffc183a10b4478d04cbbbfc96d0873219d962dd5accaff2ffbd4ceb7df837f4" [[package]] name = "ryu" -version = "1.0.16" +version = "1.0.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f98d2aa92eebf49b69786be48e4477826b256916e84a57ff2a4f21923b48eb4c" +checksum = "1ad4cc8da4ef723ed60bced201181d83791ad433213d8c24efffda1eec85d741" [[package]] name = "same-file" @@ -4099,12 +4102,12 @@ checksum = "94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49" [[package]] name = "sct" -version = "0.7.1" +version = "0.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "da046153aa2352493d6cb7da4b6e5c0c057d8a1d0a9aa8560baffdd945acd414" +checksum = "d53dcdb7c9f8158937a7981b48accfd39a43af418591a5d008c7b22b5e1b7ca4" dependencies = [ - "ring 0.17.7", - "untrusted 0.9.0", + "ring", + "untrusted", ] [[package]] @@ -4129,7 +4132,7 @@ dependencies = [ "serde_json", "sqlx", "thiserror", - "time", + "time 0.3.27", "tracing", "url", "uuid", @@ -4159,7 +4162,7 @@ dependencies = [ "rust_decimal", "sea-query-derive", "serde_json", - "time", + "time 0.3.27", "uuid", ] @@ -4175,7 +4178,7 @@ dependencies = [ "sea-query", "serde_json", "sqlx", - "time", + "time 0.3.27", "uuid", ] @@ -4269,24 +4272,24 @@ dependencies = [ [[package]] name = "semver" -version = "1.0.20" +version = "1.0.18" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "836fa6a3e1e547f9a2c4040802ec865b5d85f4014efe00555d7090a3dcaa1090" +checksum = "b0293b4b29daaf487284529cc2f5675b8e57c61f70167ba415a463651fd6a918" [[package]] name = "serde" -version = "1.0.193" +version = "1.0.186" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "25dd9975e68d0cb5aa1120c288333fc98731bd1dd12f561e468ea4728c042b89" +checksum = "9f5db24220c009de9bd45e69fb2938f4b6d2df856aa9304ce377b3180f83b7c1" dependencies = [ 
"serde_derive", ] [[package]] name = "serde-aux" -version = "4.3.1" +version = "4.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "184eba62ebddb71658697c8b08822edee89970bf318c5362189f0de27f85b498" +checksum = "c3dfe1b7eb6f9dcf011bd6fad169cdeaae75eda0d61b1a99a3f015b41b0cae39" dependencies = [ "chrono", "serde", @@ -4295,22 +4298,22 @@ dependencies = [ [[package]] name = "serde_derive" -version = "1.0.193" +version = "1.0.186" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "43576ca501357b9b071ac53cdc7da8ef0cbd9493d8df094cd821777ea6e894d3" +checksum = "5ad697f7e0b65af4983a4ce8f56ed5b357e8d3c36651bf6a7e13639c17b8e670" dependencies = [ "proc-macro2", "quote", - "syn 2.0.40", + "syn 2.0.29", ] [[package]] name = "serde_json" -version = "1.0.108" +version = "1.0.105" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3d1c7e3eac408d115102c4c24ad393e0821bb3a5df4d506a80f85f7a742a526b" +checksum = "693151e1ac27563d6dbcec9dee9fbd5da8539b20fa14ad3752b2e6d363ace360" dependencies = [ - "indexmap 2.1.0", + "indexmap 2.0.0", "itoa", "ryu", "serde", @@ -4337,20 +4340,20 @@ dependencies = [ [[package]] name = "serde_repr" -version = "0.1.17" +version = "0.1.16" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3081f5ffbb02284dda55132aa26daecedd7372a42417bbbab6f14ab7d6bb9145" +checksum = "8725e1dfadb3a50f7e5ce0b1a540466f6ed3fe7a0fca2ac2b8b831d31316bd00" dependencies = [ "proc-macro2", "quote", - "syn 2.0.40", + "syn 2.0.29", ] [[package]] name = "serde_spanned" -version = "0.6.4" +version = "0.6.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "12022b835073e5b11e90a14f86838ceb1c8fb0325b72416845c487ac0fa95e80" +checksum = "96426c9936fd7a0124915f9185ea1d20aa9445cc9821142f0a73bc9207a2e186" dependencies = [ "serde", ] @@ -4390,24 +4393,24 @@ dependencies = [ "serde", "serde_json", "serde_with_macros 2.3.3", - "time", + "time 0.3.27", ] 
[[package]] name = "serde_with" -version = "3.4.0" +version = "3.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "64cd236ccc1b7a29e7e2739f27c0b2dd199804abc4290e32f59f3b68d6405c23" +checksum = "1ca3b16a3d82c4088f343b7480a93550b3eabe1a358569c2dfe38bbcead07237" dependencies = [ - "base64 0.21.5", + "base64 0.21.2", "chrono", "hex", "indexmap 1.9.3", - "indexmap 2.1.0", + "indexmap 2.0.0", "serde", "serde_json", - "serde_with_macros 3.4.0", - "time", + "serde_with_macros 3.3.0", + "time 0.3.27", ] [[package]] @@ -4419,28 +4422,28 @@ dependencies = [ "darling 0.20.3", "proc-macro2", "quote", - "syn 2.0.40", + "syn 2.0.29", ] [[package]] name = "serde_with_macros" -version = "3.4.0" +version = "3.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "93634eb5f75a2323b16de4748022ac4297f9e76b6dced2be287a099f41b5e788" +checksum = "2e6be15c453eb305019bfa438b1593c731f36a289a7853f7707ee29e870b3b3c" dependencies = [ "darling 0.20.3", "proc-macro2", "quote", - "syn 2.0.40", + "syn 2.0.29", ] [[package]] name = "serde_yaml" -version = "0.9.27" +version = "0.9.25" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3cc7a1570e38322cfe4154732e5110f887ea57e22b76f4bfd32b5bdd3368666c" +checksum = "1a49e178e4452f45cb61d0cd8cebc1b0fafd3e41929e996cef79aa3aca91f574" dependencies = [ - "indexmap 2.1.0", + "indexmap 2.0.0", "itoa", "ryu", "serde", @@ -4449,9 +4452,9 @@ dependencies = [ [[package]] name = "sha1" -version = "0.10.6" +version = "0.10.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e3bf829a2d51ab4a5ddf1352d8470c140cadc8301b2ae1789db023f01cedd6ba" +checksum = "f04293dc80c3993519f2d7f6f511707ee7094fe0c6d3406feb330cdb3540eba3" dependencies = [ "cfg-if", "cpufeatures", @@ -4473,9 +4476,9 @@ dependencies = [ [[package]] name = "sha2" -version = "0.10.8" +version = "0.10.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"793db75ad2bcafc3ffa7c68b215fee268f537982cd901d132f89c6343f3a3dc8" +checksum = "479fb9d862239e610720565ca91403019f2f00410f1864c5aa7479b950a76ed8" dependencies = [ "cfg-if", "cpufeatures", @@ -4484,9 +4487,9 @@ dependencies = [ [[package]] name = "sharded-slab" -version = "0.1.7" +version = "0.1.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f40ca3c46823713e0d4209592e8d6e826aa57e928f09752619fc696c499637f6" +checksum = "900fba806f70c630b0a382d0d825e17a0f19fcd059a2ade1ff237bcddf446b31" dependencies = [ "lazy_static", ] @@ -4533,18 +4536,6 @@ dependencies = [ "zeroize", ] -[[package]] -name = "signatory" -version = "0.27.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c1e303f8205714074f6068773f0e29527e0453937fe837c9717d066635b65f31" -dependencies = [ - "pkcs8 0.10.2", - "rand_core 0.6.4", - "signature 2.2.0", - "zeroize", -] - [[package]] name = "signature" version = "1.6.4" @@ -4557,9 +4548,9 @@ dependencies = [ [[package]] name = "signature" -version = "2.2.0" +version = "2.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "77549399552de45a898a580c1b41d445bf730df867cc44e6c0233bbc4b8329de" +checksum = "5e1788eed21689f9cf370582dfc467ef36ed9c707f073528ddafa8d83e3b8500" dependencies = [ "digest 0.10.7", "rand_core 0.6.4", @@ -4577,12 +4568,6 @@ version = "0.3.11" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "38b58827f4464d87d377d175e90bf58eb00fd8716ff0a62f80356b5e61555d0d" -[[package]] -name = "siphasher" -version = "1.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "54ac45299ccbd390721be55b412d41931911f654fa99e2cb8bfb57184b2061fe" - [[package]] name = "slab" version = "0.4.9" @@ -4601,26 +4586,17 @@ dependencies = [ "smallvec", ] -[[package]] -name = "smallstr" -version = "0.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"63b1aefdf380735ff8ded0b15f31aab05daf1f70216c01c02a12926badd1df9d" -dependencies = [ - "smallvec", -] - [[package]] name = "smallvec" -version = "1.11.2" +version = "1.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4dccd0940a2dcdf68d092b8cbab7dc0ad8fa938bf95787e1b916b0e3d0e8e970" +checksum = "62bb4feee49fdd9f707ef802e22365a35de4b7b299de4763d44bfea899442ff9" [[package]] name = "socket2" -version = "0.4.10" +version = "0.4.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9f7916fc008ca5542385b89a3d3ce689953c143e9304a9bf8beec1de48994c0d" +checksum = "64a4a911eed85daf18834cfaa86a79b7d266ff93ff5ba14005426219480ed662" dependencies = [ "libc", "winapi", @@ -4628,9 +4604,9 @@ dependencies = [ [[package]] name = "socket2" -version = "0.5.5" +version = "0.5.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7b5fac59a5cb5dd637972e5fca70daf0523c9067fcdc4842f053dae04a18f8e9" +checksum = "2538b18701741680e0322a2302176d3253a35388e2e62f172f64f4f16605f877" dependencies = [ "libc", "windows-sys 0.48.0", @@ -4642,7 +4618,7 @@ version = "0.2.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e26be3acb6c2d9a7aac28482586a7856436af4cfe7100031d219de2d2ecb0028" dependencies = [ - "ed25519 1.5.3", + "ed25519", "libc", "libsodium-sys", "serde", @@ -4659,6 +4635,9 @@ name = "spin" version = "0.9.8" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6980e8d7511241f8acf4aebddbb1ff938df5eebe98691418c4468d0b72a96a67" +dependencies = [ + "lock_api", +] [[package]] name = "spki" @@ -4681,9 +4660,9 @@ dependencies = [ [[package]] name = "spki" -version = "0.7.3" +version = "0.7.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d91ed6c858b01f942cd56b37a94b3e0a1798290327d1236e4d9cf4eaca44d29d" +checksum = "9d1e996ef02c474957d681f1b05213dfb0abab947b446a62d37770b23500184a" dependencies = [ "base64ct", "der 0.7.8", @@ 
-4691,11 +4670,11 @@ dependencies = [ [[package]] name = "sqlformat" -version = "0.2.3" +version = "0.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ce81b7bd7c4493975347ef60d8c7e8b742d4694f4c49f93e0a12ea263938176c" +checksum = "0c12bc9199d1db8234678b7051747c07f517cdcf019262d1847b94ec8b1aee3e" dependencies = [ - "itertools 0.12.0", + "itertools", "nom", "unicode_categories", ] @@ -4716,13 +4695,13 @@ version = "0.6.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fa8241483a83a3f33aa5fff7e7d9def398ff9990b2752b6c6112b83c6d246029" dependencies = [ - "ahash 0.7.7", + "ahash 0.7.6", "atoi", "base64 0.13.1", "bigdecimal", "bitflags 1.3.2", "byteorder", - "bytes 1.5.0", + "bytes 1.4.0", "chrono", "crossbeam-queue", "dirs", @@ -4749,18 +4728,18 @@ dependencies = [ "percent-encoding", "rand 0.8.5", "rust_decimal", - "rustls 0.20.9", + "rustls 0.20.8", "rustls-pemfile", "serde", "serde_json", "sha1", - "sha2 0.10.8", + "sha2 0.10.7", "smallvec", "sqlformat", "sqlx-rt", "stringprep", "thiserror", - "time", + "time 0.3.27", "tokio-stream", "url", "uuid", @@ -4804,6 +4783,12 @@ version = "1.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a8f112729512f8e442d81f95a8a7ddf2b7c6b8a1a6f509a95864142b30cab2d3" +[[package]] +name = "static_assertions" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a2eb9349b6444b326872e140eb1cf5e7c522154d69e7a0ffb0fb81c06b37543f" + [[package]] name = "stream-cancel" version = "0.8.1" @@ -4817,11 +4802,10 @@ dependencies = [ [[package]] name = "stringprep" -version = "0.1.4" +version = "0.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bb41d74e231a107a1b4ee36bd1214b11285b77768d2e3824aedafa988fd36ee6" +checksum = "db3737bde7edce97102e0e2b15365bf7a20bfdb5f60f4f9e8d7004258a51a8da" dependencies = [ - "finl_unicode", "unicode-bidi", "unicode-normalization", ] @@ -4838,15 
+4822,9 @@ version = "0.24.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "063e6045c0e62079840579a7e47a355ae92f60eb74daaf156fb1e84ba164e63f" dependencies = [ - "strum_macros 0.24.3", + "strum_macros", ] -[[package]] -name = "strum" -version = "0.25.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "290d54ea6f91c969195bdbcd7442c8c2a2ba87da8bf60a7ee86a235d4bc1e125" - [[package]] name = "strum_macros" version = "0.24.3" @@ -4860,19 +4838,6 @@ dependencies = [ "syn 1.0.109", ] -[[package]] -name = "strum_macros" -version = "0.25.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "23dc1fa9ac9c169a78ba62f0b841814b7abae11bdd047b9c58f893439e309ea0" -dependencies = [ - "heck 0.4.1", - "proc-macro2", - "quote", - "rustversion", - "syn 2.0.40", -] - [[package]] name = "subtle" version = "2.5.0" @@ -4892,54 +4857,21 @@ dependencies = [ [[package]] name = "syn" -version = "2.0.40" +version = "2.0.29" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "13fa70a4ee923979ffb522cacce59d34421ebdea5625e1073c4326ef9d2dd42e" +checksum = "c324c494eba9d92503e6f1ef2e6df781e78f6a7705a0202d9801b198807d518a" dependencies = [ "proc-macro2", "quote", "unicode-ident", ] -[[package]] -name = "syn_derive" -version = "0.1.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1329189c02ff984e9736652b1631330da25eaa6bc639089ed4915d25446cbe7b" -dependencies = [ - "proc-macro-error", - "proc-macro2", - "quote", - "syn 2.0.40", -] - [[package]] name = "sync_wrapper" version = "0.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2047c6ded9c721764247e62cd3b03c09ffc529b2ba5b10ec482ae507a4a70160" -[[package]] -name = "system-configuration" -version = "0.5.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ba3a3adc5c275d719af8cb4272ea1c4a6d668a777f37e115f6d11ddbc1c8e0e7" -dependencies = [ - "bitflags 
1.3.2", - "core-foundation", - "system-configuration-sys", -] - -[[package]] -name = "system-configuration-sys" -version = "0.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a75fb188eb626b924683e3b95e3a48e63551fcfb51949de2f06a9d91dbee93c9" -dependencies = [ - "core-foundation-sys", - "libc", -] - [[package]] name = "tap" version = "1.0.1" @@ -4968,46 +4900,36 @@ dependencies = [ [[package]] name = "tempfile" -version = "3.8.1" +version = "3.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7ef1adac450ad7f4b3c28589471ade84f25f731a7a0fe30d71dfa9f60fd808e5" +checksum = "cb94d2f3cc536af71caac6b6fcebf65860b347e7ce0cc9ebe8f70d3e521054ef" dependencies = [ "cfg-if", - "fastrand 2.0.1", - "redox_syscall 0.4.1", - "rustix", + "fastrand 2.0.0", + "redox_syscall 0.3.5", + "rustix 0.38.8", "windows-sys 0.48.0", ] [[package]] name = "terminal_size" -version = "0.3.0" +version = "0.2.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "21bebf2b7c9e0a515f6e0f8c51dc0f8e4696391e6f1ff30379559f8365fb0df7" +checksum = "8e6bf6f19e9f8ed8d4048dc22981458ebcf406d67e94cd422e5ecd73d63b3237" dependencies = [ - "rustix", + "rustix 0.37.23", "windows-sys 0.48.0", ] [[package]] name = "test-log" -version = "0.2.14" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6159ab4116165c99fc88cce31f99fa2c9dbe08d3691cb38da02fc3b45f357d2b" -dependencies = [ - "test-log-macros", - "tracing-subscriber", -] - -[[package]] -name = "test-log-macros" -version = "0.2.14" +version = "0.2.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7ba277e77219e9eea169e8508942db1bf5d8a41ff2db9b20aab5a5aadc9fa25d" +checksum = "d9601d162c1d77e62c1ea0bc8116cd1caf143ce3af947536c3c9052a1677fe0c" dependencies = [ "proc-macro2", "quote", - "syn 2.0.40", + "syn 1.0.109", ] [[package]] @@ -5018,10 +4940,10 @@ dependencies = [ "async-recursion", "async-trait", "axum", - "base64 
0.21.5", + "base64 0.21.2", "blake3", "bollard", - "bytes 1.5.0", + "bytes 1.4.0", "chrono", "ciborium", "clap", @@ -5051,11 +4973,11 @@ dependencies = [ "indicatif", "indoc", "inquire", - "itertools 0.10.5", + "itertools", "jwt-simple", "lazy_static", "names", - "nix 0.26.4", + "nix 0.26.2", "nkeys 0.2.0", "num_cpus", "once_cell", @@ -5069,6 +4991,7 @@ dependencies = [ "petgraph", "pin-project-lite", "podman-api", + "postcard", "postgres-types", "pretty_assertions_sorted", "proc-macro2", @@ -5080,19 +5003,19 @@ dependencies = [ "remain", "reqwest", "rust-s3", - "rustls 0.21.10", + "rustls 0.21.6", "sea-orm", "self-replace", "serde", "serde-aux", "serde_json", "serde_url_params", - "serde_with 3.4.0", + "serde_with 3.3.0", "serde_yaml", "sodiumoxide", "stream-cancel", - "strum 0.24.1", - "syn 2.0.40", + "strum", + "syn 2.0.29", "tar", "tempfile", "test-log", @@ -5105,7 +5028,7 @@ dependencies = [ "tokio-tungstenite 0.18.0", "tokio-util", "tokio-vsock", - "toml 0.7.8", + "toml 0.7.6", "tower", "tower-http", "tracing", @@ -5117,27 +5040,27 @@ dependencies = [ "vfs", "vfs-tar", "y-sync", - "yrs 0.16.10", + "yrs", ] [[package]] name = "thiserror" -version = "1.0.50" +version = "1.0.47" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f9a7210f5c9a7156bb50aa36aed4c95afb51df0df00713949448cf9e97d382d2" +checksum = "97a802ec30afc17eee47b2855fc72e0c4cd62be9b4efe6591edde0ec5bd68d8f" dependencies = [ "thiserror-impl", ] [[package]] name = "thiserror-impl" -version = "1.0.50" +version = "1.0.47" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "266b2e40bc00e5a6c09c3584011e08b06f123c00362c92b975ba9843aaaa14b8" +checksum = "6bb623b56e39ab7dcd4b1b98bb6c8f8d907ed255b18de254088016b27a8ee19b" dependencies = [ "proc-macro2", "quote", - "syn 2.0.40", + "syn 2.0.29", ] [[package]] @@ -5152,13 +5075,23 @@ dependencies = [ [[package]] name = "time" -version = "0.3.30" +version = "0.1.45" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "1b797afad3f312d1c66a56d11d0316f916356d11bd158fbc6ca6389ff6bf805a" +dependencies = [ + "libc", + "wasi 0.10.0+wasi-snapshot-preview1", + "winapi", +] + +[[package]] +name = "time" +version = "0.3.27" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c4a34ab300f2dee6e562c10a046fc05e358b29f9bf92277f30c3c8d82275f6f5" +checksum = "0bb39ee79a6d8de55f48f2293a830e040392f1c5f16e336bdd1788cd0aadce07" dependencies = [ "deranged", "itoa", - "powerfmt", "serde", "time-core", "time-macros", @@ -5166,15 +5099,15 @@ dependencies = [ [[package]] name = "time-core" -version = "0.1.2" +version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ef927ca75afb808a4d64dd374f00a2adf8d0fcff8e7b184af886c3c87ec4a3f3" +checksum = "7300fbefb4dadc1af235a9cef3737cea692a9d97e1b9cbcd4ebdae6f8868e6fb" [[package]] name = "time-macros" -version = "0.2.15" +version = "0.2.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4ad70d68dba9e1f8aceda7aa6711965dfec1cac869f311a51bd08b3a2ccbce20" +checksum = "733d258752e9303d392b94b75230d07b0b9c489350c69b851fc6c065fde3e8f9" dependencies = [ "time-core", ] @@ -5196,19 +5129,19 @@ checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20" [[package]] name = "tokio" -version = "1.35.0" +version = "1.32.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "841d45b238a16291a4e1584e61820b8ae57d696cc5015c459c229ccc6990cc1c" +checksum = "17ed6077ed6cd6c74735e21f37eb16dc3935f96878b1fe961074089cc80893f9" dependencies = [ "backtrace", - "bytes 1.5.0", + "bytes 1.4.0", "libc", "mio", "num_cpus", "parking_lot 0.12.1", "pin-project-lite", "signal-hook-registry", - "socket2 0.5.5", + "socket2 0.5.3", "tokio-macros", "windows-sys 0.48.0", ] @@ -5225,24 +5158,24 @@ dependencies = [ [[package]] name = "tokio-macros" -version = "2.2.0" +version = "2.1.0" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "5b8a1e28f2deaa14e508979454cb3a223b10b938b45af148bc0986de36f1923b" +checksum = "630bdcf245f78637c13ec01ffae6187cca34625e8c63150d424b59e55af2675e" dependencies = [ "proc-macro2", "quote", - "syn 2.0.40", + "syn 2.0.29", ] [[package]] name = "tokio-postgres" -version = "0.7.10" +version = "0.7.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d340244b32d920260ae7448cb72b6e238bddc3d4f7603394e7dd46ed8e48f5b8" +checksum = "000387915083ea6406ee44b50ca74813aba799fe682a7689e382bf9e13b74ce9" dependencies = [ "async-trait", "byteorder", - "bytes 1.5.0", + "bytes 1.4.0", "fallible-iterator", "futures-channel", "futures-util", @@ -5254,7 +5187,7 @@ dependencies = [ "postgres-protocol", "postgres-types", "rand 0.8.5", - "socket2 0.5.5", + "socket2 0.5.3", "tokio", "tokio-util", "whoami", @@ -5277,7 +5210,7 @@ version = "0.23.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c43ee83903113e03984cb9e5cebe6c04a5116269e900e3ddba8f068a62adda59" dependencies = [ - "rustls 0.20.9", + "rustls 0.20.8", "tokio", "webpki", ] @@ -5288,7 +5221,7 @@ version = "0.24.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c28327cf380ac148141087fbfb9de9d7bd4e84ab5d2c28fbc911d753de8a7081" dependencies = [ - "rustls 0.21.10", + "rustls 0.21.6", "tokio", ] @@ -5298,7 +5231,7 @@ version = "0.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "911a61637386b789af998ee23f50aa30d5fd7edcec8d6d3dedae5e5815205466" dependencies = [ - "bytes 1.5.0", + "bytes 1.4.0", "educe", "futures-core", "futures-sink", @@ -5325,7 +5258,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e89b3cbabd3ae862100094ae433e1def582cf86451b4e9bf83aa7ac1d8a7d719" dependencies = [ "async-stream", - "bytes 1.5.0", + "bytes 1.4.0", "futures-core", "tokio", "tokio-stream", @@ -5345,23 +5278,23 @@ dependencies = [ [[package]] 
name = "tokio-tungstenite" -version = "0.20.1" +version = "0.20.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "212d5dcb2a1ce06d81107c3d0ffa3121fe974b73f068c8282cb1c32328113b6c" +checksum = "2b2dbec703c26b00d74844519606ef15d09a7d6857860f84ad223dec002ddea2" dependencies = [ "futures-util", "log", "tokio", - "tungstenite 0.20.1", + "tungstenite 0.20.0", ] [[package]] name = "tokio-util" -version = "0.7.10" +version = "0.7.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5419f34732d9eb6ee4c3578b7989078579b7f039cbbb9ca2c4da015749371e15" +checksum = "806fe8c2c87eccc8b3267cbae29ed3ab2d0bd37fca70ab622e46aaa9375ddb7d" dependencies = [ - "bytes 1.5.0", + "bytes 1.4.0", "futures-core", "futures-sink", "pin-project-lite", @@ -5375,7 +5308,7 @@ version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "52a15c15b1bc91f90902347eff163b5b682643aff0c8e972912cca79bd9208dd" dependencies = [ - "bytes 1.5.0", + "bytes 1.4.0", "futures", "libc", "tokio", @@ -5393,49 +5326,38 @@ dependencies = [ [[package]] name = "toml" -version = "0.7.8" +version = "0.7.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dd79e69d3b627db300ff956027cc6c3798cef26d22526befdfcd12feeb6d2257" +checksum = "c17e963a819c331dcacd7ab957d80bc2b9a9c1e71c804826d2f283dd65306542" dependencies = [ "serde", "serde_spanned", "toml_datetime", - "toml_edit 0.19.15", + "toml_edit", ] [[package]] name = "toml_datetime" -version = "0.6.5" +version = "0.6.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3550f4e9685620ac18a50ed434eb3aec30db8ba93b0287467bca5826ea25baf1" +checksum = "7cda73e2f1397b1262d6dfdcef8aafae14d1de7748d66822d3bfeeb6d03e5e4b" dependencies = [ "serde", ] [[package]] name = "toml_edit" -version = "0.19.15" +version = "0.19.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"1b5bb770da30e5cbfde35a2d7b9b8a2c4b8ef89548a7a6aeab5c9a576e3e7421" +checksum = "f8123f27e969974a3dfba720fdb560be359f57b44302d280ba72e76a74480e8a" dependencies = [ - "indexmap 2.1.0", + "indexmap 2.0.0", "serde", "serde_spanned", "toml_datetime", "winnow", ] -[[package]] -name = "toml_edit" -version = "0.20.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "70f427fce4d84c72b5b732388bf4a9f4531b53f74e2887e3ecb2481f68f66d81" -dependencies = [ - "indexmap 2.1.0", - "toml_datetime", - "winnow", -] - [[package]] name = "tonic" version = "0.8.3" @@ -5446,7 +5368,7 @@ dependencies = [ "async-trait", "axum", "base64 0.13.1", - "bytes 1.5.0", + "bytes 1.4.0", "futures-core", "futures-util", "h2", @@ -5503,12 +5425,12 @@ dependencies = [ [[package]] name = "tower-http" -version = "0.4.4" +version = "0.4.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "61c5bb1d698276a2443e5ecfabc1008bf15a36c12e6a7176e7bf089ea9131140" +checksum = "55ae70283aba8d2a8b411c695c437fe25b8b5e44e23e780662002fc72fb47a82" dependencies = [ - "bitflags 2.4.1", - "bytes 1.5.0", + "bitflags 2.4.0", + "bytes 1.4.0", "futures-core", "futures-util", "http", @@ -5534,10 +5456,11 @@ checksum = "b6bc1c9ce2b5135ac7f93c72918fc37feb872bdc6a5533a8b85eb4b86bfdae52" [[package]] name = "tracing" -version = "0.1.40" +version = "0.1.37" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c3523ab5a71916ccf420eebdf5521fcef02141234bbc0b8a49f2fdc4544364ef" +checksum = "8ce8c33a8d48bd45d624a6e523445fd21ec13d3653cd51f681abf67418f54eb8" dependencies = [ + "cfg-if", "log", "pin-project-lite", "tracing-attributes", @@ -5546,20 +5469,20 @@ dependencies = [ [[package]] name = "tracing-attributes" -version = "0.1.27" +version = "0.1.26" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "34704c8d6ebcbc939824180af020566b01a7c01f80641264eba0999f6c2b6be7" +checksum = 
"5f4f31f56159e98206da9efd823404b79b6ef3143b4a7ab76e67b1751b25a4ab" dependencies = [ "proc-macro2", "quote", - "syn 2.0.40", + "syn 2.0.29", ] [[package]] name = "tracing-core" -version = "0.1.32" +version = "0.1.31" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c06d3da6113f116aaee68e4d601191614c9053067f9ab7f6edbcb161237daa54" +checksum = "0955b8137a1df6f1a2e9a37d8a6656291ff0297c1a97c24e0d8425fe2312f79a" dependencies = [ "once_cell", "valuable", @@ -5587,23 +5510,12 @@ dependencies = [ [[package]] name = "tracing-log" -version = "0.1.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f751112709b4e791d8ce53e32c4ed2d353565a795ce84da2285393f41557bdf2" -dependencies = [ - "log", - "once_cell", - "tracing-core", -] - -[[package]] -name = "tracing-log" -version = "0.2.0" +version = "0.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ee855f1f400bd0e5c02d150ae5de3840039a3f54b025156404e34c23c03f47c3" +checksum = "78ddad33d2d10b1ed7eb9d1f518a5674713876e97e5bb9b7345a7984fbb4f922" dependencies = [ + "lazy_static", "log", - "once_cell", "tracing-core", ] @@ -5617,15 +5529,15 @@ dependencies = [ "opentelemetry", "tracing", "tracing-core", - "tracing-log 0.1.4", + "tracing-log", "tracing-subscriber", ] [[package]] name = "tracing-subscriber" -version = "0.3.18" +version = "0.3.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ad0f048c97dbd9faa9b7df56362b8ebcaa52adb06b498c050d2f4e32f90a7a8b" +checksum = "30a651bc37f915e81f087d86e62a18eec5f79550c7faff886f7090b4ea757c77" dependencies = [ "matchers", "nu-ansi-term", @@ -5636,14 +5548,14 @@ dependencies = [ "thread_local", "tracing", "tracing-core", - "tracing-log 0.2.0", + "tracing-log", ] [[package]] name = "try-lock" -version = "0.2.5" +version = "0.2.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e421abadd41a4225275504ea4d6566923418b7f05506fbc9c0fe86ba7396114b" +checksum 
= "3528ecfd12c466c6f163363caf2d02a71161dd5e1cc6ae7b34207ea2d42d81ed" [[package]] name = "tungstenite" @@ -5653,7 +5565,7 @@ checksum = "30ee6ab729cd4cf0fd55218530c4522ed30b7b6081752839b68fcec8d0960788" dependencies = [ "base64 0.13.1", "byteorder", - "bytes 1.5.0", + "bytes 1.4.0", "http", "httparse", "log", @@ -5666,12 +5578,12 @@ dependencies = [ [[package]] name = "tungstenite" -version = "0.20.1" +version = "0.20.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9e3dac10fd62eaf6617d3a904ae222845979aec67c615d1c842b4002c7666fb9" +checksum = "e862a1c4128df0112ab625f55cd5c934bcb4312ba80b39ae4b4835a3fd58e649" dependencies = [ "byteorder", - "bytes 1.5.0", + "bytes 1.4.0", "data-encoding", "http", "httparse", @@ -5685,15 +5597,15 @@ dependencies = [ [[package]] name = "typenum" -version = "1.17.0" +version = "1.16.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "42ff0bf0c66b8238c6f3b578df37d0b7848e55df8577b3f74f92a69acceeb825" +checksum = "497961ef93d974e23eb6f433eb5fe1b7930b659f06d12dec6fc44a8f554c0bba" [[package]] name = "ulid" -version = "1.1.0" +version = "1.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7e37c4b6cbcc59a8dcd09a6429fbc7890286bcbb79215cea7b38a3c4c0921d93" +checksum = "13a3aaa69b04e5b66cc27309710a569ea23593612387d67daaf102e73aa974fd" dependencies = [ "rand 0.8.5", "serde", @@ -5710,15 +5622,15 @@ dependencies = [ [[package]] name = "unicode-bidi" -version = "0.3.14" +version = "0.3.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6f2528f27a9eb2b21e69c95319b30bd0efd85d09c379741b0f78ea1d86be2416" +checksum = "92888ba5573ff080736b3648696b70cafad7d250551175acbaa4e0385b3e1460" [[package]] name = "unicode-ident" -version = "1.0.12" +version = "1.0.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3354b9ac3fae1ff6755cb6db53683adb661634f67557942dea4facebec0fee4b" +checksum = 
"301abaae475aa91687eb82514b328ab47a211a533026cb25fc3e519b86adfc3c" [[package]] name = "unicode-normalization" @@ -5737,9 +5649,9 @@ checksum = "1dd624098567895118886609431a7c3b8f516e41d30e0643f03d94592a147e36" [[package]] name = "unicode-width" -version = "0.1.11" +version = "0.1.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e51733f11c9c4f72aa0c160008246859e340b00807569a0da0e7a1079b27ba85" +checksum = "c0edd1e5b14653f783770bce4a4dabb4a5108a5370a5f5d8cfe8710c361f6c8b" [[package]] name = "unicode-xid" @@ -5765,17 +5677,11 @@ version = "0.7.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a156c684c91ea7d62626509bce3cb4e1d9ed5c4d978f7b4352658f96a4c26b4a" -[[package]] -name = "untrusted" -version = "0.9.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8ecb6da28b8a351d773b68d5825ac39017e680750f980f3a1a85cd8dd28a47c1" - [[package]] name = "url" -version = "2.5.0" +version = "2.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "31e6302e3bb753d46e83516cae55ae196fc0c309407cf11ab35cc51a4c2a4633" +checksum = "50bff7831e19200a85b17131d085c25d7811bc4e186efdaf54bbd132994a88cb" dependencies = [ "form_urlencoded", "idna", @@ -5791,9 +5697,9 @@ checksum = "09cc8ee72d2a9becf2f2febe0205bbed8fc6615b7cb429ad062dc7b7ddd036a9" [[package]] name = "utf8-width" -version = "0.1.7" +version = "0.1.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "86bd8d4e895da8537e5315b8254664e6b769c4ff3db18321b297a1e7004392e3" +checksum = "5190c9442dcdaf0ddd50f37420417d219ae5261bbf5db120d0f9bab996c9cba1" [[package]] name = "utf8parse" @@ -5803,11 +5709,11 @@ checksum = "711b9620af191e0cdc7468a8d14e709c3dcdb115b36f838e601583af800a370a" [[package]] name = "uuid" -version = "1.6.1" +version = "1.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5e395fcf16a7a3d8127ec99782007af141946b4795001f876d54fb0d55978560" +checksum 
= "79daa5ed5740825c40b389c5e50312b9c86df53fccd33f281df655642b43869d" dependencies = [ - "getrandom 0.2.11", + "getrandom 0.2.10", "serde", ] @@ -5853,15 +5759,15 @@ dependencies = [ [[package]] name = "waker-fn" -version = "1.1.1" +version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f3c4517f54858c779bbcbf228f4fca63d121bf85fbecb2dc578cdf4a39395690" +checksum = "9d5b2c62b4012a3e1eca5a7e077d13b3bf498c4073e33ccd58626607748ceeca" [[package]] name = "walkdir" -version = "2.4.0" +version = "2.3.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d71d857dc86794ca4c280d616f7da00d2dbfd8cd788846559a6813e6aa4b54ee" +checksum = "36df944cda56c7d8d8b7496af378e6b16de9284591917d307c9b4d313c44e698" dependencies = [ "same-file", "winapi-util", @@ -5882,6 +5788,12 @@ version = "0.9.0+wasi-snapshot-preview1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "cccddf32554fecc6acb585f82a32a72e28b48f8c4c1883ddfeeeaa96f7d8e519" +[[package]] +name = "wasi" +version = "0.10.0+wasi-snapshot-preview1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1a143597ca7c7793eff794def352d41792a93c481eb1042423ff7ff72ba2c31f" + [[package]] name = "wasi" version = "0.11.0+wasi-snapshot-preview1" @@ -5890,9 +5802,9 @@ checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423" [[package]] name = "wasm-bindgen" -version = "0.2.89" +version = "0.2.87" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0ed0d4f68a3015cc185aff4db9506a015f4b96f95303897bfa23f846db54064e" +checksum = "7706a72ab36d8cb1f80ffbf0e071533974a60d0a308d01a5d0375bf60499a342" dependencies = [ "cfg-if", "wasm-bindgen-macro", @@ -5900,24 +5812,24 @@ dependencies = [ [[package]] name = "wasm-bindgen-backend" -version = "0.2.89" +version = "0.2.87" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"1b56f625e64f3a1084ded111c4d5f477df9f8c92df113852fa5a374dbda78826" +checksum = "5ef2b6d3c510e9625e5fe6f509ab07d66a760f0885d858736483c32ed7809abd" dependencies = [ "bumpalo", "log", "once_cell", "proc-macro2", "quote", - "syn 2.0.40", + "syn 2.0.29", "wasm-bindgen-shared", ] [[package]] name = "wasm-bindgen-futures" -version = "0.4.39" +version = "0.4.37" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ac36a15a220124ac510204aec1c3e5db8a22ab06fd6706d881dc6149f8ed9a12" +checksum = "c02dbc21516f9f1f04f187958890d7e6026df8d16540b7ad9492bc34a67cea03" dependencies = [ "cfg-if", "js-sys", @@ -5927,9 +5839,9 @@ dependencies = [ [[package]] name = "wasm-bindgen-macro" -version = "0.2.89" +version = "0.2.87" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0162dbf37223cd2afce98f3d0785506dcb8d266223983e4b5b525859e6e182b2" +checksum = "dee495e55982a3bd48105a7b947fd2a9b4a8ae3010041b9e0faab3f9cd028f1d" dependencies = [ "quote", "wasm-bindgen-macro-support", @@ -5937,22 +5849,22 @@ dependencies = [ [[package]] name = "wasm-bindgen-macro-support" -version = "0.2.89" +version = "0.2.87" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f0eb82fcb7930ae6219a7ecfd55b217f5f0893484b7a13022ebb2b2bf20b5283" +checksum = "54681b18a46765f095758388f2d0cf16eb8d4169b639ab575a8f5693af210c7b" dependencies = [ "proc-macro2", "quote", - "syn 2.0.40", + "syn 2.0.29", "wasm-bindgen-backend", "wasm-bindgen-shared", ] [[package]] name = "wasm-bindgen-shared" -version = "0.2.89" +version = "0.2.87" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7ab9b36309365056cd639da3134bf87fa8f3d86008abf99e612384a6eecd459f" +checksum = "ca6ad05a4870b2bf5fe995117d3728437bd27d7cd5f06f13c17443ef369775a1" [[package]] name = "wasm-streams" @@ -5969,9 +5881,9 @@ dependencies = [ [[package]] name = "web-sys" -version = "0.3.66" +version = "0.3.64" source = "registry+https://github.com/rust-lang/crates.io-index" 
-checksum = "50c24a44ec86bb68fbecd1b3efed7e85ea5621b39b35ef2766b66cd984f8010f" +checksum = "9b85cbef8c220a6abc02aefd892dfc0fc23afb1c6a426316ec33253a3877249b" dependencies = [ "js-sys", "wasm-bindgen", @@ -5979,12 +5891,12 @@ dependencies = [ [[package]] name = "webpki" -version = "0.22.4" +version = "0.22.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ed63aea5ce73d0ff405984102c42de94fc55a6b75765d621c65262469b3c9b53" +checksum = "f095d78192e208183081cc07bc5515ef55216397af48b873e5edcd72637fa1bd" dependencies = [ - "ring 0.17.7", - "untrusted 0.9.0", + "ring", + "untrusted", ] [[package]] @@ -5998,20 +5910,19 @@ dependencies = [ [[package]] name = "webpki-roots" -version = "0.25.3" +version = "0.25.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1778a42e8b3b90bff8d0f5032bf22250792889a5cdc752aa0020c84abe3aaf10" +checksum = "14247bb57be4f377dfb94c72830b8ce8fc6beac03cf4bf7b9732eadd414123fc" [[package]] name = "which" -version = "4.4.2" +version = "4.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "87ba24419a2078cd2b0f2ede2691b6c66d8e47836da3b6db8265ebad47afbfc7" +checksum = "2441c784c52b289a054b7201fc93253e288f094e2f4be9058343127c4226a269" dependencies = [ "either", - "home", + "libc", "once_cell", - "rustix", ] [[package]] @@ -6042,9 +5953,9 @@ checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6" [[package]] name = "winapi-util" -version = "0.1.6" +version = "0.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f29e6f9198ba0d26b4c9f07dbe6f9ed633e1f3d5b8b414090084349e46a52596" +checksum = "70ec6ce85bb158151cae5e5c87f95a8e97d2c0c4b001223f33a334e3ce5de178" dependencies = [ "winapi", ] @@ -6056,10 +5967,10 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" [[package]] -name = "windows-core" -version = "0.51.1" +name = 
"windows" +version = "0.48.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f1f8cf84f35d2db49a46868f947758c7a1138116f7fac3bc844f43ade1292e64" +checksum = "e686886bc078bc1b0b600cac0147aadb815089b6e4da64016cbd754b6342700f" dependencies = [ "windows-targets 0.48.5", ] @@ -6082,15 +5993,6 @@ dependencies = [ "windows-targets 0.48.5", ] -[[package]] -name = "windows-sys" -version = "0.52.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "282be5f36a8ce781fad8c8ae18fa3f9beff57ec1b52cb3de0789201425d9a33d" -dependencies = [ - "windows-targets 0.52.0", -] - [[package]] name = "windows-targets" version = "0.42.2" @@ -6121,21 +6023,6 @@ dependencies = [ "windows_x86_64_msvc 0.48.5", ] -[[package]] -name = "windows-targets" -version = "0.52.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8a18201040b24831fbb9e4eb208f8892e1f50a37feb53cc7ff887feb8f50e7cd" -dependencies = [ - "windows_aarch64_gnullvm 0.52.0", - "windows_aarch64_msvc 0.52.0", - "windows_i686_gnu 0.52.0", - "windows_i686_msvc 0.52.0", - "windows_x86_64_gnu 0.52.0", - "windows_x86_64_gnullvm 0.52.0", - "windows_x86_64_msvc 0.52.0", -] - [[package]] name = "windows_aarch64_gnullvm" version = "0.42.2" @@ -6148,12 +6035,6 @@ version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2b38e32f0abccf9987a4e3079dfb67dcd799fb61361e53e2882c3cbaf0d905d8" -[[package]] -name = "windows_aarch64_gnullvm" -version = "0.52.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cb7764e35d4db8a7921e09562a0304bf2f93e0a51bfccee0bd0bb0b666b015ea" - [[package]] name = "windows_aarch64_msvc" version = "0.42.2" @@ -6166,12 +6047,6 @@ version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "dc35310971f3b2dbbf3f0690a219f40e2d9afcf64f9ab7cc1be722937c26b4bc" -[[package]] -name = "windows_aarch64_msvc" -version = "0.52.0" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "bbaa0368d4f1d2aaefc55b6fcfee13f41544ddf36801e793edbbfd7d7df075ef" - [[package]] name = "windows_i686_gnu" version = "0.42.2" @@ -6184,12 +6059,6 @@ version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a75915e7def60c94dcef72200b9a8e58e5091744960da64ec734a6c6e9b3743e" -[[package]] -name = "windows_i686_gnu" -version = "0.52.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a28637cb1fa3560a16915793afb20081aba2c92ee8af57b4d5f28e4b3e7df313" - [[package]] name = "windows_i686_msvc" version = "0.42.2" @@ -6202,12 +6071,6 @@ version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8f55c233f70c4b27f66c523580f78f1004e8b5a8b659e05a4eb49d4166cca406" -[[package]] -name = "windows_i686_msvc" -version = "0.52.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ffe5e8e31046ce6230cc7215707b816e339ff4d4d67c65dffa206fd0f7aa7b9a" - [[package]] name = "windows_x86_64_gnu" version = "0.42.2" @@ -6220,12 +6083,6 @@ version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "53d40abd2583d23e4718fddf1ebec84dbff8381c07cae67ff7768bbf19c6718e" -[[package]] -name = "windows_x86_64_gnu" -version = "0.52.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3d6fa32db2bc4a2f5abeacf2b69f7992cd09dca97498da74a151a3132c26befd" - [[package]] name = "windows_x86_64_gnullvm" version = "0.42.2" @@ -6238,12 +6095,6 @@ version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0b7b52767868a23d5bab768e390dc5f5c55825b6d30b86c844ff2dc7414044cc" -[[package]] -name = "windows_x86_64_gnullvm" -version = "0.52.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1a657e1e9d3f514745a572a6846d3c7aa7dbe1658c056ed9c3344c4109a6949e" - [[package]] name = "windows_x86_64_msvc" version 
= "0.42.2" @@ -6256,17 +6107,11 @@ version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ed94fce61571a4006852b7389a063ab983c02eb1bb37b47f8272ce92d06d9538" -[[package]] -name = "windows_x86_64_msvc" -version = "0.52.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dff9641d1cd4be8d1a070daf9e3773c5f67e78b4d9d42263020c057706765c04" - [[package]] name = "winnow" -version = "0.5.28" +version = "0.5.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6c830786f7720c2fd27a1a0e27a709dbd3c4d009b56d098fc742d4f4eab91fe2" +checksum = "7c2e3184b9c4e92ad5167ca73039d0c42476302ab603e2fec4487511f38ccefc" dependencies = [ "memchr", ] @@ -6292,13 +6137,11 @@ dependencies = [ [[package]] name = "xattr" -version = "1.1.2" +version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d367426ae76bdfce3d8eaea6e94422afd6def7d46f9c89e2980309115b3c2c41" +checksum = "f4686009f71ff3e5c4dbcf1a282d0a44db3f021ba69350cd42086b3e5f1c6985" dependencies = [ "libc", - "linux-raw-sys", - "rustix", ] [[package]] @@ -6311,7 +6154,7 @@ dependencies = [ "lib0", "thiserror", "tokio", - "yrs 0.17.2", + "yrs", ] [[package]] @@ -6329,51 +6172,16 @@ dependencies = [ "atomic_refcell", "lib0", "rand 0.7.3", - "smallstr 0.2.0", + "smallstr", "smallvec", "thiserror", ] -[[package]] -name = "yrs" -version = "0.17.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "68aea14c6c33f2edd8a5ff9415360cfa5b98d90cce30c5ee3be59a8419fb15a9" -dependencies = [ - "atomic_refcell", - "rand 0.7.3", - "serde", - "serde_json", - "smallstr 0.3.0", - "smallvec", - "thiserror", -] - -[[package]] -name = "zerocopy" -version = "0.7.30" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "306dca4455518f1f31635ec308b6b3e4eb1b11758cefafc782827d0aa7acb5c7" -dependencies = [ - "zerocopy-derive", -] - -[[package]] -name = "zerocopy-derive" -version = 
"0.7.30" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "be912bf68235a88fbefd1b73415cb218405958d1655b2ece9035a19920bdf6ba" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.40", -] - [[package]] name = "zeroize" -version = "1.7.0" +version = "1.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "525b4ec142c6b68a2d10f01f7bbf6755599ca3f81ea53b8431b7dd348f5fdb2d" +checksum = "2a0956f1ba7c7909bfb66c2e9e4124ab6f6482560f6628b5aaeba39207c9aad9" dependencies = [ "zeroize_derive", ] @@ -6386,5 +6194,5 @@ checksum = "ce36e65b0d2999d2aafac989fb249189a141aee1f53c612c1f37d72631959f69" dependencies = [ "proc-macro2", "quote", - "syn 2.0.40", + "syn 2.0.29", ] diff --git a/third-party/rust/Cargo.toml b/third-party/rust/Cargo.toml index ae489d63f6..93fa96c9f3 100644 --- a/third-party/rust/Cargo.toml +++ b/third-party/rust/Cargo.toml @@ -32,11 +32,7 @@ ciborium = "0.2.1" clap = { version = "4.2.7", features = ["derive", "color", "env", "wrap_help"] } color-eyre = "0.6.2" colored = "2.0.4" -comfy-table = { version = "7.0.1", features = [ - "crossterm", - "tty", - "custom_styling", -] } +comfy-table = { version = "7.0.1", features = ["crossterm", "tty", "custom_styling"] } config = { version = "0.13.3", default-features = false, features = ["toml"] } console = "0.15.7" convert_case = "0.6.0" @@ -54,15 +50,8 @@ futures = "0.3.28" futures-lite = "1.13.0" hex = "0.4.3" http = "0.2.9" -hyper = { version = "0.14.26", features = [ - "client", - "http1", - "runtime", - "server", -] } -hyperlocal = { version = "0.8.0", default-features = false, features = [ - "client", -] } +hyper = { version = "0.14.26", features = ["client", "http1", "runtime", "server"] } +hyperlocal = { version = "0.8.0", default-features = false, features = ["client"] } iftree = "1.0.4" indicatif = "0.17.5" indoc = "2.0.1" @@ -76,10 +65,7 @@ nkeys = "0.2.0" num_cpus = "1.15.0" once_cell = "1.17.1" open = "5.0.0" -opentelemetry = { version = "~0.18.0", 
features = [ - "rt-tokio", - "trace", -] } # pinned, pending new release of tracing-opentelemetry, 0.18 +opentelemetry = { version = "~0.18.0", features = ["rt-tokio", "trace"] } # pinned, pending new release of tracing-opentelemetry, 0.18 opentelemetry-otlp = "~0.11.0" # pinned, pending new release of tracing-opentelemetry, post 0.18 opentelemetry-semantic-conventions = "~0.10.0" # pinned, pending new release of tracing-opentelemetry, post 0.18 ouroboros = "0.15.6" @@ -88,6 +74,7 @@ pathdiff = "0.2.1" petgraph = { version = "0.6.3", features = ["serde-1"] } pin-project-lite = "0.2.9" podman-api = "0.10" +postcard = { version = "1.0.8", features = ["use-std"] } postgres-types = { version = "0.2.5", features = ["derive"] } pretty_assertions_sorted = "1.2.1" proc-macro2 = "1.0.56" @@ -97,22 +84,10 @@ rand = "0.8.5" refinery = { version = "0.8.9", features = ["tokio-postgres"] } regex = "1.8.1" remain = "0.2.8" -reqwest = { version = "0.11.17", default-features = false, features = [ - "rustls-tls", - "json", - "multipart", -] } -rust-s3 = { version = "0.33.0", default-features = false, features = [ - "tokio-rustls-tls", -] } +reqwest = { version = "0.11.17", default-features = false, features = ["rustls-tls", "json", "multipart"] } +rust-s3 = { version = "0.33.0", default-features = false, features = ["tokio-rustls-tls"] } rustls = "0.21.6" # pinned, pending update from tokio-rustls for async-nats -sea-orm = { version = "0.11", features = [ - "sqlx-postgres", - "runtime-tokio-rustls", - "macros", - "with-chrono", - "debug-print", -] } +sea-orm = { version = "0.11", features = ["sqlx-postgres", "runtime-tokio-rustls", "macros", "with-chrono", "debug-print"] } self-replace = "1.3.5" serde = { version = "1.0.160", features = ["derive", "rc"] } serde-aux = "4.2.0" @@ -126,16 +101,10 @@ strum = { version = "0.24.1", features = ["derive"] } syn = { version = "2.0.15", features = ["full", "extra-traits"] } tar = "0.4.38" tempfile = "3.5.0" -test-log = { version = "0.2.11", 
default-features = false, features = [ - "trace", -] } +test-log = { version = "0.2.11", default-features = false, features = ["trace"] } thiserror = "1.0.40" tokio = { version = "1.28.0", features = ["full"] } -tokio-postgres = { version = "0.7.8", features = [ - "runtime", - "with-chrono-0_4", - "with-serde_json-1", -] } +tokio-postgres = { version = "0.7.8", features = ["runtime", "with-chrono-0_4", "with-serde_json-1", "array-impls"] } tokio-serde = { version = "0.8.0", features = ["json"] } tokio-stream = "0.1.14" tokio-test = "0.4.2" diff --git a/third-party/rust/fixups/crossbeam-epoch/fixups.toml b/third-party/rust/fixups/crossbeam-epoch/fixups.toml index 5e026f75e0..46dfe3a3ac 100644 --- a/third-party/rust/fixups/crossbeam-epoch/fixups.toml +++ b/third-party/rust/fixups/crossbeam-epoch/fixups.toml @@ -1,2 +1,4 @@ +cargo_env = true + [[buildscript]] [buildscript.rustc_flags] diff --git a/third-party/rust/fixups/heapless/fixups.toml b/third-party/rust/fixups/heapless/fixups.toml new file mode 100644 index 0000000000..db40d72cb2 --- /dev/null +++ b/third-party/rust/fixups/heapless/fixups.toml @@ -0,0 +1 @@ +buildscript = [] diff --git a/third-party/rust/fixups/ring/fixups.toml b/third-party/rust/fixups/ring/fixups.toml index a9f3cc6ef0..ff12ace764 100644 --- a/third-party/rust/fixups/ring/fixups.toml +++ b/third-party/rust/fixups/ring/fixups.toml @@ -1,3 +1,6 @@ +# NOTE(nick,jacob): adding cargo env. Original vendored file continues below. 
+cargo_env = true + # vendored-date: 2023-05-03 # project: https://github.com/facebook/buck2 # commit-hash: f24bf491d4ee266e1ccb444919bce598ba88ad8a From 0cdc78351f31b988b06e3b5a7fdfb9fb18ba4457 Mon Sep 17 00:00:00 2001 From: Zachary Hamm Date: Tue, 31 Oct 2023 16:36:08 -0500 Subject: [PATCH 38/92] Add rebaser to runtime and harden conflicts and updates detection - Add rebaser to the Tiltfile - Thread through config for the rebaser/si-rabbitmq library - Begin work to add workspace snapshot to the DalContext Next steps: - Rework existing implementations around the workspace snapshot being on the DalContext - Handle writing the content store, persisting the workspace snapshot and sending it to the rebaser on commit WIP: Round 19 - Finish initial work of incorporating workspace snapshot into the DalContext - Persist workspace_snapshot on commit - Send rebase request to the rebaser on commit - Restore list_funcs route and almost get it working listing funcs (it *is* but for some reason our snapshot does not have the funcs we expected) Next steps: - Investigate why the snapshot we're getting is the wrong one on the sdf route with head visibility (probably has to do with workspace new) Signed-off-by: Zachary Hamm Co-authored-by: Nick Gerace WIP: 20 - Ensure builtin installation is idempotent by fetching the existing workspace snapshot if a builtin workspace exists - Begin debugging of the rebase request which seems to be working in reverse (replacing the updated workspace snapshot with the initial one instead of working the other way around) Next steps: - Fix either the way we're calling the rebaser or the rebaser to handle this basic scenario (rebase self with new data) WIP: round 21 - Clean up workspace snapshot initialization by cleaning up and marking as seen (i.e. now everything uses the "write" method under the hood) - Ensure "editing_changeset" uses "new_local" - Ensure the content hash for all category nodes is different (i.e. 
based on the "kind" passed in) - Make progress towards figuring out why no updates are detected for the imported funcs Next steps: - Continue down the comparison of unordered members case for the graph, which is triggered when merkle tree hashes are different for two unordered container nodes (in this case, the onto and to rebase Func Category nodes) Signed-off-by: Nick Gerace Co-authored-by: Jacob Helwig WIP: round 22 - Fix detecting conflicts and updates by handling more cases than just content nodes - This also fixed the universal tenancy's default change set's workspace snapshot to be what we expect: a snapshot with all the imported funcs in it - We also confirmed that the first non-universal workspace's change set's workspace snapshot id was the same as the aforementioned one (i.e. when you log into the UI, your workspace's default change set's workspace snapshot should contain all the funcs) Next steps: - Solve errors related to the "list_funcs" route, likely starting with the content store - Figure out a long term solution for migrating the content store (now, every time you ask for a "PgStore", it will try to migrate it every time) - Clean up excess "dbg!" 
statements, graphviz "dot" outputs, and other debugging junk Signed-off-by: Nick Gerace Co-authored-by: Jacob Helwig WIP: round 23 Primary changes: - Create a new integration test for debugging imported funcs from builtins - Ensure the rebaser is using for builtin migrations when bootstrapping the global "si_test_dal" database - Ensure the rebaser actually handles its shutdown signals (use "take_until_if" from stream-cancel, like pinga) - Add the ability to create a Stream from a Consumer in our RabbitMQ library (needed for the rebaser's shutdown signal handling) - Restore "dal_test::test" and the ability to run dal integration tests - Restore "Workspace::signup" for use in tests (only compileable for tests), but put it in a new "signup" module in "dal-test" since it is a test-only method - Clean up debugging logs and messages as well as tune logging levels Misc changes: - Fix a compilation error for si-rabbitmq unit test(s) - Remove the "count" check for the rebaser hang in sdf-server - Ignore errors and manually set the func variant when calling "list_funcs" in sdf - This is temporary and for debugging Side effects: - Remove the Drop impl on Consumer since "into_stream" needs to consume self (sad face) Next steps: - Use the integration test to debug the importing of builtin funcs and listing them from a new workspace Signed-off-by: Nick Gerace Co-authored-by: Jacob Helwig Co-authored-by: Zachary Hamm WIP: round 24 Primary: - Get farther in getting the integration test for listing funcs ready - Create new change set pointers when creating a test-specific DalContext - Ensure the rebaser can use an optional stream prefix (multiple tests will be spawning multiple rebasers) Secondary: - Ensure the rebaser does not delete change set streams upon graceful shutdown Notes: - The rebaser needs to check if it has active consumer loops running for change set streams on startup in case of a restart (...or it just needs to see what change set streams exist already and start 
consumer loops for them) Next steps: - Continue getting the dal test harness to work Signed-off-by: Nick Gerace Co-authored-by: Jacob Helwig Co-authored-by: Zachary Hamm WIP: round 25 - Fix the rebaser to keep track of original node index when updating edges. - Fix the test harness configuration of the dal context so that it uses the si_test_content_store database for the content-store in tests Signed-off-by: Zachary Hamm Co-authored-by: Jacob Helwig Co-authored-by: Nick Gerace --- Cargo.lock | 304 ++- bin/rebaser/src/main.rs | 1 + bin/sdf/BUCK | 1 + bin/sdf/Cargo.toml | 1 + bin/sdf/src/main.rs | 7 + dev/Tiltfile | 20 + lib/content-store-test/src/lib.rs | 12 + lib/content-store/src/store/pg.rs | 3 +- lib/content-store/src/store/pg/tools.rs | 2 +- lib/dal-test/BUCK | 1 + lib/dal-test/Cargo.toml | 1 + lib/dal-test/src/helpers.rs | 151 +- lib/dal-test/src/lib.rs | 39 +- lib/dal-test/src/signup.rs | 38 + lib/dal/Cargo.toml | 2 +- lib/dal/src/action_prototype.rs | 5 +- lib/dal/src/attribute/prototype.rs | 7 +- lib/dal/src/builtins/func.rs | 3 +- lib/dal/src/change_set_pointer.rs | 9 +- lib/dal/src/context.rs | 294 ++- lib/dal/src/func/backend.rs | 9 +- lib/dal/src/func/backend/validation.rs | 6 +- lib/dal/src/lib.rs | 20 +- lib/dal/src/node.rs | 9 +- lib/dal/src/pkg.rs | 18 +- lib/dal/src/pkg/import.rs | 53 +- lib/dal/src/provider/internal.rs | 1 + lib/dal/src/schema/ui_menu.rs | 6 +- lib/dal/src/socket.rs | 5 +- lib/dal/src/validation.rs | 2 +- lib/dal/src/validation/prototype.rs | 9 +- lib/dal/src/visibility.rs | 10 + lib/dal/src/workspace.rs | 36 +- lib/dal/src/workspace_snapshot.rs | 66 +- lib/dal/src/workspace_snapshot/api.rs | 25 +- .../api/attribute/prototype.rs | 14 +- .../src/workspace_snapshot/api/component.rs | 2 +- lib/dal/src/workspace_snapshot/api/func.rs | 54 +- lib/dal/src/workspace_snapshot/api/prop.rs | 6 +- .../api/provider/external.rs | 8 +- lib/dal/src/workspace_snapshot/api/schema.rs | 2 +- .../workspace_snapshot/api/schema/variant.rs | 2 +- 
.../api/schema/variant/root_prop.rs | 3 - lib/dal/src/workspace_snapshot/api/socket.rs | 12 +- lib/dal/src/workspace_snapshot/edge_weight.rs | 1 + lib/dal/src/workspace_snapshot/graph.rs | 168 +- .../node_weight/category_node_weight.rs | 4 +- lib/dal/src/workspace_snapshot/update.rs | 8 +- lib/dal/src/ws_event.rs | 3 +- lib/dal/tests/integration.rs | 2 +- .../tests/integration_test/internal/mod.rs | 58 +- .../mostly_everything_is_a_node_or_an_edge.rs | 7 +- .../builtins.rs | 31 + lib/dal/tests/integration_test/mod.rs | 2 +- lib/pinga-server/BUCK | 1 + lib/pinga-server/Cargo.toml | 1 + lib/pinga-server/src/server.rs | 3 + lib/rebaser-client/src/client.rs | 25 +- lib/rebaser-client/src/lib.rs | 2 + lib/rebaser-core/src/lib.rs | 38 +- lib/rebaser-server/BUCK | 4 +- lib/rebaser-server/Cargo.toml | 2 + lib/rebaser-server/src/config.rs | 12 + lib/rebaser-server/src/lib.rs | 1 + lib/rebaser-server/src/server.rs | 10 +- .../src/server/change_set_loop.rs | 51 +- .../src/server/management_loop.rs | 58 +- lib/sdf-server/src/server/routes.rs | 14 +- lib/sdf-server/src/server/server.rs | 19 +- lib/sdf-server/src/server/service.rs | 2 +- lib/sdf-server/src/server/service/func.rs | 1751 ++++++++--------- .../src/server/service/func/list_funcs.rs | 51 +- lib/si-rabbitmq/src/config.rs | 73 + lib/si-rabbitmq/src/consumer.rs | 41 +- lib/si-rabbitmq/src/environment.rs | 12 +- lib/si-rabbitmq/src/lib.rs | 4 +- lib/si-test-macros/src/dal_test.rs | 23 +- lib/si-test-macros/src/expand.rs | 4 +- 78 files changed, 2262 insertions(+), 1503 deletions(-) create mode 100644 lib/dal-test/src/signup.rs create mode 100644 lib/dal/tests/integration_test/internal/mostly_everything_is_a_node_or_an_edge/builtins.rs create mode 100644 lib/si-rabbitmq/src/config.rs diff --git a/Cargo.lock b/Cargo.lock index 40c56baa6f..1fb2af2a36 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -25,9 +25,9 @@ checksum = "f26201604c87b1e01bd3d98f8d5d9a8fcbb815e8cedb41ffccbeb4bf593a35fe" [[package]] name = "ahash" -version = 
"0.7.6" +version = "0.7.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fcb51a0695d8f838b1ee009b3fbf66bda078cd64590202a864a8f3e8c4315c47" +checksum = "5a824f2aa7e75a0c98c5a504fceb80649e9c35265d44525b5f94de4771a395cd" dependencies = [ "getrandom 0.2.10", "once_cell", @@ -36,13 +36,14 @@ dependencies = [ [[package]] name = "ahash" -version = "0.8.3" +version = "0.8.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2c99f64d1e06488f620f932677e24bc6e2897582980441ae90a671415bd7ec2f" +checksum = "91429305e9f0a25f6205c5b8e0d2db09e0708a7a6df0f42212bb56c32c8ac97a" dependencies = [ "cfg-if", "once_cell", "version_check", + "zerocopy", ] [[package]] @@ -166,7 +167,7 @@ dependencies = [ "http", "itoa", "memchr", - "nkeys 0.3.1", + "nkeys 0.3.2", "nuid", "once_cell", "rand 0.8.5", @@ -838,14 +839,14 @@ dependencies = [ [[package]] name = "comfy-table" -version = "7.0.1" +version = "7.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9ab77dbd8adecaf3f0db40581631b995f312a8a5ae3aa9993188bb8f23d83a5b" +checksum = "7c64043d6c7b7a4c58e39e7efccfdea7b93d885a795d0c054a69dbbf4dd52686" dependencies = [ "console", - "crossterm 0.26.1", - "strum", - "strum_macros", + "crossterm 0.27.0", + "strum 0.25.0", + "strum_macros 0.25.3", "unicode-width", ] @@ -1052,9 +1053,9 @@ dependencies = [ [[package]] name = "cpufeatures" -version = "0.2.9" +version = "0.2.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a17b76ff3a4162b0b27f354a0c87015ddad39d35f9c0c36607a3bdd175dde1f1" +checksum = "ce420fe07aecd3e67c5f910618fe65e94158f6dcc0adf44e00d69ce2bdfe0fd0" dependencies = [ "libc", ] @@ -1115,17 +1116,14 @@ dependencies = [ [[package]] name = "crossterm" -version = "0.26.1" +version = "0.27.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a84cda67535339806297f1b331d6dd6320470d2a0fe65381e79ee9e156dd3d13" +checksum = 
"f476fe445d41c9e991fd07515a6f463074b782242ccf4a5b7b1d1012e70824df" dependencies = [ - "bitflags 1.3.2", + "bitflags 2.4.1", "crossterm_winapi", "libc", - "mio", "parking_lot 0.12.1", - "signal-hook", - "signal-hook-mio", "winapi", ] @@ -1179,6 +1177,33 @@ dependencies = [ "zeroize", ] +[[package]] +name = "curve25519-dalek" +version = "4.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e89b8c6a2e4b1f45971ad09761aafb85514a84744b67a95e32c3cc1352d1f65c" +dependencies = [ + "cfg-if", + "cpufeatures", + "curve25519-dalek-derive", + "digest 0.10.7", + "fiat-crypto", + "platforms", + "rustc_version", + "subtle", +] + +[[package]] +name = "curve25519-dalek-derive" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f46882e17999c6cc590af592290432be3bce0428cb0d5f8b6715e4dc7b383eb3" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.39", +] + [[package]] name = "cyclone" version = "0.1.0" @@ -1308,7 +1333,7 @@ dependencies = [ "serde", "serde-aux", "serde_json", - "serde_with 3.3.0", + "serde_with 3.4.0", "si-cbor", "si-crypto", "si-data-nats", @@ -1316,7 +1341,7 @@ dependencies = [ "si-hash", "si-pkg", "sodiumoxide", - "strum", + "strum 0.24.1", "telemetry", "tempfile", "thiserror", @@ -1343,6 +1368,7 @@ dependencies = [ "module-index-client", "names", "pinga-server", + "rebaser-client", "rebaser-server", "remain", "serde", @@ -1455,7 +1481,7 @@ dependencies = [ "hashbrown 0.14.0", "lock_api", "once_cell", - "parking_lot_core 0.9.8", + "parking_lot_core 0.9.9", ] [[package]] @@ -1740,7 +1766,7 @@ dependencies = [ "digest 0.10.7", "elliptic-curve", "rfc6979", - "signature 2.1.0", + "signature 2.0.0", "spki 0.7.2", ] @@ -1753,6 +1779,15 @@ dependencies = [ "signature 1.6.4", ] +[[package]] +name = "ed25519" +version = "2.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "115531babc129696a58c64a4fef0a8bf9e9698629fb97e9e40767d235cfbcd53" +dependencies = [ + 
"signature 2.0.0", +] + [[package]] name = "ed25519-compact" version = "2.0.4" @@ -1769,12 +1804,24 @@ version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c762bae6dcaf24c4c84667b8579785430908723d5c889f469d76a41d59cc7a9d" dependencies = [ - "curve25519-dalek", - "ed25519", + "curve25519-dalek 3.2.0", + "ed25519 1.5.3", "sha2 0.9.9", "zeroize", ] +[[package]] +name = "ed25519-dalek" +version = "2.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7277392b266383ef8396db7fdeb1e77b6c52fed775f5df15bb24f35b72156980" +dependencies = [ + "curve25519-dalek 4.1.1", + "ed25519 2.2.3", + "sha2 0.10.8", + "signature 2.0.0", +] + [[package]] name = "educe" version = "0.4.22" @@ -1916,6 +1963,12 @@ dependencies = [ "subtle", ] +[[package]] +name = "fiat-crypto" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a481586acf778f1b1455424c343f71124b048ffa5f4fc3f8f6ae9dc432dcb3c7" + [[package]] name = "filetime" version = "0.2.22" @@ -1967,9 +2020,9 @@ checksum = "e6d5a32815ae3f33302d95fdcb2ce17862f8c65363dcfd29360480ba1001fc9c" [[package]] name = "futures" -version = "0.3.28" +version = "0.3.29" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "23342abe12aba583913b2e62f22225ff9c950774065e4bfb61a19cd9770fec40" +checksum = "da0290714b38af9b4a7b094b8a37086d1b4e61f2df9122c3cad2577669145335" dependencies = [ "futures-channel", "futures-core", @@ -1998,9 +2051,9 @@ checksum = "eb1d22c66e66d9d72e1758f0bd7d4fd0bee04cad842ee34587d68c07e45d088c" [[package]] name = "futures-executor" -version = "0.3.28" +version = "0.3.29" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ccecee823288125bd88b4d7f565c9e58e41858e47ab72e8ea2d64e93624386e0" +checksum = "0f4fb8693db0cf099eadcca0efe2a5a22e4550f98ed16aba6c48700da29597bc" dependencies = [ "futures-core", "futures-task", @@ -2188,7 +2241,7 @@ version = "0.12.3" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "8a9ee70c43aaf417c914396645a0fa852624801b24ebb7ae78fe8272889ac888" dependencies = [ - "ahash 0.7.6", + "ahash 0.7.7", ] [[package]] @@ -2197,7 +2250,7 @@ version = "0.13.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "43a3c133739dddd0d2990f9a4bdf8eb4b21ef50e4851ca85ab661199821d510e" dependencies = [ - "ahash 0.8.3", + "ahash 0.8.6", ] [[package]] @@ -2206,7 +2259,7 @@ version = "0.14.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2c6201b9ff9fd90a5a3bac2e56a830d0caa509576f0e503818ee82c181b3437a" dependencies = [ - "ahash 0.8.3", + "ahash 0.8.6", "allocator-api2", ] @@ -2357,7 +2410,7 @@ dependencies = [ "httpdate", "itoa", "pin-project-lite", - "socket2 0.4.9", + "socket2 0.4.10", "tokio", "tower-service", "tracing", @@ -2366,9 +2419,9 @@ dependencies = [ [[package]] name = "hyper-rustls" -version = "0.24.1" +version = "0.24.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8d78e1e73ec14cf7375674f74d7dde185c8206fd9dea6fb6295e8a98098aaa97" +checksum = "ec3efd23720e2049821a693cbc7e65ea87c72f1c58ff2f9522ff332b1491e590" dependencies = [ "futures-util", "http", @@ -2558,9 +2611,9 @@ dependencies = [ [[package]] name = "ipnet" -version = "2.8.0" +version = "2.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "28b29a3cd74f0f4598934efe3aeba42bae0eb4680554128851ebbecb02af14e6" +checksum = "8f518f335dce6725a761382244631d86cf0ccb2863413590b31338feb467f9c3" [[package]] name = "is-docker" @@ -2609,9 +2662,9 @@ checksum = "af150ab688ff2122fcef229be89cb50dd66af9e01a4ff320cc137eecc9bacc38" [[package]] name = "js-sys" -version = "0.3.64" +version = "0.3.65" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c5f195fe497f702db0f318b07fdd68edb16955aed830df8363d837542f8f935a" +checksum = "54c0c35952f67de54bb584e9fd912b3023117cbafc0a77d8f3dee1fb5f572fe8" dependencies = 
[ "wasm-bindgen", ] @@ -2718,9 +2771,9 @@ checksum = "57bcfdad1b858c2db7c38303a6d2ad4dfaf5eb53dfeb0910128b2c26d6158503" [[package]] name = "lock_api" -version = "0.4.10" +version = "0.4.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c1cc9717a20b1bb222f333e6a92fd32f7d8a18ddc5a3191a11af45dcbf4dcd16" +checksum = "3c168f8615b12bc01f9c17e2eb0cc07dcae1940121185446edc3744920e8ef45" dependencies = [ "autocfg", "scopeguard", @@ -2840,9 +2893,9 @@ dependencies = [ [[package]] name = "mio" -version = "0.8.8" +version = "0.8.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "927a765cd3fc26206e66b296465fa9d3e5ab003e651c1b3c060e7956d96b19d2" +checksum = "3dce281c5e46beae905d4de1870d8b1509a9142b62eedf18b443b011ca8343d0" dependencies = [ "libc", "log", @@ -3016,9 +3069,9 @@ dependencies = [ [[package]] name = "nkeys" -version = "0.3.1" +version = "0.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3e9261eb915c785ea65708bc45ef43507ea46914e1a73f1412d1a38aba967c8e" +checksum = "aad178aad32087b19042ee36dfd450b73f5f934fbfb058b59b198684dfec4c47" dependencies = [ "byteorder", "data-encoding", @@ -3173,7 +3226,7 @@ dependencies = [ "remain", "serde", "si-hash", - "strum", + "strum 0.24.1", "tar", "tempfile", "thiserror", @@ -3417,7 +3470,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3742b2c103b9f06bc9fff0a37ff4912935851bee6d36f3c02bcc755bcfec228f" dependencies = [ "lock_api", - "parking_lot_core 0.9.8", + "parking_lot_core 0.9.9", ] [[package]] @@ -3436,13 +3489,13 @@ dependencies = [ [[package]] name = "parking_lot_core" -version = "0.9.8" +version = "0.9.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "93f00c865fe7cabf650081affecd3871070f26767e7b2070a3ffae14c654b447" +checksum = "4c42a9226546d68acdd9c0a280d17ce19bfe27a46bf68784e4066115788d008e" dependencies = [ "cfg-if", "libc", - "redox_syscall 0.3.5", + "redox_syscall 
0.4.1", "smallvec", "windows-targets 0.48.5", ] @@ -3594,6 +3647,7 @@ dependencies = [ "derive_builder", "futures", "nats-subscriber", + "rebaser-client", "remain", "serde", "serde_json", @@ -3661,6 +3715,12 @@ version = "0.3.27" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "26072860ba924cbfa98ea39c8c19b4dd6a4a25423dbdf219c1eca91aa0cf6964" +[[package]] +name = "platforms" +version = "3.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "14e6ab3f592e6fb464fc9712d8d6e6912de6473954635fd76a589d832cffcbb0" + [[package]] name = "podman-api" version = "0.10.0" @@ -3787,9 +3847,9 @@ dependencies = [ [[package]] name = "primeorder" -version = "0.13.2" +version = "0.13.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3c2fcef82c0ec6eefcc179b978446c399b3cdf73c392c35604e399eee6df1ee3" +checksum = "c7dbe9ed3b56368bd99483eb32fe9c17fdd3730aebadc906918ce78d54c7eeb4" dependencies = [ "elliptic-curve", ] @@ -4096,6 +4156,7 @@ dependencies = [ "buck2-resources", "dal", "derive_builder", + "futures", "nats-subscriber", "rebaser-core", "remain", @@ -4134,11 +4195,20 @@ dependencies = [ "bitflags 1.3.2", ] +[[package]] +name = "redox_syscall" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4722d768eff46b75989dd134e5c353f0d6296e5aaa3132e776cbdb56be7731aa" +dependencies = [ + "bitflags 1.3.2", +] + [[package]] name = "redox_users" -version = "0.4.3" +version = "0.4.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b033d837a7cf162d7993aded9304e30a83213c648b6e389db233191f891e5c2b" +checksum = "a18479200779601e498ada4e8c1e1f50e3ee19deb0259c25825a98b5603b2cb4" dependencies = [ "getrandom 0.2.10", "redox_syscall 0.2.16", @@ -4569,9 +4639,9 @@ checksum = "94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49" [[package]] name = "sct" -version = "0.7.0" +version = "0.7.1" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "d53dcdb7c9f8158937a7981b48accfd39a43af418591a5d008c7b22b5e1b7ca4" +checksum = "da046153aa2352493d6cb7da4b6e5c0c057d8a1d0a9aa8560baffdd945acd414" dependencies = [ "ring", "untrusted", @@ -4583,6 +4653,7 @@ version = "0.1.0" dependencies = [ "clap", "color-eyre", + "rebaser-client", "sdf-server", "telemetry-application", "tokio", @@ -4616,7 +4687,7 @@ dependencies = [ "serde", "serde_json", "serde_url_params", - "serde_with 3.3.0", + "serde_with 3.4.0", "si-crypto", "si-data-nats", "si-data-pg", @@ -4625,7 +4696,7 @@ dependencies = [ "si-settings", "si-std", "sodiumoxide", - "strum", + "strum 0.24.1", "telemetry", "thiserror", "tokio", @@ -4869,9 +4940,9 @@ dependencies = [ [[package]] name = "serde_repr" -version = "0.1.16" +version = "0.1.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8725e1dfadb3a50f7e5ce0b1a540466f6ed3fe7a0fca2ac2b8b831d31316bd00" +checksum = "3081f5ffbb02284dda55132aa26daecedd7372a42417bbbab6f14ab7d6bb9145" dependencies = [ "proc-macro2", "quote", @@ -4880,9 +4951,9 @@ dependencies = [ [[package]] name = "serde_spanned" -version = "0.6.3" +version = "0.6.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "96426c9936fd7a0124915f9185ea1d20aa9445cc9821142f0a73bc9207a2e186" +checksum = "12022b835073e5b11e90a14f86838ceb1c8fb0325b72416845c487ac0fa95e80" dependencies = [ "serde", ] @@ -4927,9 +4998,9 @@ dependencies = [ [[package]] name = "serde_with" -version = "3.3.0" +version = "3.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1ca3b16a3d82c4088f343b7480a93550b3eabe1a358569c2dfe38bbcead07237" +checksum = "64cd236ccc1b7a29e7e2739f27c0b2dd199804abc4290e32f59f3b68d6405c23" dependencies = [ "base64 0.21.2", "chrono", @@ -4956,9 +5027,9 @@ dependencies = [ [[package]] name = "serde_with_macros" -version = "3.3.0" +version = "3.4.0" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "2e6be15c453eb305019bfa438b1593c731f36a289a7853f7707ee29e870b3b3c" +checksum = "93634eb5f75a2323b16de4748022ac4297f9e76b6dced2be287a099f41b5e788" dependencies = [ "darling 0.20.3", "proc-macro2", @@ -4968,9 +5039,9 @@ dependencies = [ [[package]] name = "serde_yaml" -version = "0.9.25" +version = "0.9.27" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1a49e178e4452f45cb61d0cd8cebc1b0fafd3e41929e996cef79aa3aca91f574" +checksum = "3cc7a1570e38322cfe4154732e5110f887ea57e22b76f4bfd32b5bdd3368666c" dependencies = [ "indexmap 2.0.0", "itoa", @@ -5033,7 +5104,7 @@ dependencies = [ "serde_json", "si-cli", "si-posthog", - "strum", + "strum 0.24.1", "telemetry-application", "tokio", ] @@ -5161,7 +5232,7 @@ dependencies = [ "serde", "serde_json", "si-hash", - "strum", + "strum 0.24.1", "tempfile", "thiserror", "tokio", @@ -5178,7 +5249,7 @@ dependencies = [ "reqwest", "serde", "serde_json", - "strum", + "strum 0.24.1", "telemetry", "thiserror", "tokio", @@ -5215,7 +5286,7 @@ version = "0.1.0" dependencies = [ "remain", "serde", - "serde_with 3.3.0", + "serde_with 3.4.0", "thiserror", ] @@ -5270,6 +5341,18 @@ dependencies = [ "zeroize", ] +[[package]] +name = "signatory" +version = "0.27.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c1e303f8205714074f6068773f0e29527e0453937fe837c9717d066635b65f31" +dependencies = [ + "pkcs8 0.10.2", + "rand_core 0.6.4", + "signature 2.0.0", + "zeroize", +] + [[package]] name = "signature" version = "1.6.4" @@ -5282,9 +5365,9 @@ dependencies = [ [[package]] name = "signature" -version = "2.1.0" +version = "2.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5e1788eed21689f9cf370582dfc467ef36ed9c707f073528ddafa8d83e3b8500" +checksum = "8fe458c98333f9c8152221191a77e2a44e8325d0193484af2e9421a53019e57d" dependencies = [ "digest 0.10.7", "rand_core 0.6.4", @@ -5328,9 +5411,9 @@ 
checksum = "62bb4feee49fdd9f707ef802e22365a35de4b7b299de4763d44bfea899442ff9" [[package]] name = "socket2" -version = "0.4.9" +version = "0.4.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "64a4a911eed85daf18834cfaa86a79b7d266ff93ff5ba14005426219480ed662" +checksum = "9f7916fc008ca5542385b89a3d3ce689953c143e9304a9bf8beec1de48994c0d" dependencies = [ "libc", "winapi", @@ -5352,7 +5435,7 @@ version = "0.2.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e26be3acb6c2d9a7aac28482586a7856436af4cfe7100031d219de2d2ecb0028" dependencies = [ - "ed25519", + "ed25519 1.5.3", "libc", "libsodium-sys", "serde", @@ -5426,7 +5509,7 @@ version = "0.6.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fa8241483a83a3f33aa5fff7e7d9def398ff9990b2752b6c6112b83c6d246029" dependencies = [ - "ahash 0.7.6", + "ahash 0.7.7", "atoi", "base64 0.13.1", "bigdecimal", @@ -5553,9 +5636,15 @@ version = "0.24.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "063e6045c0e62079840579a7e47a355ae92f60eb74daaf156fb1e84ba164e63f" dependencies = [ - "strum_macros", + "strum_macros 0.24.3", ] +[[package]] +name = "strum" +version = "0.25.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "290d54ea6f91c969195bdbcd7442c8c2a2ba87da8bf60a7ee86a235d4bc1e125" + [[package]] name = "strum_macros" version = "0.24.3" @@ -5569,6 +5658,19 @@ dependencies = [ "syn 1.0.109", ] +[[package]] +name = "strum_macros" +version = "0.25.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "23dc1fa9ac9c169a78ba62f0b841814b7abae11bdd047b9c58f893439e309ea0" +dependencies = [ + "heck 0.4.1", + "proc-macro2", + "quote", + "rustversion", + "syn 2.0.39", +] + [[package]] name = "subtle" version = "2.5.0" @@ -5658,9 +5760,9 @@ dependencies = [ [[package]] name = "tempfile" -version = "3.8.0" +version = "3.8.1" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "cb94d2f3cc536af71caac6b6fcebf65860b347e7ce0cc9ebe8f70d3e521054ef" +checksum = "7ef1adac450ad7f4b3c28589471ade84f25f731a7a0fe30d71dfa9f60fd808e5" dependencies = [ "cfg-if", "fastrand 2.0.0", @@ -5985,9 +6087,9 @@ dependencies = [ [[package]] name = "toml_datetime" -version = "0.6.3" +version = "0.6.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7cda73e2f1397b1262d6dfdcef8aafae14d1de7748d66822d3bfeeb6d03e5e4b" +checksum = "3550f4e9685620ac18a50ed434eb3aec30db8ba93b0287467bca5826ea25baf1" dependencies = [ "serde", ] @@ -6157,12 +6259,12 @@ dependencies = [ [[package]] name = "tracing-log" -version = "0.1.3" +version = "0.1.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "78ddad33d2d10b1ed7eb9d1f518a5674713876e97e5bb9b7345a7984fbb4f922" +checksum = "f751112709b4e791d8ce53e32c4ed2d353565a795ce84da2285393f41557bdf2" dependencies = [ - "lazy_static", "log", + "once_cell", "tracing-core", ] @@ -6356,9 +6458,9 @@ checksum = "711b9620af191e0cdc7468a8d14e709c3dcdb115b36f838e601583af800a370a" [[package]] name = "uuid" -version = "1.4.1" +version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "79daa5ed5740825c40b389c5e50312b9c86df53fccd33f281df655642b43869d" +checksum = "88ad59a7560b41a70d191093a945f0b87bc1deeda46fb237479708a1d6b6cdfc" dependencies = [ "getrandom 0.2.10", "serde", @@ -6510,9 +6612,9 @@ checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423" [[package]] name = "wasm-bindgen" -version = "0.2.87" +version = "0.2.88" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7706a72ab36d8cb1f80ffbf0e071533974a60d0a308d01a5d0375bf60499a342" +checksum = "7daec296f25a1bae309c0cd5c29c4b260e510e6d813c286b19eaadf409d40fce" dependencies = [ "cfg-if", "wasm-bindgen-macro", @@ -6520,9 +6622,9 @@ dependencies = [ [[package]] name = "wasm-bindgen-backend" -version 
= "0.2.87" +version = "0.2.88" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5ef2b6d3c510e9625e5fe6f509ab07d66a760f0885d858736483c32ed7809abd" +checksum = "e397f4664c0e4e428e8313a469aaa58310d302159845980fd23b0f22a847f217" dependencies = [ "bumpalo", "log", @@ -6535,9 +6637,9 @@ dependencies = [ [[package]] name = "wasm-bindgen-futures" -version = "0.4.37" +version = "0.4.38" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c02dbc21516f9f1f04f187958890d7e6026df8d16540b7ad9492bc34a67cea03" +checksum = "9afec9963e3d0994cac82455b2b3502b81a7f40f9a0d32181f7528d9f4b43e02" dependencies = [ "cfg-if", "js-sys", @@ -6547,9 +6649,9 @@ dependencies = [ [[package]] name = "wasm-bindgen-macro" -version = "0.2.87" +version = "0.2.88" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dee495e55982a3bd48105a7b947fd2a9b4a8ae3010041b9e0faab3f9cd028f1d" +checksum = "5961017b3b08ad5f3fe39f1e79877f8ee7c23c5e5fd5eb80de95abc41f1f16b2" dependencies = [ "quote", "wasm-bindgen-macro-support", @@ -6557,9 +6659,9 @@ dependencies = [ [[package]] name = "wasm-bindgen-macro-support" -version = "0.2.87" +version = "0.2.88" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "54681b18a46765f095758388f2d0cf16eb8d4169b639ab575a8f5693af210c7b" +checksum = "c5353b8dab669f5e10f5bd76df26a9360c748f054f862ff5f3f8aae0c7fb3907" dependencies = [ "proc-macro2", "quote", @@ -6570,9 +6672,9 @@ dependencies = [ [[package]] name = "wasm-bindgen-shared" -version = "0.2.87" +version = "0.2.88" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ca6ad05a4870b2bf5fe995117d3728437bd27d7cd5f06f13c17443ef369775a1" +checksum = "0d046c5d029ba91a1ed14da14dca44b68bf2f124cfbaf741c54151fdb3e0750b" [[package]] name = "wasm-streams" @@ -6589,9 +6691,9 @@ dependencies = [ [[package]] name = "web-sys" -version = "0.3.64" +version = "0.3.65" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "9b85cbef8c220a6abc02aefd892dfc0fc23afb1c6a426316ec33253a3877249b" +checksum = "5db499c5f66323272151db0e666cd34f78617522fb0c1604d31a27c50c206a85" dependencies = [ "js-sys", "wasm-bindgen", diff --git a/bin/rebaser/src/main.rs b/bin/rebaser/src/main.rs index 44d9817db0..7718ddfbf7 100644 --- a/bin/rebaser/src/main.rs +++ b/bin/rebaser/src/main.rs @@ -47,6 +47,7 @@ async fn run(args: args::Args, mut telemetry: ApplicationTelemetryClient) -> Res } let config = Config::try_from(args)?; + dbg!(&config); start_tracing_level_signal_handler_task(&telemetry)?; diff --git a/bin/sdf/BUCK b/bin/sdf/BUCK index e0ea652ce1..370db876e4 100644 --- a/bin/sdf/BUCK +++ b/bin/sdf/BUCK @@ -8,6 +8,7 @@ load( rust_binary( name = "sdf", deps = [ + "//lib/rebaser-client:rebaser-client", "//lib/sdf-server:sdf-server", "//lib/telemetry-application-rs:telemetry-application", "//third-party/rust:clap", diff --git a/bin/sdf/Cargo.toml b/bin/sdf/Cargo.toml index 23726dbb7a..6869db0199 100644 --- a/bin/sdf/Cargo.toml +++ b/bin/sdf/Cargo.toml @@ -12,6 +12,7 @@ path = "src/main.rs" [dependencies] clap = { workspace = true } color-eyre = { workspace = true } +rebaser-client = { path = "../../lib/rebaser-client" } sdf-server = { path = "../../lib/sdf-server" } telemetry-application = { path = "../../lib/telemetry-application-rs" } tokio = { workspace = true } diff --git a/bin/sdf/src/main.rs b/bin/sdf/src/main.rs index ec24c4fd01..97f95f470d 100644 --- a/bin/sdf/src/main.rs +++ b/bin/sdf/src/main.rs @@ -13,6 +13,8 @@ use telemetry_application::{ TelemetryClient, TelemetryConfig, }; +use rebaser_client::Config as RebaserClientConfig; + mod args; type JobProcessor = sdf_server::NatsProcessor; @@ -92,6 +94,10 @@ async fn run(args: args::Args, mut telemetry: ApplicationTelemetryClient) -> Res let module_index_url = config.module_index_url().to_string(); + // TODO: accept command line arguments and or environment variables to configure the 
rebaser + // client + let rebaser_config = RebaserClientConfig::default(); + let services_context = ServicesContext::new( pg_pool, nats_conn, @@ -101,6 +107,7 @@ async fn run(args: args::Args, mut telemetry: ApplicationTelemetryClient) -> Res Some(pkgs_path), Some(module_index_url), symmetric_crypto_service, + rebaser_config, ); if let MigrationMode::Run | MigrationMode::RunAndQuit = config.migration_mode() { diff --git a/dev/Tiltfile b/dev/Tiltfile index c32e6b4962..ea60b06cee 100644 --- a/dev/Tiltfile +++ b/dev/Tiltfile @@ -16,6 +16,7 @@ groups = { "veritech", "sdf", "module-index", + "rebaser", ], "frontend": [ "web", @@ -70,6 +71,23 @@ compose_services = ["jaeger", "nats", "otelcol", "postgres", "rabbitmq"] for service in compose_services: dc_resource(service, labels = ["platform"]) +# Locally build and run `rebaser-server` +rebaser_target = "//bin/rebaser:rebaser" +local_resource( + "rebaser", + labels = ["backend"], + cmd = "buck2 build {}".format(rebaser_target), + serve_cmd = "buck2 run {}".format(rebaser_target), + allow_parallel = True, + resource_deps = [ + "otelcol", + "postgres", + "rabbitmq" + ], + deps = _buck2_dep_inputs(rebaser_target), + trigger_mode = trigger_mode +) + # Locally build and run `module-index` module_index_target = "//bin/module-index:module-index" local_resource( @@ -151,6 +169,8 @@ local_resource( "pinga", "postgres", "veritech", + "rabbitmq", + "rebaser", ], deps = _buck2_dep_inputs(sdf_target), trigger_mode = trigger_mode, diff --git a/lib/content-store-test/src/lib.rs b/lib/content-store-test/src/lib.rs index a9f8195114..c476ebba8c 100644 --- a/lib/content-store-test/src/lib.rs +++ b/lib/content-store-test/src/lib.rs @@ -76,6 +76,18 @@ impl PgTestMigrationClient { Ok(()) } + /// Returns a reference to the pg pool used by the content store + pub fn pg_pool(&self) -> &PgPool { + &self.pg_pool + } + + /// Return a PgStore for the PgPool used by the test content store + pub async fn global_store(&self) -> Result { + 
PgStore::new(self.pg_pool.clone()) + .await + .wrap_err("failed to create PgStore for global") + } + /// Drop and create the public schema for the global content store test database. pub async fn drop_and_create_public_schema(&self) -> Result<()> { Ok(self.pg_pool.drop_and_create_public_schema().await?) diff --git a/lib/content-store/src/store/pg.rs b/lib/content-store/src/store/pg.rs index e25382465e..15c6080bf7 100644 --- a/lib/content-store/src/store/pg.rs +++ b/lib/content-store/src/store/pg.rs @@ -42,8 +42,9 @@ impl PgStore { } /// Create a new [`PgStore`] from a given [`PgPool`]. - pub async fn new_production() -> StoreResult { + pub async fn new_production_with_migration() -> StoreResult { let pg_pool = PgStoreTools::new_production_pg_pool().await?; + PgStoreTools::migrate(&pg_pool).await?; Ok(Self { inner: Default::default(), pg_pool, diff --git a/lib/content-store/src/store/pg/tools.rs b/lib/content-store/src/store/pg/tools.rs index d01a50ced2..e78e36e438 100644 --- a/lib/content-store/src/store/pg/tools.rs +++ b/lib/content-store/src/store/pg/tools.rs @@ -8,7 +8,7 @@ mod embedded { } const DBNAME: &str = "si_content_store"; -const APPLICATION_NAME: &str = "si_test_content_store"; +const APPLICATION_NAME: &str = "si-content-store"; /// A unit struct that provides helpers for performing [`PgStore`] migrations. 
#[allow(missing_debug_implementations)] diff --git a/lib/dal-test/BUCK b/lib/dal-test/BUCK index 08e87ae561..ebf2b2f8d1 100644 --- a/lib/dal-test/BUCK +++ b/lib/dal-test/BUCK @@ -10,6 +10,7 @@ rust_library( "//lib/dal:dal", "//lib/module-index-client:module-index-client", "//lib/pinga-server:pinga-server", + "//lib/rebaser-client:rebaser-client", "//lib/rebaser-server:rebaser-server", "//lib/si-crypto:si-crypto", "//lib/si-data-nats:si-data-nats", diff --git a/lib/dal-test/Cargo.toml b/lib/dal-test/Cargo.toml index 73bd622b70..ff1a2847d3 100644 --- a/lib/dal-test/Cargo.toml +++ b/lib/dal-test/Cargo.toml @@ -13,6 +13,7 @@ council-server = { path = "../../lib/council-server" } dal = { path = "../../lib/dal" } module-index-client = { path = "../../lib/module-index-client" } pinga-server = { path = "../../lib/pinga-server" } +rebaser-client = { path = "../../lib/rebaser-client"} rebaser-server = { path = "../../lib/rebaser-server" } si-crypto = { path = "../../lib/si-crypto" } si-data-nats = { path = "../../lib/si-data-nats" } diff --git a/lib/dal-test/src/helpers.rs b/lib/dal-test/src/helpers.rs index 0f4ee35049..a2588dc1c1 100644 --- a/lib/dal-test/src/helpers.rs +++ b/lib/dal-test/src/helpers.rs @@ -1,20 +1,17 @@ use color_eyre::Result; +use dal::change_set_pointer::{ChangeSetPointer, ChangeSetPointerId}; use dal::{ - func::{ - argument::{FuncArgument, FuncArgumentId}, - binding::FuncBindingId, - binding_return_value::FuncBindingReturnValueId, - }, - ChangeSet, DalContext, Func, FuncBinding, FuncId, HistoryActor, StandardModel, User, UserClaim, - UserPk, Visibility, Workspace, WorkspaceSignup, + ChangeSet, DalContext, Func, FuncId, HistoryActor, StandardModel, User, UserClaim, UserPk, + Visibility, Workspace, }; use jwt_simple::algorithms::RSAKeyPairLike; use jwt_simple::{claims::Claims, reexports::coarsetime::Duration}; use names::{Generator, Name}; use crate::jwt_private_signing_key; +use crate::signup::WorkspaceSignup; -pub mod component_bag; +// pub mod 
component_bag; pub fn generate_fake_name() -> String { Generator::with_naming(Name::Numbered).next().unwrap() @@ -41,7 +38,7 @@ pub async fn workspace_signup(ctx: &DalContext) -> Result<(WorkspaceSignup, Stri let user_name = format!("frank {workspace_name}"); let user_email = format!("{workspace_name}@example.com"); - let nw = Workspace::signup(&mut ctx, &workspace_name, &user_name, &user_email) + let nw = WorkspaceSignup::new(&mut ctx, &workspace_name, &user_name, &user_email) .await .wrap_err("cannot signup a new workspace")?; let auth_token = create_auth_token(UserClaim { @@ -52,77 +49,79 @@ pub async fn workspace_signup(ctx: &DalContext) -> Result<(WorkspaceSignup, Stri Ok((nw, auth_token)) } -pub async fn create_user(ctx: &DalContext) -> User { - let name = generate_fake_name(); - User::new( - ctx, - UserPk::generate(), - &name, - &format!("{name}@test.systeminit.com"), - None::<&str>, - ) - .await - .expect("cannot create user") -} - -pub async fn create_change_set(ctx: &DalContext) -> ChangeSet { - let name = generate_fake_name(); - ChangeSet::new(ctx, &name, None) - .await - .expect("cannot create change_set") -} - -pub fn create_visibility_for_change_set(change_set: &ChangeSet) -> Visibility { - Visibility::new(change_set.pk, None) -} - -/// Creates a new [`Visibility`] backed by a new [`ChangeSet`] -pub async fn create_visibility_for_new_change_set(ctx: &DalContext) -> Visibility { - let _history_actor = HistoryActor::SystemInit; - let change_set = create_change_set(ctx).await; - - create_visibility_for_change_set(&change_set) -} - -pub async fn create_change_set_and_update_ctx(ctx: &mut DalContext) { - let visibility = create_visibility_for_new_change_set(ctx).await; - ctx.update_visibility(visibility); -} - -/// Get the "si:identity" [`Func`] and execute (if necessary). 
-pub async fn setup_identity_func( - ctx: &DalContext, -) -> ( - FuncId, - FuncBindingId, - FuncBindingReturnValueId, - FuncArgumentId, +// pub async fn create_user(ctx: &DalContext) -> User { +// let name = generate_fake_name(); +// User::new( +// ctx, +// UserPk::generate(), +// &name, +// &format!("{name}@test.systeminit.com"), +// None::<&str>, +// ) +// .await +// .expect("cannot create user") +// } +// + +pub async fn create_change_set_and_update_ctx( + ctx: &mut DalContext, + base_change_set_id: ChangeSetPointerId, ) { - let identity_func: Func = Func::find_by_attr(ctx, "name", &"si:identity".to_string()) + let base_change_set = ChangeSetPointer::find(ctx, base_change_set_id) .await - .expect("could not find identity func by name attr") - .pop() - .expect("identity func not found"); - - let identity_func_identity_arg = FuncArgument::list_for_func(ctx, *identity_func.id()) + .expect("could not perform find change set") + .expect("no change set found"); + let mut change_set = ChangeSetPointer::new(ctx, generate_fake_name(), Some(base_change_set_id)) .await - .expect("cannot list identity func args") - .pop() - .expect("cannot find identity func identity arg"); - - let (identity_func_binding, identity_func_binding_return_value) = - FuncBinding::create_and_execute( + .expect("could not create change set pointer"); + change_set + .update_pointer( ctx, - serde_json::json![{ "identity": null }], - *identity_func.id(), - vec![], + base_change_set + .workspace_snapshot_id + .expect("no workspace snapshot set on base change set"), ) .await - .expect("could not find or create identity func binding"); - ( - *identity_func.id(), - *identity_func_binding.id(), - *identity_func_binding_return_value.id(), - *identity_func_identity_arg.id(), - ) + .expect("could not update pointer"); + ctx.update_visibility(Visibility::new_for_change_set_pointer(change_set.id)); + ctx.update_snapshot_to_visibility() + .await + .expect("could not update snapshot to visibility"); } + +// /// 
Get the "si:identity" [`Func`] and execute (if necessary). +// pub async fn setup_identity_func( +// ctx: &DalContext, +// ) -> ( +// FuncId, +// FuncBindingId, +// FuncBindingReturnValueId, +// FuncArgumentId, +// ) { +// let identity_func: Func = Func::find_by_attr(ctx, "name", &"si:identity".to_string()) +// .await +// .expect("could not find identity func by name attr") +// .pop() +// .expect("identity func not found"); +// +// let identity_func_identity_arg = FuncArgument::list_for_func(ctx, *identity_func.id()) +// .await +// .expect("cannot list identity func args") +// .pop() +// .expect("cannot find identity func identity arg"); +// +// let (identity_func_binding, identity_func_binding_return_value) = +// FuncBinding::create_and_execute( +// ctx, +// serde_json::json![{ "identity": null }], +// *identity_func.id(), +// ) +// .await +// .expect("could not find or create identity func binding"); +// ( +// *identity_func.id(), +// *identity_func_binding.id(), +// *identity_func_binding_return_value.id(), +// *identity_func_identity_arg.id(), +// ) +// } diff --git a/lib/dal-test/src/lib.rs b/lib/dal-test/src/lib.rs index 992af762f1..fb66835380 100644 --- a/lib/dal-test/src/lib.rs +++ b/lib/dal-test/src/lib.rs @@ -19,6 +19,7 @@ use dal::{ use derive_builder::Builder; use jwt_simple::prelude::RS256KeyPair; use lazy_static::lazy_static; +use rebaser_client::Config as RebaserClientConfig; use si_crypto::{ SymmetricCryptoService, SymmetricCryptoServiceConfig, SymmetricCryptoServiceConfigFile, }; @@ -39,7 +40,8 @@ pub use si_test_macros::{dal_test as test, sdf_test}; pub use telemetry; pub use tracing_subscriber; -// pub mod helpers; +pub mod helpers; +mod signup; // pub mod test_harness; const ENV_VAR_NATS_URL: &str = "SI_TEST_NATS_URL"; @@ -94,6 +96,8 @@ pub struct Config { #[builder(default)] pkgs_path: Option, symmetric_crypto_service_config: SymmetricCryptoServiceConfig, + #[builder(default)] + rebaser_config: RebaserClientConfig, } impl Config { @@ -176,6 
+180,9 @@ pub struct TestContext { /// This should be configurable in the future, but for now, the only kind of store used is the /// [`PgStore`](content_store::PgStore). content_store: PgStore, + + /// The configuration for the rebaser client used in tests + rebaser_config: RebaserClientConfig, } impl TestContext { @@ -250,6 +257,7 @@ impl TestContext { self.config.pkgs_path.to_owned(), None, self.symmetric_crypto_service.clone(), + self.rebaser_config.clone(), ) } @@ -312,13 +320,14 @@ impl TestContextBuilder { } async fn build_inner(&self, pg_pool: PgPool, content_store: PgStore) -> Result { + let universal_prefix = random_identifier_string(); + // Need to make a new NatsConfig so that we can add the test-specific subject prefix // without leaking it to other tests. let mut nats_config = self.config.nats.clone(); - let nats_subject_prefix = random_identifier_string(); - nats_config.subject_prefix = Some(nats_subject_prefix.clone()); + nats_config.subject_prefix = Some(universal_prefix.clone()); let mut config = self.config.clone(); - config.nats.subject_prefix = Some(nats_subject_prefix); + config.nats.subject_prefix = Some(universal_prefix.clone()); let nats_conn = NatsClient::new(&nats_config) .await @@ -330,6 +339,9 @@ impl TestContextBuilder { SymmetricCryptoService::from_config(&self.config.symmetric_crypto_service_config) .await?; + let mut rebaser_config = RebaserClientConfig::default(); + rebaser_config.set_stream_prefix(universal_prefix); + Ok(TestContext { config, pg_pool, @@ -338,6 +350,7 @@ impl TestContextBuilder { encryption_key: self.encryption_key.clone(), symmetric_crypto_service, content_store, + rebaser_config, }) } @@ -483,6 +496,7 @@ pub fn rebaser_server(services_context: &ServicesContext) -> Result Result<()> { let pinga_server_handle = pinga_server.shutdown_handle(); tokio::spawn(pinga_server.run()); - // Do not start up the Rebaser server since we do not need it for initial migrations. 
- debug!("skipping Rebaser server startup and shutdown for initial migrations"); + // Start up a Rebaser server for migrations + info!("starting Rebaser server for initial migrations"); + let rebaser_server = rebaser_server(&services_ctx)?; + let rebaser_server_handle = rebaser_server.shutdown_handle(); + tokio::spawn(rebaser_server.run()); // Start up a Veritech server as a task exclusively to allow the migrations to run info!("starting Veritech server for initial migrations"); @@ -607,6 +624,9 @@ async fn global_setup(test_context_builer: TestContextBuilder) -> Result<()> { info!("creating builtins"); // TODO: @stack72 - remove this code path and install these from the module-index?? + let content_store_pg_store = content_store_pg_test_migration_client + .global_store() + .await?; dal::migrate_builtins( services_ctx.pg_pool(), services_ctx.nats_conn(), @@ -621,6 +641,8 @@ async fn global_setup(test_context_builer: TestContextBuilder) -> Result<()> { .expect("no pkgs path configured"), test_context.config.module_index_url.clone(), services_ctx.symmetric_crypto_service(), + services_ctx.rebaser_config().clone(), + &content_store_pg_store, ) .await .wrap_err("failed to run builtin migrations")?; @@ -630,6 +652,11 @@ async fn global_setup(test_context_builer: TestContextBuilder) -> Result<()> { info!("shutting down initial migrations Pinga server"); pinga_server_handle.shutdown().await; + // Shutdown the Rebaser server (each test gets their own server instance with an exclusively + // unique subject prefix) + info!("shutting down initial migrations Rebaser server"); + rebaser_server_handle.shutdown().await; + // Shutdown the Veritech server (each test gets their own server instance with an exclusively // unique subject prefix) info!("shutting down initial migrations Veritech server"); diff --git a/lib/dal-test/src/signup.rs b/lib/dal-test/src/signup.rs new file mode 100644 index 0000000000..9312e14e58 --- /dev/null +++ b/lib/dal-test/src/signup.rs @@ -0,0 +1,38 @@ 
+use dal::{DalContext, HistoryActor, KeyPair, User, UserPk, Workspace, WorkspacePk}; +use serde::{Deserialize, Serialize}; + +/// A wrapper for creating [`Workspaces`](Workspace) for integration tests. +#[derive(Deserialize, Serialize, Debug, Clone, PartialEq, Eq)] +pub struct WorkspaceSignup { + pub key_pair: KeyPair, + pub user: User, + pub workspace: Workspace, +} + +impl WorkspaceSignup { + pub async fn new( + ctx: &mut DalContext, + workspace_name: impl AsRef, + user_name: impl AsRef, + user_email: impl AsRef, + ) -> color_eyre::Result { + let workspace = Workspace::new(ctx, WorkspacePk::generate(), workspace_name).await?; + let key_pair = KeyPair::new(ctx, "default").await?; + + let user = User::new( + ctx, + UserPk::generate(), + &user_name, + &user_email, + None::<&str>, + ) + .await?; + ctx.update_history_actor(HistoryActor::User(user.pk())); + + Ok(Self { + key_pair, + user, + workspace, + }) + } +} diff --git a/lib/dal/Cargo.toml b/lib/dal/Cargo.toml index de80fd3c7d..40e7e20144 100644 --- a/lib/dal/Cargo.toml +++ b/lib/dal/Cargo.toml @@ -32,6 +32,7 @@ paste = { workspace = true } petgraph = { workspace = true } postgres-types = { workspace = true } rand = { workspace = true } +rebaser-client = { path = "../../lib/rebaser-client" } refinery = { workspace = true } regex = { workspace = true } remain = { workspace = true } @@ -61,7 +62,6 @@ content-store-test = { path = "../../lib/content-store-test" } dal-test = { path = "../../lib/dal-test" } itertools = { workspace = true } pretty_assertions_sorted = { workspace = true } -rebaser-client = { path = "../../lib/rebaser-client" } rebaser-core = { path = "../../lib/rebaser-core" } rebaser-server = { path = "../../lib/rebaser-server" } tempfile = { workspace = true } diff --git a/lib/dal/src/action_prototype.rs b/lib/dal/src/action_prototype.rs index 67b306fa48..4cc18c8e4a 100644 --- a/lib/dal/src/action_prototype.rs +++ b/lib/dal/src/action_prototype.rs @@ -4,8 +4,11 @@ use si_pkg::ActionFuncSpecKind; use 
std::default::Default; use strum::{AsRefStr, Display, EnumDiscriminants}; +use si_pkg::ActionFuncSpecKind; +use telemetry::prelude::*; + use crate::workspace_snapshot::content_address::ContentAddress; -use crate::{pk, SchemaVariantId, Timestamp}; +use crate::{pk, SchemaVariantId, StandardModel, Timestamp}; // const FIND_FOR_CONTEXT: &str = include_str!("./queries/action_prototype/find_for_context.sql"); // const FIND_FOR_CONTEXT_AND_KIND: &str = diff --git a/lib/dal/src/attribute/prototype.rs b/lib/dal/src/attribute/prototype.rs index e8e4b34a92..10ca56a79a 100644 --- a/lib/dal/src/attribute/prototype.rs +++ b/lib/dal/src/attribute/prototype.rs @@ -9,19 +9,14 @@ //! [`Map`](crate::prop::PropKind::Map): Which key of the `Map` the value is //! for. - use content_store::ContentHash; use serde::{Deserialize, Serialize}; - use strum::EnumDiscriminants; use telemetry::prelude::*; - use crate::workspace_snapshot::content_address::ContentAddress; -use crate::{ - pk, StandardModel, Timestamp, -}; +use crate::{pk, StandardModel, Timestamp}; pub mod argument; diff --git a/lib/dal/src/builtins/func.rs b/lib/dal/src/builtins/func.rs index 1e145ab386..6ef31db97c 100644 --- a/lib/dal/src/builtins/func.rs +++ b/lib/dal/src/builtins/func.rs @@ -58,9 +58,8 @@ pub async fn migrate_intrinsics(ctx: &DalContext) -> BuiltinsResult<()> { .await? 
.is_none() { - dbg!("intrinsics pkg not found, importing"); import_pkg_from_pkg(ctx, &intrinsics_pkg, None).await?; - ctx.blocking_commit().await?; + // ctx.blocking_commit().await?; } Ok(()) diff --git a/lib/dal/src/change_set_pointer.rs b/lib/dal/src/change_set_pointer.rs index 650e1857e4..68d663d622 100644 --- a/lib/dal/src/change_set_pointer.rs +++ b/lib/dal/src/change_set_pointer.rs @@ -78,13 +78,20 @@ impl ChangeSetPointer { }) } + pub fn editing_changeset(&self) -> ChangeSetPointerResult { + let mut new_local = Self::new_local()?; + new_local.base_change_set_id = self.base_change_set_id; + new_local.workspace_snapshot_id = self.workspace_snapshot_id; + new_local.name = self.name.to_owned(); + Ok(new_local) + } + pub async fn new( ctx: &DalContext, name: impl AsRef, base_change_set_id: Option, ) -> ChangeSetPointerResult { let name = name.as_ref(); - dbg!(name, &base_change_set_id); let row = ctx .txns() .await? diff --git a/lib/dal/src/context.rs b/lib/dal/src/context.rs index 79e485a71d..770df4c829 100644 --- a/lib/dal/src/context.rs +++ b/lib/dal/src/context.rs @@ -2,6 +2,9 @@ use std::{mem, path::PathBuf, sync::Arc}; use content_store::{PgStore, StoreError}; use futures::Future; +use rebaser_client::ChangeSetReplyMessage; +use rebaser_client::ClientError as RebaserClientError; +use rebaser_client::Config as RebaserClientConfig; use serde::{Deserialize, Serialize}; use si_crypto::SymmetricCryptoService; use si_data_nats::{NatsClient, NatsError, NatsTxn}; @@ -12,13 +15,17 @@ use tokio::sync::{MappedMutexGuard, Mutex, MutexGuard}; use ulid::Ulid; use veritech_client::{Client as VeritechClient, CycloneEncryptionKey}; +use crate::workspace_snapshot::vector_clock::VectorClockId; +use crate::workspace_snapshot::WorkspaceSnapshotId; +use crate::Workspace; use crate::{ - change_set_pointer::ChangeSetPointerId, + change_set_pointer::{ChangeSetPointer, ChangeSetPointerError, ChangeSetPointerId}, job::{ processor::{JobQueueProcessor, JobQueueProcessorError}, 
producer::{BlockingJobError, BlockingJobResult, JobProducer}, }, - HistoryActor, StandardModel, Tenancy, TenancyError, Visibility, WorkspacePk, + workspace_snapshot::WorkspaceSnapshotError, + HistoryActor, StandardModel, Tenancy, TenancyError, Visibility, WorkspacePk, WorkspaceSnapshot, }; /// A context type which contains handles to common core service dependencies. @@ -43,6 +50,8 @@ pub struct ServicesContext { module_index_url: Option, /// A service that can encrypt and decrypt values with a set of symmetric keys symmetric_crypto_service: SymmetricCryptoService, + /// Config for the the rebaser service + rebaser_config: RebaserClientConfig, } impl ServicesContext { @@ -57,6 +66,7 @@ impl ServicesContext { pkgs_path: Option, module_index_url: Option, symmetric_crypto_service: SymmetricCryptoService, + rebaser_config: RebaserClientConfig, ) -> Self { Self { pg_pool, @@ -67,6 +77,7 @@ impl ServicesContext { pkgs_path, module_index_url, symmetric_crypto_service, + rebaser_config, } } @@ -114,12 +125,23 @@ impl ServicesContext { &self.symmetric_crypto_service } + pub fn rebaser_config(&self) -> &RebaserClientConfig { + &self.rebaser_config + } + /// Builds and returns a new [`Connections`]. 
pub async fn connections(&self) -> PgPoolResult { let pg_conn = self.pg_pool.get().await?; let nats_conn = self.nats_conn.clone(); let job_processor = self.job_processor.clone(); - Ok(Connections::new(pg_conn, nats_conn, job_processor)) + let rebaser_config = self.rebaser_config.clone(); + + Ok(Connections::new( + pg_conn, + nats_conn, + job_processor, + rebaser_config, + )) } } @@ -163,28 +185,34 @@ impl ConnectionState { } } - async fn commit(self) -> Result { + async fn commit( + self, + rebase_request: Option, + ) -> Result { match self { Self::Connections(_) => { trace!("no active transactions present when commit was called, taking no action"); Ok(self) } Self::Transactions(txns) => { - let conns = txns.commit_into_conns().await?; + let conns = txns.commit_into_conns(rebase_request).await?; Ok(Self::Connections(conns)) } Self::Invalid => Err(TransactionsError::TxnCommit), } } - async fn blocking_commit(self) -> Result { + async fn blocking_commit( + self, + rebase_request: Option, + ) -> Result { match self { Self::Connections(_) => { trace!("no active transactions present when commit was called, taking no action"); Ok(self) } Self::Transactions(txns) => { - let conns = txns.blocking_commit_into_conns().await?; + let conns = txns.blocking_commit_into_conns(rebase_request).await?; Ok(Self::Connections(conns)) } Self::Invalid => Err(TransactionsError::TxnCommit), @@ -206,6 +234,8 @@ impl ConnectionState { } } +pub enum DalContextError {} + /// A context type which holds references to underlying services, transactions, and context for DAL objects. #[derive(Clone, Debug)] pub struct DalContext { @@ -231,6 +261,10 @@ pub struct DalContext { /// This should be configurable in the future, but for now, the only kind of store used is the /// [`PgStore`](content_store::PgStore). 
content_store: Arc>, + /// The workspace snapshot for this context + workspace_snapshot: Option>>, + /// The change set pointer for this context + change_set_pointer: Option, } impl DalContext { @@ -245,18 +279,137 @@ impl DalContext { } } + pub async fn get_workspace_default_change_set_id( + &self, + ) -> Result { + let workspace = Workspace::get_by_pk( + self, + &self.tenancy().workspace_pk().unwrap_or(WorkspacePk::NONE), + ) + .await + // use a proper error + .map_err(|err| TransactionsError::ChangeSet(err.to_string()))?; + + let cs_id = workspace + .map(|workspace| workspace.default_change_set_id()) + .unwrap_or(ChangeSetPointerId::NONE); + + Ok(cs_id) + } + + pub async fn update_snapshot_to_visibility(&mut self) -> Result<(), TransactionsError> { + let change_set_id = match self.change_set_id() { + ChangeSetPointerId::NONE => self.get_workspace_default_change_set_id().await?, + other => other, + }; + + let change_set_pointer = ChangeSetPointer::find(self, change_set_id) + .await + .map_err(|err| TransactionsError::ChangeSet(err.to_string()))? 
+ .ok_or(TransactionsError::ChangeSetPointerNotFound( + self.change_set_id(), + ))?; + + let workspace_snapshot = + WorkspaceSnapshot::find_for_change_set(self, change_set_pointer.id) + .await + .map_err(|err| TransactionsError::WorkspaceSnapshot(err.to_string()))?; + + self.set_change_set_pointer(change_set_pointer)?; + self.set_workspace_snapshot(workspace_snapshot); + + Ok(()) + } + + pub async fn write_snapshot(&self) -> Result, TransactionsError> { + if let Some(snapshot) = &self.workspace_snapshot { + let vector_clock_id = self.change_set_pointer()?.vector_clock_id(); + + Ok(Some( + snapshot + .lock() + .await + .write(self, vector_clock_id) + .await + .map_err(|err| TransactionsError::WorkspaceSnapshot(err.to_string()))?, + )) + } else { + Ok(None) + } + } + + fn get_rebase_request( + &self, + onto_workspace_snapshot_id: WorkspaceSnapshotId, + ) -> Result { + let vector_clock_id = self.change_set_pointer()?.vector_clock_id(); + Ok(RebaseRequest { + onto_workspace_snapshot_id, + to_rebase_change_set_id: self.change_set_id().into(), + onto_vector_clock_id: vector_clock_id, + }) + } + /// Consumes all inner transactions and committing all changes made within them. pub async fn commit(&self) -> Result<(), TransactionsError> { if self.blocking { self.blocking_commit().await?; } else { + let rebase_request = match self.write_snapshot().await? { + Some(workspace_snapshot_id) => { + Some(self.get_rebase_request(workspace_snapshot_id)?) 
+ } + None => None, + }; + let mut guard = self.conns_state.lock().await; - *guard = guard.take().commit().await?; + *guard = guard.take().commit(rebase_request).await?; } Ok(()) } + pub fn change_set_pointer(&self) -> Result<&ChangeSetPointer, TransactionsError> { + match self.change_set_pointer.as_ref() { + Some(csp_ref) => Ok(csp_ref), + None => Err(TransactionsError::ChangeSetPointerNotSet), + } + } + + /// Fetch the change set pointer for the current change set visibility + /// Should only be called by DalContextBuilder or by ourselves if changing visibility or + /// refetching after a commit + pub fn set_change_set_pointer( + &mut self, + change_set_pointer: ChangeSetPointer, + ) -> Result<&ChangeSetPointer, TransactionsError> { + // "fork" a new change set pointer for this dal context "edit session". This gives us a new + // Ulid generator and new vector clock id so that concurrent editing conflicts can be + // resolved by the rebaser. This change set pointer is not persisted to the database (the + // rebaser will persist a new one if it can) + self.change_set_pointer = Some( + change_set_pointer + .editing_changeset() + .map_err(|err| TransactionsError::ChangeSet(err.to_string()))?, + ); + + Ok(self.change_set_pointer()?) + } + + pub fn set_workspace_snapshot(&mut self, workspace_snapshot: WorkspaceSnapshot) { + self.workspace_snapshot = Some(Arc::new(Mutex::new(workspace_snapshot))); + } + + /// Fetch the workspace snapshot for the current visibility + pub fn workspace_snapshot( + &self, + ) -> Result<&Arc>, WorkspaceSnapshotError> { + match &self.workspace_snapshot { + Some(workspace_snapshot) => Ok(&workspace_snapshot), + None => Err(WorkspaceSnapshotError::WorkspaceSnapshotNotFetched), + } + } + pub fn blocking(&self) -> bool { self.blocking } @@ -276,9 +429,17 @@ impl DalContext { /// Consumes all inner transactions, committing all changes made within them, and /// blocks until all queued jobs have reported as finishing. 
pub async fn blocking_commit(&self) -> Result<(), TransactionsError> { + let rebase_request = match self.write_snapshot().await? { + Some(workspace_snapshot_id) => Some(self.get_rebase_request(workspace_snapshot_id)?), + None => None, + }; + info!( + "rebase request during blocking commit: {:?}", + &rebase_request + ); let mut guard = self.conns_state.lock().await; - *guard = guard.take().blocking_commit().await?; + *guard = guard.take().blocking_commit(rebase_request).await?; Ok(()) } @@ -636,7 +797,7 @@ impl DalContextBuilder { let conns = self.connections().await?; let raw_content_store = match &self.content_store { Some(found_content_store) => found_content_store.clone(), - None => PgStore::new_production().await?, + None => PgStore::new_production_with_migration().await?, }; Ok(DalContext { @@ -648,6 +809,8 @@ impl DalContextBuilder { history_actor: HistoryActor::SystemInit, content_store: Arc::new(Mutex::new(raw_content_store)), no_dependent_values: self.no_dependent_values, + workspace_snapshot: None, + change_set_pointer: None, }) } @@ -667,6 +830,8 @@ impl DalContextBuilder { history_actor: HistoryActor::SystemInit, no_dependent_values: self.no_dependent_values, content_store: Arc::new(Mutex::new(content_store)), + workspace_snapshot: None, + change_set_pointer: None, }) } @@ -676,9 +841,9 @@ impl DalContextBuilder { access_builder: AccessBuilder, ) -> Result { let conns = self.connections().await?; - let raw_content_store = match &self.content_store { + let raw_content_store = match dbg!(&self.content_store) { Some(found_content_store) => found_content_store.clone(), - None => PgStore::new_production().await?, + None => PgStore::new_production_with_migration().await?, }; Ok(DalContext { @@ -690,6 +855,8 @@ impl DalContextBuilder { visibility: Visibility::new_head(false), no_dependent_values: self.no_dependent_values, content_store: Arc::new(Mutex::new(raw_content_store)), + workspace_snapshot: None, + change_set_pointer: None, }) } @@ -701,10 +868,10 @@ 
impl DalContextBuilder { let conns = self.connections().await?; let raw_content_store = match &self.content_store { Some(found_content_store) => found_content_store.clone(), - None => PgStore::new_production().await?, + None => PgStore::new_production_with_migration().await?, }; - Ok(DalContext { + let mut ctx = DalContext { services_context: self.services_context.clone(), blocking: self.blocking, conns_state: Arc::new(Mutex::new(ConnectionState::new_from_conns(conns))), @@ -713,7 +880,13 @@ impl DalContextBuilder { history_actor: request_context.history_actor, no_dependent_values: self.no_dependent_values, content_store: Arc::new(Mutex::new(raw_content_store)), - }) + workspace_snapshot: None, + change_set_pointer: None, + }; + + ctx.update_snapshot_to_visibility().await?; + + Ok(ctx) } /// Gets a reference to the PostgreSQL connection pool. @@ -759,6 +932,12 @@ impl DalContextBuilder { #[remain::sorted] #[derive(Debug, Error)] pub enum TransactionsError { + #[error("change set error: {0}")] + ChangeSet(String), + #[error("change set pointer not found for change set id: {0}")] + ChangeSetPointerNotFound(ChangeSetPointerId), + #[error("Change set pointer not set on DalContext")] + ChangeSetPointerNotSet, #[error(transparent)] JobQueueProcessor(#[from] JobQueueProcessorError), #[error(transparent)] @@ -767,6 +946,10 @@ pub enum TransactionsError { Pg(#[from] PgError), #[error(transparent)] PgPool(#[from] PgPoolError), + #[error("rebase of snapshot {0} change set id {1} failed {2}")] + RebaseFailed(WorkspaceSnapshotId, ChangeSetPointerId, String), + #[error(transparent)] + RebaserClient(#[from] RebaserClientError), #[error(transparent)] SerdeJson(#[from] serde_json::Error), #[error("store error: {0}")] @@ -779,6 +962,8 @@ pub enum TransactionsError { TxnRollback, #[error("cannot start transactions without connections; state={0}")] TxnStart(&'static str), + #[error("workspace snapshot error: {0}")] + WorkspaceSnapshot(String), } /// A type which holds ownership over 
connections that can be used to start transactions. @@ -786,6 +971,7 @@ pub enum TransactionsError { pub struct Connections { pg_conn: InstrumentedClient, nats_conn: NatsClient, + rebaser_config: RebaserClientConfig, job_processor: Box, } @@ -796,10 +982,12 @@ impl Connections { pg_conn: InstrumentedClient, nats_conn: NatsClient, job_processor: Box, + rebaser_config: RebaserClientConfig, ) -> Self { Self { pg_conn, nats_conn, + rebaser_config, job_processor, } } @@ -809,8 +997,14 @@ impl Connections { let pg_txn = PgTxn::create(self.pg_conn).await?; let nats_txn = self.nats_conn.transaction(); let job_processor = self.job_processor; + let rebaser_config = self.rebaser_config; - Ok(Transactions::new(pg_txn, nats_txn, job_processor)) + Ok(Transactions::new( + pg_txn, + nats_txn, + job_processor, + rebaser_config, + )) } /// Gets a reference to a PostgreSQL connection. @@ -834,19 +1028,61 @@ pub struct Transactions { pg_txn: PgTxn, /// A NATS transaction. nats_txn: NatsTxn, + /// Rebaser client + rebaser_config: RebaserClientConfig, job_processor: Box, } +#[derive(Clone, Debug)] +pub struct RebaseRequest { + to_rebase_change_set_id: ChangeSetPointerId, + onto_workspace_snapshot_id: WorkspaceSnapshotId, + onto_vector_clock_id: VectorClockId, +} + +async fn rebase( + rebaser_config: RebaserClientConfig, + rebase_request: RebaseRequest, +) -> Result<(), TransactionsError> { + let mut rebaser_client = rebaser_client::Client::new(rebaser_config).await?; + + rebaser_client + .open_stream_for_change_set(rebase_request.to_rebase_change_set_id.into()) + .await?; + + let response = rebaser_client + .request_rebase( + rebase_request.to_rebase_change_set_id.into(), + rebase_request.onto_workspace_snapshot_id.into(), + rebase_request.onto_vector_clock_id.into(), + ) + .await?; + + match response { + ChangeSetReplyMessage::Success { .. 
} => Ok(()), + ChangeSetReplyMessage::Error { message } => Err(TransactionsError::RebaseFailed( + rebase_request.onto_workspace_snapshot_id, + rebase_request.to_rebase_change_set_id, + message, + )), + ChangeSetReplyMessage::ConflictsFound { .. } => { + todo!("conflicts ???"); + } + } +} + impl Transactions { /// Creates and returns a new `Transactions` instance. fn new( pg_txn: PgTxn, nats_txn: NatsTxn, job_processor: Box, + rebaser_config: RebaserClientConfig, ) -> Self { Self { pg_txn, nats_txn, + rebaser_config, job_processor, } } @@ -863,22 +1099,38 @@ impl Transactions { /// Consumes all inner transactions, committing all changes made within them, and returns /// underlying connections. - pub async fn commit_into_conns(self) -> Result { + pub async fn commit_into_conns( + self, + rebase_request: Option, + ) -> Result { let pg_conn = self.pg_txn.commit_into_conn().await?; let nats_conn = self.nats_txn.commit_into_conn().await?; + + if let Some(rebase_request) = rebase_request { + rebase(self.rebaser_config.clone(), rebase_request).await?; + } + self.job_processor.process_queue().await?; - let conns = Connections::new(pg_conn, nats_conn, self.job_processor); + let conns = Connections::new(pg_conn, nats_conn, self.job_processor, self.rebaser_config); Ok(conns) } /// Consumes all inner transactions, committing all changes made within them, and returns /// underlying connections. Blocking until all queued jobs have reported as finishing. 
- pub async fn blocking_commit_into_conns(self) -> Result { + pub async fn blocking_commit_into_conns( + self, + rebase_request: Option, + ) -> Result { let pg_conn = self.pg_txn.commit_into_conn().await?; let nats_conn = self.nats_txn.commit_into_conn().await?; + + if let Some(rebase_request) = rebase_request { + rebase(self.rebaser_config.clone(), rebase_request).await?; + } + self.job_processor.blocking_process_queue().await?; - let conns = Connections::new(pg_conn, nats_conn, self.job_processor); + let conns = Connections::new(pg_conn, nats_conn, self.job_processor, self.rebaser_config); Ok(conns) } @@ -891,7 +1143,7 @@ impl Transactions { pub async fn rollback_into_conns(self) -> Result { let pg_conn = self.pg_txn.rollback_into_conn().await?; let nats_conn = self.nats_txn.rollback_into_conn().await?; - let conns = Connections::new(pg_conn, nats_conn, self.job_processor); + let conns = Connections::new(pg_conn, nats_conn, self.job_processor, self.rebaser_config); Ok(conns) } diff --git a/lib/dal/src/func/backend.rs b/lib/dal/src/func/backend.rs index 4cc699f291..51def8a0fb 100644 --- a/lib/dal/src/func/backend.rs +++ b/lib/dal/src/func/backend.rs @@ -1,12 +1,9 @@ +use thiserror::Error; + use serde::{de::DeserializeOwned, Deserialize, Serialize}; use strum::{AsRefStr, Display, EnumIter, EnumString}; use telemetry::prelude::*; -use thiserror::Error; - -use veritech_client::{ - ActionRunResultSuccess, BeforeFunction, Client as VeritechClient, FunctionResult, OutputStream, - ResolverFunctionResponseType, -}; +use veritech_client::ResolverFunctionResponseType; use crate::{label_list::ToLabelList, StandardModel}; diff --git a/lib/dal/src/func/backend/validation.rs b/lib/dal/src/func/backend/validation.rs index 8da446ba2c..5422088871 100644 --- a/lib/dal/src/func/backend/validation.rs +++ b/lib/dal/src/func/backend/validation.rs @@ -1,10 +1,6 @@ - - use serde::{Deserialize, Serialize}; - - -use crate::validation::{Validation}; +use crate::validation::Validation; 
#[derive(Deserialize, Serialize, Debug, Clone)] pub struct FuncBackendValidation { diff --git a/lib/dal/src/lib.rs b/lib/dal/src/lib.rs index 6dac5a631d..33e29f121f 100644 --- a/lib/dal/src/lib.rs +++ b/lib/dal/src/lib.rs @@ -4,7 +4,9 @@ use std::path::PathBuf; use std::sync::Arc; use std::time::Duration; +use content_store::PgStore; use rand::Rng; +use rebaser_client::Config as RebaserClientConfig; use serde_with::{DeserializeFromStr, SerializeDisplay}; use si_crypto::SymmetricCryptoService; use si_data_nats::{NatsClient, NatsError}; @@ -109,7 +111,7 @@ pub use tenancy::{Tenancy, TenancyError}; pub use timestamp::{Timestamp, TimestampError}; pub use user::{User, UserClaim, UserError, UserPk, UserResult}; pub use visibility::{Visibility, VisibilityError}; -pub use workspace::{Workspace, WorkspaceError, WorkspacePk, WorkspaceResult, WorkspaceSignup}; +pub use workspace::{Workspace, WorkspaceError, WorkspacePk, WorkspaceResult}; pub use workspace_snapshot::graph::WorkspaceSnapshotGraph; pub use workspace_snapshot::WorkspaceSnapshot; pub use ws_event::{WsEvent, WsEventError, WsEventResult, WsPayload}; @@ -195,7 +197,7 @@ pub async fn migrate(pg: &PgPool) -> ModelResult<()> { #[allow(clippy::too_many_arguments)] #[instrument(skip_all)] pub async fn migrate_builtins( - pg: &PgPool, + dal_pg: &PgPool, nats: &NatsClient, job_processor: Box, veritech: veritech_client::Client, @@ -204,9 +206,11 @@ pub async fn migrate_builtins( pkgs_path: PathBuf, module_index_url: String, symmetric_crypto_service: &SymmetricCryptoService, + rebaser_config: RebaserClientConfig, + content_store_pg: &PgStore, ) -> ModelResult<()> { let services_context = ServicesContext::new( - pg.clone(), + dal_pg.clone(), nats.clone(), job_processor, veritech, @@ -214,13 +218,17 @@ pub async fn migrate_builtins( Some(pkgs_path), Some(module_index_url), symmetric_crypto_service.clone(), + rebaser_config, ); let dal_context = services_context.into_builder(true); - let mut ctx = 
dal_context.build_default().await?; + let mut ctx = dal_context + .build_default_with_content_store(content_store_pg.to_owned()) + .await?; - let workspace = Workspace::builtin(&ctx).await?; + let workspace = Workspace::builtin(&mut ctx).await?; ctx.update_tenancy(Tenancy::new(*workspace.pk())); - ctx.blocking_commit().await?; + ctx.update_to_head(); + ctx.update_snapshot_to_visibility().await?; builtins::migrate(&ctx, selected_test_builtin_schemas).await?; diff --git a/lib/dal/src/node.rs b/lib/dal/src/node.rs index 6f0056b08c..56fb8051cc 100644 --- a/lib/dal/src/node.rs +++ b/lib/dal/src/node.rs @@ -3,18 +3,11 @@ use rand::prelude::SliceRandom; use serde::{Deserialize, Serialize}; - - use strum::EnumDiscriminants; use telemetry::prelude::*; - - use crate::workspace_snapshot::content_address::ContentAddress; -use crate::{ - pk, StandardModel, Timestamp, -}; - +use crate::{pk, StandardModel, Timestamp}; // const LIST_FOR_KIND: &str = include_str!("queries/node/list_for_kind.sql"); // const LIST_LIVE: &str = include_str!("queries/node/list_live.sql"); diff --git a/lib/dal/src/pkg.rs b/lib/dal/src/pkg.rs index bf7686a4eb..8fa02b380e 100644 --- a/lib/dal/src/pkg.rs +++ b/lib/dal/src/pkg.rs @@ -7,9 +7,9 @@ use crate::{ change_set_pointer::ChangeSetPointerError, installed_pkg::InstalledPkgError, ChangeSetPk, FuncBackendKind, FuncBackendResponseType, FuncId, }; +use crate::{workspace_snapshot::WorkspaceSnapshotError, SchemaVariantId}; -use crate::workspace_snapshot::WorkspaceSnapshotError; -pub use import::{import_pkg, import_pkg_from_pkg, ImportOptions}; +pub use import::ImportOptions; // mod export; mod import; @@ -37,6 +37,8 @@ pub enum PkgError { WorkspaceSnaphot(#[from] WorkspaceSnapshotError), } +pub use import::{import_pkg, import_pkg_from_pkg}; + impl PkgError { // fn prop_tree_invalid(message: impl Into) -> Self { // Self::PropTreeInvalid(message.into()) @@ -195,12 +197,12 @@ where } } -// #[derive(Clone, Deserialize, Serialize, Debug, PartialEq, Eq)] -// 
#[serde(rename_all = "camelCase", tag = "kind")] -// pub struct ModuleImportedPayload { -// schema_variant_ids: Vec, -// } -// +#[derive(Clone, Deserialize, Serialize, Debug, PartialEq, Eq)] +#[serde(rename_all = "camelCase", tag = "kind")] +pub struct ModuleImportedPayload { + schema_variant_ids: Vec, +} + // #[derive(Clone, Deserialize, Serialize, Debug, PartialEq, Eq)] // #[serde(rename_all = "camelCase")] // pub struct WorkspaceImportPayload { diff --git a/lib/dal/src/pkg/import.rs b/lib/dal/src/pkg/import.rs index 78281ce105..33975e3de2 100644 --- a/lib/dal/src/pkg/import.rs +++ b/lib/dal/src/pkg/import.rs @@ -116,8 +116,6 @@ pub struct ImportOptions { async fn import_change_set( ctx: &DalContext, change_set_pk: Option, - workspace_snapshot: &mut WorkspaceSnapshot, - change_set_pointer: &ChangeSetPointer, metadata: &SiPkgMetadata, funcs: &[SiPkgFunc<'_>], schemas: &[SiPkgSchema<'_>], @@ -131,6 +129,8 @@ async fn import_change_set( Vec<(String, Vec)>, Vec, )> { + let mut workspace_snapshot = ctx.workspace_snapshot()?.lock().await; + for func_spec in funcs { let unique_id = func_spec.unique_id().to_string(); @@ -142,8 +142,6 @@ async fn import_change_set( || special_case_funcs.contains(&func_spec.name()) || func_spec.is_from_builtin().unwrap_or(false) { - dbg!(func_spec.name()); - if let Some(func_id) = workspace_snapshot.func_find_by_name(func_spec.name())? 
{ let func = workspace_snapshot.func_get_by_id(ctx, func_id).await?; @@ -154,8 +152,7 @@ async fn import_change_set( ); } else if let Some(_func) = import_func( ctx, - workspace_snapshot, - change_set_pointer, + &mut workspace_snapshot, None, func_spec, installed_pkg_id, @@ -199,8 +196,7 @@ async fn import_change_set( } else { import_func( ctx, - workspace_snapshot, - change_set_pointer, + &mut workspace_snapshot, change_set_pk, func_spec, installed_pkg_id, @@ -264,6 +260,10 @@ async fn import_change_set( // edge_skips.push(skip); // } // } + // + + // workspace_snapshot.cleanup(); + // workspace_snapshot.dot(); Ok(( installed_schema_variant_ids, @@ -1449,19 +1449,9 @@ pub async fn import_pkg_from_pkg( match metadata.kind() { SiPkgKind::Module => { - dbg!("installing module", metadata.name(), ctx.change_set_id()); - let change_set_pointer_id: ChangeSetPointerId = ctx.change_set_id(); - let change_set_pointer = ChangeSetPointer::find(ctx, change_set_pointer_id) - .await? - .expect("head should exist"); - let mut workspace_snapshot = - WorkspaceSnapshot::find_for_change_set(ctx, change_set_pointer_id).await?; - let (installed_schema_variant_ids, _, _) = import_change_set( ctx, None, - &mut workspace_snapshot, - &change_set_pointer, &metadata, &pkg.funcs()?, &[], // &pkg.schemas()?, @@ -1473,6 +1463,15 @@ pub async fn import_pkg_from_pkg( ) .await?; + dbg!(ctx + .workspace_snapshot()? 
+ .lock() + .await + .list_funcs(ctx) + .await + .expect("should get list funcs") + .len()); + Ok((installed_pkg_id, installed_schema_variant_ids, None)) } SiPkgKind::WorkspaceBackup => { @@ -1571,10 +1570,8 @@ pub async fn import_pkg(ctx: &DalContext, pkg_file_path: impl AsRef) -> Pk async fn create_func( ctx: &DalContext, workspace_snapshot: &mut WorkspaceSnapshot, - change_set_pointer: &ChangeSetPointer, func_spec: &SiPkgFunc<'_>, ) -> PkgResult { - dbg!("create func"); let name = func_spec.name(); let func_spec_data = func_spec @@ -1585,17 +1582,14 @@ async fn create_func( let func = workspace_snapshot .func_create( ctx, - change_set_pointer, name, func_spec_data.backend_kind().into(), func_spec_data.response_type().into(), ) .await?; - dbg!("created func"); - let func = workspace_snapshot - .func_modify_by_id(ctx, change_set_pointer, func.id, |func| { + .func_modify_by_id(ctx, func.id, |func| { func.display_name = func_spec_data .display_name() .map(|display_name| display_name.to_owned()); @@ -1608,7 +1602,6 @@ async fn create_func( Ok(()) }) .await?; - dbg!("func modified"); Ok(func) } @@ -1616,12 +1609,11 @@ async fn create_func( async fn update_func( ctx: &DalContext, workspace_snapshot: &mut WorkspaceSnapshot, - change_set_pointer: &ChangeSetPointer, func: &Func, func_spec_data: &SiPkgFuncData, ) -> PkgResult<()> { workspace_snapshot - .func_modify_by_id(ctx, change_set_pointer, func.id, |func| { + .func_modify_by_id(ctx, func.id, |func| { func.name = func_spec_data.name().to_owned(); func.backend_kind = func_spec_data.backend_kind().into(); func.backend_response_type = func_spec_data.response_type().into(); @@ -1644,7 +1636,6 @@ async fn update_func( async fn import_func( ctx: &DalContext, workspace_snapshot: &mut WorkspaceSnapshot, - change_set_pointer: &ChangeSetPointer, change_set_pk: Option, func_spec: &SiPkgFunc<'_>, installed_pkg_id: Option, @@ -1653,7 +1644,6 @@ async fn import_func( ) -> PkgResult> { let func = match change_set_pk { None => { - 
dbg!("importing", func_spec.name()); let hash = func_spec.hash().to_string(); let existing_func = InstalledPkgAsset::list_for_kind_and_hash(ctx, InstalledPkgAssetKind::Func, &hash) @@ -1666,10 +1656,7 @@ async fn import_func( (workspace_snapshot.func_get_by_id(ctx, id).await?, false) } }, - None => ( - create_func(ctx, workspace_snapshot, change_set_pointer, func_spec).await?, - true, - ), + None => (create_func(ctx, workspace_snapshot, func_spec).await?, true), }; if is_builtin { diff --git a/lib/dal/src/provider/internal.rs b/lib/dal/src/provider/internal.rs index 7499e7ffc0..bcc263c870 100644 --- a/lib/dal/src/provider/internal.rs +++ b/lib/dal/src/provider/internal.rs @@ -70,6 +70,7 @@ use content_store::ContentHash; use serde::{Deserialize, Serialize}; use strum::EnumDiscriminants; +use telemetry::prelude::*; use crate::workspace_snapshot::content_address::ContentAddress; use crate::{pk, StandardModel, Timestamp}; diff --git a/lib/dal/src/schema/ui_menu.rs b/lib/dal/src/schema/ui_menu.rs index b56921dd1d..7637b56b83 100644 --- a/lib/dal/src/schema/ui_menu.rs +++ b/lib/dal/src/schema/ui_menu.rs @@ -1,11 +1,7 @@ use serde::{Deserialize, Serialize}; use telemetry::prelude::*; -use crate::{ - impl_standard_model, pk, StandardModel, Tenancy, Timestamp, Visibility, -}; - - +use crate::{impl_standard_model, pk, StandardModel, Tenancy, Timestamp, Visibility}; const FIND_FOR_SCHEMA: &str = include_str!("../queries/ui_menus_find_for_schema.sql"); diff --git a/lib/dal/src/socket.rs b/lib/dal/src/socket.rs index be6499b9a2..af6c3d3585 100644 --- a/lib/dal/src/socket.rs +++ b/lib/dal/src/socket.rs @@ -4,13 +4,10 @@ use serde::{Deserialize, Serialize}; use strum::{AsRefStr, Display, EnumDiscriminants, EnumIter, EnumString}; use telemetry::prelude::*; - use si_pkg::SocketSpecArity; use crate::workspace_snapshot::content_address::ContentAddress; -use crate::{ - label_list::ToLabelList, pk, StandardModel, Timestamp, -}; +use crate::{label_list::ToLabelList, pk, StandardModel, 
Timestamp}; // const FIND_BY_NAME_FOR_EDGE_KIND_AND_NODE: &str = // include_str!("queries/socket/find_by_name_for_edge_kind_and_node.sql"); diff --git a/lib/dal/src/validation.rs b/lib/dal/src/validation.rs index 632fd93064..aa93f649d3 100644 --- a/lib/dal/src/validation.rs +++ b/lib/dal/src/validation.rs @@ -13,7 +13,7 @@ use serde::{Deserialize, Serialize}; use serde_json::Value; use thiserror::Error; -use crate::{FuncId}; +use crate::FuncId; pub mod prototype; // pub mod resolver; diff --git a/lib/dal/src/validation/prototype.rs b/lib/dal/src/validation/prototype.rs index 9b71b6b911..66ba700654 100644 --- a/lib/dal/src/validation/prototype.rs +++ b/lib/dal/src/validation/prototype.rs @@ -1,18 +1,11 @@ use content_store::ContentHash; use serde::{Deserialize, Serialize}; - - use strum::EnumDiscriminants; use telemetry::prelude::*; - use crate::workspace_snapshot::content_address::ContentAddress; -use crate::{ - func::FuncId, pk, - StandardModel, Timestamp, -}; - +use crate::{func::FuncId, pk, StandardModel, Timestamp}; // pub mod context; diff --git a/lib/dal/src/visibility.rs b/lib/dal/src/visibility.rs index 8d390b57b3..d38b2ca3dc 100644 --- a/lib/dal/src/visibility.rs +++ b/lib/dal/src/visibility.rs @@ -5,7 +5,9 @@ use si_data_pg::PgError; use telemetry::prelude::*; use thiserror::Error; +use crate::change_set_pointer::{ChangeSetPointer, ChangeSetPointerId}; use serde_aux::field_attributes::deserialize_number_from_string; +use ulid::Ulid; #[remain::sorted] #[derive(Error, Debug)] @@ -48,6 +50,14 @@ impl Visibility { Visibility::new(ChangeSetPk::NONE, deleted_at) } + // TODO(nick,zack,jacob): remove this once the old change set dies. 
+ pub fn new_for_change_set_pointer(change_set_pointer_id: ChangeSetPointerId) -> Self { + Visibility { + change_set_pk: ChangeSetPk::from(Ulid::from(change_set_pointer_id)), + deleted_at: None, + } + } + pub fn to_deleted(&self) -> Self { let mut other = *self; other.deleted_at = Some(Utc::now()); diff --git a/lib/dal/src/workspace.rs b/lib/dal/src/workspace.rs index 4e5b0adef8..8d8375d05c 100644 --- a/lib/dal/src/workspace.rs +++ b/lib/dal/src/workspace.rs @@ -50,13 +50,6 @@ pub type WorkspaceResult = Result; pk!(WorkspacePk); pk!(WorkspaceId); -#[derive(Deserialize, Serialize, Debug, Clone, PartialEq, Eq)] -pub struct WorkspaceSignup { - pub key_pair: KeyPair, - pub user: User, - pub workspace: Workspace, -} - #[derive(Deserialize, Serialize, Debug, Clone, PartialEq, Eq)] pub struct Workspace { pk: WorkspacePk, @@ -86,13 +79,15 @@ impl Workspace { &self.pk } + pub fn default_change_set_id(&self) -> ChangeSetPointerId { + self.default_change_set_id + } + /// Find or create the builtin [`Workspace`]. #[instrument(skip_all)] - pub async fn builtin(ctx: &DalContext) -> WorkspaceResult { - dbg!("create builtin workspace"); + pub async fn builtin(ctx: &mut DalContext) -> WorkspaceResult { // Check if the builtin already exists. if let Some(found_builtin) = Self::find_builtin(ctx).await? { - dbg!("already have builtin"); return Ok(found_builtin); } @@ -100,12 +95,12 @@ impl Workspace { // workspace snapshot. 
let name = "builtin"; - dbg!("change set pointer new"); let mut change_set = ChangeSetPointer::new_head(ctx).await?; let workspace_snapshot = WorkspaceSnapshot::initial(ctx, &change_set).await?; change_set .update_pointer(ctx, workspace_snapshot.id()) .await?; + let change_set_id = change_set.id; let head_pk = WorkspaceId::NONE; let row = ctx @@ -114,7 +109,7 @@ impl Workspace { .pg() .query_one( "INSERT INTO workspaces (pk, name, default_change_set_id) VALUES ($1, $2, $3) RETURNING *", - &[&head_pk, &name, &change_set.id], + &[&head_pk, &name, &change_set_id], ) .await?; Self::try_from(row) @@ -170,17 +165,24 @@ impl Workspace { name: impl AsRef, ) -> WorkspaceResult { // Get the default change set from the builtin workspace. - let builtin = Self::builtin(ctx).await?; + let builtin = match Self::find_builtin(ctx).await? { + Some(found_builtin) => found_builtin, + None => { + // TODO(nick,jacob): replace this with an error. + todo!("this should not happen") + } + }; // Create a new change set whose base is the default change set of the workspace. // Point to the snapshot that the builtin's default change set is pointing to. let mut change_set = ChangeSetPointer::new(ctx, "HEAD", Some(builtin.default_change_set_id)).await?; - let workspace_snapshot = + let mut workspace_snapshot = WorkspaceSnapshot::find_for_change_set(ctx, builtin.default_change_set_id).await?; change_set .update_pointer(ctx, workspace_snapshot.id()) .await?; + let change_set_id = change_set.id; let name = name.as_ref(); let row = ctx @@ -189,7 +191,7 @@ impl Workspace { .pg() .query_one( "INSERT INTO workspaces (pk, name, default_change_set_id) VALUES ($1, $2, $3) RETURNING *", - &[&pk, &name, &change_set.id], + &[&pk, &name, &change_set_id], ) .await?; let new_workspace = Self::try_from(row)?; @@ -197,10 +199,12 @@ impl Workspace { ctx.update_tenancy(Tenancy::new(new_workspace.pk)); // TODO(nick,zack,jacob): convert visibility (or get rid of it?) to use our the new change set id. 
+ // should set_change_set_pointer and set_workspace_snapshot happen in update_visibility? ctx.update_visibility(Visibility::new( - ChangeSetPk::from(Ulid::from(change_set.id)), + ChangeSetPk::from(Ulid::from(change_set_id)), None, )); + ctx.update_snapshot_to_visibility().await?; let _history_event = HistoryEvent::new( ctx, diff --git a/lib/dal/src/workspace_snapshot.rs b/lib/dal/src/workspace_snapshot.rs index 94bf377915..d646f69cfb 100644 --- a/lib/dal/src/workspace_snapshot.rs +++ b/lib/dal/src/workspace_snapshot.rs @@ -32,7 +32,7 @@ pub mod update; pub mod vector_clock; use chrono::{DateTime, Utc}; -use content_store::{ContentHash, StoreError}; +use content_store::{ContentHash, Store, StoreError}; use petgraph::prelude::*; use serde::{Deserialize, Serialize}; use si_cbor::CborError; @@ -108,13 +108,15 @@ pub enum WorkspaceSnapshotError { WorkspaceSnapshotGraph(#[from] WorkspaceSnapshotGraphError), #[error("workspace snapshot graph missing")] WorkspaceSnapshotGraphMissing, + #[error("no workspace snapshot was fetched for this dal context")] + WorkspaceSnapshotNotFetched, } pub type WorkspaceSnapshotResult = Result; pk!(WorkspaceSnapshotId); -#[derive(Debug, Serialize, Deserialize)] +#[derive(Debug, Serialize, Deserialize, Clone)] pub struct WorkspaceSnapshot { id: WorkspaceSnapshotId, created_at: DateTime, @@ -153,7 +155,7 @@ impl WorkspaceSnapshot { ctx: &DalContext, change_set: &ChangeSetPointer, ) -> WorkspaceSnapshotResult { - let mut graph = WorkspaceSnapshotGraph::new(change_set)?; + let mut graph: WorkspaceSnapshotGraph = WorkspaceSnapshotGraph::new(change_set)?; // Create the category nodes under root. let component_node_index = @@ -178,14 +180,24 @@ impl WorkspaceSnapshot { schema_node_index, )?; - Self::new_inner(ctx, graph).await + // We do not care about any field other than "working_copy" because "write" will populate + // them using the assigned working copy. 
+ let mut initial = Self { + id: WorkspaceSnapshotId::NONE, + created_at: Utc::now(), + snapshot: vec![], + working_copy: Some(graph), + }; + initial.write(ctx, change_set.vector_clock_id()).await?; + + Ok(initial) } pub async fn write( &mut self, ctx: &DalContext, vector_clock_id: VectorClockId, - ) -> WorkspaceSnapshotResult<()> { + ) -> WorkspaceSnapshotResult { // Pull out the working copy and clean it up. let working_copy = self.working_copy()?; working_copy.cleanup(); @@ -193,24 +205,11 @@ impl WorkspaceSnapshot { // Mark everything left as seen. working_copy.mark_graph_seen(vector_clock_id)?; - // Stamp the new workspace snapshot. - let object = Self::new_inner(ctx, working_copy.clone()).await?; - - // Reset relevant fields on self. - self.id = object.id; - self.created_at = object.created_at; - self.snapshot = object.snapshot; - self.working_copy = None; - Ok(()) - } + // Write out to the content store. + ctx.content_store().lock().await.write().await?; - /// This _private_ method crates a new, immutable [`WorkspaceSnapshot`] from a - /// [`WorkspaceSnapshotGraph`]. - async fn new_inner( - ctx: &DalContext, - graph: WorkspaceSnapshotGraph, - ) -> WorkspaceSnapshotResult { - let serialized_snapshot = si_cbor::encode(&graph)?; + // Stamp the new workspace snapshot. + let serialized_snapshot = si_cbor::encode(&working_copy)?; let row = ctx .txns() .await? @@ -220,7 +219,15 @@ impl WorkspaceSnapshot { &[&serialized_snapshot], ) .await?; - Self::try_from(row) + let object = Self::try_from(row)?; + + // Reset relevant fields on self. + self.id = object.id; + self.created_at = object.created_at; + self.snapshot = object.snapshot; + self.working_copy = None; + + Ok(self.id) } pub fn id(&self) -> WorkspaceSnapshotId { @@ -291,6 +298,13 @@ impl WorkspaceSnapshot { .get_edge_by_index_stableish(edge_index)?) 
} + pub fn edge_endpoints( + &mut self, + edge_index: EdgeIndex, + ) -> WorkspaceSnapshotResult<(NodeIndex, NodeIndex)> { + Ok(self.working_copy()?.edge_endpoints(edge_index)?) + } + pub fn import_subgraph( &mut self, other: &mut Self, @@ -306,7 +320,7 @@ impl WorkspaceSnapshot { &mut self, original_node_index: NodeIndex, new_node_index: NodeIndex, - ) -> WorkspaceSnapshotResult<()> { + ) -> WorkspaceSnapshotResult> { Ok(self .working_copy()? .replace_references(original_node_index, new_node_index)?) @@ -327,6 +341,10 @@ impl WorkspaceSnapshot { Ok(self.working_copy()?.find_equivalent_node(id, lineage_id)?) } + pub fn cleanup(&mut self) { + self.working_copy().expect("oh no").cleanup(); + } + pub fn dot(&mut self) { self.working_copy() .expect("failed on accessing or creating a working copy") diff --git a/lib/dal/src/workspace_snapshot/api.rs b/lib/dal/src/workspace_snapshot/api.rs index db6d9c9346..17b8e3235b 100644 --- a/lib/dal/src/workspace_snapshot/api.rs +++ b/lib/dal/src/workspace_snapshot/api.rs @@ -1,7 +1,9 @@ +use crate::change_set_pointer::ChangeSetPointer; use petgraph::stable_graph::EdgeIndex; use petgraph::stable_graph::Edges; use petgraph::visit::EdgeRef; use petgraph::Directed; +use std::collections::HashMap; use ulid::Ulid; use crate::workspace_snapshot::edge_weight::EdgeWeight; @@ -24,10 +26,11 @@ pub mod socket; pub mod validation; impl WorkspaceSnapshot { - pub fn get_category_child(&mut self, kind: CategoryNodeKind) -> WorkspaceSnapshotResult<()> { - //Ok(self.working_copy()?.get_category_child(kind)?) - - Ok(()) + pub fn get_category( + &mut self, + kind: CategoryNodeKind, + ) -> WorkspaceSnapshotResult<(Ulid, NodeIndex)> { + Ok(self.working_copy()?.get_category(kind)?) 
} pub fn edges_directed( @@ -100,8 +103,16 @@ impl WorkspaceSnapshot { pub fn remove_edge( &mut self, - edge_index: EdgeIndex, - ) -> WorkspaceSnapshotResult> { - Ok(self.working_copy()?.remove_edge_by_index(edge_index)) + change_set: &ChangeSetPointer, + source_node_index: NodeIndex, + target_node_index: NodeIndex, + edge_kind: EdgeWeightKindDiscriminants, + ) -> WorkspaceSnapshotResult> { + Ok(self.working_copy()?.remove_edge( + change_set, + source_node_index, + target_node_index, + edge_kind, + )?) } } diff --git a/lib/dal/src/workspace_snapshot/api/attribute/prototype.rs b/lib/dal/src/workspace_snapshot/api/attribute/prototype.rs index 1c935c415b..85686ed10e 100644 --- a/lib/dal/src/workspace_snapshot/api/attribute/prototype.rs +++ b/lib/dal/src/workspace_snapshot/api/attribute/prototype.rs @@ -1,16 +1,9 @@ -use content_store::{Store}; +use content_store::Store; use petgraph::prelude::*; - -use crate::attribute::prototype::{ - AttributePrototypeContent, AttributePrototypeContentV1, -}; +use crate::attribute::prototype::{AttributePrototypeContent, AttributePrototypeContentV1}; use crate::change_set_pointer::ChangeSetPointer; - - - - use crate::workspace_snapshot::content_address::ContentAddress; use crate::workspace_snapshot::edge_weight::{ EdgeWeight, EdgeWeightKind, EdgeWeightKindDiscriminants, @@ -18,8 +11,7 @@ use crate::workspace_snapshot::edge_weight::{ use crate::workspace_snapshot::node_weight::NodeWeight; use crate::workspace_snapshot::{WorkspaceSnapshotError, WorkspaceSnapshotResult}; use crate::{ - AttributePrototype, AttributePrototypeId, DalContext, FuncId, Timestamp, - WorkspaceSnapshot, + AttributePrototype, AttributePrototypeId, DalContext, FuncId, Timestamp, WorkspaceSnapshot, }; impl WorkspaceSnapshot { diff --git a/lib/dal/src/workspace_snapshot/api/component.rs b/lib/dal/src/workspace_snapshot/api/component.rs index 5aab98b22d..562411ef78 100644 --- a/lib/dal/src/workspace_snapshot/api/component.rs +++ 
b/lib/dal/src/workspace_snapshot/api/component.rs @@ -45,7 +45,7 @@ impl WorkspaceSnapshot { // Root --> Component Category --> Component (this) let component_category_index = self .working_copy()? - .get_category_child(CategoryNodeKind::Component)?; + .get_category(CategoryNodeKind::Component)?; self.working_copy()?.add_edge( component_category_index, EdgeWeight::new(change_set, EdgeWeightKind::Use)?, diff --git a/lib/dal/src/workspace_snapshot/api/func.rs b/lib/dal/src/workspace_snapshot/api/func.rs index b5367d049a..5db216af22 100644 --- a/lib/dal/src/workspace_snapshot/api/func.rs +++ b/lib/dal/src/workspace_snapshot/api/func.rs @@ -1,3 +1,5 @@ +use std::time::Instant; + use content_store::{ContentHash, Store}; use ulid::Ulid; @@ -6,7 +8,9 @@ use crate::change_set_pointer::ChangeSetPointer; use crate::func::intrinsics::IntrinsicFunc; use crate::func::{FuncContent, FuncContentV1, FuncGraphNode}; -use crate::workspace_snapshot::edge_weight::{EdgeWeight, EdgeWeightKind}; +use crate::workspace_snapshot::edge_weight::{ + EdgeWeight, EdgeWeightKind, EdgeWeightKindDiscriminants, +}; use crate::workspace_snapshot::node_weight::category_node_weight::CategoryNodeKind; use crate::workspace_snapshot::node_weight::NodeWeight; use crate::workspace_snapshot::{WorkspaceSnapshotError, WorkspaceSnapshotResult}; @@ -23,7 +27,6 @@ impl WorkspaceSnapshot { pub async fn func_create( &mut self, ctx: &DalContext, - change_set: &ChangeSetPointer, name: impl AsRef, backend_kind: FuncBackendKind, backend_response_type: FuncBackendResponseType, @@ -53,13 +56,12 @@ impl WorkspaceSnapshot { .await .add(&FuncContent::V1(content.clone()))?; + let change_set = ctx.change_set_pointer()?; let id = change_set.generate_ulid()?; let node_weight = NodeWeight::new_func(change_set, id, name.clone(), hash)?; let node_index = self.working_copy()?.add_node(node_weight)?; - let (_, func_category_index) = self - .working_copy()? 
- .get_category_child(CategoryNodeKind::Func)?; + let (_, func_category_index) = self.working_copy()?.get_category(CategoryNodeKind::Func)?; self.working_copy()?.add_edge( func_category_index, EdgeWeight::new(change_set, EdgeWeightKind::Use)?, @@ -115,13 +117,36 @@ impl WorkspaceSnapshot { ))?) } + pub async fn list_funcs(&mut self, ctx: &DalContext) -> WorkspaceSnapshotResult> { + // let start = Instant::now(); + let mut funcs = vec![]; + let (_, func_category_index) = self.working_copy()?.get_category(CategoryNodeKind::Func)?; + + let func_node_indexes = self.outgoing_targets_for_edge_weight_kind_by_index( + func_category_index, + EdgeWeightKindDiscriminants::Use, + )?; + + for index in func_node_indexes { + if let NodeWeight::Func(func_inner) = self.get_node_weight(index)? { + let func_id: FuncId = func_inner.id().into(); + + let func = self.func_get_by_id(ctx, func_id).await?; + funcs.push(func); + } else { + panic!("not a func node weight???"); + } + } + // dbg!(start.elapsed()); + + Ok(funcs) + } + pub fn func_find_by_name( &mut self, name: impl AsRef, ) -> WorkspaceSnapshotResult> { - let (_, func_category_index) = self - .working_copy()? - .get_category_child(CategoryNodeKind::Func)?; + let (_, func_category_index) = self.working_copy()?.get_category(CategoryNodeKind::Func)?; let func_id = self .working_copy()? 
@@ -133,35 +158,26 @@ impl WorkspaceSnapshot { pub async fn func_modify_by_id( &mut self, ctx: &DalContext, - change_set: &ChangeSetPointer, id: FuncId, lambda: L, ) -> WorkspaceSnapshotResult where L: FnOnce(&mut Func) -> WorkspaceSnapshotResult<()>, { - let (_, inner) = dbg!(self.func_get_content(ctx, id).await)?; - - dbg!("got content", &inner); + let (_, inner) = self.func_get_content(ctx, id).await?; let mut func = Func::assemble(id, &inner); lambda(&mut func)?; let updated = FuncContentV1::from(func); - dbg!("updated content", &updated); - let hash = ctx .content_store() .lock() .await .add(&FuncContent::V1(updated.clone()))?; - dbg!("added content"); - self.working_copy()? - .update_content(change_set, id.into(), hash)?; - - dbg!("update content"); + .update_content(ctx.change_set_pointer()?, id.into(), hash)?; Ok(Func::assemble(id, &updated)) } diff --git a/lib/dal/src/workspace_snapshot/api/prop.rs b/lib/dal/src/workspace_snapshot/api/prop.rs index 990246c979..e832ce583d 100644 --- a/lib/dal/src/workspace_snapshot/api/prop.rs +++ b/lib/dal/src/workspace_snapshot/api/prop.rs @@ -6,14 +6,10 @@ use crate::change_set_pointer::ChangeSetPointer; use crate::prop::{PropContent, PropContentV1, PropGraphNode}; use crate::property_editor::schema::WidgetKind; - - use crate::workspace_snapshot::edge_weight::{EdgeWeight, EdgeWeightKind}; use crate::workspace_snapshot::node_weight::NodeWeight; use crate::workspace_snapshot::{WorkspaceSnapshotError, WorkspaceSnapshotResult}; -use crate::{ - DalContext, Prop, PropId, PropKind, SchemaVariantId, Timestamp, WorkspaceSnapshot, -}; +use crate::{DalContext, Prop, PropId, PropKind, SchemaVariantId, Timestamp, WorkspaceSnapshot}; pub enum PropParent { OrderedProp(PropId), diff --git a/lib/dal/src/workspace_snapshot/api/provider/external.rs b/lib/dal/src/workspace_snapshot/api/provider/external.rs index 7b166c00b0..99e9002811 100644 --- a/lib/dal/src/workspace_snapshot/api/provider/external.rs +++ 
b/lib/dal/src/workspace_snapshot/api/provider/external.rs @@ -1,11 +1,10 @@ -use content_store::{Store}; +use content_store::Store; use crate::change_set_pointer::ChangeSetPointer; use crate::provider::external::{ ExternalProviderContent, ExternalProviderContentV1, ExternalProviderGraphNode, }; - use crate::socket::{DiagramKind, SocketEdgeKind, SocketKind}; use crate::workspace_snapshot::api::socket::SocketParent; @@ -13,10 +12,7 @@ use crate::workspace_snapshot::content_address::ContentAddress; use crate::workspace_snapshot::edge_weight::{EdgeWeight, EdgeWeightKind}; use crate::workspace_snapshot::node_weight::NodeWeight; use crate::workspace_snapshot::WorkspaceSnapshotResult; -use crate::{ - DalContext, FuncId, SchemaVariantId, SocketArity, Timestamp, - WorkspaceSnapshot, -}; +use crate::{DalContext, FuncId, SchemaVariantId, SocketArity, Timestamp, WorkspaceSnapshot}; impl WorkspaceSnapshot { pub async fn external_provider_create_with_socket( diff --git a/lib/dal/src/workspace_snapshot/api/schema.rs b/lib/dal/src/workspace_snapshot/api/schema.rs index f65bfc37c3..a8cb4a7a81 100644 --- a/lib/dal/src/workspace_snapshot/api/schema.rs +++ b/lib/dal/src/workspace_snapshot/api/schema.rs @@ -43,7 +43,7 @@ impl WorkspaceSnapshot { let schema_category_index = self .working_copy()? 
- .get_category_child(CategoryNodeKind::Schema)?; + .get_category(CategoryNodeKind::Schema)?; /*self.working_copy()?.add_edge( schema_category_index, EdgeWeight::new(change_set, EdgeWeightKind::Use)?, diff --git a/lib/dal/src/workspace_snapshot/api/schema/variant.rs b/lib/dal/src/workspace_snapshot/api/schema/variant.rs index 1deb51d397..c8dc8a16f5 100644 --- a/lib/dal/src/workspace_snapshot/api/schema/variant.rs +++ b/lib/dal/src/workspace_snapshot/api/schema/variant.rs @@ -269,7 +269,7 @@ impl WorkspaceSnapshot { ctx: &DalContext, ) -> WorkspaceSnapshotResult> { /* - let schema_category_index = self.get_category_child(CategoryNodeKind::Schema)?; + let schema_category_index = self.get_category(CategoryNodeKind::Schema)?; let schema_indices = self.outgoing_targets_for_edge_weight_kind_by_index( schema_category_index, EdgeWeightKindDiscriminants::Use, diff --git a/lib/dal/src/workspace_snapshot/api/schema/variant/root_prop.rs b/lib/dal/src/workspace_snapshot/api/schema/variant/root_prop.rs index 0a83004c57..eebb73b306 100644 --- a/lib/dal/src/workspace_snapshot/api/schema/variant/root_prop.rs +++ b/lib/dal/src/workspace_snapshot/api/schema/variant/root_prop.rs @@ -1,6 +1,3 @@ - - - use crate::change_set_pointer::ChangeSetPointer; use crate::property_editor::schema::WidgetKind; diff --git a/lib/dal/src/workspace_snapshot/api/socket.rs b/lib/dal/src/workspace_snapshot/api/socket.rs index 3dc243da13..50ed4d8470 100644 --- a/lib/dal/src/workspace_snapshot/api/socket.rs +++ b/lib/dal/src/workspace_snapshot/api/socket.rs @@ -1,19 +1,19 @@ -use content_store::{Store}; +use content_store::Store; use ulid::Ulid; use crate::change_set_pointer::ChangeSetPointer; - - -use crate::socket::{SocketContent, SocketContentV1, SocketEdgeKind, SocketGraphNode, SocketKind, DiagramKind}; +use crate::socket::{ + DiagramKind, SocketContent, SocketContentV1, SocketEdgeKind, SocketGraphNode, SocketKind, +}; use crate::workspace_snapshot::content_address::ContentAddress; use 
crate::workspace_snapshot::edge_weight::{EdgeWeight, EdgeWeightKind}; use crate::workspace_snapshot::node_weight::NodeWeight; use crate::workspace_snapshot::WorkspaceSnapshotResult; use crate::{ - DalContext, ExternalProviderId, InternalProviderId, - SchemaVariantId, SocketArity, Timestamp, WorkspaceSnapshot, + DalContext, ExternalProviderId, InternalProviderId, SchemaVariantId, SocketArity, Timestamp, + WorkspaceSnapshot, }; pub enum SocketParent { diff --git a/lib/dal/src/workspace_snapshot/edge_weight.rs b/lib/dal/src/workspace_snapshot/edge_weight.rs index daafa09936..634b7bf116 100644 --- a/lib/dal/src/workspace_snapshot/edge_weight.rs +++ b/lib/dal/src/workspace_snapshot/edge_weight.rs @@ -20,6 +20,7 @@ pub type EdgeWeightResult = Result; #[remain::sorted] #[derive(Default, Debug, Serialize, Deserialize, Clone, PartialEq, Eq, Hash, EnumDiscriminants)] +#[strum_discriminants(derive(Serialize, Deserialize))] pub enum EdgeWeightKind { /// A function used by a [`SchemaVariant`] to perform an action that affects its resource ActionPrototype(ActionKind), diff --git a/lib/dal/src/workspace_snapshot/graph.rs b/lib/dal/src/workspace_snapshot/graph.rs index 3dd172e928..b5a7559aaf 100644 --- a/lib/dal/src/workspace_snapshot/graph.rs +++ b/lib/dal/src/workspace_snapshot/graph.rs @@ -2,7 +2,7 @@ use chrono::Utc; use content_store::{ContentHash, Store, StoreError}; use petgraph::graph::Edge; use petgraph::stable_graph::Edges; -use petgraph::{algo, prelude::*, visit::DfsEvent}; +use petgraph::{algo, prelude::*, visit::DfsEvent, EdgeDirection}; use serde::{Deserialize, Serialize}; use std::collections::{HashMap, HashSet, VecDeque}; use telemetry::prelude::*; @@ -129,7 +129,7 @@ impl WorkspaceSnapshotGraph { // Because outgoing edges are part of a node's identity, we create a new "from" node // as we are effectively writing to that node (we'll need to update the merkle tree // hash), and everything in the graph should be treated as copy-on-write. 
- let new_from_node_index = self.copy_node_index(from_node_index)?; + let new_from_node_index = self.copy_node_by_index(from_node_index)?; // Add the new edge to the new version of the "from" node. let new_edge_index = @@ -160,8 +160,8 @@ impl WorkspaceSnapshotGraph { Ok(new_node_index) } - pub fn get_category_child( - &mut self, + pub fn get_category( + &self, kind: CategoryNodeKind, ) -> WorkspaceSnapshotGraphResult<(Ulid, NodeIndex)> { for edgeref in self.graph.edges_directed(self.root(), Outgoing) { @@ -188,7 +188,7 @@ impl WorkspaceSnapshotGraph { } pub fn func_find_by_name( - &self, + &mut self, parent_node_index: NodeIndex, name: impl AsRef, ) -> WorkspaceSnapshotGraphResult> { @@ -472,7 +472,7 @@ impl WorkspaceSnapshotGraph { Ok(None) } - fn copy_node_index( + fn copy_node_by_index( &mut self, node_index_to_copy: NodeIndex, ) -> WorkspaceSnapshotGraphResult { @@ -526,34 +526,50 @@ impl WorkspaceSnapshotGraph { ); event })?; + let mut to_rebase_node_indexes = Vec::new(); - if let NodeWeight::Content(onto_content_weight) = onto_node_weight { - if onto_content_weight.content_address() == ContentAddress::Root { - // There can only be one (valid/current) `ContentAddress::Root` at any - // given moment, and the `lineage_id` isn't really relevant as it's not - // globally stable (even though it is locally stable). This matters as we - // may be dealing with a `WorkspaceSnapshotGraph` that is coming to us - // externally from a module that we're attempting to import. The external - // `WorkspaceSnapshotGraph` will be `self`, and the "local" one will be - // `onto`. - to_rebase_node_indexes.push(self.root_index); - } else { - // Only retain node indexes... or indices... if they are part of the current - // graph. There may still be garbage from previous updates to the graph - // laying around. 
- let mut potential_to_rebase_node_indexes = self - .get_node_index_by_lineage(onto_node_weight.lineage_id()) - .map_err(|err| { + if onto_node_index == onto.root() { + // There can only be one (valid/current) `ContentAddress::Root` at any + // given moment, and the `lineage_id` isn't really relevant as it's not + // globally stable (even though it is locally stable). This matters as we + // may be dealing with a `WorkspaceSnapshotGraph` that is coming to us + // externally from a module that we're attempting to import. The external + // `WorkspaceSnapshotGraph` will be `self`, and the "local" one will be + // `onto`. + to_rebase_node_indexes.push(self.root_index); + } else { + // Only retain node indexes... or indices... if they are part of the current + // graph. There may still be garbage from previous updates to the graph + // laying around. + let mut potential_to_rebase_node_indexes: Vec = self + .get_node_index_by_lineage(onto_node_weight.lineage_id()) + .map_err(|err| { + error!( + "Unable to find NodeIndex(es) for lineage_id {}: {}", + onto_node_weight.lineage_id(), + err, + ); + event + })?; + potential_to_rebase_node_indexes + .retain(|node_index| self.has_path_to_root(*node_index)); + to_rebase_node_indexes.extend(potential_to_rebase_node_indexes); + + // Since category nodes may be created from scratch from a different workspace, + // they may have different lineage ids. We still want to consider the same + // category kind as an equivalent node, even though it might have a different + // lineage id. 
+ if let NodeWeight::Category(onto_category_node_weight) = onto_node_weight { + let category_node_kind = onto_category_node_weight.kind(); + let (_, to_rebase_category_node_index) = + self.get_category(category_node_kind).map_err(|err| { error!( - "Unable to find NodeIndex(es) for lineage_id {}: {}", - onto_node_weight.lineage_id(), - err, + "Unable to get to rebase Category node for kind {:?} from onto {:?}: {}", + onto_category_node_weight.kind(), onto, err, ); event })?; - potential_to_rebase_node_indexes - .retain(|node_index| self.has_path_to_root(*node_index)); - to_rebase_node_indexes.extend(potential_to_rebase_node_indexes); + to_rebase_node_indexes.push(to_rebase_category_node_index); } } @@ -581,6 +597,11 @@ impl WorkspaceSnapshotGraph { { // If the merkle tree hashes are the same, then the entire sub-graph is // identical, and we don't need to check any further. + debug!( + "onto {} and to rebase {} merkle tree hashes are the same", + onto_node_weight.id(), + to_rebase_node_weight.id() + ); continue; } any_content_with_lineage_has_changed = true; @@ -639,11 +660,11 @@ impl WorkspaceSnapshotGraph { // Eventually, this will only happen on the root node itself, since // Objects, Maps, and Arrays should all have an ordering, for at // least display purposes. 
- warn!( + debug!( "Found what appears to be two unordered containers: onto {:?}, to_rebase {:?}", onto_node_index, to_rebase_node_index, ); - println!( + debug!( "Comparing unordered containers: {:?}, {:?}", onto_node_index, to_rebase_node_index ); @@ -675,7 +696,7 @@ impl WorkspaceSnapshotGraph { return Err(event); } (Some(to_rebase_ordering_node_index), Some(onto_ordering_node_index)) => { - println!( + debug!( "Comparing ordered containers: {:?}, {:?}", onto_node_index, to_rebase_node_index ); @@ -810,11 +831,15 @@ impl WorkspaceSnapshotGraph { let to_rebase_container_item_weight = self.get_node_weight(to_rebase_container_item_index)?; if removed_items.contains(&to_rebase_container_item_weight.id()) { - for edge in self + for edgeref in self .graph .edges_connecting(to_rebase_container_index, to_rebase_container_item_index) { - updates.push(Update::RemoveEdge(edge.id())); + updates.push(Update::RemoveEdge { + source: edgeref.source(), + destination: edgeref.target(), + edge_kind: edgeref.weight().kind().into(), + }); } } } @@ -912,7 +937,11 @@ impl WorkspaceSnapshotGraph { } else { // Entry was deleted in `onto`, and has not been modified in `to_rebase`: // Remove the edge. 
- updates.push(Update::RemoveEdge(to_rebase_edgeref.id())); + updates.push(Update::RemoveEdge { + source: to_rebase_edgeref.source(), + destination: to_rebase_edgeref.target(), + edge_kind: to_rebase_edgeref.weight().kind().into(), + }); } } } @@ -1000,7 +1029,9 @@ impl WorkspaceSnapshotGraph { #[derive(Debug, Copy, Clone)] struct EdgeInfo { + pub source_node_index: NodeIndex, pub target_node_index: NodeIndex, + pub edge_kind: EdgeWeightKindDiscriminants, pub edge_index: EdgeIndex, } @@ -1019,7 +1050,9 @@ impl WorkspaceSnapshotGraph { target_lineage: target_node_weight.lineage_id(), }, EdgeInfo { + source_node_index: edgeref.source(), target_node_index: edgeref.target(), + edge_kind: edgeref.weight().kind().into(), edge_index: edgeref.id(), }, ); @@ -1034,7 +1067,9 @@ impl WorkspaceSnapshotGraph { target_lineage: target_node_weight.lineage_id(), }, EdgeInfo { + source_node_index: edgeref.source(), target_node_index: edgeref.target(), + edge_kind: edgeref.weight().kind().into(), edge_index: edgeref.id(), }, ); @@ -1086,7 +1121,11 @@ impl WorkspaceSnapshotGraph { )) } else { // Item not modified & removed by `onto`: No conflict; Update::RemoveEdge - updates.push(Update::RemoveEdge(only_to_rebase_edge_info.edge_index)); + updates.push(Update::RemoveEdge { + source: only_to_rebase_edge_info.source_node_index, + destination: only_to_rebase_edge_info.target_node_index, + edge_kind: only_to_rebase_edge_info.edge_kind, + }); } } } @@ -1162,13 +1201,11 @@ impl WorkspaceSnapshotGraph { ) -> WorkspaceSnapshotGraphResult> { let mut results = Vec::new(); for node_index in self.graph.node_indices() { - if let NodeWeight::Content(node_weight) = self.get_node_weight(node_index)? 
{ - if node_weight.lineage_id() == lineage_id { - results.push(node_index); - } + let node_weight = self.get_node_weight(node_index)?; + if node_weight.lineage_id() == lineage_id { + results.push(node_index); } } - Ok(results) } @@ -1317,10 +1354,6 @@ impl WorkspaceSnapshotGraph { Ok(prop_node_indexes.get(0).copied()) } - pub(crate) fn remove_edge_by_index(&mut self, edge_index: EdgeIndex) -> Option { - self.graph.remove_edge(edge_index) - } - /// [`StableGraph`] guarantees the stability of [`NodeIndex`] across removals, however there /// are **NO** guarantees around the stability of [`EdgeIndex`] across removals. If /// [`Self::cleanup()`] has been called, then any [`EdgeIndex`] found before @@ -1331,10 +1364,12 @@ impl WorkspaceSnapshotGraph { source_node_index: NodeIndex, target_node_index: NodeIndex, edge_kind: EdgeWeightKindDiscriminants, - ) -> WorkspaceSnapshotGraphResult<()> { + ) -> WorkspaceSnapshotGraphResult> { + let mut updated = HashMap::new(); + let mut edges_to_remove = Vec::new(); - let new_source_node_index = self.copy_node_index(source_node_index)?; - self.replace_references(source_node_index, new_source_node_index)?; + let new_source_node_index = self.copy_node_by_index(source_node_index)?; + updated.extend(self.replace_references(source_node_index, new_source_node_index)?); for edgeref in self .graph @@ -1373,10 +1408,10 @@ impl WorkspaceSnapshotGraph { let new_container_ordering_node_index = self.add_node(NodeWeight::Ordering(new_container_ordering_node_weight))?; - self.replace_references( + updated.extend(self.replace_references( previous_container_ordering_node_index, new_container_ordering_node_index, - )?; + )?); } } } @@ -1388,7 +1423,7 @@ impl WorkspaceSnapshotGraph { self.get_node_index_by_id(self.get_node_weight(new_source_node_index)?.id())?, )?; - Ok(()) + Ok(updated) } pub(crate) fn get_edge_by_index_stableish( @@ -1401,11 +1436,22 @@ impl WorkspaceSnapshotGraph { .ok_or(WorkspaceSnapshotGraphError::EdgeDoesNotExist(edge_index)) } 
+ pub fn edge_endpoints( + &self, + edge_index: EdgeIndex, + ) -> WorkspaceSnapshotGraphResult<(NodeIndex, NodeIndex)> { + let (source, destination) = self + .graph + .edge_endpoints(edge_index) + .ok_or(WorkspaceSnapshotGraphError::EdgeDoesNotExist(edge_index))?; + Ok((source, destination)) + } + pub fn replace_references( &mut self, original_node_index: NodeIndex, new_node_index: NodeIndex, - ) -> WorkspaceSnapshotGraphResult<()> { + ) -> WorkspaceSnapshotGraphResult> { let mut old_to_new_node_indices: HashMap = HashMap::new(); old_to_new_node_indices.insert(original_node_index, new_node_index); @@ -1420,7 +1466,7 @@ impl WorkspaceSnapshotGraph { let new_node_index = match old_to_new_node_indices.get(&old_node_index) { Some(found_new_node_index) => *found_new_node_index, None => { - let new_node_index = self.copy_node_index(old_node_index)?; + let new_node_index = self.copy_node_by_index(old_node_index)?; old_to_new_node_indices.insert(old_node_index, new_node_index); new_node_index } @@ -1463,7 +1509,11 @@ impl WorkspaceSnapshotGraph { self.root_index = *new_root_node_index; } - Ok(()) + // Before returning, remove the root from the map because we should always "ask" what the + // root is rather than relying on a potentially stale reference. 
+ old_to_new_node_indices.remove(&self.root_index); + + Ok(old_to_new_node_indices) } pub fn update_content( @@ -1473,12 +1523,13 @@ impl WorkspaceSnapshotGraph { new_content_hash: ContentHash, ) -> WorkspaceSnapshotGraphResult<()> { let original_node_index = self.get_node_index_by_id(id)?; - let new_node_index = self.copy_node_index(original_node_index)?; + let new_node_index = self.copy_node_by_index(original_node_index)?; let node_weight = self.get_node_weight_mut(new_node_index)?; node_weight.increment_vector_clock(change_set)?; node_weight.new_content_hash(new_content_hash)?; - self.replace_references(original_node_index, new_node_index) + self.replace_references(original_node_index, new_node_index)?; + Ok(()) } pub fn update_order( @@ -1490,11 +1541,12 @@ impl WorkspaceSnapshotGraph { let original_node_index = self .ordering_node_index_for_container(self.get_node_index_by_id(container_id)?)? .ok_or(WorkspaceSnapshotGraphError::NodeWeightNotFound)?; - let new_node_index = self.copy_node_index(original_node_index)?; + let new_node_index = self.copy_node_by_index(original_node_index)?; let node_weight = self.get_node_weight_mut(new_node_index)?; node_weight.set_order(change_set, new_order)?; - self.replace_references(original_node_index, new_node_index) + self.replace_references(original_node_index, new_node_index)?; + Ok(()) } fn update_merkle_tree_hash( diff --git a/lib/dal/src/workspace_snapshot/node_weight/category_node_weight.rs b/lib/dal/src/workspace_snapshot/node_weight/category_node_weight.rs index 322a1aed11..994b9cb950 100644 --- a/lib/dal/src/workspace_snapshot/node_weight/category_node_weight.rs +++ b/lib/dal/src/workspace_snapshot/node_weight/category_node_weight.rs @@ -7,7 +7,7 @@ use crate::change_set_pointer::ChangeSetPointer; use crate::workspace_snapshot::vector_clock::VectorClockId; use crate::workspace_snapshot::{node_weight::NodeWeightResult, vector_clock::VectorClock}; -#[derive(Clone, Copy, Serialize, Deserialize, PartialEq, Eq)] 
+#[derive(Debug, Clone, Copy, Serialize, Deserialize, PartialEq, Eq)] pub enum CategoryNodeKind { Component, Func, @@ -101,7 +101,7 @@ impl CategoryNodeWeight { kind, vector_clock_write: VectorClock::new(change_set.vector_clock_id())?, vector_clock_first_seen: VectorClock::new(change_set.vector_clock_id())?, - content_hash: Default::default(), + content_hash: ContentHash::from(&serde_json::json![kind]), merkle_tree_hash: Default::default(), vector_clock_recently_seen: Default::default(), }) diff --git a/lib/dal/src/workspace_snapshot/update.rs b/lib/dal/src/workspace_snapshot/update.rs index 9f18592cb2..f27cf2cc29 100644 --- a/lib/dal/src/workspace_snapshot/update.rs +++ b/lib/dal/src/workspace_snapshot/update.rs @@ -1,6 +1,6 @@ use petgraph::prelude::*; -use super::edge_weight::EdgeWeight; +use super::edge_weight::{EdgeWeight, EdgeWeightKindDiscriminants}; use serde::{Deserialize, Serialize}; #[remain::sorted] @@ -14,7 +14,11 @@ pub enum Update { destination: NodeIndex, edge_weight: EdgeWeight, }, - RemoveEdge(EdgeIndex), + RemoveEdge { + source: NodeIndex, + destination: NodeIndex, + edge_kind: EdgeWeightKindDiscriminants, + }, ReplaceSubgraph { onto: NodeIndex, // Check if already exists in "onto". 
Grab node weight from "to_rebase" and see if there is diff --git a/lib/dal/src/ws_event.rs b/lib/dal/src/ws_event.rs index 2f04fed039..ff8b779f05 100644 --- a/lib/dal/src/ws_event.rs +++ b/lib/dal/src/ws_event.rs @@ -4,6 +4,7 @@ use si_data_pg::PgError; use thiserror::Error; use crate::change_set::{ChangeSetActorPayload, ChangeSetMergeVotePayload}; +use crate::pkg::ModuleImportedPayload; use crate::user::{CursorPayload, OnlinePayload}; use crate::{ ChangeSetPk, DalContext, PropId, SocketId, StandardModelError, TransactionsError, WorkspacePk, @@ -51,7 +52,7 @@ pub enum WsPayload { // FixReturn(FixReturn), // ImportWorkspaceVote(ImportWorkspaceVotePayload), // LogLine(LogLinePayload), - // ModuleImported(ModuleImportedPayload), + ModuleImported(ModuleImportedPayload), Online(OnlinePayload), // ResourceRefreshed(ResourceRefreshedPayload), // SchemaCreated(SchemaPk), diff --git a/lib/dal/tests/integration.rs b/lib/dal/tests/integration.rs index a602d02f8d..0ad4379906 100644 --- a/lib/dal/tests/integration.rs +++ b/lib/dal/tests/integration.rs @@ -1,3 +1,3 @@ const TEST_PG_DBNAME: &str = "si_test_dal"; -// mod integration_test; +mod integration_test; diff --git a/lib/dal/tests/integration_test/internal/mod.rs b/lib/dal/tests/integration_test/internal/mod.rs index 47d1c18820..1740fd168b 100644 --- a/lib/dal/tests/integration_test/internal/mod.rs +++ b/lib/dal/tests/integration_test/internal/mod.rs @@ -1,30 +1,30 @@ -mod action_prototype; -mod attribute; -mod change_set; -mod component; -mod diagram; -mod edge; -mod func; -mod func_execution; -mod graph; -mod history_event; -mod key_pair; +// mod action_prototype; +// mod attribute; +// mod change_set; +// mod component; +// mod diagram; +// mod edge; +// mod func; +// mod func_execution; +// mod graph; +// mod history_event; +// mod key_pair; mod mostly_everything_is_a_node_or_an_edge; -mod node; -mod node_menu; -mod pkg; -mod prop; -mod prop_tree; -mod property_editor; -mod provider; -mod schema; -mod secret; -mod 
socket; -mod standard_model; -mod status_update; -mod tenancy; -mod user; -mod validation_prototype; -mod validation_resolver; -mod visibility; -mod workspace; +// mod node; +// mod node_menu; +// mod pkg; +// mod prop; +// mod prop_tree; +// mod property_editor; +// mod provider; +// mod schema; +// mod secret; +// mod socket; +// mod standard_model; +// mod status_update; +// mod tenancy; +// mod user; +// mod validation_prototype; +// mod validation_resolver; +// mod visibility; +// mod workspace; diff --git a/lib/dal/tests/integration_test/internal/mostly_everything_is_a_node_or_an_edge.rs b/lib/dal/tests/integration_test/internal/mostly_everything_is_a_node_or_an_edge.rs index e984e92b8f..80e7cb4e8a 100644 --- a/lib/dal/tests/integration_test/internal/mostly_everything_is_a_node_or_an_edge.rs +++ b/lib/dal/tests/integration_test/internal/mostly_everything_is_a_node_or_an_edge.rs @@ -3,6 +3,7 @@ //! //! For all tests in this module, provide "SI_TEST_BUILTIN_SCHEMAS=none" as an environment variable. -mod change_set; -mod content_store; -mod rebaser; +mod builtins; +// mod change_set; +// mod content_store; +// mod rebaser; diff --git a/lib/dal/tests/integration_test/internal/mostly_everything_is_a_node_or_an_edge/builtins.rs b/lib/dal/tests/integration_test/internal/mostly_everything_is_a_node_or_an_edge/builtins.rs new file mode 100644 index 0000000000..4290f85925 --- /dev/null +++ b/lib/dal/tests/integration_test/internal/mostly_everything_is_a_node_or_an_edge/builtins.rs @@ -0,0 +1,31 @@ +use dal::func::intrinsics::IntrinsicFunc; +use dal::DalContext; +use dal_test::test; +use strum::IntoEnumIterator; + +// TODO(nick): restore dal_test::helpers module to ensure the macro works. 
+#[test] +async fn builtins(ctx: &DalContext) { + let mut snapshot = ctx + .workspace_snapshot() + .expect("could not get workspace snapshot") + .lock() + .await; + + let mut funcs: Vec = snapshot + .list_funcs(ctx) + .await + .expect("list funcs should work") + .iter() + .map(|f| f.name.to_owned()) + .collect(); + + let mut intrinsics: Vec = IntrinsicFunc::iter() + .map(|intrinsic| intrinsic.name().to_owned()) + .collect(); + + funcs.sort(); + intrinsics.sort(); + + assert_eq!(intrinsics, funcs); +} diff --git a/lib/dal/tests/integration_test/mod.rs b/lib/dal/tests/integration_test/mod.rs index e5377e54db..581065c3cd 100644 --- a/lib/dal/tests/integration_test/mod.rs +++ b/lib/dal/tests/integration_test/mod.rs @@ -1,6 +1,6 @@ /// Contains tests that will become part of individual package testing (i.e. testing that a "Docker /// Image" connects and works as intended with a "Butane Container"). -mod external; +// mod external; /// Contains tests that test SI directly and use test-exclusive builtins. All tests in this module /// should (eventually) pass with `SI_TEST_BUILTIN_SCHEMAS=test`. 
mod internal; diff --git a/lib/pinga-server/BUCK b/lib/pinga-server/BUCK index ae13882f48..3fed623346 100644 --- a/lib/pinga-server/BUCK +++ b/lib/pinga-server/BUCK @@ -6,6 +6,7 @@ rust_library( "//lib/buck2-resources:buck2-resources", "//lib/dal:dal", "//lib/nats-subscriber:nats-subscriber", + "//lib/rebaser-client:rebaser-client", "//lib/si-crypto:si-crypto", "//lib/si-data-nats:si-data-nats", "//lib/si-data-pg:si-data-pg", diff --git a/lib/pinga-server/Cargo.toml b/lib/pinga-server/Cargo.toml index 69da8c9c91..5506e17b15 100644 --- a/lib/pinga-server/Cargo.toml +++ b/lib/pinga-server/Cargo.toml @@ -14,6 +14,7 @@ nats-subscriber = { path = "../../lib/nats-subscriber" } remain = { workspace = true } serde = { workspace = true } serde_json = { workspace = true } +rebaser-client = { path = "../../lib/rebaser-client" } si-crypto = { path = "../../lib/si-crypto" } si-data-nats = { path = "../../lib/si-data-nats" } si-data-pg = { path = "../../lib/si-data-pg" } diff --git a/lib/pinga-server/src/server.rs b/lib/pinga-server/src/server.rs index 131c58355a..598dfa3b5d 100644 --- a/lib/pinga-server/src/server.rs +++ b/lib/pinga-server/src/server.rs @@ -10,6 +10,7 @@ use dal::{ }; use futures::{FutureExt, Stream, StreamExt}; use nats_subscriber::{Request, SubscriberError}; +use rebaser_client::Config as RebaserClientConfig; use si_crypto::{SymmetricCryptoError, SymmetricCryptoService, SymmetricCryptoServiceConfig}; use si_data_nats::{NatsClient, NatsConfig, NatsError}; use si_data_pg::{PgPool, PgPoolConfig, PgPoolError}; @@ -106,6 +107,7 @@ impl Server { let job_processor = Self::create_job_processor(nats.clone()); let symmetric_crypto_service = Self::create_symmetric_crypto_service(config.symmetric_crypto_service()).await?; + let rebaser_config = RebaserClientConfig::default(); let services_context = ServicesContext::new( pg_pool, @@ -116,6 +118,7 @@ impl Server { None, None, symmetric_crypto_service, + rebaser_config, ); Self::from_services( diff --git 
a/lib/rebaser-client/src/client.rs b/lib/rebaser-client/src/client.rs index 8de750e551..bb61fe628b 100644 --- a/lib/rebaser-client/src/client.rs +++ b/lib/rebaser-client/src/client.rs @@ -5,7 +5,7 @@ use rebaser_core::{ ChangeSetMessage, ChangeSetReplyMessage, ManagementMessage, ManagementMessageAction, StreamNameGenerator, }; -use si_rabbitmq::{Consumer, ConsumerOffsetSpecification, Environment, Producer}; +use si_rabbitmq::{Config, Consumer, ConsumerOffsetSpecification, Environment, Producer}; use std::collections::HashMap; use std::time::Duration; use telemetry::prelude::*; @@ -22,6 +22,7 @@ pub struct Client { management_stream: Stream, streams: HashMap, reply_timeout: Duration, + config: Config, } #[allow(missing_debug_implementations)] @@ -34,12 +35,13 @@ struct Stream { impl Client { /// Creates a new [`Client`] to communicate with a running rebaser /// [`Server`](rebaser_server::Server). - pub async fn new() -> ClientResult { - let environment = Environment::new().await?; + pub async fn new(config: Config) -> ClientResult { + let environment = Environment::new(&config).await?; let id = Ulid::new(); - let management_stream = StreamNameGenerator::management(); - let management_reply_stream = StreamNameGenerator::management_reply(id); + let management_stream = StreamNameGenerator::management(config.stream_prefix()); + let management_reply_stream = + StreamNameGenerator::management_reply(id, config.stream_prefix()); environment.create_stream(&management_reply_stream).await?; let management_reply_consumer = Consumer::new( @@ -62,6 +64,7 @@ impl Client { }, streams: HashMap::new(), reply_timeout: Duration::from_secs(REPLY_TIMEOUT_SECONDS), + config, }) } @@ -154,8 +157,12 @@ impl Client { let change_set_stream: String = serde_json::from_value(contents)?; // TODO(nick): move stream generation to a common crate. 
- let environment = Environment::new().await?; - let reply_stream = StreamNameGenerator::change_set_reply(change_set_id, self.id); + let environment = Environment::new(&self.config).await?; + let reply_stream = StreamNameGenerator::change_set_reply( + change_set_id, + self.id, + self.config.stream_prefix(), + ); environment.create_stream(&reply_stream).await?; // FIXME(nick): name the producer properly. @@ -200,7 +207,7 @@ impl Client { if let Err(e) = handle.close().await { error!("{e}"); } - let environment = Environment::new().await?; + let environment = Environment::new(&self.config).await?; environment.delete_stream(stream.reply_stream).await?; } None => { @@ -234,7 +241,7 @@ impl Client { } // Finally, delete the reply stream. - match Environment::new().await { + match Environment::new(&self.config).await { Ok(environment) => { if let Err(e) = environment .delete_stream(self.management_stream.reply_stream) diff --git a/lib/rebaser-client/src/lib.rs b/lib/rebaser-client/src/lib.rs index 46533629be..ba5d7eb4bd 100644 --- a/lib/rebaser-client/src/lib.rs +++ b/lib/rebaser-client/src/lib.rs @@ -25,6 +25,8 @@ mod client; pub use client::Client; +pub use rebaser_core::ChangeSetReplyMessage; +pub use si_rabbitmq::Config; use si_rabbitmq::{Delivery, RabbitError}; use telemetry::prelude::error; diff --git a/lib/rebaser-core/src/lib.rs b/lib/rebaser-core/src/lib.rs index 407c494ce3..3c83cfb52e 100644 --- a/lib/rebaser-core/src/lib.rs +++ b/lib/rebaser-core/src/lib.rs @@ -92,24 +92,46 @@ pub struct StreamNameGenerator; impl StreamNameGenerator { /// Returns the name of the management stream. - pub fn management() -> &'static str { - "rebaser-management" + pub fn management(stream_prefix: Option>) -> String { + Self::assemble_with_prefix("rebaser-management", stream_prefix) } /// Returns the name of the stream that the rebaser will reply to for messages sent to the /// management stream from a specific client. 
- pub fn management_reply(client_id: Ulid) -> String { - format!("rebaser-management-reply-{client_id}") + pub fn management_reply(client_id: Ulid, stream_prefix: Option>) -> String { + Self::assemble_with_prefix( + format!("rebaser-management-reply-{client_id}"), + stream_prefix, + ) } /// Returns the name of a stream for a given change set. - pub fn change_set(change_set_id: Ulid) -> String { - format!("rebaser-{change_set_id}") + pub fn change_set(change_set_id: Ulid, stream_prefix: Option>) -> String { + Self::assemble_with_prefix(format!("rebaser-{change_set_id}"), stream_prefix) } /// Returns the name of the stream that the rebaser will reply to for messages sent to a change /// set stream from a specific client. - pub fn change_set_reply(change_set_id: Ulid, client_id: Ulid) -> String { - format!("rebaser-{change_set_id}-reply-{client_id}") + pub fn change_set_reply( + change_set_id: Ulid, + client_id: Ulid, + stream_prefix: Option>, + ) -> String { + Self::assemble_with_prefix( + format!("rebaser-{change_set_id}-reply-{client_id}"), + stream_prefix, + ) + } + + fn assemble_with_prefix( + base_stream_name: impl AsRef, + maybe_stream_prefix: Option>, + ) -> String { + let base_stream_name = base_stream_name.as_ref(); + if let Some(stream_prefix) = maybe_stream_prefix { + format!("{}-{base_stream_name}", stream_prefix.as_ref()) + } else { + base_stream_name.to_string() + } } } diff --git a/lib/rebaser-server/BUCK b/lib/rebaser-server/BUCK index 1043ff0709..041425bbdb 100644 --- a/lib/rebaser-server/BUCK +++ b/lib/rebaser-server/BUCK @@ -18,9 +18,11 @@ rust_library( "//lib/telemetry-rs:telemetry", "//lib/veritech-client:veritech-client", "//third-party/rust:derive_builder", + "//third-party/rust:futures", "//third-party/rust:remain", "//third-party/rust:serde", "//third-party/rust:serde_json", + "//third-party/rust:stream-cancel", "//third-party/rust:thiserror", "//third-party/rust:tokio", "//third-party/rust:ulid", @@ -28,4 +30,4 @@ rust_library( srcs = glob([ 
"src/**/*.rs", ]), -) \ No newline at end of file +) diff --git a/lib/rebaser-server/Cargo.toml b/lib/rebaser-server/Cargo.toml index 7c204be83f..b296456b2d 100644 --- a/lib/rebaser-server/Cargo.toml +++ b/lib/rebaser-server/Cargo.toml @@ -21,9 +21,11 @@ telemetry = { path = "../../lib/telemetry-rs" } veritech-client = { path = "../../lib/veritech-client" } derive_builder = { workspace = true } +futures = { workspace = true } remain = { workspace = true } serde = { workspace = true } serde_json = { workspace = true } +stream-cancel = { workspace = true } thiserror = { workspace = true } tokio = { workspace = true } ulid = { workspace = true } diff --git a/lib/rebaser-server/src/config.rs b/lib/rebaser-server/src/config.rs index 0c4c8b57e9..6a946b06e9 100644 --- a/lib/rebaser-server/src/config.rs +++ b/lib/rebaser-server/src/config.rs @@ -6,6 +6,7 @@ use serde::{Deserialize, Serialize}; use si_crypto::{SymmetricCryptoServiceConfig, SymmetricCryptoServiceConfigFile}; use si_data_nats::NatsConfig; use si_data_pg::PgPoolConfig; +use si_rabbitmq::Config as SiRabbitMqConfig; use si_std::{CanonicalFile, CanonicalFileError}; use telemetry::prelude::*; use thiserror::Error; @@ -50,6 +51,9 @@ pub struct Config { recreate_management_stream: bool, symmetric_crypto_service: SymmetricCryptoServiceConfig, + + #[builder(default)] + rabbitmq_config: SiRabbitMqConfig, } impl StandardConfig for Config { @@ -89,6 +93,11 @@ impl Config { pub fn symmetric_crypto_service(&self) -> &SymmetricCryptoServiceConfig { &self.symmetric_crypto_service } + + /// Gets a reference to the config for the SiRabbitMqConfig + pub fn rabbitmq_config(&self) -> &SiRabbitMqConfig { + &self.rabbitmq_config + } } /// The configuration file for creating a [`Server`]. 
@@ -104,6 +113,8 @@ pub struct ConfigFile { recreate_management_stream: bool, #[serde(default = "default_symmetric_crypto_config")] symmetric_crypto_service: SymmetricCryptoServiceConfigFile, + #[serde(default)] + rabbitmq_config: SiRabbitMqConfig, } impl Default for ConfigFile { @@ -114,6 +125,7 @@ impl Default for ConfigFile { cyclone_encryption_key_path: default_cyclone_encryption_key_path(), recreate_management_stream: false, symmetric_crypto_service: default_symmetric_crypto_config(), + rabbitmq_config: Default::default(), } } } diff --git a/lib/rebaser-server/src/lib.rs b/lib/rebaser-server/src/lib.rs index 6a4c6fc901..8f77ca482b 100644 --- a/lib/rebaser-server/src/lib.rs +++ b/lib/rebaser-server/src/lib.rs @@ -27,6 +27,7 @@ pub use config::ConfigBuilder; pub use config::ConfigError; pub use config::ConfigFile; pub use server::Server; +pub use si_rabbitmq::Config as SiRabbitMqConfig; pub use si_settings::StandardConfig; pub use si_settings::StandardConfigFile; diff --git a/lib/rebaser-server/src/server.rs b/lib/rebaser-server/src/server.rs index 4f4dc6d940..236050788a 100644 --- a/lib/rebaser-server/src/server.rs +++ b/lib/rebaser-server/src/server.rs @@ -9,7 +9,7 @@ use si_crypto::SymmetricCryptoServiceConfig; use si_crypto::{SymmetricCryptoError, SymmetricCryptoService}; use si_data_nats::{NatsClient, NatsConfig, NatsError}; use si_data_pg::{PgPool, PgPoolConfig, PgPoolError}; -use si_rabbitmq::RabbitError; +use si_rabbitmq::{Config as SiRabbitMqConfig, RabbitError}; use std::{io, path::Path, sync::Arc}; use telemetry::prelude::*; use thiserror::Error; @@ -107,6 +107,8 @@ pub struct Server { /// If enabled, re-create the RabbitMQ Stream. If disabled, create the Stream if it does not /// exist. 
recreate_management_stream: bool, + /// The configuration for the si-rabbitmq library + rabbitmq_config: SiRabbitMqConfig, } impl Server { @@ -123,6 +125,7 @@ impl Server { let job_processor = Self::create_job_processor(nats.clone()); let symmetric_crypto_service = Self::create_symmetric_crypto_service(config.symmetric_crypto_service()).await?; + let rabbitmq_config = config.rabbitmq_config(); Self::from_services( encryption_key, @@ -132,6 +135,7 @@ impl Server { job_processor, symmetric_crypto_service, config.recreate_management_stream(), + rabbitmq_config.to_owned(), ) } @@ -145,6 +149,7 @@ impl Server { job_processor: Box, symmetric_crypto_service: SymmetricCryptoService, recreate_management_stream: bool, + rabbitmq_config: SiRabbitMqConfig, ) -> ServerResult { // An mpsc channel which can be used to externally shut down the server. let (external_shutdown_tx, external_shutdown_rx) = mpsc::channel(4); @@ -169,6 +174,7 @@ impl Server { shutdown_watch_rx, external_shutdown_tx, graceful_shutdown_rx, + rabbitmq_config, }) } @@ -184,6 +190,7 @@ impl Server { self.symmetric_crypto_service, self.encryption_key, self.shutdown_watch_rx, + self.rabbitmq_config, ) .await; @@ -217,6 +224,7 @@ impl Server { #[instrument(name = "rebaser.init.create_pg_pool", skip_all)] async fn create_pg_pool(pg_pool_config: &PgPoolConfig) -> ServerResult { + dbg!(&pg_pool_config); let pool = PgPool::new(pg_pool_config).await?; debug!("successfully started pg pool (note that not all connections may be healthy)"); Ok(pool) diff --git a/lib/rebaser-server/src/server/change_set_loop.rs b/lib/rebaser-server/src/server/change_set_loop.rs index 8ffdc01b43..ad06ae4ef0 100644 --- a/lib/rebaser-server/src/server/change_set_loop.rs +++ b/lib/rebaser-server/src/server/change_set_loop.rs @@ -8,7 +8,9 @@ use dal::{ WorkspaceSnapshot, }; use rebaser_core::{ChangeSetMessage, ChangeSetReplyMessage}; -use si_rabbitmq::{Consumer, Delivery, Environment, Producer, RabbitError}; +use si_rabbitmq::{ + Config as 
SiRabbitMqConfig, Consumer, Delivery, Environment, Producer, RabbitError, +}; use std::collections::HashMap; use telemetry::prelude::*; use thiserror::Error; @@ -42,8 +44,9 @@ type ChangeSetLoopResult = Result; pub(crate) async fn change_set_loop_infallible_wrapper( ctx_builder: DalContextBuilder, consumer: Consumer, + rabbitmq_config: SiRabbitMqConfig, ) { - if let Err(err) = change_set_loop(ctx_builder, consumer).await { + if let Err(err) = change_set_loop(ctx_builder, consumer, &rabbitmq_config).await { error!(error = ?err, "change set loop failed"); } } @@ -51,9 +54,10 @@ pub(crate) async fn change_set_loop_infallible_wrapper( async fn change_set_loop( ctx_builder: DalContextBuilder, mut consumer: Consumer, + rabbitmq_config: &SiRabbitMqConfig, ) -> ChangeSetLoopResult> { // Create an environment for reply streams. - let environment = Environment::new().await?; + let environment = Environment::new(rabbitmq_config).await?; while let Some(delivery) = consumer.next().await? { let mut ctx = ctx_builder.build_default().await?; ctx.update_visibility(Visibility::new_head(false)); @@ -144,7 +148,7 @@ async fn process_delivery( onto_vector_clock_id, ) .await?; - debug!("conflicts and updates detected: {conflicts:?} {updates:?}"); + info!("conflicts and updates detected: {conflicts:?} {updates:?}"); // If there are conflicts, immediately assemble a reply message that conflicts were found. // Otherwise, we can perform updates and assemble a "success" reply message. @@ -174,7 +178,7 @@ async fn process_delivery( // Send reply to the "reply to stream" for the specific client. 
let inbound_stream = inbound_stream.as_ref(); let reply_to_stream = reply_to_stream.as_ref(); - debug!( + info!( "processed delivery from \"{inbound_stream}\", committed transaction and sending reply to \"{reply_to_stream}\"", ); let mut producer = Producer::new(&environment, reply_to_stream).await?; @@ -184,7 +188,7 @@ async fn process_delivery( // Close the producer _after_ logging, but do not make it an infallible close. We do that // because the function managing the change set loop is infallible and will log the error. - debug!("sent reply to \"{reply_to_stream}\""); + info!("sent reply to \"{reply_to_stream}\""); producer.close().await?; Ok(()) @@ -205,28 +209,48 @@ async fn perform_updates_and_write_out_and_update_pointer( destination, edge_weight, } => { - let source = *updated.get(source).unwrap_or(source); + let updated_source = *updated.get(source).unwrap_or(source); let destination = find_in_to_rebase_or_create_using_onto( *destination, &mut updated, onto_workspace_snapshot, to_rebase_workspace_snapshot, )?; - to_rebase_workspace_snapshot.add_edge(source, edge_weight.clone(), destination)?; + let new_edge_index = to_rebase_workspace_snapshot.add_edge( + updated_source, + edge_weight.clone(), + destination, + )?; + let (new_source, _) = + to_rebase_workspace_snapshot.edge_endpoints(new_edge_index)?; + updated.insert(*source, new_source); } - Update::RemoveEdge(edge) => { - // TODO(nick): debug log or handle whether or not the edge was deleted. 
- let _ = to_rebase_workspace_snapshot.remove_edge(*edge)?; + Update::RemoveEdge { + source, + destination, + edge_kind, + } => { + let updated_source = *updated.get(source).unwrap_or(source); + let destination = *updated.get(destination).unwrap_or(destination); + updated.extend(to_rebase_workspace_snapshot.remove_edge( + to_rebase_change_set, + updated_source, + destination, + *edge_kind, + )?); } Update::ReplaceSubgraph { onto, to_rebase } => { - let to_rebase = *updated.get(to_rebase).unwrap_or(to_rebase); + let updated_to_rebase = *updated.get(to_rebase).unwrap_or(to_rebase); let new_subgraph_root = find_in_to_rebase_or_create_using_onto( *onto, &mut updated, onto_workspace_snapshot, to_rebase_workspace_snapshot, )?; - to_rebase_workspace_snapshot.replace_references(to_rebase, new_subgraph_root)?; + updated.extend( + to_rebase_workspace_snapshot + .replace_references(updated_to_rebase, new_subgraph_root)?, + ); } } } @@ -239,6 +263,7 @@ async fn perform_updates_and_write_out_and_update_pointer( to_rebase_change_set .update_pointer(ctx, to_rebase_workspace_snapshot.id()) .await?; + // dbg!(to_rebase_workspace_snapshot.id()); Ok(()) } diff --git a/lib/rebaser-server/src/server/management_loop.rs b/lib/rebaser-server/src/server/management_loop.rs index 3898090a12..7312fa72b6 100644 --- a/lib/rebaser-server/src/server/management_loop.rs +++ b/lib/rebaser-server/src/server/management_loop.rs @@ -1,15 +1,18 @@ use dal::{DalContext, JobQueueProcessor, ServicesContext}; - +use futures::{FutureExt, StreamExt}; use rebaser_core::{ManagementMessage, ManagementMessageAction, StreamNameGenerator}; +use si_crypto::SymmetricCryptoService; use si_data_nats::NatsClient; use si_data_pg::PgPool; -use si_rabbitmq::{Consumer, ConsumerHandle, ConsumerOffsetSpecification, Environment, Producer}; +use si_rabbitmq::{ + Config as SiRabbitMqConfig, Consumer, ConsumerHandle, ConsumerOffsetSpecification, Environment, + Producer, +}; +use si_rabbitmq::{Delivery, RabbitError}; use 
std::collections::HashMap; - use std::sync::Arc; +use stream_cancel::StreamExt as StreamCancelStreamExt; use telemetry::prelude::*; - -use si_crypto::SymmetricCryptoService; use tokio::sync::watch; use ulid::Ulid; @@ -24,6 +27,7 @@ pub(crate) async fn management_loop_infallible_wrapper( symmetric_crypto_service: SymmetricCryptoService, encryption_key: Arc, shutdown_watch_rx: watch::Receiver<()>, + rabbitmq_config: SiRabbitMqConfig, ) { if let Err(err) = management_loop( recreate_management_stream, @@ -33,6 +37,7 @@ pub(crate) async fn management_loop_infallible_wrapper( job_processor, symmetric_crypto_service, encryption_key, + rabbitmq_config, shutdown_watch_rx, ) .await @@ -49,7 +54,8 @@ async fn management_loop( job_processor: Box, symmetric_crypto_service: SymmetricCryptoService, encryption_key: Arc, - _shutdown_watch_rx: watch::Receiver<()>, + rabbitmq_config: SiRabbitMqConfig, + mut shutdown_watch_rx: watch::Receiver<()>, ) -> ServerResult<()> { let services_context = ServicesContext::new( pg_pool, @@ -60,6 +66,7 @@ async fn management_loop( None, None, symmetric_crypto_service, + rabbitmq_config.clone(), ); // let ctx_builder = DalContext::builder(services_context, false); @@ -77,23 +84,32 @@ async fn management_loop( // NOTE: QUERY DB FOR OFFSET NUMBER OR GO TO FIRST SPECIFICATION // Prepare the environment and management stream. 
- let management_stream = StreamNameGenerator::management(); - let environment = Environment::new().await?; + let management_stream = StreamNameGenerator::management(rabbitmq_config.stream_prefix()); + let environment = Environment::new(&rabbitmq_config).await?; if recreate_management_stream { - environment.delete_stream(management_stream).await?; + environment.delete_stream(&management_stream).await?; } - environment.create_stream(management_stream).await?; + environment.create_stream(&management_stream).await?; let mut management_consumer = Consumer::new( &environment, - management_stream, + &management_stream, ConsumerOffsetSpecification::Next, ) .await?; let management_handle = management_consumer.handle(); let mut rebaser_handles: HashMap = HashMap::new(); - while let Some(management_delivery) = management_consumer.next().await? { + let mut inbound_management_stream = management_consumer + .into_stream() + .await? + .take_until_if(Box::pin(shutdown_watch_rx.changed().map(|_| true))); + + while let Some(unprocessed_management_delivery) = inbound_management_stream.next().await { + let management_delivery = Delivery::try_from( + unprocessed_management_delivery.map_err(RabbitError::ConsumerDelivery)?, + )?; + let contents = management_delivery .message_contents .ok_or(ServerError::MissingManagementMessageContents)?; @@ -120,7 +136,14 @@ async fn management_loop( } } ManagementMessageAction::OpenChangeSet => { - let new_stream = StreamNameGenerator::change_set(mm.change_set_id); + info!( + "finding or creating stream for change set: {}", + mm.change_set_id + ); + let new_stream = StreamNameGenerator::change_set( + mm.change_set_id, + rabbitmq_config.stream_prefix(), + ); let stream_already_exists = environment.create_stream(&new_stream).await?; // Only create the new stream and loop if the stream does not already exist. 
@@ -132,9 +155,11 @@ async fn management_loop( rebaser_handles.insert(mm.change_set_id, (new_stream.clone(), handle)); let ctx_builder = DalContext::builder(services_context.clone(), false); + let rabbitmq_config = rabbitmq_config.clone(); tokio::spawn(change_set_loop::change_set_loop_infallible_wrapper( ctx_builder, consumer, + rabbitmq_config, )); } @@ -147,13 +172,10 @@ async fn management_loop( } // Once the loop is done, perform cleanup. - for (_, (stream, handle)) in rebaser_handles.drain() { - if let Err(err) = handle.close().await { + for (_, (_change_set_stream, change_set_consumer_handle)) in rebaser_handles.drain() { + if let Err(err) = change_set_consumer_handle.close().await { warn!(error = ?err, "closing change set consumer failed during cleanup"); } - if let Err(err) = environment.delete_stream(stream).await { - warn!(error = ?err, "deleting change set stream failed during cleanup"); - } } if let Err(err) = management_handle.close().await { warn!(error = ?err, "closing management consumer failed during cleanup"); diff --git a/lib/sdf-server/src/server/routes.rs b/lib/sdf-server/src/server/routes.rs index 11fcf1b076..08378c1c68 100644 --- a/lib/sdf-server/src/server/routes.rs +++ b/lib/sdf-server/src/server/routes.rs @@ -26,13 +26,13 @@ pub fn routes(state: AppState) -> Router { "/api/change_set", crate::server::service::change_set::routes(), ) - .nest("/api/session", crate::server::service::session::routes()); - // .nest( - // "/api/component", - // crate::server::service::component::routes(), - // ) - // .nest("/api/fix", crate::server::service::fix::routes()) - // .nest("/api/func", crate::server::service::func::routes()) + .nest("/api/session", crate::server::service::session::routes()) + // .nest( + // "/api/component", + // crate::server::service::component::routes(), + // ) + // .nest("/api/fix", crate::server::service::fix::routes()) + .nest("/api/func", crate::server::service::func::routes()); // .nest("/api/pkg", 
crate::server::service::pkg::routes()) // .nest("/api/provider", crate::server::service::provider::routes()) // .nest( diff --git a/lib/sdf-server/src/server/server.rs b/lib/sdf-server/src/server/server.rs index 6b1ee2768d..a711e19b18 100644 --- a/lib/sdf-server/src/server/server.rs +++ b/lib/sdf-server/src/server/server.rs @@ -306,9 +306,10 @@ pub async fn migrate_builtins_from_module_index(services_context: &ServicesConte dal_context.set_no_dependent_values(); let mut ctx = dal_context.build_default().await?; - let workspace = Workspace::builtin(&ctx).await?; + let workspace = Workspace::builtin(&mut ctx).await?; ctx.update_tenancy(Tenancy::new(*workspace.pk())); - ctx.blocking_commit().await?; + ctx.update_to_head(); + ctx.update_snapshot_to_visibility().await?; info!("migrating intrinsic functions"); builtins::func::migrate_intrinsics(&ctx).await?; @@ -384,8 +385,6 @@ async fn install_builtins( { println!("Pkg {pkg_name} Install failed, {err}"); } else { - ctx.commit().await?; - count += 1; println!( "Pkg {pkg_name} Install finished successfully. 
{count} of {total} installed.", @@ -400,6 +399,18 @@ async fn install_builtins( dal.commit().await?; + let mut ctx = ctx.clone(); + ctx.update_snapshot_to_visibility().await?; + dbg!(ctx + .workspace_snapshot() + .expect("wsnapshot") + .lock() + .await + .list_funcs(&ctx) + .await + .expect("list funcs") + .len()); + Ok(()) } diff --git a/lib/sdf-server/src/server/service.rs b/lib/sdf-server/src/server/service.rs index c9e6e4e65a..a6a95baf45 100644 --- a/lib/sdf-server/src/server/service.rs +++ b/lib/sdf-server/src/server/service.rs @@ -2,7 +2,7 @@ pub mod change_set; // pub mod component; pub mod diagram; // pub mod fix; -// pub mod func; +pub mod func; // pub mod pkg; // pub mod provider; // pub mod qualification; diff --git a/lib/sdf-server/src/server/service/func.rs b/lib/sdf-server/src/server/service/func.rs index 1c6d10ff91..18989b69a7 100644 --- a/lib/sdf-server/src/server/service/func.rs +++ b/lib/sdf-server/src/server/service/func.rs @@ -1,199 +1,175 @@ -use std::collections::HashMap; - -use axum::{ - response::Response, - routing::{get, post}, - Json, Router, +use axum::response::Response; +use axum::Json; +use axum::{routing::get, Router}; +use dal::workspace_snapshot::WorkspaceSnapshotError; +use dal::{ + Func, FuncBackendKind, FuncBackendResponseType, FuncId, StandardModel, TransactionsError, }; use serde::{Deserialize, Serialize}; use thiserror::Error; -use tokio::task::JoinError; - -use dal::authentication_prototype::{AuthenticationPrototype, AuthenticationPrototypeError}; -use dal::func::execution::FuncExecutionError; -use dal::{ - attribute::context::{AttributeContextBuilder, AttributeContextBuilderError}, - func::{ - argument::{FuncArgument, FuncArgumentError, FuncArgumentId, FuncArgumentKind}, - binding_return_value::FuncBindingReturnValueError, - }, - prop_tree::PropTreeError, - prototype_context::PrototypeContextError, - schema::variant::SchemaVariantError, - ActionKind, ActionPrototype, ActionPrototypeError, AttributeContext, 
AttributeContextError, - AttributePrototype, AttributePrototypeArgumentError, AttributePrototypeArgumentId, - AttributePrototypeError, AttributePrototypeId, AttributeValueError, ChangeSetError, - ComponentError, ComponentId, DalContext, ExternalProviderError, ExternalProviderId, Func, - FuncBackendKind, FuncBackendResponseType, FuncBindingError, FuncId, InternalProvider, - InternalProviderError, InternalProviderId, LeafInputLocation, Prop, PropError, PropId, - PrototypeListForFuncError, SchemaVariant, SchemaVariantId, StandardModel, StandardModelError, - TenancyError, TransactionsError, ValidationPrototype, ValidationPrototypeError, WsEventError, -}; -use crate::server::{impl_default_error_into_response, state::AppState}; -use crate::service::func::get_func::GetFuncResponse; +use crate::server::impl_default_error_into_response; +use crate::server::state::AppState; -pub mod create_func; -pub mod delete_func; -pub mod execute; -pub mod get_func; +// pub mod create_func; +// pub mod delete_func; +// pub mod execute; +// pub mod get_func; pub mod list_funcs; -pub mod list_input_sources; -pub mod revert_func; -pub mod save_and_exec; -pub mod save_func; +// pub mod list_input_sources; +// pub mod revert_func; +// pub mod save_and_exec; +// pub mod save_func; #[remain::sorted] #[derive(Error, Debug)] pub enum FuncError { - #[error("action func {0} assigned to multiple kinds")] - ActionFuncMultipleKinds(FuncId), - #[error("action kind missing on prototypes for action func {0}")] - ActionKindMissing(FuncId), - #[error(transparent)] - ActionPrototype(#[from] ActionPrototypeError), - #[error("attribute context error: {0}")] - AttributeContext(#[from] AttributeContextError), - #[error("attribute context builder error: {0}")] - AttributeContextBuilder(#[from] AttributeContextBuilderError), - #[error("attribute prototype error: {0}")] - AttributePrototype(#[from] AttributePrototypeError), - #[error("That attribute is already set by the function named \"{0}\"")] - 
AttributePrototypeAlreadySetByFunc(String), - #[error("attribute prototype argument error: {0}")] - AttributePrototypeArgument(#[from] AttributePrototypeArgumentError), - #[error("attribute prototype missing")] - AttributePrototypeMissing, - #[error("attribute prototype {0} is missing argument {1}")] - AttributePrototypeMissingArgument(AttributePrototypeId, AttributePrototypeArgumentId), - #[error("attribute prototype argument {0} is internal provider id")] - AttributePrototypeMissingInternalProviderId(AttributePrototypeArgumentId), - #[error("attribute prototype {0} is missing its prop {1}")] - AttributePrototypeMissingProp(AttributePrototypeId, PropId), - #[error("attribute prototype {0} has no PropId or ExternalProviderId")] - AttributePrototypeMissingPropIdOrExternalProviderId(AttributePrototypeId), - #[error("attribute prototype {0} schema is missing")] - AttributePrototypeMissingSchema(AttributePrototypeId), - #[error("attribute prototype {0} schema_variant is missing")] - AttributePrototypeMissingSchemaVariant(AttributePrototypeId), - #[error("attribute value error: {0}")] - AttributeValue(#[from] AttributeValueError), - #[error("attribute value missing")] - AttributeValueMissing, - #[error("authentication prototype error: {0}")] - AuthenticationPrototypeError(#[from] AuthenticationPrototypeError), - #[error("change set error: {0}")] - ChangeSet(#[from] ChangeSetError), - #[error("component error: {0}")] - Component(#[from] ComponentError), - #[error("component missing schema variant")] - ComponentMissingSchemaVariant(ComponentId), + // #[error("action func {0} assigned to multiple kinds")] + // ActionFuncMultipleKinds(FuncId), + // #[error("action kind missing on prototypes for action func {0}")] + // ActionKindMissing(FuncId), + // #[error(transparent)] + // ActionPrototype(#[from] ActionPrototypeError), + // #[error("attribute context error: {0}")] + // AttributeContext(#[from] AttributeContextError), + // #[error("attribute context builder error: {0}")] 
+ // AttributeContextBuilder(#[from] AttributeContextBuilderError), + // #[error("attribute prototype error: {0}")] + // AttributePrototype(#[from] AttributePrototypeError), + // #[error("That attribute is already set by the function named \"{0}\"")] + // AttributePrototypeAlreadySetByFunc(String), + // #[error("attribute prototype argument error: {0}")] + // AttributePrototypeArgument(#[from] AttributePrototypeArgumentError), + // #[error("attribute prototype missing")] + // AttributePrototypeMissing, + // #[error("attribute prototype {0} is missing argument {1}")] + // AttributePrototypeMissingArgument(AttributePrototypeId, AttributePrototypeArgumentId), + // #[error("attribute prototype argument {0} is internal provider id")] + // AttributePrototypeMissingInternalProviderId(AttributePrototypeArgumentId), + // #[error("attribute prototype {0} is missing its prop {1}")] + // AttributePrototypeMissingProp(AttributePrototypeId, PropId), + // #[error("attribute prototype {0} has no PropId or ExternalProviderId")] + // AttributePrototypeMissingPropIdOrExternalProviderId(AttributePrototypeId), + // #[error("attribute prototype {0} schema is missing")] + // AttributePrototypeMissingSchema(AttributePrototypeId), + // #[error("attribute prototype {0} schema_variant is missing")] + // AttributePrototypeMissingSchemaVariant(AttributePrototypeId), + // #[error("attribute value error: {0}")] + // AttributeValue(#[from] AttributeValueError), + // #[error("attribute value missing")] + // AttributeValueMissing, + // #[error("change set error: {0}")] + // ChangeSet(#[from] ChangeSetError), + // #[error("component error: {0}")] + // Component(#[from] ComponentError), + // #[error("component missing schema variant")] + // ComponentMissingSchemaVariant(ComponentId), #[error(transparent)] ContextTransaction(#[from] TransactionsError), - #[error("editing reconciliation functions is not implemented")] - EditingReconciliationFuncsNotImplemented, - #[error(transparent)] - 
ExternalProvider(#[from] ExternalProviderError), - #[error(transparent)] - Func(#[from] dal::FuncError), - #[error("func argument not found")] - FuncArgNotFound, - #[error("func argument error: {0}")] - FuncArgument(#[from] FuncArgumentError), - #[error("func argument already exists for that name")] - FuncArgumentAlreadyExists, - #[error("func argument {0} missing attribute prototype argument for prototype {1}")] - FuncArgumentMissingPrototypeArgument(FuncArgumentId, AttributePrototypeId), - #[error("func binding error: {0}")] - FuncBinding(#[from] FuncBindingError), - #[error("func binding return value error: {0}")] - FuncBindingReturnValue(#[from] FuncBindingReturnValueError), - #[error("func binding return value not found")] - FuncBindingReturnValueMissing, + // #[error("editing reconciliation functions is not implemented")] + // EditingReconciliationFuncsNotImplemented, + // #[error(transparent)] + // ExternalProvider(#[from] ExternalProviderError), + // #[error(transparent)] + // Func(#[from] dal::FuncError), + // #[error("func argument not found")] + // FuncArgNotFound, + // #[error("func argument error: {0}")] + // FuncArgument(#[from] FuncArgumentError), + // #[error("func argument already exists for that name")] + // FuncArgumentAlreadyExists, + // #[error("func argument {0} missing attribute prototype argument for prototype {1}")] + // FuncArgumentMissingPrototypeArgument(FuncArgumentId, AttributePrototypeId), + // #[error("func binding error: {0}")] + // FuncBinding(#[from] FuncBindingError), + // #[error("func binding return value error: {0}")] + // FuncBindingReturnValue(#[from] FuncBindingReturnValueError), + // #[error("func binding return value not found")] + // FuncBindingReturnValueMissing, #[error("func {0} cannot be converted to frontend variant")] FuncCannotBeTurnedIntoVariant(FuncId), - // XXX: we will be able to remove this error once we make output sockets typed - #[error("Cannot bind function to both an output socket and a prop")] - 
FuncDestinationPropAndOutputSocket, - #[error("cannot bind func to different prop kinds")] - FuncDestinationPropKindMismatch, - #[error("Function execution: {0}")] - FuncExecution(#[from] FuncExecutionError), - #[error("Function execution failed: {0}")] - FuncExecutionFailed(String), - #[error("Function execution failed: this function is not connected to any assets, and was not executed")] - FuncExecutionFailedNoPrototypes, - #[error("Function still has associations: {0}")] - FuncHasAssociations(FuncId), - #[error("Function named \"{0}\" already exists in this changeset")] - FuncNameExists(String), - #[error("The function name \"{0}\" is reserved")] - FuncNameReserved(String), - #[error("Function not found")] - FuncNotFound, - #[error("func is not revertible")] - FuncNotRevertible, - #[error("Function not runnable")] - FuncNotRunnable, - #[error("Cannot create that type of function")] - FuncNotSupported, - #[error("Function options are incompatible with variant")] - FuncOptionsAndVariantMismatch, - #[error("Hyper error: {0}")] - Hyper(#[from] hyper::http::Error), - #[error("internal provider error: {0}")] - InternalProvider(#[from] InternalProviderError), - #[error("failed to join async task; bug!")] - Join(#[from] JoinError), - #[error("Missing required options for creating a function")] - MissingOptions, - #[error("Function is read-only")] - NotWritable, - #[error(transparent)] - Pg(#[from] si_data_pg::PgError), - #[error(transparent)] - PgPool(#[from] Box), - #[error("prop error: {0}")] - Prop(#[from] PropError), + // // XXX: we will be able to remove this error once we make output sockets typed + // #[error("Cannot bind function to both an output socket and a prop")] + // FuncDestinationPropAndOutputSocket, + // #[error("cannot bind func to different prop kinds")] + // FuncDestinationPropKindMismatch, + // #[error("Function execution: {0}")] + // FuncExecution(#[from] FuncExecutionError), + // #[error("Function execution failed: {0}")] + // 
FuncExecutionFailed(String), + // #[error("Function execution failed: this function is not connected to any assets, and was not executed")] + // FuncExecutionFailedNoPrototypes, + // #[error("Function still has associations: {0}")] + // FuncHasAssociations(FuncId), + // #[error("Function named \"{0}\" already exists in this changeset")] + // FuncNameExists(String), + // #[error("The function name \"{0}\" is reserved")] + // FuncNameReserved(String), + // #[error("Function not found")] + // FuncNotFound, + // #[error("func is not revertible")] + // FuncNotRevertible, + // #[error("Cannot create that type of function")] + // FuncNotSupported, + // #[error("Function options are incompatible with variant")] + // FuncOptionsAndVariantMismatch, + // #[error("Hyper error: {0}")] + // Hyper(#[from] hyper::http::Error), + // #[error("internal provider error: {0}")] + // InternalProvider(#[from] InternalProviderError), + // #[error("failed to join async task; bug!")] + // Join(#[from] JoinError), + // #[error("Missing required options for creating a function")] + // MissingOptions, + // #[error("Function is read-only")] + // NotWritable, + // #[error(transparent)] + // Pg(#[from] si_data_pg::PgError), + // #[error(transparent)] + // PgPool(#[from] Box), + // #[error("prop error: {0}")] + // Prop(#[from] PropError), #[error("prop for value not found")] PropNotFound, - #[error("prop tree error: {0}")] - PropTree(#[from] PropTreeError), - #[error("prototype context error: {0}")] - PrototypeContext(#[from] PrototypeContextError), - #[error("prototype list for func error: {0}")] - PrototypeListForFunc(#[from] PrototypeListForFuncError), - #[error("schema variant error: {0}")] - SchemaVariant(#[from] SchemaVariantError), - #[error("schema variant missing schema")] - SchemaVariantMissingSchema(SchemaVariantId), - #[error("Could not find schema variant for prop {0}")] - SchemaVariantNotFoundForProp(PropId), - #[error("json serialization error: {0}")] - SerdeJson(#[from] 
serde_json::Error),
+    // #[error("prop tree error: {0}")]
+    // PropTree(#[from] PropTreeError),
+    // #[error("prototype context error: {0}")]
+    // PrototypeContext(#[from] PrototypeContextError),
+    // #[error("prototype list for func error: {0}")]
+    // PrototypeListForFunc(#[from] PrototypeListForFuncError),
+    // #[error("schema variant error: {0}")]
+    // SchemaVariant(#[from] SchemaVariantError),
+    // #[error("schema variant missing schema")]
+    // SchemaVariantMissingSchema(SchemaVariantId),
+    // #[error("Could not find schema variant for prop {0}")]
+    // SchemaVariantNotFoundForProp(PropId),
+    // #[error("json serialization error: {0}")]
+    // SerdeJson(#[from] serde_json::Error),
+    // #[error(transparent)]
+    // StandardModel(#[from] StandardModelError),
+    // #[error("tenancy error: {0}")]
+    // Tenancy(#[from] TenancyError),
+    // #[error("unexpected func variant ({0:?}) creating attribute func")]
+    // UnexpectedFuncVariantCreatingAttributeFunc(FuncVariant),
+    // #[error("A validation already exists for that attribute")]
+    // ValidationAlreadyExists,
+    // #[error("validation prototype error: {0}")]
+    // ValidationPrototype(#[from] ValidationPrototypeError),
+    // #[error("validation prototype schema is missing")]
+    // ValidationPrototypeMissingSchema,
+    // #[error("validation prototype {0} schema_variant is missing")]
+    // ValidationPrototypeMissingSchemaVariant(SchemaVariantId),
+    // #[error("could not publish websocket event: {0}")]
+    // WsEvent(#[from] WsEventError),
     #[error(transparent)]
-    StandardModel(#[from] StandardModelError),
-    #[error("tenancy error: {0}")]
-    Tenancy(#[from] TenancyError),
-    #[error("unexpected func variant ({0:?}) creating attribute func")]
-    UnexpectedFuncVariantCreatingAttributeFunc(FuncVariant),
-    #[error("A validation already exists for that attribute")]
-    ValidationAlreadyExists,
-    #[error("validation prototype error: {0}")]
-    ValidationPrototype(#[from] ValidationPrototypeError),
-    #[error("validation prototype schema is 
missing")] - ValidationPrototypeMissingSchema, - #[error("validation prototype {0} schema_variant is missing")] - ValidationPrototypeMissingSchemaVariant(SchemaVariantId), - #[error("could not publish websocket event: {0}")] - WsEvent(#[from] WsEventError), + WorkspaceSnapshot(#[from] WorkspaceSnapshotError), } -impl From for FuncError { - fn from(value: si_data_pg::PgPoolError) -> Self { - Self::PgPool(Box::new(value)) - } -} +//impl From for FuncError { +// fn from(value: si_data_pg::PgPoolError) -> Self { +// Self::PgPool(Box::new(value)) +// } +//} pub type FuncResult = Result; @@ -231,7 +207,7 @@ impl TryFrom<&Func> for FuncVariant { type Error = FuncError; fn try_from(func: &Func) -> Result { - match (func.backend_kind(), func.backend_response_type()) { + match (func.backend_kind, func.backend_response_type) { (FuncBackendKind::JsAttribute, response_type) => match response_type { FuncBackendResponseType::CodeGeneration => Ok(FuncVariant::CodeGeneration), FuncBackendResponseType::Qualification => Ok(FuncVariant::Qualification), @@ -252,721 +228,720 @@ impl TryFrom<&Func> for FuncVariant { | (FuncBackendKind::String, _) | (FuncBackendKind::Unset, _) | (FuncBackendKind::Validation, _) => { - Err(FuncError::FuncCannotBeTurnedIntoVariant(*func.id())) + Err(FuncError::FuncCannotBeTurnedIntoVariant(func.id)) } } } } -#[derive(Deserialize, Serialize, Debug, Clone, PartialEq, Eq)] -#[serde(rename_all = "camelCase")] -pub struct AttributePrototypeArgumentView { - func_argument_id: FuncArgumentId, - func_argument_name: Option, - id: Option, - internal_provider_id: Option, -} - -#[derive(Deserialize, Serialize, Debug, Clone, PartialEq, Eq)] -#[serde(rename_all = "camelCase")] -pub struct AttributePrototypeView { - id: AttributePrototypeId, - component_id: Option, - prop_id: Option, - external_provider_id: Option, - prototype_arguments: Vec, -} - -impl AttributePrototypeView { - pub fn to_attribute_context(&self) -> FuncResult { - let mut builder = 
AttributeContextBuilder::new(); - if let Some(component_id) = self.component_id { - builder.set_component_id(component_id); - } - if let Some(prop_id) = self.prop_id { - builder.set_prop_id(prop_id); - } - if let Some(external_provider_id) = self.external_provider_id { - builder.set_external_provider_id(external_provider_id); - } - - Ok(builder.to_context()?) - } -} - -#[derive(Deserialize, Serialize, Debug, Clone, PartialEq, Eq)] -#[serde(rename_all = "camelCase")] -pub struct ValidationPrototypeView { - schema_variant_id: SchemaVariantId, - prop_id: PropId, -} - -#[remain::sorted] -#[derive(Deserialize, Serialize, Debug, Clone, PartialEq, Eq)] -#[serde(tag = "type", rename_all = "camelCase")] -pub enum FuncAssociations { - #[serde(rename_all = "camelCase")] - Action { - schema_variant_ids: Vec, - kind: Option, - }, - #[serde(rename_all = "camelCase")] - Attribute { - prototypes: Vec, - arguments: Vec, - }, - #[serde(rename_all = "camelCase")] - Authentication { - schema_variant_ids: Vec, - }, - #[serde(rename_all = "camelCase")] - CodeGeneration { - schema_variant_ids: Vec, - component_ids: Vec, - inputs: Vec, - }, - #[serde(rename_all = "camelCase")] - Qualification { - schema_variant_ids: Vec, - component_ids: Vec, - inputs: Vec, - }, - #[serde(rename_all = "camelCase")] - SchemaVariantDefinitions { - schema_variant_ids: Vec, - }, - #[serde(rename_all = "camelCase")] - Validation { - prototypes: Vec, - }, -} - -#[derive(Deserialize, Serialize, Debug, Clone, PartialEq, Eq)] -#[serde(rename_all = "camelCase")] -pub struct FuncArgumentView { - pub id: FuncArgumentId, - pub name: String, - pub kind: FuncArgumentKind, - pub element_kind: Option, -} - -async fn is_func_revertible(ctx: &DalContext, func: &Func) -> FuncResult { - // refetch to get updated visibility - let is_in_change_set = match Func::get_by_id(ctx, func.id()).await? 
{ - Some(func) => func.visibility().in_change_set(), - None => return Ok(false), - }; - // Clone a new ctx vith head visibility - let ctx = ctx.clone_with_head(); - let head_func = Func::get_by_id(&ctx, func.id()).await?; - - Ok(head_func.is_some() && is_in_change_set) -} - -async fn prototype_view_for_attribute_prototype( - ctx: &DalContext, - func_id: FuncId, - proto: &AttributePrototype, -) -> FuncResult { - let prop_id = if proto.context.prop_id().is_some() { - Some(proto.context.prop_id()) - } else { - None - }; - - let external_provider_id = if proto.context.external_provider_id().is_some() { - Some(proto.context.external_provider_id()) - } else { - None - }; - - if prop_id.is_none() && external_provider_id.is_none() { - return Err(FuncError::AttributePrototypeMissingPropIdOrExternalProviderId(*proto.id())); - } - - let component_id = if proto.context.component_id().is_some() { - Some(proto.context.component_id()) - } else { - None - }; - - let prototype_arguments = - FuncArgument::list_for_func_with_prototype_arguments(ctx, func_id, *proto.id()) - .await? - .iter() - .map( - |(func_arg, maybe_proto_arg)| AttributePrototypeArgumentView { - func_argument_id: *func_arg.id(), - func_argument_name: Some(func_arg.name().to_owned()), - id: maybe_proto_arg.as_ref().map(|proto_arg| *proto_arg.id()), - internal_provider_id: maybe_proto_arg - .as_ref() - .map(|proto_arg| proto_arg.internal_provider_id()), - }, - ) - .collect(); - - Ok(AttributePrototypeView { - id: *proto.id(), - prop_id, - component_id, - external_provider_id, - prototype_arguments, - }) -} - -async fn action_prototypes_into_schema_variants_and_components( - ctx: &DalContext, - func_id: FuncId, -) -> FuncResult<(Option, Vec)> { - let mut variant_ids = vec![]; - let mut action_kind: Option = None; - - for proto in ActionPrototype::find_for_func(ctx, func_id).await? 
{ - if let Some(action_kind) = &action_kind { - if action_kind != proto.kind() { - return Err(FuncError::ActionFuncMultipleKinds(func_id)); - } - } else { - action_kind = Some(*proto.kind()); - } - - if proto.schema_variant_id().is_some() { - variant_ids.push(proto.schema_variant_id()); - } - } - - if !variant_ids.is_empty() && action_kind.is_none() { - return Err(FuncError::ActionKindMissing(func_id)); - } - - Ok((action_kind, variant_ids)) -} - -async fn attribute_prototypes_into_schema_variants_and_components( - ctx: &DalContext, - func_id: FuncId, -) -> FuncResult<(Vec, Vec)> { - let schema_variants_components = - AttributePrototype::find_for_func_as_variant_and_component(ctx, func_id).await?; - - let mut schema_variant_ids = vec![]; - let mut component_ids = vec![]; - - for (schema_variant_id, component_id) in schema_variants_components { - if component_id == ComponentId::NONE { - schema_variant_ids.push(schema_variant_id); - } else { - component_ids.push(component_id); - } - } - - Ok((schema_variant_ids, component_ids)) -} - -pub async fn get_leaf_function_inputs( - ctx: &DalContext, - func_id: FuncId, -) -> FuncResult> { - Ok(FuncArgument::list_for_func(ctx, func_id) - .await? 
- .iter() - .filter_map(|arg| LeafInputLocation::maybe_from_arg_name(arg.name())) - .collect()) -} - -pub async fn get_func_view(ctx: &DalContext, func: &Func) -> FuncResult { - let arguments = FuncArgument::list_for_func(ctx, *func.id()).await?; - - let (associations, input_type) = match func.backend_kind() { - FuncBackendKind::JsAttribute => { - let (associations, input_type) = match func.backend_response_type() { - FuncBackendResponseType::CodeGeneration - | FuncBackendResponseType::Qualification => { - let (schema_variant_ids, component_ids) = - attribute_prototypes_into_schema_variants_and_components(ctx, *func.id()) - .await?; - - let inputs = get_leaf_function_inputs(ctx, *func.id()).await?; - let input_type = - compile_leaf_function_input_types(ctx, &schema_variant_ids, &inputs) - .await?; - - ( - Some(match func.backend_response_type() { - FuncBackendResponseType::CodeGeneration => { - FuncAssociations::CodeGeneration { - schema_variant_ids, - component_ids, - inputs, - } - } - - FuncBackendResponseType::Qualification => { - FuncAssociations::Qualification { - schema_variant_ids, - component_ids, - inputs: get_leaf_function_inputs(ctx, *func.id()).await?, - } - } - _ => unreachable!("the match above ensures this is unreachable"), - }), - input_type, - ) - } - _ => { - let protos = AttributePrototype::find_for_func(ctx, func.id()).await?; - - let mut prototypes = Vec::with_capacity(protos.len()); - for proto in &protos { - prototypes.push( - prototype_view_for_attribute_prototype(ctx, *func.id(), proto).await?, - ); - } - - let ts_types = compile_attribute_function_types(ctx, &prototypes).await?; - - ( - Some(FuncAssociations::Attribute { - prototypes, - arguments: arguments - .iter() - .map(|arg| FuncArgumentView { - id: *arg.id(), - name: arg.name().to_owned(), - kind: arg.kind().to_owned(), - element_kind: arg.element_kind().cloned(), - }) - .collect(), - }), - ts_types, - ) - } - }; - (associations, input_type) - } - FuncBackendKind::JsAction => { - let 
(kind, schema_variant_ids) = - action_prototypes_into_schema_variants_and_components(ctx, *func.id()).await?; - - let ts_types = compile_action_types(ctx, &schema_variant_ids).await?; - - let associations = Some(FuncAssociations::Action { - schema_variant_ids, - kind, - }); - - (associations, ts_types) - } - FuncBackendKind::JsReconciliation => { - return Err(FuncError::EditingReconciliationFuncsNotImplemented); - } - FuncBackendKind::JsValidation => { - let protos = ValidationPrototype::list_for_func(ctx, *func.id()).await?; - let input_type = compile_validation_types(ctx, &protos).await?; - - let associations = Some(FuncAssociations::Validation { - prototypes: protos - .iter() - .map(|proto| ValidationPrototypeView { - schema_variant_id: proto.context().schema_variant_id(), - prop_id: proto.context().prop_id(), - }) - .collect(), - }); - (associations, input_type) - } - FuncBackendKind::JsAuthentication => { - let schema_variant_ids = AuthenticationPrototype::find_for_func(ctx, *func.id()) - .await? 
- .iter() - .map(|p| p.schema_variant_id()) - .collect(); - - ( - Some(FuncAssociations::Authentication { schema_variant_ids }), - concat!( - "type Input = Record;\n", - "\n", - "declare namespace requestStorage {\n", - " function setEnv(key: string, value: any);\n", - " function setItem(key: string, value: any);\n", - " function deleteEnv(key: string);\n", - " function deleteItem(key: string);\n", - "}", - ) - .to_owned(), - ) - } - _ => (None, String::new()), - }; - - let is_revertible = is_func_revertible(ctx, func).await?; - let types = [ - compile_return_types(*func.backend_response_type(), *func.backend_kind()), - &input_type, - langjs_types(), - ] - .join("\n"); - - Ok(GetFuncResponse { - id: func.id().to_owned(), - variant: func.try_into()?, - display_name: func.display_name().map(Into::into), - name: func.name().to_owned(), - description: func.description().map(|d| d.to_owned()), - code: func.code_plaintext()?, - is_builtin: func.builtin(), - is_revertible, - associations, - types, - }) -} - -pub fn compile_return_types(ty: FuncBackendResponseType, kind: FuncBackendKind) -> &'static str { - if matches!(kind, FuncBackendKind::JsAttribute) - && !matches!( - ty, - FuncBackendResponseType::CodeGeneration | FuncBackendResponseType::Qualification - ) - { - return ""; // attribute functions have their output compiled dynamically - } - - match ty { - FuncBackendResponseType::Boolean => "type Output = boolean | null;", - FuncBackendResponseType::String => "type Output = string | null;", - FuncBackendResponseType::Integer => "type Output = number | null;", - FuncBackendResponseType::Qualification => { - "type Output { - result: 'success' | 'warning' | 'failure'; - message?: string | null; -}" - } - FuncBackendResponseType::CodeGeneration => { - "type Output { - format: string; - code: string; -}" - } - FuncBackendResponseType::Validation => { - "type Output { - valid: boolean; - message: string; -}" - } - FuncBackendResponseType::Reconciliation => { - "type Output { 
- updates: { [key: string]: unknown }; - actions: string[]; - message: string | null; -}" - } - FuncBackendResponseType::Action => { - "type Output { - status: 'ok' | 'warning' | 'error'; - payload?: { [key: string]: unknown } | null; - message?: string | null; -}" - } - FuncBackendResponseType::Json => "type Output = any;", - // Note: there is no ts function returning those - FuncBackendResponseType::Identity => "interface Output extends Input {}", - FuncBackendResponseType::Array => "type Output = any[];", - FuncBackendResponseType::Map => "type Output = Record;", - FuncBackendResponseType::Object => "type Output = any;", - FuncBackendResponseType::Unset => "type Output = undefined | null;", - FuncBackendResponseType::Void => "type Output = void;", - FuncBackendResponseType::SchemaVariantDefinition => concat!( - include_str!("./ts_types/asset_builder.d.ts"), - "\n", - "type Output = any;" - ), - } -} - -pub fn compile_return_types_2(ty: FuncBackendResponseType, kind: FuncBackendKind) -> &'static str { - if matches!(kind, FuncBackendKind::JsAttribute) - && !matches!( - ty, - FuncBackendResponseType::CodeGeneration | FuncBackendResponseType::Qualification - ) - { - return ""; // attribute functions have their output compiled dynamically - } - - match ty { - FuncBackendResponseType::Boolean => "type Output = boolean | null;", - FuncBackendResponseType::String => "type Output = string | null;", - FuncBackendResponseType::Integer => "type Output = number | null;", - FuncBackendResponseType::Qualification => { - "type Output { - result: 'success' | 'warning' | 'failure'; - message?: string | null; -}" - } - FuncBackendResponseType::CodeGeneration => { - "type Output { - format: string; - code: string; -}" - } - FuncBackendResponseType::Validation => { - "type Output { - valid: boolean; - message: string; -}" - } - FuncBackendResponseType::Reconciliation => { - "type Output { - updates: { [key: string]: unknown }; - actions: string[]; - message: string | null; -}" - } - 
FuncBackendResponseType::Action => { - "type Output { - status: 'ok' | 'warning' | 'error'; - payload?: { [key: string]: unknown } | null; - message?: string | null; -}" - } - FuncBackendResponseType::Json => "type Output = any;", - // Note: there is no ts function returning those - FuncBackendResponseType::Identity => "interface Output extends Input {}", - FuncBackendResponseType::Array => "type Output = any[];", - FuncBackendResponseType::Map => "type Output = Record;", - FuncBackendResponseType::Object => "type Output = any;", - FuncBackendResponseType::Unset => "type Output = undefined | null;", - FuncBackendResponseType::Void => "type Output = void;", - FuncBackendResponseType::SchemaVariantDefinition => concat!( - include_str!("./ts_types/asset_types_with_secrets.d.ts"), - "\n", - "type Output = any;" - ), - } -} - -async fn compile_validation_types( - ctx: &DalContext, - prototypes: &[ValidationPrototype], -) -> FuncResult { - let mut input_fields = Vec::new(); - for prototype in prototypes { - let prop = Prop::get_by_id(ctx, &prototype.context().prop_id()) - .await? 
- .ok_or(PropError::NotFound( - prototype.context().prop_id(), - *ctx.visibility(), - ))?; - let ts_type = prop.ts_type(ctx).await?; - input_fields.push(ts_type); - } - if input_fields.is_empty() { - Ok("type Input = never;".to_owned()) - } else { - let variants = input_fields.join(" | "); - let types = format!("type Input = {variants};"); - Ok(types) - } -} - -async fn get_per_variant_types_for_prop_path( - ctx: &DalContext, - variant_ids: &[SchemaVariantId], - path: &[&str], -) -> FuncResult { - let mut per_variant_types = vec![]; - - for variant_id in variant_ids { - let prop = SchemaVariant::find_prop_in_tree(ctx, *variant_id, path).await?; - let ts_type = prop.ts_type(ctx).await?; - - if !per_variant_types.contains(&ts_type) { - per_variant_types.push(ts_type); - } - } - - Ok(per_variant_types.join(" | ")) -} - -async fn compile_leaf_function_input_types( - ctx: &DalContext, - schema_variant_ids: &[SchemaVariantId], - inputs: &[LeafInputLocation], -) -> FuncResult { - let mut ts_type = "type Input = {\n".to_string(); - - for input_location in inputs { - let input_property = format!( - "{}?: {} | null;\n", - input_location.arg_name(), - get_per_variant_types_for_prop_path( - ctx, - schema_variant_ids, - &input_location.prop_path(), - ) - .await? - ); - ts_type.push_str(&input_property); - } - ts_type.push_str("};"); - - Ok(ts_type) -} - -async fn compile_attribute_function_types( - ctx: &DalContext, - prototype_views: &[AttributePrototypeView], -) -> FuncResult { - let mut input_ts_types = "type Input = {\n".to_string(); - - let mut output_ts_types = vec![]; - let mut argument_types = HashMap::new(); - for prototype_view in prototype_views { - for arg in &prototype_view.prototype_arguments { - if let Some(ip_id) = arg.internal_provider_id { - let ip = InternalProvider::get_by_id(ctx, &ip_id) - .await? 
- .ok_or(InternalProviderError::NotFound(ip_id))?; - - let ts_type = if ip.prop_id().is_none() { - "object".to_string() - } else { - Prop::get_by_id(ctx, ip.prop_id()) - .await? - .ok_or(PropError::NotFound( - *ip.prop_id(), - ctx.visibility().to_owned(), - ))? - .ts_type(ctx) - .await? - }; - - if !argument_types.contains_key(&arg.func_argument_name) { - argument_types.insert(arg.func_argument_name.clone(), vec![ts_type]); - } else if let Some(ts_types_for_arg) = - argument_types.get_mut(&arg.func_argument_name) - { - if !ts_types_for_arg.contains(&ts_type) { - ts_types_for_arg.push(ts_type) - } - } - } - - let output_type = if let Some(output_prop_id) = prototype_view.prop_id { - Prop::get_by_id(ctx, &output_prop_id) - .await? - .ok_or(PropError::NotFound( - output_prop_id, - ctx.visibility().to_owned(), - ))? - .ts_type(ctx) - .await? - } else { - "any".to_string() - }; - - if !output_ts_types.contains(&output_type) { - output_ts_types.push(output_type); - } - } - } - for (arg_name, ts_types) in argument_types.iter() { - input_ts_types.push_str( - format!( - "{}?: {} | null;\n", - arg_name.as_ref().unwrap_or(&"".to_string()).to_owned(), - ts_types.join(" | ") - ) - .as_str(), - ); - } - input_ts_types.push_str("};"); - - let output_ts = format!("type Output = {};", output_ts_types.join(" | ")); - - Ok(format!("{}\n{}", input_ts_types, output_ts)) -} - -// Note: ComponentKind::Credential is unused and the implementation is broken, so let's ignore it for now -async fn compile_action_types( - ctx: &DalContext, - variant_ids: &[SchemaVariantId], -) -> FuncResult { - let mut ts_types = vec![]; - for variant_id in variant_ids { - let prop = SchemaVariant::find_prop_in_tree(ctx, *variant_id, &["root"]).await?; - ts_types.push(prop.ts_type(ctx).await?); - } - - Ok(format!( - "type Input {{ - kind: 'standard'; - properties: {}; -}}", - ts_types.join(" | "), - )) -} - -// TODO: stop duplicating definition -// TODO: use execa types instead of any -// TODO: add os, fs and 
path types (possibly fetch but I think it comes with DOM) -fn langjs_types() -> &'static str { - "declare namespace YAML { - function stringify(obj: unknown): string; -} - - declare namespace zlib { - function gzip(inputstr: string, callback: any); - } - - declare namespace requestStorage { - function getEnv(key: string): string; - function getItem(key: string): any; - function getEnvKeys(): string[]; - function getKeys(): string[]; - } - - declare namespace siExec { - - interface WatchArgs { - cmd: string, - args?: readonly string[], - execaOptions?: Options, - retryMs?: number, - maxRetryCount?: number, - callback: (child: execa.ExecaReturnValue) => Promise, - } - - interface WatchResult { - result: SiExecResult, - failed?: 'deadlineExceeded' | 'commandFailed', - } - - type SiExecResult = ExecaReturnValue; - - async function waitUntilEnd(execaFile: string, execaArgs?: string[], execaOptions?: any): Promise; - async function watch(options: WatchArgs, deadlineCount?: number): Promise; -}" -} +// #[derive(Deserialize, Serialize, Debug, Clone, PartialEq, Eq)] +// #[serde(rename_all = "camelCase")] +// pub struct AttributePrototypeArgumentView { +// func_argument_id: FuncArgumentId, +// func_argument_name: Option, +// id: Option, +// internal_provider_id: Option, +// } + +// #[derive(Deserialize, Serialize, Debug, Clone, PartialEq, Eq)] +// #[serde(rename_all = "camelCase")] +// pub struct AttributePrototypeView { +// id: AttributePrototypeId, +// component_id: Option, +// prop_id: Option, +// external_provider_id: Option, +// prototype_arguments: Vec, +// } + +// impl AttributePrototypeView { +// pub fn to_attribute_context(&self) -> FuncResult { +// let mut builder = AttributeContextBuilder::new(); +// if let Some(component_id) = self.component_id { +// builder.set_component_id(component_id); +// } +// if let Some(prop_id) = self.prop_id { +// builder.set_prop_id(prop_id); +// } +// if let Some(external_provider_id) = self.external_provider_id { +// 
builder.set_external_provider_id(external_provider_id); +// } + +// Ok(builder.to_context()?) +// } +// } + +// #[derive(Deserialize, Serialize, Debug, Clone, PartialEq, Eq)] +// #[serde(rename_all = "camelCase")] +// pub struct ValidationPrototypeView { +// schema_variant_id: SchemaVariantId, +// prop_id: PropId, +// } + +// #[remain::sorted] +// #[derive(Deserialize, Serialize, Debug, Clone, PartialEq, Eq)] +// #[serde(tag = "type", rename_all = "camelCase")] +// pub enum FuncAssociations { +// #[serde(rename_all = "camelCase")] +// Action { +// schema_variant_ids: Vec, +// kind: Option, +// }, +// #[serde(rename_all = "camelCase")] +// Attribute { +// prototypes: Vec, +// arguments: Vec, +// }, +// #[serde(rename_all = "camelCase")] +// Authentication { +// schema_variant_ids: Vec, +// }, +// #[serde(rename_all = "camelCase")] +// CodeGeneration { +// schema_variant_ids: Vec, +// component_ids: Vec, +// inputs: Vec, +// }, +// #[serde(rename_all = "camelCase")] +// Qualification { +// schema_variant_ids: Vec, +// component_ids: Vec, +// inputs: Vec, +// }, +// #[serde(rename_all = "camelCase")] +// SchemaVariantDefinitions { +// schema_variant_ids: Vec, +// }, +// #[serde(rename_all = "camelCase")] +// Validation { +// prototypes: Vec, +// }, +// } + +// #[derive(Deserialize, Serialize, Debug, Clone, PartialEq, Eq)] +// #[serde(rename_all = "camelCase")] +// pub struct FuncArgumentView { +// pub id: FuncArgumentId, +// pub name: String, +// pub kind: FuncArgumentKind, +// pub element_kind: Option, +// } + +// async fn is_func_revertible(ctx: &DalContext, func: &Func) -> FuncResult { +// // refetch to get updated visibility +// let is_in_change_set = match Func::get_by_id(ctx, func.id()).await? 
{ +// Some(func) => func.visibility().in_change_set(), +// None => return Ok(false), +// }; +// // Clone a new ctx vith head visibility +// let ctx = ctx.clone_with_head(); +// let head_func = Func::get_by_id(&ctx, func.id()).await?; + +// Ok(head_func.is_some() && is_in_change_set) +// } + +// async fn prototype_view_for_attribute_prototype( +// ctx: &DalContext, +// func_id: FuncId, +// proto: &AttributePrototype, +// ) -> FuncResult { +// let prop_id = if proto.context.prop_id().is_some() { +// Some(proto.context.prop_id()) +// } else { +// None +// }; + +// let external_provider_id = if proto.context.external_provider_id().is_some() { +// Some(proto.context.external_provider_id()) +// } else { +// None +// }; + +// if prop_id.is_none() && external_provider_id.is_none() { +// return Err(FuncError::AttributePrototypeMissingPropIdOrExternalProviderId(*proto.id())); +// } + +// let component_id = if proto.context.component_id().is_some() { +// Some(proto.context.component_id()) +// } else { +// None +// }; + +// let prototype_arguments = +// FuncArgument::list_for_func_with_prototype_arguments(ctx, func_id, *proto.id()) +// .await? +// .iter() +// .map( +// |(func_arg, maybe_proto_arg)| AttributePrototypeArgumentView { +// func_argument_id: *func_arg.id(), +// func_argument_name: Some(func_arg.name().to_owned()), +// id: maybe_proto_arg.as_ref().map(|proto_arg| *proto_arg.id()), +// internal_provider_id: maybe_proto_arg +// .as_ref() +// .map(|proto_arg| proto_arg.internal_provider_id()), +// }, +// ) +// .collect(); + +// Ok(AttributePrototypeView { +// id: *proto.id(), +// prop_id, +// component_id, +// external_provider_id, +// prototype_arguments, +// }) +// } + +// async fn action_prototypes_into_schema_variants_and_components( +// ctx: &DalContext, +// func_id: FuncId, +// ) -> FuncResult<(Option, Vec)> { +// let mut variant_ids = vec![]; +// let mut action_kind: Option = None; + +// for proto in ActionPrototype::find_for_func(ctx, func_id).await? 
{ +// if let Some(action_kind) = &action_kind { +// if action_kind != proto.kind() { +// return Err(FuncError::ActionFuncMultipleKinds(func_id)); +// } +// } else { +// action_kind = Some(*proto.kind()); +// } + +// if proto.schema_variant_id().is_some() { +// variant_ids.push(proto.schema_variant_id()); +// } +// } + +// if !variant_ids.is_empty() && action_kind.is_none() { +// return Err(FuncError::ActionKindMissing(func_id)); +// } + +// Ok((action_kind, variant_ids)) +// } + +// async fn attribute_prototypes_into_schema_variants_and_components( +// ctx: &DalContext, +// func_id: FuncId, +// ) -> FuncResult<(Vec, Vec)> { +// let schema_variants_components = +// AttributePrototype::find_for_func_as_variant_and_component(ctx, func_id).await?; + +// let mut schema_variant_ids = vec![]; +// let mut component_ids = vec![]; + +// for (schema_variant_id, component_id) in schema_variants_components { +// if component_id == ComponentId::NONE { +// schema_variant_ids.push(schema_variant_id); +// } else { +// component_ids.push(component_id); +// } +// } + +// Ok((schema_variant_ids, component_ids)) +// } + +// pub async fn get_leaf_function_inputs( +// ctx: &DalContext, +// func_id: FuncId, +// ) -> FuncResult> { +// Ok(FuncArgument::list_for_func(ctx, func_id) +// .await? 
+// .iter() +// .filter_map(|arg| LeafInputLocation::maybe_from_arg_name(arg.name())) +// .collect()) +// } + +// pub async fn get_func_view(ctx: &DalContext, func: &Func) -> FuncResult { +// let arguments = FuncArgument::list_for_func(ctx, *func.id()).await?; + +// let (associations, input_type) = match func.backend_kind() { +// FuncBackendKind::JsAttribute => { +// let (associations, input_type) = match func.backend_response_type() { +// FuncBackendResponseType::CodeGeneration +// | FuncBackendResponseType::Qualification => { +// let (schema_variant_ids, component_ids) = +// attribute_prototypes_into_schema_variants_and_components(ctx, *func.id()) +// .await?; + +// let inputs = get_leaf_function_inputs(ctx, *func.id()).await?; +// let input_type = +// compile_leaf_function_input_types(ctx, &schema_variant_ids, &inputs) +// .await?; + +// ( +// Some(match func.backend_response_type() { +// FuncBackendResponseType::CodeGeneration => { +// FuncAssociations::CodeGeneration { +// schema_variant_ids, +// component_ids, +// inputs, +// } +// } + +// FuncBackendResponseType::Qualification => { +// FuncAssociations::Qualification { +// schema_variant_ids, +// component_ids, +// inputs: get_leaf_function_inputs(ctx, *func.id()).await?, +// } +// } +// _ => unreachable!("the match above ensures this is unreachable"), +// }), +// input_type, +// ) +// } +// _ => { +// let protos = AttributePrototype::find_for_func(ctx, func.id()).await?; + +// let mut prototypes = Vec::with_capacity(protos.len()); +// for proto in &protos { +// prototypes.push( +// prototype_view_for_attribute_prototype(ctx, *func.id(), proto).await?, +// ); +// } + +// let ts_types = compile_attribute_function_types(ctx, &prototypes).await?; + +// ( +// Some(FuncAssociations::Attribute { +// prototypes, +// arguments: arguments +// .iter() +// .map(|arg| FuncArgumentView { +// id: *arg.id(), +// name: arg.name().to_owned(), +// kind: arg.kind().to_owned(), +// element_kind: arg.element_kind().cloned(), +// 
}) +// .collect(), +// }), +// ts_types, +// ) +// } +// }; +// (associations, input_type) +// } +// FuncBackendKind::JsAction => { +// let (kind, schema_variant_ids) = +// action_prototypes_into_schema_variants_and_components(ctx, *func.id()).await?; + +// let ts_types = compile_action_types(ctx, &schema_variant_ids).await?; + +// let associations = Some(FuncAssociations::Action { +// schema_variant_ids, +// kind, +// }); + +// (associations, ts_types) +// } +// FuncBackendKind::JsReconciliation => { +// return Err(FuncError::EditingReconciliationFuncsNotImplemented); +// } +// FuncBackendKind::JsValidation => { +// let protos = ValidationPrototype::list_for_func(ctx, *func.id()).await?; +// let input_type = compile_validation_types(ctx, &protos).await?; + +// let associations = Some(FuncAssociations::Validation { +// prototypes: protos +// .iter() +// .map(|proto| ValidationPrototypeView { +// schema_variant_id: proto.context().schema_variant_id(), +// prop_id: proto.context().prop_id(), +// }) +// .collect(), +// }); +// (associations, input_type) +// } +// FuncBackendKind::JsAuthentication => { +// let schema_variant_ids = AuthenticationPrototype::find_for_func(ctx, *func.id()) +// .await? 
+// .iter() +// .map(|p| p.schema_variant_id()) +// .collect(); + +// ( +// Some(FuncAssociations::Authentication { schema_variant_ids }), +// concat!( +// "type Input = Record;\n", +// "\n", +// "declare namespace requestStorage {\n", +// " function setEnv(key: string, value: any);\n", +// " function setItem(key: string, value: any);\n", +// " function deleteEnv(key: string);\n", +// " function deleteItem(key: string);\n", +// "}", +// ) +// .to_owned(), +// ) +// } +// _ => (None, String::new()), +// }; + +// let is_revertible = is_func_revertible(ctx, func).await?; +// let types = [ +// compile_return_types(*func.backend_response_type(), *func.backend_kind()), +// &input_type, +// langjs_types(), +// ] +// .join("\n"); + +// Ok(GetFuncResponse { +// id: func.id().to_owned(), +// variant: func.try_into()?, +// display_name: func.display_name().map(Into::into), +// name: func.name().to_owned(), +// description: func.description().map(|d| d.to_owned()), +// code: func.code_plaintext()?, +// is_builtin: func.builtin(), +// is_revertible, +// associations, +// types, +// }) +// } + +// pub fn compile_return_types(ty: FuncBackendResponseType, kind: FuncBackendKind) -> &'static str { +// if matches!(kind, FuncBackendKind::JsAttribute) +// && !matches!( +// ty, +// FuncBackendResponseType::CodeGeneration | FuncBackendResponseType::Qualification +// ) +// { +// return ""; // attribute functions have their output compiled dynamically +// } + +// match ty { +// FuncBackendResponseType::Boolean => "type Output = boolean | null;", +// FuncBackendResponseType::String => "type Output = string | null;", +// FuncBackendResponseType::Integer => "type Output = number | null;", +// FuncBackendResponseType::Qualification => { +// "type Output { +// result: 'success' | 'warning' | 'failure'; +// message?: string | null; +// }" +// } +// FuncBackendResponseType::CodeGeneration => { +// "type Output { +// format: string; +// code: string; +// }" +// } +// 
FuncBackendResponseType::Validation => { +// "type Output { +// valid: boolean; +// message: string; +// }" +// } +// FuncBackendResponseType::Reconciliation => { +// "type Output { +// updates: { [key: string]: unknown }; +// actions: string[]; +// message: string | null; +// }" +// } +// FuncBackendResponseType::Action => { +// "type Output { +// status: 'ok' | 'warning' | 'error'; +// payload?: { [key: string]: unknown } | null; +// message?: string | null; +// }" +// } +// FuncBackendResponseType::Json => "type Output = any;", +// // Note: there is no ts function returning those +// FuncBackendResponseType::Identity => "interface Output extends Input {}", +// FuncBackendResponseType::Array => "type Output = any[];", +// FuncBackendResponseType::Map => "type Output = Record;", +// FuncBackendResponseType::Object => "type Output = any;", +// FuncBackendResponseType::Unset => "type Output = undefined | null;", +// FuncBackendResponseType::Void => "type Output = void;", +// FuncBackendResponseType::SchemaVariantDefinition => concat!( +// include_str!("./ts_types/asset_builder.d.ts"), +// "\n", +// "type Output = any;" +// ), +// } +// } + +// pub fn compile_return_types_2(ty: FuncBackendResponseType, kind: FuncBackendKind) -> &'static str { +// if matches!(kind, FuncBackendKind::JsAttribute) +// && !matches!( +// ty, +// FuncBackendResponseType::CodeGeneration | FuncBackendResponseType::Qualification +// ) +// { +// return ""; // attribute functions have their output compiled dynamically +// } + +// match ty { +// FuncBackendResponseType::Boolean => "type Output = boolean | null;", +// FuncBackendResponseType::String => "type Output = string | null;", +// FuncBackendResponseType::Integer => "type Output = number | null;", +// FuncBackendResponseType::Qualification => { +// "type Output { +// result: 'success' | 'warning' | 'failure'; +// message?: string | null; +// }" +// } +// FuncBackendResponseType::CodeGeneration => { +// "type Output { +// format: string; +// 
code: string; +// }" +// } +// FuncBackendResponseType::Validation => { +// "type Output { +// valid: boolean; +// message: string; +// }" +// } +// FuncBackendResponseType::Reconciliation => { +// "type Output { +// updates: { [key: string]: unknown }; +// actions: string[]; +// message: string | null; +// }" +// } +// FuncBackendResponseType::Action => { +// "type Output { +// status: 'ok' | 'warning' | 'error'; +// payload?: { [key: string]: unknown } | null; +// message?: string | null; +// }" +// } +// FuncBackendResponseType::Json => "type Output = any;", +// // Note: there is no ts function returning those +// FuncBackendResponseType::Identity => "interface Output extends Input {}", +// FuncBackendResponseType::Array => "type Output = any[];", +// FuncBackendResponseType::Map => "type Output = Record;", +// FuncBackendResponseType::Object => "type Output = any;", +// FuncBackendResponseType::Unset => "type Output = undefined | null;", +// FuncBackendResponseType::Void => "type Output = void;", +// FuncBackendResponseType::SchemaVariantDefinition => concat!( +// include_str!("./ts_types/asset_types_with_secrets.d.ts"), +// "\n", +// "type Output = any;" +// ), +// } +// } + +// async fn compile_validation_types( +// ctx: &DalContext, +// prototypes: &[ValidationPrototype], +// ) -> FuncResult { +// let mut input_fields = Vec::new(); +// for prototype in prototypes { +// let prop = Prop::get_by_id(ctx, &prototype.context().prop_id()) +// .await? 
+// .ok_or(PropError::NotFound( +// prototype.context().prop_id(), +// *ctx.visibility(), +// ))?; +// let ts_type = prop.ts_type(ctx).await?; +// input_fields.push(ts_type); +// } +// if input_fields.is_empty() { +// Ok("type Input = never;".to_owned()) +// } else { +// let variants = input_fields.join(" | "); +// let types = format!("type Input = {variants};"); +// Ok(types) +// } +// } + +// async fn get_per_variant_types_for_prop_path( +// ctx: &DalContext, +// variant_ids: &[SchemaVariantId], +// path: &[&str], +// ) -> FuncResult { +// let mut per_variant_types = vec![]; + +// for variant_id in variant_ids { +// let prop = SchemaVariant::find_prop_in_tree(ctx, *variant_id, path).await?; +// let ts_type = prop.ts_type(ctx).await?; + +// if !per_variant_types.contains(&ts_type) { +// per_variant_types.push(ts_type); +// } +// } + +// Ok(per_variant_types.join(" | ")) +// } + +// async fn compile_leaf_function_input_types( +// ctx: &DalContext, +// schema_variant_ids: &[SchemaVariantId], +// inputs: &[LeafInputLocation], +// ) -> FuncResult { +// let mut ts_type = "type Input = {\n".to_string(); + +// for input_location in inputs { +// let input_property = format!( +// "{}?: {} | null;\n", +// input_location.arg_name(), +// get_per_variant_types_for_prop_path( +// ctx, +// schema_variant_ids, +// &input_location.prop_path(), +// ) +// .await? +// ); +// ts_type.push_str(&input_property); +// } +// ts_type.push_str("};"); + +// Ok(ts_type) +// } + +// async fn compile_attribute_function_types( +// ctx: &DalContext, +// prototype_views: &[AttributePrototypeView], +// ) -> FuncResult { +// let mut input_ts_types = "type Input = {\n".to_string(); + +// let mut output_ts_types = vec![]; +// let mut argument_types = HashMap::new(); +// for prototype_view in prototype_views { +// for arg in &prototype_view.prototype_arguments { +// if let Some(ip_id) = arg.internal_provider_id { +// let ip = InternalProvider::get_by_id(ctx, &ip_id) +// .await? 
+// .ok_or(InternalProviderError::NotFound(ip_id))?; + +// let ts_type = if ip.prop_id().is_none() { +// "object".to_string() +// } else { +// Prop::get_by_id(ctx, ip.prop_id()) +// .await? +// .ok_or(PropError::NotFound( +// *ip.prop_id(), +// ctx.visibility().to_owned(), +// ))? +// .ts_type(ctx) +// .await? +// }; + +// if !argument_types.contains_key(&arg.func_argument_name) { +// argument_types.insert(arg.func_argument_name.clone(), vec![ts_type]); +// } else if let Some(ts_types_for_arg) = +// argument_types.get_mut(&arg.func_argument_name) +// { +// if !ts_types_for_arg.contains(&ts_type) { +// ts_types_for_arg.push(ts_type) +// } +// } +// } + +// let output_type = if let Some(output_prop_id) = prototype_view.prop_id { +// Prop::get_by_id(ctx, &output_prop_id) +// .await? +// .ok_or(PropError::NotFound( +// output_prop_id, +// ctx.visibility().to_owned(), +// ))? +// .ts_type(ctx) +// .await? +// } else { +// "any".to_string() +// }; + +// if !output_ts_types.contains(&output_type) { +// output_ts_types.push(output_type); +// } +// } +// } +// for (arg_name, ts_types) in argument_types.iter() { +// input_ts_types.push_str( +// format!( +// "{}?: {} | null;\n", +// arg_name.as_ref().unwrap_or(&"".to_string()).to_owned(), +// ts_types.join(" | ") +// ) +// .as_str(), +// ); +// } +// input_ts_types.push_str("};"); + +// let output_ts = format!("type Output = {};", output_ts_types.join(" | ")); + +// Ok(format!("{}\n{}", input_ts_types, output_ts)) +// } + +// // Note: ComponentKind::Credential is unused and the implementation is broken, so let's ignore it for now +// async fn compile_action_types( +// ctx: &DalContext, +// variant_ids: &[SchemaVariantId], +// ) -> FuncResult { +// let mut ts_types = vec![]; +// for variant_id in variant_ids { +// let prop = SchemaVariant::find_prop_in_tree(ctx, *variant_id, &["root"]).await?; +// ts_types.push(prop.ts_type(ctx).await?); +// } + +// Ok(format!( +// "type Input {{ +// kind: 'standard'; +// properties: {}; +// 
}}", +// ts_types.join(" | "), +// )) +// } + +// // TODO: stop duplicating definition +// // TODO: use execa types instead of any +// // TODO: add os, fs and path types (possibly fetch but I think it comes with DOM) +// fn langjs_types() -> &'static str { +// "declare namespace YAML { +// function stringify(obj: unknown): string; +// } + +// declare namespace zlib { +// function gzip(inputstr: string, callback: any); +// } + +// declare namespace requestStorage { +// function getEnv(key: string): string; +// function getItem(key: string): any; +// function getEnvKeys(): string[]; +// function getKeys(): string[]; +// } + +// declare namespace siExec { + +// interface WatchArgs { +// cmd: string, +// args?: readonly string[], +// execaOptions?: Options, +// retryMs?: number, +// maxRetryCount?: number, +// callback: (child: execa.ExecaReturnValue) => Promise, +// } + +// interface WatchResult { +// result: SiExecResult, +// failed?: 'deadlineExceeded' | 'commandFailed', +// } + +// type SiExecResult = ExecaReturnValue; + +// async function waitUntilEnd(execaFile: string, execaArgs?: string[], execaOptions?: any): Promise; +// async function watch(options: WatchArgs, deadlineCount?: number): Promise; +// }" +// } pub fn routes() -> Router { - Router::new() - .route("/list_funcs", get(list_funcs::list_funcs)) - .route("/get_func", get(get_func::get_func)) - .route( - "/get_func_last_execution", - get(get_func::get_latest_func_execution), - ) - .route("/create_func", post(create_func::create_func)) - .route("/save_func", post(save_func::save_func)) - .route("/delete_func", post(delete_func::delete_func)) - .route("/save_and_exec", post(save_and_exec::save_and_exec)) - .route("/execute", post(execute::execute)) - .route("/revert_func", post(revert_func::revert_func)) - .route( - "/list_input_sources", - get(list_input_sources::list_input_sources), - ) + Router::new().route("/list_funcs", get(list_funcs::list_funcs)) + // .route("/get_func", get(get_func::get_func)) + 
// .route( + // "/get_func_last_execution", + // get(get_func::get_latest_func_execution), + // ) + // .route("/create_func", post(create_func::create_func)) + // .route("/save_func", post(save_func::save_func)) + // .route("/delete_func", post(delete_func::delete_func)) + // .route("/save_and_exec", post(save_and_exec::save_and_exec)) + // .route("/execute", post(execute::execute)) + // .route("/revert_func", post(revert_func::revert_func)) + // .route( + // "/list_input_sources", + // get(list_input_sources::list_input_sources), + // ) } diff --git a/lib/sdf-server/src/server/service/func/list_funcs.rs b/lib/sdf-server/src/server/service/func/list_funcs.rs index aa45d75409..0da5ad0b55 100644 --- a/lib/sdf-server/src/server/service/func/list_funcs.rs +++ b/lib/sdf-server/src/server/service/func/list_funcs.rs @@ -35,36 +35,39 @@ pub async fn list_funcs( ) -> FuncResult> { let ctx = builder.build(request_ctx.build(request.visibility)).await?; - let try_func_views: Vec> = Func::find_by_attr_in( - &ctx, - "backend_kind", - &[ - &FuncBackendKind::JsAction.as_ref().to_string(), - &FuncBackendKind::JsAuthentication.as_ref().to_string(), - &FuncBackendKind::JsAttribute.as_ref().to_string(), - &FuncBackendKind::JsValidation.as_ref().to_string(), - ], - ) - .await? - .iter() - .filter(|f| !f.hidden()) - .map(|func| { - Ok(ListedFuncView { - id: func.id().to_owned(), - handler: func.handler().map(|handler| handler.to_owned()), - variant: func.try_into()?, - name: func.name().into(), - display_name: func.display_name().map(Into::into), - is_builtin: func.builtin(), + //ctx.workspace_snapshot()?.lock().await.dot(); + + let funcs = ctx + .workspace_snapshot()? 
+ .lock() + .await + .list_funcs(&ctx) + .await?; + + dbg!(&funcs); + + let try_func_views: Vec> = funcs + .iter() + .filter(|f| !f.hidden) + .map(|func| { + Ok(ListedFuncView { + id: func.id, + handler: func.handler.to_owned().map(|handler| handler.to_owned()), + variant: FuncVariant::Attribute, + name: func.name.to_owned(), + display_name: func.display_name.to_owned().map(Into::into), + is_builtin: func.builtin, + }) }) - }) - .collect(); + .collect(); + + dbg!(&try_func_views); let mut funcs = vec![]; for func_view in try_func_views { match func_view { Ok(func_view) => funcs.push(func_view), - Err(err) => Err(err)?, + Err(err) => {} } } diff --git a/lib/si-rabbitmq/src/config.rs b/lib/si-rabbitmq/src/config.rs new file mode 100644 index 0000000000..07856cb044 --- /dev/null +++ b/lib/si-rabbitmq/src/config.rs @@ -0,0 +1,73 @@ +use serde::{Deserialize, Serialize}; + +/// The configuration settings for the si-rabbitmq [`Environment`](`crate::Environment`) +#[derive(Clone, Debug, Serialize, Deserialize)] +pub struct Config { + host: String, + username: String, + password: String, + port: u16, + stream_prefix: Option, +} + +impl Default for Config { + fn default() -> Self { + Self { + host: "localhost".into(), + username: "guest".into(), + password: "guest".into(), + port: 5552, + stream_prefix: None, + } + } +} + +impl Config { + /// Create a new config for the rabbitmq [`Environment`](`crate::Environment`) + pub fn new( + host: String, + username: String, + password: String, + port: u16, + stream_prefix: Option, + ) -> Self { + Self { + host, + username, + password, + port, + stream_prefix, + } + } + + /// The hostname of the rabbitmq stream server we will connect to + pub fn host(&self) -> &str { + self.host.as_str() + } + + /// The rabbitmq username + pub fn username(&self) -> &str { + self.username.as_str() + } + + /// The rabbitmq password + pub fn password(&self) -> &str { + self.password.as_str() + } + + /// The port of the rabbitmq stream server we will 
connect to (usually 5552) + pub fn port(&self) -> u16 { + self.port + } + + /// The stream prefix to be used when creating, using and deleting rabbitmq streams + pub fn stream_prefix(&self) -> Option<&str> { + self.stream_prefix.as_deref() + } + + /// Set the stream prefix on the config + pub fn set_stream_prefix(&mut self, stream_prefix: impl Into) -> &mut Self { + self.stream_prefix = Some(stream_prefix.into()); + self + } +} diff --git a/lib/si-rabbitmq/src/consumer.rs b/lib/si-rabbitmq/src/consumer.rs index 56d92df29a..c4811299f4 100644 --- a/lib/si-rabbitmq/src/consumer.rs +++ b/lib/si-rabbitmq/src/consumer.rs @@ -1,9 +1,16 @@ -use futures::StreamExt; +use futures::{Stream, StreamExt, TryStreamExt}; +use rabbitmq_stream_client::error::ConsumerDeliveryError as UpstreamConsumerDeliveryError; +use rabbitmq_stream_client::types::Delivery as UpstreamDelivery; use rabbitmq_stream_client::types::OffsetSpecification; use rabbitmq_stream_client::{ Consumer as UpstreamConsumer, ConsumerHandle as UpstreamConsumerHandle, }; +use std::future::Future; +use std::iter::Map; +use std::pin::Pin; use telemetry::prelude::*; +use tokio::sync::watch; +use tokio::sync::watch::error::RecvError; use tokio::task; use crate::Delivery; @@ -49,6 +56,14 @@ impl Consumer { Ok(None) } + /// Converts the inner [`Consumer`] into a [`Stream`]. + pub async fn into_stream( + self, + ) -> RabbitResult>> + { + Ok(self.inner.into_stream()) + } + /// Provides a [`ConsumerHandle`]. pub fn handle(&self) -> ConsumerHandle { self.inner.handle() @@ -60,15 +75,15 @@ impl Consumer { } } -impl Drop for Consumer { - fn drop(&mut self) { - let handle = self.handle(); - - // Close the consumer associated to the handle provided. - task::spawn(async { - if let Err(e) = handle.close().await { - warn!("error when closing consumer on drop: {e}"); - } - }); - } -} +// impl Drop for Consumer { +// fn drop(&mut self) { +// let handle = self.handle(); +// +// // Close the consumer associated to the handle provided. 
+// task::spawn(async { +// if let Err(e) = handle.close().await { +// warn!("error when closing consumer on drop: {e}"); +// } +// }); +// } +// } diff --git a/lib/si-rabbitmq/src/environment.rs b/lib/si-rabbitmq/src/environment.rs index 8c1828c461..0291e090bf 100644 --- a/lib/si-rabbitmq/src/environment.rs +++ b/lib/si-rabbitmq/src/environment.rs @@ -2,7 +2,7 @@ use rabbitmq_stream_client::error::{StreamCreateError, StreamDeleteError}; use rabbitmq_stream_client::types::{ByteCapacity, ResponseCode}; use rabbitmq_stream_client::Environment as UpstreamEnvironment; -use crate::error::RabbitResult; +use crate::{config::Config, error::RabbitResult}; const STREAM_LENGTH_CAPACTIY_IN_MEGABYTES: u64 = 10; @@ -14,12 +14,12 @@ pub struct Environment { impl Environment { /// Creates a new [`Environment`], which contains a connection to a RabbitMQ node. - pub async fn new() -> RabbitResult { + pub async fn new(config: &Config) -> RabbitResult { let inner = UpstreamEnvironment::builder() - .host("localhost") - .username("guest") - .password("guest") - .port(5552) + .host(config.host()) + .username(config.username()) + .password(config.password()) + .port(config.port()) .build() .await?; Ok(Self { inner }) diff --git a/lib/si-rabbitmq/src/lib.rs b/lib/si-rabbitmq/src/lib.rs index 958d72ec05..d4190ea442 100644 --- a/lib/si-rabbitmq/src/lib.rs +++ b/lib/si-rabbitmq/src/lib.rs @@ -22,12 +22,14 @@ clippy::missing_panics_doc )] +mod config; mod consumer; mod delivery; mod environment; mod error; mod producer; +pub use config::Config; pub use consumer::Consumer; pub use consumer::ConsumerHandle; pub use consumer::ConsumerOffsetSpecification; @@ -45,7 +47,7 @@ mod tests { #[test] async fn round_trip() { - let environment = Environment::new() + let environment = Environment::new(&Config::default()) .await .expect("could not create environment"); diff --git a/lib/si-test-macros/src/dal_test.rs b/lib/si-test-macros/src/dal_test.rs index 139076dbb1..05d16d1a1a 100644 --- 
a/lib/si-test-macros/src/dal_test.rs +++ b/lib/si-test-macros/src/dal_test.rs @@ -29,6 +29,11 @@ pub(crate) fn expand(item: ItemFn, args: Args) -> TokenStream { fn fn_setup<'a>(params: impl Iterator) -> DalTestFnSetup { let mut expander = DalTestFnSetupExpander::new(); + expander.setup_start_veritech_server(); + expander.setup_start_pinga_server(); + expander.setup_start_council_server(); + expander.setup_start_rebaser_server(); + for param in params { match param { FnArg::Typed(pat_type) => match &*pat_type.ty { @@ -171,15 +176,15 @@ fn fn_setup<'a>(params: impl Iterator) -> DalTestFnSetup { } } - if expander.has_args() { - // TODO(fnichol): we can use a macro attribute to opt-out and not run a veritech server in - // the future, but for now (as before), every test starts with its own veritech server with - // a randomized subject prefix - expander.setup_start_veritech_server(); - expander.setup_start_pinga_server(); - expander.setup_start_council_server(); - expander.setup_start_rebaser_server(); - } + // if expander.has_args() { + // // TODO(fnichol): we can use a macro attribute to opt-out and not run a veritech server in + // // the future, but for now (as before), every test starts with its own veritech server with + // // a randomized subject prefix + // expander.setup_start_veritech_server(); + // expander.setup_start_pinga_server(); + // expander.setup_start_council_server(); + // expander.setup_start_rebaser_server(); + // } expander.finish() } diff --git a/lib/si-test-macros/src/expand.rs b/lib/si-test-macros/src/expand.rs index cfc865871d..cfb9e22f34 100644 --- a/lib/si-test-macros/src/expand.rs +++ b/lib/si-test-macros/src/expand.rs @@ -590,7 +590,7 @@ pub(crate) trait FnSetupExpander { .await .wrap_err("failed to build default dal ctx for dal_context_default")?; ctx.update_tenancy(::dal::Tenancy::new(*#nw.workspace.pk())); - ::dal_test::helpers::create_change_set_and_update_ctx(&mut ctx).await; + 
::dal_test::helpers::create_change_set_and_update_ctx(&mut ctx, #nw.workspace.default_change_set_id()).await; ctx.blocking_commit() .await .wrap_err("failed to commit create_change_set_and_update_ctx")?; @@ -624,7 +624,7 @@ pub(crate) trait FnSetupExpander { .await .wrap_err("failed to build default dal ctx for dal_context_default_mut")?; ctx.update_tenancy(::dal::Tenancy::new(*#nw.workspace.pk())); - ::dal_test::helpers::create_change_set_and_update_ctx(&mut ctx).await; + ::dal_test::helpers::create_change_set_and_update_ctx(&mut ctx, #nw.workspace.default_change_set_id()).await; ctx.blocking_commit() .await .wrap_err("failed to commit create_change_set_and_update_ctx_mut")?; From d5802fb34e3e5dedbfd4a753ad9b75d8ba833f26 Mon Sep 17 00:00:00 2001 From: Zachary Hamm Date: Wed, 8 Nov 2023 13:07:53 -0600 Subject: [PATCH 39/92] fix(sdf): fix list-funcs to restore categorization by backend kind Signed-off-by: Zachary Hamm Co-authored-by: Jacob Helwig Co-authored-by: Nick Gerace --- flake.nix | 1 + .../src/server/service/func/list_funcs.rs | 18 +++++++++++++++--- 2 files changed, 16 insertions(+), 3 deletions(-) diff --git a/flake.nix b/flake.nix index d0ded1371b..62f74e5c2b 100644 --- a/flake.nix +++ b/flake.nix @@ -69,6 +69,7 @@ libiconv darwin.apple_sdk.frameworks.Security darwin.apple_sdk.frameworks.SystemConfiguration + darwin.apple_sdk.frameworks.CoreFoundation ]; # This isn't an exact science, but confirmed the system interpreter by diff --git a/lib/sdf-server/src/server/service/func/list_funcs.rs b/lib/sdf-server/src/server/service/func/list_funcs.rs index 0da5ad0b55..085a6ad1ae 100644 --- a/lib/sdf-server/src/server/service/func/list_funcs.rs +++ b/lib/sdf-server/src/server/service/func/list_funcs.rs @@ -46,14 +46,26 @@ pub async fn list_funcs( dbg!(&funcs); + let customizable_backend_kinds = [ + FuncBackendKind::JsAction, + FuncBackendKind::JsAttribute, + FuncBackendKind::JsValidation, + ]; + let try_func_views: Vec> = funcs .iter() - .filter(|f| !f.hidden) + 
.filter(|f| { + if f.hidden { + return false; + } else { + return customizable_backend_kinds.contains(&f.backend_kind); + } + }) .map(|func| { Ok(ListedFuncView { id: func.id, handler: func.handler.to_owned().map(|handler| handler.to_owned()), - variant: FuncVariant::Attribute, + variant: func.try_into()?, name: func.name.to_owned(), display_name: func.display_name.to_owned().map(Into::into), is_builtin: func.builtin, @@ -67,7 +79,7 @@ pub async fn list_funcs( for func_view in try_func_views { match func_view { Ok(func_view) => funcs.push(func_view), - Err(err) => {} + Err(err) => Err(err)?, } } From d908032e248dd72c3e0746439301908fbe8368d1 Mon Sep 17 00:00:00 2001 From: Zachary Hamm Date: Wed, 8 Nov 2023 16:11:45 -0600 Subject: [PATCH 40/92] Add fully-featured assets to the new engine and UI - Add FuncBackendKind to the func node weight and ensure the node weight is updated when the func is modified Signed-off-by: Zachary Hamm Co-authored-by: Jacob Helwig Co-authored-by: Nick Gerace WIP: 26 - Add get_bulk method to ContentStore trait and implement it for local and pg concrete impls. - Use get_bulk in list_funcs - Add array-impls feature to tokio-postgres crate to support get_bulk Next steps: - Func Arguments? Signed-off-by: Zachary Hamm Co-authored-by: Jacob Helwig Co-authored-by: Nick Gerace WIP: no cbor in content-store pg WIP: don't migrate on every content store request WIP: partially restore get_func route WIP: round ?? - Replace si_cbor with postcard for both workspace snapshots and content store - Remove serde json identifier ("version") for FuncContent and serde "flatten" for Timestamp on FuncContent to make postcard happy Signed-off-by: Nick Gerace Co-authored-by: Jacob Helwig Co-authored-by: Zachary Hamm WIP: restore func arguments Next steps: rework API for func so that import does not need to lock the workspace for the entire import (and then we can import func arguments!) 
Signed-off-by: Zachary Hamm Co-authored-by: Nick Gerace WIP: move towards using previous Dal API interface for func WIP: import and list function arguments Next steps: create/save funcs? import schema variants WIP: import schemas (hopefully) - Import Schemas - Integrate SchemaUiMenus into Schemas - This is temporary until the engine switchover is done as we may want metadata node(s) in the graph to ultimately take their place - Migrate Schema from the "api" module back to its original home Next steps: - Import SchemaVariants (and SchemaVariantDefinitions?) - Migrate SchemaVariant and relevant parts from the "api" module back to their original homes Signed-off-by: Nick Gerace Co-authored-by: Jacob Helwig WIP: list change set pointers, create new change set Add status and tenancy to change set pointers, list open change sets now lists change set pointers instead of change sets. Create change set creates a change set pointer that is "based" on the default change set for the workspace and points to the workspace snapshot pointed to by that change set. Next steps: save_func WIP: save_func route partial restoration Restores the ability to modify a function Fixes the ReplaceSubgraph portion of the rebaser to: 1. Import the subgraph matching the equivalent node 2. return the new subgraph node index as the new subgraph root so that we replace references to the old subgraph with references to it Next step: create_func WIP: restore create_func Next steps: - Complete import of schema variants - Discuss write slowness and design of rebaser WIP: save changed / added func args for attr funcs Next step: implement removal/deletion of func args (our first deletion in the graph!)
WIP: convert schema variant subsystem to old access patterns - From the creation of schema variants all the way down, move all objects from the central api pattern to the old "active record style" access patterns Next steps: - Use the work performed in this commit to import schema variants Signed-off-by: Nick Gerace Co-authored-by: Jacob Helwig Co-authored-by: Zachary Hamm WIP: fixing the graph conflict tests and the rebaser WIP: use <= and >= comparisons for vector clock check WIP: fix all graph tests - The content hash check added before descending into the container check caused a double ReplaceSubgraph update to be added to the updates list. We fix this by only adding the ReplaceSubgraph once, in the content hash check. - The content hash check caused a NodeContent conflict to be added if two ordering nodes were being compared. But the conflict that matters for ordering nodes is the ChildOrder conflict. The nodes could still be different even if ChildOrder is not conflicting, if there is a new edge added to the end of the ordering but the rest of the edges are in the same order. - A test was added for the case where we update a node content and update its children, and send it to the rebaser. This test is failing and points to a bug in the rebaser. WIP: fix replace subgraph in the rebaser to handle updated node content WIP: import schema variants and significantly improve graph perf Primary: - Import schema variants with the RootProp tree and the essentials (includes implicit InternalProviders, default AttributePrototypes, Sockets, etc.) 
- Derive the "color" from the - Restore relevant schema and diagram routes - While "list_schema_variants" has been fully restored, routes like "create_schema" and "get_diagram" have been partially restored with placeholders - Remove reliance on NodeAddMenu - Can be fully removed from the codebase as we get closer to completing the switchover - Likely, SchemaVariants will eventually provide all this information from their Prop tree (i.e. fields under the "/root/si" tree) Bug Fixes and Performance Improvements: - Significantly improve graph queries related to indices, IDs and lineage IDs (e.g. "get_node_index_by_id" now runs in constant time (O(1)) rather than linear time (O(n))) - This should translate to significantly improved performance for _all_ usages of the graph - This was achieved via sidecar hashmaps as well as through query improvements (e.g. "replace_references" using smaller DFS post order search process via a work queue) - Require "add_edge" users to provide IDs to ensure that indices are not out of date - This fixes issues related to users of Funcs being the unintended parents (e.g. Funcs not being used by the FuncCategory node) Secondary: - Import the "DOCKER_IMAGE" builtin for integration tests - This is loosely tested in the existing "builtins" test by ensuring that Schema::list and SchemaVariant::list_for_schema funcs do not return empty vecs Signed-off-by: Nick Gerace WIP: fix unbalanced div in WorkspaceCustomizeAssets WIP: add AttributePrototypeArgument nodes with static values AttributePrototypeArgument (APA) nodes are a hop from AttributePrototypes that provide the following edges: 1. A PrototypeArgument edge from the AttributePrototype to the APA. 2. A Use edge from the APA to the FuncArgument that this APA provides the value for. 3. A PrototypeArgumentValue edge from the APA to the source of the value for this specific APA. PrototypeArgumentValue edge weights take a PrototypeArgumentValueKind enum which discriminates the type of value.
Currently that is only: - InternalProvider for arguments that take their value from an internal provider. - StaticValue, for arguments that take a static value (for constant inputs to a function, providing default values) WIP: implement prop_find_prop_id_by_path and begin set_color Also removes the kind data from the PrototypeArgumentValue edge weight. Signed-off-by: Zachary Hamm Co-authored-by: Jacob Helwig Co-authored-by: Nick Gerace WIP: implement set_color for SchemaVariant Exercises the StaticArgumentValue edge on AttributePrototypeArgument. Remove some async functions that are in fact sync. Signed-off-by: Zachary Hamm Co-authored-by: Jacob Helwig Co-authored-by: Nick Gerace WIP: start restoring components and add key to prototype edge - Start restoring components - Add key to prototype edge for maps and arrays Next steps: - Start "Component::new" Signed-off-by: Nick Gerace Co-authored-by: Jacob Helwig WIP: impl and use set_color for SchemaVariant Note: postcard won't deserialize into a serde_json::Value so we're serializing to string first since we need StaticArgumentValue to be completely agnostic of its type. Begins a process of changing workspace graph interfaces to accept `impl Into` instead of `Ulid` so we don't have to litter the code with `into()` calls. WIP: remove the workspace_snapshot/api module - Bring over component, node and attribute value api functions - Comment out functions that are not yet necessary Next steps: - Create attribute values in "Component::new" Signed-off-by: Nick Gerace WIP: create attribute values for input and output sockets - Create attribute values for input and output sockets (more specifically, for their corresponding providers) when creating a component Next steps: - Create attribute values for all props including and underneath the RootProp, stopping at arrays and maps Signed-off-by: Nick Gerace WIP: import prop trees Continues refactoring some workspace_snapshot methods to accept impl Into.
Adds #[inline(always)] speculatively to get_node_index_by_id. This genuinely appeared to speed up the imports for me (by about 10 seconds). WIP: import sockets Next: attribute functions/leaf functions, then action prototypes WIP: move performing updates to the graph library This change helps test rebasing internals apart from the rebaser service. - Move performing updates to the graph library - Move updating the pointer outside of performing updates Signed-off-by: Nick Gerace Co-authored-by: Jacob Helwig WIP: import leaf functions WIP: only check for cycles in add_edge in tests WIP: do not process duplicates for category nodes - Do not process duplicate "to rebase indices" for category nodes - We use a HashSet instead of a Vec to accomplish this - Move all tests to a submodule for graph - Add test to simulate rebase - Don't take in a mutable "onto" graph (leftover from when "perform_updates" was in the rebaser and it needed to be mutable for the snapshot to have a working copy) Signed-off-by: Nick Gerace Co-authored-by: Jacob Helwig WIP: impl set_default_value for all props WIP: obliterate compiler and clippy warnings - Obliterate compiler and clippy warnings - Allow dead code where it will likely be reused in the near future - This could backfire a bit, but it should be fine Signed-off-by: Nick Gerace WIP: WidgetOptions deserializable, finalize before default val set WIP: replace sockets Signed-off-by: Nick Gerace WIP: restore component creation Primary: - Ensure that importing subgraphs includes checking if nodes exist in self before performing the copy - Before this change, a Func that was used (eventually) by a SchemaVariant would be needlessly copied and duplicate Funcs would exist on the same graph...
causing all kinds of accessor errors - Restore component creation in the UI - Create them using the SchemaVariantId and not SchemaId - Restore components in diagram assembly (edges will still be empty) - Restore setting component position - Destroy Node and NodeAddMenu - Components now track their own position Category Nodes: - Step towards ensuring that category nodes are not assumed to be used by the root node - They should be able to be used by any/most nodes so long as two categories of the same kind are not used by the same source - Disable tracking category nodes with different lineage when detecting conflicts and updates - This needs more thought and is outside the scope of this commit Misc: - Add startup logging for the rebaser to indicate when it is actively consuming on the management stream - Extend "simulate_rebase" test to check for potential drift during cleanup - Ensure the UI looks more like the diagram refactor on main while getting things ready for component creation - Delete unused integration tests for the switchover Signed-off-by: Nick Gerace Co-authored-by: Theo Ephraim Co-authored-by: Jacob Helwig WIP: import most attribute functions WIP: remove category from schema - Remove category from schema since they are on schema variants in the new engine - Address clippy lints Signed-off-by: Nick Gerace WIP: attr funcs for map key props WIP: create attribute values for prop tree - Create attribute values for prop tree - Beautify tiny dot with colors - Remove unused content addresses Signed-off-by: Nick Gerace --- app/web/src/api/sdf/dal/change_set.ts | 1 - app/web/src/api/sdf/dal/diagram.ts | 17 +- app/web/src/components/AssetPalette.vue | 98 +- app/web/src/components/ChangeSetPanel.vue | 2 +- .../ModelingDiagram/ModelingDiagram.vue | 30 +- .../layout/navbar/ChangeSetPanel.vue | 5 +- app/web/src/store/change_sets.store.ts | 15 +- app/web/src/store/components.store.ts | 86 +- bin/sdf/src/main.rs | 4 +- lib/content-store/BUCK | 1 + 
lib/content-store/Cargo.toml | 1 + lib/content-store/src/pair.rs | 40 +- lib/content-store/src/store.rs | 12 +- lib/content-store/src/store/local.rs | 24 + lib/content-store/src/store/pg.rs | 48 +- .../pg/migrations/U0001__content_pairs.sql | 2 +- lib/dal-test/src/helpers.rs | 5 +- lib/dal-test/src/lib.rs | 2 + lib/dal/BUCK | 1 + lib/dal/Cargo.toml | 1 + lib/dal/src/action_prototype.rs | 19 +- lib/dal/src/attribute/prototype.rs | 197 +- lib/dal/src/attribute/prototype/argument.rs | 293 +- .../prototype/argument/static_value.rs | 99 + lib/dal/src/attribute/value.rs | 935 ++- lib/dal/src/builtins.rs | 6 +- lib/dal/src/builtins/func.rs | 9 +- lib/dal/src/builtins/schema.rs | 13 +- lib/dal/src/change_set_pointer.rs | 161 +- lib/dal/src/change_status.rs | 299 +- lib/dal/src/component.rs | 514 +- lib/dal/src/component/resource.rs | 381 +- lib/dal/src/component/status.rs | 6 - lib/dal/src/context.rs | 165 +- lib/dal/src/diagram.rs | 369 +- lib/dal/src/diagram/node.rs | 654 +- lib/dal/src/func.rs | 443 +- lib/dal/src/func/argument.rs | 445 +- lib/dal/src/func/backend.rs | 10 +- lib/dal/src/func/intrinsics.rs | 70 +- lib/dal/src/history_event.rs | 35 +- lib/dal/src/installed_pkg/asset.rs | 361 +- lib/dal/src/lib.rs | 58 +- .../migrations/U3001__change_set_pointers.sql | 5 +- lib/dal/src/node.rs | 141 +- lib/dal/src/pkg.rs | 175 +- lib/dal/src/pkg/import.rs | 2830 ++++---- lib/dal/src/prop.rs | 540 +- lib/dal/src/property_editor/schema.rs | 2 +- lib/dal/src/provider.rs | 65 + lib/dal/src/provider/external.rs | 169 +- lib/dal/src/provider/internal.rs | 366 +- lib/dal/src/schema.rs | 216 +- lib/dal/src/schema/ui_menu.rs | 5 +- lib/dal/src/schema/variant.rs | 984 ++- lib/dal/src/schema/variant/definition.rs | 1016 ++- lib/dal/src/schema/variant/leaves.rs | 273 +- lib/dal/src/schema/variant/root_prop.rs | 820 +-- lib/dal/src/socket.rs | 976 +-- lib/dal/src/validation/prototype.rs | 111 +- lib/dal/src/visibility.rs | 2 +- lib/dal/src/workspace.rs | 19 +- 
lib/dal/src/workspace_snapshot.rs | 243 +- lib/dal/src/workspace_snapshot/api.rs | 118 - .../src/workspace_snapshot/api/attribute.rs | 2 - .../api/attribute/prototype.rs | 95 - .../workspace_snapshot/api/attribute/value.rs | 810 --- .../src/workspace_snapshot/api/component.rs | 69 - lib/dal/src/workspace_snapshot/api/func.rs | 184 - lib/dal/src/workspace_snapshot/api/node.rs | 111 - lib/dal/src/workspace_snapshot/api/prop.rs | 154 - .../src/workspace_snapshot/api/provider.rs | 2 - .../api/provider/external.rs | 82 - .../api/provider/internal.rs | 125 - lib/dal/src/workspace_snapshot/api/schema.rs | 81 - .../workspace_snapshot/api/schema/variant.rs | 325 - .../api/schema/variant/root_prop.rs | 588 -- lib/dal/src/workspace_snapshot/api/socket.rs | 77 - .../src/workspace_snapshot/api/validation.rs | 1 - .../api/validation/prototype.rs | 71 - .../src/workspace_snapshot/content_address.rs | 13 +- lib/dal/src/workspace_snapshot/edge_weight.rs | 15 +- lib/dal/src/workspace_snapshot/graph.rs | 5791 ++--------------- lib/dal/src/workspace_snapshot/graph/tests.rs | 5447 ++++++++++++++++ .../workspace_snapshot/graph/tests/rebase.rs | 180 + lib/dal/src/workspace_snapshot/node_weight.rs | 65 +- .../node_weight/content_node_weight.rs | 8 +- .../node_weight/func_node_weight.rs | 105 +- .../node_weight/prop_node_weight.rs | 85 +- lib/dal/src/ws_event.rs | 15 +- .../mostly_everything_is_a_node_or_an_edge.rs | 8 +- .../builtins.rs | 35 +- .../change_set.rs | 13 - .../component.rs | 68 + .../content_store.rs | 25 - .../rebaser.rs | 664 +- .../sdf_mock.rs | 99 + .../src/routes/promote_builtin_route.rs | 4 +- .../src/routes/reject_module_route.rs | 4 +- .../src/routes/upsert_module_route.rs | 4 +- lib/pinga-server/src/server.rs | 1 + lib/rebaser-client/src/client.rs | 15 +- lib/rebaser-server/src/server.rs | 1 + .../src/server/change_set_loop.rs | 161 +- .../src/server/management_loop.rs | 10 +- lib/sdf-server/src/server/routes.rs | 18 +- lib/sdf-server/src/server/server.rs | 51 +- 
lib/sdf-server/src/server/service.rs | 2 +- .../src/server/service/change_set.rs | 40 +- .../service/change_set/create_change_set.rs | 13 +- .../change_set/list_open_change_sets.rs | 16 +- lib/sdf-server/src/server/service/diagram.rs | 55 +- .../service/diagram/create_component.rs | 196 + .../src/server/service/diagram/create_node.rs | 206 - .../src/server/service/diagram/get_diagram.rs | 5 +- .../service/diagram/list_schema_variants.rs | 167 +- .../service/diagram/set_component_position.rs | 98 + .../service/diagram/set_node_position.rs | 96 - lib/sdf-server/src/server/service/func.rs | 568 +- .../src/server/service/func/create_func.rs | 349 +- .../src/server/service/func/get_func.rs | 66 +- .../src/server/service/func/list_funcs.rs | 31 +- .../src/server/service/func/save_func.rs | 1239 ++-- .../server/service/schema/create_schema.rs | 10 +- .../src/server/service/schema/get_schema.rs | 8 +- .../src/server/service/schema/list_schemas.rs | 2 +- lib/sdf-server/tests/api.rs | 1 + lib/si-rabbitmq/src/consumer.rs | 7 - lib/si-test-macros/src/dal_test.rs | 1 + 129 files changed, 18044 insertions(+), 15827 deletions(-) create mode 100644 lib/dal/src/attribute/prototype/argument/static_value.rs delete mode 100644 lib/dal/src/workspace_snapshot/api.rs delete mode 100644 lib/dal/src/workspace_snapshot/api/attribute.rs delete mode 100644 lib/dal/src/workspace_snapshot/api/attribute/prototype.rs delete mode 100644 lib/dal/src/workspace_snapshot/api/attribute/value.rs delete mode 100644 lib/dal/src/workspace_snapshot/api/component.rs delete mode 100644 lib/dal/src/workspace_snapshot/api/func.rs delete mode 100644 lib/dal/src/workspace_snapshot/api/node.rs delete mode 100644 lib/dal/src/workspace_snapshot/api/prop.rs delete mode 100644 lib/dal/src/workspace_snapshot/api/provider.rs delete mode 100644 lib/dal/src/workspace_snapshot/api/provider/external.rs delete mode 100644 lib/dal/src/workspace_snapshot/api/provider/internal.rs delete mode 100644 
lib/dal/src/workspace_snapshot/api/schema.rs delete mode 100644 lib/dal/src/workspace_snapshot/api/schema/variant.rs delete mode 100644 lib/dal/src/workspace_snapshot/api/schema/variant/root_prop.rs delete mode 100644 lib/dal/src/workspace_snapshot/api/socket.rs delete mode 100644 lib/dal/src/workspace_snapshot/api/validation.rs delete mode 100644 lib/dal/src/workspace_snapshot/api/validation/prototype.rs create mode 100644 lib/dal/src/workspace_snapshot/graph/tests.rs create mode 100644 lib/dal/src/workspace_snapshot/graph/tests/rebase.rs delete mode 100644 lib/dal/tests/integration_test/internal/mostly_everything_is_a_node_or_an_edge/change_set.rs create mode 100644 lib/dal/tests/integration_test/internal/mostly_everything_is_a_node_or_an_edge/component.rs delete mode 100644 lib/dal/tests/integration_test/internal/mostly_everything_is_a_node_or_an_edge/content_store.rs create mode 100644 lib/dal/tests/integration_test/internal/mostly_everything_is_a_node_or_an_edge/sdf_mock.rs create mode 100644 lib/sdf-server/src/server/service/diagram/create_component.rs delete mode 100644 lib/sdf-server/src/server/service/diagram/create_node.rs create mode 100644 lib/sdf-server/src/server/service/diagram/set_component_position.rs delete mode 100644 lib/sdf-server/src/server/service/diagram/set_node_position.rs diff --git a/app/web/src/api/sdf/dal/change_set.ts b/app/web/src/api/sdf/dal/change_set.ts index dc9e2ca427..b35beb9d12 100644 --- a/app/web/src/api/sdf/dal/change_set.ts +++ b/app/web/src/api/sdf/dal/change_set.ts @@ -14,7 +14,6 @@ export enum ChangeSetStatus { export type ChangeSetId = string; export interface ChangeSet { id: ChangeSetId; - pk: ChangeSetId; name: string; actions: ActionInstance[]; status: ChangeSetStatus; diff --git a/app/web/src/api/sdf/dal/diagram.ts b/app/web/src/api/sdf/dal/diagram.ts index 1a7850dd97..d0ac52224b 100644 --- a/app/web/src/api/sdf/dal/diagram.ts +++ b/app/web/src/api/sdf/dal/diagram.ts @@ -2,30 +2,14 @@ import * as _ from 
"lodash-es"; export type DiagramKind = "configuration"; -export type DiagramProviderMetadata = string; - -export interface DiagramOutputProvider { - id: string; - ty: DiagramProviderMetadata; -} - export interface DiagramOutputSocket { id: string; name: string; - diagramKind: DiagramKind; - provider: DiagramOutputProvider; -} - -export interface DiagramInputProvider { - id: string; - ty: DiagramProviderMetadata; } export interface DiagramInputSocket { id: string; name: string; - diagramKind: DiagramKind; - provider: DiagramInputProvider; } export interface DiagramSchemaVariant { @@ -35,6 +19,7 @@ export interface DiagramSchemaVariant { schemaName: string; schemaId: string; color: string; + category: string; inputSockets: DiagramInputSocket[]; outputSockets: DiagramOutputSocket[]; } diff --git a/app/web/src/components/AssetPalette.vue b/app/web/src/components/AssetPalette.vue index 018eaab54f..7b32e128f6 100644 --- a/app/web/src/components/AssetPalette.vue +++ b/app/web/src/components/AssetPalette.vue @@ -1,6 +1,6 @@