diff --git a/.cargo/config.toml b/.cargo/config.toml index a2e6bac554..19338323e5 100644 --- a/.cargo/config.toml +++ b/.cargo/config.toml @@ -1,3 +1,6 @@ +[build] +rustflags = [ "--cfg", "tokio_unstable"] + [target.x86_64-unknown-linux-gnu] rustflags = [ "-C", "link-arg=-fuse-ld=lld", diff --git a/.ci/docker-compose.test-integration.yml b/.ci/docker-compose.test-integration.yml index 4dc468e80c..766b904cd4 100644 --- a/.ci/docker-compose.test-integration.yml +++ b/.ci/docker-compose.test-integration.yml @@ -29,7 +29,7 @@ services: - "PGPASSWORD=bugbear" - "POSTGRES_USER=si_test" - "POSTGRES_DB=si_test" - - "POSTGRES_MULTIPLE_DBS=si_test_dal,si_test_sdf_server" + - "POSTGRES_MULTIPLE_DBS=si_test_content_store,si_test_dal,si_test_sdf_server" command: - "-c" - "fsync=off" diff --git a/Cargo.lock b/Cargo.lock index 5990de4603..20771a4023 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -19,9 +19,9 @@ checksum = "f26201604c87b1e01bd3d98f8d5d9a8fcbb815e8cedb41ffccbeb4bf593a35fe" [[package]] name = "ahash" -version = "0.7.7" +version = "0.7.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5a824f2aa7e75a0c98c5a504fceb80649e9c35265d44525b5f94de4771a395cd" +checksum = "891477e0c6a8957309ee5c45a6368af3ae14bb510732d2684ffa19af310920f9" dependencies = [ "getrandom 0.2.12", "once_cell", @@ -30,9 +30,9 @@ dependencies = [ [[package]] name = "ahash" -version = "0.8.7" +version = "0.8.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "77c3a9648d43b9cd48db467b3f87fdd6e146bcc88ab0180006cef2179fe11d01" +checksum = "42cd52102d3df161c77a887b608d7a4897d7cc112886a9537b738a887a03aaff" dependencies = [ "cfg-if", "getrandom 0.2.12", @@ -214,7 +214,7 @@ checksum = "5fd55a5ba1179988837d24ab4c7cc8ed6efdeff578ede0416b4225a5fca35bd0" dependencies = [ "proc-macro2", "quote", - "syn 2.0.48", + "syn 2.0.49", ] [[package]] @@ -236,7 +236,7 @@ checksum = "16e62a023e7c117e27523144c5d2459f4397fcc3cab0085af8e2224f643a0193" dependencies = [ 
"proc-macro2", "quote", - "syn 2.0.48", + "syn 2.0.49", ] [[package]] @@ -247,7 +247,7 @@ checksum = "c980ee35e870bd1a4d2c8294d4c04d0499e67bca1e4b5cefcc693c2fa00caea9" dependencies = [ "proc-macro2", "quote", - "syn 2.0.48", + "syn 2.0.49", ] [[package]] @@ -272,6 +272,15 @@ dependencies = [ "num-traits", ] +[[package]] +name = "atomic-polyfill" +version = "1.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8cf2bce30dfe09ef0bfaef228b9d414faaf7e563035494d7fe092dba54b300f4" +dependencies = [ + "critical-section", +] + [[package]] name = "atomic-write-file" version = "0.1.2" @@ -412,7 +421,7 @@ dependencies = [ "heck", "proc-macro2", "quote", - "syn 2.0.48", + "syn 2.0.49", ] [[package]] @@ -601,7 +610,7 @@ dependencies = [ "proc-macro-crate", "proc-macro2", "quote", - "syn 2.0.48", + "syn 2.0.49", "syn_derive", ] @@ -646,9 +655,9 @@ dependencies = [ [[package]] name = "bumpalo" -version = "3.14.0" +version = "3.15.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7f30e7476521f6f8af1a1c4c0b8cc94f0bee37d91763d0ca2665f299b6cd8aec" +checksum = "d32a994c2b3ca201d9b263612a374263f05e7adde37c4707f693dcd375076d1f" [[package]] name = "bytecheck" @@ -729,9 +738,9 @@ checksum = "fd16c4719339c4530435d38e511904438d07cce7950afa3718a84ac36c10e89e" [[package]] name = "chrono" -version = "0.4.33" +version = "0.4.34" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9f13690e35a5e4ace198e7beea2895d29f3a9cc55015fcebe6336bd2010af9eb" +checksum = "5bc015644b92d5890fab7489e49d21f879d5c990186827d42ec511919404f38b" dependencies = [ "android-tzdata", "iana-time-zone", @@ -771,9 +780,9 @@ dependencies = [ [[package]] name = "clap" -version = "4.5.0" +version = "4.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "80c21025abd42669a92efc996ef13cfb2c5c627858421ea58d5c3b331a6c134f" +checksum = "c918d541ef2913577a0f9566e9ce27cb35b6df072075769e0b26cb5a554520da" dependencies = 
[ "clap_builder", "clap_derive", @@ -781,9 +790,9 @@ dependencies = [ [[package]] name = "clap_builder" -version = "4.5.0" +version = "4.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "458bf1f341769dfcf849846f65dffdf9146daa56bcd2a47cb4e1de9915567c99" +checksum = "9f3e7391dad68afb0c2ede1bf619f579a3dc9c2ec67f089baa397123a2f3d1eb" dependencies = [ "anstream", "anstyle", @@ -801,7 +810,7 @@ dependencies = [ "heck", "proc-macro2", "quote", - "syn 2.0.48", + "syn 2.0.49", ] [[package]] @@ -821,6 +830,12 @@ dependencies = [ "wasm-bindgen", ] +[[package]] +name = "cobs" +version = "0.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "67ba02a97a2bd10f4b59b25c7973101c79642302776489e030cd13cdab09ed15" + [[package]] name = "color-eyre" version = "0.6.2" @@ -1003,6 +1018,28 @@ dependencies = [ "url", ] +[[package]] +name = "content-store" +version = "0.1.0" +dependencies = [ + "async-trait", + "blake3", + "bytes 1.5.0", + "chrono", + "color-eyre", + "postcard", + "postgres-types", + "refinery", + "remain", + "serde", + "serde_json", + "si-cbor", + "si-data-pg", + "telemetry", + "thiserror", + "uuid", +] + [[package]] name = "convert_case" version = "0.4.0" @@ -1092,13 +1129,19 @@ checksum = "19d374276b40fb8bbdee95aef7c7fa6b5316ec764510eb64b8dd0e2ed0d7e7f5" [[package]] name = "crc32fast" -version = "1.3.2" +version = "1.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b540bd8bc810d3885c6ea91e2018302f68baba2129ab3e88f32389ee9370880d" +checksum = "b3855a8a784b474f333699ef2bbca9db2c4a1f6d9088a90a2d25b1eb53111eaa" dependencies = [ "cfg-if", ] +[[package]] +name = "critical-section" +version = "1.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7059fff8937831a9ae6f0fe4d658ffabf58f2ca96aa9dec1c889f936f705f216" + [[package]] name = "crossbeam-channel" version = "0.5.11" @@ -1238,7 +1281,7 @@ checksum = 
"f46882e17999c6cc590af592290432be3bce0428cb0d5f8b6715e4dc7b383eb3" dependencies = [ "proc-macro2", "quote", - "syn 2.0.48", + "syn 2.0.49", ] [[package]] @@ -1342,6 +1385,7 @@ dependencies = [ "buck2-resources", "chrono", "ciborium", + "content-store", "convert_case 0.6.0", "council-server", "dal-test", @@ -1359,9 +1403,13 @@ dependencies = [ "once_cell", "paste", "petgraph", + "postcard", "postgres-types", "pretty_assertions_sorted", "rand 0.8.5", + "rebaser-client", + "rebaser-core", + "rebaser-server", "refinery", "regex", "remain", @@ -1369,6 +1417,7 @@ dependencies = [ "serde-aux", "serde_json", "serde_with 3.6.1", + "si-cbor", "si-crypto", "si-data-nats", "si-data-pg", @@ -1394,6 +1443,7 @@ version = "0.1.0" dependencies = [ "buck2-resources", "color-eyre", + "content-store", "council-server", "dal", "derive_builder", @@ -1402,6 +1452,8 @@ dependencies = [ "module-index-client", "names", "pinga-server", + "rebaser-client", + "rebaser-server", "remain", "serde", "serde_json", @@ -1433,12 +1485,12 @@ dependencies = [ [[package]] name = "darling" -version = "0.20.5" +version = "0.20.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fc5d6b04b3fd0ba9926f945895de7d806260a2d7431ba82e7edaecb043c4c6b8" +checksum = "c376d08ea6aa96aafe61237c7200d1241cb177b7d3a542d791f2d118e9cbb955" dependencies = [ - "darling_core 0.20.5", - "darling_macro 0.20.5", + "darling_core 0.20.6", + "darling_macro 0.20.6", ] [[package]] @@ -1457,16 +1509,16 @@ dependencies = [ [[package]] name = "darling_core" -version = "0.20.5" +version = "0.20.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "04e48a959bcd5c761246f5d090ebc2fbf7b9cd527a492b07a67510c108f1e7e3" +checksum = "33043dcd19068b8192064c704b3f83eb464f91f1ff527b44a4e2b08d9cdb8855" dependencies = [ "fnv", "ident_case", "proc-macro2", "quote", "strsim 0.10.0", - "syn 2.0.48", + "syn 2.0.49", ] [[package]] @@ -1482,13 +1534,13 @@ dependencies = [ [[package]] name = 
"darling_macro" -version = "0.20.5" +version = "0.20.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1d1545d67a2149e1d93b7e5c7752dce5a7426eb5d1357ddcfd89336b94444f77" +checksum = "c5a91391accf613803c2a9bf9abccdbaa07c54b4244a5b64883f9c3c137c86be" dependencies = [ - "darling_core 0.20.5", + "darling_core 0.20.6", "quote", - "syn 2.0.48", + "syn 2.0.49", ] [[package]] @@ -1809,9 +1861,9 @@ dependencies = [ [[package]] name = "either" -version = "1.9.0" +version = "1.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a26ae43d7bcc3b814de94796a5e736d4029efb0ee900c12e2d54c993ad1a1e07" +checksum = "11157ac094ffbdde99aa67b23417ebdd801842852b500e395a45a9c0aac03e4a" dependencies = [ "serde", ] @@ -1837,6 +1889,12 @@ dependencies = [ "zeroize", ] +[[package]] +name = "embedded-io" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ef1a6892d9eef45c8fa6b9e0086428a2cca8491aca8f787c534a3d6d0bcb3ced" + [[package]] name = "encode_unicode" version = "0.3.6" @@ -1862,7 +1920,7 @@ dependencies = [ "num-traits", "proc-macro2", "quote", - "syn 2.0.48", + "syn 2.0.49", ] [[package]] @@ -2091,7 +2149,7 @@ checksum = "87750cf4b7a4c0625b1529e4c543c2182106e4dedc60a2a6455e00d212c489ac" dependencies = [ "proc-macro2", "quote", - "syn 2.0.48", + "syn 2.0.49", ] [[package]] @@ -2221,7 +2279,7 @@ dependencies = [ "futures-sink", "futures-util", "http", - "indexmap 2.2.2", + "indexmap 2.2.3", "slab", "tokio", "tokio-util", @@ -2238,13 +2296,22 @@ dependencies = [ "crunchy", ] +[[package]] +name = "hash32" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b0c35f58762feb77d74ebe43bdbc3210f09be9fe6742234d573bacc26ed92b67" +dependencies = [ + "byteorder", +] + [[package]] name = "hashbrown" version = "0.12.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"8a9ee70c43aaf417c914396645a0fa852624801b24ebb7ae78fe8272889ac888" dependencies = [ - "ahash 0.7.7", + "ahash 0.7.8", ] [[package]] @@ -2259,7 +2326,7 @@ version = "0.14.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "290f1a1d9242c78d09ce40a5e87e7554ee637af1351968159f4952f028f75604" dependencies = [ - "ahash 0.8.7", + "ahash 0.8.8", "allocator-api2", ] @@ -2272,6 +2339,20 @@ dependencies = [ "hashbrown 0.14.3", ] +[[package]] +name = "heapless" +version = "0.7.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cdc6457c0eb62c71aac4bc17216026d8410337c4126773b9c5daba343f17964f" +dependencies = [ + "atomic-polyfill", + "hash32", + "rustc_version", + "serde", + "spin 0.9.8", + "stable_deref_trait", +] + [[package]] name = "heck" version = "0.4.1" @@ -2283,9 +2364,9 @@ dependencies = [ [[package]] name = "hermit-abi" -version = "0.3.5" +version = "0.3.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d0c62115964e08cb8039170eb33c1d0e2388a256930279edca206fff675f82c3" +checksum = "bd5256b483761cd23699d0da46cc6fd2ee3be420bbe6d020ae4a091e70b7e9fd" [[package]] name = "hex" @@ -2544,9 +2625,9 @@ dependencies = [ [[package]] name = "indexmap" -version = "2.2.2" +version = "2.2.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "824b2ae422412366ba479e8111fd301f7b5faece8149317bb81925979a53f520" +checksum = "233cf39063f058ea2caae4091bf4a3ef70a653afbc026f5c4a4135d114e3c177" dependencies = [ "equivalent", "hashbrown 0.14.3", @@ -2555,9 +2636,9 @@ dependencies = [ [[package]] name = "indicatif" -version = "0.17.7" +version = "0.17.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fb28741c9db9a713d93deb3bb9515c20788cef5815265bee4980e87bde7e0f25" +checksum = "763a5a8f45087d6bcea4222e7b72c291a054edf80e4ef6efd2a4979878c7bea3" dependencies = [ "console", "instant", @@ -2580,7 +2661,7 @@ checksum = 
"0122b7114117e64a63ac49f752a5ca4624d534c7b1c7de796ac196381cd2d947" dependencies = [ "proc-macro2", "quote", - "syn 2.0.48", + "syn 2.0.49", ] [[package]] @@ -2812,7 +2893,7 @@ checksum = "afc95a651c82daf7004c824405aa1019723644950d488571bd718e3ed84646ed" dependencies = [ "proc-macro2", "quote", - "syn 2.0.48", + "syn 2.0.49", ] [[package]] @@ -3276,7 +3357,7 @@ checksum = "1e32339a5dc40459130b3bd269e9892439f55b33e772d2a9d402a789baaf4e8a" dependencies = [ "futures-core", "futures-sink", - "indexmap 2.2.2", + "indexmap 2.2.3", "js-sys", "once_cell", "pin-project-lite", @@ -3421,7 +3502,7 @@ dependencies = [ "proc-macro-error", "proc-macro2", "quote", - "syn 2.0.48", + "syn 2.0.49", ] [[package]] @@ -3435,7 +3516,7 @@ dependencies = [ "proc-macro2", "proc-macro2-diagnostics", "quote", - "syn 2.0.48", + "syn 2.0.49", ] [[package]] @@ -3547,7 +3628,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e1d3afd2628e69da2be385eb6f2fd57c8ac7977ceeff6dc166ff1657b0e386a9" dependencies = [ "fixedbitset", - "indexmap 2.2.2", + "indexmap 2.2.3", "serde", "serde_derive", ] @@ -3607,7 +3688,7 @@ checksum = "266c042b60c9c76b8d53061e52b2e0d1116abc57cefc8c5cd671619a56ac3690" dependencies = [ "proc-macro2", "quote", - "syn 2.0.48", + "syn 2.0.49", ] [[package]] @@ -3640,10 +3721,12 @@ name = "pinga-server" version = "0.1.0" dependencies = [ "buck2-resources", + "content-store", "dal", "derive_builder", "futures", "nats-subscriber", + "rebaser-client", "remain", "serde", "serde_json", @@ -3684,9 +3767,9 @@ dependencies = [ [[package]] name = "pkg-config" -version = "0.3.29" +version = "0.3.30" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2900ede94e305130c13ddd391e0ab7cbaeb783945ae07a279c268cb05109c6cb" +checksum = "d231b230927b5e4ad203db57bbcbee2802f6bce620b1e4a9024a07d94e2907ec" [[package]] name = "platforms" @@ -3736,6 +3819,18 @@ version = "1.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"7170ef9988bc169ba16dd36a7fa041e5c4cbeb6a35b76d4c03daded371eae7c0" +[[package]] +name = "postcard" +version = "1.0.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a55c51ee6c0db07e68448e336cf8ea4131a620edefebf9893e759b2d793420f8" +dependencies = [ + "cobs", + "embedded-io", + "heapless", + "serde", +] + [[package]] name = "postgres-derive" version = "0.4.5" @@ -3745,7 +3840,7 @@ dependencies = [ "heck", "proc-macro2", "quote", - "syn 2.0.48", + "syn 2.0.49", ] [[package]] @@ -3872,7 +3967,7 @@ checksum = "af066a9c399a26e020ada66a034357a868728e72cd426f3adcd35f80d88d88c8" dependencies = [ "proc-macro2", "quote", - "syn 2.0.48", + "syn 2.0.49", "version_check", "yansi 1.0.0-rc.1", ] @@ -4016,6 +4111,70 @@ dependencies = [ "rand_core 0.5.1", ] +[[package]] +name = "rebaser" +version = "0.1.0" +dependencies = [ + "clap", + "color-eyre", + "rebaser-server", + "telemetry-application", + "tokio", + "tokio-util", +] + +[[package]] +name = "rebaser-client" +version = "0.1.0" +dependencies = [ + "futures", + "rebaser-core", + "remain", + "serde", + "serde_json", + "si-data-nats", + "telemetry", + "thiserror", + "ulid", +] + +[[package]] +name = "rebaser-core" +version = "0.1.0" +dependencies = [ + "serde", + "serde_json", + "ulid", +] + +[[package]] +name = "rebaser-server" +version = "0.1.0" +dependencies = [ + "buck2-resources", + "content-store", + "dal", + "derive_builder", + "futures", + "nats-subscriber", + "rebaser-core", + "remain", + "serde", + "serde_json", + "si-crypto", + "si-data-nats", + "si-data-pg", + "si-settings", + "si-std", + "si-test-macros", + "stream-cancel", + "telemetry", + "thiserror", + "tokio", + "ulid", + "veritech-client", +] + [[package]] name = "redox_syscall" version = "0.4.1" @@ -4077,7 +4236,7 @@ dependencies = [ "quote", "refinery-core", "regex", - "syn 2.0.48", + "syn 2.0.49", ] [[package]] @@ -4132,7 +4291,7 @@ checksum = "1ad5e011230cad274d0532460c5ab69828ea47ae75681b42a841663efffaf794" dependencies = [ 
"proc-macro2", "quote", - "syn 2.0.48", + "syn 2.0.49", ] [[package]] @@ -4308,9 +4467,9 @@ dependencies = [ [[package]] name = "rust_decimal" -version = "1.34.2" +version = "1.34.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "755392e1a2f77afd95580d3f0d0e94ac83eeeb7167552c9b5bca549e61a94d83" +checksum = "b39449a79f45e8da28c57c341891b69a183044b29518bb8f86dbac9df60bb7df" dependencies = [ "arrayvec", "borsh", @@ -4399,9 +4558,9 @@ dependencies = [ [[package]] name = "rustls-pemfile" -version = "2.0.0" +version = "2.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "35e4980fa29e4c4b212ffb3db068a564cbf560e51d3944b7c88bd8bf5bec64f4" +checksum = "3c333bb734fcdedcea57de1602543590f545f127dc8b533324318fd492c5c70b" dependencies = [ "base64 0.21.7", "rustls-pki-types", @@ -4409,9 +4568,9 @@ dependencies = [ [[package]] name = "rustls-pki-types" -version = "1.2.0" +version = "1.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0a716eb65e3158e90e17cd93d855216e27bde02745ab842f2cab4a39dba1bacf" +checksum = "048a63e5b3ac996d78d402940b5fa47973d2d080c6c6fffa1d0f19c4445310b7" [[package]] name = "rustls-webpki" @@ -4487,6 +4646,7 @@ dependencies = [ "clap", "color-eyre", "nats-multiplexer", + "rebaser-client", "sdf-server", "si-std", "telemetry-application", @@ -4503,6 +4663,7 @@ dependencies = [ "base64 0.21.7", "buck2-resources", "chrono", + "content-store", "convert_case 0.6.0", "dal", "dal-test", @@ -4560,7 +4721,7 @@ dependencies = [ "proc-macro-error", "proc-macro2", "quote", - "syn 2.0.48", + "syn 2.0.49", ] [[package]] @@ -4601,7 +4762,7 @@ dependencies = [ "proc-macro2", "quote", "sea-bae", - "syn 2.0.48", + "syn 2.0.49", "unicode-ident", ] @@ -4726,7 +4887,7 @@ checksum = "33c85360c95e7d137454dc81d9a4ed2b8efd8fbe19cee57357b32b9771fccb67" dependencies = [ "proc-macro2", "quote", - "syn 2.0.48", + "syn 2.0.49", ] [[package]] @@ -4735,7 +4896,7 @@ version = "1.0.113" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "69801b70b1c3dac963ecb03a364ba0ceda9cf60c71cfe475e99864759c8b8a79" dependencies = [ - "indexmap 2.2.2", + "indexmap 2.2.3", "itoa", "ryu", "serde", @@ -4768,7 +4929,7 @@ checksum = "0b2e6b945e9d3df726b65d6ee24060aff8e3533d431f677a9695db04eff9dfdb" dependencies = [ "proc-macro2", "quote", - "syn 2.0.48", + "syn 2.0.49", ] [[package]] @@ -4828,7 +4989,7 @@ dependencies = [ "chrono", "hex", "indexmap 1.9.3", - "indexmap 2.2.2", + "indexmap 2.2.3", "serde", "serde_derive", "serde_json", @@ -4842,10 +5003,10 @@ version = "2.3.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "881b6f881b17d13214e5d494c939ebab463d01264ce1811e9d4ac3a882e7695f" dependencies = [ - "darling 0.20.5", + "darling 0.20.6", "proc-macro2", "quote", - "syn 2.0.48", + "syn 2.0.49", ] [[package]] @@ -4854,10 +5015,10 @@ version = "3.6.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "865f9743393e638991566a8b7a479043c2c8da94a33e0a31f18214c9cae0a64d" dependencies = [ - "darling 0.20.5", + "darling 0.20.6", "proc-macro2", "quote", - "syn 2.0.48", + "syn 2.0.49", ] [[package]] @@ -4866,7 +5027,7 @@ version = "0.9.31" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "adf8a49373e98a4c5f0ceb5d05aa7c648d75f63774981ed95b7c7443bbd50c6e" dependencies = [ - "indexmap 2.2.2", + "indexmap 2.2.3", "itoa", "ryu", "serde", @@ -4920,6 +5081,16 @@ dependencies = [ "tokio-util", ] +[[package]] +name = "si-cbor" +version = "0.1.0" +dependencies = [ + "ciborium", + "remain", + "serde", + "thiserror", +] + [[package]] name = "si-cli" version = "0.1.0" @@ -5005,7 +5176,7 @@ dependencies = [ "refinery", "remain", "rustls 0.22.2", - "rustls-pemfile 2.0.0", + "rustls-pemfile 2.1.0", "serde", "si-std", "telemetry", @@ -5032,7 +5203,7 @@ dependencies = [ "base64 0.21.7", "chrono", "derive_builder", - "indexmap 2.2.2", + "indexmap 2.2.3", "object-tree", "petgraph", "remain", @@ 
-5089,7 +5260,7 @@ version = "0.1.0" dependencies = [ "proc-macro2", "quote", - "syn 2.0.48", + "syn 2.0.49", ] [[package]] @@ -5269,7 +5440,7 @@ version = "0.7.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d84b0a3c3739e220d94b3239fd69fb1f74bc36e16643423bd99de3b43c21bfbd" dependencies = [ - "ahash 0.8.7", + "ahash 0.8.8", "atoi", "bigdecimal", "byteorder", @@ -5287,7 +5458,7 @@ dependencies = [ "futures-util", "hashlink", "hex", - "indexmap 2.2.2", + "indexmap 2.2.3", "log", "memchr", "once_cell", @@ -5534,7 +5705,7 @@ dependencies = [ "proc-macro2", "quote", "rustversion", - "syn 2.0.48", + "syn 2.0.49", ] [[package]] @@ -5569,9 +5740,9 @@ dependencies = [ [[package]] name = "syn" -version = "2.0.48" +version = "2.0.49" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0f3531638e407dfc0814761abb7c00a5b54992b849452a0646b7f65c9f770f3f" +checksum = "915aea9e586f80826ee59f8453c1101f9d1c4b3964cd2460185ee8e299ada496" dependencies = [ "proc-macro2", "quote", @@ -5587,7 +5758,7 @@ dependencies = [ "proc-macro-error", "proc-macro2", "quote", - "syn 2.0.48", + "syn 2.0.49", ] [[package]] @@ -5735,27 +5906,27 @@ checksum = "7ba277e77219e9eea169e8508942db1bf5d8a41ff2db9b20aab5a5aadc9fa25d" dependencies = [ "proc-macro2", "quote", - "syn 2.0.48", + "syn 2.0.49", ] [[package]] name = "thiserror" -version = "1.0.56" +version = "1.0.57" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d54378c645627613241d077a3a79db965db602882668f9136ac42af9ecb730ad" +checksum = "1e45bcbe8ed29775f228095caf2cd67af7a4ccf756ebff23a306bf3e8b47b24b" dependencies = [ "thiserror-impl", ] [[package]] name = "thiserror-impl" -version = "1.0.56" +version = "1.0.57" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fa0faa943b50f3db30a20aa7e265dbc66076993efed8463e8de414e5d06d3471" +checksum = "a953cb265bef375dae3de6663da4d3804eee9682ea80d8e2542529b73c531c81" dependencies = [ "proc-macro2", 
"quote", - "syn 2.0.48", + "syn 2.0.49", ] [[package]] @@ -5860,7 +6031,7 @@ checksum = "5b8a1e28f2deaa14e508979454cb3a223b10b938b45af148bc0986de36f1923b" dependencies = [ "proc-macro2", "quote", - "syn 2.0.48", + "syn 2.0.49", ] [[package]] @@ -6045,7 +6216,7 @@ dependencies = [ "serde", "serde_spanned", "toml_datetime", - "toml_edit 0.22.4", + "toml_edit 0.22.6", ] [[package]] @@ -6063,11 +6234,11 @@ version = "0.19.15" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1b5bb770da30e5cbfde35a2d7b9b8a2c4b8ef89548a7a6aeab5c9a576e3e7421" dependencies = [ - "indexmap 2.2.2", + "indexmap 2.2.3", "serde", "serde_spanned", "toml_datetime", - "winnow", + "winnow 0.5.40", ] [[package]] @@ -6076,22 +6247,22 @@ version = "0.21.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6a8534fd7f78b5405e860340ad6575217ce99f38d4d5c8f2442cb5ecb50090e1" dependencies = [ - "indexmap 2.2.2", + "indexmap 2.2.3", "toml_datetime", - "winnow", + "winnow 0.5.40", ] [[package]] name = "toml_edit" -version = "0.22.4" +version = "0.22.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0c9ffdf896f8daaabf9b66ba8e77ea1ed5ed0f72821b398aba62352e95062951" +checksum = "2c1b5fd4128cc8d3e0cb74d4ed9a9cc7c7284becd4df68f5f940e1ad123606f6" dependencies = [ - "indexmap 2.2.2", + "indexmap 2.2.3", "serde", "serde_spanned", "toml_datetime", - "winnow", + "winnow 0.6.1", ] [[package]] @@ -6196,7 +6367,7 @@ checksum = "34704c8d6ebcbc939824180af020566b01a7c01f80641264eba0999f6c2b6be7" dependencies = [ "proc-macro2", "quote", - "syn 2.0.48", + "syn 2.0.49", ] [[package]] @@ -6607,7 +6778,7 @@ dependencies = [ "once_cell", "proc-macro2", "quote", - "syn 2.0.48", + "syn 2.0.49", "wasm-bindgen-shared", ] @@ -6641,7 +6812,7 @@ checksum = "642f325be6301eb8107a83d12a8ac6c1e1c54345a7ef1a9261962dfefda09e66" dependencies = [ "proc-macro2", "quote", - "syn 2.0.48", + "syn 2.0.49", "wasm-bindgen-backend", "wasm-bindgen-shared", ] @@ -6872,9 
+7043,18 @@ checksum = "dff9641d1cd4be8d1a070daf9e3773c5f67e78b4d9d42263020c057706765c04" [[package]] name = "winnow" -version = "0.5.39" +version = "0.5.40" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f593a95398737aeed53e489c785df13f3618e41dbcd6718c6addbf1395aa6876" +dependencies = [ + "memchr", +] + +[[package]] +name = "winnow" +version = "0.6.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5389a154b01683d28c77f8f68f49dea75f0a4da32557a58f68ee51ebba472d29" +checksum = "d90f4e0f530c4c69f62b80d839e9ef3855edc9cba471a160c4d692deed62b401" dependencies = [ "memchr", ] @@ -6993,7 +7173,7 @@ checksum = "9ce1b18ccd8e73a9321186f97e46f9f04b778851177567b1975109d26a08d2a6" dependencies = [ "proc-macro2", "quote", - "syn 2.0.48", + "syn 2.0.49", ] [[package]] @@ -7013,5 +7193,5 @@ checksum = "ce36e65b0d2999d2aafac989fb249189a141aee1f53c612c1f37d72631959f69" dependencies = [ "proc-macro2", "quote", - "syn 2.0.48", + "syn 2.0.49", ] diff --git a/Cargo.toml b/Cargo.toml index 08c65f1ed8..2dc4522146 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -8,6 +8,7 @@ members = [ "bin/cyclone", "bin/module-index", "bin/pinga", + "bin/rebaser", "bin/sdf", "bin/si", "bin/veritech", @@ -15,6 +16,7 @@ members = [ "lib/buck2-resources", "lib/bytes-lines-codec", "lib/config-file", + "lib/content-store", "lib/council-server", "lib/cyclone-client", "lib/cyclone-core", @@ -30,7 +32,11 @@ members = [ "lib/nats-subscriber", "lib/object-tree", "lib/pinga-server", + "lib/rebaser-client", + "lib/rebaser-core", + "lib/rebaser-server", "lib/sdf-server", + "lib/si-cbor", "lib/si-crypto", "lib/si-data-nats", "lib/si-data-pg", @@ -124,6 +130,7 @@ pathdiff = "0.2.1" petgraph = { version = "0.6.3", features = ["serde-1"] } pin-project-lite = "0.2.9" podman-api = "0.10" +postcard = { version = "1.0.8", features = ["use-std"] } postgres-types = { version = "0.2.5", features = ["derive"] } pretty_assertions_sorted = "1.2.1" proc-macro2 = "1.0.56" 
diff --git a/README.md b/README.md index 83baa13c6f..79a2b4dc1e 100644 --- a/README.md +++ b/README.md @@ -14,7 +14,12 @@ To learn more, read our ["Second Wave DevOps" blog post](https://www.systeminit. ## Quickstart -Follow the [Local Development Setup](#local-development-setup) instructions below. +**We recommend running SI using the stable version of the engine. +To do so, please checkout [`old-engine`](https://github.com/systeminit/si/tree/old-engine) and follow the instructions in its [README](https://github.com/systeminit/si/blob/old-engine/README.md).** + +> Disclaimer: contents of the [`docs`](./docs) directory may be out of date until the current architecture becomes stable. + +Otherwise, follow the [Local Development Setup](#local-development-setup) instructions below. We are working on and investigating more way(s) to try out System Initiative in the future. ## Local Development Setup diff --git a/app/web/package.json b/app/web/package.json index 2896fe50ce..4dc750fb1b 100644 --- a/app/web/package.json +++ b/app/web/package.json @@ -55,6 +55,9 @@ "date-fns": "^2.29.2", "floating-vue": "^2.0.0-beta.16", "fontfaceobserver": "^2.3.0", + "graphology": "^0.25.4", + "graphology-layout-forceatlas2": "^0.10.1", + "graphology-layout-noverlap": "^0.4.2", "is-promise": "^4.0.0", "javascript-time-ago": "^2.5.7", "joi": "^17.11.0", @@ -72,6 +75,7 @@ "plur": "^5.1.0", "posthog-js": "^1.76.0", "reconnecting-websocket": "^4.4.0", + "sigma": "3.0.0-beta.5", "tinycolor2": "^1.4.2", "typescript": "^4.9.5", "util-browser": "^0.0.2", @@ -105,6 +109,7 @@ "cypress-vite": "^1.5.0", "eslint": "^8.36.0", "faker": "^6.6.6", + "graphology-types": "^0.24.7", "unplugin-icons": "^0.17.1", "vite": "^5.0.10", "vite-plugin-checker": "^0.6.2", diff --git a/app/web/src/components/Workspace/WorkspaceViz.vue b/app/web/src/components/Workspace/WorkspaceViz.vue new file mode 100644 index 0000000000..171b00a3cb --- /dev/null +++ b/app/web/src/components/Workspace/WorkspaceViz.vue @@ -0,0 +1,33 
@@ + + + diff --git a/app/web/src/components/Workspace/WorkspaceVizSchemaVariant.vue b/app/web/src/components/Workspace/WorkspaceVizSchemaVariant.vue new file mode 100644 index 0000000000..11d14a3f77 --- /dev/null +++ b/app/web/src/components/Workspace/WorkspaceVizSchemaVariant.vue @@ -0,0 +1,94 @@ + + + diff --git a/app/web/src/store/viz.store.ts b/app/web/src/store/viz.store.ts new file mode 100644 index 0000000000..d091404b7e --- /dev/null +++ b/app/web/src/store/viz.store.ts @@ -0,0 +1,81 @@ +import { defineStore } from "pinia"; +import { ApiRequest, addStoreHooks } from "@si/vue-lib/pinia"; +import { useWorkspacesStore } from "@/store/workspaces.store"; +import { useChangeSetsStore } from "@/store/change_sets.store"; +import { Visibility } from "@/api/sdf/dal/visibility"; +import { nilId } from "@/utils/nilId"; + +export type NodeKind = "Category" | "Content" | "Func" | "Ordering" | "Prop"; + +export type ContentKind = + | "Root" + | "ActionPrototype" + | "AttributePrototype" + | "AttributePrototypeArgument" + | "AttributeValue" + | "Component" + | "ExternalProvider" + | "FuncArg" + | "Func" + | "InternalProvider" + | "Prop" + | "Schema" + | "SchemaVariant" + | "StaticArgumentValue" + | "ValidationPrototype"; + +export interface VizResponse { + edges: { + from: string; + to: string; + }[]; + + nodes: { + id: string; + nodeKind: NodeKind; + contentKind: ContentKind | null; + name: string | null; + }[]; + + rootNodeId: string; +} + +export const useVizStore = () => { + const changeSetStore = useChangeSetsStore(); + const selectedChangeSetId = changeSetStore.selectedChangeSetId; + const workspacesStore = useWorkspacesStore(); + const workspaceId = workspacesStore.selectedWorkspacePk; + const visibility: Visibility = { + visibility_change_set_pk: selectedChangeSetId ?? 
nilId(), + }; + + return addStoreHooks( + defineStore( + `ws${workspaceId || "NONE"}/cs${selectedChangeSetId || "NONE"}/viz`, + { + state: () => ({ + edges: [], + nodes: [], + }), + getters: { + nodes: (state) => state.nodes, + edges: (state) => state.edges, + }, + actions: { + async FETCH_VIZ() { + return new ApiRequest({ + url: "/graphviz/nodes_edges", + params: { ...visibility }, + }); + }, + async FETCH_SCHEMA_VARIANT_VIZ(schemaVariantId: string) { + return new ApiRequest({ + url: "/graphviz/schema_variant", + params: { schemaVariantId, ...visibility }, + }); + }, + }, + }, + ), + )(); +}; diff --git a/bin/rebaser/BUCK b/bin/rebaser/BUCK new file mode 100644 index 0000000000..f61bde4387 --- /dev/null +++ b/bin/rebaser/BUCK @@ -0,0 +1,29 @@ +load( + "@prelude-si//:macros.bzl", + "docker_image", + "rust_binary", +) + +rust_binary( + name = "rebaser", + deps = [ + "//lib/rebaser-server:rebaser-server", + "//lib/telemetry-application-rs:telemetry-application", + "//third-party/rust:clap", + "//third-party/rust:color-eyre", + "//third-party/rust:tokio", + "//third-party/rust:tokio-util", + ], + srcs = glob(["src/**/*.rs"]), + resources = { + "dev.encryption.key": "//lib/cyclone-server:dev.encryption.key", + "dev.donkey.key": "//lib/dal:dev.donkey.key", + "dev.postgres.root.crt": "//config/keys:dev.postgres.root.crt", + }, +) + +docker_image( + name = "image", + image_name = "rebaser", + build_deps = ["//bin/rebaser:rebaser"] +) diff --git a/bin/rebaser/Cargo.toml b/bin/rebaser/Cargo.toml new file mode 100644 index 0000000000..7bcf3086ed --- /dev/null +++ b/bin/rebaser/Cargo.toml @@ -0,0 +1,18 @@ +[package] +name = "rebaser" +version = "0.1.0" +edition = "2021" +rust-version = "1.64" +publish = false + +[[bin]] +name = "rebaser" +path = "src/main.rs" + +[dependencies] +clap = { workspace = true } +color-eyre = { workspace = true } +rebaser-server = { path = "../../lib/rebaser-server" } +telemetry-application = { path = "../../lib/telemetry-application-rs" } +tokio = 
{ workspace = true } +tokio-util = { workspace = true } diff --git a/bin/rebaser/Dockerfile b/bin/rebaser/Dockerfile new file mode 100644 index 0000000000..0be26bbabc --- /dev/null +++ b/bin/rebaser/Dockerfile @@ -0,0 +1,38 @@ +# hadolint ignore=DL3007 +FROM nixos/nix:latest AS builder +ARG BIN=rebaser + +COPY . /workdir +WORKDIR /workdir + +RUN set -eux; \ + nix \ + --extra-experimental-features "nix-command flakes impure-derivations ca-derivations" \ + --option filter-syscalls false \ + build \ + ".#$BIN"; + +RUN mkdir -p /tmp/nix-store-closure /tmp/local-bin +# hadolint ignore=SC2046 +RUN cp -R $(nix-store --query --requisites result/) /tmp/nix-store-closure +# hadolint ignore=SC2046 +RUN ln -snf $(nix-store --query result/)/bin/* /tmp/local-bin/ + +FROM alpine:3 AS final +ARG BIN=rebaser + +# hadolint ignore=DL3018 +RUN set -eux; \ + apk add --no-cache runuser; \ + adduser -D app; \ + for dir in /run /etc /usr/local/etc /home/app/.config; do \ + mkdir -pv "$dir/$BIN"; \ + done; + +WORKDIR /run/$BIN +COPY --from=builder /tmp/nix-store-closure /nix/store +COPY --from=builder /tmp/local-bin/* /usr/local/bin/ + +ENTRYPOINT [ \ + "/sbin/runuser", "-u", "app", "--", "/usr/local/bin/rebaser" \ +] diff --git a/bin/rebaser/src/args.rs b/bin/rebaser/src/args.rs new file mode 100644 index 0000000000..b74aa412c7 --- /dev/null +++ b/bin/rebaser/src/args.rs @@ -0,0 +1,112 @@ +use clap::{ArgAction, Parser}; +use rebaser_server::{Config, ConfigError, ConfigFile, StandardConfigFile}; + +const NAME: &str = "rebaser"; + +/// Parse, validate, and return the CLI arguments as a typed struct. +pub(crate) fn parse() -> Args { + Args::parse() +} + +#[derive(Parser, Debug)] +#[command(name = NAME, max_term_width = 100)] +pub(crate) struct Args { + /// Sets the verbosity mode. + /// + /// Multiple -v options increase verbosity. The maximum is 4. 
+ #[arg(short = 'v', long = "verbose", action = ArgAction::Count)] + pub(crate) verbose: u8, + + /// PostgreSQL connection pool dbname [example: myapp] + #[arg(long)] + pub(crate) pg_dbname: Option, + + /// PostgreSQL connection pool hostname [example: prod.db.example.com] + #[arg(long)] + pub(crate) pg_hostname: Option, + + /// PostgreSQL connection pool max size [example: 8] + #[arg(long)] + pub(crate) pg_pool_max_size: Option, + + /// PostgreSQL connection pool port [example: 5432] + #[arg(long)] + pub(crate) pg_port: Option, + + /// PostgreSQL connection pool user [example: dbuser] + #[arg(long)] + pub(crate) pg_user: Option, + + /// NATS connection URL [example: demo.nats.io] + #[arg(long)] + pub(crate) nats_url: Option, + + /// Disable OpenTelemetry on startup + #[arg(long)] + pub(crate) disable_opentelemetry: bool, + + /// Cyclone encryption key file location [default: /run/rebaser/cyclone_encryption.key] + #[arg(long)] + pub(crate) cyclone_encryption_key_path: Option, + + /// The number of concurrent jobs that can be processed [default: 10] + #[arg(long)] + pub(crate) concurrency: Option, + + /// Instance ID [example: 01GWEAANW5BVFK5KDRVS6DEY0F"] + /// + /// And instance ID is used when tracking the execution of jobs in a way that can be traced + /// back to an instance of a Pinga service. 
+ #[arg(long)] + pub(crate) instance_id: Option, +} + +impl TryFrom for Config { + type Error = ConfigError; + + fn try_from(args: Args) -> Result { + ConfigFile::layered_load(NAME, |config_map| { + if let Some(dbname) = args.pg_dbname { + config_map.set("pg.dbname", dbname); + } + if let Some(hostname) = args.pg_hostname { + config_map.set("pg.hostname", hostname); + } + if let Some(pool_max_size) = args.pg_pool_max_size { + config_map.set("pg.pool_max_size", i64::from(pool_max_size)); + } + if let Some(port) = args.pg_port { + config_map.set("pg.port", i64::from(port)); + } + if let Some(user) = args.pg_user { + config_map.set("pg.user", user); + } + if let Some(url) = args.nats_url { + config_map.set("nats.url", url); + } + if let Some(cyclone_encyption_key_path) = args.cyclone_encryption_key_path { + config_map.set("cyclone_encryption_key_path", cyclone_encyption_key_path); + } + if let Some(concurrency) = args.concurrency { + config_map.set("concurrency_limit", i64::from(concurrency)); + } + if let Some(instance_id) = args.instance_id { + config_map.set("instance_id", instance_id); + } + + config_map.set("pg.application_name", NAME); + })? 
+ .try_into() + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn verify_command() { + use clap::CommandFactory; + Args::command().debug_assert() + } +} diff --git a/bin/rebaser/src/main.rs b/bin/rebaser/src/main.rs new file mode 100644 index 0000000000..693e1a1260 --- /dev/null +++ b/bin/rebaser/src/main.rs @@ -0,0 +1,62 @@ +use color_eyre::Result; +use rebaser_server::{Config, Server}; +use telemetry_application::prelude::*; +use tokio_util::{sync::CancellationToken, task::TaskTracker}; + +mod args; + +const RT_DEFAULT_THREAD_STACK_SIZE: usize = 2 * 1024 * 1024 * 3; + +fn main() -> Result<()> { + let thread_builder = ::std::thread::Builder::new().stack_size(RT_DEFAULT_THREAD_STACK_SIZE); + let thread_handler = thread_builder.spawn(|| { + tokio::runtime::Builder::new_multi_thread() + .thread_stack_size(RT_DEFAULT_THREAD_STACK_SIZE) + .thread_name("bin/rebaser-tokio::runtime") + .enable_all() + .build()? + .block_on(async_main()) + })?; + thread_handler.join().unwrap() +} + +async fn async_main() -> Result<()> { + let shutdown_token = CancellationToken::new(); + let task_tracker = TaskTracker::new(); + + color_eyre::install()?; + let args = args::parse(); + + let (mut telemetry, telemetry_shutdown) = { + let config = TelemetryConfig::builder() + .service_name("rebaser") + .service_namespace("si") + .log_env_var_prefix("SI") + .app_modules(vec!["rebaser", "rebaser_server"]) + .interesting_modules(vec!["si_data_nats", "si_data_pg"]) + .build()?; + + telemetry_application::init(config, &task_tracker, shutdown_token.clone())? + }; + + if args.verbose > 0 { + telemetry + .set_verbosity_and_wait(args.verbose.into()) + .await?; + } + debug!(arguments =?args, "parsed cli arguments"); + + let config = Config::try_from(args)?; + + Server::from_config(config).await?.run().await?; + + // TODO(nick): see other TODOs from the other services with similar shutdown procedures. 
+ { + shutdown_token.cancel(); + task_tracker.wait().await; + telemetry_shutdown.wait().await?; + } + + info!("graceful shutdown complete."); + Ok(()) +} diff --git a/bin/sdf/BUCK b/bin/sdf/BUCK index c1e732a7de..ea14648bd2 100644 --- a/bin/sdf/BUCK +++ b/bin/sdf/BUCK @@ -9,6 +9,7 @@ rust_binary( name = "sdf", deps = [ "//lib/nats-multiplexer:nats-multiplexer", + "//lib/rebaser-client:rebaser-client", "//lib/sdf-server:sdf-server", "//lib/si-std:si-std", "//lib/telemetry-application-rs:telemetry-application", diff --git a/bin/sdf/Cargo.toml b/bin/sdf/Cargo.toml index a4ab96c772..f4d06d7791 100644 --- a/bin/sdf/Cargo.toml +++ b/bin/sdf/Cargo.toml @@ -10,11 +10,13 @@ name = "sdf" path = "src/main.rs" [dependencies] -clap = { workspace = true } -color-eyre = { workspace = true } nats-multiplexer = { path = "../../lib/nats-multiplexer" } +rebaser-client = { path = "../../lib/rebaser-client" } sdf-server = { path = "../../lib/sdf-server" } si-std = { path = "../../lib/si-std" } telemetry-application = { path = "../../lib/telemetry-application-rs" } + +clap = { workspace = true } +color-eyre = { workspace = true } tokio = { workspace = true } tokio-util = { workspace = true } diff --git a/bin/sdf/src/main.rs b/bin/sdf/src/main.rs index c85fb17a48..259729cd50 100644 --- a/bin/sdf/src/main.rs +++ b/bin/sdf/src/main.rs @@ -12,6 +12,8 @@ use sdf_server::{ use telemetry_application::prelude::*; use tokio_util::{sync::CancellationToken, task::TaskTracker}; +use rebaser_client::Config as RebaserClientConfig; + mod args; type JobProcessor = sdf_server::NatsProcessor; @@ -99,15 +101,21 @@ async fn async_main() -> Result<()> { let pg_pool = Server::create_pg_pool(config.pg_pool()).await?; + let content_store_pg_pool = Server::create_pg_pool(config.content_store_pg_pool()).await?; + let veritech = Server::create_veritech_client(nats_conn.clone()); let symmetric_crypto_service = Server::create_symmetric_crypto_service(config.symmetric_crypto_service()).await?; - let pkgs_path: 
PathBuf = config.pkgs_path().try_into()?; + let pkgs_path: PathBuf = config.pkgs_path().into(); let module_index_url = config.module_index_url().to_string(); + // TODO: accept command line arguments and or environment variables to configure the rebaser + // client + let rebaser_config = RebaserClientConfig::default(); + let (ws_multiplexer, ws_multiplexer_client) = Multiplexer::new(&nats_conn, WS_MULTIPLEXER_SUBJECT).await?; let (crdt_multiplexer, crdt_multiplexer_client) = @@ -122,6 +130,8 @@ async fn async_main() -> Result<()> { Some(pkgs_path), Some(module_index_url), symmetric_crypto_service, + rebaser_config, + content_store_pg_pool, ); if let MigrationMode::Run | MigrationMode::RunAndQuit = config.migration_mode() { @@ -153,15 +163,15 @@ async fn async_main() -> Result<()> { crdt_multiplexer, crdt_multiplexer_client, )?; - let second_shutdown_broadcast_rx = initial_shutdown_broadcast_rx.resubscribe(); + let _second_shutdown_broadcast_rx = initial_shutdown_broadcast_rx.resubscribe(); - Server::start_resource_refresh_scheduler( - services_context.clone(), - initial_shutdown_broadcast_rx, - ) - .await; + // Server::start_resource_refresh_scheduler( + // services_context.clone(), + // initial_shutdown_broadcast_rx, + // ) + // .await; - Server::start_status_updater(services_context, second_shutdown_broadcast_rx).await?; + // Server::start_status_updater(services_context, second_shutdown_broadcast_rx).await?; server.run().await?; } @@ -177,15 +187,15 @@ async fn async_main() -> Result<()> { crdt_multiplexer_client, ) .await?; - let second_shutdown_broadcast_rx = initial_shutdown_broadcast_rx.resubscribe(); + let _second_shutdown_broadcast_rx = initial_shutdown_broadcast_rx.resubscribe(); - Server::start_resource_refresh_scheduler( - services_context.clone(), - initial_shutdown_broadcast_rx, - ) - .await; + // Server::start_resource_refresh_scheduler( + // services_context.clone(), + // initial_shutdown_broadcast_rx, + // ) + // .await; - 
Server::start_status_updater(services_context, second_shutdown_broadcast_rx).await?; + // Server::start_status_updater(services_context, second_shutdown_broadcast_rx).await?; server.run().await?; } diff --git a/component/postgres/BUCK b/component/postgres/BUCK index 67649016ad..387a8820d3 100644 --- a/component/postgres/BUCK +++ b/component/postgres/BUCK @@ -30,7 +30,7 @@ docker_image( "--env", "POSTGRES_DB=si", "--env", - "POSTGRES_MULTIPLE_DBS=si_test,si_test_dal,si_test_sdf_server,si_auth", + "POSTGRES_MULTIPLE_DBS=si_content_store,si_auth,si_test,si_test_content_store,si_test_dal,si_test_sdf_server", "--publish", "5432:5432", ], diff --git a/dev/Tiltfile b/dev/Tiltfile index 1c25eb85b8..995d9500ac 100644 --- a/dev/Tiltfile +++ b/dev/Tiltfile @@ -11,11 +11,11 @@ groups = { "postgres-test", ], "backend": [ - "council", "pinga", "veritech", "sdf", "module-index", + "rebaser", ], "frontend": [ "web", @@ -77,38 +77,38 @@ for service in compose_services: links = [] dc_resource(service, links = links, labels = ["platform"]) -# Locally build and run `module-index` -module_index_target = "//bin/module-index:module-index" +# Locally build and run `rebaser-server` +rebaser_target = "//bin/rebaser:rebaser" local_resource( - "module-index", + "rebaser", labels = ["backend"], - cmd = "buck2 build {}".format(module_index_target), - serve_cmd = "buck2 run {}".format(module_index_target), - serve_env = {"SI_FORCE_COLOR": "true"}, + cmd = "buck2 build {}".format(rebaser_target), + serve_cmd = "buck2 run {}".format(rebaser_target), allow_parallel = True, - auto_init = False, resource_deps = [ + "nats", "otelcol", "postgres", ], - deps = _buck2_dep_inputs(module_index_target), - trigger_mode = trigger_mode, + deps = _buck2_dep_inputs(rebaser_target), + trigger_mode = trigger_mode ) -# Locally build and run `council` -council_target = "//bin/council:council" +# Locally build and run `module-index` +module_index_target = "//bin/module-index:module-index" local_resource( - "council", 
+ "module-index", labels = ["backend"], - cmd = "buck2 build {}".format(council_target), - serve_cmd = "buck2 run {}".format(council_target), + cmd = "buck2 build {}".format(module_index_target), + serve_cmd = "buck2 run {}".format(module_index_target), serve_env = {"SI_FORCE_COLOR": "true"}, allow_parallel = True, + auto_init = False, resource_deps = [ - "nats", "otelcol", + "postgres", ], - deps = _buck2_dep_inputs(council_target), + deps = _buck2_dep_inputs(module_index_target), trigger_mode = trigger_mode, ) @@ -122,7 +122,6 @@ local_resource( serve_env = {"SI_FORCE_COLOR": "true"}, allow_parallel = True, resource_deps = [ - "council", "nats", "otelcol", "veritech", @@ -166,6 +165,7 @@ local_resource( "pinga", "postgres", "veritech", + "rebaser", ], deps = _buck2_dep_inputs(sdf_target), trigger_mode = trigger_mode, diff --git a/dev/docker-compose.platform.yml b/dev/docker-compose.platform.yml index a916104a7e..d2ff65dad3 100644 --- a/dev/docker-compose.platform.yml +++ b/dev/docker-compose.platform.yml @@ -9,7 +9,7 @@ services: - "PGPASSWORD=bugbear" - "POSTGRES_USER=si" - "POSTGRES_DB=si" - - "POSTGRES_MULTIPLE_DBS=si_auth,si_module_index" + - "POSTGRES_MULTIPLE_DBS=si_content_store,si_auth,si_module_index" ports: - "5432:5432" @@ -20,7 +20,7 @@ services: - "PGPASSWORD=bugbear" - "POSTGRES_USER=si_test" - "POSTGRES_DB=si_test" - - "POSTGRES_MULTIPLE_DBS=si_test_dal,si_test_sdf_server" + - "POSTGRES_MULTIPLE_DBS=si_test_content_store,si_test_dal,si_test_sdf_server" command: - "-c" - "fsync=off" diff --git a/flake.nix b/flake.nix index 2ed2d6b8be..e6be0c28f3 100644 --- a/flake.nix +++ b/flake.nix @@ -70,6 +70,7 @@ libiconv darwin.apple_sdk.frameworks.Security darwin.apple_sdk.frameworks.SystemConfiguration + darwin.apple_sdk.frameworks.CoreFoundation ]; # This isn't an exact science, but confirmed the system interpreter by @@ -239,6 +240,8 @@ pinga = binDerivation {pkgName = "pinga";}; + rebaser = binDerivation {pkgName = "rebaser";}; + sdf = binDerivation 
{pkgName = "sdf";}; si = binDerivation {pkgName = "si";}; diff --git a/lib/content-store/BUCK b/lib/content-store/BUCK new file mode 100644 index 0000000000..2642e94bc9 --- /dev/null +++ b/lib/content-store/BUCK @@ -0,0 +1,30 @@ +load("@prelude-si//:macros.bzl", "rust_library") + +rust_library( + name = "content-store", + deps = [ + "//lib/si-cbor:si-cbor", + "//lib/si-data-pg:si-data-pg", + "//lib/telemetry-rs:telemetry", + "//third-party/rust:async-trait", + "//third-party/rust:blake3", + "//third-party/rust:bytes", + "//third-party/rust:chrono", + "//third-party/rust:color-eyre", + "//third-party/rust:postcard", + "//third-party/rust:postgres-types", + "//third-party/rust:refinery", + "//third-party/rust:remain", + "//third-party/rust:serde", + "//third-party/rust:serde_json", + "//third-party/rust:thiserror", + "//third-party/rust:uuid", + ], + srcs = glob([ + "src/**/*.rs", + "src/store/pg/migrations/**/*.sql", + ]), + env = { + "CARGO_MANIFEST_DIR": ".", + }, +) diff --git a/lib/content-store/Cargo.toml b/lib/content-store/Cargo.toml new file mode 100644 index 0000000000..5949f1296b --- /dev/null +++ b/lib/content-store/Cargo.toml @@ -0,0 +1,24 @@ +[package] +name = "content-store" +version = "0.1.0" +edition = "2021" +publish = false + +[dependencies] +si-cbor = { path = "../../lib/si-cbor" } +si-data-pg = { path = "../../lib/si-data-pg" } +telemetry = { path = "../../lib/telemetry-rs" } + +async-trait = { workspace = true } +blake3 = { workspace = true } +bytes = { workspace = true } +chrono = { workspace = true } +color-eyre = { workspace = true } +postcard = { workspace = true } +postgres-types = { workspace = true } +refinery = { workspace = true } +remain = { workspace = true } +serde = { workspace = true } +serde_json = { workspace = true } +thiserror = { workspace = true } +uuid = { workspace = true } diff --git a/lib/content-store/build.rs b/lib/content-store/build.rs new file mode 100644 index 0000000000..d2e79ef4d0 --- /dev/null +++ 
b/lib/content-store/build.rs @@ -0,0 +1,13 @@ +use std::fs; + +fn main() -> Result<(), Box> { + println!("cargo:rerun-if-changed=src/store/pg/migrations"); + for entry in fs::read_dir("./src/store/pg/migrations")? { + let entry = entry?; + let path = entry.path(); + if path.is_file() { + println!("cargo:rerun-if-changed={}", path.display()); + } + } + Ok(()) +} diff --git a/lib/content-store/src/hash.rs b/lib/content-store/src/hash.rs new file mode 100644 index 0000000000..06a55918b0 --- /dev/null +++ b/lib/content-store/src/hash.rs @@ -0,0 +1,154 @@ +use bytes::BytesMut; +use std::{fmt, str::FromStr}; + +use postgres_types::ToSql; +use serde::{ + de::{self, Visitor}, + Deserialize, Serialize, +}; +use serde_json::Value; + +use thiserror::Error; + +/// The [`blake3::Hash`] of a given set of contents. +#[derive(Clone, Copy, Eq, Hash, PartialEq)] +pub struct ContentHash(blake3::Hash); + +impl ContentHash { + /// Create a new [`ContentHash`] from a byte array. + #[must_use] + pub fn new(input: &[u8]) -> Self { + Self(blake3::hash(input)) + } + + /// Provide a [`hasher`](ContentHasher) to create [`hashes`](ContentHash). 
+ pub fn hasher() -> ContentHasher { + ContentHasher::new() + } +} + +impl From<&Value> for ContentHash { + fn from(value: &Value) -> Self { + let input = value.to_string(); + Self::new(input.as_bytes()) + } +} + +impl From<&str> for ContentHash { + fn from(input: &str) -> Self { + Self::new(input.as_bytes()) + } +} + +impl Default for ContentHash { + fn default() -> Self { + Self::new("".as_bytes()) + } +} + +impl fmt::Debug for ContentHash { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + write!(f, "ContentHash({})", self.0) + } +} + +impl fmt::Display for ContentHash { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + self.0.fmt(f) + } +} + +impl Serialize for ContentHash { + fn serialize(&self, serializer: S) -> Result + where + S: serde::Serializer, + { + serializer.serialize_str(&self.to_string()) + } +} + +struct ContentHashVisitor; + +impl<'de> Visitor<'de> for ContentHashVisitor { + type Value = ContentHash; + + fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + formatter.write_str("a blake3 hash string") + } + + fn visit_str(self, v: &str) -> Result + where + E: de::Error, + { + ContentHash::from_str(v).map_err(|e| E::custom(e.to_string())) + } +} + +impl<'de> Deserialize<'de> for ContentHash { + fn deserialize(deserializer: D) -> Result + where + D: serde::Deserializer<'de>, + { + deserializer.deserialize_str(ContentHashVisitor) + } +} + +#[derive(Debug, Error)] +#[error("failed to parse hash hex string")] +pub struct ContentHashParseError(#[from] blake3::HexError); + +impl FromStr for ContentHash { + type Err = ContentHashParseError; + + fn from_str(s: &str) -> Result { + Ok(Self(blake3::Hash::from_str(s)?)) + } +} + +#[derive(Debug, Default)] +pub struct ContentHasher(blake3::Hasher); + +impl ContentHasher { + pub fn new() -> Self { + ContentHasher(blake3::Hasher::new()) + } + + pub fn update(&mut self, input: &[u8]) { + self.0.update(input); + } + + pub fn finalize(&self) -> ContentHash { + 
ContentHash(self.0.finalize()) + } +} + +impl ToSql for ContentHash { + fn to_sql( + &self, + ty: &postgres_types::Type, + out: &mut BytesMut, + ) -> Result> + where + Self: Sized, + { + let self_string = self.to_string(); + + self_string.to_sql(ty, out) + } + + fn accepts(ty: &postgres_types::Type) -> bool + where + Self: Sized, + { + String::accepts(ty) + } + + fn to_sql_checked( + &self, + ty: &postgres_types::Type, + out: &mut BytesMut, + ) -> Result> { + let self_string = self.to_string(); + self_string.to_sql_checked(ty, out) + } +} diff --git a/lib/content-store/src/lib.rs b/lib/content-store/src/lib.rs new file mode 100644 index 0000000000..c4af35d573 --- /dev/null +++ b/lib/content-store/src/lib.rs @@ -0,0 +1,36 @@ +//! This crate provides the ability to interface with content stores of varying kinds as well as +//! the ability to generate hashes for hashable content blobs. + +#![warn( + missing_debug_implementations, + missing_docs, + unreachable_pub, + bad_style, + dead_code, + improper_ctypes, + non_shorthand_field_patterns, + no_mangle_generic_items, + overflowing_literals, + path_statements, + patterns_in_fns_without_body, + unconditional_recursion, + unused, + unused_allocation, + unused_comparisons, + unused_parens, + while_true, + clippy::missing_panics_doc +)] + +mod hash; +mod pair; +mod store; +mod value; + +pub use hash::ContentHash; +pub use store::local::LocalStore; +pub use store::pg::tools::PgStoreTools; +pub use store::pg::PgStore; +pub use store::Store; +pub use store::{StoreError, StoreResult}; +pub use value::Value; diff --git a/lib/content-store/src/pair.rs b/lib/content-store/src/pair.rs new file mode 100644 index 0000000000..4f577732e1 --- /dev/null +++ b/lib/content-store/src/pair.rs @@ -0,0 +1,108 @@ +use chrono::{DateTime, Utc}; +use serde::{Deserialize, Serialize}; +use si_data_pg::{PgError, PgPool, PgPoolError, PgRow}; +use std::str::FromStr; +use telemetry::prelude::*; +use thiserror::Error; + +use crate::hash::{ContentHash, 
ContentHashParseError}; + +#[remain::sorted] +#[derive(Error, Debug)] +pub enum ContentPairError { + #[error("content hash parse error: {0}")] + ContentHashParse(#[from] ContentHashParseError), + #[error("pg error: {0}")] + Pg(#[from] PgError), + #[error("pg pool error: {0}")] + PgPool(#[from] PgPoolError), +} + +pub(crate) type ContentPairResult = Result; + +#[derive(Debug, Serialize, Deserialize)] +pub(crate) struct ContentPair { + key: String, + created_at: DateTime, + value: Vec, +} + +impl TryFrom for ContentPair { + type Error = ContentPairError; + + fn try_from(row: PgRow) -> Result { + Ok(Self { + key: row.try_get("key")?, + created_at: row.try_get("created_at")?, + value: row.try_get("value")?, + }) + } +} + +impl ContentPair { + #[instrument(name = "content_store.content_pair.new", level = "debug", skip_all)] + pub(crate) async fn new( + pg_pool: &PgPool, + key: ContentHash, + value: &[u8], + ) -> ContentPairResult<()> { + let client = pg_pool.get().await?; + client + .query( + "INSERT INTO content_pairs (key, value) VALUES ($1, $2) ON CONFLICT DO NOTHING", + &[&key.to_string(), &value], + ) + .await?; + Ok(()) + } + + pub(crate) fn value(&self) -> &[u8] { + &self.value + } + + pub(crate) fn key(&self) -> ContentPairResult { + Ok(ContentHash::from_str(self.key.as_str())?) 
+ } + + pub(crate) async fn find( + pg_pool: &PgPool, + key: &ContentHash, + ) -> ContentPairResult> { + let client = pg_pool.get().await?; + let maybe_row = client + .query_opt( + "SELECT * FROM content_pairs WHERE key = $1 LIMIT 1", + &[&key.to_string()], + ) + .await?; + match maybe_row { + Some(row) => Ok(Some(Self::try_from(row)?)), + None => Ok(None), + } + } + + pub(crate) async fn find_many( + pg_pool: &PgPool, + keys: &[ContentHash], + ) -> ContentPairResult> { + let mut result = vec![]; + let client = pg_pool.get().await?; + + let key_strings: Vec = keys.iter().map(|k| k.to_string()).collect(); + let key_string_refs: Vec<&String> = key_strings.iter().collect(); + + let rows = client + .query( + "SELECT * FROM content_pairs WHERE key = any($1)", + &[&key_string_refs], + ) + .await?; + + for row in rows { + let pair = Self::try_from(row)?; + result.push(pair); + } + + Ok(result) + } +} diff --git a/lib/content-store/src/store.rs b/lib/content-store/src/store.rs new file mode 100644 index 0000000000..69f39b5654 --- /dev/null +++ b/lib/content-store/src/store.rs @@ -0,0 +1,62 @@ +use serde::de::DeserializeOwned; +use serde::Serialize; +use si_data_pg::{PgError, PgPoolError}; +use std::collections::HashMap; +use thiserror::Error; + +use crate::hash::ContentHash; +use crate::pair::ContentPairError; + +pub(crate) mod local; +pub(crate) mod pg; + +#[allow(missing_docs)] +#[remain::sorted] +#[derive(Error, Debug)] +pub enum StoreError { + #[error("content pair error: {0}")] + ContentPair(#[from] ContentPairError), + #[error("pg error: {0}")] + Pg(#[from] PgError), + #[error("pg pool error: {0}")] + PgPool(#[from] PgPoolError), + #[error("postcard error: {0}")] + Postcard(#[from] postcard::Error), + #[error("serde json error: {0}")] + SerdeJson(#[from] serde_json::Error), +} + +/// The [`Result`] type used by the [`Store`] trait methods +pub type StoreResult = Result; + +/// This trait provides the minimum methods needed to create a content store. 
+#[async_trait::async_trait] +pub trait Store { + /// Indicates whether or not the store is empty. + fn is_empty(&self) -> bool; + + /// Indicates the number of keys in the store. + fn len(&self) -> usize; + + /// Adds an item to the store. + fn add(&mut self, object: &T) -> StoreResult + where + T: Serialize + ?Sized; + + /// Gets an item from the store. + /// + /// Implementers of this trait may want to consider a "pull-through cache" implementation for + /// this method. + async fn get(&mut self, key: &ContentHash) -> StoreResult> + where + T: DeserializeOwned; + + /// Gets multiple items from the store + /// + async fn get_bulk(&mut self, keys: &[ContentHash]) -> StoreResult> + where + T: DeserializeOwned + std::marker::Send; + + /// Writes out content in the store to a persistent storage layer, if applicable. + async fn write(&mut self) -> StoreResult<()>; +} diff --git a/lib/content-store/src/store/local.rs b/lib/content-store/src/store/local.rs new file mode 100644 index 0000000000..0a2ed462e2 --- /dev/null +++ b/lib/content-store/src/store/local.rs @@ -0,0 +1,70 @@ +use crate::hash::ContentHash; +use crate::store::{Store, StoreResult}; +use serde::de::DeserializeOwned; +use serde::Serialize; +use std::collections::HashMap; +use telemetry::prelude::*; + +/// A kind of content store that operates entirely in memory. 
+#[derive(Default, Debug)] +pub struct LocalStore(HashMap>); + +#[async_trait::async_trait] +impl Store for LocalStore { + fn is_empty(&self) -> bool { + self.0.is_empty() + } + + fn len(&self) -> usize { + self.0.len() + } + + fn add(&mut self, object: &T) -> StoreResult + where + T: Serialize + ?Sized, + { + let value = serde_json::to_vec(object)?; + let key = ContentHash::new(&value); + self.0.insert(key, value); + Ok(key) + } + + async fn get(&mut self, key: &ContentHash) -> StoreResult> + where + T: DeserializeOwned, + { + let maybe_object = match self.0.get(key) { + Some(value) => Some(serde_json::from_slice(value)?), + None => None, + }; + Ok(maybe_object) + } + + async fn get_bulk(&mut self, keys: &[ContentHash]) -> StoreResult> + where + T: DeserializeOwned + std::marker::Send, + { + Ok(keys + .iter() + .filter_map(|key| match self.0.get(key) { + None => None, + Some(item) => match serde_json::from_slice(item) { + Ok(deserialized) => Some((key.to_owned(), deserialized)), + Err(err) => { + error!( + "Could not deserialize item {} in content store: {}", + key, + err.to_string() + ); + None + } + }, + }) + .collect()) + } + + /// This a "no-op" for the [`LocalStore`] since everything is handled in memory. + async fn write(&mut self) -> StoreResult<()> { + Ok(()) + } +} diff --git a/lib/content-store/src/store/pg.rs b/lib/content-store/src/store/pg.rs new file mode 100644 index 0000000000..7302d7fae5 --- /dev/null +++ b/lib/content-store/src/store/pg.rs @@ -0,0 +1,139 @@ +use serde::de::DeserializeOwned; +use serde::Serialize; +use si_data_pg::PgPool; +use std::collections::HashMap; + +use crate::hash::ContentHash; +use crate::pair::ContentPair; +use crate::store::{Store, StoreResult}; +use crate::PgStoreTools; + +pub(crate) mod tools; + +/// A content store backed by Postgres. 
+#[derive(Debug, Clone)] +pub struct PgStore { + inner: HashMap, + pg_pool: PgPool, +} + +#[derive(Default, Debug, Clone, Eq, PartialEq)] +struct PgStoreItem { + value: Vec, + written: bool, +} + +impl PgStoreItem { + fn new(value: Vec) -> Self { + Self { + value, + ..Default::default() + } + } +} + +impl PgStore { + /// Create a new [`PgStore`] from a given [`PgPool`]. + pub async fn new(pg_pool: PgPool) -> StoreResult { + Ok(Self { + inner: Default::default(), + pg_pool, + }) + } + + /// Create a new [`PgStore`] from a given [`PgPool`]. + pub async fn new_production() -> StoreResult { + let pg_pool = PgStoreTools::new_production_pg_pool().await?; + Ok(Self { + inner: Default::default(), + pg_pool, + }) + } + + /// Migrate the content store database + pub async fn migrate(pg_pool: &PgPool) -> StoreResult<()> { + PgStoreTools::migrate(pg_pool).await?; + + Ok(()) + } + + /// Access the internal pg_pool + pub fn pg_pool(&self) -> &PgPool { + &self.pg_pool + } +} + +#[async_trait::async_trait] +impl Store for PgStore { + fn is_empty(&self) -> bool { + self.inner.is_empty() + } + + fn len(&self) -> usize { + self.inner.len() + } + + fn add(&mut self, object: &T) -> StoreResult + where + T: Serialize + ?Sized, + { + let value = postcard::to_stdvec(object)?; + let key = ContentHash::new(value.as_slice()); + self.inner.insert(key, PgStoreItem::new(value)); + Ok(key) + } + + async fn get(&mut self, key: &ContentHash) -> StoreResult> + where + T: DeserializeOwned, + { + let object = match self.inner.get(key) { + Some(item) => postcard::from_bytes(&item.value)?, + None => match ContentPair::find(&self.pg_pool, key).await? 
{ + Some(content_pair) => { + let encoded = content_pair.value(); + let decoded = postcard::from_bytes(encoded)?; + self.add(encoded)?; + + decoded + } + None => return Ok(None), + }, + }; + Ok(Some(object)) + } + + async fn get_bulk(&mut self, keys: &[ContentHash]) -> StoreResult> + where + T: DeserializeOwned + std::marker::Send, + { + let mut result = HashMap::new(); + let mut keys_to_fetch = vec![]; + + for key in keys { + match self.inner.get(key) { + Some(item) => { + result.insert(*key, postcard::from_bytes(&item.value)?); + } + None => keys_to_fetch.push(*key), + } + } + + for pair in ContentPair::find_many(&self.pg_pool, keys_to_fetch.as_slice()).await? { + let encoded = pair.value(); + result.insert(pair.key()?, postcard::from_bytes(encoded)?); + self.add(encoded)?; + } + Ok(result) + } + + async fn write(&mut self) -> StoreResult<()> { + for (key, item) in self.inner.iter_mut() { + if !item.written { + ContentPair::new(&self.pg_pool, key.to_owned(), &item.value).await?; + item.written = true; + } + } + Ok(()) + } +} diff --git a/lib/content-store/src/store/pg/migrations/U0001__content_pairs.sql b/lib/content-store/src/store/pg/migrations/U0001__content_pairs.sql new file mode 100644 index 0000000000..309657efbd --- /dev/null +++ b/lib/content-store/src/store/pg/migrations/U0001__content_pairs.sql @@ -0,0 +1,6 @@ +CREATE TABLE content_pairs +( + key text primary key NOT NULL, + created_at timestamp with time zone NOT NULL DEFAULT CLOCK_TIMESTAMP(), + value bytea NOT NULL +); diff --git a/lib/content-store/src/store/pg/tools.rs b/lib/content-store/src/store/pg/tools.rs new file mode 100644 index 0000000000..0a548a92db --- /dev/null +++ b/lib/content-store/src/store/pg/tools.rs @@ -0,0 +1,39 @@ +use si_data_pg::{PgPool, PgPoolConfig, PgPoolError}; +use telemetry::prelude::*; + +mod embedded { + use refinery::embed_migrations; + + embed_migrations!("./src/store/pg/migrations"); +} + +const DBNAME: &str = "si_content_store"; +const APPLICATION_NAME: &str = 
"si-content-store"; + +/// A unit struct that provides helpers for performing [`PgStore`] migrations. +#[allow(missing_debug_implementations)] +pub struct PgStoreTools; + +impl PgStoreTools { + /// Create a new [`PgPool`] for a production [`PgStore`]. + pub async fn new_production_pg_pool() -> Result { + let pg_pool_config = Self::default_pool_config(); + let pg_pool = PgPool::new(&pg_pool_config).await?; + Ok(pg_pool) + } + + /// The default pool configuration for the PgStore + pub fn default_pool_config() -> PgPoolConfig { + PgPoolConfig { + dbname: DBNAME.to_string(), + application_name: APPLICATION_NAME.to_string(), + ..Default::default() + } + } + + /// Perform migrations for the database. + #[instrument(skip_all)] + pub async fn migrate(pg_pool: &PgPool) -> Result<(), PgPoolError> { + pg_pool.migrate(embedded::migrations::runner()).await + } +} diff --git a/lib/content-store/src/value.rs b/lib/content-store/src/value.rs new file mode 100644 index 0000000000..54003b1fb9 --- /dev/null +++ b/lib/content-store/src/value.rs @@ -0,0 +1,94 @@ +use std::collections::BTreeMap; + +#[derive(Debug, PartialEq, serde::Serialize, serde::Deserialize, Clone)] +pub enum ValueNumber { + U64(u64), + I64(i64), + F64(f64), +} + +/// A type that can be converted to and from serde_json::Value types infallibly, +/// *so long as* arbitrary precision arithmetic is not enabled for serde_json. +/// This is necessary because postcard will *not* deserialize serde_json's `Number` +/// type, but we still want to store arbitrary payloads in our content store. +/// The alternative is to serialize the value to a string and then serialize +/// that string with postcard. +#[derive(Debug, PartialEq, serde::Serialize, serde::Deserialize, Clone)] +#[remain::sorted] +pub enum Value { + /// An array of values + Array(Vec), + /// A boolean scalar + Bool(bool), + /// A null value + Null, + /// A Number value. 
JSON numbers are either double precision IEEE floating point values, or + /// they in some implementations can be BigInt values. However, we're currently only going to + /// support double precision floats and 64 bit integers. If arbitrary precision integers are + /// enabled for serde_json, this *will* cause a panic. + Number(ValueNumber), + /// An object. BTreeMap is the internal representation used by serde_json for objects, + /// *unless* order preservation is enabled. If order preservation is enabled, we will + /// lose that ordering information in the conversion to/from `serde_json::Value``. + Object(BTreeMap), + /// A string scalar value + String(String), +} + +// todo: make this non-recursive for maps and arrays +impl From for Value { + fn from(value: serde_json::Value) -> Self { + match value { + serde_json::Value::Null => Self::Null, + serde_json::Value::Bool(b) => Self::Bool(b), + serde_json::Value::Number(n) => Value::Number(if n.is_u64() { + ValueNumber::U64( + n.as_u64() + .expect("serde_json said it was a u64 but refused to give me one"), + ) + } else if n.is_i64() { + ValueNumber::I64( + n.as_i64() + .expect("serde_json said it was an i64 but refused to give me one"), + ) + } else if n.is_f64() { + ValueNumber::F64( + n.as_f64() + .expect("serde_json said it was an f64 but refused to give me one"), + ) + } else { + panic!("the arbitrary_precision feature of serde_json is not supported"); + }), + serde_json::Value::Array(mut a) => Self::Array(a.drain(..).map(|e| e.into()).collect()), + serde_json::Value::String(s) => Self::String(s), + // Can we avoid these clones? 
+ serde_json::Value::Object(map) => Self::Object( + map.iter() + .map(|(k, v)| (k.to_owned(), v.to_owned().into())) + .collect(), + ), + } + } +} + +impl From for serde_json::Value { + fn from(value: Value) -> Self { + match value { + Value::Null => serde_json::Value::Null, + Value::Bool(b) => serde_json::Value::Bool(b), + Value::Array(mut a) => serde_json::Value::Array(a.drain(..).map(Into::into).collect()), + Value::Number(n) => serde_json::Value::Number(match n { + ValueNumber::U64(n) => n.into(), + ValueNumber::I64(n) => n.into(), + ValueNumber::F64(n) => serde_json::value::Number::from_f64(n) + .expect("cannot deserialize an infinite or NAN f64 value"), + }), + Value::String(s) => serde_json::Value::String(s), + Value::Object(map) => serde_json::Value::Object( + map.iter() + .map(|(k, v)| (k.to_owned(), v.to_owned().into())) + .collect(), + ), + } + } +} diff --git a/lib/cyclone-server/src/tower.rs b/lib/cyclone-server/src/tower.rs index bcf9d727e4..0c94f625f8 100644 --- a/lib/cyclone-server/src/tower.rs +++ b/lib/cyclone-server/src/tower.rs @@ -1,5 +1,4 @@ -pub use limit_requests::{LimitRequest, LimitRequestLayer}; -pub use web_socket_trace::{WebSocketTrace, WebSocketTraceLayer}; +pub use web_socket_trace::WebSocketTraceLayer; mod limit_requests { use std::{ diff --git a/lib/dal-test/BUCK b/lib/dal-test/BUCK index 13228a2cd5..c5dc92ce64 100644 --- a/lib/dal-test/BUCK +++ b/lib/dal-test/BUCK @@ -4,10 +4,13 @@ rust_library( name = "dal-test", deps = [ "//lib/buck2-resources:buck2-resources", + "//lib/content-store:content-store", "//lib/council-server:council-server", "//lib/dal:dal", "//lib/module-index-client:module-index-client", "//lib/pinga-server:pinga-server", + "//lib/rebaser-client:rebaser-client", + "//lib/rebaser-server:rebaser-server", "//lib/si-crypto:si-crypto", "//lib/si-data-nats:si-data-nats", "//lib/si-data-pg:si-data-pg", diff --git a/lib/dal-test/Cargo.toml b/lib/dal-test/Cargo.toml index 7a59d21518..62d10a7363 100644 --- 
a/lib/dal-test/Cargo.toml +++ b/lib/dal-test/Cargo.toml @@ -7,29 +7,33 @@ publish = false [dependencies] buck2-resources = { path = "../../lib/buck2-resources" } -color-eyre = { workspace = true } +content-store = { path = "../../lib/content-store" } council-server = { path = "../../lib/council-server" } dal = { path = "../../lib/dal" } -derive_builder = { workspace = true } -jwt-simple = { workspace = true } -lazy_static = { workspace = true } module-index-client = { path = "../../lib/module-index-client" } -names = { workspace = true } pinga-server = { path = "../../lib/pinga-server" } -remain = { workspace = true } -serde = { workspace = true } -serde_json = { workspace = true } +rebaser-client = { path = "../../lib/rebaser-client"} +rebaser-server = { path = "../../lib/rebaser-server" } si-crypto = { path = "../../lib/si-crypto" } si-data-nats = { path = "../../lib/si-data-nats" } si-data-pg = { path = "../../lib/si-data-pg" } si-std = { path = "../../lib/si-std" } si-test-macros = { path = "../../lib/si-test-macros" } -sodiumoxide = { workspace = true } telemetry = { path = "../../lib/telemetry-rs" } +veritech-client = { path = "../../lib/veritech-client" } +veritech-server = { path = "../../lib/veritech-server" } + +color-eyre = { workspace = true } +derive_builder = { workspace = true } +jwt-simple = { workspace = true } +lazy_static = { workspace = true } +names = { workspace = true } +remain = { workspace = true } +serde = { workspace = true } +serde_json = { workspace = true } +sodiumoxide = { workspace = true } tempfile = { workspace = true } thiserror = { workspace = true } tokio = { workspace = true } tracing-subscriber = { workspace = true } uuid = { workspace = true } -veritech-client = { path = "../../lib/veritech-client" } -veritech-server = { path = "../../lib/veritech-server" } diff --git a/lib/dal-test/src/helpers.rs b/lib/dal-test/src/helpers.rs index 0f4ee35049..0dd97ef1e8 100644 --- a/lib/dal-test/src/helpers.rs +++ 
b/lib/dal-test/src/helpers.rs @@ -1,20 +1,14 @@ use color_eyre::Result; -use dal::{ - func::{ - argument::{FuncArgument, FuncArgumentId}, - binding::FuncBindingId, - binding_return_value::FuncBindingReturnValueId, - }, - ChangeSet, DalContext, Func, FuncBinding, FuncId, HistoryActor, StandardModel, User, UserClaim, - UserPk, Visibility, Workspace, WorkspaceSignup, -}; +use dal::change_set_pointer::{ChangeSetPointer, ChangeSetPointerId}; +use dal::{DalContext, UserClaim}; use jwt_simple::algorithms::RSAKeyPairLike; use jwt_simple::{claims::Claims, reexports::coarsetime::Duration}; use names::{Generator, Name}; use crate::jwt_private_signing_key; +use crate::signup::WorkspaceSignup; -pub mod component_bag; +// pub mod component_bag; pub fn generate_fake_name() -> String { Generator::with_naming(Name::Numbered).next().unwrap() @@ -41,7 +35,7 @@ pub async fn workspace_signup(ctx: &DalContext) -> Result<(WorkspaceSignup, Stri let user_name = format!("frank {workspace_name}"); let user_email = format!("{workspace_name}@example.com"); - let nw = Workspace::signup(&mut ctx, &workspace_name, &user_name, &user_email) + let nw = WorkspaceSignup::new(&mut ctx, &workspace_name, &user_name, &user_email) .await .wrap_err("cannot signup a new workspace")?; let auth_token = create_auth_token(UserClaim { @@ -52,77 +46,79 @@ pub async fn workspace_signup(ctx: &DalContext) -> Result<(WorkspaceSignup, Stri Ok((nw, auth_token)) } -pub async fn create_user(ctx: &DalContext) -> User { - let name = generate_fake_name(); - User::new( - ctx, - UserPk::generate(), - &name, - &format!("{name}@test.systeminit.com"), - None::<&str>, - ) - .await - .expect("cannot create user") -} - -pub async fn create_change_set(ctx: &DalContext) -> ChangeSet { - let name = generate_fake_name(); - ChangeSet::new(ctx, &name, None) - .await - .expect("cannot create change_set") -} - -pub fn create_visibility_for_change_set(change_set: &ChangeSet) -> Visibility { - Visibility::new(change_set.pk, None) -} - -/// 
Creates a new [`Visibility`] backed by a new [`ChangeSet`] -pub async fn create_visibility_for_new_change_set(ctx: &DalContext) -> Visibility { - let _history_actor = HistoryActor::SystemInit; - let change_set = create_change_set(ctx).await; - - create_visibility_for_change_set(&change_set) -} - -pub async fn create_change_set_and_update_ctx(ctx: &mut DalContext) { - let visibility = create_visibility_for_new_change_set(ctx).await; - ctx.update_visibility(visibility); -} - -/// Get the "si:identity" [`Func`] and execute (if necessary). -pub async fn setup_identity_func( - ctx: &DalContext, -) -> ( - FuncId, - FuncBindingId, - FuncBindingReturnValueId, - FuncArgumentId, +// pub async fn create_user(ctx: &DalContext) -> User { +// let name = generate_fake_name(); +// User::new( +// ctx, +// UserPk::generate(), +// &name, +// &format!("{name}@test.systeminit.com"), +// None::<&str>, +// ) +// .await +// .expect("cannot create user") +// } +// + +pub async fn create_change_set_and_update_ctx( + ctx: &mut DalContext, + base_change_set_id: ChangeSetPointerId, ) { - let identity_func: Func = Func::find_by_attr(ctx, "name", &"si:identity".to_string()) + let base_change_set = ChangeSetPointer::find(ctx, base_change_set_id) .await - .expect("could not find identity func by name attr") - .pop() - .expect("identity func not found"); - - let identity_func_identity_arg = FuncArgument::list_for_func(ctx, *identity_func.id()) + .expect("could not perform find change set") + .expect("no change set found"); + let mut change_set = ChangeSetPointer::new(ctx, generate_fake_name(), Some(base_change_set_id)) .await - .expect("cannot list identity func args") - .pop() - .expect("cannot find identity func identity arg"); - - let (identity_func_binding, identity_func_binding_return_value) = - FuncBinding::create_and_execute( + .expect("could not create change set pointer"); + change_set + .update_pointer( ctx, - serde_json::json![{ "identity": null }], - *identity_func.id(), - vec![], + 
base_change_set + .workspace_snapshot_id + .expect("no workspace snapshot set on base change set"), ) .await - .expect("could not find or create identity func binding"); - ( - *identity_func.id(), - *identity_func_binding.id(), - *identity_func_binding_return_value.id(), - *identity_func_identity_arg.id(), - ) + .expect("could not update pointer"); + ctx.update_visibility_v2(&change_set); + ctx.update_snapshot_to_visibility() + .await + .expect("could not update snapshot to visibility"); } + +// /// Get the "si:identity" [`Func`] and execute (if necessary). +// pub async fn setup_identity_func( +// ctx: &DalContext, +// ) -> ( +// FuncId, +// FuncBindingId, +// FuncBindingReturnValueId, +// FuncArgumentId, +// ) { +// let identity_func: Func = Func::find_by_attr(ctx, "name", &"si:identity".to_string()) +// .await +// .expect("could not find identity func by name attr") +// .pop() +// .expect("identity func not found"); +// +// let identity_func_identity_arg = FuncArgument::list_for_func(ctx, *identity_func.id()) +// .await +// .expect("cannot list identity func args") +// .pop() +// .expect("cannot find identity func identity arg"); +// +// let (identity_func_binding, identity_func_binding_return_value) = +// FuncBinding::create_and_execute( +// ctx, +// serde_json::json![{ "identity": null }], +// *identity_func.id(), +// ) +// .await +// .expect("could not find or create identity func binding"); +// ( +// *identity_func.id(), +// *identity_func_binding.id(), +// *identity_func_binding_return_value.id(), +// *identity_func_identity_arg.id(), +// ) +// } diff --git a/lib/dal-test/src/lib.rs b/lib/dal-test/src/lib.rs index b65a7ee094..06b2f28616 100644 --- a/lib/dal-test/src/lib.rs +++ b/lib/dal-test/src/lib.rs @@ -9,6 +9,7 @@ use std::{ }; use buck2_resources::Buck2Resources; +use content_store::PgStoreTools; use dal::{ builtins::SelectedTestBuiltinSchemas, job::processor::{JobQueueProcessor, NatsProcessor}, @@ -17,6 +18,7 @@ use dal::{ use derive_builder::Builder; 
use jwt_simple::prelude::RS256KeyPair; use lazy_static::lazy_static; +use rebaser_client::Config as RebaserClientConfig; use si_crypto::{ SymmetricCryptoService, SymmetricCryptoServiceConfig, SymmetricCryptoServiceConfigFile, }; @@ -34,10 +36,12 @@ pub use color_eyre::{ eyre::{eyre, Result, WrapErr}, }; pub use si_test_macros::{dal_test as test, sdf_test}; +pub use signup::WorkspaceSignup; pub use telemetry; pub use tracing_subscriber; pub mod helpers; +mod signup; pub mod test_harness; const DEFAULT_TEST_PG_USER: &str = "si_test"; @@ -47,6 +51,7 @@ const ENV_VAR_NATS_URL: &str = "SI_TEST_NATS_URL"; const ENV_VAR_MODULE_INDEX_URL: &str = "SI_TEST_MODULE_INDEX_URL"; const ENV_VAR_PG_HOSTNAME: &str = "SI_TEST_PG_HOSTNAME"; const ENV_VAR_PG_DBNAME: &str = "SI_TEST_PG_DBNAME"; +const ENV_VAR_CONTENT_STORE_PG_DBNAME: &str = "SI_TEST_CONTENT_STORE_PG_DBNAME"; const ENV_VAR_PG_USER: &str = "SI_TEST_PG_USER"; const ENV_VAR_PG_PORT: &str = "SI_TEST_PG_PORT"; const ENV_VAR_BUILTIN_SCHEMAS: &str = "SI_TEST_BUILTIN_SCHEMAS"; @@ -99,12 +104,21 @@ pub struct Config { #[builder(default)] pkgs_path: Option, symmetric_crypto_service_config: SymmetricCryptoServiceConfig, + // TODO(nick): determine why this is unused. 
+ #[allow(dead_code)] + #[builder(default)] + rebaser_config: RebaserClientConfig, + #[builder(default = "PgStoreTools::default_pool_config()")] + content_store_pg_pool: PgPoolConfig, } impl Config { #[allow(clippy::disallowed_methods)] // Environment variables are used exclusively in test and // all are prefixed with `SI_TEST_` - fn create_default(pg_dbname: &'static str) -> Result { + fn create_default( + pg_dbname: &'static str, + content_store_pg_dbname: &'static str, + ) -> Result { let mut config = { let mut builder = ConfigBuilder::default(); detect_and_configure_testing(&mut builder)?; @@ -127,6 +141,20 @@ impl Config { config.pg.pool_max_size *= 32; config.pg.certificate_path = Some(config.postgres_key_path.clone().try_into()?); + if let Ok(value) = env::var(ENV_VAR_PG_HOSTNAME) { + config.content_store_pg_pool.hostname = value; + } + config.content_store_pg_pool.dbname = env::var(ENV_VAR_CONTENT_STORE_PG_DBNAME) + .unwrap_or_else(|_| content_store_pg_dbname.to_string()); + config.content_store_pg_pool.user = + env::var(ENV_VAR_PG_USER).unwrap_or_else(|_| DEFAULT_TEST_PG_USER.to_string()); + config.content_store_pg_pool.port = env::var(ENV_VAR_PG_PORT) + .unwrap_or_else(|_| DEFAULT_TEST_PG_PORT_STR.to_string()) + .parse()?; + config.content_store_pg_pool.pool_max_size *= 32; + config.content_store_pg_pool.certificate_path = + Some(config.postgres_key_path.clone().try_into()?); + if let Ok(value) = env::var(ENV_VAR_MODULE_INDEX_URL) { config.module_index_url = value; } @@ -184,6 +212,12 @@ pub struct TestContext { encryption_key: Arc, /// A service that can encrypt values based on the loaded donkeys symmetric_crypto_service: SymmetricCryptoService, + /// The pg_pool used by the content-addressable [`store`](content_store::Store) used by the + /// "dal". 
+ content_store_pg_pool: PgPool, + + /// The configuration for the rebaser client used in tests + rebaser_config: RebaserClientConfig, } impl TestContext { @@ -194,14 +228,18 @@ impl TestContext { /// /// This functions wraps over a mutex which ensures that only the first caller will run global /// database creation, migrations, and other preparations. - pub async fn global(pg_dbname: &'static str) -> Result { + pub async fn global( + pg_dbname: &'static str, + content_store_pg_dbname: &'static str, + ) -> Result { let mut mutex_guard = TEST_CONTEXT_BUILDER.lock().await; match &*mutex_guard { ContextBuilderState::Uninitialized => { - let config = Config::create_default(pg_dbname).si_inspect_err(|err| { - *mutex_guard = ContextBuilderState::errored(err.to_string()) - })?; + let config = Config::create_default(pg_dbname, content_store_pg_dbname) + .si_inspect_err(|err| { + *mutex_guard = ContextBuilderState::errored(err.to_string()) + })?; let test_context_builder = TestContextBuilder::create(config) .await .si_inspect_err(|err| { @@ -258,6 +296,8 @@ impl TestContext { self.config.pkgs_path.to_owned(), None, self.symmetric_crypto_service.clone(), + self.rebaser_config.clone(), + self.content_store_pg_pool.clone(), ) } @@ -301,25 +341,38 @@ impl TestContextBuilder { let pg_pool = PgPool::new(&self.config.pg) .await .wrap_err("failed to create global setup PgPool")?; + let content_store_pool = PgPool::new(&self.config.content_store_pg_pool) + .await + .wrap_err("failed to create global setup content store PgPool")?; - self.build_inner(pg_pool).await + self.build_inner(pg_pool, content_store_pool).await } /// Builds and returns a new [`TestContext`] with its own connection pooling for each test. 
async fn build_for_test(&self) -> Result { - let pg_pool = self.create_test_specific_db_with_pg_pool().await?; + let pg_pool = self + .create_test_specific_db_with_pg_pool(&self.config.pg) + .await?; + let content_store_pg_pool = self + .create_test_specific_db_with_pg_pool(&self.config.content_store_pg_pool) + .await?; - self.build_inner(pg_pool).await + self.build_inner(pg_pool, content_store_pg_pool).await } - async fn build_inner(&self, pg_pool: PgPool) -> Result { + async fn build_inner( + &self, + pg_pool: PgPool, + content_store_pg_pool: PgPool, + ) -> Result { + let universal_prefix = random_identifier_string(); + // Need to make a new NatsConfig so that we can add the test-specific subject prefix // without leaking it to other tests. let mut nats_config = self.config.nats.clone(); - let nats_subject_prefix = random_identifier_string(); - nats_config.subject_prefix = Some(nats_subject_prefix.clone()); + nats_config.subject_prefix = Some(universal_prefix.clone()); let mut config = self.config.clone(); - config.nats.subject_prefix = Some(nats_subject_prefix); + config.nats.subject_prefix = Some(universal_prefix.clone()); let nats_conn = NatsClient::new(&nats_config) .await @@ -331,6 +384,9 @@ impl TestContextBuilder { SymmetricCryptoService::from_config(&self.config.symmetric_crypto_service_config) .await?; + let mut rebaser_config = RebaserClientConfig::default(); + rebaser_config.set_subject_prefix(universal_prefix); + Ok(TestContext { config, pg_pool, @@ -338,12 +394,17 @@ impl TestContextBuilder { job_processor, encryption_key: self.encryption_key.clone(), symmetric_crypto_service, + rebaser_config, + content_store_pg_pool, }) } - async fn create_test_specific_db_with_pg_pool(&self) -> Result { + async fn create_test_specific_db_with_pg_pool( + &self, + pg_pool_config: &PgPoolConfig, + ) -> Result { // Connect to the 'postgres' database so we can copy our migrated template test database - let mut new_pg_pool_config = self.config.pg.clone(); + let mut 
new_pg_pool_config = pg_pool_config.clone(); new_pg_pool_config.dbname = "postgres".to_string(); let new_pg_pool = PgPool::new(&new_pg_pool_config) .await @@ -355,10 +416,10 @@ impl TestContextBuilder { // Create new database from template let db_name_suffix = random_identifier_string(); - let dbname = format!("{}_{}", self.config.pg.dbname, db_name_suffix); + let dbname = format!("{}_{}", pg_pool_config.dbname, db_name_suffix); let query = format!( "CREATE DATABASE {dbname} WITH TEMPLATE {} OWNER {};", - self.config.pg.dbname, self.config.pg.user, + pg_pool_config.dbname, pg_pool_config.user, ); let db_exists_check = db_conn .query_opt( @@ -463,6 +524,33 @@ pub fn pinga_server(services_context: &ServicesContext) -> Result Result { + let _config: rebaser_server::Config = { + let mut config_file = rebaser_server::ConfigFile::default(); + rebaser_server::detect_and_configure_development(&mut config_file) + .wrap_err("failed to detect and configure Rebaser ConfigFile")?; + config_file + .try_into() + .wrap_err("failed to build Rebaser server config")? + }; + + let server = rebaser_server::Server::from_services( + services_context.encryption_key(), + services_context.nats_conn().clone(), + services_context.pg_pool().clone(), + services_context.veritech().clone(), + services_context.job_processor(), + services_context.symmetric_crypto_service().clone(), + services_context.rebaser_config().clone(), + services_context.content_store_pg_pool().clone(), + ) + .wrap_err("failed to create Rebaser server")?; + + Ok(server) +} + /// Configures and builds a [`veritech_server::Server`] suitable for running alongside DAL /// object-related tests. 
pub async fn veritech_server_for_uds_cyclone( @@ -515,12 +603,20 @@ async fn global_setup(test_context_builer: TestContextBuilder) -> Result<()> { let pinga_server_handle = pinga_server.shutdown_handle(); tokio::spawn(pinga_server.run()); + // Start up a Rebaser server for migrations + info!("starting Rebaser server for initial migrations"); + let rebaser_server = rebaser_server(&services_ctx)?; + let rebaser_server_handle = rebaser_server.shutdown_handle(); + tokio::spawn(rebaser_server.run()); + // Start up a Veritech server as a task exclusively to allow the migrations to run info!("starting Veritech server for initial migrations"); let veritech_server = veritech_server_for_uds_cyclone(test_context.config.nats.clone()).await?; let veritech_server_handle = veritech_server.shutdown_handle(); tokio::spawn(veritech_server.run()); + info!("creating client with pg pool for global Content Store test database"); + info!("testing database connection"); services_ctx .pg_pool() @@ -528,13 +624,25 @@ async fn global_setup(test_context_builer: TestContextBuilder) -> Result<()> { .await .wrap_err("failed to connect to database, is it running and available?")?; + info!("testing global content store database connection"); + services_ctx + .content_store_pg_pool() + .test_connection() + .await + .wrap_err("failed to connect to content store database, is it running and available?")?; + #[allow(clippy::disallowed_methods)] // Environment variables are used exclusively in test and // all are prefixed with `SI_TEST_` if !env::var(ENV_VAR_KEEP_OLD_DBS).is_ok_and(|v| !v.is_empty()) { - info!("dropping old test-specific databases"); + info!("dropping old test-specific databases for dal"); drop_old_test_databases(services_ctx.pg_pool()) .await .wrap_err("failed to drop old databases")?; + + info!("dropping old test-specific content store databases"); + drop_old_test_databases(services_ctx.content_store_pg_pool()) + .await + .wrap_err("failed to drop old test-specific content store 
databases")?; } // Ensure the database is totally clean, then run all migrations @@ -544,8 +652,15 @@ async fn global_setup(test_context_builer: TestContextBuilder) -> Result<()> { .drop_and_create_public_schema() .await .wrap_err("failed to drop and create the database")?; + + services_ctx + .content_store_pg_pool() + .drop_and_create_public_schema() + .await + .wrap_err("failed to drop and create content store database")?; + info!("running database migrations"); - dal::migrate(services_ctx.pg_pool()) + dal::migrate(services_ctx.pg_pool(), services_ctx.content_store_pg_pool()) .await .wrap_err("failed to migrate database")?; @@ -569,6 +684,8 @@ async fn global_setup(test_context_builer: TestContextBuilder) -> Result<()> { .expect("no pkgs path configured"), test_context.config.module_index_url.clone(), services_ctx.symmetric_crypto_service(), + services_ctx.rebaser_config().clone(), + services_ctx.content_store_pg_pool(), ) .await .wrap_err("failed to run builtin migrations")?; @@ -578,6 +695,11 @@ async fn global_setup(test_context_builer: TestContextBuilder) -> Result<()> { info!("shutting down initial migrations Pinga server"); pinga_server_handle.shutdown().await; + // Shutdown the Rebaser server (each test gets their own server instance with an exclusively + // unique subject prefix) + info!("shutting down initial migrations Rebaser server"); + rebaser_server_handle.shutdown().await; + // Shutdown the Veritech server (each test gets their own server instance with an exclusively // unique subject prefix) info!("shutting down initial migrations Veritech server"); diff --git a/lib/dal-test/src/signup.rs b/lib/dal-test/src/signup.rs new file mode 100644 index 0000000000..9312e14e58 --- /dev/null +++ b/lib/dal-test/src/signup.rs @@ -0,0 +1,38 @@ +use dal::{DalContext, HistoryActor, KeyPair, User, UserPk, Workspace, WorkspacePk}; +use serde::{Deserialize, Serialize}; + +/// A wrapper for creating [`Workspaces`](Workspace) for integration tests. 
+#[derive(Deserialize, Serialize, Debug, Clone, PartialEq, Eq)] +pub struct WorkspaceSignup { + pub key_pair: KeyPair, + pub user: User, + pub workspace: Workspace, +} + +impl WorkspaceSignup { + pub async fn new( + ctx: &mut DalContext, + workspace_name: impl AsRef, + user_name: impl AsRef, + user_email: impl AsRef, + ) -> color_eyre::Result { + let workspace = Workspace::new(ctx, WorkspacePk::generate(), workspace_name).await?; + let key_pair = KeyPair::new(ctx, "default").await?; + + let user = User::new( + ctx, + UserPk::generate(), + &user_name, + &user_email, + None::<&str>, + ) + .await?; + ctx.update_history_actor(HistoryActor::User(user.pk())); + + Ok(Self { + key_pair, + user, + workspace, + }) + } +} diff --git a/lib/dal-test/src/test_harness.rs b/lib/dal-test/src/test_harness.rs index 86f5b0466c..7f734faca3 100644 --- a/lib/dal-test/src/test_harness.rs +++ b/lib/dal-test/src/test_harness.rs @@ -1,14 +1,8 @@ use dal::{ - component::ComponentKind, func::{binding::FuncBinding, FuncId}, key_pair::KeyPairPk, - node::NodeKind, - schema, - socket::{Socket, SocketArity, SocketEdgeKind, SocketKind}, - ChangeSet, ChangeSetPk, Component, DalContext, DiagramKind, EncryptedSecret, Func, - FuncBackendKind, FuncBackendResponseType, KeyPair, Node, Prop, PropId, PropKind, Schema, - SchemaId, SchemaVariantId, Secret, StandardModel, User, UserPk, Visibility, Workspace, - WorkspacePk, + ChangeSet, ChangeSetPk, DalContext, EncryptedSecret, FuncBackendKind, KeyPair, Secret, User, + UserPk, Visibility, }; use names::{Generator, Name}; @@ -38,13 +32,6 @@ pub fn create_visibility_head() -> Visibility { Visibility::new(ChangeSetPk::NONE, None) } -pub async fn create_workspace(ctx: &mut DalContext) -> Workspace { - let name = generate_fake_name(); - Workspace::new(ctx, WorkspacePk::generate(), &name) - .await - .expect("cannot create workspace") -} - pub async fn create_key_pair(ctx: &DalContext) -> KeyPair { let name = generate_fake_name(); KeyPair::new(ctx, &name) @@ -65,115 
+52,115 @@ pub async fn create_user(ctx: &DalContext) -> User { .expect("cannot create user") } -pub async fn create_schema(ctx: &DalContext) -> Schema { - let name = generate_fake_name(); - Schema::new(ctx, &name, &ComponentKind::Standard) - .await - .expect("cannot create schema") -} - -pub async fn create_schema_variant(ctx: &DalContext, schema_id: SchemaId) -> schema::SchemaVariant { - create_schema_variant_with_root(ctx, schema_id).await.0 -} - -pub async fn create_schema_variant_with_root( - ctx: &DalContext, - schema_id: SchemaId, -) -> (schema::SchemaVariant, schema::RootProp) { - let name = generate_fake_name(); - let (variant, root) = schema::SchemaVariant::new(ctx, schema_id, name) - .await - .expect("cannot create schema variant"); - - let _input_socket = Socket::new( - ctx, - "input", - connection_annotation_string!("input"), - SocketKind::Standalone, - &SocketEdgeKind::ConfigurationInput, - &SocketArity::Many, - &DiagramKind::Configuration, - Some(*variant.id()), - ) - .await - .expect("Unable to create socket"); - - let _output_socket = Socket::new( - ctx, - "output", - connection_annotation_string!("output"), - SocketKind::Standalone, - &SocketEdgeKind::ConfigurationOutput, - &SocketArity::Many, - &DiagramKind::Configuration, - Some(*variant.id()), - ) - .await - .expect("Unable to create socket"); - - (variant, root) -} - -pub async fn create_prop_without_ui_optionals( - ctx: &DalContext, - name: impl AsRef, - kind: PropKind, - schema_variant_id: SchemaVariantId, - parent_prop_id: Option, -) -> Prop { - Prop::new_without_ui_optionals(ctx, name, kind, schema_variant_id, parent_prop_id) - .await - .expect("could not create prop") -} - -pub async fn create_component_and_schema(ctx: &DalContext) -> Component { - let schema = create_schema(ctx).await; - let mut schema_variant = create_schema_variant(ctx, *schema.id()).await; - schema_variant - .finalize(ctx, None) - .await - .expect("unable to finalize schema variant"); - let name = 
generate_fake_name(); - let (component, _) = Component::new(ctx, &name, *schema_variant.id()) - .await - .expect("cannot create component"); - component -} - -pub async fn create_component_for_schema_variant( - ctx: &DalContext, - schema_variant_id: &SchemaVariantId, -) -> Component { - let name = generate_fake_name(); - let (component, _) = Component::new(ctx, &name, *schema_variant_id) - .await - .expect("cannot create component"); - component -} - -pub async fn create_component_for_schema(ctx: &DalContext, schema_id: &SchemaId) -> Component { - let name = generate_fake_name(); - let (component, _) = Component::new_for_default_variant_from_schema(ctx, &name, *schema_id) - .await - .expect("cannot create component"); - component -} - -pub async fn create_node(ctx: &DalContext, node_kind: &NodeKind) -> Node { - Node::new(ctx, node_kind).await.expect("cannot create node") -} - -pub async fn create_func(ctx: &DalContext) -> Func { - let name = generate_fake_name(); - Func::new( - ctx, - name, - FuncBackendKind::String, - FuncBackendResponseType::String, - ) - .await - .expect("cannot create func") -} +// pub async fn create_schema(ctx: &DalContext) -> Schema { +// let name = generate_fake_name(); +// Schema::new(ctx, &name, &ComponentKind::Standard) +// .await +// .expect("cannot create schema") +// } + +// pub async fn create_schema_variant(ctx: &DalContext, schema_id: SchemaId) -> schema::SchemaVariant { +// create_schema_variant_with_root(ctx, schema_id).await.0 +// } + +// pub async fn create_schema_variant_with_root( +// ctx: &DalContext, +// schema_id: SchemaId, +// ) -> (schema::SchemaVariant, schema::RootProp) { +// let name = generate_fake_name(); +// let (variant, root) = schema::SchemaVariant::new(ctx, schema_id, name) +// .await +// .expect("cannot create schema variant"); +// +// let _input_socket = Socket::new( +// ctx, +// "input", +// connection_annotation_string!("input"), +// SocketKind::Standalone, +// &SocketEdgeKind::ConfigurationInput, +// 
&SocketArity::Many, +// &DiagramKind::Configuration, +// Some(*variant.id()), +// ) +// .await +// .expect("Unable to create socket"); +// +// let _output_socket = Socket::new( +// ctx, +// "output", +// connection_annotation_string!("output"), +// SocketKind::Standalone, +// &SocketEdgeKind::ConfigurationOutput, +// &SocketArity::Many, +// &DiagramKind::Configuration, +// Some(*variant.id()), +// ) +// .await +// .expect("Unable to create socket"); +// +// (variant, root) +// } + +// pub async fn create_prop_without_ui_optionals( +// ctx: &DalContext, +// name: impl AsRef, +// kind: PropKind, +// schema_variant_id: SchemaVariantId, +// parent_prop_id: Option, +// ) -> Prop { +// Prop::new_without_ui_optionals(ctx, name, kind, schema_variant_id, parent_prop_id) +// .await +// .expect("could not create prop") +// } + +// pub async fn create_component_and_schema(ctx: &DalContext) -> Component { +// let schema = create_schema(ctx).await; +// let mut schema_variant = create_schema_variant(ctx, *schema.id()).await; +// schema_variant +// .finalize(ctx, None) +// .await +// .expect("unable to finalize schema variant"); +// let name = generate_fake_name(); +// let (component, _) = Component::new(ctx, &name, *schema_variant.id()) +// .await +// .expect("cannot create component"); +// component +// } + +// pub async fn create_component_for_schema_variant( +// ctx: &DalContext, +// schema_variant_id: &SchemaVariantId, +// ) -> Component { +// let name = generate_fake_name(); +// let (component, _) = Component::new(ctx, &name, *schema_variant_id) +// .await +// .expect("cannot create component"); +// component +// } + +// pub async fn create_component_for_schema(ctx: &DalContext, schema_id: &SchemaId) -> Component { +// let name = generate_fake_name(); +// let (component, _) = Component::new_for_default_variant_from_schema(ctx, &name, *schema_id) +// .await +// .expect("cannot create component"); +// component +// } + +// pub async fn create_node(ctx: &DalContext, node_kind: 
&NodeKind) -> Node { +// Node::new(ctx, node_kind).await.expect("cannot create node") +// } + +// pub async fn create_func(ctx: &DalContext) -> Func { +// let name = generate_fake_name(); +// Func::new( +// ctx, +// name, +// FuncBackendKind::String, +// FuncBackendResponseType::String, +// ) +// .await +// .expect("cannot create func") +// } pub async fn create_func_binding( ctx: &DalContext, diff --git a/lib/dal/BUCK b/lib/dal/BUCK index 33ad55ebdc..96cb562389 100644 --- a/lib/dal/BUCK +++ b/lib/dal/BUCK @@ -7,9 +7,12 @@ load( rust_library( name = "dal", deps = [ + "//lib/si-cbor:si-cbor", + "//lib/content-store:content-store", "//lib/council-server:council-server", "//lib/nats-subscriber:nats-subscriber", "//lib/object-tree:object-tree", + "//lib/rebaser-client:rebaser-client", "//lib/si-crypto:si-crypto", "//lib/si-data-nats:si-data-nats", "//lib/si-data-pg:si-data-pg", @@ -23,8 +26,8 @@ rust_library( "//third-party/rust:async-trait", "//third-party/rust:base64", "//third-party/rust:blake3", - "//third-party/rust:ciborium", "//third-party/rust:chrono", + "//third-party/rust:ciborium", "//third-party/rust:convert_case", "//third-party/rust:derive_more", "//third-party/rust:diff", @@ -32,12 +35,15 @@ rust_library( "//third-party/rust:futures", "//third-party/rust:hex", "//third-party/rust:iftree", + "//third-party/rust:itertools", "//third-party/rust:jwt-simple", "//third-party/rust:lazy_static", "//third-party/rust:once_cell", "//third-party/rust:paste", "//third-party/rust:petgraph", + "//third-party/rust:postcard", "//third-party/rust:postgres-types", + "//third-party/rust:pretty_assertions_sorted", "//third-party/rust:rand", "//third-party/rust:refinery", "//third-party/rust:regex", @@ -74,11 +80,16 @@ rust_library( rust_test( name = "test-integration", deps = [ + "//lib/content-store:content-store", "//lib/dal-test:dal-test", + "//lib/rebaser-client:rebaser-client", + "//lib/rebaser-core:rebaser-core", + "//lib/rebaser-server:rebaser-server", 
"//lib/si-pkg:si-pkg", "//lib/veritech-client:veritech-client", "//third-party/rust:base64", "//third-party/rust:itertools", + "//third-party/rust:petgraph", "//third-party/rust:pretty_assertions_sorted", "//third-party/rust:serde_json", "//third-party/rust:sodiumoxide", diff --git a/lib/dal/Cargo.toml b/lib/dal/Cargo.toml index c31e9aa0b6..04f965ceec 100644 --- a/lib/dal/Cargo.toml +++ b/lib/dal/Cargo.toml @@ -13,6 +13,7 @@ base64 = { workspace = true } blake3 = { workspace = true } chrono = { workspace = true } ciborium = { workspace = true } +content-store = { path = "../../lib/content-store" } convert_case = { workspace = true } council-server = { path = "../../lib/council-server" } derive_more = { workspace = true } @@ -21,6 +22,7 @@ dyn-clone = { workspace = true } futures = { workspace = true } hex = { workspace = true } iftree = { workspace = true } +itertools = { workspace = true } jwt-simple = { workspace = true } lazy_static = { workspace = true } nats-subscriber = { path = "../../lib/nats-subscriber" } @@ -28,8 +30,10 @@ object-tree = { path = "../../lib/object-tree" } once_cell = { workspace = true } paste = { workspace = true } petgraph = { workspace = true } +postcard = { version = "1.0.8", features = ["alloc"] } postgres-types = { workspace = true } rand = { workspace = true } +rebaser-client = { path = "../../lib/rebaser-client" } refinery = { workspace = true } regex = { workspace = true } remain = { workspace = true } @@ -37,6 +41,7 @@ serde = { workspace = true } serde-aux = { workspace = true } serde_json = { workspace = true } serde_with = { workspace = true } +si-cbor = { path = "../../lib/si-cbor" } si-crypto = { path = "../../lib/si-crypto" } si-data-nats = { path = "../../lib/si-data-nats" } si-data-pg = { path = "../../lib/si-data-pg" } @@ -59,4 +64,6 @@ buck2-resources = { path = "../../lib/buck2-resources" } dal-test = { path = "../../lib/dal-test" } itertools = { workspace = true } pretty_assertions_sorted = { workspace = true } 
+rebaser-core = { path = "../../lib/rebaser-core" } +rebaser-server = { path = "../../lib/rebaser-server" } tempfile = { workspace = true } diff --git a/lib/dal/examples/dal-pkg-export/main.rs b/lib/dal/examples/dal-pkg-export/main.rs index 444e7080b1..0f65ffe2e1 100644 --- a/lib/dal/examples/dal-pkg-export/main.rs +++ b/lib/dal/examples/dal-pkg-export/main.rs @@ -1,149 +1,149 @@ -use buck2_resources::Buck2Resources; -use std::{env, path::Path, str::FromStr, sync::Arc}; -use tokio::fs; - -use dal::{ - pkg::PkgExporter, ChangeSet, ChangeSetPk, DalContext, JobQueueProcessor, NatsProcessor, Schema, - ServicesContext, StandardModel, Tenancy, Workspace, -}; -use si_crypto::{SymmetricCryptoService, SymmetricCryptoServiceConfigFile}; -use si_data_nats::{NatsClient, NatsConfig}; -use si_data_pg::{PgPool, PgPoolConfig}; -use veritech_client::{Client as VeritechClient, CycloneEncryptionKey}; +// use buck2_resources::Buck2Resources; +// use std::{env, path::Path, str::FromStr, sync::Arc}; +// use tokio::fs; + +// use dal::{ +// pkg::PkgExporter, ChangeSet, ChangeSetPk, DalContext, JobQueueProcessor, NatsProcessor, Schema, +// ServicesContext, StandardModel, Tenancy, Workspace, +// }; +// use si_crypto::{SymmetricCryptoService, SymmetricCryptoServiceConfigFile}; +// use si_data_nats::{NatsClient, NatsConfig}; +// use si_data_pg::{PgPool, PgPoolConfig}; +// use veritech_client::{Client as VeritechClient, CycloneEncryptionKey}; type Result = std::result::Result>; -const USAGE: &str = - "usage: program "; +// const USAGE: &str = +// "usage: program "; #[tokio::main] async fn main() -> Result<()> { - let mut args = env::args(); - let change_set_pk = ChangeSetPk::from_str(args.nth(1).expect(USAGE).as_str())?; - let tar_file = args.next().expect(USAGE); - let name = args.next().expect(USAGE); - let version = args.next().expect(USAGE); - let created_by = args.next().expect(USAGE); - let schema_names = args.next().expect(USAGE); - let schema_names = schema_names.split(','); - - let 
description = format!("{name} package, created by {created_by}."); - - let mut ctx = ctx().await?; - - let workspace = match Workspace::find_first_user_workspace(&ctx).await? { - Some(workspace) => workspace, - None => Workspace::builtin(&ctx).await?, - }; - - ctx.update_tenancy(Tenancy::new(*workspace.pk())); - let change_set = ChangeSet::get_by_pk(&ctx, &change_set_pk) - .await? - .expect("That change set could not be found"); - ctx.update_visibility(ctx.visibility().to_change_set(change_set.pk)); - - let mut schema_ids = Vec::new(); - for schema_name in schema_names { - schema_ids.push(*Schema::find_by_name(&ctx, schema_name.trim()).await?.id()); - } - - println!( - "--- Exporting pkg: {tar_file} from head change set in workspace \"{}\"", - workspace.name() - ); - let mut exporter = - PkgExporter::new_module_exporter(name, version, Some(description), created_by, schema_ids); - - fs::write(&tar_file, exporter.export_as_bytes(&ctx).await?).await?; - - println!("--- Committing database transaction"); - ctx.commit().await?; - println!(" - Committed."); - - println!("--- Export complete."); + // let mut args = env::args(); + // let change_set_pk = ChangeSetPk::from_str(args.nth(1).expect(USAGE).as_str())?; + // let tar_file = args.next().expect(USAGE); + // let name = args.next().expect(USAGE); + // let version = args.next().expect(USAGE); + // let created_by = args.next().expect(USAGE); + // let schema_names = args.next().expect(USAGE); + // let schema_names = schema_names.split(','); + + // let description = format!("{name} package, created by {created_by}."); + + // let mut ctx = ctx().await?; + + // let workspace = match Workspace::find_first_user_workspace(&ctx).await? { + // Some(workspace) => workspace, + // None => Workspace::builtin(&ctx).await?, + // }; + + // ctx.update_tenancy(Tenancy::new(*workspace.pk())); + // let change_set = ChangeSet::get_by_pk(&ctx, &change_set_pk) + // .await? 
+ // .expect("That change set could not be found"); + // ctx.update_visibility(ctx.visibility().to_change_set(change_set.pk)); + + // let mut schema_ids = Vec::new(); + // for schema_name in schema_names { + // schema_ids.push(*Schema::find_by_name(&ctx, schema_name.trim()).await?.id()); + // } + + // println!( + // "--- Exporting pkg: {tar_file} from head change set in workspace \"{}\"", + // workspace.name() + // ); + // let mut exporter = + // PkgExporter::new_module_exporter(name, version, Some(description), created_by, schema_ids); + + // fs::write(&tar_file, exporter.export_as_bytes(&ctx).await?).await?; + + // println!("--- Committing database transaction"); + // ctx.commit().await?; + // println!(" - Committed."); + + // println!("--- Export complete."); Ok(()) } -async fn ctx() -> Result { - let encryption_key = Arc::new(load_encryption_key().await?); - let pg_pool = create_pg_pool().await?; - let nats_conn = connect_to_nats().await?; - let veritech = create_veritech_client(nats_conn.clone()); - let symmetric_crypto_service = create_symmetric_crypto_service().await?; - - let job_processor = connect_processor(nats_conn.clone()).await?; - - let services_context = ServicesContext::new( - pg_pool, - nats_conn, - job_processor, - veritech, - encryption_key, - None, - None, - symmetric_crypto_service, - ); - - Ok(DalContext::builder(services_context, false) - .build_default() - .await?) 
-} - -async fn create_pg_pool() -> Result { - PgPool::new(&PgPoolConfig::default()) - .await - .map_err(Into::into) -} - -async fn connect_to_nats() -> Result { - NatsClient::new(&NatsConfig::default()) - .await - .map_err(Into::into) -} - -fn create_veritech_client(nats: NatsClient) -> VeritechClient { - VeritechClient::new(nats) -} - -#[allow(clippy::disallowed_methods)] // Used to determine if running in development -async fn load_encryption_key() -> Result { - let path = if env::var("BUCK_RUN_BUILD_ID").is_ok() || env::var("BUCK_BUILD_ID").is_ok() { - Buck2Resources::read()?.get_ends_with("dev.encryption.key")? - } else if let Ok(dir) = env::var("CARGO_MANIFEST_DIR") { - Path::new(&dir).join("../../lib/cyclone-server/src/dev.encryption.key") - } else { - unimplemented!("not running with Buck2 or Cargo, unsupported") - }; - - CycloneEncryptionKey::load(path).await.map_err(Into::into) -} - -#[allow(clippy::disallowed_methods)] // Used to determine if running in development -async fn create_symmetric_crypto_service() -> Result { - let active_key = if env::var("BUCK_RUN_BUILD_ID").is_ok() || env::var("BUCK_BUILD_ID").is_ok() { - Buck2Resources::read()?.get_ends_with("dev.donkey.key")? 
- } else if let Ok(dir) = env::var("CARGO_MANIFEST_DIR") { - Path::new(&dir).join("../../lib/dal/dev.donkey.key") - } else { - unimplemented!("not running with Buck2 or Cargo, unsupported") - }; - - SymmetricCryptoService::from_config( - &SymmetricCryptoServiceConfigFile { - active_key: Some(active_key.to_string_lossy().into_owned()), - active_key_base64: None, - extra_keys: Default::default(), - } - .try_into()?, - ) - .await - .map_err(Into::into) -} - -async fn connect_processor( - job_client: NatsClient, -) -> Result> { - let job_processor = - Box::new(NatsProcessor::new(job_client)) as Box; - Ok(job_processor) -} +// async fn ctx() -> Result { +// let encryption_key = Arc::new(load_encryption_key().await?); +// let pg_pool = create_pg_pool().await?; +// let nats_conn = connect_to_nats().await?; +// let veritech = create_veritech_client(nats_conn.clone()); +// let symmetric_crypto_service = create_symmetric_crypto_service().await?; + +// let job_processor = connect_processor(nats_conn.clone()).await?; + +// let services_context = ServicesContext::new( +// pg_pool, +// nats_conn, +// job_processor, +// veritech, +// encryption_key, +// None, +// None, +// symmetric_crypto_service, +// ); + +// Ok(DalContext::builder(services_context, false) +// .build_default() +// .await?) +// } + +// async fn create_pg_pool() -> Result { +// PgPool::new(&PgPoolConfig::default()) +// .await +// .map_err(Into::into) +// } + +// async fn connect_to_nats() -> Result { +// NatsClient::new(&NatsConfig::default()) +// .await +// .map_err(Into::into) +// } + +// fn create_veritech_client(nats: NatsClient) -> VeritechClient { +// VeritechClient::new(nats) +// } + +// #[allow(clippy::disallowed_methods)] // Used to determine if running in development +// async fn load_encryption_key() -> Result { +// let path = if env::var("BUCK_RUN_BUILD_ID").is_ok() || env::var("BUCK_BUILD_ID").is_ok() { +// Buck2Resources::read()?.get_ends_with("dev.encryption.key")? 
+// } else if let Ok(dir) = env::var("CARGO_MANIFEST_DIR") { +// Path::new(&dir).join("../../lib/cyclone-server/src/dev.encryption.key") +// } else { +// unimplemented!("not running with Buck2 or Cargo, unsupported") +// }; + +// CycloneEncryptionKey::load(path).await.map_err(Into::into) +// } + +// #[allow(clippy::disallowed_methods)] // Used to determine if running in development +// async fn create_symmetric_crypto_service() -> Result { +// let active_key = if env::var("BUCK_RUN_BUILD_ID").is_ok() || env::var("BUCK_BUILD_ID").is_ok() { +// Buck2Resources::read()?.get_ends_with("dev.donkey.key")? +// } else if let Ok(dir) = env::var("CARGO_MANIFEST_DIR") { +// Path::new(&dir).join("../../lib/dal/dev.donkey.key") +// } else { +// unimplemented!("not running with Buck2 or Cargo, unsupported") +// }; + +// SymmetricCryptoService::from_config( +// &SymmetricCryptoServiceConfigFile { +// active_key: Some(active_key.to_string_lossy().into_owned()), +// active_key_base64: None, +// extra_keys: Default::default(), +// } +// .try_into()?, +// ) +// .await +// .map_err(Into::into) +// } + +// async fn connect_processor( +// job_client: NatsClient, +// ) -> Result> { +// let job_processor = +// Box::new(NatsProcessor::new(job_client)) as Box; +// Ok(job_processor) +// } diff --git a/lib/dal/examples/dal-pkg-import/main.rs b/lib/dal/examples/dal-pkg-import/main.rs index 8b3d4236a4..52904fa296 100644 --- a/lib/dal/examples/dal-pkg-import/main.rs +++ b/lib/dal/examples/dal-pkg-import/main.rs @@ -1,138 +1,138 @@ -use std::{env, path::Path, sync::Arc}; - -use buck2_resources::Buck2Resources; -use dal::generate_unique_id; -use dal::{ - pkg::import_pkg_from_pkg, ChangeSet, DalContext, JobQueueProcessor, NatsProcessor, - ServicesContext, Tenancy, Workspace, -}; -use si_crypto::{SymmetricCryptoService, SymmetricCryptoServiceConfigFile}; -use si_data_nats::{NatsClient, NatsConfig}; -use si_data_pg::{PgPool, PgPoolConfig}; -use si_pkg::SiPkg; -use veritech_client::{Client as 
VeritechClient, CycloneEncryptionKey}; +// use std::{env, path::Path, sync::Arc}; + +// use buck2_resources::Buck2Resources; +// use dal::generate_unique_id; +// use dal::{ +// pkg::import_pkg_from_pkg, ChangeSet, DalContext, JobQueueProcessor, NatsProcessor, +// ServicesContext, Tenancy, Workspace, +// }; +// use si_crypto::{SymmetricCryptoService, SymmetricCryptoServiceConfigFile}; +// use si_data_nats::{NatsClient, NatsConfig}; +// use si_data_pg::{PgPool, PgPoolConfig}; +// use si_pkg::SiPkg; +// use veritech_client::{Client as VeritechClient, CycloneEncryptionKey}; type Result = std::result::Result>; #[tokio::main] async fn main() -> Result<()> { - let mut args = env::args(); - let tar_file = args.nth(1).expect("usage: program "); - - let mut ctx = ctx().await?; - - let workspace = match Workspace::find_first_user_workspace(&ctx).await? { - Some(workspace) => workspace, - None => Workspace::builtin(&ctx).await?, - }; - - ctx.update_tenancy(Tenancy::new(*workspace.pk())); - - let pkg = SiPkg::load_from_file(Path::new(&tar_file)).await?; - let metadata = pkg.metadata()?; - let change_set_name = format!( - "pkg - {} ({}) {}", - metadata.name(), - metadata.version(), - generate_unique_id(4) - ); - let change_set = ChangeSet::new(&ctx, &change_set_name, None).await?; - let ctx = ctx.clone_with_new_visibility(ctx.visibility().to_change_set(change_set.pk)); - - println!( - "--- Importing pkg: {tar_file} into change set \"{change_set_name}\" in workspace \"{}\"", - workspace.name() - ); - import_pkg_from_pkg(&ctx, &pkg, None).await?; - - println!("--- Committing database transaction"); - ctx.commit().await?; - println!(" - Committed."); - - println!("--- Import complete."); + // let mut args = env::args(); + // let tar_file = args.nth(1).expect("usage: program "); + + // let mut ctx = ctx().await?; + + // let workspace = match Workspace::find_first_user_workspace(&ctx).await? 
{ + // Some(workspace) => workspace, + // None => Workspace::builtin(&ctx).await?, + // }; + + // ctx.update_tenancy(Tenancy::new(*workspace.pk())); + + // let pkg = SiPkg::load_from_file(Path::new(&tar_file)).await?; + // let metadata = pkg.metadata()?; + // let change_set_name = format!( + // "pkg - {} ({}) {}", + // metadata.name(), + // metadata.version(), + // generate_unique_id(4) + // ); + // let change_set = ChangeSet::new(&ctx, &change_set_name, None).await?; + // let ctx = ctx.clone_with_new_visibility(ctx.visibility().to_change_set(change_set.pk)); + + // println!( + // "--- Importing pkg: {tar_file} into change set \"{change_set_name}\" in workspace \"{}\"", + // workspace.name() + // ); + // import_pkg_from_pkg(&ctx, &pkg, None).await?; + + // println!("--- Committing database transaction"); + // ctx.commit().await?; + // println!(" - Committed."); + + // println!("--- Import complete."); Ok(()) } -async fn ctx() -> Result { - let encryption_key = Arc::new(load_encryption_key().await?); - let pg_pool = create_pg_pool().await?; - let nats_conn = connect_to_nats().await?; - let veritech = create_veritech_client(nats_conn.clone()); - let symmetric_crypto_service = create_symmetric_crypto_service().await?; - - let job_processor = connect_processor(nats_conn.clone()).await?; - - let services_context = ServicesContext::new( - pg_pool, - nats_conn, - job_processor, - veritech, - encryption_key, - None, - None, - symmetric_crypto_service, - ); - - Ok(DalContext::builder(services_context, false) - .build_default() - .await?) 
-} - -async fn create_pg_pool() -> Result { - PgPool::new(&PgPoolConfig::default()) - .await - .map_err(Into::into) -} - -async fn connect_to_nats() -> Result { - NatsClient::new(&NatsConfig::default()) - .await - .map_err(Into::into) -} - -fn create_veritech_client(nats: NatsClient) -> VeritechClient { - VeritechClient::new(nats) -} - -#[allow(clippy::disallowed_methods)] // Used to determine if running in development -async fn load_encryption_key() -> Result { - let path = if env::var("BUCK_RUN_BUILD_ID").is_ok() || env::var("BUCK_BUILD_ID").is_ok() { - Buck2Resources::read()?.get_ends_with("dev.encryption.key")? - } else if let Ok(dir) = env::var("CARGO_MANIFEST_DIR") { - Path::new(&dir).join("../../lib/cyclone-server/src/dev.encryption.key") - } else { - unimplemented!("not running with Buck2 or Cargo, unsupported") - }; - - CycloneEncryptionKey::load(path).await.map_err(Into::into) -} - -#[allow(clippy::disallowed_methods)] // Used to determine if running in development -async fn create_symmetric_crypto_service() -> Result { - let active_key = if env::var("BUCK_RUN_BUILD_ID").is_ok() || env::var("BUCK_BUILD_ID").is_ok() { - Buck2Resources::read()?.get_ends_with("dev.donkey.key")? 
- } else if let Ok(dir) = env::var("CARGO_MANIFEST_DIR") { - Path::new(&dir).join("../../lib/dal/dev.donkey.key") - } else { - unimplemented!("not running with Buck2 or Cargo, unsupported") - }; - - SymmetricCryptoService::from_config( - &SymmetricCryptoServiceConfigFile { - active_key: Some(active_key.to_string_lossy().into_owned()), - active_key_base64: None, - extra_keys: Default::default(), - } - .try_into()?, - ) - .await - .map_err(Into::into) -} - -async fn connect_processor( - job_client: NatsClient, -) -> Result> { - let job_processor = - Box::new(NatsProcessor::new(job_client)) as Box; - Ok(job_processor) -} +// async fn ctx() -> Result { +// let encryption_key = Arc::new(load_encryption_key().await?); +// let pg_pool = create_pg_pool().await?; +// let nats_conn = connect_to_nats().await?; +// let veritech = create_veritech_client(nats_conn.clone()); +// let symmetric_crypto_service = create_symmetric_crypto_service().await?; + +// let job_processor = connect_processor(nats_conn.clone()).await?; + +// let services_context = ServicesContext::new( +// pg_pool, +// nats_conn, +// job_processor, +// veritech, +// encryption_key, +// None, +// None, +// symmetric_crypto_service, +// ); + +// Ok(DalContext::builder(services_context, false) +// .build_default() +// .await?) +// } + +// async fn create_pg_pool() -> Result { +// PgPool::new(&PgPoolConfig::default()) +// .await +// .map_err(Into::into) +// } + +// async fn connect_to_nats() -> Result { +// NatsClient::new(&NatsConfig::default()) +// .await +// .map_err(Into::into) +// } + +// fn create_veritech_client(nats: NatsClient) -> VeritechClient { +// VeritechClient::new(nats) +// } + +// #[allow(clippy::disallowed_methods)] // Used to determine if running in development +// async fn load_encryption_key() -> Result { +// let path = if env::var("BUCK_RUN_BUILD_ID").is_ok() || env::var("BUCK_BUILD_ID").is_ok() { +// Buck2Resources::read()?.get_ends_with("dev.encryption.key")? 
+// } else if let Ok(dir) = env::var("CARGO_MANIFEST_DIR") { +// Path::new(&dir).join("../../lib/cyclone-server/src/dev.encryption.key") +// } else { +// unimplemented!("not running with Buck2 or Cargo, unsupported") +// }; + +// CycloneEncryptionKey::load(path).await.map_err(Into::into) +// } + +// #[allow(clippy::disallowed_methods)] // Used to determine if running in development +// async fn create_symmetric_crypto_service() -> Result { +// let active_key = if env::var("BUCK_RUN_BUILD_ID").is_ok() || env::var("BUCK_BUILD_ID").is_ok() { +// Buck2Resources::read()?.get_ends_with("dev.donkey.key")? +// } else if let Ok(dir) = env::var("CARGO_MANIFEST_DIR") { +// Path::new(&dir).join("../../lib/dal/dev.donkey.key") +// } else { +// unimplemented!("not running with Buck2 or Cargo, unsupported") +// }; + +// SymmetricCryptoService::from_config( +// &SymmetricCryptoServiceConfigFile { +// active_key: Some(active_key.to_string_lossy().into_owned()), +// active_key_base64: None, +// extra_keys: Default::default(), +// } +// .try_into()?, +// ) +// .await +// .map_err(Into::into) +// } + +// async fn connect_processor( +// job_client: NatsClient, +// ) -> Result> { +// let job_processor = +// Box::new(NatsProcessor::new(job_client)) as Box; +// Ok(job_processor) +// } diff --git a/lib/dal/src/action_prototype.rs b/lib/dal/src/action_prototype.rs index bd1ee4f3e6..80a94534c3 100644 --- a/lib/dal/src/action_prototype.rs +++ b/lib/dal/src/action_prototype.rs @@ -1,29 +1,15 @@ -use std::default::Default; - +use content_store::{Store, StoreError}; use serde::{Deserialize, Serialize}; -use strum::{AsRefStr, Display}; +use si_pkg::ActionFuncSpecKind; +use strum::{AsRefStr, Display, EnumDiscriminants}; use thiserror::Error; -use si_data_nats::NatsError; -use si_data_pg::PgError; -use si_pkg::ActionFuncSpecKind; -use telemetry::prelude::*; - -use crate::func::before::before_funcs_for_component; -use crate::{ - component::view::ComponentViewError, 
func::backend::js_action::ActionRunResult, - impl_standard_model, pk, standard_model, standard_model_accessor, Component, ComponentId, - ComponentView, DalContext, Func, FuncBinding, FuncBindingError, FuncBindingReturnValueError, - FuncError, FuncId, HistoryEventError, SchemaVariantId, StandardModel, StandardModelError, - Tenancy, Timestamp, TransactionsError, Visibility, WsEvent, WsEventError, -}; - -const FIND_FOR_CONTEXT: &str = include_str!("./queries/action_prototype/find_for_context.sql"); -const FIND_FOR_CONTEXT_AND_KIND: &str = - include_str!("./queries/action_prototype/find_for_context_and_kind.sql"); -const FIND_FOR_FUNC: &str = include_str!("./queries/action_prototype/find_for_func.sql"); -const FIND_FOR_CONTEXT_AND_FUNC: &str = - include_str!("./queries/action_prototype/find_for_context_and_func.sql"); +use crate::change_set_pointer::ChangeSetPointerError; +use crate::workspace_snapshot::content_address::{ContentAddress, ContentAddressDiscriminants}; +use crate::workspace_snapshot::edge_weight::{EdgeWeight, EdgeWeightError, EdgeWeightKind}; +use crate::workspace_snapshot::node_weight::{ContentNodeWeight, NodeWeight, NodeWeightError}; +use crate::workspace_snapshot::WorkspaceSnapshotError; +use crate::{pk, DalContext, FuncId, SchemaVariantId, Timestamp, TransactionsError}; #[derive(Deserialize, Serialize, Debug, Clone, PartialEq, Eq)] #[serde(rename_all = "camelCase")] @@ -33,80 +19,83 @@ pub struct ActionPrototypeView { display_name: Option, } -impl ActionPrototypeView { - pub async fn new( - ctx: &DalContext, - prototype: ActionPrototype, - ) -> ActionPrototypeResult { - let mut display_name = None; - let func_details = Func::get_by_id(ctx, &prototype.func_id).await?; - if let Some(func) = func_details { - display_name = func.display_name().map(|dname| dname.to_string()) - }; - Ok(Self { - id: prototype.id, - name: prototype.name().map_or_else( - || match prototype.kind() { - ActionKind::Create => "create".to_owned(), - ActionKind::Delete => 
"delete".to_owned(), - ActionKind::Other => "other".to_owned(), - ActionKind::Refresh => "refresh".to_owned(), - }, - ToOwned::to_owned, - ), - display_name, - }) - } -} +// impl ActionPrototypeView { +// pub async fn new( +// ctx: &DalContext, +// prototype: ActionPrototype, +// ) -> ActionPrototypeResult { +// // let mut display_name = None; +// // let func_details = Func::get_by_id(ctx, &prototype.func_id).await?; +// // if let Some(func) = func_details { +// // display_name = func.display_name().map(|dname| dname.to_string()) +// // }; +// Ok(Self { +// id: prototype.id, +// name: prototype.name().map_or_else( +// || match prototype.kind() { +// ActionKind::Create => "create".to_owned(), +// ActionKind::Delete => "delete".to_owned(), +// ActionKind::Other => "other".to_owned(), +// ActionKind::Refresh => "refresh".to_owned(), +// }, +// ToOwned::to_owned, +// ), +// display_name: Some("delete me".to_string()), +// }) +// } +// } #[remain::sorted] #[derive(Error, Debug)] pub enum ActionPrototypeError { - #[error("component error: {0}")] - Component(String), - #[error("component not found: {0}")] - ComponentNotFound(ComponentId), - #[error(transparent)] - ComponentView(#[from] ComponentViewError), - #[error("func error: {0}")] - Func(#[from] FuncError), #[error(transparent)] - FuncBinding(#[from] FuncBindingError), - #[error(transparent)] - FuncBindingReturnValue(#[from] FuncBindingReturnValueError), - #[error("action Func {0} not found for ActionPrototype {1}")] - FuncNotFound(FuncId, ActionPrototypeId), - #[error("history event error: {0}")] - HistoryEvent(#[from] HistoryEventError), - #[error("this asset already has an action of this kind")] - MultipleOfSameKind, - #[error("nats txn error: {0}")] - Nats(#[from] NatsError), - #[error("not found with kind {0} for context {1:?}")] - NotFoundByKindAndContext(ActionKind, ActionPrototypeContext), - #[error("pg error: {0}")] - Pg(#[from] PgError), - #[error("schema not found")] - SchemaNotFound, - #[error("schema 
variant not found")] - SchemaVariantNotFound, - #[error("error serializing/deserializing json: {0}")] - SerdeJson(#[from] serde_json::Error), - #[error("standard model error: {0}")] - StandardModelError(#[from] StandardModelError), + ChangeSetPointer(#[from] ChangeSetPointerError), + // #[error("component error: {0}")] + // Component(String), + // #[error("component not found: {0}")] + // ComponentNotFound(ComponentId), + // #[error(transparent)] + // ComponentView(#[from] ComponentViewError), + // #[error(transparent)] + // FuncBinding(#[from] FuncBindingError), + // #[error(transparent)] + // FuncBindingReturnValue(#[from] FuncBindingReturnValueError), + // #[error("action Func {0} not found for ActionPrototype {1}")] + // FuncNotFound(FuncId, ActionPrototypeId), + #[error("edge weight error: {0}")] + EdgeWeight(#[from] EdgeWeightError), + // #[error("history event error: {0}")] + // HistoryEvent(#[from] HistoryEventError), + // #[error("this asset already has an action of this kind")] + // MultipleOfSameKind, + // #[error("nats txn error: {0}")] + // Nats(#[from] NatsError), + // #[error("not found with kind {0} for context {1:?}")] + // NotFoundByKindAndContext(ActionKind, ActionPrototypeContext), + // #[error("pg error: {0}")] + // Pg(#[from] PgError), + // #[error("schema not found")] + // SchemaNotFound, + // #[error("schema variant not found")] + // SchemaVariantNotFound, + // #[error("error serializing/deserializing json: {0}")] + // SerdeJson(#[from] serde_json::Error), + // #[error("standard model error: {0}")] + // StandardModelError(#[from] StandardModelError), + #[error("node weight error: {0}")] + NodeWeight(#[from] NodeWeightError), + #[error("store error: {0}")] + Store(#[from] StoreError), #[error("transactions error: {0}")] Transactions(#[from] TransactionsError), - #[error(transparent)] - WsEvent(#[from] WsEventError), + #[error("could not acquire lock: {0}")] + TryLock(#[from] tokio::sync::TryLockError), + #[error("workspace snapshot error: 
{0}")] + WorkspaceSnapshot(#[from] WorkspaceSnapshotError), } pub type ActionPrototypeResult = Result; -#[derive(Deserialize, Serialize, Debug, Clone, PartialEq, Eq, Copy)] -pub struct ActionPrototypeContext { - pub schema_variant_id: SchemaVariantId, -} - /// Describes how an [`Action`](ActionPrototype) affects the world. #[remain::sorted] #[derive(AsRefStr, Deserialize, Display, Serialize, Debug, Eq, PartialEq, Clone, Copy, Hash)] @@ -145,288 +134,268 @@ impl From<&ActionKind> for ActionFuncSpecKind { } } -// Hrm - is this a universal resolver context? -- Adam -impl Default for ActionPrototypeContext { - fn default() -> Self { - Self::new() - } -} - -impl ActionPrototypeContext { - pub fn new() -> Self { - Self { - schema_variant_id: SchemaVariantId::NONE, - } - } - - pub fn new_for_context_field(context_field: ActionPrototypeContextField) -> Self { - match context_field { - ActionPrototypeContextField::SchemaVariant(schema_variant_id) => { - ActionPrototypeContext { schema_variant_id } - } - } - } - - pub fn schema_variant_id(&self) -> SchemaVariantId { - self.schema_variant_id - } - - pub fn set_schema_variant_id(&mut self, schema_variant_id: SchemaVariantId) { - self.schema_variant_id = schema_variant_id; - } -} - -pk!(ActionPrototypePk); pk!(ActionPrototypeId); // An ActionPrototype joins a `FuncId` to a `SchemaVariantId` with a `ActionKind` and `name` #[derive(Deserialize, Serialize, Debug, Clone, PartialEq, Eq)] pub struct ActionPrototype { - pk: ActionPrototypePk, id: ActionPrototypeId, - func_id: FuncId, kind: ActionKind, name: Option, - schema_variant_id: SchemaVariantId, - #[serde(flatten)] - tenancy: Tenancy, #[serde(flatten)] timestamp: Timestamp, - #[serde(flatten)] - visibility: Visibility, } -#[remain::sorted] -#[derive(Debug, Copy, Clone, PartialEq, Eq)] -pub enum ActionPrototypeContextField { - SchemaVariant(SchemaVariantId), +#[derive(EnumDiscriminants, Serialize, Deserialize, PartialEq)] +#[serde(tag = "version")] +pub enum 
ActionPrototypeContent { + V1(ActionPrototypeContentV1), } -impl From for ActionPrototypeContextField { - fn from(schema_variant_id: SchemaVariantId) -> Self { - ActionPrototypeContextField::SchemaVariant(schema_variant_id) - } -} - -impl_standard_model! { - model: ActionPrototype, - pk: ActionPrototypePk, - id: ActionPrototypeId, - table_name: "action_prototypes", - history_event_label_base: "action_prototype", - history_event_message_name: "Action Prototype" +#[derive(Debug, Clone, Deserialize, Serialize, PartialEq)] +pub struct ActionPrototypeContentV1 { + kind: ActionKind, + name: Option, + timestamp: Timestamp, } impl ActionPrototype { - pub async fn new( - ctx: &DalContext, - func_id: FuncId, - kind: ActionKind, - context: ActionPrototypeContext, - ) -> ActionPrototypeResult { - let action_prototypes = Self::find_for_context(ctx, context).await?; - for prototype in action_prototypes { - if *prototype.kind() == kind && kind != ActionKind::Other { - return Err(ActionPrototypeError::MultipleOfSameKind); - } - } - - let row = ctx - .txns() - .await? - .pg() - .query_one( - "SELECT object FROM action_prototype_create_v1($1, $2, $3, $4, $5)", - &[ - ctx.tenancy(), - ctx.visibility(), - &func_id, - &kind.as_ref(), - &context.schema_variant_id(), - ], - ) - .await?; - let object = standard_model::finish_create_from_row(ctx, row).await?; - Ok(object) - } + pub fn assemble(node_weight: &ContentNodeWeight, content: &ActionPrototypeContentV1) -> Self { + let content = content.to_owned(); - pub async fn find_for_context( - ctx: &DalContext, - context: ActionPrototypeContext, - ) -> ActionPrototypeResult> { - let rows = ctx - .txns() - .await? - .pg() - .query( - FIND_FOR_CONTEXT, - &[ - ctx.tenancy(), - ctx.visibility(), - &context.schema_variant_id(), - ], - ) - .await?; - - Ok(standard_model::objects_from_rows(rows)?) 
+ Self { + id: node_weight.id().into(), + name: content.name, + kind: content.kind, + timestamp: content.timestamp, + } } - pub async fn find_for_context_and_kind( + pub async fn new( ctx: &DalContext, + name: Option>, kind: ActionKind, - context: ActionPrototypeContext, - ) -> ActionPrototypeResult> { - let rows = ctx - .txns() - .await? - .pg() - .query( - FIND_FOR_CONTEXT_AND_KIND, - &[ - ctx.tenancy(), - ctx.visibility(), - &kind.as_ref(), - &context.schema_variant_id(), - ], - ) - .await?; - - Ok(standard_model::objects_from_rows(rows)?) - } - - pub async fn find_for_func( - ctx: &DalContext, + schema_variant_id: SchemaVariantId, func_id: FuncId, - ) -> ActionPrototypeResult> { - let rows = ctx - .txns() - .await? - .pg() - .query(FIND_FOR_FUNC, &[ctx.tenancy(), ctx.visibility(), &func_id]) - .await?; - - Ok(standard_model::objects_from_rows(rows)?) - } - - pub async fn find_for_context_and_func( - ctx: &DalContext, - context: ActionPrototypeContext, - func_id: FuncId, - ) -> ActionPrototypeResult> { - let rows = ctx - .txns() - .await? - .pg() - .query( - FIND_FOR_CONTEXT_AND_FUNC, - &[ - ctx.tenancy(), - ctx.visibility(), - &context.schema_variant_id(), - &func_id, - ], - ) - .await?; - - Ok(standard_model::objects_from_rows(rows)?) 
- } - - standard_model_accessor!( - schema_variant_id, - Pk(SchemaVariantId), - ActionPrototypeResult - ); - standard_model_accessor!(name, Option, ActionPrototypeResult); - standard_model_accessor!(func_id, Pk(FuncId), ActionPrototypeResult); - standard_model_accessor!(kind, Enum(ActionKind), ActionPrototypeResult); - - pub async fn set_kind_checked( - &mut self, - ctx: &DalContext, - kind: ActionKind, - ) -> ActionPrototypeResult<()> { - let action_prototypes = Self::find_for_context( - ctx, - ActionPrototypeContext { - schema_variant_id: self.schema_variant_id(), - }, - ) - .await?; - for prototype in action_prototypes { - if *prototype.kind() == kind && kind != ActionKind::Other && prototype.id() != self.id() - { - return Err(ActionPrototypeError::MultipleOfSameKind); - } - } - self.set_kind(ctx, kind).await - } - - pub fn context(&self) -> ActionPrototypeContext { - let mut context = ActionPrototypeContext::new(); - context.set_schema_variant_id(self.schema_variant_id); - - context - } + ) -> ActionPrototypeResult { + let timestamp = Timestamp::now(); - pub async fn run( - &self, - ctx: &DalContext, - component_id: ComponentId, - ) -> ActionPrototypeResult> { - let component_view = ComponentView::new(ctx, component_id).await?; - let deleted_ctx = ctx.clone_with_delete_visibility(); - let before = before_funcs_for_component(&deleted_ctx, &component_id).await?; - - let (_, return_value) = FuncBinding::create_and_execute( - ctx, - serde_json::to_value(component_view)?, - self.func_id(), - before, - ) - .await?; - - let mut logs = vec![]; - for stream_part in return_value - .get_output_stream(ctx) - .await? 
- .unwrap_or_default() - { - logs.push(stream_part); - } + let content = ActionPrototypeContentV1 { + kind, + timestamp, + name: name.map(Into::into), + }; - logs.sort_by_key(|log| log.timestamp); - - Ok(match return_value.value() { - Some(value) => { - let mut run_result: ActionRunResult = serde_json::from_value(value.clone())?; - run_result.logs = logs.iter().map(|l| l.message.clone()).collect(); - - let deleted_ctx = &ctx.clone_with_delete_visibility(); - let mut component = Component::get_by_id(deleted_ctx, &component_id) - .await? - .ok_or(ActionPrototypeError::ComponentNotFound(component_id))?; - - if component.needs_destroy() && run_result.payload.is_none() { - component - .set_needs_destroy(deleted_ctx, false) - .await - .map_err(|e| ActionPrototypeError::Component(e.to_string()))?; - } - - if component - .set_resource(ctx, run_result.clone()) - .await - .map_err(|e| ActionPrototypeError::Component(e.to_string()))? - { - WsEvent::resource_refreshed(ctx, *component.id()) - .await? 
- .publish_on_commit(ctx) - .await?; - } - - Some(run_result) - } - None => None, - }) + let hash = ctx + .content_store() + .lock() + .await + .add(&ActionPrototypeContent::V1(content.to_owned()))?; + + let change_set = ctx.change_set_pointer()?; + let id = change_set.generate_ulid()?; + let node_weight = + NodeWeight::new_content(change_set, id, ContentAddress::ActionPrototype(hash))?; + + let mut workspace_snapshot = ctx.workspace_snapshot()?.write().await; + + workspace_snapshot.add_node(node_weight.to_owned())?; + workspace_snapshot.add_edge( + schema_variant_id, + EdgeWeight::new(change_set, EdgeWeightKind::ActionPrototype(kind))?, + id, + )?; + workspace_snapshot.add_edge( + id, + EdgeWeight::new(change_set, EdgeWeightKind::Use)?, + func_id, + )?; + + let content_node_weight = node_weight + .get_content_node_weight_of_kind(ContentAddressDiscriminants::ActionPrototype)?; + + Ok(ActionPrototype::assemble(&content_node_weight, &content)) } } + +// impl ActionPrototype { +// pub async fn find_for_context( +// ctx: &DalContext, +// context: ActionPrototypeContext, +// ) -> ActionPrototypeResult> { +// let rows = ctx +// .txns() +// .await? +// .pg() +// .query( +// FIND_FOR_CONTEXT, +// &[ +// ctx.tenancy(), +// ctx.visibility(), +// &context.schema_variant_id(), +// ], +// ) +// .await?; + +// Ok(standard_model::objects_from_rows(rows)?) +// } + +// pub async fn find_for_context_and_kind( +// ctx: &DalContext, +// kind: ActionKind, +// context: ActionPrototypeContext, +// ) -> ActionPrototypeResult> { +// let rows = ctx +// .txns() +// .await? +// .pg() +// .query( +// FIND_FOR_CONTEXT_AND_KIND, +// &[ +// ctx.tenancy(), +// ctx.visibility(), +// &kind.as_ref(), +// &context.schema_variant_id(), +// ], +// ) +// .await?; + +// Ok(standard_model::objects_from_rows(rows)?) +// } + +// pub async fn find_for_func( +// ctx: &DalContext, +// func_id: FuncId, +// ) -> ActionPrototypeResult> { +// let rows = ctx +// .txns() +// .await? 
+// .pg() +// .query(FIND_FOR_FUNC, &[ctx.tenancy(), ctx.visibility(), &func_id]) +// .await?; + +// Ok(standard_model::objects_from_rows(rows)?) +// } + +// pub async fn find_for_context_and_func( +// ctx: &DalContext, +// context: ActionPrototypeContext, +// func_id: FuncId, +// ) -> ActionPrototypeResult> { +// let rows = ctx +// .txns() +// .await? +// .pg() +// .query( +// FIND_FOR_CONTEXT_AND_FUNC, +// &[ +// ctx.tenancy(), +// ctx.visibility(), +// &context.schema_variant_id(), +// &func_id, +// ], +// ) +// .await?; + +// Ok(standard_model::objects_from_rows(rows)?) +// } + +// standard_model_accessor!( +// schema_variant_id, +// Pk(SchemaVariantId), +// ActionPrototypeResult +// ); +// standard_model_accessor!(name, Option, ActionPrototypeResult); +// standard_model_accessor!(func_id, Pk(FuncId), ActionPrototypeResult); +// standard_model_accessor!(kind, Enum(ActionKind), ActionPrototypeResult); + +// pub async fn set_kind_checked( +// &mut self, +// ctx: &DalContext, +// kind: ActionKind, +// ) -> ActionPrototypeResult<()> { +// let action_prototypes = Self::find_for_context( +// ctx, +// ActionPrototypeContext { +// schema_variant_id: self.schema_variant_id(), +// }, +// ) +// .await?; +// for prototype in action_prototypes { +// if *prototype.kind() == kind && kind != ActionKind::Other && prototype.id() != self.id() +// { +// return Err(ActionPrototypeError::MultipleOfSameKind); +// } +// } +// self.set_kind(ctx, kind).await +// } + +// pub fn context(&self) -> ActionPrototypeContext { +// let mut context = ActionPrototypeContext::new(); +// context.set_schema_variant_id(self.schema_variant_id); + +// context +// } + +// pub async fn run( +// &self, +// ctx: &DalContext, +// component_id: ComponentId, +// ) -> ActionPrototypeResult> { +// let component_view = ComponentView::new(ctx, component_id).await?; +// let deleted_ctx = ctx.clone_with_delete_visibility(); +// let before = before_funcs_for_component(&deleted_ctx, &component_id).await?; + +// let 
(_, return_value) = FuncBinding::create_and_execute( +// ctx, +// serde_json::to_value(component_view)?, +// self.func_id(), +// before, +// ) +// .await?; + +// let mut logs = vec![]; +// for stream_part in return_value +// .get_output_stream(ctx) +// .await? +// .unwrap_or_default() +// { +// logs.push(stream_part); +// } + +// logs.sort_by_key(|log| log.timestamp); + +// Ok(match return_value.value() { +// Some(value) => { +// let mut run_result: ActionRunResult = serde_json::from_value(value.clone())?; +// run_result.logs = logs.iter().map(|l| l.message.clone()).collect(); + +// let deleted_ctx = &ctx.clone_with_delete_visibility(); +// let mut component = Component::get_by_id(deleted_ctx, &component_id) +// .await? +// .ok_or(ActionPrototypeError::ComponentNotFound(component_id))?; + +// if component.needs_destroy() && run_result.payload.is_none() { +// component +// .set_needs_destroy(deleted_ctx, false) +// .await +// .map_err(|e| ActionPrototypeError::Component(e.to_string()))?; +// } + +// if component +// .set_resource(ctx, run_result.clone()) +// .await +// .map_err(|e| ActionPrototypeError::Component(e.to_string()))? +// { +// WsEvent::resource_refreshed(ctx, *component.id()) +// .await? +// .publish_on_commit(ctx) +// .await?; +// } + +// Some(run_result) +// } +// None => None, +// }) +// } +// } diff --git a/lib/dal/src/attribute/context.rs b/lib/dal/src/attribute/context.rs index 93abeb0faf..35278c6db8 100644 --- a/lib/dal/src/attribute/context.rs +++ b/lib/dal/src/attribute/context.rs @@ -1,576 +1,565 @@ -//! This module contains the [`AttributeContext`], and its corresponding builder, [`AttributeContextBuilder`]. -//! The context can be scoped with varying levels of specificity, using an order of precedence. -//! The builder ensures the correct order of precedence is maintained whilst setting and unsetting -//! fields of specificity. -//! -//! ## The Order of Precedence -//! -//! 
The order of precedence is as follows (from least to most "specificity"): -//! - [`PropId`] / [`InternalProviderId`] / [`ExternalProviderId`] -//! - [`ComponentId`] -//! -//! At the level of least "specificity", you can provider have a [`PropId`], an -//! [`InternalProviderId`], or an [`ExternalProviderId`]. However, you can only provide one and only -//! one for an [`AttributeContext`] since they are at the same "level" in the order of precedence. -//! -//! ## `AttributeContext` vs. `AttributeReadContext` -//! -//! While the [`AttributeContext`] can be used for both read and write queries, the -//! [`AttributeReadContext`](crate::AttributeReadContext) is useful for read-only queries and for -//! flexibility when searching for objects of varying levels of specificity. - -use serde::{Deserialize, Serialize}; -use std::cmp::Ordering; -use std::default::Default; -use thiserror::Error; - -use crate::{ - ComponentId, DalContext, ExternalProviderId, InternalProviderId, Prop, PropId, StandardModel, - StandardModelError, -}; - -pub mod read; - -use crate::attribute::context::AttributeContextLeastSpecificFieldKind::{ - ExternalProvider, InternalProvider, -}; -pub use read::AttributeReadContext; - -/// Indicates which least specific field for an [`AttributeContext`] is specified and contains the -/// field's value. 
-#[remain::sorted] -#[derive(Debug)] -pub enum AttributeContextLeastSpecificFieldKind { - ExternalProvider(ExternalProviderId), - InternalProvider(InternalProviderId), - Prop(PropId), -} - -#[remain::sorted] -#[derive(Error, Debug)] -pub enum AttributeContextError { - #[error("attribute context builder error: {0}")] - AttributeContextBuilder(#[from] AttributeContextBuilderError), - #[error("could not find least specific field")] - LeastSpecificFieldKindNotFound, - #[error("standard model error: {0}")] - StandardModel(#[from] StandardModelError), -} - -pub type AttributeContextResult = Result; - -#[derive(Deserialize, Serialize, Debug, Clone, Copy, PartialEq, Eq, Hash)] -pub struct AttributeContext { - #[serde(rename = "attribute_context_prop_id")] - prop_id: PropId, - #[serde(rename = "attribute_context_internal_provider_id")] - internal_provider_id: InternalProviderId, - #[serde(rename = "attribute_context_external_provider_id")] - external_provider_id: ExternalProviderId, - #[serde(rename = "attribute_context_component_id")] - component_id: ComponentId, -} - -impl From for AttributeContextBuilder { - fn from(from_context: AttributeContext) -> AttributeContextBuilder { - AttributeContextBuilder { - prop_id: from_context.prop_id(), - internal_provider_id: from_context.internal_provider_id(), - external_provider_id: from_context.external_provider_id(), - component_id: from_context.component_id(), - } - } -} - -impl From for AttributeContextBuilder { - fn from(from_read_context: AttributeReadContext) -> AttributeContextBuilder { - let mut builder = AttributeContextBuilder::new(); - if let Some(prop_id) = from_read_context.prop_id { - builder.set_prop_id(prop_id); - } - if let Some(internal_provider_id) = from_read_context.internal_provider_id { - builder.set_internal_provider_id(internal_provider_id); - } - if let Some(external_provider_id) = from_read_context.external_provider_id { - builder.set_external_provider_id(external_provider_id); - } - if let 
Some(component_id) = from_read_context.component_id { - builder.set_component_id(component_id); - } - builder - } -} - -impl PartialOrd for AttributeContext { - /// How to compare two [`AttributeContexts`](crate::AttributeContext): - /// - /// - [`Ordering::Equal`]: same level of specificity between two contexts - /// - [`Ordering::Greater`]: "self" is "more-specific" than "other" - /// - [`Ordering::Less`]: "self" is "less-specific" than "other" - /// - [`None`]: "self" and "other" have different "least-specific" fields (e.g. "self" is - /// [`Prop`](crate::Prop)-specific and "other" is [`InternalProvider`](crate::InternalProvider)-specific. - fn partial_cmp(&self, other: &Self) -> Option { - if !self.is_component_unset() { - return if !other.is_component_unset() { - Some(Ordering::Equal) - } else { - Some(Ordering::Greater) - }; - } - - if !self.is_external_provider_unset() { - return if !other.is_component_unset() { - Some(Ordering::Less) - } else if !other.is_external_provider_unset() { - Some(Ordering::Equal) - } else { - None - }; - } - - if !self.is_internal_provider_unset() { - return if !other.is_component_unset() { - Some(Ordering::Less) - } else if !other.is_internal_provider_unset() { - Some(Ordering::Equal) - } else { - None - }; - } - - if !self.is_prop_unset() { - return if !other.is_component_unset() { - Some(Ordering::Less) - } else if !other.is_prop_unset() { - Some(Ordering::Equal) - } else { - None - }; - } - - None - } -} - -impl AttributeContext { - pub fn builder() -> AttributeContextBuilder { - AttributeContextBuilder::new() - } - - /// Use this when you want this exact context but with the provided - /// component id instead - pub fn clone_with_component_id(&self, component_id: ComponentId) -> Self { - Self { - prop_id: self.prop_id, - internal_provider_id: self.internal_provider_id, - external_provider_id: self.external_provider_id, - component_id, - } - } - - pub fn prop_id(&self) -> PropId { - self.prop_id - } - - pub fn 
is_prop_unset(&self) -> bool { - self.prop_id == PropId::NONE - } - - pub fn internal_provider_id(&self) -> InternalProviderId { - self.internal_provider_id - } - - pub fn is_internal_provider_unset(&self) -> bool { - self.internal_provider_id == InternalProviderId::NONE - } - - pub fn external_provider_id(&self) -> ExternalProviderId { - self.external_provider_id - } - - pub fn is_external_provider_unset(&self) -> bool { - self.external_provider_id == ExternalProviderId::NONE - } - - pub fn component_id(&self) -> ComponentId { - self.component_id - } - - pub fn is_component_unset(&self) -> bool { - self.component_id == ComponentId::NONE - } - - pub fn is_least_specific(&self) -> bool { - self.component_id == ComponentId::NONE - } - - /// Return a new [`AttributeContext`] with the most specific piece - /// of the current [`AttributeContext`] unset, widening the scope - /// of the context by one step. If widening the context would - /// result in everything being unset, it will return a new copy of - /// the current [`AttributeContext`]. - pub fn less_specific(&self) -> AttributeContextResult { - let mut builder = AttributeContextBuilder::from(*self); - if self.component_id() != ComponentId::NONE { - builder.unset_component_id(); - } - Ok(builder.to_context()?) - } - - /// Returns true if the least specific field corresponds to a [`Prop`](crate::Prop). - pub fn is_least_specific_field_kind_prop(&self) -> AttributeContextResult { - if let AttributeContextLeastSpecificFieldKind::Prop(_) = self.least_specific_field_kind()? { - Ok(true) - } else { - Ok(false) - } - } - - /// Returns true if the least specific field corresponds to an [`InternalProvider`](crate::InternalProvider). - pub fn is_least_specific_field_kind_internal_provider(&self) -> AttributeContextResult { - if let InternalProvider(_) = self.least_specific_field_kind()? 
{ - Ok(true) - } else { - Ok(false) - } - } - - /// Returns true if the least specific field corresponds to an [`InternalProvider`](crate::InternalProvider) - /// _or_ an [`ExternalProvider`](crate::ExternalProvider). - pub fn is_least_specific_field_kind_internal_or_external_provider( - &self, - ) -> AttributeContextResult { - match self.least_specific_field_kind()? { - InternalProvider(_) | ExternalProvider(_) => Ok(true), - _ => Ok(false), - } - } - - /// Returns true if the least specific field corresponds to an [`ExternalProvider`](crate::ExternalProvider). - pub fn is_least_specific_field_kind_external_provider(&self) -> AttributeContextResult { - if let ExternalProvider(_) = self.least_specific_field_kind()? { - Ok(true) - } else { - Ok(false) - } - } - - /// Returns the [`AttributeContextLeastSpecificFieldKind`] that is "set" for [`Self`]. - pub fn least_specific_field_kind( - &self, - ) -> AttributeContextResult { - if self.prop_id != PropId::NONE { - Ok(AttributeContextLeastSpecificFieldKind::Prop(self.prop_id)) - } else if self.internal_provider_id != InternalProviderId::NONE { - Ok(InternalProvider(self.internal_provider_id)) - } else if self.external_provider_id != ExternalProviderId::NONE { - Ok(ExternalProvider(self.external_provider_id)) - } else { - // This should never be possible to hit, but this check exists to protect - // against potential regressions. - Err(AttributeContextError::LeastSpecificFieldKindNotFound) - } - } - - pub async fn prop(&self, ctx: &DalContext) -> AttributeContextResult> { - Ok(Prop::get_by_id(ctx, &self.prop_id()).await?) - } - - pub async fn internal_provider( - &self, - ctx: &DalContext, - ) -> AttributeContextResult> { - Ok(crate::InternalProvider::get_by_id(ctx, &self.internal_provider_id()).await?) - } - - pub async fn external_provider( - &self, - ctx: &DalContext, - ) -> AttributeContextResult> { - Ok(crate::ExternalProvider::get_by_id(ctx, &self.external_provider_id()).await?) 
- } - - pub fn check(&self, context: Self) -> bool { - let prop = self.prop_id == context.prop_id - && context.internal_provider_id.is_none() - && context.external_provider_id.is_none(); - let internal_provider = context.prop_id.is_none() - && self.internal_provider_id == context.internal_provider_id - && context.external_provider_id.is_none(); - let external_provider = context.prop_id.is_none() - && context.internal_provider_id.is_none() - && self.external_provider_id == context.external_provider_id; - let component = self.component_id == context.component_id || self.component_id.is_none(); - (prop || internal_provider || external_provider) && component - } -} - -#[remain::sorted] -#[derive(Error, Debug)] -pub enum AttributeContextBuilderError { - #[error( - "cannot specify more than one field at the lowest level in the order of precedence: {0:?}" - )] - MultipleLeastSpecificFieldsSpecified(AttributeContextBuilder), - #[error("for builder {0:?}, the following fields must be set: {1:?}")] - PrerequisteFieldsUnset(AttributeContextBuilder, Vec<&'static str>), -} - -pub type AttributeContextBuilderResult = Result; - -/// A builder with non-consuming "setter" and "unsetter" methods that -/// verify the order of precedence for [`AttributeContext`]. -#[derive(Deserialize, Serialize, Debug, Clone, PartialEq, Eq, Copy)] -pub struct AttributeContextBuilder { - prop_id: PropId, - internal_provider_id: InternalProviderId, - external_provider_id: ExternalProviderId, - component_id: ComponentId, -} - -/// Returns [`Self::new()`]. -impl Default for AttributeContextBuilder { - fn default() -> Self { - Self::new() - } -} - -impl AttributeContextBuilder { - /// Creates [`Self`] with all fields unset. 
- pub fn new() -> Self { - Self { - prop_id: PropId::NONE, - internal_provider_id: InternalProviderId::NONE, - external_provider_id: ExternalProviderId::NONE, - component_id: ComponentId::NONE, - } - } - - pub fn to_context_unchecked(&self) -> AttributeContext { - AttributeContext { - prop_id: self.prop_id, - internal_provider_id: self.internal_provider_id, - external_provider_id: self.external_provider_id, - component_id: self.component_id, - } - } - - /// Converts [`Self`] to [`AttributeContext`]. This method will - /// fail if the order of precedence is broken (i.e. more-specific - /// fields are set, but one-to-all less-specific fields are unset) - /// or if the field of least specificity, [`PropId`], is unset. - pub fn to_context(&self) -> AttributeContextBuilderResult { - let mut unset_prerequisite_fields = Vec::new(); - - // The lowest level in the order of precedence must always be set. - if self.prop_id == PropId::NONE - && self.internal_provider_id == InternalProviderId::NONE - && self.external_provider_id == ExternalProviderId::NONE - { - unset_prerequisite_fields.push("PropId or InternalProviderId or ExternalProviderId"); - } - - // Only one field at the lowest level in the order of precedence can be set. 
- #[allow(clippy::nonminimal_bool)] - if (self.prop_id != PropId::NONE && self.internal_provider_id != InternalProviderId::NONE) - || (self.prop_id != PropId::NONE - && self.external_provider_id != ExternalProviderId::NONE) - || (self.internal_provider_id != InternalProviderId::NONE - && self.external_provider_id != ExternalProviderId::NONE) - { - return Err(AttributeContextBuilderError::MultipleLeastSpecificFieldsSpecified(*self)); - } - - if !unset_prerequisite_fields.is_empty() { - return Err(AttributeContextBuilderError::PrerequisteFieldsUnset( - *self, - unset_prerequisite_fields, - )); - } - - Ok(AttributeContext { - prop_id: self.prop_id, - internal_provider_id: self.internal_provider_id, - external_provider_id: self.external_provider_id, - component_id: self.component_id, - }) - } - - /// Sets the [`PropId`] field. If the unset value is passed in, then - /// [`Self::unset_prop_id()`] is returned. - pub fn set_prop_id(&mut self, prop_id: PropId) -> &mut Self { - if prop_id == PropId::NONE { - return self.unset_prop_id(); - } - self.prop_id = prop_id; - self - } - - /// Sets the [`InternalProviderId`] field. If the unset value is passed in, then - /// [`Self::unset_internal_provider_id()`] is returned. - pub fn set_internal_provider_id( - &mut self, - internal_provider_id: InternalProviderId, - ) -> &mut Self { - if internal_provider_id == InternalProviderId::NONE { - return self.unset_internal_provider_id(); - } - self.internal_provider_id = internal_provider_id; - self - } - - /// Sets the [`ExternalProviderId`] field. If the unset value is passed in, then - /// [`Self::unset_external_provider_id()`] is returned. - pub fn set_external_provider_id( - &mut self, - external_provider_id: ExternalProviderId, - ) -> &mut Self { - if external_provider_id == ExternalProviderId::NONE { - return self.unset_external_provider_id(); - } - self.external_provider_id = external_provider_id; - self - } - - /// Sets the [`ComponentId`] field. 
If the unset value is passed in, then - /// [`Self::unset_component_id()`] is returned. - pub fn set_component_id(&mut self, component_id: ComponentId) -> &mut Self { - if component_id == ComponentId::NONE { - return self.unset_component_id(); - } - self.component_id = component_id; - self - } - - /// Unsets the [`PropId`]. - pub fn unset_prop_id(&mut self) -> &mut Self { - self.prop_id = PropId::NONE; - self - } - - /// Unsets the [`InternalProviderId`]. - pub fn unset_internal_provider_id(&mut self) -> &mut Self { - self.internal_provider_id = InternalProviderId::NONE; - self - } - - /// Unsets the [`ExternalProviderId`]. - pub fn unset_external_provider_id(&mut self) -> &mut Self { - self.external_provider_id = ExternalProviderId::NONE; - self - } - - /// Unsets the [`ComponentId`]. - pub fn unset_component_id(&mut self) -> &mut Self { - self.component_id = ComponentId::NONE; - self - } -} - -impl postgres_types::ToSql for AttributeContext { - fn to_sql( - &self, - ty: &postgres_types::Type, - out: &mut postgres_types::private::BytesMut, - ) -> Result> - where - Self: Sized, - { - let json = serde_json::to_value(self)?; - postgres_types::ToSql::to_sql(&json, ty, out) - } - - fn accepts(ty: &postgres_types::Type) -> bool - where - Self: Sized, - { - ty == &postgres_types::Type::JSONB - } - - fn to_sql_checked( - &self, - ty: &postgres_types::Type, - out: &mut postgres_types::private::BytesMut, - ) -> Result> { - let json = serde_json::to_value(self)?; - postgres_types::ToSql::to_sql(&json, ty, out) - } -} - -// NOTE(nick): there are only error permutations tests for fields that have at least two prerequisite -// fields. Thus ComponentId, and SchemaVariantId have error permutations tests and SchemaId -// and PropId do not. - -// TODO(nick): for the aforementioned error permutations tests, when/if more "layers" are added, we will likely -// need a helper to "flip" values from set to unset (and vice versa) to automatically test every condition. 
-// Currently, all error permutations are manually written. In an example using an automatic setup, the -// helper could provide an iteration method that flips each fields value from unset to -// "Id::generate()" and vice versa. Then, the test writer could supply contraints to indicate when the helper -// should expect failure or success upon iteration. - -#[cfg(test)] -mod tests { - use super::*; - - #[test] - fn less_specific() { - let prop_id = PropId::generate(); - let component_id = ComponentId::generate(); - let context = AttributeContextBuilder::new() - .set_prop_id(prop_id) - .set_component_id(component_id) - .to_context() - .expect("cannot build attribute context"); - assert!(!context.is_least_specific()); - - let new_context = context - .less_specific() - .expect("cannot create less specific context"); - - assert_eq!( - AttributeContextBuilder::new() - .set_prop_id(prop_id) - .to_context() - .expect("cannot create expected context"), - new_context, - ); - - let new_context = new_context - .less_specific() - .expect("cannot create less specific context"); - - // Should be the exact same. - assert_eq!( - AttributeContextBuilder::new() - .set_prop_id(prop_id) - .to_context() - .expect("cannot create expected context"), - new_context, - ); - assert!(new_context.is_least_specific()); - } - - #[test] - fn builder_new() { - let prop_id = PropId::generate(); - let component_id = ComponentId::generate(); - - let mut builder = AttributeContextBuilder::new(); - - // Empty (FAIL) - assert!(builder.to_context().is_err()); - - // ComponentId without PropId (FAIL) - builder.set_component_id(component_id); - assert!(builder.to_context().is_err()); - builder.unset_component_id(); - - // PropId (PASS) - builder.set_prop_id(prop_id); - assert!(builder.to_context().is_ok()); - - // ComponentId with PropId (PASS) - builder.set_component_id(component_id); - assert!(builder.to_context().is_ok()); - } -} +// //! 
This module contains the [`AttributeContext`], and its corresponding builder, [`AttributeContextBuilder`]. +// //! The context can be scoped with varying levels of specificity, using an order of precedence. +// //! The builder ensures the correct order of precedence is maintained whilst setting and unsetting +// //! fields of specificity. +// //! +// //! ## The Order of Precedence +// //! +// //! The order of precedence is as follows (from least to most "specificity"): +// //! - [`PropId`] / [`InternalProviderId`] / [`ExternalProviderId`] +// //! - [`ComponentId`] +// //! +// //! At the level of least "specificity", you can provider have a [`PropId`], an +// //! [`InternalProviderId`], or an [`ExternalProviderId`]. However, you can only provide one and only +// //! one for an [`AttributeContext`] since they are at the same "level" in the order of precedence. +// //! +// //! ## `AttributeContext` vs. `AttributeReadContext` +// //! +// //! While the [`AttributeContext`] can be used for both read and write queries, the +// //! [`AttributeReadContext`](crate::AttributeReadContext) is useful for read-only queries and for +// //! flexibility when searching for objects of varying levels of specificity. + +// use serde::{Deserialize, Serialize}; +// use std::cmp::Ordering; +// use std::default::Default; +// use thiserror::Error; + +// use crate::{ +// ComponentId, DalContext, ExternalProviderId, InternalProviderId, Prop, PropId, StandardModel, +// StandardModelError, +// }; + +// pub mod read; + +// use crate::attribute::context::AttributeContextLeastSpecificFieldKind::{ +// ExternalProvider, InternalProvider, +// }; +// pub use read::AttributeReadContext; + +// /// Indicates which least specific field for an [`AttributeContext`] is specified and contains the +// /// field's value. 
+// #[remain::sorted] +// #[derive(Debug)] +// pub enum AttributeContextLeastSpecificFieldKind { +// ExternalProvider(ExternalProviderId), +// InternalProvider(InternalProviderId), +// Prop(PropId), +// } + +// #[remain::sorted] +// #[derive(Error, Debug)] +// pub enum AttributeContextError { +// #[error("attribute context builder error: {0}")] +// AttributeContextBuilder(#[from] AttributeContextBuilderError), +// #[error("could not find least specific field")] +// LeastSpecificFieldKindNotFound, +// #[error("standard model error: {0}")] +// StandardModel(#[from] StandardModelError), +// } + +// pub type AttributeContextResult = Result; + +// #[derive(Deserialize, Serialize, Debug, Clone, Copy, PartialEq, Eq, Hash)] +// pub struct AttributeContext { +// #[serde(rename = "attribute_context_prop_id")] +// prop_id: PropId, +// #[serde(rename = "attribute_context_internal_provider_id")] +// internal_provider_id: InternalProviderId, +// #[serde(rename = "attribute_context_external_provider_id")] +// external_provider_id: ExternalProviderId, +// #[serde(rename = "attribute_context_component_id")] +// component_id: ComponentId, +// } + +// impl From for AttributeContextBuilder { +// fn from(from_context: AttributeContext) -> AttributeContextBuilder { +// AttributeContextBuilder { +// prop_id: from_context.prop_id(), +// internal_provider_id: from_context.internal_provider_id(), +// external_provider_id: from_context.external_provider_id(), +// component_id: from_context.component_id(), +// } +// } +// } + +// impl From for AttributeContextBuilder { +// fn from(from_read_context: AttributeReadContext) -> AttributeContextBuilder { +// let mut builder = AttributeContextBuilder::new(); +// if let Some(prop_id) = from_read_context.prop_id { +// builder.set_prop_id(prop_id); +// } +// if let Some(internal_provider_id) = from_read_context.internal_provider_id { +// builder.set_internal_provider_id(internal_provider_id); +// } +// if let Some(external_provider_id) = 
from_read_context.external_provider_id { +// builder.set_external_provider_id(external_provider_id); +// } +// if let Some(component_id) = from_read_context.component_id { +// builder.set_component_id(component_id); +// } +// builder +// } +// } + +// impl PartialOrd for AttributeContext { +// /// How to compare two [`AttributeContexts`](crate::AttributeContext): +// /// +// /// - [`Ordering::Equal`]: same level of specificity between two contexts +// /// - [`Ordering::Greater`]: "self" is "more-specific" than "other" +// /// - [`Ordering::Less`]: "self" is "less-specific" than "other" +// /// - [`None`]: "self" and "other" have different "least-specific" fields (e.g. "self" is +// /// [`Prop`](crate::Prop)-specific and "other" is [`InternalProvider`](crate::InternalProvider)-specific. +// fn partial_cmp(&self, other: &Self) -> Option { +// if !self.is_component_unset() { +// return if !other.is_component_unset() { +// Some(Ordering::Equal) +// } else { +// Some(Ordering::Greater) +// }; +// } + +// if !self.is_external_provider_unset() { +// return if !other.is_component_unset() { +// Some(Ordering::Less) +// } else if !other.is_external_provider_unset() { +// Some(Ordering::Equal) +// } else { +// None +// }; +// } + +// if !self.is_internal_provider_unset() { +// return if !other.is_component_unset() { +// Some(Ordering::Less) +// } else if !other.is_internal_provider_unset() { +// Some(Ordering::Equal) +// } else { +// None +// }; +// } + +// if !self.is_prop_unset() { +// return if !other.is_component_unset() { +// Some(Ordering::Less) +// } else if !other.is_prop_unset() { +// Some(Ordering::Equal) +// } else { +// None +// }; +// } + +// None +// } +// } + +// impl AttributeContext { +// pub fn builder() -> AttributeContextBuilder { +// AttributeContextBuilder::new() +// } + +// pub fn prop_id(&self) -> PropId { +// self.prop_id +// } + +// pub fn is_prop_unset(&self) -> bool { +// self.prop_id == PropId::NONE +// } + +// pub fn internal_provider_id(&self) 
-> InternalProviderId { +// self.internal_provider_id +// } + +// pub fn is_internal_provider_unset(&self) -> bool { +// self.internal_provider_id == InternalProviderId::NONE +// } + +// pub fn external_provider_id(&self) -> ExternalProviderId { +// self.external_provider_id +// } + +// pub fn is_external_provider_unset(&self) -> bool { +// self.external_provider_id == ExternalProviderId::NONE +// } + +// pub fn component_id(&self) -> ComponentId { +// self.component_id +// } + +// pub fn is_component_unset(&self) -> bool { +// self.component_id == ComponentId::NONE +// } + +// pub fn is_least_specific(&self) -> bool { +// self.component_id == ComponentId::NONE +// } + +// /// Return a new [`AttributeContext`] with the most specific piece +// /// of the current [`AttributeContext`] unset, widening the scope +// /// of the context by one step. If widening the context would +// /// result in everything being unset, it will return a new copy of +// /// the current [`AttributeContext`]. +// pub fn less_specific(&self) -> AttributeContextResult { +// let mut builder = AttributeContextBuilder::from(*self); +// if self.component_id() != ComponentId::NONE { +// builder.unset_component_id(); +// } +// Ok(builder.to_context()?) +// } + +// /// Returns true if the least specific field corresponds to a [`Prop`](crate::Prop). +// pub fn is_least_specific_field_kind_prop(&self) -> AttributeContextResult { +// if let AttributeContextLeastSpecificFieldKind::Prop(_) = self.least_specific_field_kind()? { +// Ok(true) +// } else { +// Ok(false) +// } +// } + +// /// Returns true if the least specific field corresponds to an [`InternalProvider`](crate::InternalProvider). +// pub fn is_least_specific_field_kind_internal_provider(&self) -> AttributeContextResult { +// if let InternalProvider(_) = self.least_specific_field_kind()? 
{ +// Ok(true) +// } else { +// Ok(false) +// } +// } + +// /// Returns true if the least specific field corresponds to an [`InternalProvider`](crate::InternalProvider) +// /// _or_ an [`ExternalProvider`](crate::ExternalProvider). +// pub fn is_least_specific_field_kind_internal_or_external_provider( +// &self, +// ) -> AttributeContextResult { +// match self.least_specific_field_kind()? { +// InternalProvider(_) | ExternalProvider(_) => Ok(true), +// _ => Ok(false), +// } +// } + +// /// Returns true if the least specific field corresponds to an [`ExternalProvider`](crate::ExternalProvider). +// pub fn is_least_specific_field_kind_external_provider(&self) -> AttributeContextResult { +// if let ExternalProvider(_) = self.least_specific_field_kind()? { +// Ok(true) +// } else { +// Ok(false) +// } +// } + +// /// Returns the [`AttributeContextLeastSpecificFieldKind`] that is "set" for [`Self`]. +// pub fn least_specific_field_kind( +// &self, +// ) -> AttributeContextResult { +// if self.prop_id != PropId::NONE { +// Ok(AttributeContextLeastSpecificFieldKind::Prop(self.prop_id)) +// } else if self.internal_provider_id != InternalProviderId::NONE { +// Ok(InternalProvider(self.internal_provider_id)) +// } else if self.external_provider_id != ExternalProviderId::NONE { +// Ok(ExternalProvider(self.external_provider_id)) +// } else { +// // This should never be possible to hit, but this check exists to protect +// // against potential regressions. +// Err(AttributeContextError::LeastSpecificFieldKindNotFound) +// } +// } + +// pub async fn prop(&self, ctx: &DalContext) -> AttributeContextResult> { +// Ok(Prop::get_by_id(ctx, &self.prop_id()).await?) +// } + +// pub async fn internal_provider( +// &self, +// ctx: &DalContext, +// ) -> AttributeContextResult> { +// Ok(crate::InternalProvider::get_by_id(ctx, &self.internal_provider_id()).await?) 
+// } + +// pub async fn external_provider( +// &self, +// ctx: &DalContext, +// ) -> AttributeContextResult> { +// Ok(crate::ExternalProvider::get_by_id(ctx, &self.external_provider_id()).await?) +// } +// +// pub fn check(&self, context: Self) -> bool { +// let prop = self.prop_id == context.prop_id +// && context.internal_provider_id.is_none() +// && context.external_provider_id.is_none(); +// let internal_provider = context.prop_id.is_none() +// && self.internal_provider_id == context.internal_provider_id +// && context.external_provider_id.is_none(); +// let external_provider = context.prop_id.is_none() +// && context.internal_provider_id.is_none() +// && self.external_provider_id == context.external_provider_id; +// let component = self.component_id == context.component_id || self.component_id.is_none(); +// (prop || internal_provider || external_provider) && component +// } +// } + +// #[remain::sorted] +// #[derive(Error, Debug)] +// pub enum AttributeContextBuilderError { +// #[error( +// "cannot specify more than one field at the lowest level in the order of precedence: {0:?}" +// )] +// MultipleLeastSpecificFieldsSpecified(AttributeContextBuilder), +// #[error("for builder {0:?}, the following fields must be set: {1:?}")] +// PrerequisteFieldsUnset(AttributeContextBuilder, Vec<&'static str>), +// } + +// pub type AttributeContextBuilderResult = Result; + +// /// A builder with non-consuming "setter" and "unsetter" methods that +// /// verify the order of precedence for [`AttributeContext`]. +// #[derive(Deserialize, Serialize, Debug, Clone, PartialEq, Eq, Copy)] +// pub struct AttributeContextBuilder { +// prop_id: PropId, +// internal_provider_id: InternalProviderId, +// external_provider_id: ExternalProviderId, +// component_id: ComponentId, +// } + +// /// Returns [`Self::new()`]. 
+// impl Default for AttributeContextBuilder { +// fn default() -> Self { +// Self::new() +// } +// } + +// impl AttributeContextBuilder { +// /// Creates [`Self`] with all fields unset. +// pub fn new() -> Self { +// Self { +// prop_id: PropId::NONE, +// internal_provider_id: InternalProviderId::NONE, +// external_provider_id: ExternalProviderId::NONE, +// component_id: ComponentId::NONE, +// } +// } + +// pub fn to_context_unchecked(&self) -> AttributeContext { +// AttributeContext { +// prop_id: self.prop_id, +// internal_provider_id: self.internal_provider_id, +// external_provider_id: self.external_provider_id, +// component_id: self.component_id, +// } +// } + +// /// Converts [`Self`] to [`AttributeContext`]. This method will +// /// fail if the order of precedence is broken (i.e. more-specific +// /// fields are set, but one-to-all less-specific fields are unset) +// /// or if the field of least specificity, [`PropId`], is unset. +// pub fn to_context(&self) -> AttributeContextBuilderResult { +// let mut unset_prerequisite_fields = Vec::new(); + +// // The lowest level in the order of precedence must always be set. +// if self.prop_id == PropId::NONE +// && self.internal_provider_id == InternalProviderId::NONE +// && self.external_provider_id == ExternalProviderId::NONE +// { +// unset_prerequisite_fields.push("PropId or InternalProviderId or ExternalProviderId"); +// } + +// // Only one field at the lowest level in the order of precedence can be set. 
+// #[allow(clippy::nonminimal_bool)] +// if (self.prop_id != PropId::NONE && self.internal_provider_id != InternalProviderId::NONE) +// || (self.prop_id != PropId::NONE +// && self.external_provider_id != ExternalProviderId::NONE) +// || (self.internal_provider_id != InternalProviderId::NONE +// && self.external_provider_id != ExternalProviderId::NONE) +// { +// return Err(AttributeContextBuilderError::MultipleLeastSpecificFieldsSpecified(*self)); +// } + +// if !unset_prerequisite_fields.is_empty() { +// return Err(AttributeContextBuilderError::PrerequisteFieldsUnset( +// *self, +// unset_prerequisite_fields, +// )); +// } + +// Ok(AttributeContext { +// prop_id: self.prop_id, +// internal_provider_id: self.internal_provider_id, +// external_provider_id: self.external_provider_id, +// component_id: self.component_id, +// }) +// } + +// /// Sets the [`PropId`] field. If the unset value is passed in, then +// /// [`Self::unset_prop_id()`] is returned. +// pub fn set_prop_id(&mut self, prop_id: PropId) -> &mut Self { +// if prop_id == PropId::NONE { +// return self.unset_prop_id(); +// } +// self.prop_id = prop_id; +// self +// } + +// /// Sets the [`InternalProviderId`] field. If the unset value is passed in, then +// /// [`Self::unset_internal_provider_id()`] is returned. +// pub fn set_internal_provider_id( +// &mut self, +// internal_provider_id: InternalProviderId, +// ) -> &mut Self { +// if internal_provider_id == InternalProviderId::NONE { +// return self.unset_internal_provider_id(); +// } +// self.internal_provider_id = internal_provider_id; +// self +// } + +// /// Sets the [`ExternalProviderId`] field. If the unset value is passed in, then +// /// [`Self::unset_external_provider_id()`] is returned. 
+// pub fn set_external_provider_id( +// &mut self, +// external_provider_id: ExternalProviderId, +// ) -> &mut Self { +// if external_provider_id == ExternalProviderId::NONE { +// return self.unset_external_provider_id(); +// } +// self.external_provider_id = external_provider_id; +// self +// } + +// /// Sets the [`ComponentId`] field. If the unset value is passed in, then +// /// [`Self::unset_component_id()`] is returned. +// pub fn set_component_id(&mut self, component_id: ComponentId) -> &mut Self { +// if component_id == ComponentId::NONE { +// return self.unset_component_id(); +// } +// self.component_id = component_id; +// self +// } + +// /// Unsets the [`PropId`]. +// pub fn unset_prop_id(&mut self) -> &mut Self { +// self.prop_id = PropId::NONE; +// self +// } + +// /// Unsets the [`InternalProviderId`]. +// pub fn unset_internal_provider_id(&mut self) -> &mut Self { +// self.internal_provider_id = InternalProviderId::NONE; +// self +// } + +// /// Unsets the [`ExternalProviderId`]. +// pub fn unset_external_provider_id(&mut self) -> &mut Self { +// self.external_provider_id = ExternalProviderId::NONE; +// self +// } + +// /// Unsets the [`ComponentId`]. 
+// pub fn unset_component_id(&mut self) -> &mut Self { +// self.component_id = ComponentId::NONE; +// self +// } +// } + +// impl postgres_types::ToSql for AttributeContext { +// fn to_sql( +// &self, +// ty: &postgres_types::Type, +// out: &mut postgres_types::private::BytesMut, +// ) -> Result> +// where +// Self: Sized, +// { +// let json = serde_json::to_value(self)?; +// postgres_types::ToSql::to_sql(&json, ty, out) +// } + +// fn accepts(ty: &postgres_types::Type) -> bool +// where +// Self: Sized, +// { +// ty == &postgres_types::Type::JSONB +// } + +// fn to_sql_checked( +// &self, +// ty: &postgres_types::Type, +// out: &mut postgres_types::private::BytesMut, +// ) -> Result> { +// let json = serde_json::to_value(self)?; +// postgres_types::ToSql::to_sql(&json, ty, out) +// } +// } + +// // NOTE(nick): there are only error permutations tests for fields that have at least two prerequisite +// // fields. Thus ComponentId, and SchemaVariantId have error permutations tests and SchemaId +// // and PropId do not. + +// // TODO(nick): for the aforementioned error permutations tests, when/if more "layers" are added, we will likely +// // need a helper to "flip" values from set to unset (and vice versa) to automatically test every condition. +// // Currently, all error permutations are manually written. In an example using an automatic setup, the +// // helper could provide an iteration method that flips each fields value from unset to +// // "Id::generate()" and vice versa. Then, the test writer could supply contraints to indicate when the helper +// // should expect failure or success upon iteration. 
+ +// #[cfg(test)] +// mod tests { +// use super::*; + +// #[test] +// fn less_specific() { +// let prop_id = PropId::generate(); +// let component_id = ComponentId::generate(); +// let context = AttributeContextBuilder::new() +// .set_prop_id(prop_id) +// .set_component_id(component_id) +// .to_context() +// .expect("cannot build attribute context"); +// assert!(!context.is_least_specific()); + +// let new_context = context +// .less_specific() +// .expect("cannot create less specific context"); + +// assert_eq!( +// AttributeContextBuilder::new() +// .set_prop_id(prop_id) +// .to_context() +// .expect("cannot create expected context"), +// new_context, +// ); + +// let new_context = new_context +// .less_specific() +// .expect("cannot create less specific context"); + +// // Should be the exact same. +// assert_eq!( +// AttributeContextBuilder::new() +// .set_prop_id(prop_id) +// .to_context() +// .expect("cannot create expected context"), +// new_context, +// ); +// assert!(new_context.is_least_specific()); +// } + +// #[test] +// fn builder_new() { +// let prop_id = PropId::generate(); +// let component_id = ComponentId::generate(); + +// let mut builder = AttributeContextBuilder::new(); + +// // Empty (FAIL) +// assert!(builder.to_context().is_err()); + +// // ComponentId without PropId (FAIL) +// builder.set_component_id(component_id); +// assert!(builder.to_context().is_err()); +// builder.unset_component_id(); + +// // PropId (PASS) +// builder.set_prop_id(prop_id); +// assert!(builder.to_context().is_ok()); + +// // ComponentId with PropId (PASS) +// builder.set_component_id(component_id); +// assert!(builder.to_context().is_ok()); +// } +// } diff --git a/lib/dal/src/attribute/context/read.rs b/lib/dal/src/attribute/context/read.rs index 7077e1eb46..0a29fef2c0 100644 --- a/lib/dal/src/attribute/context/read.rs +++ b/lib/dal/src/attribute/context/read.rs @@ -1,237 +1,237 @@ -use serde::{Deserialize, Serialize}; - -use crate::{AttributeContext, ComponentId, 
ExternalProviderId, InternalProviderId, PropId}; - -/// An `AttributeReadContext` allows for saying "do not use this field -/// to filter results" by providing [`None`] for the field's value. -/// It also allows for saying "explicitly filter out results for that -/// have this field set" by providing the unset value for the field's -/// value. -/// -/// For example: -/// -/// ```rust -/// # use dal::attribute::context::read::AttributeReadContext; -/// # use dal::{ExternalProviderId, InternalProviderId, ComponentId}; -/// let read_context = AttributeReadContext { -/// prop_id: None, -/// internal_provider_id: Some(InternalProviderId::NONE), -/// external_provider_id: Some(ExternalProviderId::NONE), -/// component_id: Some(ComponentId::generate()) -/// }; -/// ``` -/// -/// The above `AttributeReadContext` would be used for finding all -/// attributes, across all [`Props`](crate::Prop) that have been set -/// for a given [`ComponentId`]. -#[derive(Deserialize, Serialize, Debug, Clone, Copy, PartialEq, Eq)] -pub struct AttributeReadContext { - #[serde(rename = "attribute_context_prop_id")] - pub prop_id: Option, - #[serde(rename = "attribute_context_internal_provider_id")] - pub internal_provider_id: Option, - #[serde(rename = "attribute_context_external_provider_id")] - pub external_provider_id: Option, - #[serde(rename = "attribute_context_component_id")] - pub component_id: Option, -} - -impl Default for AttributeReadContext { - fn default() -> Self { - Self { - prop_id: Some(PropId::NONE), - internal_provider_id: Some(InternalProviderId::NONE), - external_provider_id: Some(ExternalProviderId::NONE), - component_id: Some(ComponentId::NONE), - } - } -} - -impl From for AttributeReadContext { - fn from(from_context: AttributeContext) -> Self { - Self { - prop_id: Some(from_context.prop_id()), - internal_provider_id: Some(from_context.internal_provider_id()), - external_provider_id: Some(from_context.external_provider_id()), - component_id: 
Some(from_context.component_id()), - } - } -} - -impl AttributeReadContext { - /// Creates a [`read context`](Self) with a given [`PropId`](crate::Prop) - /// and all other fields set to their defaults. - pub fn default_with_prop(prop_id: PropId) -> Self { - Self { - prop_id: Some(prop_id), - ..Self::default() - } - } - - pub fn default_with_prop_and_component_id( - prop_id: PropId, - component_id: Option, - ) -> Self { - Self { - prop_id: Some(prop_id), - component_id: match component_id { - Some(component_id) => Some(component_id), - None => Some(ComponentId::NONE), - }, - ..Self::default() - } - } - - /// Creates a [`read context`](Self) with a given [`InternalProviderId`](crate::InternalProvider) - /// and all other fields set to their defaults. - pub fn default_with_internal_provider(internal_provider_id: InternalProviderId) -> Self { - Self { - internal_provider_id: Some(internal_provider_id), - ..Self::default() - } - } - - /// Creates a [`read context`](Self) with a given [`ExternalProviderId`](crate::ExternalProvider) - /// and all other fields set to their defaults. 
- pub fn default_with_external_provider(external_provider_id: ExternalProviderId) -> Self { - Self { - external_provider_id: Some(external_provider_id), - ..Self::default() - } - } - - pub fn prop_id(&self) -> Option { - self.prop_id - } - - pub fn has_prop_id(&self) -> bool { - self.prop_id.is_some() - } - - pub fn has_set_prop_id(&self) -> bool { - if let Some(prop_id) = self.prop_id { - prop_id != PropId::NONE - } else { - false - } - } - - pub fn has_unset_prop_id(&self) -> bool { - if let Some(prop_id) = self.prop_id { - prop_id == PropId::NONE - } else { - false - } - } - - pub fn internal_provider_id(&self) -> Option { - self.internal_provider_id - } - - pub fn has_internal_provider_id(&self) -> bool { - self.internal_provider_id.is_some() - } - - pub fn has_set_internal_provider(&self) -> bool { - if let Some(internal_provider) = self.internal_provider_id { - internal_provider != InternalProviderId::NONE - } else { - false - } - } - - pub fn has_unset_internal_provider(&self) -> bool { - if let Some(internal_provider) = self.internal_provider_id { - internal_provider == InternalProviderId::NONE - } else { - false - } - } - - pub fn external_provider_id(&self) -> Option { - self.external_provider_id - } - - pub fn has_external_provider_id(&self) -> bool { - self.external_provider_id.is_some() - } - - pub fn has_set_external_provider(&self) -> bool { - if let Some(external_provider) = self.external_provider_id { - external_provider != ExternalProviderId::NONE - } else { - false - } - } - - pub fn has_unset_external_provider(&self) -> bool { - if let Some(external_provider) = self.external_provider_id { - external_provider == ExternalProviderId::NONE - } else { - false - } - } - - pub fn component_id(&self) -> Option { - self.component_id - } - - pub fn has_component_id(&self) -> bool { - self.component_id.is_some() - } - - pub fn has_set_component_id(&self) -> bool { - if let Some(component_id) = self.component_id { - component_id != ComponentId::NONE - } 
else { - false - } - } - - pub fn has_unset_component_id(&self) -> bool { - if let Some(component_id) = self.component_id { - component_id == ComponentId::NONE - } else { - false - } - } - - pub fn any() -> Self { - Self { - prop_id: None, - internal_provider_id: None, - external_provider_id: None, - component_id: None, - } - } -} - -impl postgres_types::ToSql for AttributeReadContext { - fn to_sql( - &self, - ty: &postgres_types::Type, - out: &mut postgres_types::private::BytesMut, - ) -> Result> - where - Self: Sized, - { - let json = serde_json::to_value(self)?; - postgres_types::ToSql::to_sql(&json, ty, out) - } - - fn accepts(ty: &postgres_types::Type) -> bool - where - Self: Sized, - { - ty == &postgres_types::Type::JSONB - } - - fn to_sql_checked( - &self, - ty: &postgres_types::Type, - out: &mut postgres_types::private::BytesMut, - ) -> Result> { - let json = serde_json::to_value(self)?; - postgres_types::ToSql::to_sql(&json, ty, out) - } -} +// use serde::{Deserialize, Serialize}; + +// use crate::{AttributeContext, ComponentId, ExternalProviderId, InternalProviderId, PropId}; + +// /// An `AttributeReadContext` allows for saying "do not use this field +// /// to filter results" by providing [`None`] for the field's value. +// /// It also allows for saying "explicitly filter out results for that +// /// have this field set" by providing the unset value for the field's +// /// value. 
+// /// +// /// For example: +// /// +// /// ```rust +// /// # use dal::attribute::context::read::AttributeReadContext; +// /// # use dal::{ExternalProviderId, InternalProviderId, ComponentId}; +// /// let read_context = AttributeReadContext { +// /// prop_id: None, +// /// internal_provider_id: Some(InternalProviderId::NONE), +// /// external_provider_id: Some(ExternalProviderId::NONE), +// /// component_id: Some(ComponentId::generate()) +// /// }; +// /// ``` +// /// +// /// The above `AttributeReadContext` would be used for finding all +// /// attributes, across all [`Props`](crate::Prop) that have been set +// /// for a given [`ComponentId`]. +// #[derive(Deserialize, Serialize, Debug, Clone, Copy, PartialEq, Eq)] +// pub struct AttributeReadContext { +// #[serde(rename = "attribute_context_prop_id")] +// pub prop_id: Option, +// #[serde(rename = "attribute_context_internal_provider_id")] +// pub internal_provider_id: Option, +// #[serde(rename = "attribute_context_external_provider_id")] +// pub external_provider_id: Option, +// #[serde(rename = "attribute_context_component_id")] +// pub component_id: Option, +// } + +// impl Default for AttributeReadContext { +// fn default() -> Self { +// Self { +// prop_id: Some(PropId::NONE), +// internal_provider_id: Some(InternalProviderId::NONE), +// external_provider_id: Some(ExternalProviderId::NONE), +// component_id: Some(ComponentId::NONE), +// } +// } +// } + +// impl From for AttributeReadContext { +// fn from(from_context: AttributeContext) -> Self { +// Self { +// prop_id: Some(from_context.prop_id()), +// internal_provider_id: Some(from_context.internal_provider_id()), +// external_provider_id: Some(from_context.external_provider_id()), +// component_id: Some(from_context.component_id()), +// } +// } +// } + +// impl AttributeReadContext { +// /// Creates a [`read context`](Self) with a given [`PropId`](crate::Prop) +// /// and all other fields set to their defaults. 
+// pub fn default_with_prop(prop_id: PropId) -> Self { +// Self { +// prop_id: Some(prop_id), +// ..Self::default() +// } +// } + +// pub fn default_with_prop_and_component_id( +// prop_id: PropId, +// component_id: Option, +// ) -> Self { +// Self { +// prop_id: Some(prop_id), +// component_id: match component_id { +// Some(component_id) => Some(component_id), +// None => Some(ComponentId::NONE), +// }, +// ..Self::default() +// } +// } + +// /// Creates a [`read context`](Self) with a given [`InternalProviderId`](crate::InternalProvider) +// /// and all other fields set to their defaults. +// pub fn default_with_internal_provider(internal_provider_id: InternalProviderId) -> Self { +// Self { +// internal_provider_id: Some(internal_provider_id), +// ..Self::default() +// } +// } + +// /// Creates a [`read context`](Self) with a given [`ExternalProviderId`](crate::ExternalProvider) +// /// and all other fields set to their defaults. +// pub fn default_with_external_provider(external_provider_id: ExternalProviderId) -> Self { +// Self { +// external_provider_id: Some(external_provider_id), +// ..Self::default() +// } +// } + +// pub fn prop_id(&self) -> Option { +// self.prop_id +// } + +// pub fn has_prop_id(&self) -> bool { +// self.prop_id.is_some() +// } + +// pub fn has_set_prop_id(&self) -> bool { +// if let Some(prop_id) = self.prop_id { +// prop_id != PropId::NONE +// } else { +// false +// } +// } + +// pub fn has_unset_prop_id(&self) -> bool { +// if let Some(prop_id) = self.prop_id { +// prop_id == PropId::NONE +// } else { +// false +// } +// } + +// pub fn internal_provider_id(&self) -> Option { +// self.internal_provider_id +// } + +// pub fn has_internal_provider_id(&self) -> bool { +// self.internal_provider_id.is_some() +// } + +// pub fn has_set_internal_provider(&self) -> bool { +// if let Some(internal_provider) = self.internal_provider_id { +// internal_provider != InternalProviderId::NONE +// } else { +// false +// } +// } + +// pub fn 
has_unset_internal_provider(&self) -> bool { +// if let Some(internal_provider) = self.internal_provider_id { +// internal_provider == InternalProviderId::NONE +// } else { +// false +// } +// } + +// pub fn external_provider_id(&self) -> Option { +// self.external_provider_id +// } + +// pub fn has_external_provider_id(&self) -> bool { +// self.external_provider_id.is_some() +// } + +// pub fn has_set_external_provider(&self) -> bool { +// if let Some(external_provider) = self.external_provider_id { +// external_provider != ExternalProviderId::NONE +// } else { +// false +// } +// } + +// pub fn has_unset_external_provider(&self) -> bool { +// if let Some(external_provider) = self.external_provider_id { +// external_provider == ExternalProviderId::NONE +// } else { +// false +// } +// } + +// pub fn component_id(&self) -> Option { +// self.component_id +// } + +// pub fn has_component_id(&self) -> bool { +// self.component_id.is_some() +// } + +// pub fn has_set_component_id(&self) -> bool { +// if let Some(component_id) = self.component_id { +// component_id != ComponentId::NONE +// } else { +// false +// } +// } + +// pub fn has_unset_component_id(&self) -> bool { +// if let Some(component_id) = self.component_id { +// component_id == ComponentId::NONE +// } else { +// false +// } +// } + +// pub fn any() -> Self { +// Self { +// prop_id: None, +// internal_provider_id: None, +// external_provider_id: None, +// component_id: None, +// } +// } +// } + +// impl postgres_types::ToSql for AttributeReadContext { +// fn to_sql( +// &self, +// ty: &postgres_types::Type, +// out: &mut postgres_types::private::BytesMut, +// ) -> Result> +// where +// Self: Sized, +// { +// let json = serde_json::to_value(self)?; +// postgres_types::ToSql::to_sql(&json, ty, out) +// } + +// fn accepts(ty: &postgres_types::Type) -> bool +// where +// Self: Sized, +// { +// ty == &postgres_types::Type::JSONB +// } + +// fn to_sql_checked( +// &self, +// ty: &postgres_types::Type, +// out: 
&mut postgres_types::private::BytesMut, +// ) -> Result> { +// let json = serde_json::to_value(self)?; +// postgres_types::ToSql::to_sql(&json, ty, out) +// } +// } diff --git a/lib/dal/src/attribute/prototype.rs b/lib/dal/src/attribute/prototype.rs index 3ff4a5c348..96e32a35e2 100644 --- a/lib/dal/src/attribute/prototype.rs +++ b/lib/dal/src/attribute/prototype.rs @@ -9,953 +9,1137 @@ //! [`Map`](crate::prop::PropKind::Map): Which key of the `Map` the value is //! for. -use async_recursion::async_recursion; +use content_store::{ContentHash, Store}; +use petgraph::prelude::EdgeRef; +use petgraph::Direction; use serde::{Deserialize, Serialize}; -use si_data_nats::NatsError; -use si_data_pg::PgError; +use strum::EnumDiscriminants; use telemetry::prelude::*; use thiserror::Error; +use crate::change_set_pointer::ChangeSetPointerError; +use crate::workspace_snapshot::content_address::{ContentAddress, ContentAddressDiscriminants}; +use crate::workspace_snapshot::edge_weight::{ + EdgeWeight, EdgeWeightError, EdgeWeightKind, EdgeWeightKindDiscriminants, +}; +use crate::workspace_snapshot::node_weight::{ + NodeWeight, NodeWeightDiscriminants, NodeWeightError, +}; +use crate::workspace_snapshot::WorkspaceSnapshotError; use crate::{ - attribute::{ - context::{AttributeContext, AttributeContextError}, - value::{AttributeValue, AttributeValueError, AttributeValueId}, - }, - func::FuncId, - func::{ - binding::{FuncBindingError, FuncBindingId}, - binding_return_value::{FuncBindingReturnValueError, FuncBindingReturnValueId}, - }, - impl_standard_model, pk, standard_model, standard_model_accessor, standard_model_has_many, - AttributePrototypeArgument, AttributePrototypeArgumentError, AttributeReadContext, ComponentId, - DalContext, ExternalProvider, ExternalProviderId, Func, FuncBackendResponseType, - HistoryEventError, InternalProvider, InternalProviderId, Prop, PropId, PropKind, - SchemaVariantId, StandardModel, StandardModelError, Tenancy, Timestamp, TransactionsError, - 
Visibility, + pk, AttributeValueId, DalContext, ExternalProviderId, FuncId, PropId, Timestamp, + TransactionsError, }; pub mod argument; -const ARGUMENT_VALUES_BY_NAME_FOR_HEAD_COMPONENT_ID: &str = include_str!( - "../queries/attribute_prototype/argument_values_by_name_for_head_component_id.sql" -); -const ATTRIBUTE_VALUES_IN_CONTEXT_OR_GREATER: &str = - include_str!("../queries/attribute_prototype/attribute_values_in_context_or_greater.sql"); -const LIST_BY_HEAD_FROM_EXTERNAL_PROVIDER_USE_WITH_TAIL: &str = include_str!( - "../queries/attribute_prototype/list_by_head_from_external_provider_use_with_tail.sql" -); -const LIST_FROM_INTERNAL_PROVIDER_USE: &str = - include_str!("../queries/attribute_prototype/list_from_internal_provider_use.sql"); -const LIST_FOR_CONTEXT: &str = include_str!("../queries/attribute_prototype/list_for_context.sql"); -const LIST_FOR_SCHEMA_VARIANT: &str = - include_str!("../queries/attribute_prototype/list_for_schema_variant.sql"); -const LIST_FUNCS_FOR_CONTEXT_AND_BACKEND_RESPONSE_TYPE: &str = include_str!("../queries/attribute_prototype/list_protoype_funcs_for_context_and_func_backend_response_type.sql"); -const FIND_WITH_PARENT_VALUE_AND_KEY_FOR_CONTEXT: &str = - include_str!("../queries/attribute_prototype/find_with_parent_value_and_key_for_context.sql"); -const FIND_FOR_FUNC: &str = include_str!("../queries/attribute_prototype/find_for_func.sql"); -const FIND_FOR_CONTEXT_AND_KEY: &str = - include_str!("../queries/attribute_prototype/find_for_context_and_key.sql"); -const FIND_FOR_CONTEXT_NULL_KEY: &str = - include_str!("../queries/attribute_prototype/find_for_context_null_key.sql"); -const FIND_FOR_FUNC_AS_VARIANT_AND_COMPONENT: &str = - include_str!("../queries/attribute_prototype/find_for_func_as_variant_and_component.sql"); - #[remain::sorted] #[derive(Error, Debug)] pub enum AttributePrototypeError { - #[error("attribute resolver context builder error: {0}")] - AttributeContextBuilder(#[from] AttributeContextError), - 
#[error("attribute prototype argument error: {0}")] - AttributePrototypeArgument(#[from] AttributePrototypeArgumentError), - #[error("attribute value error: {0}")] - AttributeValue(#[from] AttributeValueError), - #[error("unable to construct component view for attribute function execution")] - ComponentView, - #[error("external provider not found by id: {0}")] - ExternalProviderNotFound(ExternalProviderId), - #[error("func binding error: {0}")] - FuncBinding(#[from] FuncBindingError), - #[error("func binding return value error: {0}")] - FuncBindingReturnValue(#[from] FuncBindingReturnValueError), - #[error("cannot hard delete prototype from changeset if corresponding prototype does not exist on head or if the prototype does not represent an element of a map or array")] - HardDeletePrototypeWithNoHeadPrototypeOrKey(AttributePrototypeId), - #[error("history event error: {0}")] - HistoryEvent(#[from] HistoryEventError), - #[error("internal provider not found by id: {0}")] - InternalProviderNotFound(InternalProviderId), - #[error("invalid prop value; expected {0} but got {1}")] - InvalidPropValue(String, serde_json::Value), - #[error("cannot remove prototype with a least-specific context: {0}")] - LeastSpecificContextPrototypeRemovalNotAllowed(AttributePrototypeId), - #[error("cannot remove value with a least-specific context: {0}")] - LeastSpecificContextValueRemovalNotAllowed(AttributeValueId), - #[error("AttributePrototype is missing")] - Missing, - #[error("func not found: {0}")] - MissingFunc(String), - #[error("attribute prototypes must have an associated prop, and this one does not. 
bug!")] - MissingProp, - #[error("missing attribute value for tenancy {0:?}, visibility {1:?}, prototype {2:?}, with parent attribute value {3:?}")] - MissingValue( - Tenancy, - Visibility, - AttributePrototypeId, - Option, - ), - #[error("nats txn error: {0}")] - Nats(#[from] NatsError), - #[error("attribute prototype not found: {0} ({1:?})")] - NotFound(AttributePrototypeId, Visibility), - #[error( - "parent must be for an array, map, or object prop: attribute prototype id {0} is for a {1}" - )] - ParentNotAllowed(AttributePrototypeId, PropKind), - #[error("pg error: {0}")] - Pg(#[from] PgError), - #[error("prop not found by id: {0}")] - PropNotFound(PropId), - #[error("error serializing/deserializing json: {0}")] - SerdeJson(#[from] serde_json::Error), - #[error("standard model error: {0}")] - StandardModelError(#[from] StandardModelError), + #[error("change set error: {0}")] + ChangeSet(#[from] ChangeSetPointerError), + #[error("edge weight error: {0}")] + EdgeWeight(#[from] EdgeWeightError), + #[error("attribute prototype {0} is missing a function edge")] + MissingFunction(AttributePrototypeId), + #[error("node weight error: {0}")] + NodeWeight(#[from] NodeWeightError), + #[error("store error: {0}")] + Store(#[from] content_store::StoreError), #[error("transactions error: {0}")] Transactions(#[from] TransactionsError), + #[error("could not acquire lock: {0}")] + TryLock(#[from] tokio::sync::TryLockError), + #[error("workspace snapshot error: {0}")] + WorkspaceSnapshot(#[from] WorkspaceSnapshotError), } pub type AttributePrototypeResult = Result; -pk!(AttributePrototypePk); pk!(AttributePrototypeId); #[derive(Deserialize, Serialize, Debug, Clone, PartialEq, Eq)] pub struct AttributePrototype { - pk: AttributePrototypePk, id: AttributePrototypeId, - #[serde(flatten)] - tenancy: Tenancy, - #[serde(flatten)] - visibility: Visibility, - #[serde(flatten)] timestamp: Timestamp, +} - /// The [`AttributeContext`] corresponding to the prototype. 
- #[serde(flatten)] - pub context: AttributeContext, - /// The [`Func`](crate::Func) corresponding to the prototype. - func_id: FuncId, - /// An optional key used for tracking parentage. - pub key: Option, +#[derive(Debug, PartialEq)] +pub struct AttributePrototypeGraphNode { + id: AttributePrototypeId, + content_address: ContentAddress, + content: AttributePrototypeContentV1, } -/// This object is used for -/// [`AttributePrototype::list_by_head_from_external_provider_use_with_tail()`]. -#[derive(Serialize, Deserialize, Debug)] -pub struct AttributePrototypeGroupByHeadComponentId { - pub head_component_id: ComponentId, - pub attribute_prototype: AttributePrototype, +#[derive(EnumDiscriminants, Serialize, Deserialize, PartialEq)] +pub enum AttributePrototypeContent { + V1(AttributePrototypeContentV1), } -impl_standard_model! { - model: AttributePrototype, - pk: AttributePrototypePk, - id: AttributePrototypeId, - table_name: "attribute_prototypes", - history_event_label_base: "attribute_prototype", - history_event_message_name: "Attribute Prototype" +#[derive(Debug, Clone, Deserialize, Serialize, PartialEq)] +pub struct AttributePrototypeContentV1 { + pub timestamp: Timestamp, } -impl AttributePrototype { - #[allow(clippy::too_many_arguments)] - pub async fn new( - ctx: &DalContext, - func_id: FuncId, - func_binding_id: FuncBindingId, - func_binding_return_value_id: FuncBindingReturnValueId, - context: AttributeContext, - key: Option, - parent_attribute_value_id: Option, - ) -> AttributePrototypeResult { - let row = ctx.txns().await?.pg().query_one( - "SELECT new_attribute_prototype AS object FROM attribute_prototype_new_v1($1, $2, $3, $4, $5, $6, $7, $8)", - &[ - ctx.tenancy(), - ctx.visibility(), - &func_id, - &func_binding_id, - &func_binding_return_value_id, - &context, - &key, - &parent_attribute_value_id, - ], - ).await?; - - Ok(standard_model::finish_create_from_row(ctx, row).await?) 
+impl AttributePrototypeGraphNode { + pub fn assemble( + id: impl Into, + content_hash: ContentHash, + content: AttributePrototypeContentV1, + ) -> Self { + Self { + id: id.into(), + content_address: ContentAddress::AttributePrototype(content_hash), + content, + } } +} - #[allow(clippy::too_many_arguments)] - pub async fn new_with_existing_value( - ctx: &DalContext, - func_id: FuncId, - context: AttributeContext, - key: Option, - parent_attribute_value_id: Option, - attribute_value_id: AttributeValueId, - ) -> AttributePrototypeResult { - let row = ctx - .txns() - .await? - .pg() - .query_one( - "SELECT new_attribute_prototype_id AS prototype_id - FROM attribute_prototype_new_with_attribute_value_v1($1, - $2, - $3, - $4, - $5, - $6, - $7)", - &[ - ctx.tenancy(), - ctx.visibility(), - &func_id, - &context, - &key, - &parent_attribute_value_id, - &attribute_value_id, - ], - ) - .await?; - let prototype_id: AttributePrototypeId = row.try_get("prototype_id")?; - let object = Self::get_by_id(ctx, &prototype_id) - .await? - .ok_or_else(|| AttributePrototypeError::NotFound(prototype_id, *ctx.visibility()))?; - - Ok(object) +impl AttributePrototype { + pub fn assemble(id: AttributePrototypeId, inner: &AttributePrototypeContentV1) -> Self { + let inner: AttributePrototypeContentV1 = inner.to_owned(); + Self { + id, + timestamp: inner.timestamp, + } } - pub async fn new_with_context_only( - ctx: &DalContext, - func_id: FuncId, - context: AttributeContext, - key: Option<&str>, - ) -> AttributePrototypeResult { - let row = ctx - .txns() - .await? - .pg() - .query_one( - "SELECT object FROM attribute_prototype_create_v1($1, $2, $3, $4, $5)", - &[ctx.tenancy(), ctx.visibility(), &context, &func_id, &key], - ) - .await?; + pub fn id(&self) -> AttributePrototypeId { + self.id + } - Ok(standard_model::finish_create_from_row(ctx, row).await?) 
+ // NOTE(nick): all incoming edges to an attribute prototype must come from one of two places: + // - an attribute value whose lineage comes from a component + // - a prop or provider whose lineage comes from a schema variant + // Outgoing edges from an attribute prototype are used for intra and inter component relationships. + pub async fn new(ctx: &DalContext, func_id: FuncId) -> AttributePrototypeResult { + let timestamp = Timestamp::now(); + + let content = AttributePrototypeContentV1 { timestamp }; + let hash = ctx + .content_store() + .lock() + .await + .add(&AttributePrototypeContent::V1(content.clone()))?; + + let change_set = ctx.change_set_pointer()?; + let id = change_set.generate_ulid()?; + let node_weight = + NodeWeight::new_content(change_set, id, ContentAddress::AttributePrototype(hash))?; + let mut workspace_snapshot = ctx.workspace_snapshot()?.write().await; + let _node_index = workspace_snapshot.add_node(node_weight)?; + + workspace_snapshot.add_edge( + id, + EdgeWeight::new(change_set, EdgeWeightKind::Use)?, + func_id, + )?; + + Ok(AttributePrototype::assemble( + AttributePrototypeId::from(id), + &content, + )) } - standard_model_accessor!(func_id, Pk(FuncId), AttributePrototypeResult); - standard_model_accessor!(key, Option, AttributePrototypeResult); - standard_model_has_many!( - lookup_fn: attribute_values, - table: "attribute_value_belongs_to_attribute_prototype", - model_table: "attribute_values", - returns: AttributeValue, - result: AttributePrototypeResult, - ); - - /// Permanently deletes the [`AttributePrototype`] for the given id along with any - /// corresponding [`AttributeValue`](crate::AttributeValue) prototype and - /// any [`AttributePrototypeArguments`](crate::AttributePrototypeArgument) - /// for the prototype, if and only if any of the above values are in a changeset (i.e., - /// not in HEAD). The effect is to revert the prototype, it's values, and arguments, - /// to the HEAD state. 
Marking them as soft-deleted would propagate the deletion up to - /// HEAD. The implementation here is almost identical to that of - /// [`AttributePrototype::remove`](crate::AttributePrototype::remove)` but (1) - /// checks for in_change_set and (2) hard deletes. Least-specific checks are not necessary here - /// because we only do this for prototypes that exist only in a changeset. A corresponding - /// prototype for this prop will exist in head, and it will take priority when this one is - /// deleted. - pub async fn hard_delete_if_in_changeset( + pub async fn func_id( ctx: &DalContext, - attribute_prototype_id: &AttributePrototypeId, - ) -> AttributePrototypeResult<()> { - let attribute_prototype = - match AttributePrototype::get_by_id(ctx, attribute_prototype_id).await? { - Some(v) => v, - None => return Ok(()), - }; - - // Ensure a prototype matching this context exists on head, or the prototype is for a - // map/array element + prototype_id: AttributePrototypeId, + ) -> AttributePrototypeResult { + let workspace_snapshot = ctx.workspace_snapshot()?.read().await; + for node_index in workspace_snapshot + .outgoing_targets_for_edge_weight_kind(prototype_id, EdgeWeightKindDiscriminants::Use)? { - let head_ctx = ctx.clone_with_head(); - let has_head_proto = AttributePrototype::find_for_context_and_key( - &head_ctx, - attribute_prototype.context, - &attribute_prototype.key, - ) - .await? - .pop() - .is_some(); - - if !(has_head_proto || attribute_prototype.key().is_some()) { - return Err( - AttributePrototypeError::HardDeletePrototypeWithNoHeadPrototypeOrKey( - *attribute_prototype_id, - ), - ); + let node_weight = workspace_snapshot.get_node_weight(node_index)?; + if NodeWeightDiscriminants::Func == node_weight.into() { + return Ok(node_weight.id().into()); } } - // Delete all values and arguments found for a prototype before deleting the prototype. 
- let attribute_values = attribute_prototype.attribute_values(ctx).await?; - for argument in - AttributePrototypeArgument::list_for_attribute_prototype(ctx, *attribute_prototype_id) - .await? - { - if argument.visibility().in_change_set() { - argument.hard_delete(ctx).await?; - } - } - if attribute_prototype.visibility().in_change_set() { - standard_model::hard_unset_all_belongs_to_in_change_set( - ctx, - "attribute_value_belongs_to_attribute_prototype", - attribute_prototype.id(), - ) - .await?; - attribute_prototype.hard_delete(ctx).await?; - } - - // Start with the initial value(s) from the prototype and build a work queue based on the - // value's children (and their children, recursively). Once we find the child values, - // we can delete the current value in the queue and its prototype. - let mut work_queue = attribute_values; - while let Some(current_value) = work_queue.pop() { - let child_attribute_values = current_value.child_attribute_values(ctx).await?; - if !child_attribute_values.is_empty() { - work_queue.extend(child_attribute_values); - } - - // Delete the prototype if we find one and if its context is not "least-specific". - if let Some(current_prototype) = current_value.attribute_prototype(ctx).await? { - // Delete all arguments found for a prototype before deleting the prototype. - for argument in AttributePrototypeArgument::list_for_attribute_prototype( - ctx, - *current_prototype.id(), - ) - .await? 
- { - if argument.visibility().in_change_set() { - argument.hard_delete(ctx).await?; - } - } - if current_prototype.visibility().in_change_set() { - standard_model::hard_unset_all_belongs_to_in_change_set( - ctx, - "attribute_value_belongs_to_attribute_prototype", - current_prototype.id(), - ) - .await?; - current_prototype.hard_delete(ctx).await?; - } - } - - if current_value.visibility().in_change_set() { - standard_model::hard_unset_belongs_to_in_change_set( - ctx, - "attribute_value_belongs_to_attribute_prototype", - current_value.id(), - ) - .await?; - standard_model::hard_unset_belongs_to_in_change_set( - ctx, - "attribute_value_belongs_to_attribute_value", - current_value.id(), - ) - .await?; - standard_model::hard_unset_all_belongs_to_in_change_set( - ctx, - "attribute_value_belongs_to_attribute_value", - current_value.id(), - ) - .await?; - current_value.hard_delete(ctx).await?; - } - } - Ok(()) + Err(AttributePrototypeError::MissingFunction(prototype_id)) } - /// Deletes the [`AttributePrototype`] corresponding to a provided ID. Before deletion occurs, - /// its corresponding [`AttributeValue`](crate::AttributeValue), all of its child values - /// (and their children, recursively) and those children's prototypes are deleted. Any value or - /// prototype that could not be found or does not exist is assumed to have already been deleted - /// or never existed. Moreover, before deletion of the [`AttributePrototype`] occurs, we delete - /// all [`AttributePrototypeArguments`](crate::AttributePrototypeArgument) that belong to the - /// prototype. - /// - /// Caution: this should be used rather than [`StandardModel::delete_by_id()`] when deleting an - /// [`AttributePrototype`]. That method should never be called directly. - /// - /// Normally we forbid deleting "least specific" attribute prototypes, that is, prototypes - /// at the schema variant level, but we need to do so when removing a schema variant and - /// all its associated objects. 
To make this possible, set `force` to `true` - pub async fn remove( + pub async fn find_for_prop( ctx: &DalContext, - attribute_prototype_id: &AttributePrototypeId, - force: bool, - ) -> AttributePrototypeResult<()> { - // Get the prototype for the given id. Once we get its corresponding value, we can delete - // the prototype. - let mut attribute_prototype = - match AttributePrototype::get_by_id(ctx, attribute_prototype_id).await? { - Some(v) => v, - None => return Ok(()), - }; - - let parent_proto_is_map_or_array_element = attribute_prototype.key().is_some(); - if attribute_prototype.context.is_least_specific() - && !parent_proto_is_map_or_array_element - && !force - { - return Err( - AttributePrototypeError::LeastSpecificContextPrototypeRemovalNotAllowed( - *attribute_prototype_id, - ), - ); - } - - // Delete all values and arguments found for a prototype before deleting the prototype. - let attribute_values = attribute_prototype.attribute_values(ctx).await?; - for mut argument in - AttributePrototypeArgument::list_for_attribute_prototype(ctx, *attribute_prototype_id) - .await? - { - argument.delete_by_id(ctx).await?; - } - standard_model::unset_all_belongs_to( - ctx, - "attribute_value_belongs_to_attribute_prototype", - attribute_prototype.id(), - ) - .await?; - attribute_prototype.delete_by_id(ctx).await?; - - // Start with the initial value(s) from the prototype and build a work queue based on the - // value's children (and their children, recursively). Once we find the child values, - // we can delete the current value in the queue and its prototype. - let mut work_queue = attribute_values; - while let Some(mut current_value) = work_queue.pop() { - let child_attribute_values = current_value.child_attribute_values(ctx).await?; - if !child_attribute_values.is_empty() { - work_queue.extend(child_attribute_values); - } - - // Delete the prototype if we find one and if its context is not "least-specific". 
- if let Some(mut current_prototype) = current_value.attribute_prototype(ctx).await? { - if current_prototype.context.is_least_specific() - && !parent_proto_is_map_or_array_element - && !force - { - return Err( - AttributePrototypeError::LeastSpecificContextPrototypeRemovalNotAllowed( - *current_prototype.id(), - ), - ); - } - // Delete all arguments found for a prototype before deleting the prototype. - for mut argument in AttributePrototypeArgument::list_for_attribute_prototype( - ctx, - *current_prototype.id(), - ) - .await? - { - argument.delete_by_id(ctx).await?; + prop_id: PropId, + key: &Option, + ) -> AttributePrototypeResult> { + let workspace_snapshot = ctx.workspace_snapshot()?.read().await; + + if let Some(prototype_idx) = workspace_snapshot + .edges_directed(prop_id, Direction::Outgoing)? + .find(|edge_ref| { + if let EdgeWeightKind::Prototype(maybe_key) = edge_ref.weight().kind() { + maybe_key == key + } else { + false } - standard_model::unset_all_belongs_to( - ctx, - "attribute_value_belongs_to_attribute_prototype", - current_prototype.id(), - ) - .await?; - current_prototype.delete_by_id(ctx).await?; - } + }) + .map(|edge_ref| edge_ref.target()) + { + let node_weight = workspace_snapshot.get_node_weight(prototype_idx)?; - // Delete the value if its context is not "least-specific". 
- if current_value.context.is_least_specific() - && !parent_proto_is_map_or_array_element - && !force - { - return Err( - AttributePrototypeError::LeastSpecificContextValueRemovalNotAllowed( - *current_value.id(), - ), - ); + if matches!( + node_weight.content_address_discriminants(), + Some(ContentAddressDiscriminants::AttributePrototype) + ) { + return Ok(Some(node_weight.id().into())); } - current_value.unset_attribute_prototype(ctx).await?; - current_value.unset_parent_attribute_value(ctx).await?; - standard_model::unset_all_belongs_to( - ctx, - "attribute_value_belongs_to_attribute_value", - current_value.id(), - ) - .await?; - current_value.delete_by_id(ctx).await?; } - Ok(()) - } - - #[instrument(level = "debug", skip_all)] - pub async fn list_prototype_funcs_by_context_and_backend_response_type( - ctx: &DalContext, - context: AttributeContext, - backend_response_type: FuncBackendResponseType, - ) -> AttributePrototypeResult> { - let rows = ctx - .txns() - .await? - .pg() - .query( - LIST_FUNCS_FOR_CONTEXT_AND_BACKEND_RESPONSE_TYPE, - &[ - ctx.tenancy(), - ctx.visibility(), - &context, - &context.prop_id(), - &backend_response_type.as_ref(), - ], - ) - .await?; - - let mut result = Vec::new(); - for row in rows.into_iter() { - let func_json: serde_json::Value = row.try_get("func_object")?; - let func: Func = serde_json::from_value(func_json)?; - - let ap_json: serde_json::Value = row.try_get("prototype_object")?; - let ap: Self = serde_json::from_value(ap_json)?; - - result.push((ap, func)); - } - - Ok(result) - } - - pub async fn list_for_schema_variant( - ctx: &DalContext, - schema_variant_id: SchemaVariantId, - ) -> AttributePrototypeResult> { - let rows = ctx - .txns() - .await? - .pg() - .query( - LIST_FOR_SCHEMA_VARIANT, - &[ctx.tenancy(), ctx.visibility(), &schema_variant_id], - ) - .await?; - Ok(standard_model::objects_from_rows(rows)?) 
+ Ok(None) } - #[instrument(level = "debug", skip_all)] - pub async fn list_for_context( - ctx: &DalContext, - context: AttributeContext, - ) -> AttributePrototypeResult> { - let rows = ctx - .txns() - .await? - .pg() - .query( - LIST_FOR_CONTEXT, - &[ - ctx.tenancy(), - ctx.visibility(), - &context, - &context.prop_id(), - ], - ) - .await?; - let object = standard_model::objects_from_rows(rows)?; - Ok(object) - } - - pub async fn find_with_parent_value_and_key_for_context( - ctx: &DalContext, - parent_attribute_value_id: Option, - key: Option, - context: AttributeContext, - ) -> AttributePrototypeResult> { - let row = ctx - .txns() - .await? - .pg() - .query_opt( - FIND_WITH_PARENT_VALUE_AND_KEY_FOR_CONTEXT, - &[ - ctx.tenancy(), - ctx.visibility(), - &context, - &parent_attribute_value_id, - &key, - ], - ) - .await?; - - Ok(standard_model::option_object_from_row(row)?) - } - - /// List [`Vec`] that depend on a provided [`InternalProviderId`](crate::InternalProvider). - pub async fn list_from_internal_provider_use( - ctx: &DalContext, - internal_provider_id: InternalProviderId, - ) -> AttributePrototypeResult> { - let rows = ctx - .txns() - .await? - .pg() - .query( - LIST_FROM_INTERNAL_PROVIDER_USE, - &[ctx.tenancy(), ctx.visibility(), &internal_provider_id], - ) - .await?; - Ok(standard_model::objects_from_rows(rows)?) - } - - /// List [`Vec`] that depend on a provided [`ExternalProviderId`](crate::ExternalProvider) - /// and _tail_ [`ComponentId`](crate::Component). - pub async fn list_by_head_from_external_provider_use_with_tail( + pub async fn find_for_external_provider( ctx: &DalContext, external_provider_id: ExternalProviderId, - tail_component_id: ComponentId, - ) -> AttributePrototypeResult> { - let rows = ctx - .txns() - .await? 
- .pg() - .query( - LIST_BY_HEAD_FROM_EXTERNAL_PROVIDER_USE_WITH_TAIL, - &[ - ctx.tenancy(), - ctx.visibility(), - &external_provider_id, - &tail_component_id, - ], - ) - .await?; - - let mut result = Vec::new(); - for row in rows.into_iter() { - let head_component_id: ComponentId = row.try_get("head_component_id")?; - - let attribute_prototype_json: serde_json::Value = row.try_get("object")?; - let attribute_prototype = serde_json::from_value(attribute_prototype_json)?; + ) -> AttributePrototypeResult> { + let workspace_snapshot = ctx.workspace_snapshot()?.read().await; + + if let Some(prototype_idx) = workspace_snapshot + .edges_directed(external_provider_id, Direction::Outgoing)? + .find(|edge_ref| { + EdgeWeightKindDiscriminants::Prototype == edge_ref.weight().kind().into() + }) + .map(|edge_ref| edge_ref.target()) + { + let node_weight = workspace_snapshot.get_node_weight(prototype_idx)?; - result.push(AttributePrototypeGroupByHeadComponentId { - head_component_id, - attribute_prototype, - }); + if matches!( + node_weight.content_address_discriminants(), + Some(ContentAddressDiscriminants::AttributePrototype) + ) { + return Ok(Some(node_weight.id().into())); + } } - Ok(result) - } - pub async fn argument_values( - &self, - ctx: &DalContext, - attribute_write_context: AttributeContext, - ) -> AttributePrototypeResult> { - let rows = ctx - .txns() - .await? - .pg() - .query( - ARGUMENT_VALUES_BY_NAME_FOR_HEAD_COMPONENT_ID, - &[ - ctx.tenancy(), - ctx.visibility(), - &self.id, - &attribute_write_context.component_id(), - &attribute_write_context, - ], - ) - .await?; - - Ok(standard_model::objects_from_rows(rows)?) + Ok(None) } - /// List [`AttributeValues`](crate::AttributeValue) that belong to a provided [`AttributePrototypeId`](Self) - /// and whose context contains the provided [`AttributeReadContext`](crate::AttributeReadContext) - /// or are "more-specific" than the provided [`AttributeReadContext`](crate::AttributeReadContext). 
- pub async fn attribute_values_in_context_or_greater( + pub async fn update_func_by_id( ctx: &DalContext, attribute_prototype_id: AttributePrototypeId, - context: AttributeReadContext, - ) -> AttributePrototypeResult> { - let rows = ctx - .txns() - .await? - .pg() - .query( - ATTRIBUTE_VALUES_IN_CONTEXT_OR_GREATER, - &[ - ctx.tenancy(), - ctx.visibility(), - &attribute_prototype_id, - &context, - ], - ) - .await?; - Ok(standard_model::objects_from_rows(rows)?) + func_id: FuncId, + ) -> AttributePrototypeResult<()> { + let mut workspace_snapshot = ctx.workspace_snapshot()?.write().await; + + let attribute_prototype_idx = + workspace_snapshot.get_node_index_by_id(attribute_prototype_id)?; + + let current_func_node_idx = workspace_snapshot + .edges_directed(attribute_prototype_id, Direction::Outgoing)? + .find(|edge_ref| edge_ref.weight().kind() == &EdgeWeightKind::Use) + .map(|edge_ref| edge_ref.target()) + .ok_or(AttributePrototypeError::MissingFunction( + attribute_prototype_id, + ))?; + + let change_set = ctx.change_set_pointer()?; + workspace_snapshot.remove_edge( + change_set, + attribute_prototype_idx, + current_func_node_idx, + EdgeWeightKindDiscriminants::Use, + )?; + + workspace_snapshot.add_edge( + attribute_prototype_id, + EdgeWeight::new(change_set, EdgeWeightKind::Use)?, + func_id, + )?; + + Ok(()) } - #[instrument(level = "debug", skip_all)] - #[allow(clippy::too_many_arguments)] - #[async_recursion] - async fn create_intermediate_proxy_values( + pub async fn attribute_value_ids( ctx: &DalContext, - parent_attribute_value_id: Option, - prototype_id: AttributePrototypeId, - context: AttributeContext, - ) -> AttributePrototypeResult<()> { - if context.is_least_specific() { - return Ok(()); + attribute_prototype_id: AttributePrototypeId, + ) -> AttributePrototypeResult> { + if let Some(attribute_value_id) = + Self::attribute_value_id(ctx, attribute_prototype_id).await? 
+ { + return Ok(vec![attribute_value_id]); } - if (AttributeValue::find_with_parent_and_prototype_for_context( - ctx, - parent_attribute_value_id, - prototype_id, - context, - ) - .await?) - .is_none() - { - // Need to create a proxy to the next lowest level - Self::create_intermediate_proxy_values( - ctx, - parent_attribute_value_id, - prototype_id, - context.less_specific()?, - ) - .await?; + // Remaining edges + // prototype <-- Prototype -- (Prop | Provider) <-- Prop|Provider -- Attribute Values + // (multiple avs possible) - if let Some(proxy_target) = AttributeValue::find_with_parent_and_prototype_for_context( - ctx, - parent_attribute_value_id, - prototype_id, - context.less_specific()?, - ) - .await? + let mut attribute_value_ids = vec![]; + + let workspace_snapshot = ctx.workspace_snapshot()?.read().await; + for prototype_edge_source in workspace_snapshot.incoming_sources_for_edge_weight_kind( + attribute_prototype_id, + EdgeWeightKindDiscriminants::Prototype, + )? { + let (target_id, edge_weight_discrim) = match workspace_snapshot + .get_node_weight(prototype_edge_source)? + { + NodeWeight::Prop(prop_inner) => { + (prop_inner.id(), EdgeWeightKindDiscriminants::Prop) + } + NodeWeight::Content(content_inner) => match content_inner.content_address() { + ContentAddress::ExternalProvider(_) | ContentAddress::InternalProvider(_) => { + (content_inner.id(), EdgeWeightKindDiscriminants::Provider) + } + _ => { + return Err(WorkspaceSnapshotError::UnexpectedEdgeSource( + content_inner.id(), + attribute_prototype_id.into(), + EdgeWeightKindDiscriminants::Prototype, + ) + .into()) + } + }, + other => { + return Err(WorkspaceSnapshotError::UnexpectedEdgeSource( + other.id(), + attribute_prototype_id.into(), + EdgeWeightKindDiscriminants::Prototype, + ) + .into()) + } + }; + + for attribute_value_target in workspace_snapshot + .incoming_sources_for_edge_weight_kind(target_id, edge_weight_discrim)? 
{ - // Create the proxy at this level - let mut proxy_attribute_value = AttributeValue::new( - ctx, - proxy_target.func_binding_id(), - proxy_target.func_binding_return_value_id(), - context, - proxy_target.key().map(|k| k.to_string()), - ) - .await?; - proxy_attribute_value - .set_proxy_for_attribute_value_id(ctx, Some(*proxy_target.id())) - .await?; - proxy_attribute_value - .set_attribute_prototype(ctx, &prototype_id) - .await? - } else { - return Err(AttributePrototypeError::MissingValue( - *ctx.tenancy(), - *ctx.visibility(), - prototype_id, - parent_attribute_value_id, - )); + // There are also provider edges from the schema variant to the provider. + // These should be different edge kinds, I think + if let NodeWeight::AttributeValue(av_node_weight) = + workspace_snapshot.get_node_weight(attribute_value_target)? + { + attribute_value_ids.push(av_node_weight.id().into()) + } } } - Ok(()) + Ok(attribute_value_ids) } - #[allow(clippy::too_many_arguments)] - pub async fn update_for_context( + /// If this prototype is defined at the component level, it will have an incoming edge from the + /// AttributeValue for which it is the prototype. Otherwise this will return None, indicating a + /// prototype defined at the schema variant level (which has no attribute value) + pub async fn attribute_value_id( ctx: &DalContext, attribute_prototype_id: AttributePrototypeId, - context: AttributeContext, - func_id: FuncId, - func_binding_id: FuncBindingId, - func_binding_return_value_id: FuncBindingReturnValueId, - parent_attribute_value_id: Option, - existing_attribute_value_id: Option, - ) -> AttributePrototypeResult { - let given_attribute_prototype = Self::get_by_id(ctx, &attribute_prototype_id) - .await? - .ok_or_else(|| { - AttributePrototypeError::NotFound(attribute_prototype_id, *ctx.visibility()) - })?; - - // If the AttributePrototype we were given isn't for the _specific_ context that we're - // trying to update, make a new one. 
This is necessary so that we don't end up changing the - // prototype for a context less specific than the one that we're trying to update. - let mut attribute_prototype = if given_attribute_prototype.context == context { - given_attribute_prototype - } else if let Some(attribute_value_id) = existing_attribute_value_id { - // Create new prototype with an existing value and clone the arguments of the given prototype into the new one. - let prototype = Self::new_with_existing_value( - ctx, - func_id, - context, - given_attribute_prototype.key().map(|k| k.to_string()), - parent_attribute_value_id, - attribute_value_id, - ) - .await?; - - let mut value = AttributeValue::get_by_id(ctx, &attribute_value_id) - .await? - .ok_or_else(|| { - AttributePrototypeError::MissingValue( - *ctx.tenancy(), - *ctx.visibility(), - *prototype.id(), - Some(attribute_value_id), - ) - })?; - value.set_func_binding_id(ctx, func_binding_id).await?; - - prototype - } else { - // Create new prototype and clone the arguments of the given prototype into the new one. - Self::new( - ctx, - func_id, - func_binding_id, - func_binding_return_value_id, - context, - given_attribute_prototype.key().map(|k| k.to_string()), - parent_attribute_value_id, - ) - .await? - }; - - attribute_prototype.set_func_id(ctx, func_id).await?; - - Ok(*attribute_prototype.id()) - } - - pub async fn find_for_func( - ctx: &DalContext, - func_id: &FuncId, - ) -> AttributePrototypeResult> { - let rows = ctx - .txns() - .await? - .pg() - .query(FIND_FOR_FUNC, &[ctx.tenancy(), ctx.visibility(), func_id]) - .await?; - - Ok(standard_model::objects_from_rows(rows)?) - } - - pub async fn find_for_func_as_variant_and_component( - ctx: &DalContext, - func_id: FuncId, - ) -> AttributePrototypeResult> { - let mut result = vec![]; - - let rows = ctx - .txns() - .await? 
- .pg() - .query( - FIND_FOR_FUNC_AS_VARIANT_AND_COMPONENT, - &[ctx.tenancy(), ctx.visibility(), &func_id], + ) -> AttributePrototypeResult> { + let workspace_snapshot = ctx.workspace_snapshot()?.read().await; + + let maybe_value_idxs = workspace_snapshot.incoming_sources_for_edge_weight_kind( + attribute_prototype_id, + EdgeWeightKindDiscriminants::Prototype, + )?; + + if maybe_value_idxs.len() > 1 { + return Err(WorkspaceSnapshotError::UnexpectedNumberOfIncomingEdges( + EdgeWeightKindDiscriminants::Prototype, + NodeWeightDiscriminants::Content, + attribute_prototype_id.into(), ) - .await?; - - for row in rows.into_iter() { - let schema_variant_id: SchemaVariantId = row.try_get("schema_variant_id")?; - let component_id: ComponentId = row.try_get("component_id")?; - - result.push((schema_variant_id, component_id)); + .into()); } - Ok(result) - } - - pub async fn find_for_context_and_key( - ctx: &DalContext, - context: AttributeContext, - key: &Option, - ) -> AttributePrototypeResult> { - let rows = if key.is_some() { - ctx.txns() - .await? - .pg() - .query( - FIND_FOR_CONTEXT_AND_KEY, - &[ - ctx.tenancy(), - ctx.visibility(), - &context.prop_id(), - &context.internal_provider_id(), - &context.external_provider_id(), - &context.component_id(), - &key, - ], - ) - .await? - } else { - ctx.txns() - .await? - .pg() - .query( - FIND_FOR_CONTEXT_NULL_KEY, - &[ - ctx.tenancy(), - ctx.visibility(), - &context.prop_id(), - &context.internal_provider_id(), - &context.external_provider_id(), - &context.component_id(), - ], - ) - .await? - }; - - Ok(standard_model::objects_from_rows(rows)?) + Ok(match maybe_value_idxs.first().copied() { + Some(value_idx) => { + if let NodeWeight::AttributeValue(av_node_weight) = + workspace_snapshot.get_node_weight(value_idx)? 
+ { + Some(av_node_weight.id().into()) + } else { + None + } + } + None => None, + }) } - pub async fn external_provider( - &self, + pub async fn remove( ctx: &DalContext, - ) -> AttributePrototypeResult { - ExternalProvider::get_by_id(ctx, &self.context.external_provider_id()) - .await? - .ok_or(AttributePrototypeError::ExternalProviderNotFound( - self.context.external_provider_id(), - )) - } + prototype_id: AttributePrototypeId, + ) -> AttributePrototypeResult<()> { + let mut workspace_snapshot = ctx.workspace_snapshot()?.write().await; - pub async fn internal_provider( - &self, - ctx: &DalContext, - ) -> AttributePrototypeResult { - InternalProvider::get_by_id(ctx, &self.context.internal_provider_id()) - .await? - .ok_or(AttributePrototypeError::InternalProviderNotFound( - self.context.internal_provider_id(), - )) - } + workspace_snapshot.remove_node_by_id(prototype_id)?; - pub async fn prop(&self, ctx: &DalContext) -> AttributePrototypeResult { - Prop::get_by_id(ctx, &self.context.prop_id()).await?.ok_or( - AttributePrototypeError::PropNotFound(self.context.prop_id()), - ) + Ok(()) } } -#[derive(Debug, Clone, Serialize, Deserialize)] -pub struct AttributePrototypeArgumentValues { - pub attribute_prototype_id: AttributePrototypeId, - pub argument_name: String, - pub values: Vec, -} +// /// This object is used for +// /// [`AttributePrototype::list_by_head_from_external_provider_use_with_tail()`]. 
+// #[derive(Serialize, Deserialize, Debug)] +// pub struct AttributePrototypeGroupByHeadComponentId { +// pub head_component_id: ComponentId, +// pub attribute_prototype: AttributePrototype, +// } + +// impl AttributePrototype { +// #[allow(clippy::too_many_arguments)] +// #[instrument(skip_all)] +// pub async fn new_with_existing_value( +// ctx: &DalContext, +// func_id: FuncId, +// context: AttributeContext, +// key: Option, +// parent_attribute_value_id: Option, +// attribute_value_id: AttributeValueId, +// ) -> AttributePrototypeResult { +// let row = ctx +// .txns() +// .await? +// .pg() +// .query_one( +// "SELECT new_attribute_prototype_id AS prototype_id +// FROM attribute_prototype_new_with_attribute_value_v1($1, +// $2, +// $3, +// $4, +// $5, +// $6, +// $7)", +// &[ +// ctx.tenancy(), +// ctx.visibility(), +// &func_id, +// &context, +// &key, +// &parent_attribute_value_id, +// &attribute_value_id, +// ], +// ) +// .await?; +// let prototype_id: AttributePrototypeId = row.try_get("prototype_id")?; +// let object = Self::get_by_id(ctx, &prototype_id) +// .await? +// .ok_or_else(|| AttributePrototypeError::NotFound(prototype_id, *ctx.visibility()))?; + +// Ok(object) +// } + +// pub async fn new_with_context_only( +// ctx: &DalContext, +// func_id: FuncId, +// context: AttributeContext, +// key: Option<&str>, +// ) -> AttributePrototypeResult { +// let row = ctx +// .txns() +// .await? +// .pg() +// .query_one( +// "SELECT object FROM attribute_prototype_create_v1($1, $2, $3, $4, $5)", +// &[ctx.tenancy(), ctx.visibility(), &context, &func_id, &key], +// ) +// .await?; + +// Ok(standard_model::finish_create_from_row(ctx, row).await?) 
+// } + +// standard_model_accessor!(func_id, Pk(FuncId), AttributePrototypeResult); +// standard_model_accessor!(key, Option, AttributePrototypeResult); +// standard_model_has_many!( +// lookup_fn: attribute_values, +// table: "attribute_value_belongs_to_attribute_prototype", +// model_table: "attribute_values", +// returns: AttributeValue, +// result: AttributePrototypeResult, +// ); + +// /// Permanently deletes the [`AttributePrototype`] for the given id along with any +// /// corresponding [`AttributeValue`](crate::AttributeValue) prototype and +// /// any [`AttributePrototypeArguments`](crate::AttributePrototypeArgument) +// /// for the prototype, if and only if any of the above values are in a changeset (i.e., +// /// not in HEAD). The effect is to revert the prototype, it's values, and arguments, +// /// to the HEAD state. Marking them as soft-deleted would propagate the deletion up to +// /// HEAD. The implementation here is almost identical to that of +// /// [`AttributePrototype::remove`](crate::AttributePrototype::remove)` but (1) +// /// checks for in_change_set and (2) hard deletes. Least-specific checks are not necessary here +// /// because we only do this for prototypes that exist only in a changeset. A corresponding +// /// prototype for this prop will exist in head, and it will take priority when this one is +// /// deleted. +// pub async fn hard_delete_if_in_changeset( +// ctx: &DalContext, +// attribute_prototype_id: &AttributePrototypeId, +// ) -> AttributePrototypeResult<()> { +// let attribute_prototype = +// match AttributePrototype::get_by_id(ctx, attribute_prototype_id).await? 
{ +// Some(v) => v, +// None => return Ok(()), +// }; + +// // Ensure a prototype matching this context exists on head, or the prototype is for a +// // map/array element +// { +// let head_ctx = ctx.clone_with_head(); +// let has_head_proto = AttributePrototype::find_for_context_and_key( +// &head_ctx, +// attribute_prototype.context, +// &attribute_prototype.key, +// ) +// .await? +// .pop() +// .is_some(); + +// if !(has_head_proto || attribute_prototype.key().is_some()) { +// return Err( +// AttributePrototypeError::HardDeletePrototypeWithNoHeadPrototypeOrKey( +// *attribute_prototype_id, +// ), +// ); +// } +// } + +// // Delete all values and arguments found for a prototype before deleting the prototype. +// let attribute_values = attribute_prototype.attribute_values(ctx).await?; +// for argument in +// AttributePrototypeArgument::list_for_attribute_prototype(ctx, *attribute_prototype_id) +// .await? +// { +// if argument.visibility().in_change_set() { +// argument.hard_delete(ctx).await?; +// } +// } +// if attribute_prototype.visibility().in_change_set() { +// standard_model::hard_unset_all_belongs_to_in_change_set( +// ctx, +// "attribute_value_belongs_to_attribute_prototype", +// attribute_prototype.id(), +// ) +// .await?; +// attribute_prototype.hard_delete(ctx).await?; +// } + +// // Start with the initial value(s) from the prototype and build a work queue based on the +// // value's children (and their children, recursively). Once we find the child values, +// // we can delete the current value in the queue and its prototype. +// let mut work_queue = attribute_values; +// while let Some(current_value) = work_queue.pop() { +// let child_attribute_values = current_value.child_attribute_values(ctx).await?; +// if !child_attribute_values.is_empty() { +// work_queue.extend(child_attribute_values); +// } + +// // Delete the prototype if we find one and if its context is not "least-specific". 
+// if let Some(current_prototype) = current_value.attribute_prototype(ctx).await? { +// // Delete all arguments found for a prototype before deleting the prototype. +// for argument in AttributePrototypeArgument::list_for_attribute_prototype( +// ctx, +// *current_prototype.id(), +// ) +// .await? +// { +// if argument.visibility().in_change_set() { +// argument.hard_delete(ctx).await?; +// } +// } +// if current_prototype.visibility().in_change_set() { +// standard_model::hard_unset_all_belongs_to_in_change_set( +// ctx, +// "attribute_value_belongs_to_attribute_prototype", +// current_prototype.id(), +// ) +// .await?; +// current_prototype.hard_delete(ctx).await?; +// } +// } + +// if current_value.visibility().in_change_set() { +// standard_model::hard_unset_belongs_to_in_change_set( +// ctx, +// "attribute_value_belongs_to_attribute_prototype", +// current_value.id(), +// ) +// .await?; +// standard_model::hard_unset_belongs_to_in_change_set( +// ctx, +// "attribute_value_belongs_to_attribute_value", +// current_value.id(), +// ) +// .await?; +// standard_model::hard_unset_all_belongs_to_in_change_set( +// ctx, +// "attribute_value_belongs_to_attribute_value", +// current_value.id(), +// ) +// .await?; +// current_value.hard_delete(ctx).await?; +// } +// } +// Ok(()) +// } + +// /// Deletes the [`AttributePrototype`] corresponding to a provided ID. Before deletion occurs, +// /// its corresponding [`AttributeValue`](crate::AttributeValue), all of its child values +// /// (and their children, recursively) and those children's prototypes are deleted. Any value or +// /// prototype that could not be found or does not exist is assumed to have already been deleted +// /// or never existed. Moreover, before deletion of the [`AttributePrototype`] occurs, we delete +// /// all [`AttributePrototypeArguments`](crate::AttributePrototypeArgument) that belong to the +// /// prototype. 
+// /// +// /// Caution: this should be used rather than [`StandardModel::delete_by_id()`] when deleting an +// /// [`AttributePrototype`]. That method should never be called directly. +// /// +// /// Normally we forbid deleting "least specific" attribute prototypes, that is, prototypes +// /// at the schema variant level, but we need to do so when removing a schema variant and +// /// all its associated objects. To make this possible, set `force` to `true` +// pub async fn remove( +// ctx: &DalContext, +// attribute_prototype_id: &AttributePrototypeId, +// force: bool, +// ) -> AttributePrototypeResult<()> { +// // Get the prototype for the given id. Once we get its corresponding value, we can delete +// // the prototype. +// let mut attribute_prototype = +// match AttributePrototype::get_by_id(ctx, attribute_prototype_id).await? { +// Some(v) => v, +// None => return Ok(()), +// }; + +// let parent_proto_is_map_or_array_element = attribute_prototype.key().is_some(); +// if attribute_prototype.context.is_least_specific() +// && !parent_proto_is_map_or_array_element +// && !force +// { +// return Err( +// AttributePrototypeError::LeastSpecificContextPrototypeRemovalNotAllowed( +// *attribute_prototype_id, +// ), +// ); +// } + +// // Delete all values and arguments found for a prototype before deleting the prototype. +// let attribute_values = attribute_prototype.attribute_values(ctx).await?; +// for mut argument in +// AttributePrototypeArgument::list_for_attribute_prototype(ctx, *attribute_prototype_id) +// .await? +// { +// argument.delete_by_id(ctx).await?; +// } +// standard_model::unset_all_belongs_to( +// ctx, +// "attribute_value_belongs_to_attribute_prototype", +// attribute_prototype.id(), +// ) +// .await?; +// attribute_prototype.delete_by_id(ctx).await?; + +// // Start with the initial value(s) from the prototype and build a work queue based on the +// // value's children (and their children, recursively). 
Once we find the child values, +// // we can delete the current value in the queue and its prototype. +// let mut work_queue = attribute_values; +// while let Some(mut current_value) = work_queue.pop() { +// let child_attribute_values = current_value.child_attribute_values(ctx).await?; +// if !child_attribute_values.is_empty() { +// work_queue.extend(child_attribute_values); +// } + +// // Delete the prototype if we find one and if its context is not "least-specific". +// if let Some(mut current_prototype) = current_value.attribute_prototype(ctx).await? { +// if current_prototype.context.is_least_specific() +// && !parent_proto_is_map_or_array_element +// && !force +// { +// return Err( +// AttributePrototypeError::LeastSpecificContextPrototypeRemovalNotAllowed( +// *current_prototype.id(), +// ), +// ); +// } +// // Delete all arguments found for a prototype before deleting the prototype. +// for mut argument in AttributePrototypeArgument::list_for_attribute_prototype( +// ctx, +// *current_prototype.id(), +// ) +// .await? +// { +// argument.delete_by_id(ctx).await?; +// } +// standard_model::unset_all_belongs_to( +// ctx, +// "attribute_value_belongs_to_attribute_prototype", +// current_prototype.id(), +// ) +// .await?; +// current_prototype.delete_by_id(ctx).await?; +// } + +// // Delete the value if its context is not "least-specific". 
+// if current_value.context.is_least_specific() +// && !parent_proto_is_map_or_array_element +// && !force +// { +// return Err( +// AttributePrototypeError::LeastSpecificContextValueRemovalNotAllowed( +// *current_value.id(), +// ), +// ); +// } +// current_value.unset_attribute_prototype(ctx).await?; +// current_value.unset_parent_attribute_value(ctx).await?; +// standard_model::unset_all_belongs_to( +// ctx, +// "attribute_value_belongs_to_attribute_value", +// current_value.id(), +// ) +// .await?; +// current_value.delete_by_id(ctx).await?; +// } +// Ok(()) +// } + +// #[instrument(skip_all)] +// pub async fn list_prototype_funcs_by_context_and_backend_response_type( +// ctx: &DalContext, +// context: AttributeContext, +// backend_response_type: FuncBackendResponseType, +// ) -> AttributePrototypeResult> { +// let rows = ctx +// .txns() +// .await? +// .pg() +// .query( +// LIST_FUNCS_FOR_CONTEXT_AND_BACKEND_RESPONSE_TYPE, +// &[ +// ctx.tenancy(), +// ctx.visibility(), +// &context, +// &context.prop_id(), +// &backend_response_type.as_ref(), +// ], +// ) +// .await?; + +// let mut result = Vec::new(); +// for row in rows.into_iter() { +// let func_json: serde_json::Value = row.try_get("func_object")?; +// let func: Func = serde_json::from_value(func_json)?; + +// let ap_json: serde_json::Value = row.try_get("prototype_object")?; +// let ap: Self = serde_json::from_value(ap_json)?; + +// result.push((ap, func)); +// } + +// Ok(result) +// } + +// pub async fn list_for_schema_variant( +// ctx: &DalContext, +// schema_variant_id: SchemaVariantId, +// ) -> AttributePrototypeResult> { +// let rows = ctx +// .txns() +// .await? +// .pg() +// .query( +// LIST_FOR_SCHEMA_VARIANT, +// &[ctx.tenancy(), ctx.visibility(), &schema_variant_id], +// ) +// .await?; + +// Ok(standard_model::objects_from_rows(rows)?) 
+// } + +// #[instrument(skip_all)] +// pub async fn list_for_context( +// ctx: &DalContext, +// context: AttributeContext, +// ) -> AttributePrototypeResult> { +// let rows = ctx +// .txns() +// .await? +// .pg() +// .query( +// LIST_FOR_CONTEXT, +// &[ +// ctx.tenancy(), +// ctx.visibility(), +// &context, +// &context.prop_id(), +// ], +// ) +// .await?; +// let object = standard_model::objects_from_rows(rows)?; +// Ok(object) +// } + +// #[tracing::instrument(skip_all)] +// pub async fn find_with_parent_value_and_key_for_context( +// ctx: &DalContext, +// parent_attribute_value_id: Option, +// key: Option, +// context: AttributeContext, +// ) -> AttributePrototypeResult> { +// let row = ctx +// .txns() +// .await? +// .pg() +// .query_opt( +// FIND_WITH_PARENT_VALUE_AND_KEY_FOR_CONTEXT, +// &[ +// ctx.tenancy(), +// ctx.visibility(), +// &context, +// &parent_attribute_value_id, +// &key, +// ], +// ) +// .await?; + +// Ok(standard_model::option_object_from_row(row)?) +// } + +// /// List [`Vec`] that depend on a provided [`InternalProviderId`](crate::InternalProvider). +// pub async fn list_from_internal_provider_use( +// ctx: &DalContext, +// internal_provider_id: InternalProviderId, +// ) -> AttributePrototypeResult> { +// let rows = ctx +// .txns() +// .await? +// .pg() +// .query( +// LIST_FROM_INTERNAL_PROVIDER_USE, +// &[ctx.tenancy(), ctx.visibility(), &internal_provider_id], +// ) +// .await?; +// Ok(standard_model::objects_from_rows(rows)?) +// } + +// /// List [`Vec`] that depend on a provided [`ExternalProviderId`](crate::ExternalProvider) +// /// and _tail_ [`ComponentId`](crate::Component). +// pub async fn list_by_head_from_external_provider_use_with_tail( +// ctx: &DalContext, +// external_provider_id: ExternalProviderId, +// tail_component_id: ComponentId, +// ) -> AttributePrototypeResult> { +// let rows = ctx +// .txns() +// .await? 
+// .pg() +// .query( +// LIST_BY_HEAD_FROM_EXTERNAL_PROVIDER_USE_WITH_TAIL, +// &[ +// ctx.tenancy(), +// ctx.visibility(), +// &external_provider_id, +// &tail_component_id, +// ], +// ) +// .await?; + +// let mut result = Vec::new(); +// for row in rows.into_iter() { +// let head_component_id: ComponentId = row.try_get("head_component_id")?; + +// let attribute_prototype_json: serde_json::Value = row.try_get("object")?; +// let attribute_prototype = serde_json::from_value(attribute_prototype_json)?; + +// result.push(AttributePrototypeGroupByHeadComponentId { +// head_component_id, +// attribute_prototype, +// }); +// } +// Ok(result) +// } + +// pub async fn argument_values( +// &self, +// ctx: &DalContext, +// attribute_write_context: AttributeContext, +// ) -> AttributePrototypeResult> { +// let rows = ctx +// .txns() +// .await? +// .pg() +// .query( +// ARGUMENT_VALUES_BY_NAME_FOR_HEAD_COMPONENT_ID, +// &[ +// ctx.tenancy(), +// ctx.visibility(), +// &self.id, +// &attribute_write_context.component_id(), +// &attribute_write_context, +// ], +// ) +// .await?; + +// Ok(standard_model::objects_from_rows(rows)?) +// } + +// /// List [`AttributeValues`](crate::AttributeValue) that belong to a provided [`AttributePrototypeId`](Self) +// /// and whose context contains the provided [`AttributeReadContext`](crate::AttributeReadContext) +// /// or are "more-specific" than the provided [`AttributeReadContext`](crate::AttributeReadContext). +// pub async fn attribute_values_in_context_or_greater( +// ctx: &DalContext, +// attribute_prototype_id: AttributePrototypeId, +// context: AttributeReadContext, +// ) -> AttributePrototypeResult> { +// let rows = ctx +// .txns() +// .await? +// .pg() +// .query( +// ATTRIBUTE_VALUES_IN_CONTEXT_OR_GREATER, +// &[ +// ctx.tenancy(), +// ctx.visibility(), +// &attribute_prototype_id, +// &context, +// ], +// ) +// .await?; +// Ok(standard_model::objects_from_rows(rows)?) 
+// } + +// #[instrument(skip_all)] +// #[allow(clippy::too_many_arguments)] +// #[async_recursion] +// async fn create_intermediate_proxy_values( +// ctx: &DalContext, +// parent_attribute_value_id: Option, +// prototype_id: AttributePrototypeId, +// context: AttributeContext, +// ) -> AttributePrototypeResult<()> { +// if context.is_least_specific() { +// return Ok(()); +// } + +// if (AttributeValue::find_with_parent_and_prototype_for_context( +// ctx, +// parent_attribute_value_id, +// prototype_id, +// context, +// ) +// .await?) +// .is_none() +// { +// // Need to create a proxy to the next lowest level +// Self::create_intermediate_proxy_values( +// ctx, +// parent_attribute_value_id, +// prototype_id, +// context.less_specific()?, +// ) +// .await?; + +// if let Some(proxy_target) = AttributeValue::find_with_parent_and_prototype_for_context( +// ctx, +// parent_attribute_value_id, +// prototype_id, +// context.less_specific()?, +// ) +// .await? +// { +// // Create the proxy at this level +// let mut proxy_attribute_value = AttributeValue::new( +// ctx, +// proxy_target.func_binding_id(), +// proxy_target.func_binding_return_value_id(), +// context, +// proxy_target.key().map(|k| k.to_string()), +// ) +// .await?; +// proxy_attribute_value +// .set_proxy_for_attribute_value_id(ctx, Some(*proxy_target.id())) +// .await?; +// proxy_attribute_value +// .set_attribute_prototype(ctx, &prototype_id) +// .await? 
+// } else { +// return Err(AttributePrototypeError::MissingValue( +// *ctx.tenancy(), +// *ctx.visibility(), +// prototype_id, +// parent_attribute_value_id, +// )); +// } +// } + +// Ok(()) +// } + +// #[allow(clippy::too_many_arguments)] +// pub async fn update_for_context( +// ctx: &DalContext, +// attribute_prototype_id: AttributePrototypeId, +// context: AttributeContext, +// func_id: FuncId, +// func_binding_id: FuncBindingId, +// func_binding_return_value_id: FuncBindingReturnValueId, +// parent_attribute_value_id: Option, +// existing_attribute_value_id: Option, +// ) -> AttributePrototypeResult { +// let given_attribute_prototype = Self::get_by_id(ctx, &attribute_prototype_id) +// .await? +// .ok_or_else(|| { +// AttributePrototypeError::NotFound(attribute_prototype_id, *ctx.visibility()) +// })?; + +// // If the AttributePrototype we were given isn't for the _specific_ context that we're +// // trying to update, make a new one. This is necessary so that we don't end up changing the +// // prototype for a context less specific than the one that we're trying to update. +// let mut attribute_prototype = if given_attribute_prototype.context == context { +// given_attribute_prototype +// } else if let Some(attribute_value_id) = existing_attribute_value_id { +// // Create new prototype with an existing value and clone the arguments of the given prototype into the new one. +// let prototype = Self::new_with_existing_value( +// ctx, +// func_id, +// context, +// given_attribute_prototype.key().map(|k| k.to_string()), +// parent_attribute_value_id, +// attribute_value_id, +// ) +// .await?; + +// let mut value = AttributeValue::get_by_id(ctx, &attribute_value_id) +// .await? 
+// .ok_or_else(|| { +// AttributePrototypeError::MissingValue( +// *ctx.tenancy(), +// *ctx.visibility(), +// *prototype.id(), +// Some(attribute_value_id), +// ) +// })?; +// value.set_func_binding_id(ctx, func_binding_id).await?; + +// prototype +// } else { +// // Create new prototype and clone the arguments of the given prototype into the new one. +// Self::new( +// ctx, +// func_id, +// func_binding_id, +// func_binding_return_value_id, +// context, +// given_attribute_prototype.key().map(|k| k.to_string()), +// parent_attribute_value_id, +// ) +// .await? +// }; + +// attribute_prototype.set_func_id(ctx, func_id).await?; + +// Ok(*attribute_prototype.id()) +// } + +// pub async fn find_for_func( +// ctx: &DalContext, +// func_id: &FuncId, +// ) -> AttributePrototypeResult> { +// let rows = ctx +// .txns() +// .await? +// .pg() +// .query(FIND_FOR_FUNC, &[ctx.tenancy(), ctx.visibility(), func_id]) +// .await?; + +// Ok(standard_model::objects_from_rows(rows)?) +// } + +// pub async fn find_for_func_as_variant_and_component( +// ctx: &DalContext, +// func_id: FuncId, +// ) -> AttributePrototypeResult> { +// let mut result = vec![]; + +// let rows = ctx +// .txns() +// .await? +// .pg() +// .query( +// FIND_FOR_FUNC_AS_VARIANT_AND_COMPONENT, +// &[ctx.tenancy(), ctx.visibility(), &func_id], +// ) +// .await?; + +// for row in rows.into_iter() { +// let schema_variant_id: SchemaVariantId = row.try_get("schema_variant_id")?; +// let component_id: ComponentId = row.try_get("component_id")?; + +// result.push((schema_variant_id, component_id)); +// } + +// Ok(result) +// } + +// pub async fn find_for_context_and_key( +// ctx: &DalContext, +// context: AttributeContext, +// key: &Option, +// ) -> AttributePrototypeResult> { +// let rows = if key.is_some() { +// ctx.txns() +// .await? 
+// .pg() +// .query( +// FIND_FOR_CONTEXT_AND_KEY, +// &[ +// ctx.tenancy(), +// ctx.visibility(), +// &context.prop_id(), +// &context.internal_provider_id(), +// &context.external_provider_id(), +// &context.component_id(), +// &key, +// ], +// ) +// .await? +// } else { +// ctx.txns() +// .await? +// .pg() +// .query( +// FIND_FOR_CONTEXT_NULL_KEY, +// &[ +// ctx.tenancy(), +// ctx.visibility(), +// &context.prop_id(), +// &context.internal_provider_id(), +// &context.external_provider_id(), +// &context.component_id(), +// ], +// ) +// .await? +// }; + +// Ok(standard_model::objects_from_rows(rows)?) +// } + +// pub async fn external_provider( +// &self, +// ctx: &DalContext, +// ) -> AttributePrototypeResult { +// ExternalProvider::get_by_id(ctx, &self.context.external_provider_id()) +// .await? +// .ok_or(AttributePrototypeError::ExternalProviderNotFound( +// self.context.external_provider_id(), +// )) +// } + +// pub async fn internal_provider( +// &self, +// ctx: &DalContext, +// ) -> AttributePrototypeResult { +// InternalProvider::get_by_id(ctx, &self.context.internal_provider_id()) +// .await? +// .ok_or(AttributePrototypeError::InternalProviderNotFound( +// self.context.internal_provider_id(), +// )) +// } + +// pub async fn prop(&self, ctx: &DalContext) -> AttributePrototypeResult { +// Prop::get_by_id(ctx, &self.context.prop_id()).await?.ok_or( +// AttributePrototypeError::PropNotFound(self.context.prop_id()), +// ) +// } +// } + +// #[derive(Debug, Clone, Serialize, Deserialize)] +// pub struct AttributePrototypeArgumentValues { +// pub attribute_prototype_id: AttributePrototypeId, +// pub argument_name: String, +// pub values: Vec, +// } diff --git a/lib/dal/src/attribute/prototype/argument.rs b/lib/dal/src/attribute/prototype/argument.rs index 6b2b6236f7..ba85eec557 100644 --- a/lib/dal/src/attribute/prototype/argument.rs +++ b/lib/dal/src/attribute/prototype/argument.rs @@ -1,451 +1,919 @@ -//! 
An [`AttributePrototypeArgument`] represents an argument name and how to dynamically derive -//! the corresponding value. [`AttributePrototype`](crate::AttributePrototype) can have multiple -//! arguments. +//! An [`AttributePrototypeArgument`] joins a prototype to a function argument +//! and to either the internal provider that supplies its value or to a constant +//! value. It defines source of the value for the function argument in the +//! context of the prototype. use serde::{Deserialize, Serialize}; -use thiserror::Error; - -use si_data_pg::PgError; use telemetry::prelude::*; +use thiserror::Error; +use ulid::Ulid; use crate::{ - func::argument::FuncArgumentId, impl_standard_model, pk, - provider::internal::InternalProviderId, standard_model, standard_model_accessor, - AttributePrototypeId, ComponentId, DalContext, ExternalProviderId, HistoryEventError, - StandardModel, StandardModelError, Tenancy, Timestamp, TransactionsError, Visibility, + change_set_pointer::ChangeSetPointerError, + func::argument::{FuncArgument, FuncArgumentError, FuncArgumentId}, + pk, + provider::internal::InternalProviderId, + workspace_snapshot::{ + content_address::ContentAddressDiscriminants, + edge_weight::{EdgeWeight, EdgeWeightError, EdgeWeightKind, EdgeWeightKindDiscriminants}, + node_weight::{ + AttributePrototypeArgumentNodeWeight, NodeWeight, NodeWeightDiscriminants, + NodeWeightError, + }, + WorkspaceSnapshotError, + }, + AttributePrototype, AttributePrototypeId, ComponentId, DalContext, ExternalProviderId, PropId, + Timestamp, TransactionsError, }; -const LIST_FOR_ATTRIBUTE_PROTOTYPE: &str = - include_str!("../../queries/attribute_prototype_argument/list_for_attribute_prototype.sql"); -const LIST_FOR_FUNC_ARGUMENT_ID: &str = - include_str!("../../queries/attribute_prototype_argument/list_for_func_argument.sql"); -const FIND_FOR_PROVIDERS_AND_COMPONENTS: &str = include_str!( - "../../queries/attribute_prototype_argument/find_for_providers_and_components.sql" -); +use 
self::{ + static_value::{StaticArgumentValue, StaticArgumentValueId}, + value_source::ValueSource, +}; + +pub use crate::workspace_snapshot::node_weight::attribute_prototype_argument_node_weight::ArgumentTargets; + +use super::AttributePrototypeError; + +pub mod static_value; +pub mod value_source; + +pk!(AttributePrototypeArgumentId); #[remain::sorted] #[derive(Error, Debug)] pub enum AttributePrototypeArgumentError { - #[error("cannot update set field to become unset: {0}")] - CannotFlipSetFieldToUnset(&'static str), - #[error("cannot update unset field to become set: {0}")] - CannotFlipUnsetFieldToSet(&'static str), - #[error("history event error: {0}")] - HistoryEvent(#[from] HistoryEventError), - #[error("pg error: {0}")] - Pg(#[from] PgError), - #[error("required value fields must be set, found at least one unset required value field")] - RequiredValueFieldsUnset, + #[error("attribute prototype error: {0}")] + AttributePrototype(#[from] AttributePrototypeError), + #[error("change set error: {0}")] + ChangeSet(#[from] ChangeSetPointerError), + #[error("edge weight error: {0}")] + EdgeWeight(#[from] EdgeWeightError), + #[error("func argument error: {0}")] + FuncArgument(#[from] FuncArgumentError), + #[error("Destination prototype {0} has no function arguments")] + InterComponentDestinationPrototypeHasNoFuncArgs(AttributePrototypeId), + #[error("Destination prototype {0} has more than one function argument")] + InterComponentDestinationPrototypeHasTooManyFuncArgs(AttributePrototypeId), + #[error("attribute prototype argument {0} has no func argument")] + MissingFuncArgument(AttributePrototypeArgumentId), + #[error("node weight error: {0}")] + NodeWeight(#[from] NodeWeightError), #[error("serde json error: {0}")] - SerdeJson(#[from] serde_json::Error), - #[error("standard model error: {0}")] - StandardModel(#[from] StandardModelError), + Serde(#[from] serde_json::Error), + #[error("store error: {0}")] + Store(#[from] content_store::StoreError), 
#[error("transactions error: {0}")] Transactions(#[from] TransactionsError), + #[error("could not acquire lock: {0}")] + TryLock(#[from] tokio::sync::TryLockError), + #[error( + "PrototypeArgument {0} ArgumentValue edge pointing to unexpected content node weight kind: {1:?}" + )] + UnexpectedValueSourceContent(AttributePrototypeArgumentId, ContentAddressDiscriminants), + #[error( + "PrototypeArgument {0} ArgumentValue edge pointing to unexpected node weight kind: {1:?}" + )] + UnexpectedValueSourceNode(AttributePrototypeArgumentId, NodeWeightDiscriminants), + #[error("workspace snapshot error: {0}")] + WorkspaceSnapshot(#[from] WorkspaceSnapshotError), } pub type AttributePrototypeArgumentResult = Result; -pk!(AttributePrototypeArgumentPk); -pk!(AttributePrototypeArgumentId); - -/// Contains a "key" and fields to derive a "value" that dynamically used as an argument for a -/// [`AttributePrototypes`](crate::AttributePrototype) function execution. #[derive(Deserialize, Serialize, Debug, Clone, PartialEq, Eq)] pub struct AttributePrototypeArgument { - pk: AttributePrototypeArgumentPk, id: AttributePrototypeArgumentId, - #[serde(flatten)] - tenancy: Tenancy, - #[serde(flatten)] - visibility: Visibility, - #[serde(flatten)] + targets: Option, timestamp: Timestamp, - - /// Indicates the [`AttributePrototype`](crate::AttributePrototype) that [`Self`] is used as - /// an argument for. - attribute_prototype_id: AttributePrototypeId, - /// Where to find the name and type of the "key" for a given argument. - func_argument_id: FuncArgumentId, - /// Where to find the value for a given argument for _intra_ [`Component`](crate::Component) - /// connections. - internal_provider_id: InternalProviderId, - /// Where to find the value for a given argument for _inter_ [`Component`](crate::Component) - /// connections. 
- external_provider_id: ExternalProviderId, - /// For _inter_ [`Component`](crate::Component) connections, this field provides additional - /// information to determine the _source_ of the value. - tail_component_id: ComponentId, - /// For _inter_ [`Component`](crate::Component) connections, this field provides additional - /// information to determine the _destination_ of the value. - head_component_id: ComponentId, -} - -#[derive(Serialize, Deserialize, Debug)] -pub struct AttributePrototypeArgumentGroup { - pub name: String, - pub arguments: Vec, } -impl_standard_model! { - model: AttributePrototypeArgument, - pk: AttributePrototypeArgumentPk, - id: AttributePrototypeArgumentId, - table_name: "attribute_prototype_arguments", - history_event_label_base: "attribute_prototype_argument", - history_event_message_name: "Attribute Prototype Argument" +impl From for AttributePrototypeArgument { + fn from(value: AttributePrototypeArgumentNodeWeight) -> Self { + Self { + timestamp: value.timestamp().to_owned(), + id: value.id().into(), + targets: value.targets(), + } + } } impl AttributePrototypeArgument { - /// Create a new [`AttributePrototypeArgument`] for _intra_ [`Component`](crate::Component) use. - pub async fn new_for_intra_component( + pub fn id(&self) -> AttributePrototypeArgumentId { + self.id + } + + pub fn targets(&self) -> Option { + self.targets + } + + pub fn timestamp(&self) -> &Timestamp { + &self.timestamp + } + + pub async fn static_value_by_id( ctx: &DalContext, - attribute_prototype_id: AttributePrototypeId, - func_argument_id: FuncArgumentId, - internal_provider_id: InternalProviderId, - ) -> AttributePrototypeArgumentResult { - // Ensure the value fields are what we expect. 
- let external_provider_id = ExternalProviderId::NONE; - let tail_component_id = ComponentId::NONE; - let head_component_id = ComponentId::NONE; - if internal_provider_id == InternalProviderId::NONE { - return Err(AttributePrototypeArgumentError::RequiredValueFieldsUnset); + apa_id: AttributePrototypeArgumentId, + ) -> AttributePrototypeArgumentResult> { + let mut static_value_id: Option = None; + { + let workspace_snapshot = ctx.workspace_snapshot()?.read().await; + + for node_idx in workspace_snapshot.outgoing_targets_for_edge_weight_kind( + apa_id, + EdgeWeightKindDiscriminants::PrototypeArgumentValue, + )? { + match workspace_snapshot.get_node_weight(node_idx)? { + NodeWeight::Content(inner) => { + let inner_addr_discrim: ContentAddressDiscriminants = + inner.content_address().into(); + + if inner_addr_discrim == ContentAddressDiscriminants::StaticArgumentValue { + static_value_id = Some(inner.id().into()); + break; + } + } + _ => continue, + } + } } - let row = ctx - .txns() - .await? - .pg() - .query_one( - "SELECT object FROM attribute_prototype_argument_create_v2($1, $2, $3, $4, $5, $6, $7, $8)", - &[ - ctx.tenancy(), - ctx.visibility(), - &attribute_prototype_id, - &func_argument_id, - &internal_provider_id, - &external_provider_id, - &tail_component_id, - &head_component_id, - ], - ) - .await?; - Ok(standard_model::finish_create_from_row(ctx, row).await?) + Ok(match static_value_id { + Some(static_value_id) => { + Some(StaticArgumentValue::get_by_id(ctx, static_value_id).await?) + } + None => None, + }) } - /// Create a new [`AttributePrototypeArgument`] for _inter_ [`Component`](crate::Component) use. 
- pub async fn new_for_inter_component( + pub async fn get_by_id( ctx: &DalContext, - attribute_prototype_id: AttributePrototypeId, - func_argument_id: FuncArgumentId, - head_component_id: ComponentId, - tail_component_id: ComponentId, - external_provider_id: ExternalProviderId, + id: AttributePrototypeArgumentId, ) -> AttributePrototypeArgumentResult { - // Ensure the value fields are what we expect. - if external_provider_id == ExternalProviderId::NONE - || tail_component_id == ComponentId::NONE - || head_component_id == ComponentId::NONE - { - return Err(AttributePrototypeArgumentError::RequiredValueFieldsUnset); - } + let workspace_snapshot = ctx.workspace_snapshot()?.read().await; - // For inter component connections, the internal provider id field must be unset. - let internal_provider_id = InternalProviderId::NONE; - - let row = ctx - .txns() - .await? - .pg() - .query_one( - "SELECT object FROM attribute_prototype_argument_create_v2($1, $2, $3, $4, $5, $6, $7, $8)", - &[ - ctx.tenancy(), - ctx.visibility(), - &attribute_prototype_id, - &func_argument_id, - &internal_provider_id, - &external_provider_id, - &tail_component_id, - &head_component_id, - ], - ) - .await?; - Ok(standard_model::finish_create_from_row(ctx, row).await?) + let node_index = workspace_snapshot.get_node_index_by_id(id)?; + let node_weight = workspace_snapshot.get_node_weight(node_index)?; + + Ok(node_weight + .get_attribute_prototype_argument_node_weight()? + .into()) } - /// Create a new [`AttributePrototypeArgument`] for _inter_ [`Component`](crate::Component) use. 
- pub async fn new_explicit_internal_to_explicit_internal_inter_component( + pub async fn new( ctx: &DalContext, - attribute_prototype_id: AttributePrototypeId, - func_argument_id: FuncArgumentId, - head_component_id: ComponentId, - tail_component_id: ComponentId, - internal_provider_id: InternalProviderId, + prototype_id: AttributePrototypeId, + arg_id: FuncArgumentId, ) -> AttributePrototypeArgumentResult { - // Ensure the value fields are what we expect. - if internal_provider_id == InternalProviderId::NONE - || tail_component_id == ComponentId::NONE - || head_component_id == ComponentId::NONE - { - return Err(AttributePrototypeArgumentError::RequiredValueFieldsUnset); - } + let change_set = ctx.change_set_pointer()?; + let id = change_set.generate_ulid()?; + let node_weight = NodeWeight::new_attribute_prototype_argument(change_set, id, None)?; - // For inter component connections, the internal provider id field must be unset. - let external_provider_id = ExternalProviderId::NONE; - - let row = ctx - .txns() - .await? - .pg() - .query_one( - "SELECT object FROM attribute_prototype_argument_create_v2($1, $2, $3, $4, $5, $6, $7, $8)", - &[ - ctx.tenancy(), - ctx.visibility(), - &attribute_prototype_id, - &func_argument_id, - &internal_provider_id, - &external_provider_id, - &tail_component_id, - &head_component_id, - ], - ) - .await?; - Ok(standard_model::finish_create_from_row(ctx, row).await?) + let mut workspace_snapshot = ctx.workspace_snapshot()?.write().await; + + workspace_snapshot.add_node(node_weight.clone())?; + + workspace_snapshot.add_edge( + prototype_id, + EdgeWeight::new(change_set, EdgeWeightKind::PrototypeArgument)?, + id, + )?; + + workspace_snapshot.add_edge( + id, + EdgeWeight::new(change_set, EdgeWeightKind::Use)?, + arg_id, + )?; + + Ok(node_weight + .get_attribute_prototype_argument_node_weight()? + .into()) } - /// Create a new [`AttributePrototypeArgument`] for _inter_ [`Component`](crate::Component) use. 
- pub async fn new_external_to_external_inter_component( + pub async fn new_inter_component( ctx: &DalContext, - attribute_prototype_id: AttributePrototypeId, - func_argument_id: FuncArgumentId, - head_component_id: ComponentId, - tail_component_id: ComponentId, - external_provider_id: ExternalProviderId, + source_component_id: ComponentId, + source_external_provider_id: ExternalProviderId, + destination_component_id: ComponentId, + destination_attribute_prototype_id: AttributePrototypeId, ) -> AttributePrototypeArgumentResult { - // Ensure the value fields are what we expect. - if external_provider_id == ExternalProviderId::NONE - || tail_component_id == ComponentId::NONE - || head_component_id == ComponentId::NONE - { - return Err(AttributePrototypeArgumentError::RequiredValueFieldsUnset); + let change_set = ctx.change_set_pointer()?; + let id = change_set.generate_ulid()?; + let node_weight = NodeWeight::new_attribute_prototype_argument( + change_set, + id, + Some(ArgumentTargets { + source_component_id, + destination_component_id, + }), + )?; + + let prototype_func_id = + AttributePrototype::func_id(ctx, destination_attribute_prototype_id).await?; + let func_arg_ids = FuncArgument::list_ids_for_func(ctx, prototype_func_id).await?; + + if func_arg_ids.len() > 1 { + return Err(AttributePrototypeArgumentError::InterComponentDestinationPrototypeHasTooManyFuncArgs(destination_attribute_prototype_id)); } - // For inter component connections, the internal provider id field must be unset. - let internal_provider_id = InternalProviderId::NONE; - - let row = ctx - .txns() - .await? - .pg() - .query_one( - "SELECT object FROM attribute_prototype_argument_create_v2($1, $2, $3, $4, $5, $6, $7, $8)", - &[ - ctx.tenancy(), - ctx.visibility(), - &attribute_prototype_id, - &func_argument_id, - &internal_provider_id, - &external_provider_id, - &tail_component_id, - &head_component_id, - ], - ) - .await?; - Ok(standard_model::finish_create_from_row(ctx, row).await?) 
+ let func_arg_id = func_arg_ids.first().ok_or( + AttributePrototypeArgumentError::InterComponentDestinationPrototypeHasNoFuncArgs( + destination_attribute_prototype_id, + ), + )?; + + let prototype_arg: Self = { + let mut workspace_snapshot = ctx.workspace_snapshot()?.write().await; + + workspace_snapshot.add_node(node_weight.clone())?; + + workspace_snapshot.add_edge( + destination_attribute_prototype_id, + EdgeWeight::new(change_set, EdgeWeightKind::PrototypeArgument)?, + id, + )?; + + let prototype_arg: Self = node_weight + .get_attribute_prototype_argument_node_weight()? + .into(); + + workspace_snapshot.add_edge( + prototype_arg.id(), + EdgeWeight::new(change_set, EdgeWeightKind::Use)?, + func_arg_id, + )?; + + prototype_arg + }; + + prototype_arg + .set_value_from_external_provider_id(ctx, source_external_provider_id) + .await } - standard_model_accessor!( - attribute_prototype_id, - Pk(AttributePrototypeId), - AttributePrototypeArgumentResult - ); - standard_model_accessor!( - func_argument_id, - Pk(FuncArgumentId), - AttributePrototypeArgumentResult - ); - standard_model_accessor!( - internal_provider_id, - Pk(InternalProviderId), - AttributePrototypeArgumentResult - ); - standard_model_accessor!( - external_provider_id, - Pk(ExternalProviderId), - AttributePrototypeArgumentResult - ); - standard_model_accessor!( - tail_component_id, - Pk(ComponentId), - AttributePrototypeArgumentResult - ); - standard_model_accessor!( - head_component_id, - Pk(ComponentId), - AttributePrototypeArgumentResult - ); - - /// Wraps the standard model accessor for "internal_provider_id" to ensure that a set value - /// cannot become unset and vice versa. 
- pub async fn set_internal_provider_id_safe( - &mut self, + pub async fn func_argument_id_by_id( ctx: &DalContext, - internal_provider_id: InternalProviderId, - ) -> AttributePrototypeArgumentResult<()> { - if self.internal_provider_id != InternalProviderId::NONE - && internal_provider_id == InternalProviderId::NONE - { - return Err(AttributePrototypeArgumentError::CannotFlipUnsetFieldToSet( - "InternalProviderId", - )); - }; - if self.internal_provider_id == InternalProviderId::NONE - && internal_provider_id != InternalProviderId::NONE + apa_id: AttributePrototypeArgumentId, + ) -> AttributePrototypeArgumentResult { + let workspace_snapshot = ctx.workspace_snapshot()?.read().await; + + for target in workspace_snapshot + .outgoing_targets_for_edge_weight_kind(apa_id, EdgeWeightKindDiscriminants::Use)? { - return Err(AttributePrototypeArgumentError::CannotFlipSetFieldToUnset( - "InternalProviderId", - )); + match workspace_snapshot + .get_node_weight(target)? + .get_func_argument_node_weight() + { + Ok(content_node_weight) => { + return Ok(content_node_weight.id().into()); + } + Err(NodeWeightError::UnexpectedNodeWeightVariant(_, _)) => continue, + Err(e) => Err(e)?, + } } - self.set_internal_provider_id(ctx, internal_provider_id) - .await?; - Ok(()) + + Err(AttributePrototypeArgumentError::MissingFuncArgument(apa_id)) } - /// Wraps the standard model accessor for "external_provider_id" to ensure that a set value - /// cannot become unset and vice versa. 
- pub async fn set_external_provider_id_safe( - mut self, + pub async fn value_source( + &self, ctx: &DalContext, - external_provider_id: ExternalProviderId, - ) -> AttributePrototypeArgumentResult<()> { - if self.external_provider_id != ExternalProviderId::NONE - && external_provider_id == ExternalProviderId::NONE - { - return Err(AttributePrototypeArgumentError::CannotFlipUnsetFieldToSet( - "ExternalProviderId", - )); - } - if self.external_provider_id == ExternalProviderId::NONE - && external_provider_id != ExternalProviderId::NONE + ) -> AttributePrototypeArgumentResult> { + Self::value_source_by_id(ctx, self.id).await + } + + pub async fn value_source_by_id( + ctx: &DalContext, + apa_id: AttributePrototypeArgumentId, + ) -> AttributePrototypeArgumentResult> { + let workspace_snapshot = ctx.workspace_snapshot()?.read().await; + + if let Some(target) = workspace_snapshot + .outgoing_targets_for_edge_weight_kind( + apa_id, + EdgeWeightKindDiscriminants::PrototypeArgumentValue, + )? + .into_iter() + .next() { - return Err(AttributePrototypeArgumentError::CannotFlipSetFieldToUnset( - "ExternalProviderId", - )); + match workspace_snapshot.get_node_weight(target)? 
{ + NodeWeight::Prop(inner) => { + return Ok(Some(ValueSource::Prop(inner.id().into()))); + } + NodeWeight::Content(inner) => { + let discrim: ContentAddressDiscriminants = inner.content_address().into(); + return Ok(Some(match discrim { + ContentAddressDiscriminants::InternalProvider => { + ValueSource::InternalProvider(inner.id().into()) + } + ContentAddressDiscriminants::ExternalProvider => { + ValueSource::ExternalProvider(inner.id().into()) + } + ContentAddressDiscriminants::StaticArgumentValue => { + ValueSource::StaticArgumentValue(inner.id().into()) + } + other => { + return Err( + AttributePrototypeArgumentError::UnexpectedValueSourceContent( + apa_id, other, + ), + ) + } + })); + } + other => { + return Err(AttributePrototypeArgumentError::UnexpectedValueSourceNode( + apa_id, + other.into(), + )) + } + } } - self.set_external_provider_id(ctx, external_provider_id) - .await?; - Ok(()) + + Ok(None) } - /// Wraps the standard model accessor for "tail_component_id" to ensure that a set value - /// cannot become unset and vice versa. - pub async fn set_tail_component_id_safe( - mut self, + async fn set_value_source( + self, ctx: &DalContext, - tail_component_id: ComponentId, - ) -> AttributePrototypeArgumentResult<()> { - if self.tail_component_id != ComponentId::NONE && tail_component_id == ComponentId::NONE { - return Err(AttributePrototypeArgumentError::CannotFlipUnsetFieldToSet( - "tail ComponentId", - )); - } - if self.tail_component_id == ComponentId::NONE && tail_component_id != ComponentId::NONE { - return Err(AttributePrototypeArgumentError::CannotFlipSetFieldToUnset( - "tail ComponentId", - )); + value_id: Ulid, + ) -> AttributePrototypeArgumentResult { + let mut workspace_snapshot = ctx.workspace_snapshot()?.write().await; + let change_set = ctx.change_set_pointer()?; + + for existing_value_source in workspace_snapshot.outgoing_targets_for_edge_weight_kind( + self.id, + EdgeWeightKindDiscriminants::PrototypeArgumentValue, + )? 
{ + let self_node_index = workspace_snapshot.get_node_index_by_id(self.id)?; + workspace_snapshot.remove_edge( + change_set, + self_node_index, + existing_value_source, + EdgeWeightKindDiscriminants::PrototypeArgumentValue, + )?; } - self.set_tail_component_id(ctx, tail_component_id).await?; - Ok(()) + + workspace_snapshot.add_edge( + self.id, + EdgeWeight::new(change_set, EdgeWeightKind::PrototypeArgumentValue)?, + value_id, + )?; + + Ok(self) } - /// Wraps the standard model accessor for "head_component_id" to ensure that a set value - /// cannot become unset and vice versa. - pub async fn set_head_component_id_safe( - mut self, + pub async fn prototype_id_for_argument_id( ctx: &DalContext, - head_component_id: ComponentId, - ) -> AttributePrototypeArgumentResult<()> { - if self.head_component_id != ComponentId::NONE && head_component_id == ComponentId::NONE { - return Err(AttributePrototypeArgumentError::CannotFlipUnsetFieldToSet( - "head ComponentId", - )); - } - if self.head_component_id == ComponentId::NONE && head_component_id != ComponentId::NONE { - return Err(AttributePrototypeArgumentError::CannotFlipSetFieldToUnset( - "head ComponentId", - )); + attribute_prototype_argument_id: AttributePrototypeArgumentId, + ) -> AttributePrototypeArgumentResult { + let workspace_snapshot = ctx.workspace_snapshot()?.read().await; + + let prototype_idxs = workspace_snapshot.incoming_sources_for_edge_weight_kind( + attribute_prototype_argument_id, + EdgeWeightKindDiscriminants::PrototypeArgument, + )?; + + if prototype_idxs.len() != 1 { + return Err(WorkspaceSnapshotError::UnexpectedNumberOfIncomingEdges( + EdgeWeightKindDiscriminants::PrototypeArgument, + NodeWeightDiscriminants::AttributePrototypeArgument, + attribute_prototype_argument_id.into(), + ) + .into()); } - self.set_head_component_id(ctx, head_component_id).await?; - Ok(()) + + let prototype_idx = prototype_idxs + .first() + .copied() + .expect("checked length above"); + + let prototype_node_weight = 
workspace_snapshot.get_node_weight(prototype_idx)?; + + Ok(prototype_node_weight.id().into()) } - /// Determines if the [`InternalProviderId`](crate::InternalProvider) is unset. This function - /// can be useful for determining how to build [`FuncBinding`](crate::FuncBinding) arguments. - pub fn is_internal_provider_unset(&self) -> bool { - self.internal_provider_id == InternalProviderId::NONE + pub async fn prototype_id( + &self, + ctx: &DalContext, + ) -> AttributePrototypeArgumentResult { + Self::prototype_id_for_argument_id(ctx, self.id).await } - /// List all [`AttributePrototypeArguments`](Self) for a given - /// [`AttributePrototype`](crate::AttributePrototype). - pub async fn list_for_attribute_prototype( + pub async fn set_value_from_internal_provider_id( + self, ctx: &DalContext, - attribute_prototype_id: AttributePrototypeId, - ) -> AttributePrototypeArgumentResult> { - let rows = ctx - .txns() - .await? - .pg() - .query( - LIST_FOR_ATTRIBUTE_PROTOTYPE, - &[ctx.tenancy(), ctx.visibility(), &attribute_prototype_id], - ) - .await?; - Ok(standard_model::objects_from_rows(rows)?) + internal_provider_id: InternalProviderId, + ) -> AttributePrototypeArgumentResult { + self.set_value_source(ctx, internal_provider_id.into()) + .await } - /// List all [`AttributePrototypeArguments`](Self) for a given [`FuncArgument`](crate::func::argument::FuncArgument). - pub async fn list_by_func_argument_id( + pub async fn set_value_from_external_provider_id( + self, ctx: &DalContext, - func_argument_id: FuncArgumentId, - ) -> AttributePrototypeArgumentResult> { - let rows = ctx - .txns() - .await? - .pg() - .query( - LIST_FOR_FUNC_ARGUMENT_ID, - &[ctx.tenancy(), ctx.visibility(), &func_argument_id], - ) - .await?; - Ok(standard_model::objects_from_rows(rows)?) 
+ external_provider_id: ExternalProviderId, + ) -> AttributePrototypeArgumentResult { + self.set_value_source(ctx, external_provider_id.into()) + .await } - pub async fn find_for_providers_and_components( + pub async fn set_value_from_prop_id( + self, ctx: &DalContext, - external_provider_id: &ExternalProviderId, - internal_provider_id: &InternalProviderId, - tail_component: &ComponentId, - head_component: &ComponentId, - ) -> AttributePrototypeArgumentResult> { - let row = ctx - .txns() - .await? - .pg() - .query_opt( - FIND_FOR_PROVIDERS_AND_COMPONENTS, - &[ - ctx.tenancy(), - ctx.visibility(), - external_provider_id, - internal_provider_id, - tail_component, - head_component, - ], - ) - .await?; + prop_id: PropId, + ) -> AttributePrototypeArgumentResult { + self.set_value_source(ctx, prop_id.into()).await + } + + pub async fn set_value_from_static_value_id( + self, + ctx: &DalContext, + value_id: StaticArgumentValueId, + ) -> AttributePrototypeArgumentResult { + self.set_value_source(ctx, value_id.into()).await + } + + pub async fn set_value_from_static_value( + self, + ctx: &DalContext, + value: serde_json::Value, + ) -> AttributePrototypeArgumentResult { + let static_value = StaticArgumentValue::new(ctx, value).await?; - Ok(standard_model::object_option_from_row_option(row)?) 
+ self.set_value_from_static_value_id(ctx, static_value.id()) + .await + } + + pub async fn list_ids_for_prototype( + ctx: &DalContext, + prototype_id: AttributePrototypeId, + ) -> AttributePrototypeArgumentResult> { + let mut apas = vec![]; + let workspace_snapshot = ctx.workspace_snapshot()?.read().await; + + let apa_node_idxs = workspace_snapshot.outgoing_targets_for_edge_weight_kind( + prototype_id, + EdgeWeightKindDiscriminants::PrototypeArgument, + )?; + + for idx in apa_node_idxs { + let node_weight = workspace_snapshot.get_node_weight(idx)?; + apas.push(node_weight.id().into()) + } + + Ok(apas) + } + + pub async fn remove( + ctx: &DalContext, + apa_id: AttributePrototypeArgumentId, + ) -> AttributePrototypeArgumentResult<()> { + let mut workspace_snapshot = ctx.workspace_snapshot()?.write().await; + + workspace_snapshot.remove_node_by_id(apa_id)?; + + Ok(()) } } + +// use si_data_pg::PgError; +// use telemetry::prelude::*; +// use thiserror::Error; + +// const LIST_FOR_ATTRIBUTE_PROTOTYPE: &str = +// include_str!("../../queries/attribute_prototype_argument/list_for_attribute_prototype.sql"); +// const LIST_FOR_FUNC_ARGUMENT_ID: &str = +// include_str!("../../queries/attribute_prototype_argument/list_for_func_argument.sql"); +// const FIND_FOR_PROVIDERS_AND_COMPONENTS: &str = include_str!( +// "../../queries/attribute_prototype_argument/find_for_providers_and_components.sql" +// ); + +// #[remain::sorted] +// #[derive(Error, Debug)] +// pub enum AttributePrototypeArgumentError { +// #[error("cannot update set field to become unset: {0}")] +// CannotFlipSetFieldToUnset(&'static str), +// #[error("cannot update unset field to become set: {0}")] +// CannotFlipUnsetFieldToSet(&'static str), +// #[error("history event error: {0}")] +// HistoryEvent(#[from] HistoryEventError), +// #[error("pg error: {0}")] +// Pg(#[from] PgError), +// #[error("required value fields must be set, found at least one unset required value field")] +// RequiredValueFieldsUnset, +// 
#[error("serde json error: {0}")] +// SerdeJson(#[from] serde_json::Error), +// #[error("standard model error: {0}")] +// StandardModel(#[from] StandardModelError), +// #[error("transactions error: {0}")] +// Transactions(#[from] TransactionsError), +// } + +// pub type AttributePrototypeArgumentResult = Result; + +// /// Contains a "key" and fields to derive a "value" that dynamically used as an argument for a +// /// [`AttributePrototypes`](crate::AttributePrototype) function execution. +// #[derive(Deserialize, Serialize, Debug, Clone, PartialEq, Eq)] +// pub struct AttributePrototypeArgument { +// pk: AttributePrototypeArgumentPk, +// id: AttributePrototypeArgumentId, +// #[serde(flatten)] +// tenancy: Tenancy, +// #[serde(flatten)] +// visibility: Visibility, +// #[serde(flatten)] +// timestamp: Timestamp, + +// /// Indicates the [`AttributePrototype`](crate::AttributePrototype) that [`Self`] is used as +// /// an argument for. +// attribute_prototype_id: AttributePrototypeId, +// /// Where to find the name and type of the "key" for a given argument. +// func_argument_id: FuncArgumentId, +// /// Where to find the value for a given argument for _intra_ [`Component`](crate::Component) +// /// connections. +// internal_provider_id: InternalProviderId, +// /// Where to find the value for a given argument for _inter_ [`Component`](crate::Component) +// /// connections. +// external_provider_id: ExternalProviderId, +// /// For _inter_ [`Component`](crate::Component) connections, this field provides additional +// /// information to determine the _source_ of the value. +// tail_component_id: ComponentId, +// /// For _inter_ [`Component`](crate::Component) connections, this field provides additional +// /// information to determine the _destination_ of the value. 
+// head_component_id: ComponentId, +// } + +// #[derive(Serialize, Deserialize, Debug)] +// pub struct AttributePrototypeArgumentGroup { +// pub name: String, +// pub arguments: Vec, +// } + +// impl_standard_model! { +// model: AttributePrototypeArgument, +// pk: AttributePrototypeArgumentPk, +// id: AttributePrototypeArgumentId, +// table_name: "attribute_prototype_arguments", +// history_event_label_base: "attribute_prototype_argument", +// history_event_message_name: "Attribute Prototype Argument" +// } + +// impl AttributePrototypeArgument { +// #[instrument(skip_all)] +// /// Create a new [`AttributePrototypeArgument`] for _intra_ [`Component`](crate::Component) use. +// pub async fn new_for_intra_component( +// ctx: &DalContext, +// attribute_prototype_id: AttributePrototypeId, +// func_argument_id: FuncArgumentId, +// internal_provider_id: InternalProviderId, +// ) -> AttributePrototypeArgumentResult { +// // Ensure the value fields are what we expect. +// let external_provider_id = ExternalProviderId::NONE; +// let tail_component_id = ComponentId::NONE; +// let head_component_id = ComponentId::NONE; +// if internal_provider_id == InternalProviderId::NONE { +// return Err(AttributePrototypeArgumentError::RequiredValueFieldsUnset); +// } + +// let row = ctx +// .txns() +// .await? +// .pg() +// .query_one( +// "SELECT object FROM attribute_prototype_argument_create_v1($1, $2, $3, $4, $5, $6, $7, $8)", +// &[ +// ctx.tenancy(), +// ctx.visibility(), +// &attribute_prototype_id, +// &func_argument_id, +// &internal_provider_id, +// &external_provider_id, +// &tail_component_id, +// &head_component_id, +// ], +// ) +// .await?; +// Ok(standard_model::finish_create_from_row(ctx, row).await?) +// } + +// /// Create a new [`AttributePrototypeArgument`] for _inter_ [`Component`](crate::Component) use. 
+// #[instrument(skip_all)] +// pub async fn new_for_inter_component( +// ctx: &DalContext, +// attribute_prototype_id: AttributePrototypeId, +// func_argument_id: FuncArgumentId, +// head_component_id: ComponentId, +// tail_component_id: ComponentId, +// external_provider_id: ExternalProviderId, +// ) -> AttributePrototypeArgumentResult { +// // Ensure the value fields are what we expect. +// if external_provider_id == ExternalProviderId::NONE +// || tail_component_id == ComponentId::NONE +// || head_component_id == ComponentId::NONE +// { +// return Err(AttributePrototypeArgumentError::RequiredValueFieldsUnset); +// } + +// // For inter component connections, the internal provider id field must be unset. +// let internal_provider_id = InternalProviderId::NONE; + +// let row = ctx +// .txns() +// .await? +// .pg() +// .query_one( +// "SELECT object FROM attribute_prototype_argument_create_v1($1, $2, $3, $4, $5, $6, $7, $8)", +// &[ +// ctx.tenancy(), +// ctx.visibility(), +// &attribute_prototype_id, +// &func_argument_id, +// &internal_provider_id, +// &external_provider_id, +// &tail_component_id, +// &head_component_id, +// ], +// ) +// .await?; +// Ok(standard_model::finish_create_from_row(ctx, row).await?) +// } + +// /// Create a new [`AttributePrototypeArgument`] for _inter_ [`Component`](crate::Component) use. +// #[instrument(skip_all)] +// pub async fn new_explicit_internal_to_explicit_internal_inter_component( +// ctx: &DalContext, +// attribute_prototype_id: AttributePrototypeId, +// func_argument_id: FuncArgumentId, +// head_component_id: ComponentId, +// tail_component_id: ComponentId, +// internal_provider_id: InternalProviderId, +// ) -> AttributePrototypeArgumentResult { +// // Ensure the value fields are what we expect. 
+// if internal_provider_id == InternalProviderId::NONE +// || tail_component_id == ComponentId::NONE +// || head_component_id == ComponentId::NONE +// { +// return Err(AttributePrototypeArgumentError::RequiredValueFieldsUnset); +// } + +// // For inter component connections, the internal provider id field must be unset. +// let external_provider_id = ExternalProviderId::NONE; + +// let row = ctx +// .txns() +// .await? +// .pg() +// .query_one( +// "SELECT object FROM attribute_prototype_argument_create_v1($1, $2, $3, $4, $5, $6, $7, $8)", +// &[ +// ctx.tenancy(), +// ctx.visibility(), +// &attribute_prototype_id, +// &func_argument_id, +// &internal_provider_id, +// &external_provider_id, +// &tail_component_id, +// &head_component_id, +// ], +// ) +// .await?; +// Ok(standard_model::finish_create_from_row(ctx, row).await?) +// } + +// /// Create a new [`AttributePrototypeArgument`] for _inter_ [`Component`](crate::Component) use. +// #[instrument(skip_all)] +// pub async fn new_external_to_external_inter_component( +// ctx: &DalContext, +// attribute_prototype_id: AttributePrototypeId, +// func_argument_id: FuncArgumentId, +// head_component_id: ComponentId, +// tail_component_id: ComponentId, +// external_provider_id: ExternalProviderId, +// ) -> AttributePrototypeArgumentResult { +// // Ensure the value fields are what we expect. +// if external_provider_id == ExternalProviderId::NONE +// || tail_component_id == ComponentId::NONE +// || head_component_id == ComponentId::NONE +// { +// return Err(AttributePrototypeArgumentError::RequiredValueFieldsUnset); +// } + +// // For inter component connections, the internal provider id field must be unset. +// let internal_provider_id = InternalProviderId::NONE; + +// let row = ctx +// .txns() +// .await? 
+// .pg() +// .query_one( +// "SELECT object FROM attribute_prototype_argument_create_v1($1, $2, $3, $4, $5, $6, $7, $8)", +// &[ +// ctx.tenancy(), +// ctx.visibility(), +// &attribute_prototype_id, +// &func_argument_id, +// &internal_provider_id, +// &external_provider_id, +// &tail_component_id, +// &head_component_id, +// ], +// ) +// .await?; +// Ok(standard_model::finish_create_from_row(ctx, row).await?) +// } + +// standard_model_accessor!( +// attribute_prototype_id, +// Pk(AttributePrototypeId), +// AttributePrototypeArgumentResult +// ); +// standard_model_accessor!( +// func_argument_id, +// Pk(FuncArgumentId), +// AttributePrototypeArgumentResult +// ); +// standard_model_accessor!( +// internal_provider_id, +// Pk(InternalProviderId), +// AttributePrototypeArgumentResult +// ); +// standard_model_accessor!( +// external_provider_id, +// Pk(ExternalProviderId), +// AttributePrototypeArgumentResult +// ); +// standard_model_accessor!( +// tail_component_id, +// Pk(ComponentId), +// AttributePrototypeArgumentResult +// ); +// standard_model_accessor!( +// head_component_id, +// Pk(ComponentId), +// AttributePrototypeArgumentResult +// ); + +// /// Wraps the standard model accessor for "internal_provider_id" to ensure that a set value +// /// cannot become unset and vice versa. 
+// pub async fn set_internal_provider_id_safe( +// &mut self, +// ctx: &DalContext, +// internal_provider_id: InternalProviderId, +// ) -> AttributePrototypeArgumentResult<()> { +// if self.internal_provider_id != InternalProviderId::NONE +// && internal_provider_id == InternalProviderId::NONE +// { +// return Err(AttributePrototypeArgumentError::CannotFlipUnsetFieldToSet( +// "InternalProviderId", +// )); +// }; +// if self.internal_provider_id == InternalProviderId::NONE +// && internal_provider_id != InternalProviderId::NONE +// { +// return Err(AttributePrototypeArgumentError::CannotFlipSetFieldToUnset( +// "InternalProviderId", +// )); +// } +// self.set_internal_provider_id(ctx, internal_provider_id) +// .await?; +// Ok(()) +// } + +// /// Wraps the standard model accessor for "external_provider_id" to ensure that a set value +// /// cannot become unset and vice versa. +// pub async fn set_external_provider_id_safe( +// mut self, +// ctx: &DalContext, +// external_provider_id: ExternalProviderId, +// ) -> AttributePrototypeArgumentResult<()> { +// if self.external_provider_id != ExternalProviderId::NONE +// && external_provider_id == ExternalProviderId::NONE +// { +// return Err(AttributePrototypeArgumentError::CannotFlipUnsetFieldToSet( +// "ExternalProviderId", +// )); +// } +// if self.external_provider_id == ExternalProviderId::NONE +// && external_provider_id != ExternalProviderId::NONE +// { +// return Err(AttributePrototypeArgumentError::CannotFlipSetFieldToUnset( +// "ExternalProviderId", +// )); +// } +// self.set_external_provider_id(ctx, external_provider_id) +// .await?; +// Ok(()) +// } + +// /// Wraps the standard model accessor for "tail_component_id" to ensure that a set value +// /// cannot become unset and vice versa. 
+// pub async fn set_tail_component_id_safe( +// mut self, +// ctx: &DalContext, +// tail_component_id: ComponentId, +// ) -> AttributePrototypeArgumentResult<()> { +// if self.tail_component_id != ComponentId::NONE && tail_component_id == ComponentId::NONE { +// return Err(AttributePrototypeArgumentError::CannotFlipUnsetFieldToSet( +// "tail ComponentId", +// )); +// } +// if self.tail_component_id == ComponentId::NONE && tail_component_id != ComponentId::NONE { +// return Err(AttributePrototypeArgumentError::CannotFlipSetFieldToUnset( +// "tail ComponentId", +// )); +// } +// self.set_tail_component_id(ctx, tail_component_id).await?; +// Ok(()) +// } + +// /// Wraps the standard model accessor for "head_component_id" to ensure that a set value +// /// cannot become unset and vice versa. +// pub async fn set_head_component_id_safe( +// mut self, +// ctx: &DalContext, +// head_component_id: ComponentId, +// ) -> AttributePrototypeArgumentResult<()> { +// if self.head_component_id != ComponentId::NONE && head_component_id == ComponentId::NONE { +// return Err(AttributePrototypeArgumentError::CannotFlipUnsetFieldToSet( +// "head ComponentId", +// )); +// } +// if self.head_component_id == ComponentId::NONE && head_component_id != ComponentId::NONE { +// return Err(AttributePrototypeArgumentError::CannotFlipSetFieldToUnset( +// "head ComponentId", +// )); +// } +// self.set_head_component_id(ctx, head_component_id).await?; +// Ok(()) +// } + +// /// Determines if the [`InternalProviderId`](crate::InternalProvider) is unset. This function +// /// can be useful for determining how to build [`FuncBinding`](crate::FuncBinding) arguments. +// pub fn is_internal_provider_unset(&self) -> bool { +// self.internal_provider_id == InternalProviderId::NONE +// } + +// /// List all [`AttributePrototypeArguments`](Self) for a given +// /// [`AttributePrototype`](crate::AttributePrototype). 
+// #[tracing::instrument(skip(ctx))] +// pub async fn list_for_attribute_prototype( +// ctx: &DalContext, +// attribute_prototype_id: AttributePrototypeId, +// ) -> AttributePrototypeArgumentResult> { +// let rows = ctx +// .txns() +// .await? +// .pg() +// .query( +// LIST_FOR_ATTRIBUTE_PROTOTYPE, +// &[ctx.tenancy(), ctx.visibility(), &attribute_prototype_id], +// ) +// .await?; +// Ok(standard_model::objects_from_rows(rows)?) +// } + +// /// List all [`AttributePrototypeArguments`](Self) for a given [`FuncArgument`](crate::func::argument::FuncArgument). +// pub async fn list_by_func_argument_id( +// ctx: &DalContext, +// func_argument_id: FuncArgumentId, +// ) -> AttributePrototypeArgumentResult> { +// let rows = ctx +// .txns() +// .await? +// .pg() +// .query( +// LIST_FOR_FUNC_ARGUMENT_ID, +// &[ctx.tenancy(), ctx.visibility(), &func_argument_id], +// ) +// .await?; +// Ok(standard_model::objects_from_rows(rows)?) +// } + +// pub async fn find_for_providers_and_components( +// ctx: &DalContext, +// external_provider_id: &ExternalProviderId, +// internal_provider_id: &InternalProviderId, +// tail_component: &ComponentId, +// head_component: &ComponentId, +// ) -> AttributePrototypeArgumentResult> { +// let row = ctx +// .txns() +// .await? +// .pg() +// .query_opt( +// FIND_FOR_PROVIDERS_AND_COMPONENTS, +// &[ +// ctx.tenancy(), +// ctx.visibility(), +// external_provider_id, +// internal_provider_id, +// tail_component, +// head_component, +// ], +// ) +// .await?; + +// Ok(standard_model::object_option_from_row_option(row)?) 
+// } +// } diff --git a/lib/dal/src/attribute/prototype/argument/static_value.rs b/lib/dal/src/attribute/prototype/argument/static_value.rs new file mode 100644 index 0000000000..0b0e6714a1 --- /dev/null +++ b/lib/dal/src/attribute/prototype/argument/static_value.rs @@ -0,0 +1,101 @@ +use content_store::Store; +use serde::{Deserialize, Serialize}; +use strum::EnumDiscriminants; + +use crate::{ + pk, + workspace_snapshot::{ + content_address::ContentAddress, node_weight::NodeWeight, WorkspaceSnapshotError, + }, + DalContext, Timestamp, +}; + +use super::AttributePrototypeArgumentResult; + +pk!(StaticArgumentValueId); + +#[derive(Deserialize, Serialize, Debug, Clone, PartialEq, Eq)] +pub struct StaticArgumentValue { + pub id: StaticArgumentValueId, + pub timestamp: Timestamp, + pub value: serde_json::Value, +} + +#[derive(EnumDiscriminants, Serialize, Deserialize, PartialEq, Debug)] +pub enum StaticArgumentValueContent { + V1(StaticArgumentValueContentV1), +} + +#[derive(Debug, Clone, Deserialize, Serialize, PartialEq)] +pub struct StaticArgumentValueContentV1 { + pub timestamp: Timestamp, + pub value: content_store::Value, +} + +impl StaticArgumentValue { + pub fn assemble(id: StaticArgumentValueId, inner: StaticArgumentValueContentV1) -> Self { + Self { + id, + timestamp: inner.timestamp, + value: inner.value.into(), + } + } + + pub fn id(&self) -> StaticArgumentValueId { + self.id + } + + pub async fn new( + ctx: &DalContext, + value: serde_json::Value, + ) -> AttributePrototypeArgumentResult { + let timestamp = Timestamp::now(); + let content = StaticArgumentValueContentV1 { + timestamp, + value: value.into(), + }; + + let hash = ctx + .content_store() + .lock() + .await + .add(&StaticArgumentValueContent::V1(content.clone()))?; + + let change_set = ctx.change_set_pointer()?; + let id = change_set.generate_ulid()?; + let node_weight = + NodeWeight::new_content(change_set, id, ContentAddress::StaticArgumentValue(hash))?; + + { + let mut workspace_snapshot = 
ctx.workspace_snapshot()?.write().await; + workspace_snapshot.add_node(node_weight)?; + } + + Ok(StaticArgumentValue::assemble(id.into(), content)) + } + + pub async fn get_by_id( + ctx: &DalContext, + id: StaticArgumentValueId, + ) -> AttributePrototypeArgumentResult { + let workspace_snapshot = ctx.workspace_snapshot()?.read().await; + + let ulid: ulid::Ulid = id.into(); + let node_index = workspace_snapshot.get_node_index_by_id(ulid)?; + let node_weight = workspace_snapshot.get_node_weight(node_index)?; + let hash = node_weight.content_hash(); + + let content: StaticArgumentValueContent = ctx + .content_store() + .lock() + .await + .get(&hash) + .await? + .ok_or(WorkspaceSnapshotError::MissingContentFromStore(ulid))?; + + // NOTE(nick,jacob,zack): if we had a v2, then there would be migration logic here. + let StaticArgumentValueContent::V1(inner) = content; + + Ok(StaticArgumentValue::assemble(id, inner)) + } +} diff --git a/lib/dal/src/attribute/prototype/argument/value_source.rs b/lib/dal/src/attribute/prototype/argument/value_source.rs new file mode 100644 index 0000000000..d55190ffbe --- /dev/null +++ b/lib/dal/src/attribute/prototype/argument/value_source.rs @@ -0,0 +1,79 @@ +use thiserror::Error; + +use crate::{ + prop::PropError, + provider::{ + external::ExternalProviderError, + internal::{InternalProviderError, InternalProviderId}, + }, + AttributeValue, AttributeValueId, ComponentId, DalContext, ExternalProvider, + ExternalProviderId, InternalProvider, Prop, PropId, +}; + +use super::static_value::StaticArgumentValueId; + +#[remain::sorted] +#[derive(Error, Debug)] +pub enum ValueSourceError { + #[error("attribute value error: {0}")] + AttributeValue(String), + #[error("external provider error: {0}")] + ExternalProvider(#[from] ExternalProviderError), + #[error("internal provider error: {0}")] + InternalProvider(#[from] InternalProviderError), + #[error("prop error: {0}")] + Prop(#[from] PropError), + #[error("static argument value sources have no 
attribute values")] + StaticArgumentValueSourcesNoValues, +} + +pub type ValueSourceResult = Result; + +#[remain::sorted] +#[derive(Clone, Debug, Eq, PartialEq)] +pub enum ValueSource { + ExternalProvider(ExternalProviderId), + InternalProvider(InternalProviderId), + Prop(PropId), + StaticArgumentValue(StaticArgumentValueId), +} + +impl ValueSource { + pub async fn attribute_values( + &self, + ctx: &DalContext, + ) -> ValueSourceResult> { + Ok(match self { + Self::Prop(prop_id) => Prop::attribute_values_for_prop_id(ctx, *prop_id).await?, + Self::ExternalProvider(ep_id) => { + ExternalProvider::attribute_values_for_external_provider_id(ctx, *ep_id).await? + } + Self::InternalProvider(ip_id) => { + InternalProvider::attribute_values_for_internal_provider_id(ctx, *ip_id).await? + } + Self::StaticArgumentValue(_) => { + return Err(ValueSourceError::StaticArgumentValueSourcesNoValues) + } + }) + } + + pub async fn attribute_values_for_component_id( + &self, + ctx: &DalContext, + component_id: ComponentId, + ) -> ValueSourceResult> { + let mut result = vec![]; + + for value_id in self.attribute_values(ctx).await? { + if AttributeValue::component_id(ctx, value_id) + .await + .map_err(|err| ValueSourceError::AttributeValue(err.to_string()))? + == component_id + { + result.push(value_id); + } + } + + Ok(result) + } +} diff --git a/lib/dal/src/attribute/value.rs b/lib/dal/src/attribute/value.rs index db85234bd7..aa3c8472c6 100644 --- a/lib/dal/src/attribute/value.rs +++ b/lib/dal/src/attribute/value.rs @@ -1,133 +1,138 @@ -//! An [`AttributeValue`] represents which [`FuncBinding`](crate::func::binding::FuncBinding) -//! and [`FuncBindingReturnValue`] provide attribute's value. Moreover, it tracks whether the -//! value is proxied or not. Proxied values "point" to another [`AttributeValue`] to provide -//! the attribute's value. -//! -//! ## Updating [`AttributeValues`](AttributeValue) -//! -//! Let's say you want to update a -//! 
[`PropertyEditorValue`](crate::property_editor::values::PropertyEditorValue) in the UI or a -//! "field" on a [`Component`](crate::Component) in general. The key to doing so is the following -//! process: -//! -//! 1) Find the appropriate [`AttributeValue`] in a [`context`](crate::AttributeContext) that is -//! either "exactly specific" to what you need or "less specific" than what you need (see the -//! [`module`](crate::attribute::context) for more information) -//! 2) Find its parent, which almost all [`AttributeValues`](AttributeValue) should have if they are -//! in the lineage of a [`RootProp`](crate::RootProp) (usually, the -//! [`standard model accessor`](crate::standard_accessors) that contains the parent will suffice -//! in finding the parent) -//! 3) Use [`AttributeValue::update_for_context()`] with the appropriate key and -//! [`context`](crate::AttributeContext) while ensuring that if you reuse the key and/or -//! [`context`](crate::AttributeContext) from the [`AttributeValue`](crate::AttributeValue) -//! that you found, that it is _exactly_ what you need (i.e. if the key changes or the -//! [`context`](crate::AttributeContext) is in a lesser specificity than what you need, you -//! mutate them accordingly) -//! -//! Often, you may not have all the information necessary to find the [`AttributeValue`] that you -//! would like to update. Ideally, you would use one of the existing accessor methods off -//! [`AttributeValue`] with contextual information such as a [`PropId`](crate::Prop), -//! a [`ComponentId`](crate::Component)), a parent [`AttributeValue`], a key, etc. -//! -//! In situations where we do not have minimal information to find the _correct_ [`AttributeValue`] -//! from existing accessor queries, we can leveraging existing queries from other structs and write -//! new queries for those structs and specific use cases. For example, since most members of the -//! 
[`RootProp`](crate::RootProp) tree are stable across [`SchemaVariants`](crate::SchemaVariant), -//! we can use [`Component::root_prop_child_attribute_value_for_component()`](crate::Component::root_prop_child_attribute_value_for_component) -//! to find the [`AttributeValue`] whose [`context`](crate::AttributeContext) corresponds to a -//! direct child [`Prop`](crate::Prop) of the [`RootProp`](crate::RootProp). - +// FIXME(nick): restore this module comment with the new paradigm. +// An [`AttributeValue`] represents which [`FuncBinding`](crate::func::binding::FuncBinding) +// and [`FuncBindingReturnValue`] provide attribute's value. Moreover, it tracks whether the +// value is proxied or not. Proxied values "point" to another [`AttributeValue`] to provide +// the attribute's value. +// +// ## Updating [`AttributeValues`](AttributeValue) +// +// Let's say you want to update a +// [`PropertyEditorValue`](crate::property_editor::values::PropertyEditorValue) in the UI or a +// "field" on a [`Component`](crate::Component) in general. The key to doing so is the following +// process: +// +// 1) Find the appropriate [`AttributeValue`] in a [`context`](crate::AttributeContext) that is +// either "exactly specific" to what you need or "less specific" than what you need (see the +// [`module`](crate::attribute::context) for more information) +// 2) Find its parent, which almost all [`AttributeValues`](AttributeValue) should have if they are +// in the lineage of a [`RootProp`](crate::RootProp) (usually, the +// [`standard model accessor`](crate::standard_accessors) that contains the parent will suffice +// in finding the parent) +// 3) Use [`AttributeValue::update_for_context()`] with the appropriate key and +// [`context`](crate::AttributeContext) while ensuring that if you reuse the key and/or +// [`context`](crate::AttributeContext) from the [`AttributeValue`](crate::AttributeValue) +// that you found, that it is _exactly_ what you need (i.e. 
if the key changes or the +// [`context`](crate::AttributeContext) is in a lesser specificity than what you need, you +// mutate them accordingly) +// +// Often, you may not have all the information necessary to find the [`AttributeValue`] that you +// would like to update. Ideally, you would use one of the existing accessor methods off +// [`AttributeValue`] with contextual information such as a [`PropId`](crate::Prop), +// a [`ComponentId`](crate::Component)), a parent [`AttributeValue`], a key, etc. +// +// In situations where we do not have minimal information to find the _correct_ [`AttributeValue`] +// from existing accessor queries, we can leveraging existing queries from other structs and write +// new queries for those structs and specific use cases. For example, since most members of the +// [`RootProp`](crate::RootProp) tree are stable across [`SchemaVariants`](crate::SchemaVariant), +// we can use [`Component::root_prop_child_attribute_value_for_component()`](crate::Component::root_prop_child_attribute_value_for_component) +// to find the [`AttributeValue`] whose [`context`](crate::AttributeContext) corresponds to a +// direct child [`Prop`](crate::Prop) of the [`RootProp`](crate::RootProp). 
+ +use content_store::{Store, StoreError}; +use petgraph::prelude::*; use serde::{Deserialize, Serialize}; -use si_data_nats::NatsError; -use si_data_pg::PgError; -use std::collections::HashMap; +use serde_json::Value; +use std::collections::{HashMap, VecDeque}; use telemetry::prelude::*; use thiserror::Error; - -use crate::func::before::before_funcs_for_component; +use tokio::sync::TryLockError; +use ulid::Ulid; + +use crate::attribute::prototype::AttributePrototypeError; +use crate::change_set_pointer::ChangeSetPointerError; +use crate::func::argument::{FuncArgument, FuncArgumentError}; +use crate::func::binding::{FuncBinding, FuncBindingError}; +use crate::func::execution::{FuncExecution, FuncExecutionError, FuncExecutionPk}; +use crate::func::intrinsics::IntrinsicFunc; +use crate::func::FuncError; +use crate::job::definition::DependentValuesUpdate; +use crate::prop::PropError; +use crate::provider::internal::InternalProviderError; +use crate::workspace_snapshot::content_address::{ContentAddress, ContentAddressDiscriminants}; +use crate::workspace_snapshot::edge_weight::{ + EdgeWeight, EdgeWeightError, EdgeWeightKind, EdgeWeightKindDiscriminants, +}; +use crate::workspace_snapshot::node_weight::{ + AttributeValueNodeWeight, NodeWeight, NodeWeightDiscriminants, NodeWeightError, +}; +use crate::workspace_snapshot::{serde_value_to_string_type, WorkspaceSnapshotError}; use crate::{ - attribute::{ - context::{ - AttributeContext, AttributeContextBuilder, AttributeContextBuilderError, - AttributeReadContext, - }, - prototype::{AttributePrototype, AttributePrototypeId}, - }, - func::{ - binding::{FuncBindingError, FuncBindingId}, - binding_return_value::{ - FuncBindingReturnValue, FuncBindingReturnValueError, FuncBindingReturnValueId, - }, - }, - impl_standard_model, pk, - standard_model::{self, TypeHint}, - standard_model_accessor, standard_model_belongs_to, standard_model_has_many, - AttributeContextError, AttributePrototypeArgumentError, Component, ComponentId, 
DalContext, - Func, FuncBinding, FuncError, HistoryEventError, IndexMap, InternalProvider, - InternalProviderId, Prop, PropError, PropId, PropKind, StandardModel, StandardModelError, - Tenancy, Timestamp, TransactionsError, Visibility, WsEventError, + pk, AttributePrototype, AttributePrototypeId, ComponentId, DalContext, ExternalProviderId, + Func, FuncId, InternalProviderId, Prop, PropId, PropKind, TransactionsError, +}; + +use super::prototype::argument::static_value::StaticArgumentValue; +use super::prototype::argument::value_source::ValueSourceError; +use super::prototype::argument::{ + value_source::ValueSource, AttributePrototypeArgument, AttributePrototypeArgumentError, + AttributePrototypeArgumentId, }; -use crate::{ExternalProviderId, FuncId}; +pub mod dependent_value_graph; pub mod view; -const ATTRIBUTE_VALUE_IDS_FOR_COMPONENT: &str = - include_str!("../queries/attribute_value/ids_for_component.sql"); -const ATTRIBUTE_VALUE_IDS_WITH_DYNAMIC_FUNCTIONS: &str = - include_str!("../queries/attribute_value/ids_with_dynamic_functions.sql"); -const CHILD_ATTRIBUTE_VALUES_FOR_CONTEXT: &str = - include_str!("../queries/attribute_value/child_attribute_values_for_context.sql"); -const FETCH_UPDATE_GRAPH_DATA: &str = - include_str!("../queries/attribute_value/fetch_update_graph_data.sql"); -const FIND_PROP_FOR_VALUE: &str = - include_str!("../queries/attribute_value/find_prop_for_value.sql"); -const FIND_WITH_PARENT_AND_KEY_FOR_CONTEXT: &str = - include_str!("../queries/attribute_value/find_with_parent_and_key_for_context.sql"); -const FIND_WITH_PARENT_AND_PROTOTYPE_FOR_CONTEXT: &str = - include_str!("../queries/attribute_value/find_with_parent_and_prototype_for_context.sql"); -const LIST_FOR_CONTEXT: &str = include_str!("../queries/attribute_value/list_for_context.sql"); -const LIST_PAYLOAD_FOR_READ_CONTEXT: &str = - include_str!("../queries/attribute_value/list_payload_for_read_context.sql"); -const LIST_PAYLOAD_FOR_READ_CONTEXT_AND_ROOT: &str = - 
include_str!("../queries/attribute_value/list_payload_for_read_context_and_root.sql"); -const FIND_CONTROLLING_FUNCS: &str = - include_str!("../queries/attribute_value/find_controlling_funcs.sql"); -const LIST_ATTRIBUTES_WITH_OVERRIDDEN: &str = - include_str!("../queries/attribute_value/list_attributes_with_overridden.sql"); +use crate::func::before::before_funcs_for_component; +pub use dependent_value_graph::DependentValueGraph; #[remain::sorted] -#[derive(Error, Debug)] +#[derive(Debug, Error)] pub enum AttributeValueError { - #[error("AttributeContext error: {0}")] - AttributeContext(#[from] AttributeContextError), - #[error("AttributeContextBuilder error: {0}")] - AttributeContextBuilder(#[from] AttributeContextBuilderError), - #[error("AttributePrototype error: {0}")] - AttributePrototype(String), - #[error("AttributePrototypeArgument error: {0}")] + #[error("attribute prototype error: {0}")] + AttributePrototype(#[from] AttributePrototypeError), + #[error("attribute prototype argument error: {0}")] AttributePrototypeArgument(#[from] AttributePrototypeArgumentError), - #[error("AttributePrototype not found for AttributeValue: {0} ({1:?})")] - AttributePrototypeNotFound(AttributeValueId, Visibility), - #[error("invalid json pointer: {0} for {1}")] - BadJsonPointer(String, String), - #[error("component error: {0}")] - Component(String), - #[error("component not found for id: {0}")] - ComponentNotFound(ComponentId), - #[error("component not found by id: {0}")] - ComponentNotFoundById(ComponentId), - #[error(transparent)] - Council(#[from] council_server::client::ClientError), + #[error("attribute prototype argument {0} has a value source internal provider {1} but no value for that internal provider found in component {2}")] + AttributePrototypeArgumentInternalProviderMissingValueInSourceComponent( + AttributePrototypeArgumentId, + InternalProviderId, + ComponentId, + ), + #[error("attribute prototype argument {0} has a value source {1:?} but no value for that 
prop found in component {2}")] + AttributePrototypeArgumentMissingValueInSourceComponent( + AttributePrototypeArgumentId, + ValueSource, + ComponentId, + ), + #[error("attribute prototype argument {0} has no value source")] + AttributePrototypeArgumentMissingValueSource(AttributePrototypeArgumentId), + #[error("attribute value {0} has no prototype")] + AttributeValueMissingPrototype(AttributeValueId), + #[error("attribute value {0} has more than one edge to a prop")] + AttributeValueMultiplePropEdges(AttributeValueId), + #[error("attribute value {0} has more than one provider edge")] + AttributeValueMultipleProviderEdges(AttributeValueId), + #[error("before func error: {0}")] + BeforeFunc(String), + #[error("Cannot create nested values for {0} since it is not the value for a prop")] + CannotCreateNestedValuesForNonPropValues(AttributeValueId), + #[error("Cannot create attribute value for provider without component id")] + CannotCreateProviderValueWithoutComponentId, + #[error("Cannot create attribute value for root prop without component id")] + CannotCreateRootPropValueWithoutComponentId, + #[error( + "cannot explicitly set the value of {0} because it is for an internal or external provider" + )] + CannotExplicitlySetProviderValues(AttributeValueId), + #[error("change set error: {0}")] + ChangeSet(#[from] ChangeSetPointerError), + #[error("edge weight error: {0}")] + EdgeWeight(#[from] EdgeWeightError), #[error("empty attribute prototype arguments for group name: {0}")] EmptyAttributePrototypeArgumentsForGroup(String), - #[error("external provider error: {0}")] - ExternalProvider(String), - #[error("found duplicate attribute value ({0}) for self ({1}) for parent: {2}")] - FoundDuplicateForParent(AttributeValueId, AttributeValueId, AttributeValueId), - #[error("found duplicate attribute value ({0}) when creating new attribute value in provider context: {1:?}")] - FoundDuplicateForProviderContext(AttributeValueId, AttributeContext), #[error("func error: {0}")] 
Func(#[from] FuncError), + #[error("func argument error: {0}")] + FuncArgument(#[from] FuncArgumentError), #[error("function result failure: kind={kind}, message={message}, backend={backend}")] FuncBackendResultFailure { kind: String, @@ -136,1423 +141,1763 @@ pub enum AttributeValueError { }, #[error("func binding error: {0}")] FuncBinding(#[from] FuncBindingError), - #[error("FuncBindingReturnValue error: {0}")] - FuncBindingReturnValue(#[from] FuncBindingReturnValueError), - #[error("FuncBindingReturnValue not found for AttributeValue: {0}")] - FuncBindingReturnValueNotFound(AttributeValueId, Visibility), - #[error("history event error: {0}")] - HistoryEvent(#[from] HistoryEventError), - #[error("{0}")] - IncompatibleAttributeReadContext(&'static str), + #[error("func execution error: {0}")] + FuncExecution(#[from] FuncExecutionError), + #[error("cannot insert for prop kind: {0}")] + InsertionForInvalidPropKind(PropKind), #[error("internal provider error: {0}")] - InternalProvider(String), - #[error("internal provider not found by id: {0}")] - InternalProviderNotFound(InternalProviderId), - #[error("found invalid object value fields not found in corresponding prop: {0:?}")] - InvalidObjectValueFields(Vec), - #[error("invalid prop value; expected {0} but got {1}")] - InvalidPropValue(String, serde_json::Value), - #[error("json pointer missing for attribute view {0:?} {1:?}")] - JsonPointerMissing(AttributeValueId, HashMap), - #[error("missing attribute value")] - Missing, - #[error( - "attribute values must have an associated attribute prototype, and this one does not. bug!" 
- )] - MissingAttributePrototype, - #[error("expected prop id {0} to have a child")] - MissingChildProp(PropId), - #[error("component missing in context: {0:?}")] - MissingComponentInReadContext(AttributeReadContext), - #[error("missing attribute value with id: {0}")] - MissingForId(AttributeValueId), - #[error("func not found: {0}")] - MissingFunc(String), - #[error("FuncBinding not found: {0}")] - MissingFuncBinding(FuncBindingId), - #[error("func binding return value not found")] - MissingFuncBindingReturnValue, - #[error("func information not found for attribute value id: {0}")] - MissingFuncInformation(AttributeValueId), - #[error("missing value from func binding return value for attribute value id: {0}")] - MissingValueFromFuncBindingReturnValue(AttributeValueId), - #[error("nats txn error: {0}")] - Nats(#[from] NatsError), - #[error("attribute value not found: {0} ({1:?})")] - NotFound(AttributeValueId, Visibility), - #[error("missing attribute value for external provider context: {0:?}")] - NotFoundForExternalProviderContext(AttributeContext), - #[error("missing attribute value for internal provider context: {0:?}")] - NotFoundForInternalProviderContext(AttributeContext), - #[error("No AttributeValue found for AttributeReadContext: {0:?}")] - NotFoundForReadContext(AttributeReadContext), - #[error("using json pointer for attribute view yielded no value")] - NoValueForJsonPointer, - #[error( - "parent must be for an array, map, or object prop: attribute resolver id {0} is for a {1}" - )] - ParentNotAllowed(AttributeValueId, PropKind), - #[error("parent not found or does not exist for value: {0}")] - ParentNotFound(AttributeValueId), - #[error("pg error: {0}")] - Pg(#[from] PgError), - #[error(transparent)] - PgPool(#[from] si_data_pg::PgPoolError), + InternalProvider(#[from] InternalProviderError), + #[error("attribute value {0} missing prop edge when one was expected")] + MissingPropEdge(AttributeValueId), + #[error("missing prototype for attribute value 
{0}")] + MissingPrototype(AttributeValueId), + #[error("found multiple props ({0} and {1}, at minimum) for attribute value: {2}")] + MultiplePropsFound(PropId, PropId, AttributeValueId), + #[error("node weight error: {0}")] + NodeWeight(#[from] NodeWeightError), + #[error("node weight mismatch, expected {0:?} to be {1:?}")] + NodeWeightMismatch(NodeIndex, NodeWeightDiscriminants), + #[error("attribute value not found for component ({0}) and explicit internal provider ({1})")] + NotFoundForComponentAndExplicitInternalProvider(ComponentId, InternalProviderId), + #[error("attribute value {0} has no outgoing edge to a prop or provider")] + OrphanedAttributeValue(AttributeValueId), #[error("prop error: {0}")] - Prop(#[from] Box), - #[error("Prop not found: {0}")] - PropNotFound(PropId), - #[error("schema missing in context")] - SchemaMissing, - #[error("schema not found for component id: {0}")] - SchemaNotFoundForComponent(ComponentId), - #[error("schema variant missing in context")] - SchemaVariantMissing, - #[error("schema variant not found for component id: {0}")] - SchemaVariantNotFoundForComponent(ComponentId), - #[error("error serializing/deserializing json: {0}")] + Prop(#[from] PropError), + #[error("array or map prop missing element prop: {0}")] + PropMissingElementProp(PropId), + #[error("array or map prop has more than one child prop: {0}")] + PropMoreThanOneChild(PropId), + #[error("prop not found for attribute value: {0}")] + PropNotFound(AttributeValueId), + #[error("serde_json: {0}")] SerdeJson(#[from] serde_json::Error), - #[error("standard model error: {0}")] - StandardModelError(#[from] StandardModelError), - #[error(transparent)] + #[error("store error: {0}")] + Store(#[from] StoreError), + #[error("transactions error: {0}")] Transactions(#[from] TransactionsError), - #[error("Unable to create parent AttributeValue: {0}")] - UnableToCreateParent(String), - #[error("the root prop id stack cannot be empty while work queue is not empty")] - 
UnexpectedEmptyRootStack, - #[error("unexpected prop kind: {0}")] - UnexpectedPropKind(PropKind), - #[error("JSON value failed to parse as an array")] - ValueAsArray, - #[error("JSON value failed to parse as an map")] - ValueAsMap, - #[error("JSON value failed to parse as an object")] - ValueAsObject, - #[error("ws event publishing error")] - WsEvent(#[from] WsEventError), + #[error("try lock error: {0}")] + TryLock(#[from] TryLockError), + #[error("type mismatch: expected prop kind {0}, got {1}")] + TypeMismatch(PropKind, String), + #[error("unexpected graph layout: {0}")] + UnexpectedGraphLayout(&'static str), + #[error("value source error: {0}")] + ValueSource(#[from] ValueSourceError), + #[error("workspace snapshot error: {0}")] + WorkspaceSnapshot(#[from] WorkspaceSnapshotError), } pub type AttributeValueResult = Result; -pk!(AttributeValuePk); pk!(AttributeValueId); #[derive(Deserialize, Serialize, Debug, Clone, PartialEq, Eq)] pub struct AttributeValue { - pk: AttributeValuePk, id: AttributeValueId, - func_binding_id: FuncBindingId, - /// The [`FuncBindingReturnValueId`] that represents the value at this specific position & context. - func_binding_return_value_id: FuncBindingReturnValueId, - /// The [`AttributeValueId`] (from a less-specific [`AttributeContext`]) that this - /// [`AttributeValue`] is standing in for in this more-specific [`AttributeContext`]. - proxy_for_attribute_value_id: Option, - /// If this is a `sealed_proxy`, then it should **not** update its [`FuncBindingReturnValueId`] from the - /// [`AttributeValue`] referenced to in `proxy_for_attribute_value_id`. - sealed_proxy: bool, - pub index_map: Option, - pub key: Option, - #[serde(flatten)] - pub context: AttributeContext, - #[serde(flatten)] - tenancy: Tenancy, - #[serde(flatten)] - visibility: Visibility, - #[serde(flatten)] - timestamp: Timestamp, + /// The unprocessed return value is the "real" result, unprocessed for any other behavior. 
+ /// This is potentially-maybe-only-kinda-sort-of(?) useful for non-scalar values. + /// Example: a populated array. + unprocessed_value: Option, + /// The processed return value. + /// Example: empty array. + value: Option, + materialized_view: Option, + func_execution_pk: Option, } -impl_standard_model! { - model: AttributeValue, - pk: AttributeValuePk, - id: AttributeValueId, - table_name: "attribute_values", - history_event_label_base: "attribute_value", - history_event_message_name: "Attribute Value" -} - -pub struct ComponentValuePayload { - pub attribute_value: AttributeValue, - pub maybe_parent_attribute_value_id: Option, +/// What "thing" on the schema variant, (either a prop, internal provider, or external provider), +/// is a particular value the value of/for? +#[derive(Deserialize, Serialize, Debug, Clone, PartialEq, Eq)] +pub enum ValueIsFor { + Prop(PropId), + ExternalProvider(ExternalProviderId), + InternalProvider(InternalProviderId), } -impl AttributeValue { - #[instrument(level = "debug", skip(ctx, key), fields(key))] - pub async fn new( - ctx: &DalContext, - func_binding_id: FuncBindingId, - func_binding_return_value_id: FuncBindingReturnValueId, - context: AttributeContext, - key: Option>, - ) -> AttributeValueResult { - let key: Option = key.map(|s| s.into()); - tracing::Span::current().record("key", &key); - let row = ctx - .txns() - .await? 
- .pg() - .query_one( - "SELECT new_attribute_value AS object FROM attribute_value_new_v1($1, $2, $3, $4, $5, $6)", - &[ - ctx.tenancy(), - ctx.visibility(), - &func_binding_id, - &func_binding_return_value_id, - &context, - &key, - ], - ) - .await?; - let object: Self = standard_model::finish_create_from_row(ctx, row).await?; - - Ok(object) +impl ValueIsFor { + pub fn prop_id(&self) -> Option { + match self { + ValueIsFor::Prop(prop_id) => Some(*prop_id), + _ => None, + } } - standard_model_accessor!( - proxy_for_attribute_value_id, - Option, - AttributeValueResult - ); - standard_model_accessor!(sealed_proxy, bool, AttributeValueResult); - standard_model_accessor!(func_binding_id, Pk(FuncBindingId), AttributeValueResult); - standard_model_accessor!( - func_binding_return_value_id, - Pk(FuncBindingReturnValueId), - AttributeValueResult - ); - standard_model_accessor!(index_map, Option, AttributeValueResult); - standard_model_accessor!(key, Option, AttributeValueResult); - - standard_model_belongs_to!( - lookup_fn: parent_attribute_value, - set_fn: set_parent_attribute_value_unchecked, - unset_fn: unset_parent_attribute_value, - table: "attribute_value_belongs_to_attribute_value", - model_table: "attribute_values", - belongs_to_id: AttributeValueId, - returns: AttributeValue, - result: AttributeValueResult, - ); - - standard_model_has_many!( - lookup_fn: child_attribute_values, - table: "attribute_value_belongs_to_attribute_value", - model_table: "attribute_values", - returns: AttributeValue, - result: AttributeValueResult, - ); - - standard_model_belongs_to!( - lookup_fn: attribute_prototype, - set_fn: set_attribute_prototype, - unset_fn: unset_attribute_prototype, - table: "attribute_value_belongs_to_attribute_prototype", - model_table: "attribute_prototypes", - belongs_to_id: AttributePrototypeId, - returns: AttributePrototype, - result: AttributeValueResult, - ); - - pub fn index_map_mut(&mut self) -> Option<&mut IndexMap> { - self.index_map.as_mut() + pub fn 
external_provider_id(&self) -> Option { + match self { + ValueIsFor::ExternalProvider(ep_id) => Some(*ep_id), + _ => None, + } } - /// Returns the *unprocessed* [`serde_json::Value`] within the [`FuncBindingReturnValue`](crate::FuncBindingReturnValue) - /// corresponding to the field on [`Self`]. - pub async fn get_unprocessed_value( - &self, - ctx: &DalContext, - ) -> AttributeValueResult> { - match FuncBindingReturnValue::get_by_id(ctx, &self.func_binding_return_value_id).await? { - Some(func_binding_return_value) => { - Ok(func_binding_return_value.unprocessed_value().cloned()) - } - None => Err(AttributeValueError::MissingFuncBindingReturnValue), + pub fn internal_provider_id(&self) -> Option { + match self { + ValueIsFor::InternalProvider(ip_id) => Some(*ip_id), + _ => None, } } +} - // pub async fn save_index_map( - // &self, - // ctx: &DalContext, - // index_map: IndexMap, - // ) -> AttributeValueResult<()> { - // } - - /// Returns the [`serde_json::Value`] within the [`FuncBindingReturnValue`](crate::FuncBindingReturnValue) - /// corresponding to the field on [`Self`]. - pub async fn get_value( - &self, - ctx: &DalContext, - ) -> AttributeValueResult> { - match FuncBindingReturnValue::get_by_id(ctx, &self.func_binding_return_value_id).await? 
{ - Some(func_binding_return_value) => Ok(func_binding_return_value.value().cloned()), - None => Err(AttributeValueError::MissingFuncBindingReturnValue), +impl From for Ulid { + fn from(value: ValueIsFor) -> Self { + match value { + ValueIsFor::ExternalProvider(ep_id) => ep_id.into(), + ValueIsFor::InternalProvider(ip_id) => ip_id.into(), + ValueIsFor::Prop(prop_id) => prop_id.into(), } } +} - pub async fn update_stored_index_map(&self, ctx: &DalContext) -> AttributeValueResult<()> { - standard_model::update( - ctx, - "attribute_values", - "index_map", - self.id(), - &self.index_map, - TypeHint::JsonB, - ) - .await?; - Ok(()) +impl From for ValueIsFor { + fn from(value: PropId) -> Self { + Self::Prop(value) } +} - /// Returns a list of child [`AttributeValues`](crate::AttributeValue) for a given - /// [`AttributeValue`] and [`AttributeReadContext`](crate::AttributeReadContext). - pub async fn child_attribute_values_for_context( - ctx: &DalContext, - attribute_value_id: AttributeValueId, - attribute_read_context: AttributeReadContext, - ) -> AttributeValueResult> { - let rows = ctx - .txns() - .await? - .pg() - .query( - CHILD_ATTRIBUTE_VALUES_FOR_CONTEXT, - &[ - ctx.tenancy(), - ctx.visibility(), - &attribute_value_id, - &attribute_read_context, - ], - ) - .await?; +impl From for ValueIsFor { + fn from(value: ExternalProviderId) -> Self { + Self::ExternalProvider(value) + } +} - Ok(standard_model::objects_from_rows(rows)?) +impl From for ValueIsFor { + fn from(value: InternalProviderId) -> Self { + Self::InternalProvider(value) } +} - pub async fn find_with_parent_and_prototype_for_context( - ctx: &DalContext, - parent_attribute_value_id: Option, - attribute_prototype_id: AttributePrototypeId, - context: AttributeContext, - ) -> AttributeValueResult> { - let row = ctx - .txns() - .await? 
- .pg() - .query_opt( - FIND_WITH_PARENT_AND_PROTOTYPE_FOR_CONTEXT, - &[ - ctx.tenancy(), - ctx.visibility(), - &context, - &attribute_prototype_id, - &parent_attribute_value_id, - ], - ) - .await?; +#[derive(Clone, Debug)] +pub struct PrototypeExecutionResult { + value: Option, + unprocessed_value: Option, + func_execution_pk: FuncExecutionPk, +} + +impl From for AttributeValue { + fn from(value: AttributeValueNodeWeight) -> Self { + Self { + id: value.id().into(), + unprocessed_value: value.unprocessed_value(), + value: value.value(), + materialized_view: value.materialized_view(), + func_execution_pk: value.func_execution_pk(), + } + } +} - Ok(standard_model::option_object_from_row(row)?) +impl AttributeValue { + pub fn id(&self) -> AttributeValueId { + self.id } - pub async fn find_all_values_for_component_id( + pub async fn new( ctx: &DalContext, - component_id: ComponentId, - ) -> AttributeValueResult> { - let rows = ctx - .txns() - .await? - .pg() - .query( - "SELECT DISTINCT ON (av.id) - row_to_json(av.*) AS av_object, - avbtav.belongs_to_id AS parent_attribute_value_id - FROM attribute_values_v1($1, $2) AS av - LEFT JOIN attribute_value_belongs_to_attribute_value_v1($1, $2) as avbtav - ON av.id = avbtav.object_id - WHERE attribute_context_component_id = $3", - &[ctx.tenancy(), ctx.visibility(), &component_id], - ) - .await?; + is_for: impl Into, + component_id: Option, + maybe_parent_attribute_value: Option, + key: Option, + ) -> AttributeValueResult { + let change_set = ctx.change_set_pointer()?; + let id = change_set.generate_ulid()?; + let node_weight = NodeWeight::new_attribute_value(change_set, id, None, None, None, None)?; + let is_for = is_for.into(); + + let ordered = if let Some(prop_id) = is_for.prop_id() { + let workspace_snapshot = ctx.workspace_snapshot()?.read().await; + + workspace_snapshot + .get_node_weight_by_id(prop_id)? + .get_prop_node_weight()? 
+ .kind() + .ordered() + } else { + false + }; - let mut result = vec![]; - for row in rows { - let av_json: serde_json::Value = row.try_get("av_object")?; - let attribute_value: Self = serde_json::from_value(av_json)?; + { + let mut workspace_snapshot = ctx.workspace_snapshot()?.write().await; + if ordered { + workspace_snapshot.add_ordered_node(change_set, node_weight.clone())?; + } else { + workspace_snapshot.add_node(node_weight.clone())?; + }; + } - let maybe_parent_attribute_value_id: Option = - row.try_get("parent_attribute_value_id")?; + match is_for { + ValueIsFor::Prop(prop_id) => { + let mut workspace_snapshot = ctx.workspace_snapshot()?.write().await; + workspace_snapshot.add_edge( + id, + EdgeWeight::new(change_set, EdgeWeightKind::Prop)?, + prop_id, + )?; + + // Attach value to parent prop (or root to component) + match maybe_parent_attribute_value { + Some(pav_id) => { + workspace_snapshot.add_ordered_edge( + change_set, + pav_id, + EdgeWeight::new(change_set, EdgeWeightKind::Contain(key))?, + id, + )?; + } + None => { + // Component --Use--> AttributeValue + workspace_snapshot.add_edge( + component_id.ok_or( + AttributeValueError::CannotCreateRootPropValueWithoutComponentId, + )?, + EdgeWeight::new(change_set, EdgeWeightKind::Root)?, + id, + )?; + } + } + } + is_for_provider => { + // Attach value to component via Socket edge and to Provider + let provider_id: Ulid = is_for_provider + .external_provider_id() + .map(Into::into) + .or_else(|| is_for_provider.internal_provider_id().map(Into::into)) + .ok_or(AttributeValueError::UnexpectedGraphLayout( + "we expected a ValueIsFor for a provider type here but did not get one", + ))?; - result.push(ComponentValuePayload { - attribute_value, - maybe_parent_attribute_value_id, - }); + let mut workspace_snapshot = ctx.workspace_snapshot()?.write().await; + workspace_snapshot.add_edge( + component_id + .ok_or(AttributeValueError::CannotCreateProviderValueWithoutComponentId)?, + EdgeWeight::new(change_set, 
EdgeWeightKind::Socket)?, + id, + )?; + + workspace_snapshot.add_edge( + id, + EdgeWeight::new(change_set, EdgeWeightKind::Provider)?, + provider_id, + )?; + } } - Ok(result) + Ok(node_weight.get_attribute_value_node_weight()?.into()) } - /// Find [`Self`] with a given parent value and key. - pub async fn find_with_parent_and_key_for_context( + async fn update_inner( ctx: &DalContext, - parent_attribute_value_id: Option, - key: Option, - context: AttributeReadContext, - ) -> AttributeValueResult> { - let row = ctx - .txns() - .await? - .pg() - .query_opt( - FIND_WITH_PARENT_AND_KEY_FOR_CONTEXT, - &[ - ctx.tenancy(), - ctx.visibility(), - &context, - &parent_attribute_value_id, - &key, - ], - ) + attribute_value_id: AttributeValueId, + value: Option, + spawn_dependent_values_update: bool, + ) -> AttributeValueResult<()> { + Self::vivify_value_and_parent_values(ctx, attribute_value_id).await?; + Self::set_value(ctx, attribute_value_id, value.clone()).await?; + Self::populate_nested_values(ctx, attribute_value_id, value).await?; + + if spawn_dependent_values_update { + ctx.enqueue_job(DependentValuesUpdate::new( + ctx.access_builder(), + *ctx.visibility(), + vec![attribute_value_id], + )) .await?; + } - Ok(standard_model::option_object_from_row(row)?) + Ok(()) } - /// List [`AttributeValues`](crate::AttributeValue) for a provided - /// [`AttributeReadContext`](crate::AttributeReadContext). - /// - /// If you only anticipate one result to be returned and have an - /// [`AttributeReadContext`](crate::AttributeReadContext) - /// that is also a valid [`AttributeContext`](crate::AttributeContext), then you should use - /// [`Self::find_for_context()`] instead of this method. - /// - /// This does _not_ work for maps and arrays, barring the _first_ instance of the array or map - /// object themselves! For those objects, please use - /// [`Self::find_with_parent_and_key_for_context()`]. 
- pub async fn list_for_context( + pub async fn update( ctx: &DalContext, - context: AttributeReadContext, - ) -> AttributeValueResult> { - let rows = ctx - .txns() - .await? - .pg() - .query( - LIST_FOR_CONTEXT, - &[ctx.tenancy(), ctx.visibility(), &context], - ) - .await?; - Ok(standard_model::objects_from_rows(rows)?) + attribute_value_id: AttributeValueId, + value: Option, + ) -> AttributeValueResult<()> { + Self::update_inner(ctx, attribute_value_id, value, true).await } - /// Find one [`AttributeValue`](crate::AttributeValue) for a provided - /// [`AttributeReadContext`](crate::AttributeReadContext). - /// - /// This is a modified version of [`Self::list_for_context()`] that requires an - /// [`AttributeReadContext`](crate::AttributeReadContext) - /// that is also a valid [`AttributeContext`](crate::AttributeContext) _and_ "pops" the first - /// row off the rows found (which are sorted from most to least specific). Thus, the "popped" - /// row will corresponding to the most specific [`AttributeValue`] found. - /// - /// This does _not_ work for maps and arrays, barring the _first_ instance of the array or map - /// object themselves! For those objects, please use - /// [`Self::find_with_parent_and_key_for_context()`]. - pub async fn find_for_context( + /// Directly update an attribute value but do not trigger a dependent values update. Used + /// during component creation so that we can ensure only one job is necessary for the many + /// values updated when a component is created. Use only when you understand why you don't want + /// to trigger a job, because if you don't run a dependent values job update, the materialized + /// views for the component will *not* be updated to reflect the new value, nor will any values + /// that depend on this value be updated. 
+ pub async fn update_no_dependent_values( ctx: &DalContext, - context: AttributeReadContext, - ) -> AttributeValueResult> { - AttributeContextBuilder::from(context).to_context()?; - let mut rows = ctx - .txns() - .await? - .pg() - .query( - LIST_FOR_CONTEXT, - &[ctx.tenancy(), ctx.visibility(), &context], - ) - .await?; - let maybe_row = rows.pop(); - Ok(standard_model::option_object_from_row(maybe_row)?) + attribute_value_id: AttributeValueId, + value: Option, + ) -> AttributeValueResult<()> { + Self::update_inner(ctx, attribute_value_id, value, false).await } - /// Return the [`Prop`] that the [`AttributeValueId`] belongs to, - /// following the relationship through [`AttributePrototype`]. - pub async fn find_prop_for_value( + pub async fn is_for( ctx: &DalContext, - attribute_value_id: AttributeValueId, - ) -> AttributeValueResult { - let row = ctx - .txns() - .await? - .pg() - .query_one( - FIND_PROP_FOR_VALUE, - &[ctx.tenancy(), ctx.visibility(), &attribute_value_id], - ) - .await?; + value_id: AttributeValueId, + ) -> AttributeValueResult { + let workspace_snapshot = ctx.workspace_snapshot()?.read().await; + + let prop_targets = workspace_snapshot + .outgoing_targets_for_edge_weight_kind(value_id, EdgeWeightKindDiscriminants::Prop)?; + + if prop_targets.len() > 1 { + return Err(WorkspaceSnapshotError::UnexpectedNumberOfIncomingEdges( + EdgeWeightKindDiscriminants::Prop, + NodeWeightDiscriminants::Content, + value_id.into(), + ))?; + } + + if let Some(prop_target) = prop_targets.first().copied() { + let prop_id = workspace_snapshot + .get_node_weight(prop_target)? + .get_prop_node_weight()? 
+ .id(); + return Ok(ValueIsFor::Prop(prop_id.into())); + } + + let provider_targets = workspace_snapshot.outgoing_targets_for_edge_weight_kind( + value_id, + EdgeWeightKindDiscriminants::Provider, + )?; + + if provider_targets.len() > 1 { + return Err(WorkspaceSnapshotError::UnexpectedNumberOfIncomingEdges( + EdgeWeightKindDiscriminants::Provider, + NodeWeightDiscriminants::Content, + value_id.into(), + ))?; + } + + let provider_target = provider_targets + .first() + .ok_or(AttributeValueError::OrphanedAttributeValue(value_id))?; + + let provider_node_weight = workspace_snapshot.get_node_weight(*provider_target)?; - Ok(standard_model::object_from_row(row)?) + if let Some(internal_provider) = provider_node_weight + .get_option_content_node_weight_of_kind(ContentAddressDiscriminants::InternalProvider) + { + return Ok(ValueIsFor::InternalProvider(internal_provider.id().into())); + } + + if let Some(external_provider) = provider_node_weight + .get_option_content_node_weight_of_kind(ContentAddressDiscriminants::ExternalProvider) + { + return Ok(ValueIsFor::ExternalProvider(external_provider.id().into())); + } + + Err(WorkspaceSnapshotError::UnexpectedEdgeTarget( + provider_node_weight.id(), + value_id.into(), + EdgeWeightKindDiscriminants::Provider, + ) + .into()) } - /// List [`AttributeValuePayloads`](AttributeValuePayload) for a given - /// [`context`](crate::AttributeReadContext), which must specify a - /// [`ComponentId`](crate::Component). 
- pub async fn list_payload_for_read_context( + pub async fn execute_prototype_function( ctx: &DalContext, - context: AttributeReadContext, - ) -> AttributeValueResult> { - let schema_variant_id = match context.component_id { - Some(component_id) if component_id != ComponentId::NONE => { - // We get the component even if it gets deleted because we may still need to operate with - // attribute values of soft deleted components - let component = - Component::get_by_id(&ctx.clone_with_delete_visibility(), &component_id) + attribute_value_id: AttributeValueId, + ) -> AttributeValueResult { + let prototype_id = AttributeValue::prototype_id(ctx, attribute_value_id).await?; + let prototype_func_id = AttributePrototype::func_id(ctx, prototype_id).await?; + let destination_component_id = + AttributeValue::component_id(ctx, attribute_value_id).await?; + let value_is_for = AttributeValue::is_for(ctx, attribute_value_id).await?; + let apa_ids = AttributePrototypeArgument::list_ids_for_prototype(ctx, prototype_id).await?; + let mut func_binding_args: HashMap> = HashMap::new(); + + for apa_id in apa_ids { + let apa = AttributePrototypeArgument::get_by_id(ctx, apa_id).await?; + let expected_source_component_id = apa + .targets() + .map(|targets| targets.source_component_id) + .unwrap_or(destination_component_id); + + if apa.targets().map_or(true, |targets| { + targets.destination_component_id == destination_component_id + }) { + let func_arg_id = + AttributePrototypeArgument::func_argument_id_by_id(ctx, apa_id).await?; + let func_arg_name = { + let workspace_snapshot = ctx.workspace_snapshot()?.read().await; + workspace_snapshot + .get_node_weight_by_id(func_arg_id)? + .get_func_argument_node_weight()? + .name() + .to_owned() + }; + + let values_for_arg = + match AttributePrototypeArgument::value_source_by_id(ctx, apa_id) .await? 
- .ok_or(AttributeValueError::ComponentNotFoundById(component_id))?; - let schema_variant = component - .schema_variant(ctx) - .await - .map_err(|e| AttributeValueError::Component(e.to_string()))? - .ok_or(AttributeValueError::SchemaVariantNotFoundForComponent( - component_id, - ))?; - *schema_variant.id() + .ok_or( + AttributeValueError::AttributePrototypeArgumentMissingValueSource( + apa_id, + ), + )? { + ValueSource::StaticArgumentValue(static_argument_value_id) => { + vec![ + StaticArgumentValue::get_by_id(ctx, static_argument_value_id) + .await? + .value, + ] + } + other_source => { + let mut values = vec![]; + + for av_id in other_source + .attribute_values_for_component_id( + ctx, + expected_source_component_id, + ) + .await? + { + let attribute_value = AttributeValue::get_by_id(ctx, av_id).await?; + // XXX: We need to properly handle the difference between "there is + // XXX: no value" vs "the value is null", but right now we collapse + // XXX: the two to just be "null" when passing these to a function. + values.push( + attribute_value + .materialized_view(ctx) + .await? + .unwrap_or(serde_json::Value::Null), + ); + } + + values + } + }; + + func_binding_args + .entry(func_arg_name) + .and_modify(|values| values.extend(values_for_arg.clone())) + .or_insert(values_for_arg); } - _ => { - return Err(AttributeValueError::MissingComponentInReadContext(context)); + } + + let prepared_func_binding_args = if let ValueIsFor::InternalProvider(_) = &value_is_for { + // If our destination is an internal provider, we awlays want to provide an array of + // the values so functions don't have to distinguish between a single value that is an + // array, or an array of values (for example if an input socket has multiple + // connections) + serde_json::to_value(func_binding_args)? 
+ } else { + // The value map above could possibly have multiple values per func argument name if + // there are We need to transform these vecs to a serde_json array before sending them + // to the function executor. We also want to send a single value if there is only a + // single input, since that is the typical case and what is expected by most attribute + // functions. + let mut prepared_func_binding_args = HashMap::new(); + for (arg_name, values) in func_binding_args { + if values.is_empty() { + return Err( + AttributeValueError::EmptyAttributePrototypeArgumentsForGroup(arg_name), + ); + } else if values.len() == 1 { + prepared_func_binding_args.insert(arg_name, values[0].to_owned()); + } else { + let vec_value = serde_json::to_value(values)?; + prepared_func_binding_args.insert(arg_name, vec_value); + } } + serde_json::to_value(prepared_func_binding_args)? }; - let rows = ctx - .txns() - .await? - .pg() - .query( - LIST_PAYLOAD_FOR_READ_CONTEXT, - &[ - ctx.tenancy(), - ctx.visibility(), - &context, - &schema_variant_id, - ], - ) - .await?; - let mut result = Vec::new(); - for row in rows.into_iter() { - let func_binding_return_value_json: serde_json::Value = row.try_get("object")?; - let func_binding_return_value: Option = - serde_json::from_value(func_binding_return_value_json)?; - - let prop_json: serde_json::Value = row.try_get("prop_object")?; - let prop: Prop = serde_json::from_value(prop_json)?; - - let attribute_value_json: serde_json::Value = row.try_get("attribute_value_object")?; - let attribute_value: AttributeValue = serde_json::from_value(attribute_value_json)?; - - let parent_attribute_value_id: Option = - row.try_get("parent_attribute_value_id")?; - - result.push(AttributeValuePayload::new( - prop, - func_binding_return_value, - attribute_value, - parent_attribute_value_id, - )); - } - Ok(result) - } + // We need the associated [`ComponentId`] for this function--this is how we resolve and + // prepare before functions + let 
associated_component_id = AttributeValue::component_id(ctx, attribute_value_id).await?; + let before = before_funcs_for_component(ctx, &associated_component_id) + .await + .map_err(|e| AttributeValueError::BeforeFunc(e.to_string()))?; - /// This method is similar to [`Self::list_payload_for_read_context()`], but it leverages a - /// root [`AttributeValueId`](crate::AttributeValue) in order to find payloads at any - /// root [`Prop`](crate::Prop) corresponding to the provided context and root value. - /// - /// Requirements for the [`AttributeReadContext`](crate::AttributeReadContext): - /// - [`PropId`](crate::Prop) must be set to [`None`] - /// - Both providers fields must be unset - pub async fn list_payload_for_read_context_and_root( - ctx: &DalContext, - root_attribute_value_id: AttributeValueId, - context: AttributeReadContext, - ) -> AttributeValueResult> { - if context.has_prop_id() - || !context.has_unset_internal_provider() - || !context.has_unset_external_provider() + let (_, func_binding_return_value) = match FuncBinding::create_and_execute( + ctx, + prepared_func_binding_args.clone(), + prototype_func_id, + before, + ) + .instrument(debug_span!( + "Func execution", + "func.id" = %prototype_func_id, + ?prepared_func_binding_args, + )) + .await { - return Err(AttributeValueError::IncompatibleAttributeReadContext("incompatible attribute read context for query: prop must be empty and providers must be unset")); - } + Ok(function_return_value) => function_return_value, + Err(FuncBindingError::FuncBackendResultFailure { + kind, + message, + backend, + }) => { + return Err(AttributeValueError::FuncBackendResultFailure { + kind, + message, + backend, + }); + } + Err(err) => Err(err)?, + }; - let rows = ctx - .txns() - .await? 
- .pg() - .query( - LIST_PAYLOAD_FOR_READ_CONTEXT_AND_ROOT, - &[ - ctx.tenancy(), - ctx.visibility(), - &context, - &root_attribute_value_id, - ], - ) - .await?; + let unprocessed_value = func_binding_return_value.unprocessed_value().cloned(); + let processed_value = match value_is_for { + ValueIsFor::Prop(prop_id) => match &unprocessed_value { + Some(unprocessed_value) => { + let prop = Prop::get_by_id(ctx, prop_id).await?; + match prop.kind { + PropKind::Object | PropKind::Map => Some(serde_json::json!({})), + PropKind::Array => Some(serde_json::json!([])), + _ => Some(unprocessed_value.to_owned()), + } + } + None => None, + }, + _ => func_binding_return_value.value().cloned(), + }; - let mut result = Vec::new(); - for row in rows.into_iter() { - let func_binding_return_value_json: serde_json::Value = row.try_get("object")?; - let func_binding_return_value: Option = - serde_json::from_value(func_binding_return_value_json)?; + Ok(PrototypeExecutionResult { + value: processed_value, + unprocessed_value, + func_execution_pk: func_binding_return_value.func_execution_pk(), + }) + } - let prop_json: serde_json::Value = row.try_get("prop_object")?; - let prop: Prop = serde_json::from_value(prop_json)?; + pub async fn set_values_from_execution_result( + ctx: &DalContext, + attribute_value_id: AttributeValueId, + PrototypeExecutionResult { + value, + unprocessed_value, + func_execution_pk, + }: PrototypeExecutionResult, + ) -> AttributeValueResult<()> { + // We need to ensure the parent value tree for this value is set. But we don't want to + // vivify the current attribute value since that would override the function which sets it + // (and we're setting it ourselves, just below). Note that this will override the + // prototypes for all parent values to intrinsic setters. 
But, a value set by an attribute + // function other than an intrinsic setter (si:setString, etc) must not be the child of + // *another* value set by an attribute function (other than another intrinsic setter). + // Otherwise it would be impossible to determine the function that sets the value (two + // functions would set it with two different sets of inputs). So vivify the parent and + // above, but not this value. + if let Some(parent_attribute_value_id) = + Self::parent_attribute_value_id(ctx, attribute_value_id).await? + { + Self::vivify_value_and_parent_values(ctx, parent_attribute_value_id).await?; + } - let attribute_value_json: serde_json::Value = row.try_get("attribute_value_object")?; - let attribute_value: AttributeValue = serde_json::from_value(attribute_value_json)?; + let values_are_different = value != unprocessed_value; - let parent_attribute_value_id: Option = - row.try_get("parent_attribute_value_id")?; + Self::set_real_values( + ctx, + attribute_value_id, + value, + unprocessed_value.clone(), + func_execution_pk, + ) + .await?; - result.push(AttributeValuePayload::new( - prop, - func_binding_return_value, - attribute_value, - parent_attribute_value_id, - )); + if values_are_different { + Self::populate_nested_values(ctx, attribute_value_id, unprocessed_value).await?; + } else { + let materialized_view = + AttributeValue::create_materialized_view(ctx, attribute_value_id).await?; + Self::set_materialized_view(ctx, attribute_value_id, materialized_view).await?; } - Ok(result) + + Ok(()) } - // Eventually, this should be usable for *ALL* Component AttributeValues, but - // there isn't much point in supporting not-Prop AttributeValues until there - // is a way to assign functions other than the identity function to them. - pub async fn use_default_prototype( + pub async fn update_from_prototype_function( ctx: &DalContext, attribute_value_id: AttributeValueId, ) -> AttributeValueResult<()> { - let row = ctx - .txns() - .await? 
- .pg() - .query_one( - "SELECT attribute_value_use_default_prototype_v1($1, $2, $3) AS changed", - &[ctx.tenancy(), ctx.visibility(), &attribute_value_id], - ) - .await?; + let execution_result = + AttributeValue::execute_prototype_function(ctx, attribute_value_id).await?; - if row.get("changed") { - // Update from prototype & trigger dependent values update - let mut av = AttributeValue::get_by_id(ctx, &attribute_value_id) - .await? - .ok_or_else(|| { - AttributeValueError::NotFound(attribute_value_id, *ctx.visibility()) - })?; - av.update_from_prototype_function(ctx).await?; - ctx.enqueue_dependent_values_update(vec![attribute_value_id]) - .await?; - } + AttributeValue::set_values_from_execution_result(ctx, attribute_value_id, execution_result) + .await?; Ok(()) } - /// Update the [`AttributeValue`] for a specific [`AttributeContext`] to the given value. If the - /// given [`AttributeValue`] is for a different [`AttributeContext`] than the one provided, a - /// new [`AttributeValue`] will be created for the given [`AttributeContext`]. - /// - /// By passing in [`None`] as the `value`, the caller is explicitly saying "this value does not - /// exist here". This is potentially useful for "tombstoning" values that have been inherited - /// from a less-specific [`AttributeContext`]. For example, if a value has been set for a - /// [`SchemaVariant`](crate::SchemaVariant), but we do not want that value to exist for a - /// specific [`Component`](crate::Component), we can update the variant's value to [`None`] in - /// an [`AttributeContext`] specific to that component. 
- /// - /// This method returns the following: - /// - the [`Option`] that was passed in - /// - the updated [`AttributeValueId`](Self) - pub async fn update_for_context( + pub async fn component_id( ctx: &DalContext, attribute_value_id: AttributeValueId, - parent_attribute_value_id: Option, - context: AttributeContext, - value: Option, - // TODO: Allow updating the key - key: Option, - ) -> AttributeValueResult<(Option, AttributeValueId)> { - Self::update_for_context_raw( - ctx, - attribute_value_id, - parent_attribute_value_id, - context, - value, - key, - true, - true, - ) - .await - } + ) -> AttributeValueResult { + let workspace_snapshot = ctx.workspace_snapshot()?.read().await; + + // walk the contain edges to the root attribute value + let mut current_attribute_value_id = attribute_value_id; + while let Some(parent_target) = workspace_snapshot + .incoming_sources_for_edge_weight_kind( + current_attribute_value_id, + EdgeWeightKindDiscriminants::Contain, + )? + .first() + .copied() + { + current_attribute_value_id = workspace_snapshot + .get_node_weight(parent_target)? + .id() + .into(); + } - pub async fn update_for_context_without_propagating_dependent_values( - ctx: &DalContext, - attribute_value_id: AttributeValueId, - parent_attribute_value_id: Option, - context: AttributeContext, - value: Option, - // TODO: Allow updating the key - key: Option, - ) -> AttributeValueResult<(Option, AttributeValueId)> { - Self::update_for_context_raw( - ctx, - attribute_value_id, - parent_attribute_value_id, - context, - value, - key, - true, - false, - ) - .await + // current_attribute_value_id is now the root attribute value. Check if it has a socket + // edge or a root edge. (Whether it is a value for a socket or for a prop) + let component_target = match workspace_snapshot + .incoming_sources_for_edge_weight_kind( + current_attribute_value_id, + EdgeWeightKindDiscriminants::Root, + )? 
+ .first() + .copied() + { + Some(component_target) => component_target, + None => workspace_snapshot + .incoming_sources_for_edge_weight_kind( + current_attribute_value_id, + EdgeWeightKindDiscriminants::Socket, + )? + .first() + .copied() + .ok_or(AttributeValueError::OrphanedAttributeValue( + current_attribute_value_id, + ))?, + }; + + Ok(workspace_snapshot + .get_node_weight(component_target)? + .id() + .into()) } - pub async fn update_for_context_without_creating_proxies( + pub async fn insert( ctx: &DalContext, - attribute_value_id: AttributeValueId, - parent_attribute_value_id: Option, - context: AttributeContext, + parent_attribute_value_id: AttributeValueId, value: Option, - // TODO: Allow updating the key key: Option, - ) -> AttributeValueResult<(Option, AttributeValueId)> { - Self::update_for_context_raw( + ) -> AttributeValueResult { + let element_prop_id: PropId = { + let workspace_snapshot = ctx.workspace_snapshot()?.read().await; + + // Find the array or map prop. + let prop_index = workspace_snapshot + .outgoing_targets_for_edge_weight_kind( + parent_attribute_value_id, + EdgeWeightKindDiscriminants::Prop, + )? + .first() + .copied() + .ok_or(AttributeValueError::MissingPropEdge( + parent_attribute_value_id, + ))?; + + let prop_node_weight = workspace_snapshot + .get_node_weight(prop_index)? + .get_prop_node_weight()?; + + // Ensure it actually is an array or map prop. + if prop_node_weight.kind() != PropKind::Array + && prop_node_weight.kind() != PropKind::Map + { + return Err(AttributeValueError::InsertionForInvalidPropKind( + prop_node_weight.kind(), + )); + } + + // Find a singular child prop for the map or an array prop (i.e. the "element" or "entry" prop"). 
+ let prop_id = PropId::from(prop_node_weight.id()); + let child_prop_indices = workspace_snapshot.outgoing_targets_for_edge_weight_kind( + prop_node_weight.id(), + EdgeWeightKindDiscriminants::Use, + )?; + if child_prop_indices.len() > 1 { + return Err(AttributeValueError::PropMoreThanOneChild(prop_id)); + } + let element_prop_index = child_prop_indices + .first() + .ok_or(AttributeValueError::PropMissingElementProp(prop_id))? + .to_owned(); + + workspace_snapshot + .get_node_weight(element_prop_index)? + .get_prop_node_weight()? + .clone() + .id() + .into() + }; + + // Create the "element" attribute value in the array or map alongside an attribute prototype for it. + let new_attribute_value = Self::new( ctx, - attribute_value_id, - parent_attribute_value_id, - context, - value, + element_prop_id, + None, + Some(parent_attribute_value_id), key, - false, - true, ) - .await + .await?; + + let func_id = Func::find_intrinsic(ctx, IntrinsicFunc::Unset).await?; + AttributePrototype::new(ctx, func_id).await?; + + // The element has been created an inserted. Now, we can update it with the provided value. + Self::update(ctx, new_attribute_value.id, value).await?; + + Ok(new_attribute_value.id()) } - #[allow(clippy::too_many_arguments)] - async fn update_for_context_raw( + async fn vivify_value_and_parent_values( ctx: &DalContext, attribute_value_id: AttributeValueId, - parent_attribute_value_id: Option, - context: AttributeContext, - value: Option, - // TODO: Allow updating the key - key: Option, - create_child_proxies: bool, - propagate_dependent_values: bool, - ) -> AttributeValueResult<(Option, AttributeValueId)> { - // TODO(nick,paulo,zack,jacob): ensure we do not _have_ to do this in the future. - let ctx = &ctx.clone_without_deleted_visibility(); + ) -> AttributeValueResult<()> { + let mut current_attribute_value_id = Some(attribute_value_id); - let row = ctx.txns() - .await? 
- .pg() - .query_one( - "SELECT new_attribute_value_id FROM attribute_value_update_for_context_raw_v1($1, $2, $3, $4, $5, $6, $7, $8)", - &[ - ctx.tenancy(), - ctx.visibility(), - &attribute_value_id, - &parent_attribute_value_id, - &context, - &value, - &key, - &create_child_proxies, - ], - ).await?; - - let new_attribute_value_id: AttributeValueId = row.try_get("new_attribute_value_id")?; - - if !context.is_component_unset() { - ctx.enqueue_dependencies_update_component(context.component_id()) - .await?; - } + while let Some(attribute_value_id) = current_attribute_value_id { + let empty_value = { + let prop_id = match AttributeValue::is_for(ctx, attribute_value_id) + .await? + .prop_id() + { + Some(prop_id) => prop_id, + // Only prop values can be "vivified", but we don't return an error here to + // simplify the use of this function + None => return Ok(()), + }; - // TODO(fnichol): we might want to fire off a status even at this point, however we've - // already updated the initial attribute value, so is there much value? + let prop_node = { + let workspace_snapshot = ctx.workspace_snapshot()?.read().await; + workspace_snapshot + .get_node_weight_by_id(prop_id)? + .get_prop_node_weight()? + }; - if propagate_dependent_values && !ctx.no_dependent_values() { - ctx.enqueue_dependent_values_update(vec![new_attribute_value_id]) - .await?; - } + prop_node.kind().empty_value() + }; - if let Some(av) = AttributeValue::get_by_id(ctx, &new_attribute_value_id).await? 
{ - Prop::run_validation( - ctx, - context.prop_id(), - context.component_id(), - av.key(), - value.clone().unwrap_or_default(), - ) - .await; + let attribute_value = Self::get_by_id(ctx, attribute_value_id).await?; + + // If we have a set value, we don't need to vivify + if attribute_value.value.is_some() { + return Ok(()); + } else { + Self::set_value(ctx, attribute_value_id, empty_value).await?; + + current_attribute_value_id = + AttributeValue::parent_attribute_value_id(ctx, attribute_value_id).await?; + } } - Ok((value, new_attribute_value_id)) + Ok(()) } - /// Insert a new value under the parent [`AttributeValue`] in the given [`AttributeContext`]. This is mostly only - /// useful for adding elements to a [`PropKind::Array`], or to a [`PropKind::Map`]. Updating existing values in an - /// [`Array`](PropKind::Array), or [`Map`](PropKind::Map), and setting/updating all other [`PropKind`] should be - /// able to directly use [`update_for_context()`](AttributeValue::update_for_context()), as there will already be an - /// appropriate [`AttributeValue`] to use. By using this function, - /// [`update_for_context()`](AttributeValue::update_for_context()) is called after we have created an appropriate - /// [`AttributeValue`] to use. 
- #[instrument(skip_all, level = "debug")] - pub async fn insert_for_context( + async fn create_nested_value( ctx: &DalContext, - item_attribute_context: AttributeContext, - array_or_map_attribute_value_id: AttributeValueId, + attribute_value_id: AttributeValueId, value: Option, + func_id: FuncId, + prop_id: PropId, key: Option, ) -> AttributeValueResult { - Self::insert_for_context_raw( - ctx, - item_attribute_context, - array_or_map_attribute_value_id, - value, - key, - true, - ) - .await + let prop_kind = { + let workspace_snapshot = ctx.workspace_snapshot()?.read().await; + + let prop_node_index = workspace_snapshot.get_node_index_by_id(prop_id)?; + if let NodeWeight::Prop(prop_inner) = + workspace_snapshot.get_node_weight(prop_node_index)? + { + prop_inner.kind() + } else { + return Err(AttributeValueError::NodeWeightMismatch( + prop_node_index, + NodeWeightDiscriminants::Prop, + )); + } + }; + + let new_attribute_value = + Self::new(ctx, prop_id, None, Some(attribute_value_id), key).await?; + + AttributePrototype::new(ctx, func_id).await?; + + match prop_kind { + PropKind::Object | PropKind::Map => { + Self::set_value( + ctx, + new_attribute_value.id, + if value.is_some() { + Some(serde_json::json!({})) + } else { + None + }, + ) + .await?; + } + PropKind::Array => { + Self::set_value( + ctx, + new_attribute_value.id, + if value.is_some() { + Some(serde_json::json!([])) + } else { + None + }, + ) + .await?; + } + _ => { + Self::set_value(ctx, new_attribute_value.id, value).await?; + } + } + + Ok(new_attribute_value.id) } - #[instrument(skip_all, level = "debug")] - pub async fn insert_for_context_without_creating_proxies( + pub async fn order( + &self, ctx: &DalContext, - parent_context: AttributeContext, - parent_attribute_value_id: AttributeValueId, - value: Option, - key: Option, - ) -> AttributeValueResult { - Self::insert_for_context_raw( - ctx, - parent_context, - parent_attribute_value_id, - value, - key, - false, - ) - .await + ) -> 
AttributeValueResult>> { + let workspace_snapshot = ctx.workspace_snapshot()?.read().await; + + Ok(workspace_snapshot + .ordering_node_for_container(self.id())? + .map(|node| node.order().clone().into_iter().map(Into::into).collect())) } - #[instrument(skip_all, level = "debug")] - async fn insert_for_context_raw( + async fn populate_nested_values( ctx: &DalContext, - item_attribute_context: AttributeContext, - array_or_map_attribute_value_id: AttributeValueId, + attribute_value_id: AttributeValueId, value: Option, - key: Option, - create_child_proxies: bool, - ) -> AttributeValueResult { - let row = ctx.txns().await?.pg().query_one( - "SELECT new_attribute_value_id FROM attribute_value_insert_for_context_raw_v1($1, $2, $3, $4, $5, $6, $7)", - &[ - ctx.tenancy(), - ctx.visibility(), - &item_attribute_context, - &array_or_map_attribute_value_id, - &value, - &key, - &create_child_proxies, - ], - ).await?; - - let new_attribute_value_id: AttributeValueId = row.try_get("new_attribute_value_id")?; - - if !item_attribute_context.is_component_unset() { - ctx.enqueue_dependencies_update_component(item_attribute_context.component_id()) - .await?; - } + ) -> AttributeValueResult<()> { + // Cache the unset func id before getting the workspace snapshot. + let unset_func_id = Func::find_intrinsic(ctx, IntrinsicFunc::Unset).await?; - if !ctx.no_dependent_values() { - ctx.enqueue_dependent_values_update(vec![new_attribute_value_id]) - .await?; + { + let mut workspace_snapshot = ctx.workspace_snapshot()?.write().await; + + // Remove child attribute value edges + for attribute_value_target in workspace_snapshot.outgoing_targets_for_edge_weight_kind( + attribute_value_id, + EdgeWeightKindDiscriminants::Contain, + )? 
{ + let current_node_index = + workspace_snapshot.get_node_index_by_id(attribute_value_id)?; + let current_target_idx = + workspace_snapshot.get_latest_node_index(attribute_value_target)?; + + workspace_snapshot.remove_edge( + ctx.change_set_pointer()?, + current_node_index, + current_target_idx, + EdgeWeightKindDiscriminants::Contain, + )?; + } } - if let Some(av) = AttributeValue::get_by_id(ctx, &new_attribute_value_id).await? { - Prop::run_validation( - ctx, - av.context.prop_id(), - av.context.component_id(), - av.key(), - value.clone().unwrap_or_default(), - ) - .await; - } + let mut work_queue = VecDeque::from([(attribute_value_id, value)]); - Ok(new_attribute_value_id) - } + let mut view_stack = Vec::new(); - #[instrument(skip_all, level = "debug")] - pub async fn update_parent_index_map(&self, ctx: &DalContext) -> AttributeValueResult<()> { - let _row = ctx - .txns() - .await? - .pg() - .query( - "SELECT attribute_value_update_parent_index_map_v1($1, $2, $3)", - &[ctx.tenancy(), ctx.visibility(), &self.id], - ) - .await?; + while let Some((attribute_value_id, maybe_value)) = work_queue.pop_front() { + let (prop_kind, prop_id) = { + let prop_id = Self::is_for(ctx, attribute_value_id) + .await? + .prop_id() + .ok_or( + AttributeValueError::CannotCreateNestedValuesForNonPropValues( + attribute_value_id, + ), + )?; + let prop = Prop::get_by_id(ctx, prop_id).await?; + + (prop.kind, prop_id) + }; + + view_stack.push(attribute_value_id); + + let (work_queue_extension, view_stack_extension) = match prop_kind { + PropKind::Object => { + Self::process_populate_nested_values_for_object( + ctx, + prop_id, + attribute_value_id, + unset_func_id, + maybe_value, + ) + .await? + } + PropKind::Array => { + Self::process_populate_nested_values_for_array( + ctx, + prop_id, + attribute_value_id, + unset_func_id, + maybe_value, + ) + .await? 
+ } + PropKind::Map => { + Self::process_populate_nested_values_for_map( + ctx, + prop_id, + attribute_value_id, + unset_func_id, + maybe_value, + ) + .await? + } + _ => continue, + }; + + // Extend the work queue by what was found when processing the container, if applicable. + work_queue.extend(work_queue_extension); + view_stack.extend(view_stack_extension); + } + + // walk up the tree that we touched, creating materialized views + while let Some(attribute_value_id) = view_stack.pop() { + let materialized_view = + AttributeValue::create_materialized_view(ctx, attribute_value_id).await?; + Self::set_materialized_view(ctx, attribute_value_id, materialized_view).await?; + } Ok(()) } - async fn populate_nested_values( + pub async fn create_materialized_view( ctx: &DalContext, - parent_attribute_value_id: AttributeValueId, - update_context: AttributeContext, - unprocessed_value: serde_json::Value, - ) -> AttributeValueResult<()> { - let _row = ctx - .txns() - .await? - .pg() - .query( - "SELECT attribute_value_populate_nested_values_v1($1, $2, $3, $4, $5)", - &[ - ctx.tenancy(), - ctx.visibility(), - &parent_attribute_value_id, - &update_context, - &unprocessed_value, - ], - ) - .await?; + attribute_value_id: AttributeValueId, + ) -> AttributeValueResult> { + let av = AttributeValue::get_by_id(ctx, attribute_value_id).await?; + if av.value(ctx).await?.is_none() { + return Ok(None); + } - Ok(()) + match AttributeValue::is_for(ctx, attribute_value_id).await? { + ValueIsFor::Prop(prop_id) => { + let prop_kind = { + let workspace_snapshot = ctx.workspace_snapshot()?.read().await; + workspace_snapshot + .get_node_weight_by_id(prop_id)? + .get_prop_node_weight()? 
+ .kind() + }; + + match prop_kind { + PropKind::Object => { + let mut object_view: HashMap = HashMap::new(); + let mut child_av_ids = vec![]; + + { + let workspace_snapshot = ctx.workspace_snapshot()?.read().await; + for child_target in workspace_snapshot + .outgoing_targets_for_edge_weight_kind( + attribute_value_id, + EdgeWeightKindDiscriminants::Contain, + )? + { + let av_id = workspace_snapshot.get_node_weight(child_target)?.id(); + child_av_ids.push(av_id.into()); + } + } + + for child_av_id in child_av_ids { + let child_av = AttributeValue::get_by_id(ctx, child_av_id).await?; + + if let ValueIsFor::Prop(child_prop_id) = + AttributeValue::is_for(ctx, child_av.id()).await? + { + let child_prop_name = { + let workspace_snapshot = ctx.workspace_snapshot()?.read().await; + workspace_snapshot + .get_node_weight_by_id(child_prop_id)? + .get_prop_node_weight()? + .name() + .to_owned() + }; + + let child_materialized_view = + child_av.materialized_view(ctx).await?; + if let Some(view) = child_materialized_view { + object_view.insert(child_prop_name, view); + } + } else { + return Err(AttributeValueError::UnexpectedGraphLayout("a child attribute value of an object has no outgoing Prop edge but has an outgoing Provider edge")); + } + } + + Ok(Some(serde_json::to_value(object_view)?)) + } + PropKind::Map => { + let mut map_view: HashMap = HashMap::new(); + + let child_av_idxs_and_keys: HashMap = { + let workspace_snapshot = ctx.workspace_snapshot()?.read().await; + + workspace_snapshot + .edges_directed_for_edge_weight_kind( + attribute_value_id, + Outgoing, + EdgeWeightKindDiscriminants::Contain, + )? 
+ .iter() + .filter_map(|edge_ref| { + if let EdgeWeightKind::Contain(Some(key)) = + edge_ref.weight().kind() + { + Some((key.to_owned(), edge_ref.target())) + } else { + None + } + }) + .collect() + }; + + for (key, node_index) in child_av_idxs_and_keys { + let child_av_id: AttributeValueId = { + let workspace_snapshot = ctx.workspace_snapshot()?.read().await; + workspace_snapshot.get_node_weight(node_index)?.id().into() + }; + + let child_av = AttributeValue::get_by_id(ctx, child_av_id).await?; + if let Some(view) = child_av.materialized_view(ctx).await? { + map_view.insert(key, view); + } + } + + Ok(Some(serde_json::to_value(map_view)?)) + } + PropKind::Array => { + let mut array_view = vec![]; + + let element_av_ids = { + let workspace_snapshot = ctx.workspace_snapshot()?.read().await; + workspace_snapshot + .ordered_children_for_node(attribute_value_id)? + .ok_or(AttributeValueError::UnexpectedGraphLayout( + "array attribute value has no ordering node", + ))? + }; + + for element_av_id in element_av_ids { + let av = AttributeValue::get_by_id(ctx, element_av_id.into()).await?; + if let Some(view) = av.materialized_view(ctx).await? { + array_view.push(view); + } + } + + Ok(Some(serde_json::to_value(array_view)?)) + } + _ => Ok(av.value(ctx).await?), + } + } + ValueIsFor::ExternalProvider(_) | ValueIsFor::InternalProvider(_) => { + Ok(av.value(ctx).await?) + } + } } - #[instrument(skip(ctx), level = "debug")] - pub async fn create_dependent_values( + async fn process_populate_nested_values_for_object( ctx: &DalContext, - attribute_value_ids: &[AttributeValueId], - ) -> AttributeValueResult<()> { - ctx.txns() - .await? 
- .pg() - .execute( - "SELECT attribute_value_create_new_affected_values_v1($1, $2, $3)", - &[&ctx.tenancy(), &ctx.visibility(), &attribute_value_ids], + prop_id: PropId, + attribute_value_id: AttributeValueId, + unset_func_id: FuncId, + maybe_value: Option, + ) -> AttributeValueResult<( + VecDeque<(AttributeValueId, Option)>, + Vec, + )> { + let maybe_object_map = match maybe_value { + Some(Value::Object(map)) => Some(map), + Some(value) => { + return Err(AttributeValueError::TypeMismatch( + PropKind::Object, + serde_value_to_string_type(&value), + )); + } + None => None, + }; + + let prop_map = { + let workspace_snapshot = ctx.workspace_snapshot()?.read().await; + + let child_prop_indexes = workspace_snapshot + .outgoing_targets_for_edge_weight_kind(prop_id, EdgeWeightKindDiscriminants::Use)?; + + let mut prop_map = HashMap::new(); + for node_index in child_prop_indexes { + if let NodeWeight::Prop(prop_inner) = + workspace_snapshot.get_node_weight(node_index)? + { + prop_map.insert( + prop_inner.name().to_string(), + (prop_inner.id(), prop_inner.kind()), + ); + } + } + prop_map + }; + + // Remove keys from our value if there is no corresponding child prop + let maybe_object_map = maybe_object_map.map(|mut map| { + map.retain(|k, _| prop_map.contains_key(k)); + map + }); + + let mut view_stack_extension = vec![]; + let mut work_queue_extension = VecDeque::new(); + for (key, (prop_id, prop_kind)) in prop_map.into_iter() { + let field_value = maybe_object_map + .as_ref() + .and_then(|map| map.get(&key).cloned()); + + let new_attribute_value_id = Self::create_nested_value( + ctx, + attribute_value_id, + field_value.clone(), + unset_func_id, + PropId::from(prop_id), + None, ) .await?; - Ok(()) + + match prop_kind { + PropKind::Array | PropKind::Map => { + if field_value.is_some() { + work_queue_extension.push_back((new_attribute_value_id, field_value)); + } + } + PropKind::Object => { + work_queue_extension.push_back((new_attribute_value_id, field_value)); + } + _ => 
view_stack_extension.push(new_attribute_value_id), + } + } + Ok((work_queue_extension, view_stack_extension)) } - /// Returns a [`HashMap`] with key [`AttributeValueId`](Self) and value - /// [`Vec`](Self) where the keys correspond to [`AttributeValues`](Self) that - /// are affected (directly and indirectly) by at least one of the provided - /// [`AttributeValueIds`](Self) having a new value. The [`Vec`](Self) - /// correspond to the [`AttributeValues`](Self) that the key directly depends on that are also - /// affected by at least one of the provided [`AttributeValueIds`](Self) having a new value. - /// - /// **NOTE**: This has the side effect of **CREATING NEW [`AttributeValues`](Self)** - /// if this [`AttributeValue`] affects an [`AttributeContext`](crate::AttributeContext) where an - /// [`AttributePrototype`](crate::AttributePrototype) that uses it didn't already have an - /// [`AttributeValue`]. - #[instrument(skip(ctx), level = "debug")] - pub async fn dependent_value_graph( + async fn process_populate_nested_values_for_array( ctx: &DalContext, - attribute_value_ids: &[AttributeValueId], - ) -> AttributeValueResult>> { - let rows = ctx - .txns() - .await? 
- .pg() - .query( - FETCH_UPDATE_GRAPH_DATA, - &[&ctx.tenancy(), ctx.visibility(), &attribute_value_ids], + prop_id: PropId, + attribute_value_id: AttributeValueId, + unset_func_id: FuncId, + maybe_value: Option, + ) -> AttributeValueResult<( + VecDeque<(AttributeValueId, Option)>, + Vec, + )> { + let mut work_queue_extension = VecDeque::new(); + let mut view_stack_extension = vec![]; + + let array_items = match maybe_value { + Some(serde_json::Value::Array(array)) => { + if array.is_empty() { + return Ok((work_queue_extension, view_stack_extension)); + } + array + } + Some(value) => { + return Err(AttributeValueError::TypeMismatch( + PropKind::Array, + serde_value_to_string_type(&value), + )); + } + None => return Ok((work_queue_extension, view_stack_extension)), + }; + + let (element_prop_id, element_prop_kind) = { + let workspace_snapshot = ctx.workspace_snapshot()?.read().await; + + // find the child element prop + let child_props = workspace_snapshot + .outgoing_targets_for_edge_weight_kind(prop_id, EdgeWeightKindDiscriminants::Use)?; + + if child_props.len() > 1 { + return Err(AttributeValueError::PropMoreThanOneChild(prop_id)); + } + + let element_prop_index = child_props + .first() + .ok_or(AttributeValueError::PropMissingElementProp(prop_id))? + .to_owned(); + + match workspace_snapshot.get_node_weight(element_prop_index)? { + NodeWeight::Prop(prop_inner) => (prop_inner.id(), prop_inner.kind()), + _ => { + return Err(AttributeValueError::NodeWeightMismatch( + element_prop_index, + NodeWeightDiscriminants::Prop, + )) + } + } + }; + + for array_item in array_items { + // TODO: should we type check the values here against the element prop? 
+ let array_item_value = Some(array_item); + let new_attribute_value_id = Self::create_nested_value( + ctx, + attribute_value_id, + array_item_value.clone(), + unset_func_id, + PropId::from(element_prop_id), + None, ) .await?; - let mut result: HashMap> = HashMap::new(); - for row in rows.into_iter() { - let attr_val_id: AttributeValueId = row.try_get("attribute_value_id")?; - let dependencies: Vec = - row.try_get("dependent_attribute_value_ids")?; - result.insert(attr_val_id, dependencies); + match element_prop_kind { + PropKind::Array | PropKind::Map => { + if array_item_value.is_some() { + work_queue_extension.push_back((new_attribute_value_id, array_item_value)); + } + } + PropKind::Object => { + work_queue_extension.push_back((new_attribute_value_id, array_item_value)); + } + _ => view_stack_extension.push(new_attribute_value_id), + } } - Ok(result) + Ok((work_queue_extension, view_stack_extension)) } - #[instrument(level = "info", skip_all)] - pub async fn ids_for_component( + pub async fn parent_attribute_value_id( ctx: &DalContext, - component_id: ComponentId, - ) -> AttributeValueResult> { - let result = ctx - .txns() - .await? - .pg() - .query( - ATTRIBUTE_VALUE_IDS_FOR_COMPONENT, - &[ctx.tenancy(), ctx.visibility(), &component_id], - ) - .await? - .iter() - .map(|r| r.get("attribute_value_id")) - .collect(); + attribute_value_id: AttributeValueId, + ) -> AttributeValueResult> { + let workspace_snapshot = ctx.workspace_snapshot()?.read().await; + + Ok( + match workspace_snapshot + .incoming_sources_for_edge_weight_kind( + attribute_value_id, + EdgeWeightKindDiscriminants::Contain, + )? + .first() + .copied() + { + Some(parent_idx) => { + Some(workspace_snapshot.get_node_weight(parent_idx)?.id().into()) + } + None => None, + }, + ) + } - Ok(result) + // AttributePrototypes for a value can be defined at the schema level, where + // they are connected by a prototype edge from the prop or provider that the + // AttributeValue is for. 
But they can also be defined at the component + // level, via prototype edge outgoing from the AttributeValue to the + // prototype. This fetches the component level prototype id, if it exists. + pub async fn component_prototype_id( + ctx: &DalContext, + attribute_value_id: AttributeValueId, + ) -> AttributeValueResult> { + let workspace_snapshot = ctx.workspace_snapshot()?.read().await; + let maybe_prototype_idx = workspace_snapshot + .outgoing_targets_for_edge_weight_kind( + attribute_value_id, + EdgeWeightKindDiscriminants::Prototype, + )? + .first() + .copied(); + + Ok(match maybe_prototype_idx { + Some(prototype_idx) => Some( + workspace_snapshot + .get_node_weight(prototype_idx)? + .id() + .into(), + ), + None => None, + }) } - pub async fn ids_using_dynamic_functions( + /// The id of the prototype that controls this attribute value at the level of the schema + /// variant + pub async fn schema_variant_prototype_id( ctx: &DalContext, - attribute_value_ids: &Vec, - ) -> AttributeValueResult> { - let result = ctx - .txns() - .await? - .pg() - .query( - ATTRIBUTE_VALUE_IDS_WITH_DYNAMIC_FUNCTIONS, - &[ctx.tenancy(), ctx.visibility(), attribute_value_ids], - ) + attribute_value_id: AttributeValueId, + ) -> AttributeValueResult { + let is_for_ulid: Ulid = AttributeValue::is_for(ctx, attribute_value_id) .await? - .iter() - .map(|r| r.get("attribute_value_id")) - .collect(); + .into(); + let workspace_snapshot = ctx.workspace_snapshot()?.read().await; + + // find an incoming contain edge if any, to grab the key for this value if it is part of a map + let mut key = None; + for edge_ref in workspace_snapshot.edges_directed_for_edge_weight_kind( + attribute_value_id, + Incoming, + EdgeWeightKindDiscriminants::Contain, + )? 
{ + if let EdgeWeightKind::Contain(contain_key) = edge_ref.weight().kind() { + key = contain_key.to_owned(); + } + } - Ok(result) + let mut prototype_target = None; + let mut none_prototype_target = None; + for edge_ref in workspace_snapshot.edges_directed_for_edge_weight_kind( + is_for_ulid, + Outgoing, + EdgeWeightKindDiscriminants::Prototype, + )? { + if let EdgeWeightKind::Prototype(prototype_key) = edge_ref.weight().kind() { + if &key == prototype_key { + prototype_target = Some(edge_ref.target()); + break; + } + if prototype_key.is_none() { + none_prototype_target = Some(edge_ref.target()); + } + } + } + + let real_prototype_target = prototype_target.or(none_prototype_target).ok_or( + AttributeValueError::AttributeValueMissingPrototype(attribute_value_id), + )?; + + Ok(workspace_snapshot + .get_node_weight(real_prototype_target)? + .id() + .into()) } - pub async fn vivify_value_and_parent_values( - &self, - ctx: &DalContext, - ) -> AttributeValueResult { - let row = ctx.txns().await?.pg().query_one( - "SELECT new_attribute_value_id FROM attribute_value_vivify_value_and_parent_values_raw_v1($1, $2, $3, $4, $5)", - &[ - ctx.tenancy(), - ctx.visibility(), - &self.context, - &self.id, - &true - ]).await?; - - Ok(row.try_get("new_attribute_value_id")?) + pub async fn key(&self, ctx: &DalContext) -> AttributeValueResult> { + Self::key_for_id(ctx, self.id()).await } - #[instrument( - name = "attribute_value.update_component_dependencies", - skip(ctx), - level = "debug" - )] - pub async fn update_component_dependencies( + pub async fn key_for_id( ctx: &DalContext, - component_id: ComponentId, - ) -> AttributeValueResult<()> { - ctx.txns() - .await? 
- .pg() - .execute( - "SELECT attribute_value_dependencies_update_component_v1($1, $2, $3)", - &[ctx.tenancy(), ctx.visibility(), &component_id], - ) - .await?; - - Ok(()) + attribute_value_id: AttributeValueId, + ) -> AttributeValueResult> { + let workspace_snapshot = ctx.workspace_snapshot()?.read().await; + + Ok(workspace_snapshot + .edges_directed(attribute_value_id, Incoming)? + .find(|edge_ref| matches!(edge_ref.weight().kind(), EdgeWeightKind::Contain(Some(_)))) + .and_then(|edge_ref| match edge_ref.weight().kind() { + EdgeWeightKind::Contain(key) => key.to_owned(), + _ => None, + })) } - /// Re-evaluates the current `AttributeValue`'s `AttributePrototype` to update the - /// `FuncBinding`, and `FuncBindingReturnValue`, reflecting the current inputs to - /// the function. - #[instrument( - name = "attribute_value.update_from_prototype_function", - skip_all, - level = "debug", - fields( - attribute_value.id = % self.id, - change_set_pk = % ctx.visibility().change_set_pk, - ) - )] - pub async fn update_from_prototype_function( - &mut self, + /// Returns the most specific prototype id for this attribute value. If a component specific + /// prototype id is defined, that will be returned. Otherwise, the schema variant specific + /// prototype id is returned. + pub async fn prototype_id( ctx: &DalContext, - ) -> AttributeValueResult<()> { - // Check if this AttributeValue is for an implicit InternalProvider as they have special behavior that doesn't involve - // AttributePrototype and AttributePrototypeArguments. - if self - .context - .is_least_specific_field_kind_internal_provider()? - { - let internal_provider = - InternalProvider::get_by_id(ctx, &self.context.internal_provider_id()) - .await? 
- .ok_or_else(|| { - AttributeValueError::InternalProviderNotFound( - self.context.internal_provider_id(), - ) - })?; - if internal_provider.is_internal_consumer() { - // We don't care about the AttributeValue that comes back from implicit_emit, since we should already be - // operating on an AttributeValue that has the correct AttributeContext, which means that a new one should - // not need to be created. - internal_provider - .implicit_emit(ctx, self) - .await - .map_err(|e| AttributeValueError::InternalProvider(e.to_string()))?; + attribute_value_id: AttributeValueId, + ) -> AttributeValueResult { + let maybe_prototype_id = + AttributeValue::component_prototype_id(ctx, attribute_value_id).await?; + + match maybe_prototype_id { + Some(prototype_id) => Ok(prototype_id), + // If there is no Prototype edge the prototype for this value is defined at the schema variant level + None => Ok(AttributeValue::schema_variant_prototype_id(ctx, attribute_value_id).await?), + } + } - return Ok(()); + async fn process_populate_nested_values_for_map( + ctx: &DalContext, + prop_id: PropId, + attribute_value_id: AttributeValueId, + unset_func_id: FuncId, + maybe_value: Option, + ) -> AttributeValueResult<( + VecDeque<(AttributeValueId, Option)>, + Vec, + )> { + let mut work_queue_extension = VecDeque::new(); + let mut view_stack_extension = vec![]; + + let map_map = match maybe_value { + Some(Value::Object(map)) => { + if map.is_empty() { + return Ok((work_queue_extension, view_stack_extension)); + } + map } - } else if self.context.is_least_specific_field_kind_prop()? { - if let Some(parent_attribute_value) = self.parent_attribute_value(ctx).await? 
{ - parent_attribute_value - .vivify_value_and_parent_values(ctx) - .await?; + Some(value) => { + return Err(AttributeValueError::TypeMismatch( + PropKind::Map, + serde_value_to_string_type(&value), + )); } - } + None => return Ok((work_queue_extension, view_stack_extension)), + }; - // The following should handle explicit "normal" Attributes, InternalProviders, and ExternalProviders already. - let attribute_prototype = self.attribute_prototype(ctx).await?.ok_or_else(|| { - AttributeValueError::AttributePrototypeNotFound(self.id, *ctx.visibility()) - })?; - - // Check if the function is one of the "si:set*", or "si:unset" functions, as these are - // special, and can't actually be re-run. Their values are static anyway, so re-running it - // wouldn't change anything. The "si:setObject", "si:setArray", and "si:setMap" functions - // are a bit special, however, as the "local" value will always be an empty object, array, - // or map. - let func = Func::get_by_id(ctx, &attribute_prototype.func_id()) - .await? - .ok_or_else(|| { - AttributeValueError::MissingFunc(format!("Unable to get func for {:?}", self.id())) - })?; - if func.name() == "si:setObject" - || func.name() == "si:setMap" - || func.name() == "si:setArray" - || func.name() == "si:setString" - || func.name() == "si:setInteger" - || func.name() == "si:setBoolean" - || func.name() == "si:unset" - { - return Ok(()); - } + let (element_prop_id, element_prop_kind) = { + let workspace_snapshot = ctx.workspace_snapshot()?.read().await; - // Note(victor): Secrets should never be passed to functions as arguments directly. 
- // We detect if they're set as dependencies and later fetch before functions to execute - // This is so secret values still trigger the dependent values system, - // and before functions are only called when necessary - let mut func_binding_args: HashMap> = HashMap::new(); - for mut argument_data in attribute_prototype - .argument_values(ctx, self.context) - .await - .map_err(|e| AttributeValueError::AttributePrototype(e.to_string()))? - { - match argument_data.values.len() { - 1 => { - let argument = argument_data.values.pop().ok_or_else(|| { - AttributeValueError::EmptyAttributePrototypeArgumentsForGroup( - argument_data.argument_name.clone(), - ) - })?; - - func_binding_args.insert( - argument_data.argument_name, - Some(serde_json::to_value(argument)?), - ); + // find the child element prop + let child_props = workspace_snapshot + .outgoing_targets_for_edge_weight_kind(prop_id, EdgeWeightKindDiscriminants::Use)?; + + if child_props.len() > 1 { + return Err(AttributeValueError::PropMoreThanOneChild(prop_id)); + } + + let element_prop_index = child_props + .first() + .ok_or(AttributeValueError::PropMissingElementProp(prop_id))? + .to_owned(); + + match workspace_snapshot.get_node_weight(element_prop_index)? { + NodeWeight::Prop(prop_inner) => (prop_inner.id(), prop_inner.kind()), + _ => { + return Err(AttributeValueError::NodeWeightMismatch( + element_prop_index, + NodeWeightDiscriminants::Prop, + )) } - 2.. 
=> { - func_binding_args.insert( - argument_data.argument_name, - Some(serde_json::to_value(argument_data.values)?), - ); + } + }; + + for (key, value) in map_map.into_iter() { + let value = Some(value); + let new_attribute_value_id = Self::create_nested_value( + ctx, + attribute_value_id, + value.clone(), + unset_func_id, + PropId::from(element_prop_id), + Some(key.to_owned()), + ) + .await?; + + match element_prop_kind { + PropKind::Array | PropKind::Map => { + if value.is_some() { + work_queue_extension.push_back((new_attribute_value_id, value)); + } } - _ => { - return Err( - AttributeValueError::EmptyAttributePrototypeArgumentsForGroup( - argument_data.argument_name, - ), - ); + PropKind::Object => { + work_queue_extension.push_back((new_attribute_value_id, value)); } - }; + _ => view_stack_extension.push(new_attribute_value_id), + } } + Ok((work_queue_extension, view_stack_extension)) + } - // We need the associated [`ComponentId`] for this function--this is how we resolve and - // prepare before functions - let associated_component_id = self.context.component_id(); - let before = before_funcs_for_component(ctx, &associated_component_id).await?; + /// Set's the component specific prototype id for this attribute value. 
+ pub async fn set_component_prototype_id( + ctx: &DalContext, + attribute_value_id: AttributeValueId, + attribute_prototype_id: AttributePrototypeId, + ) -> AttributeValueResult<()> { + let maybe_existing_prototype_id = + Self::component_prototype_id(ctx, attribute_value_id).await?; - let (func_binding, mut func_binding_return_value) = match FuncBinding::create_and_execute( - ctx, - serde_json::to_value(func_binding_args.clone())?, - *func.id(), - before, - ) - .instrument(debug_span!( - "Func execution", - "func.id" = %func.id(), - ?func_binding_args, - )) - .await - { - Ok(function_return_value) => function_return_value, - Err(FuncBindingError::FuncBackendResultFailure { - kind, - message, - backend, - }) => { - return Err(AttributeValueError::FuncBackendResultFailure { - kind, - message, - backend, - }); + if let Some(exsiting_prototype_id) = maybe_existing_prototype_id { + AttributePrototype::remove(ctx, exsiting_prototype_id).await?; + } + + let mut workspace_snapshot = ctx.workspace_snapshot()?.write().await; + workspace_snapshot.add_edge( + attribute_value_id, + EdgeWeight::new(ctx.change_set_pointer()?, EdgeWeightKind::Prototype(None))?, + attribute_prototype_id, + )?; + + Ok(()) + } + + async fn set_value( + ctx: &DalContext, + attribute_value_id: AttributeValueId, + value: Option, + ) -> AttributeValueResult<()> { + let prop_id = match AttributeValue::is_for(ctx, attribute_value_id).await? 
{ + ValueIsFor::Prop(prop_id) => prop_id, + _ => { + // Attribute values for internal and external providers should only be set by + // functions (usually identity) since they get their values from inter-component + // connections + return Err(AttributeValueError::CannotExplicitlySetProviderValues( + attribute_value_id, + )); } - Err(err) => Err(err)?, }; - self.set_func_binding_id(ctx, *func_binding.id()).await?; - self.set_func_binding_return_value_id(ctx, *func_binding_return_value.id()) - .await?; + let intrinsic_func = { + let workspace_snapshot = ctx.workspace_snapshot()?.read().await; + + let prop_node = workspace_snapshot + .get_node_weight_by_id(prop_id)? + .get_prop_node_weight()?; + + // None for the value means there is no value, so we use unset, but if it's a + // literal serde_json::Value::Null it means the value is set, but to null + if value.is_none() { + IntrinsicFunc::Unset + } else { + match prop_node.kind() { + PropKind::Array => IntrinsicFunc::SetArray, + PropKind::Boolean => IntrinsicFunc::SetBoolean, + PropKind::Integer => IntrinsicFunc::SetInteger, + PropKind::Map => IntrinsicFunc::SetMap, + PropKind::Object => IntrinsicFunc::SetObject, + PropKind::String => IntrinsicFunc::SetString, + } + } + }; - // If the value we just updated was for a Prop, we might have run a function that - // generates a deep data structure. If the Prop is an Array/Map/Object, then the - // value should be an empty Array/Map/Object, while the unprocessed value contains - // the deep data structure. - if self.context.is_least_specific_field_kind_prop()? { - let processed_value = match func_binding_return_value.unprocessed_value().cloned() { - Some(unprocessed_value) => { - let prop = Prop::get_by_id(ctx, &self.context.prop_id()) - .await? 
- .ok_or_else(|| AttributeValueError::PropNotFound(self.context.prop_id()))?; + let func_id = Func::find_intrinsic(ctx, intrinsic_func).await?; + let prototype = AttributePrototype::new(ctx, func_id).await?; - match prop.kind() { - PropKind::Object | PropKind::Map => Some(serde_json::json!({})), - PropKind::Array => Some(serde_json::json!([])), - _ => Some(unprocessed_value), - } - } - None => None, - }; + Self::set_component_prototype_id(ctx, attribute_value_id, prototype.id()).await?; - func_binding_return_value - .set_value(ctx, processed_value) - .await?; - }; - // If they are different from each other, then we know - // that we need to fully process the deep data structure, populating - // AttributeValues for the child Props. - // cannot be si:setArray / si:setMap / si:setObject - if self.context.prop_id() != PropId::NONE { - let prop = Prop::get_by_id(ctx, &self.context.prop_id()) - .await? - .ok_or_else(|| AttributeValueError::PropNotFound(self.context.prop_id()))?; + let func_binding_args = match value.to_owned() { + Some(value) => { + let func_arg_id = *FuncArgument::list_ids_for_func(ctx, func_id) + .await? + .first() + .ok_or(FuncArgumentError::IntrinsicMissingFuncArgumentEdge( + intrinsic_func.name().into(), + func_id, + ))?; - if *prop.kind() == PropKind::Array - || *prop.kind() == PropKind::Object - || *prop.kind() == PropKind::Map - { - let func_name = match *prop.kind() { - PropKind::Array => "si:setArray", - PropKind::Object => "si:setObject", - PropKind::Map => "si:setMap", - _ => unreachable!(), + let func_arg_name = { + let workspace_snapshot = ctx.workspace_snapshot()?.read().await; + workspace_snapshot + .get_node_weight_by_id(func_arg_id)? + .get_func_argument_node_weight()? + .name() + .to_owned() }; - let func = Func::find_by_attr(ctx, "name", &func_name) + AttributePrototypeArgument::new(ctx, prototype.id(), func_arg_id) .await? 
- .pop() - .ok_or_else(|| AttributeValueError::MissingFunc(func_name.to_owned()))?; - - if attribute_prototype.func_id() != *func.id() { - if let Some(unprocessed_value) = - func_binding_return_value.unprocessed_value().cloned() - { - AttributeValue::populate_nested_values( - ctx, - self.id, - self.context, - unprocessed_value, - ) - .await?; - } - } + .set_value_from_static_value(ctx, value.to_owned()) + .await?; + + serde_json::json!({ func_arg_name: value } ) } - } + None => serde_json::Value::Null, + }; + + let associated_component_id = AttributeValue::component_id(ctx, attribute_value_id).await?; + let before = before_funcs_for_component(ctx, &associated_component_id) + .await + .map_err(|e| AttributeValueError::BeforeFunc(e.to_string()))?; + + let (_, func_binding_return_value) = + match FuncBinding::create_and_execute(ctx, func_binding_args.clone(), func_id, before) + .instrument(debug_span!( + "Func execution", + "func.id" = %func_id, + ?func_binding_args, + )) + .await + { + Ok(function_return_value) => function_return_value, + Err(FuncBindingError::FuncBackendResultFailure { + kind, + message, + backend, + }) => { + return Err(AttributeValueError::FuncBackendResultFailure { + kind, + message, + backend, + }); + } + Err(err) => Err(err)?, + }; + Self::set_real_values( + ctx, + attribute_value_id, + func_binding_return_value.value().cloned(), + func_binding_return_value.unprocessed_value().cloned(), + func_binding_return_value.func_execution_pk(), + ) + .await?; Ok(()) } - pub async fn populate_child_proxies_for_value( - &self, + async fn set_materialized_view( ctx: &DalContext, - less_specific_attribute_value_id: AttributeValueId, - more_specific_context: AttributeContext, - ) -> AttributeValueResult>> { - let row = ctx.txns().await?.pg().query_one( - "SELECT new_proxy_value_ids FROM attribute_value_populate_child_proxies_for_value_v1($1, $2, $3, $4, $5)", - &[ - ctx.tenancy(), - ctx.visibility(), - &less_specific_attribute_value_id, - 
&more_specific_context, - self.id(), - ], - ).await?; - - // Are we part of a map or array? Be sure to update the index map - if self.key.is_some() { - ctx.txns() - .await? - .pg() - .query_opt( - "SELECT * FROM attribute_value_update_parent_index_map_v1($1, $2, $3)", - &[ctx.tenancy(), ctx.visibility(), self.id()], - ) - .await?; + attribute_value_id: AttributeValueId, + view: Option, + ) -> AttributeValueResult<()> { + let (av_idx, av_node_weight) = { + let workspace_snapshot = ctx.workspace_snapshot()?.read().await; + let av_idx = workspace_snapshot.get_node_index_by_id(attribute_value_id)?; + + ( + av_idx, + workspace_snapshot + .get_node_weight(av_idx)? + .get_attribute_value_node_weight()?, + ) + }; + + let content_view: Option = view.clone().map(Into::into); + + let view_address = match content_view { + Some(view) => Some(ctx.content_store().lock().await.add(&view)?), + None => None, + }; + + info!( + "set_materialized_view: {:?}, {:?}, {}", + &view, &view_address, attribute_value_id + ); + + let mut new_av_node_weight = + av_node_weight.new_with_incremented_vector_clock(ctx.change_set_pointer()?)?; + + new_av_node_weight.set_materialized_view(view_address.map(ContentAddress::JsonValue)); + + { + let mut workspace_snapshot = ctx.workspace_snapshot()?.write().await; + workspace_snapshot.add_node(NodeWeight::AttributeValue(new_av_node_weight))?; + workspace_snapshot.replace_references(av_idx)?; } - Ok(row.try_get("new_proxy_value_ids")?) 
+ info!("view set"); + + Ok(()) } - /// Get the controlling function id for a particular attribute value by it's id - /// This function id may be for a function on a parent of the attribute value - pub async fn get_controlling_func_id( + // todo: add func binding id and func binding return value id here to store on the attribute + // value, this will also mean creating those rows for "intrinsic" execution in set_value + async fn set_real_values( ctx: &DalContext, - component_id: ComponentId, - ) -> AttributeValueResult> { - let rows = ctx - .txns() - .await? - .pg() - .query( - FIND_CONTROLLING_FUNCS, - &[ctx.tenancy(), ctx.visibility(), &component_id], + attribute_value_id: AttributeValueId, + value: Option, + unprocessed_value: Option, + func_execution_pk: FuncExecutionPk, + ) -> AttributeValueResult<()> { + let (av_idx, av_node_weight) = { + let workspace_snapshot = ctx.workspace_snapshot()?.read().await; + + let av_idx = workspace_snapshot.get_node_index_by_id(attribute_value_id)?; + + ( + av_idx, + workspace_snapshot + .get_node_weight(av_idx)? + .get_attribute_value_node_weight()?, ) - .await?; + }; - #[derive(Clone, Debug, Deserialize)] - struct FuncInfo { - func_id: FuncId, - func_name: String, - attribute_value_id: AttributeValueId, - parent_av_ids: Vec, - } + let content_value: Option = value.map(Into::into); + let content_unprocessed_value: Option = + unprocessed_value.map(Into::into); - let func_infos: Vec = standard_model::objects_from_rows(rows)?; - let func_info_by_attribute_value_id: HashMap = func_infos - .iter() - .map(|info| (info.attribute_value_id, info.clone())) - .collect(); - let mut result = HashMap::new(); - - for (attribute_value_id, func_info) in &func_info_by_attribute_value_id { - let mut ancestor_func_info = func_info.clone(); - // The parent AV IDs are populated root -> leaf, but we're most interested - // in walking them leaf -> root. 
- let mut parent_av_ids = func_info.parent_av_ids.clone(); - parent_av_ids.reverse(); - for ancestor_av_id in parent_av_ids { - if let Some(parent_func_info) = func_info_by_attribute_value_id.get(&ancestor_av_id) - { - if !(parent_func_info.func_name == "si:setObject" - || parent_func_info.func_name == "si:setMap" - || parent_func_info.func_name == "si:setArray" - || parent_func_info.func_name == "si:setString" - || parent_func_info.func_name == "si:setInteger" - || parent_func_info.func_name == "si:setBoolean" - || parent_func_info.func_name == "si:unset") - { - ancestor_func_info = parent_func_info.clone(); - break; - } - } - } - result.insert( - *attribute_value_id, - ( - ancestor_func_info.func_id, - ancestor_func_info.attribute_value_id, - ancestor_func_info.func_name, - ), - ); + let value_address = match content_value { + Some(value) => Some(ctx.content_store().lock().await.add(&value)?), + None => None, + }; + + let unprocessed_value_address = match content_unprocessed_value { + Some(value) => Some(ctx.content_store().lock().await.add(&value)?), + None => None, + }; + + let mut new_av_node_weight = + av_node_weight.new_with_incremented_vector_clock(ctx.change_set_pointer()?)?; + + new_av_node_weight.set_value(value_address.map(ContentAddress::JsonValue)); + new_av_node_weight + .set_unprocessed_value(unprocessed_value_address.map(ContentAddress::JsonValue)); + new_av_node_weight.set_func_execution_pk(Some(func_execution_pk)); + + { + let mut workspace_snapshot = ctx.workspace_snapshot()?.write().await; + + workspace_snapshot.add_node(NodeWeight::AttributeValue(new_av_node_weight))?; + workspace_snapshot.replace_references(av_idx)?; } - Ok(result) + Ok(()) } - /// Get all attribute value ids with a boolean for each telling whether it is using a different prototype from the schema variant - pub async fn list_attributes_with_overridden( + pub async fn get_by_id( ctx: &DalContext, - component_id: ComponentId, - ) -> AttributeValueResult> { - let 
component_av_ctx = AttributeReadContext { - prop_id: None, - internal_provider_id: Some(InternalProviderId::NONE), - external_provider_id: Some(ExternalProviderId::NONE), - component_id: Some(component_id), - }; + attribute_value_id: AttributeValueId, + ) -> AttributeValueResult { + let workspace_snapshot = ctx.workspace_snapshot()?.read().await; - let prop_av_ctx = AttributeReadContext { - prop_id: None, - internal_provider_id: Some(InternalProviderId::NONE), - external_provider_id: Some(ExternalProviderId::NONE), - component_id: Some(ComponentId::NONE), - }; + let node_idx = workspace_snapshot.get_node_index_by_id(attribute_value_id)?; + let node_weight = workspace_snapshot + .get_node_weight(node_idx)? + .get_attribute_value_node_weight()?; - let rows = ctx - .txns() - .await? - .pg() - .query( - LIST_ATTRIBUTES_WITH_OVERRIDDEN, - &[ - ctx.tenancy(), - ctx.visibility(), - &prop_av_ctx, - &component_av_ctx, - &component_id, - ], - ) - .await?; + Ok(node_weight.into()) + } - let result: HashMap = HashMap::from_iter( - rows.iter() - .map(|row| (row.get("attribute_value_id"), row.get("overridden"))), - ); + pub async fn prop( + ctx: &DalContext, + attribute_value_id: AttributeValueId, + ) -> AttributeValueResult { + let workspace_snapshot = ctx.workspace_snapshot()?.read().await; - Ok(result) + let mut maybe_prop_id = None; + for target in workspace_snapshot.outgoing_targets_for_edge_weight_kind( + attribute_value_id, + EdgeWeightKindDiscriminants::Prop, + )? 
{ + let target_node_weight = workspace_snapshot.get_node_weight(target)?; + if let NodeWeight::Prop(prop_node_weight) = target_node_weight { + maybe_prop_id = match maybe_prop_id { + Some(already_found_prop_id) => { + return Err(AttributeValueError::MultiplePropsFound( + prop_node_weight.id().into(), + already_found_prop_id, + attribute_value_id, + )); + } + None => Some(target_node_weight.id().into()), + }; + } + } + + maybe_prop_id.ok_or(AttributeValueError::PropNotFound(attribute_value_id)) } - pub async fn remove_dependency_summaries_for_deleted_values( + async fn fetch_value_from_store( ctx: &DalContext, - ) -> AttributeValueResult<()> { - ctx.txns() - .await? - .pg() - .execute( - "SELECT clear_dependencies_for_deleted_values_v1($1, $2)", - &[ctx.tenancy(), ctx.visibility()], - ) - .await?; + maybe_content_address: Option, + ) -> AttributeValueResult> { + Ok(match maybe_content_address { + Some(value_address) => ctx + .content_store() + .lock() + .await + .get::(&value_address.content_hash()) + .await? 
+ .map(Into::into), + None => None, + }) + } - Ok(()) + pub async fn value(&self, ctx: &DalContext) -> AttributeValueResult> { + Self::fetch_value_from_store(ctx, self.value).await } -} -#[derive(Debug, Clone)] -pub struct AttributeValuePayload { - pub prop: Prop, - pub func_binding_return_value: Option, - pub attribute_value: AttributeValue, - pub parent_attribute_value_id: Option, -} + pub async fn unprocessed_value( + &self, + ctx: &DalContext, + ) -> AttributeValueResult> { + Self::fetch_value_from_store(ctx, self.unprocessed_value).await + } -impl AttributeValuePayload { - pub fn new( - prop: Prop, - func_binding_return_value: Option, - attribute_value: AttributeValue, - parent_attribute_value_id: Option, - ) -> Self { - Self { - prop, - func_binding_return_value, - attribute_value, - parent_attribute_value_id, - } + pub async fn materialized_view( + &self, + ctx: &DalContext, + ) -> AttributeValueResult> { + Self::fetch_value_from_store(ctx, self.materialized_view).await + } + + pub async fn func_execution( + &self, + ctx: &DalContext, + ) -> AttributeValueResult> { + Ok(match self.func_execution_pk { + Some(pk) => Some(FuncExecution::get_by_pk(ctx, &pk).await?), + None => None, + }) } } diff --git a/lib/dal/src/attribute/value/dependent_value_graph.rs b/lib/dal/src/attribute/value/dependent_value_graph.rs new file mode 100644 index 0000000000..91d3702d3c --- /dev/null +++ b/lib/dal/src/attribute/value/dependent_value_graph.rs @@ -0,0 +1,250 @@ +use petgraph::prelude::*; +use std::collections::{hash_map::Entry, HashMap, VecDeque}; +use tokio::{fs::File, io::AsyncWriteExt}; +use ulid::Ulid; + +use crate::{ + attribute::{ + prototype::{argument::AttributePrototypeArgument, AttributePrototype}, + value::ValueIsFor, + }, + workspace_snapshot::edge_weight::EdgeWeightKindDiscriminants, + DalContext, Prop, +}; + +use super::{AttributeValue, AttributeValueId, AttributeValueResult}; + +#[derive(Debug, Clone)] +pub struct DependentValueGraph { + graph: StableDiGraph, + 
id_to_index_map: HashMap, +} + +impl Default for DependentValueGraph { + fn default() -> Self { + Self::new() + } +} + +impl DependentValueGraph { + pub fn new() -> Self { + Self { + id_to_index_map: HashMap::new(), + graph: StableGraph::new(), + } + } + + /// Construct a [`DependentValueGraph`] of all the [`AttributeValueId`] whose values depend on + /// the value of the values in [`values`]. This includes the entire parent tree of each value + /// discovered, up to the root for every value's component, as well as any dependencies of + /// values discovered while walking the graph (e.g., if a value's prototype takes one of the + /// passed values as an input, we also need to find the values for the other inputs to the + /// prototype, etc.). + pub async fn for_values( + ctx: &DalContext, + values: Vec, + ) -> AttributeValueResult { + let mut dependent_value_graph = Self::new(); + + let mut work_queue = VecDeque::from_iter(values); + while let Some(current_attribute_value_id) = work_queue.pop_front() { + let current_component_id = + AttributeValue::component_id(ctx, current_attribute_value_id).await?; + let data_source_id: Ulid = AttributeValue::is_for(ctx, current_attribute_value_id) + .await? + .into(); + + // Gather the Attribute Prototype Arguments that take the thing the + // current value is for (prop, or provider/socket) as an input + let relevant_apas = { + let workspace_snapshot = ctx.workspace_snapshot()?.read().await; + + let attribute_prototype_argument_idxs = workspace_snapshot + .incoming_sources_for_edge_weight_kind( + data_source_id, + EdgeWeightKindDiscriminants::PrototypeArgumentValue, + )?; + + let mut relevant_apas = vec![]; + for apa_idx in attribute_prototype_argument_idxs { + let apa = workspace_snapshot + .get_node_weight(apa_idx)? 
+ .get_attribute_prototype_argument_node_weight()?; + + match apa.targets() { + // If there are no targets, this is a schema-level attribute prototype argument + None => relevant_apas.push(apa), + Some(targets) => { + if targets.source_component_id == current_component_id { + relevant_apas.push(apa) + } + } + } + } + relevant_apas + }; + + // Find the values that are set by the prototype for the relevant + // AttributePrototypeArguments, and declare that these values depend + // on the value of the current value + for apa in relevant_apas { + let prototype_id = + AttributePrototypeArgument::prototype_id_for_argument_id(ctx, apa.id().into()) + .await?; + + let attribute_value_ids = + AttributePrototype::attribute_value_ids(ctx, prototype_id).await?; + + for attribute_value_id in attribute_value_ids { + let filter_component_id = match apa.targets() { + None => current_component_id, + Some(targets) => targets.destination_component_id, + }; + let component_id = + AttributeValue::component_id(ctx, attribute_value_id).await?; + if component_id == filter_component_id { + work_queue.push_back(attribute_value_id); + dependent_value_graph + .value_depends_on(attribute_value_id, current_attribute_value_id); + } + } + } + + // Also walk up to the root and ensure each parent value of the + // current value depends on its child (down to the current value) + // This ensures that we update the materialized views of the parent + // tree + if let Some(parent_attribute_value_id) = + AttributeValue::parent_attribute_value_id(ctx, current_attribute_value_id).await? 
+ { + work_queue.push_back(parent_attribute_value_id); + dependent_value_graph + .value_depends_on(parent_attribute_value_id, current_attribute_value_id); + } + } + + Ok(dependent_value_graph) + } + + pub async fn debug_dot(&self, ctx: &DalContext, suffix: Option<&str>) { + let mut is_for_map = HashMap::new(); + + for attribute_value_id in self.id_to_index_map.keys() { + let is_for: String = match AttributeValue::is_for(ctx, *attribute_value_id) + .await + .expect("able to get value is for") + { + ValueIsFor::Prop(prop_id) => format!( + "prop = {}", + Prop::path_by_id(ctx, prop_id) + .await + .expect("able to get prop path") + .with_replaced_sep("/"), + ), + ValueIsFor::ExternalProvider(_) => "output socket".into(), + ValueIsFor::InternalProvider(_) => "input socket".into(), + }; + is_for_map.insert(*attribute_value_id, is_for); + } + + let label_value_fn = + move |_: &StableDiGraph, + (_, attribute_value_id): (NodeIndex, &AttributeValueId)| { + let attribute_value_id = *attribute_value_id; + let is_for = is_for_map.clone(); + + let is_for_string = is_for + .clone() + .get(&attribute_value_id) + .map(ToOwned::to_owned) + .expect("is for exists for every value"); + + format!("label = \"{}\n{}\"", attribute_value_id, is_for_string) + }; + + let dot = petgraph::dot::Dot::with_attr_getters( + &self.graph, + &[ + petgraph::dot::Config::NodeNoLabel, + petgraph::dot::Config::EdgeNoLabel, + ], + &|_, _| "label = \"\"".to_string(), + &label_value_fn, + ); + + let filename_no_extension = format!( + "{}-{}", + Ulid::new().to_string(), + suffix.unwrap_or("depgraph") + ); + let mut file = File::create(format!("/home/zacharyhamm/{filename_no_extension}.txt")) + .await + .expect("could not create file"); + + file.write_all(format!("{dot:?}").as_bytes()) + .await + .expect("could not write file"); + println!("dot output stored in file (filename without extension: {filename_no_extension})"); + } + + pub fn add_value(&mut self, value_id: AttributeValueId) -> NodeIndex { + match 
self.id_to_index_map.entry(value_id) { + Entry::Vacant(entry) => { + let node_idx = self.graph.add_node(value_id); + entry.insert(node_idx); + + node_idx + } + Entry::Occupied(entry) => *entry.get(), + } + } + + pub fn value_depends_on( + &mut self, + value_id: AttributeValueId, + depends_on_id: AttributeValueId, + ) { + let value_idx = self.add_value(value_id); + let depends_on_idx = self.add_value(depends_on_id); + + self.graph.add_edge(value_idx, depends_on_idx, ()); + } + + pub fn contains_value(&self, value_id: AttributeValueId) -> bool { + self.id_to_index_map.get(&value_id).is_some() + } + + pub fn direct_dependencies_of(&self, value_id: AttributeValueId) -> Vec { + match self.id_to_index_map.get(&value_id) { + None => vec![], + Some(value_idx) => self + .graph + .edges_directed(*value_idx, Outgoing) + .filter_map(|edge_ref| self.graph.node_weight(edge_ref.target()).copied()) + .collect(), + } + } + + pub fn remove_value(&mut self, value_id: AttributeValueId) { + if let Some(node_idx) = self.id_to_index_map.remove(&value_id) { + self.graph.remove_node(node_idx); + } + } + + pub fn cycle_on_self(&mut self, value_id: AttributeValueId) { + if let Some(node_idx) = self.id_to_index_map.get(&value_id) { + self.graph.add_edge(*node_idx, *node_idx, ()); + } + } + + pub fn independent_values(&self) -> Vec { + self.graph + .externals(Outgoing) + .filter_map(|node_idx| self.graph.node_weight(node_idx).copied()) + .collect() + } + + pub fn into_graph(self) -> StableDiGraph { + self.graph + } +} diff --git a/lib/dal/src/attribute/value/view.rs b/lib/dal/src/attribute/value/view.rs index f766df0aca..47e6815688 100644 --- a/lib/dal/src/attribute/value/view.rs +++ b/lib/dal/src/attribute/value/view.rs @@ -1,254 +1,253 @@ -//! This module contains the [`AttributeView`] struct and its methods. This object does not exist -//! in the database. 
- -use serde_json::Value; -use std::collections::{HashMap, VecDeque}; -use telemetry::prelude::*; - -use crate::{ - AttributeReadContext, AttributeValue, AttributeValueError, AttributeValueId, - AttributeValuePayload, AttributeValueResult, DalContext, Prop, PropError, PropKind, - StandardModel, -}; - -/// A generated view for an [`AttributeReadContext`](crate::AttributeReadContext) and an optional -/// root [`AttributeValueId`](crate::AttributeValue). The requirements for the context are laid -/// out in [`Self::new()`]. -#[derive(Debug)] -pub struct AttributeView { - /// The value that was generated from [`Self::new()`]. This can also be referred to as the - /// "properties" or "tree" of the view. - value: Value, - json_pointer_for_attribute_value_id: HashMap, -} - -impl AttributeView { - /// Generates an [`AttributeView`] with an [`AttributeReadContext`](crate::AttributeReadContext) - /// and an optional root [`AttributeValueId`](crate::AttributeValue). The context's requirements - /// are specified in the following locations: - /// - /// - If the root is _not_ provided: [`AttributeValue::list_payload_for_read_context()`] - /// - If the root is provided: [`AttributeValue::list_payload_for_read_context_and_root()`] - /// - /// The view is generated based on the [`AttributeValuePayloads`](crate::AttributeValuePayload) - /// found, including their corresponding [`Props`](crate::Prop). Usually, the root should be - /// provided if a view is desired for any given context and "location" in the object value. If - /// the [`SchemaVariant`](crate::SchemaVariant) is known and you only desire to generate a view - /// for the entire value, you do not need to provide the root. 
- pub async fn new( - ctx: &DalContext, - attribute_read_context: AttributeReadContext, - root_attribute_value_id: Option, - ) -> AttributeValueResult { - let mut initial_work = match root_attribute_value_id { - Some(root_attribute_value_id) => { - AttributeValue::list_payload_for_read_context_and_root( - ctx, - root_attribute_value_id, - attribute_read_context, - ) - .await? - } - None => { - AttributeValue::list_payload_for_read_context(ctx, attribute_read_context).await? - } - }; - - // When we have a parent AttributeValueId (K: AttributeValueId), we need to know where in - // the structure we need to insert the value we are working with (V: String). - let mut json_pointer_for_attribute_value_id: HashMap = - HashMap::new(); - - // Handle scenarios where we are generating views starting anywhere other than the root - // of a prop tree. - let maybe_parent_attribute_value_id = - if let Some(root_attribute_value_id) = root_attribute_value_id { - let root_attribute_value = AttributeValue::get_by_id(ctx, &root_attribute_value_id) - .await? - .ok_or(AttributeValueError::Missing)?; - root_attribute_value - .parent_attribute_value(ctx) - .await? - .map(|av| *av.id()) - } else { - None - }; - if let Some(parent_attribute_value_id) = maybe_parent_attribute_value_id { - json_pointer_for_attribute_value_id.insert(parent_attribute_value_id, "".to_string()); - } - - // We sort the work queue according to the order of every nested IndexMap. This ensures that - // when we reconstruct the final shape, we don't have to worry about the order that things - // appear in. 
- let attribute_value_order: Vec = initial_work - .iter() - .filter_map(|avp| avp.attribute_value.index_map()) - .flat_map(|index_map| index_map.order()) - .copied() - .collect(); - initial_work.sort_by_cached_key(|avp| { - attribute_value_order - .iter() - .position(|attribute_value_id| attribute_value_id == avp.attribute_value.id()) - .unwrap_or(0) - }); - - // We need the work queue to be a VecDeque so we can pop elements off of the front - // as it's supposed to be a queue, not a stack. - let mut work_queue: VecDeque = VecDeque::from(initial_work); - - let mut properties = serde_json::json![{}]; - let mut root_stack: Vec<(Option, String)> = - vec![(maybe_parent_attribute_value_id, "".to_string())]; - - while !work_queue.is_empty() { - let mut unprocessed: Vec = vec![]; - if root_stack.is_empty() { - warn!( - "Unexpected empty root stack with work_queue: {:?}", - &work_queue - ); - break; - } - let (root_id, json_pointer) = root_stack.pop().ok_or_else(|| { - error!( - "unexpected empty root stack, current work queue state: {:?}", - work_queue - ); - AttributeValueError::UnexpectedEmptyRootStack - })?; - - while let Some(AttributeValuePayload { - prop, - func_binding_return_value, - attribute_value, - parent_attribute_value_id, - }) = work_queue.pop_front() - { - if let Some(func_binding_return_value) = func_binding_return_value { - if let Some(found_value) = func_binding_return_value.value() { - if root_id == parent_attribute_value_id { - let insertion_pointer = - if let Some(parent_avi) = parent_attribute_value_id { - match json_pointer_for_attribute_value_id.get(&parent_avi) { - Some(ptr) => ptr.clone(), - // A `None` here would mean that we're trying to process a child before we've handled its parent, - // and that shouldn't be possible given how we're going through the work_queue. - None => unreachable!(), - } - } else { - // After we've processed the "root" property, we shouldn't hit this case any more. 
- json_pointer.clone() - }; - - let write_location = match properties.pointer_mut(&insertion_pointer) { - Some(write_location) => write_location, - None => { - return Err(AttributeValueError::BadJsonPointer( - insertion_pointer.clone(), - properties.to_string(), - )); - } - }; - let next_json_pointer = - if let Some(object) = write_location.as_object_mut() { - if let Some(key) = attribute_value.key() { - object.insert(key.to_string(), found_value.clone()); - format!("{insertion_pointer}/{key}") - } else { - object.insert(prop.name().to_string(), found_value.clone()); - format!("{}/{}", insertion_pointer, prop.name()) - } - } else if let Some(array) = write_location.as_array_mut() { - // This code can just push, because we ordered the work queue above. - // Magic! - array.push(found_value.clone()); - format!("{}/{}", insertion_pointer, array.len() - 1) - } else { - // Note: this shouldn't ever actually get used. - insertion_pointer.to_string() - }; - // Record the json pointer path to this specific attribute value's location. - json_pointer_for_attribute_value_id - .insert(*attribute_value.id(), next_json_pointer.clone()); - - match prop.kind() { - &PropKind::Object | &PropKind::Array | &PropKind::Map => { - root_stack - .push((Some(*attribute_value.id()), next_json_pointer)); - } - _ => {} - } - } else { - unprocessed.push(AttributeValuePayload::new( - prop, - Some(func_binding_return_value), - attribute_value, - parent_attribute_value_id, - )); - } - } - } - } - work_queue = VecDeque::from(unprocessed); - } - - if let Some(root_attribute_value_id) = root_attribute_value_id { - let root_json_pointer = match json_pointer_for_attribute_value_id - .get(&root_attribute_value_id) - { - Some(pointer) => pointer, - None => { - let root_attribute_value = - AttributeValue::get_by_id(ctx, &root_attribute_value_id) - .await? 
- .ok_or_else(|| { - AttributeValueError::NotFound( - root_attribute_value_id, - *ctx.visibility(), - ) - })?; - let root_prop = Prop::get_by_id(ctx, &root_attribute_value.context.prop_id()) - .await? - .ok_or_else(|| { - PropError::NotFound( - root_attribute_value.context.prop_id(), - *ctx.visibility(), - ) - }) - .map_err(Box::new)?; - debug!("\ - likely tried to build an attribute view for an attribute value that is unset, \ - so the \"properties\" object is empty ({:?}), and does not contain a key matching \ - our prop's name (root attribute value ({:?}) and root prop ({:?}))", properties, root_attribute_value, root_prop - ); - return Ok(Self { - value: Value::Null, - json_pointer_for_attribute_value_id, - }); - } - }; - - let properties = properties - .pointer(root_json_pointer) - .ok_or(AttributeValueError::NoValueForJsonPointer)?; - return Ok(Self { - value: properties.to_owned(), - json_pointer_for_attribute_value_id, - }); - } - - Ok(Self { - value: properties.to_owned(), - json_pointer_for_attribute_value_id, - }) - } - - pub fn value(&self) -> &serde_json::Value { - &self.value - } - - pub fn json_pointers_for_attribute_value_id(&self) -> &HashMap { - &self.json_pointer_for_attribute_value_id - } -} +// //! This module contains the [`AttributeView`] struct and its methods. This object does not exist +// //! in the database. + +// use serde_json::Value; +// use std::collections::{HashMap, VecDeque}; +// use telemetry::prelude::*; + +// use crate::{ +// AttributeReadContext, AttributeValue, AttributeValueId, AttributeValuePayload, DalContext, +// Prop, PropKind, StandardModel, +// }; + +// /// A generated view for an [`AttributeReadContext`](crate::AttributeReadContext) and an optional +// /// root [`AttributeValueId`](crate::AttributeValue). The requirements for the context are laid +// /// out in [`Self::new()`]. +// #[derive(Debug)] +// pub struct AttributeView { +// /// The value that was generated from [`Self::new()`]. 
This can also be referred to as the +// /// "properties" or "tree" of the view. +// value: Value, +// json_pointer_for_attribute_value_id: HashMap, +// } + +// impl AttributeView { +// /// Generates an [`AttributeView`] with an [`AttributeReadContext`](crate::AttributeReadContext) +// /// and an optional root [`AttributeValueId`](crate::AttributeValue). The context's requirements +// /// are specified in the following locations: +// /// +// /// - If the root is _not_ provided: [`AttributeValue::list_payload_for_read_context()`] +// /// - If the root is provided: [`AttributeValue::list_payload_for_read_context_and_root()`] +// /// +// /// The view is generated based on the [`AttributeValuePayloads`](crate::AttributeValuePayload) +// /// found, including their corresponding [`Props`](crate::Prop). Usually, the root should be +// /// provided if a view is desired for any given context and "location" in the object value. If +// /// the [`SchemaVariant`](crate::SchemaVariant) is known and you only desire to generate a view +// /// for the entire value, you do not need to provide the root. +// pub async fn new( +// ctx: &DalContext, +// attribute_read_context: AttributeReadContext, +// root_attribute_value_id: Option, +// ) -> AttributeValueResult { +// let mut initial_work = match root_attribute_value_id { +// Some(root_attribute_value_id) => { +// AttributeValue::list_payload_for_read_context_and_root( +// ctx, +// root_attribute_value_id, +// attribute_read_context, +// ) +// .await? +// } +// None => { +// AttributeValue::list_payload_for_read_context(ctx, attribute_read_context).await? +// } +// }; + +// // When we have a parent AttributeValueId (K: AttributeValueId), we need to know where in +// // the structure we need to insert the value we are working with (V: String). +// let mut json_pointer_for_attribute_value_id: HashMap = +// HashMap::new(); + +// // Handle scenarios where we are generating views starting anywhere other than the root +// // of a prop tree. 
+// let maybe_parent_attribute_value_id = +// if let Some(root_attribute_value_id) = root_attribute_value_id { +// let root_attribute_value = AttributeValue::get_by_id(ctx, &root_attribute_value_id) +// .await? +// .ok_or(AttributeValueError::Missing)?; +// root_attribute_value +// .parent_attribute_value(ctx) +// .await? +// .map(|av| *av.id()) +// } else { +// None +// }; +// if let Some(parent_attribute_value_id) = maybe_parent_attribute_value_id { +// json_pointer_for_attribute_value_id.insert(parent_attribute_value_id, "".to_string()); +// } + +// // We sort the work queue according to the order of every nested IndexMap. This ensures that +// // when we reconstruct the final shape, we don't have to worry about the order that things +// // appear in. +// let attribute_value_order: Vec = initial_work +// .iter() +// .filter_map(|avp| avp.attribute_value.index_map()) +// .flat_map(|index_map| index_map.order()) +// .copied() +// .collect(); +// initial_work.sort_by_cached_key(|avp| { +// attribute_value_order +// .iter() +// .position(|attribute_value_id| attribute_value_id == avp.attribute_value.id()) +// .unwrap_or(0) +// }); + +// // We need the work queue to be a VecDeque so we can pop elements off of the front +// // as it's supposed to be a queue, not a stack. 
+// let mut work_queue: VecDeque = VecDeque::from(initial_work); + +// let mut properties = serde_json::json![{}]; +// let mut root_stack: Vec<(Option, String)> = +// vec![(maybe_parent_attribute_value_id, "".to_string())]; + +// while !work_queue.is_empty() { +// let mut unprocessed: Vec = vec![]; +// if root_stack.is_empty() { +// warn!( +// "Unexpected empty root stack with work_queue: {:?}", +// &work_queue +// ); +// break; +// } +// let (root_id, json_pointer) = root_stack.pop().ok_or_else(|| { +// error!( +// "unexpected empty root stack, current work queue state: {:?}", +// work_queue +// ); +// AttributeValueError::UnexpectedEmptyRootStack +// })?; + +// while let Some(AttributeValuePayload { +// prop, +// func_binding_return_value, +// attribute_value, +// parent_attribute_value_id, +// }) = work_queue.pop_front() +// { +// if let Some(func_binding_return_value) = func_binding_return_value { +// if let Some(found_value) = func_binding_return_value.value() { +// if root_id == parent_attribute_value_id { +// let insertion_pointer = +// if let Some(parent_avi) = parent_attribute_value_id { +// match json_pointer_for_attribute_value_id.get(&parent_avi) { +// Some(ptr) => ptr.clone(), +// // A `None` here would mean that we're trying to process a child before we've handled its parent, +// // and that shouldn't be possible given how we're going through the work_queue. +// None => unreachable!(), +// } +// } else { +// // After we've processed the "root" property, we shouldn't hit this case any more. 
+// json_pointer.clone() +// }; + +// let write_location = match properties.pointer_mut(&insertion_pointer) { +// Some(write_location) => write_location, +// None => { +// return Err(AttributeValueError::BadJsonPointer( +// insertion_pointer.clone(), +// properties.to_string(), +// )); +// } +// }; +// let next_json_pointer = +// if let Some(object) = write_location.as_object_mut() { +// if let Some(key) = attribute_value.key() { +// object.insert(key.to_string(), found_value.clone()); +// format!("{insertion_pointer}/{key}") +// } else { +// object.insert(prop.name().to_string(), found_value.clone()); +// format!("{}/{}", insertion_pointer, prop.name()) +// } +// } else if let Some(array) = write_location.as_array_mut() { +// // This code can just push, because we ordered the work queue above. +// // Magic! +// array.push(found_value.clone()); +// format!("{}/{}", insertion_pointer, array.len() - 1) +// } else { +// // Note: this shouldn't ever actually get used. +// insertion_pointer.to_string() +// }; +// // Record the json pointer path to this specific attribute value's location. +// json_pointer_for_attribute_value_id +// .insert(*attribute_value.id(), next_json_pointer.clone()); + +// match prop.kind() { +// &PropKind::Object | &PropKind::Array | &PropKind::Map => { +// root_stack +// .push((Some(*attribute_value.id()), next_json_pointer)); +// } +// _ => {} +// } +// } else { +// unprocessed.push(AttributeValuePayload::new( +// prop, +// Some(func_binding_return_value), +// attribute_value, +// parent_attribute_value_id, +// )); +// } +// } +// } +// } +// work_queue = VecDeque::from(unprocessed); +// } + +// if let Some(root_attribute_value_id) = root_attribute_value_id { +// let root_json_pointer = match json_pointer_for_attribute_value_id +// .get(&root_attribute_value_id) +// { +// Some(pointer) => pointer, +// None => { +// let root_attribute_value = +// AttributeValue::get_by_id(ctx, &root_attribute_value_id) +// .await? 
+// .ok_or_else(|| { +// AttributeValueError::NotFound( +// root_attribute_value_id, +// *ctx.visibility(), +// ) +// })?; +// let root_prop = Prop::get_by_id(ctx, &root_attribute_value.context.prop_id()) +// .await? +// .ok_or_else(|| { +// PropError::NotFound( +// root_attribute_value.context.prop_id(), +// *ctx.visibility(), +// ) +// }) +// .map_err(Box::new)?; +// debug!("\ +// likely tried to build an attribute view for an attribute value that is unset, \ +// so the \"properties\" object is empty ({:?}), and does not contain a key matching \ +// our prop's name (root attribute value ({:?}) and root prop ({:?}))", properties, root_attribute_value, root_prop +// ); +// return Ok(Self { +// value: Value::Null, +// json_pointer_for_attribute_value_id, +// }); +// } +// }; + +// let properties = properties +// .pointer(root_json_pointer) +// .ok_or(AttributeValueError::NoValueForJsonPointer)?; +// return Ok(Self { +// value: properties.to_owned(), +// json_pointer_for_attribute_value_id, +// }); +// } + +// Ok(Self { +// value: properties.to_owned(), +// json_pointer_for_attribute_value_id, +// }) +// } + +// pub fn value(&self) -> &serde_json::Value { +// &self.value +// } + +// pub fn json_pointers_for_attribute_value_id(&self) -> &HashMap { +// &self.json_pointer_for_attribute_value_id +// } +// } diff --git a/lib/dal/src/authentication_prototype.rs b/lib/dal/src/authentication_prototype.rs index f35e4db15f..d4dedd7fc5 100644 --- a/lib/dal/src/authentication_prototype.rs +++ b/lib/dal/src/authentication_prototype.rs @@ -7,20 +7,11 @@ use si_data_nats::NatsError; use si_data_pg::PgError; use telemetry::prelude::*; -use crate::authentication_prototype::AuthenticationPrototypeError::AuthAlreadySet; use crate::{ - component::view::ComponentViewError, impl_standard_model, pk, standard_model, - standard_model_accessor, ComponentId, DalContext, FuncBindingError, - FuncBindingReturnValueError, FuncId, HistoryEventError, SchemaVariantId, StandardModel, - 
StandardModelError, Tenancy, Timestamp, TransactionsError, Visibility, WsEventError, + pk, ComponentId, FuncId, HistoryEventError, SchemaVariantId, StandardModelError, + TransactionsError, WsEventError, }; -const FIND_FOR_CONTEXT: &str = - include_str!("./queries/authentication_prototype/find_for_context.sql"); -const FIND_FOR_FUNC: &str = include_str!("./queries/authentication_prototype/find_for_func.sql"); -const FIND_FOR_CONTEXT_AND_FUNC: &str = - include_str!("./queries/authentication_prototype/find_for_context_and_func.sql"); - #[remain::sorted] #[derive(Error, Debug)] pub enum AuthenticationPrototypeError { @@ -30,12 +21,6 @@ pub enum AuthenticationPrototypeError { Component(String), #[error("component not found: {0}")] ComponentNotFound(ComponentId), - #[error(transparent)] - ComponentView(#[from] ComponentViewError), - #[error(transparent)] - FuncBinding(#[from] FuncBindingError), - #[error(transparent)] - FuncBindingReturnValue(#[from] FuncBindingReturnValueError), #[error("action Func {0} not found for ActionPrototype {1}")] FuncNotFound(FuncId, AuthenticationPrototypeId), #[error("history event error: {0}")] @@ -86,124 +71,13 @@ impl AuthenticationPrototypeContext { } } -pk!(AuthenticationPrototypePk); pk!(AuthenticationPrototypeId); // An ActionPrototype joins a `FuncId` to a `SchemaVariantId` with a `ActionKind` and `name` +// This only exists for deserialization of the import data #[derive(Deserialize, Serialize, Debug, Clone, PartialEq, Eq)] pub struct AuthenticationPrototype { - pk: AuthenticationPrototypePk, - id: AuthenticationPrototypeId, - func_id: FuncId, - schema_variant_id: SchemaVariantId, - #[serde(flatten)] - tenancy: Tenancy, - #[serde(flatten)] - timestamp: Timestamp, - #[serde(flatten)] - visibility: Visibility, -} - -impl_standard_model! 
{ - model: AuthenticationPrototype, - pk: AuthenticationPrototypePk, - id: AuthenticationPrototypeId, - table_name: "authentication_prototypes", - history_event_label_base: "authentication_prototypes", - history_event_message_name: "Authentication Prototype" -} - -impl AuthenticationPrototype { - #[allow(clippy::too_many_arguments)] - pub async fn new( - ctx: &DalContext, - func_id: FuncId, - context: AuthenticationPrototypeContext, - ) -> AuthenticationPrototypeResult { - if !Self::find_for_context(ctx, context).await?.is_empty() { - return Err(AuthAlreadySet(context.schema_variant_id)); - } - - let row = ctx - .txns() - .await? - .pg() - .query_one( - "SELECT object FROM authentication_prototype_create_v1($1, $2, $3, $4)", - &[ - ctx.tenancy(), - ctx.visibility(), - &func_id, - &context.schema_variant_id(), - ], - ) - .await?; - let object = standard_model::finish_create_from_row(ctx, row).await?; - Ok(object) - } - - pub async fn find_for_context( - ctx: &DalContext, - context: AuthenticationPrototypeContext, - ) -> AuthenticationPrototypeResult> { - let rows = ctx - .txns() - .await? - .pg() - .query( - FIND_FOR_CONTEXT, - &[ - ctx.tenancy(), - ctx.visibility(), - &context.schema_variant_id(), - ], - ) - .await?; - - Ok(standard_model::objects_from_rows(rows)?) - } - - pub async fn find_for_func( - ctx: &DalContext, - func_id: FuncId, - ) -> AuthenticationPrototypeResult> { - let rows = ctx - .txns() - .await? - .pg() - .query(FIND_FOR_FUNC, &[ctx.tenancy(), ctx.visibility(), &func_id]) - .await?; - - Ok(standard_model::objects_from_rows(rows)?) - } - - pub async fn find_for_context_and_func( - ctx: &DalContext, - context: &AuthenticationPrototypeContext, - func_id: FuncId, - ) -> AuthenticationPrototypeResult> { - let rows = ctx - .txns() - .await? - .pg() - .query( - FIND_FOR_CONTEXT_AND_FUNC, - &[ - ctx.tenancy(), - ctx.visibility(), - &context.schema_variant_id(), - &func_id, - ], - ) - .await?; - - Ok(standard_model::objects_from_rows(rows)?) 
- } - - standard_model_accessor!( - schema_variant_id, - Pk(SchemaVariantId), - AuthenticationPrototypeResult - ); - standard_model_accessor!(func_id, Pk(FuncId), AuthenticationPrototypeResult); + pub id: AuthenticationPrototypeId, + pub func_id: FuncId, + pub schema_variant_id: SchemaVariantId, } diff --git a/lib/dal/src/builtins.rs b/lib/dal/src/builtins.rs index ea7ecc02b1..83b5b959bc 100644 --- a/lib/dal/src/builtins.rs +++ b/lib/dal/src/builtins.rs @@ -9,20 +9,12 @@ use thiserror::Error; use si_pkg::{SiPkgError, SpecError}; -use crate::func::argument::FuncArgumentError; -use crate::func::binding::FuncBindingError; -use crate::func::binding_return_value::FuncBindingReturnValueError; +use crate::func::FuncError; use crate::installed_pkg::InstalledPkgError; use crate::pkg::PkgError; -use crate::provider::external::ExternalProviderError; -use crate::provider::internal::InternalProviderError; -use crate::schema::variant::definition::SchemaVariantDefinitionError; -use crate::schema::variant::SchemaVariantError; -use crate::socket::SocketError; +// use crate::schema::variant::definition::SchemaVariantDefinitionError; use crate::{ - AttributeContextBuilderError, AttributePrototypeArgumentError, AttributePrototypeError, - AttributeReadContext, AttributeValueError, AttributeValueId, DalContext, ExternalProviderId, - FuncError, InternalProviderId, PropError, PropId, SchemaError, SchemaVariantId, + AttributeValueId, DalContext, ExternalProviderId, InternalProviderId, PropId, SchemaVariantId, StandardModelError, TransactionsError, }; @@ -43,36 +35,18 @@ pub const SI_AWS_LB_TARGET_GROUP_PKG: &str = "si-aws-lb-target-group-2023-12-05. 
#[remain::sorted] #[derive(Error, Debug)] pub enum BuiltinsError { - #[error("attribute context builder error: {0}")] - AttributeContextBuilder(#[from] AttributeContextBuilderError), - #[error("attribute prototype error: {0}")] - AttributePrototype(#[from] AttributePrototypeError), - #[error("attribute prototype argument error: {0}")] - AttributePrototypeArgument(#[from] AttributePrototypeArgumentError), - #[error("attribute value error: {0}")] - AttributeValue(#[from] AttributeValueError), #[error("attribute value not found by id: {0}")] AttributeValueNotFound(AttributeValueId), - #[error("attribute value not found for attribute read context: {0:?}")] - AttributeValueNotFoundForContext(AttributeReadContext), #[error("builtin {0} missing func argument {1}")] BuiltinMissingFuncArgument(String, String), #[error("explicit internal provider not found by name: {0}")] ExplicitInternalProviderNotFound(String), - #[error("external provider error: {0}")] - ExternalProvider(#[from] ExternalProviderError), #[error("external provider not found by name: {0}")] ExternalProviderNotFound(String), #[error("Filesystem IO error: {0}")] FilesystemIO(#[from] std::io::Error), - #[error("func error: {0}")] + #[error(transparent)] Func(#[from] FuncError), - #[error("func argument error: {0}")] - FuncArgument(#[from] FuncArgumentError), - #[error("func binding error: {0}")] - FuncBinding(#[from] FuncBindingError), - #[error("func binding return value error: {0}")] - FuncBindingReturnValue(#[from] FuncBindingReturnValueError), #[error("json error {1} at file {0}")] FuncJson(String, serde_json::Error), #[error("Func Metadata error: {0}")] @@ -83,8 +57,6 @@ pub enum BuiltinsError { ImplicitInternalProviderNotFoundForProp(PropId), #[error(transparent)] InstalledPkg(#[from] InstalledPkgError), - #[error("internal provider error: {0}")] - InternalProvider(#[from] InternalProviderError), #[error("missing attribute prototype for attribute value")] MissingAttributePrototypeForAttributeValue, 
#[error("missing attribute prototype for explicit internal provider: {0}")] @@ -95,28 +67,18 @@ pub enum BuiltinsError { MissingPkgsPath, #[error(transparent)] Pkg(#[from] PkgError), - #[error("prop error: {0}")] - Prop(#[from] PropError), #[error("prop cache not found: {0}")] PropCacheNotFound(SchemaVariantId), #[error("prop not bound by id: {0}")] PropNotFound(PropId), #[error("Regex parsing error: {0}")] Regex(#[from] regex::Error), - #[error("schema error: {0}")] - Schema(#[from] SchemaError), - #[error("schema variant error: {0}")] - SchemaVariant(#[from] SchemaVariantError), - #[error("schema variant definition error")] - SchemaVariantDefinition(#[from] SchemaVariantDefinitionError), #[error("serde json error: {0}")] SerdeJson(#[from] serde_json::Error), #[error("encountered serde json error for func ({0}): {1}")] SerdeJsonErrorForFunc(String, serde_json::Error), #[error(transparent)] SiPkg(#[from] SiPkgError), - #[error("socket error: {0}")] - Socket(#[from] SocketError), #[error(transparent)] Spec(#[from] SpecError), #[error("standard model error: {0}")] @@ -146,23 +108,35 @@ pub enum SelectedTestBuiltinSchemas { /// Migrate all local "builtins" in a definitive order. pub async fn migrate_local( ctx: &DalContext, - selected_test_builtin_schemas: Option, + _selected_test_builtin_schemas: Option, ) -> BuiltinsResult<()> { info!("migrating intrinsic functions"); func::migrate_intrinsics(ctx).await?; - info!("migrating builtin functions"); - func::migrate(ctx).await?; + info!("intrinsics migrated"); + // info!("migrating builtin functions"); + // func::migrate(ctx).await?; + + // FIXME(nick): restore builtin migration functionality for all variants. 
+ info!("migrate minimal number of schemas for testing the new engine"); + + schema::migrate_pkg(ctx, SI_DOCKER_IMAGE_PKG, None).await?; + schema::migrate_pkg(ctx, SI_COREOS_PKG, None).await?; + schema::migrate_pkg(ctx, SI_AWS_EC2_PKG, None).await?; + schema::migrate_pkg(ctx, SI_AWS_PKG, None).await?; + schema::migrate_test_exclusive_schema_starfield(ctx).await?; + schema::migrate_test_exclusive_schema_fallout(ctx).await?; + schema::migrate_test_exclusive_schema_bethesda_secret(ctx).await?; - match selected_test_builtin_schemas { - Some(found_selected_test_builtin_schemas) => { - schema::migrate_local_only_test_schemas(ctx, found_selected_test_builtin_schemas) - .await?; - } - None => { - schema::migrate_local_all_schemas(ctx).await?; - } - } + // match selected_test_builtin_schemas { + // Some(found_selected_test_builtin_schemas) => { + // schema::migrate_local_only_test_schemas(ctx, found_selected_test_builtin_schemas) + // .await?; + // } + // None => { + // schema::migrate_local_all_schemas(ctx).await?; + // } + // } - info!("completed migrating functions, workflows and schemas"); + // info!("completed migrating functions, workflows and schemas"); Ok(()) } diff --git a/lib/dal/src/builtins/func.rs b/lib/dal/src/builtins/func.rs index 6a4706201f..918719218b 100644 --- a/lib/dal/src/builtins/func.rs +++ b/lib/dal/src/builtins/func.rs @@ -1,38 +1,28 @@ -use base64::engine::general_purpose; -use base64::Engine; -use serde::{Deserialize, Serialize}; use si_pkg::SiPkg; -use telemetry::prelude::*; use crate::{ - func::{ - argument::{FuncArgument, FuncArgumentKind}, - intrinsics::IntrinsicFunc, - }, - installed_pkg::InstalledPkg, - pkg::import_pkg_from_pkg, - BuiltinsError, BuiltinsResult, DalContext, Func, FuncBackendKind, FuncBackendResponseType, - StandardModel, + func::intrinsics::IntrinsicFunc, installed_pkg::InstalledPkg, pkg::import_pkg_from_pkg, + BuiltinsResult, DalContext, }; -#[derive(Deserialize, Serialize, Debug)] -struct FunctionMetadataArgument { - name: 
String, - kind: FuncArgumentKind, -} - -#[derive(Deserialize, Serialize, Debug)] -struct FunctionMetadata { - kind: FuncBackendKind, - arguments: Option>, - response_type: FuncBackendResponseType, - hidden: Option, - display_name: Option, - description: Option, - link: Option, - code_file: Option, - code_entrypoint: Option, -} +// #[derive(Deserialize, Serialize, Debug)] +// struct FunctionMetadataArgument { +// name: String, +// kind: FuncArgumentKind, +// } +// +// #[derive(Deserialize, Serialize, Debug)] +// struct FunctionMetadata { +// kind: FuncBackendKind, +// arguments: Option>, +// response_type: FuncBackendResponseType, +// hidden: Option, +// display_name: Option, +// description: Option, +// link: Option, +// code_file: Option, +// code_entrypoint: Option, +// } /// We want the src/builtins/func/** files to be available at run time inside of the Docker container /// that we build, but it would be nice to not have to include arbitrary bits of the source tree when @@ -40,12 +30,14 @@ struct FunctionMetadata { /// in memory. /// /// The instances of this end up in a magic `ASSETS` array const. 
+#[allow(dead_code)] #[iftree::include_file_tree("paths = '/src/builtins/func/**'")] pub struct FuncBuiltin { relative_path: &'static str, contents_str: &'static str, } +#[allow(dead_code)] static FUNC_BUILTIN_BY_PATH: once_cell::sync::Lazy> = once_cell::sync::Lazy::new(|| { ASSETS @@ -70,123 +62,123 @@ pub async fn migrate_intrinsics(ctx: &DalContext) -> BuiltinsResult<()> { Ok(()) } -pub async fn migrate(ctx: &DalContext) -> BuiltinsResult<()> { - for builtin_func_file in ASSETS.iter() { - let builtin_path = std::path::Path::new(builtin_func_file.relative_path); - match builtin_path.extension() { - Some(extension) => { - if extension != std::ffi::OsStr::new("json") { - debug!("skipping {:?}: not a json file", builtin_path); - continue; - } - } - None => { - warn!("skipping {:?}: no file extension", builtin_path); - continue; - } - }; - - let func_metadata: FunctionMetadata = serde_json::from_str(builtin_func_file.contents_str) - .map_err(|e| BuiltinsError::FuncJson(builtin_path.to_string_lossy().to_string(), e))?; - - let func_name = format!( - "si:{}", - builtin_path - .file_stem() - .ok_or_else(|| { - BuiltinsError::FuncMetadata(format!( - "Unable to determine base file name for {builtin_path:?}" - )) - })? 
- .to_string_lossy() - ); - - let mut existing_func = Func::find_by_attr(ctx, "name", &func_name).await?; - if let Some(mut existing_func) = existing_func.pop() { - if *existing_func.backend_kind() != func_metadata.kind { - info!( - "updating backend kind for {:?} from {:?} to {:?}", - &func_name, - *existing_func.backend_kind(), - func_metadata.kind - ); - existing_func - .set_backend_kind(ctx, func_metadata.kind) - .await?; - } - - warn!("skipping {:?}: func already exists", &func_name); - continue; - } - - let mut new_func = Func::new( - ctx, - &func_name, - func_metadata.kind, - func_metadata.response_type, - ) - .await - .expect("cannot create func"); - - if let Some(code_file) = func_metadata.code_file { - if func_metadata.code_entrypoint.is_none() { - panic!("cannot create function with code_file but no code_entrypoint") - } - - let metadata_base_path = builtin_path.parent().ok_or_else(|| { - BuiltinsError::FuncMetadata(format!( - "Cannot determine parent path of {builtin_path:?}" - )) - })?; - let func_path = metadata_base_path.join(std::path::Path::new(&code_file)); - - let code = FUNC_BUILTIN_BY_PATH - .get(func_path.as_os_str().to_str().ok_or_else(|| { - BuiltinsError::FuncMetadata(format!("Unable to convert {func_path:?} to &str")) - })?) 
- .ok_or_else(|| { - BuiltinsError::FuncMetadata(format!("Code file not found: {code_file:?}")) - })?; - let code = general_purpose::STANDARD_NO_PAD.encode(code.contents_str); - new_func - .set_code_base64(ctx, Some(code)) - .await - .expect("cannot set code"); - } - - new_func - .set_handler(ctx, func_metadata.code_entrypoint) - .await - .expect("cannot set handler"); - - new_func - .set_display_name(ctx, func_metadata.display_name) - .await - .expect("cannot set display name"); - new_func - .set_description(ctx, func_metadata.description) - .await - .expect("cannot set func description"); - new_func - .set_link(ctx, func_metadata.link) - .await - .expect("cannot set func link"); - new_func - .set_hidden(ctx, func_metadata.hidden.unwrap_or(false)) - .await - .expect("cannot set func hidden"); - new_func - .set_builtin(ctx, true) - .await - .expect("cannot set func builtin"); - - if let Some(arguments) = func_metadata.arguments { - for arg in arguments { - FuncArgument::new(ctx, &arg.name, arg.kind, None, *new_func.id()).await?; - } - } - ctx.blocking_commit().await?; - } - - Ok(()) -} +// pub async fn migrate(ctx: &DalContext) -> BuiltinsResult<()> { +// for builtin_func_file in ASSETS.iter() { +// let builtin_path = std::path::Path::new(builtin_func_file.relative_path); +// match builtin_path.extension() { +// Some(extension) => { +// if extension != std::ffi::OsStr::new("json") { +// debug!("skipping {:?}: not a json file", builtin_path); +// continue; +// } +// } +// None => { +// warn!("skipping {:?}: no file extension", builtin_path); +// continue; +// } +// }; +// +// let func_metadata: FunctionMetadata = serde_json::from_str(builtin_func_file.contents_str) +// .map_err(|e| BuiltinsError::FuncJson(builtin_path.to_string_lossy().to_string(), e))?; +// +// let func_name = format!( +// "si:{}", +// builtin_path +// .file_stem() +// .ok_or_else(|| { +// BuiltinsError::FuncMetadata(format!( +// "Unable to determine base file name for {builtin_path:?}" +// )) +// 
})? +// .to_string_lossy() +// ); +// +// let mut existing_func = Func::find_by_attr(ctx, "name", &func_name).await?; +// if let Some(mut existing_func) = existing_func.pop() { +// if *existing_func.backend_kind() != func_metadata.kind { +// info!( +// "updating backend kind for {:?} from {:?} to {:?}", +// &func_name, +// *existing_func.backend_kind(), +// func_metadata.kind +// ); +// existing_func +// .set_backend_kind(ctx, func_metadata.kind) +// .await?; +// } +// +// warn!("skipping {:?}: func already exists", &func_name); +// continue; +// } +// +// let mut new_func = Func::new( +// ctx, +// &func_name, +// func_metadata.kind, +// func_metadata.response_type, +// ) +// .await +// .expect("cannot create func"); +// +// if let Some(code_file) = func_metadata.code_file { +// if func_metadata.code_entrypoint.is_none() { +// panic!("cannot create function with code_file but no code_entrypoint") +// } +// +// let metadata_base_path = builtin_path.parent().ok_or_else(|| { +// BuiltinsError::FuncMetadata(format!( +// "Cannot determine parent path of {builtin_path:?}" +// )) +// })?; +// let func_path = metadata_base_path.join(std::path::Path::new(&code_file)); +// +// let code = FUNC_BUILTIN_BY_PATH +// .get(func_path.as_os_str().to_str().ok_or_else(|| { +// BuiltinsError::FuncMetadata(format!("Unable to convert {func_path:?} to &str")) +// })?) 
+// .ok_or_else(|| { +// BuiltinsError::FuncMetadata(format!("Code file not found: {code_file:?}")) +// })?; +// let code = general_purpose::STANDARD_NO_PAD.encode(code.contents_str); +// new_func +// .set_code_base64(ctx, Some(code)) +// .await +// .expect("cannot set code"); +// } +// +// new_func +// .set_handler(ctx, func_metadata.code_entrypoint) +// .await +// .expect("cannot set handler"); +// +// new_func +// .set_display_name(ctx, func_metadata.display_name) +// .await +// .expect("cannot set display name"); +// new_func +// .set_description(ctx, func_metadata.description) +// .await +// .expect("cannot set func description"); +// new_func +// .set_link(ctx, func_metadata.link) +// .await +// .expect("cannot set func link"); +// new_func +// .set_hidden(ctx, func_metadata.hidden.unwrap_or(false)) +// .await +// .expect("cannot set func hidden"); +// new_func +// .set_builtin(ctx, true) +// .await +// .expect("cannot set func builtin"); +// +// if let Some(arguments) = func_metadata.arguments { +// for arg in arguments { +// FuncArgument::new(ctx, &arg.name, arg.kind, None, *new_func.id()).await?; +// } +// } +// ctx.blocking_commit().await?; +// } +// +// Ok(()) +// } diff --git a/lib/dal/src/builtins/schema.rs b/lib/dal/src/builtins/schema.rs index 24a11b4a01..725ced2f68 100644 --- a/lib/dal/src/builtins/schema.rs +++ b/lib/dal/src/builtins/schema.rs @@ -1,106 +1,149 @@ use si_pkg::SiPkg; -use std::collections::HashSet; use strum::{AsRefStr, Display, EnumIter, EnumString}; use telemetry::prelude::*; -use crate::builtins::schema::test_exclusive_schema_fallout::migrate_test_exclusive_schema_fallout; -use crate::builtins::schema::test_exclusive_schema_starfield::migrate_test_exclusive_schema_starfield; use crate::installed_pkg::InstalledPkg; use crate::pkg::{import_pkg_from_pkg, ImportOptions}; -use crate::{BuiltinsError, BuiltinsResult, DalContext, SelectedTestBuiltinSchemas}; +use crate::{BuiltinsError, BuiltinsResult, DalContext}; +mod 
test_exclusive_schema_bethesda_secret; mod test_exclusive_schema_fallout; mod test_exclusive_schema_starfield; -/// Migrate [`Schemas`](crate::Schema) for production use. -pub async fn migrate_local_all_schemas(ctx: &DalContext) -> BuiltinsResult<()> { - info!("migrating schemas"); +pub use test_exclusive_schema_bethesda_secret::migrate_test_exclusive_schema_bethesda_secret; +pub use test_exclusive_schema_fallout::migrate_test_exclusive_schema_fallout; +pub use test_exclusive_schema_starfield::migrate_test_exclusive_schema_starfield; - migrate_pkg(ctx, super::SI_AWS_PKG, None).await?; - migrate_pkg(ctx, super::SI_AWS_EC2_PKG, None).await?; - migrate_pkg(ctx, super::SI_DOCKER_IMAGE_PKG, None).await?; - migrate_pkg(ctx, super::SI_COREOS_PKG, None).await?; - migrate_pkg(ctx, super::SI_GENERIC_FRAME_PKG, None).await?; - migrate_pkg(ctx, super::SI_AWS_IAM_PKG, None).await?; - migrate_pkg(ctx, super::SI_AWS_ECS_PKG, None).await?; - migrate_pkg(ctx, super::SI_AWS_CLOUDWATCH_PKG, None).await?; - migrate_pkg(ctx, super::SI_AWS_LB_TARGET_GROUP_PKG, None).await?; +// /// Migrate [`Schemas`](crate::Schema) for production use. 
+// pub async fn migrate_for_production(ctx: &DalContext) -> BuiltinsResult<()> { +// info!("migrating schemas"); +// +// migrate_pkg(ctx, super::SI_AWS_PKG, None).await?; +// migrate_pkg(ctx, super::SI_AWS_EC2_PKG, None).await?; +// migrate_pkg(ctx, super::SI_DOCKER_IMAGE_PKG, None).await?; +// migrate_pkg(ctx, super::SI_COREOS_PKG, None).await?; +// migrate_pkg(ctx, super::SI_GENERIC_FRAME_PKG, None).await?; +// migrate_pkg(ctx, super::SI_AWS_IAM_PKG, None).await?; +// migrate_pkg(ctx, super::SI_AWS_ECS_PKG, None).await?; +// migrate_pkg(ctx, super::SI_AWS_CLOUDWATCH_PKG, None).await?; +// migrate_pkg(ctx, super::SI_AWS_LB_TARGET_GROUP_PKG, None).await?; +// +// Ok(()) +// } - Ok(()) +#[remain::sorted] +#[derive(Debug, Copy, Clone, AsRefStr, Display, EnumIter, EnumString, Eq, PartialEq)] +pub enum BuiltinSchema { + BethesdaSecret, + Fallout, + Starfield, } -/// Migrate [`Schemas`](crate::Schema) for use in tests. -pub async fn migrate_local_only_test_schemas( - ctx: &DalContext, - selected_test_builtin_schemas: SelectedTestBuiltinSchemas, -) -> BuiltinsResult<()> { - // Determine what to migrate based on the selected test builtin schemas provided. 
- let (migrate_all, migrate_test_exclusive, specific_builtin_schemas) = - match selected_test_builtin_schemas { - SelectedTestBuiltinSchemas::All => { - info!("migrating schemas for tests"); - (true, false, HashSet::new()) - } - SelectedTestBuiltinSchemas::None => { - info!("skipping migrating schemas for tests"); - return Ok(()); - } - SelectedTestBuiltinSchemas::Some(provided_set) => { - info!("migrating schemas for tests based on a provided set of names"); - debug!("provided set of builtin schemas: {:?}", &provided_set); - (false, false, provided_set) - } - SelectedTestBuiltinSchemas::Test => { - info!("migrating test-exclusive schemas solely"); - (false, true, HashSet::new()) - } - }; - - if migrate_all { - migrate_pkg(ctx, super::SI_AWS_PKG, None).await?; - migrate_pkg(ctx, super::SI_AWS_EC2_PKG, None).await?; - migrate_pkg(ctx, super::SI_COREOS_PKG, None).await?; - migrate_pkg(ctx, super::SI_DOCKER_IMAGE_PKG, None).await?; - migrate_pkg(ctx, super::SI_GENERIC_FRAME_PKG, None).await?; - migrate_pkg(ctx, super::SI_AWS_LB_TARGET_GROUP_PKG, None).await?; - - migrate_pkg_test_exclusive(ctx, TestExclusiveSchema::Fallout).await?; - migrate_pkg_test_exclusive(ctx, TestExclusiveSchema::Starfield).await?; - } else if migrate_test_exclusive { - // We migrate generic frame to get "si:resourceToPayloadValue" cheaply. This function - // should be converted to an intrinsic (or removed?) 
- migrate_pkg(ctx, super::SI_GENERIC_FRAME_PKG, None).await?; - - migrate_pkg_test_exclusive(ctx, TestExclusiveSchema::Fallout).await?; - migrate_pkg_test_exclusive(ctx, TestExclusiveSchema::Starfield).await?; - } else { - let schemas: Vec = specific_builtin_schemas - .iter() - .map(|s| s.to_owned()) - .collect(); - migrate_pkg(ctx, super::SI_AWS_PKG, Some(schemas.to_owned())).await?; - migrate_pkg(ctx, super::SI_AWS_EC2_PKG, Some(schemas.to_owned())).await?; - migrate_pkg(ctx, super::SI_COREOS_PKG, Some(schemas.to_owned())).await?; - migrate_pkg(ctx, super::SI_DOCKER_IMAGE_PKG, Some(schemas.to_owned())).await?; - migrate_pkg(ctx, super::SI_GENERIC_FRAME_PKG, Some(schemas.to_owned())).await?; - migrate_pkg( - ctx, - super::SI_AWS_LB_TARGET_GROUP_PKG, - Some(schemas.to_owned()), - ) - .await?; - - for test_schema in [TestExclusiveSchema::Starfield, TestExclusiveSchema::Fallout] { - if specific_builtin_schemas.contains(test_schema.real_schema_name()) { - migrate_pkg_test_exclusive(ctx, test_schema).await?; - } +impl BuiltinSchema { + pub fn real_schema_name(&self) -> &'static str { + match self { + BuiltinSchema::BethesdaSecret => "bethesda-secret", + BuiltinSchema::Fallout => "fallout", + BuiltinSchema::Starfield => "starfield", } } - - Ok(()) } -async fn migrate_pkg( +// pub async fn migrate_schema( +// ctx: &DalContext, +// schema: BuiltinSchema, +// driver: &MigrationDriver, +// ) -> BuiltinsResult<()> { +// match schema { +// BuiltinSchema::Fallout => { +// driver.migrate_test_exclusive_fallout(ctx).await?; +// } +// BuiltinSchema::Starfield => { +// driver.migrate_test_exclusive_starfield(ctx).await?; +// } +// } +// +// Ok(()) +// } + +// /// Migrate [`Schemas`](crate::Schema) for use in tests. +// pub async fn migrate_for_tests( +// ctx: &DalContext, +// selected_test_builtin_schemas: SelectedTestBuiltinSchemas, +// ) -> BuiltinsResult<()> { +// // Determine what to migrate based on the selected test builtin schemas provided. 
+// let (migrate_all, migrate_test_exclusive, specific_builtin_schemas) = +// match selected_test_builtin_schemas { +// SelectedTestBuiltinSchemas::All => { +// info!("migrating schemas for tests"); +// (true, false, HashSet::new()) +// } +// SelectedTestBuiltinSchemas::None => { +// info!("skipping migrating schemas for tests"); +// return Ok(()); +// } +// SelectedTestBuiltinSchemas::Some(provided_set) => { +// info!("migrating schemas for tests based on a provided set of names"); +// debug!("provided set of builtin schemas: {:?}", &provided_set); +// (false, false, provided_set) +// } +// SelectedTestBuiltinSchemas::Test => { +// info!("migrating test-exclusive schemas solely"); +// (false, true, HashSet::new()) +// } +// }; + +// // Once we know what to migrate, create the driver. +// let driver = MigrationDriver::new(ctx).await?; +// ctx.blocking_commit().await?; + +// if migrate_all { +// migrate_pkg(ctx, super::SI_AWS_PKG, None).await?; +// migrate_pkg(ctx, super::SI_AWS_EC2_PKG, None).await?; +// migrate_pkg(ctx, super::SI_COREOS_PKG, None).await?; +// migrate_pkg(ctx, super::SI_DOCKER_IMAGE_PKG, None).await?; +// migrate_pkg(ctx, super::SI_GENERIC_FRAME_PKG, None).await?; +// migrate_pkg(ctx, super::SI_AWS_LB_TARGET_GROUP_PKG, None).await?; +// for test_schema in [BuiltinSchema::Starfield, BuiltinSchema::Fallout] { +// migrate_schema(ctx, test_schema, &driver).await?; +// ctx.blocking_commit().await?; +// } +// } else if migrate_test_exclusive { +// // We migrate generic frame to get "si:resourceToPayloadValue" cheaply. This function +// // should be converted to an intrinsic (or removed?) 
+// migrate_pkg(ctx, super::SI_GENERIC_FRAME_PKG, None).await?; +// for test_schema in [BuiltinSchema::Starfield, BuiltinSchema::Fallout] { +// migrate_schema(ctx, test_schema, &driver).await?; +// ctx.blocking_commit().await?; +// } +// } else { +// let schemas: Vec = specific_builtin_schemas +// .iter() +// .map(|s| s.to_owned()) +// .collect(); +// migrate_pkg(ctx, super::SI_AWS_PKG, Some(schemas.to_owned())).await?; +// migrate_pkg(ctx, super::SI_AWS_EC2_PKG, Some(schemas.to_owned())).await?; +// migrate_pkg(ctx, super::SI_COREOS_PKG, Some(schemas.to_owned())).await?; +// migrate_pkg(ctx, super::SI_DOCKER_IMAGE_PKG, Some(schemas.to_owned())).await?; +// migrate_pkg(ctx, super::SI_GENERIC_FRAME_PKG, Some(schemas.to_owned())).await?; +// migrate_pkg( +// ctx, +// super::SI_AWS_LB_TARGET_GROUP_PKG, +// Some(schemas.to_owned()), +// ) +// .await?; +// for test_schema in [BuiltinSchema::Starfield, BuiltinSchema::Fallout] { +// if specific_builtin_schemas.contains(test_schema.real_schema_name()) { +// migrate_schema(ctx, test_schema, &driver).await?; +// ctx.blocking_commit().await?; +// } +// } +// } + +// Ok(()) +// } + +pub async fn migrate_pkg( ctx: &DalContext, pkg_filename: &str, schemas: Option>, @@ -127,35 +170,130 @@ async fn migrate_pkg( Ok(()) } -async fn migrate_pkg_test_exclusive( - ctx: &DalContext, - schema: TestExclusiveSchema, -) -> BuiltinsResult<()> { - match schema { - TestExclusiveSchema::Fallout => { - migrate_test_exclusive_schema_fallout(ctx).await?; - } - TestExclusiveSchema::Starfield => { - migrate_test_exclusive_schema_starfield(ctx).await?; - } - } - ctx.blocking_commit().await?; - Ok(()) -} - -/// Test exclusive [`Schema`] are solely used for "dal" integration tests. 
-#[remain::sorted] -#[derive(Debug, Copy, Clone, AsRefStr, Display, EnumIter, EnumString, Eq, PartialEq)] -enum TestExclusiveSchema { - Fallout, - Starfield, -} - -impl TestExclusiveSchema { - pub fn real_schema_name(&self) -> &'static str { - match self { - TestExclusiveSchema::Fallout => "fallout", - TestExclusiveSchema::Starfield => "starfield", - } - } -} +// /// A _private_ item containing useful metadata alongside a [`FuncId`](crate::Func). This is used by +// /// the [`MigrationDriver`]. +// #[derive(Copy, Clone, Debug)] +// pub struct FuncCacheItem { +// pub func_id: FuncId, +// pub func_binding_id: FuncBindingId, +// pub func_binding_return_value_id: FuncBindingReturnValueId, +// pub func_argument_id: FuncArgumentId, +// } +// +// /// This _private_ driver providing caches and helper methods for efficiently creating builtin +// /// [`Schemas`](crate::Schema). +// #[derive(Default)] +// pub struct MigrationDriver { +// pub func_item_cache: HashMap, +// pub func_id_cache: HashMap, +// } +// +// impl MigrationDriver { +// /// Create a [`driver`](Self) with commonly used, cached data. +// pub async fn new(ctx: &DalContext) -> BuiltinsResult { +// let mut driver = Self::default(); +// +// driver +// .add_func_item( +// ctx, +// "si:identity".to_string(), +// serde_json::json![{ "identity": null }], +// "identity".to_string(), +// vec![], +// ) +// .await?; +// +// for builtin_func_name in ["si:validation"] { +// driver +// .add_func_id(ctx, builtin_func_name.to_string()) +// .await?; +// } +// +// Ok(driver) +// } +// +// /// Add a `FuncCacheItem` for a given [`Func`](crate::Func) name. +// pub async fn add_func_item( +// &mut self, +// ctx: &DalContext, +// func_name: String, +// func_binding_args: Value, +// func_argument_name: String, +// before: Vec, +// ) -> BuiltinsResult<()> { +// let func: Func = Func::find_by_attr(ctx, "name", &func_name) +// .await? 
+// .pop() +// .ok_or_else(|| FuncError::NotFoundByName(func_name.clone()))?; +// let func_id = *func.id(); +// let (func_binding, func_binding_return_value) = +// FuncBinding::create_and_execute(ctx, func_binding_args, func_id, before).await?; +// let func_argument = FuncArgument::find_by_name_for_func(ctx, &func_argument_name, func_id) +// .await? +// .ok_or_else(|| { +// BuiltinsError::BuiltinMissingFuncArgument(func_name.clone(), func_argument_name) +// })?; +// self.func_item_cache.insert( +// func_name, +// FuncCacheItem { +// func_id, +// func_binding_id: *func_binding.id(), +// func_binding_return_value_id: *func_binding_return_value.id(), +// func_argument_id: *func_argument.id(), +// }, +// ); +// +// Ok(()) +// } +// +// /// Add a [`FuncId`](crate::Func) for a given [`Func`](crate::Func) name. +// pub async fn add_func_id(&mut self, ctx: &DalContext, func_name: String) -> BuiltinsResult<()> { +// let func = Func::find_by_attr(ctx, "name", &func_name) +// .await? +// .pop() +// .ok_or_else(|| FuncError::NotFoundByName(func_name.clone()))?; +// self.func_id_cache.insert(func_name, *func.id()); +// Ok(()) +// } +// +// /// Get a `FuncCacheItem` (from the cache) for a given [`Func`](crate::Func) name. +// pub fn get_func_item(&self, name: impl AsRef) -> Option { +// self.func_item_cache.get(name.as_ref()).copied() +// } +// +// /// Get a [`FuncId`](crate::Func) (from the cache) for a given [`Func`](crate::Func) name. +// pub fn get_func_id(&self, name: impl AsRef) -> Option { +// self.func_id_cache.get(name.as_ref()).copied() +// } +// +// /// Find a single [`Func`](crate::Func) and [`FuncArgument`](crate::FuncArgument) by providing +// /// the name for each, respectively. 
+// pub async fn find_func_and_single_argument_by_names( +// &self, +// ctx: &DalContext, +// func_name: &str, +// func_argument_name: &str, +// ) -> BuiltinsResult<(FuncId, FuncArgumentId)> { +// Self::find_func_and_single_argument_by_names_raw(ctx, func_name, func_argument_name).await +// } +// +// pub async fn find_func_and_single_argument_by_names_raw( +// ctx: &DalContext, +// func_name: &str, +// func_argument_name: &str, +// ) -> BuiltinsResult<(FuncId, FuncArgumentId)> { +// // NOTE(nick): we may eventually want to make "self" mutable and perform auto caching. +// let func_name = func_name.to_string(); +// let func = Func::find_by_attr(ctx, "name", &func_name) +// .await? +// .pop() +// .ok_or_else(|| SchemaError::FuncNotFound(func_name.clone()))?; +// let func_id = *func.id(); +// let func_argument = FuncArgument::find_by_name_for_func(ctx, func_argument_name, func_id) +// .await? +// .ok_or_else(|| { +// BuiltinsError::BuiltinMissingFuncArgument(func_name, func_argument_name.to_string()) +// })?; +// Ok((func_id, *func_argument.id())) +// } +// } diff --git a/lib/dal/src/builtins/schema/test_exclusive_schema_bethesda_secret.rs b/lib/dal/src/builtins/schema/test_exclusive_schema_bethesda_secret.rs new file mode 100644 index 0000000000..738def6596 --- /dev/null +++ b/lib/dal/src/builtins/schema/test_exclusive_schema_bethesda_secret.rs @@ -0,0 +1,236 @@ +use si_pkg::{ + AttrFuncInputSpec, AttrFuncInputSpecKind, AuthenticationFuncSpec, FuncArgumentSpec, FuncSpec, + FuncSpecBackendKind, FuncSpecBackendResponseType, LeafFunctionSpec, LeafInputLocation, + LeafKind, PkgSpec, PropSpec, PropSpecKind, PropSpecWidgetKind, SchemaSpec, SchemaVariantSpec, + SchemaVariantSpecData, SiPkg, SocketSpec, SocketSpecArity, SocketSpecData, SocketSpecKind, +}; +use si_pkg::{FuncSpecData, SchemaSpecData}; + +use crate::func::argument::FuncArgumentKind; +use crate::func::intrinsics::IntrinsicFunc; +use crate::pkg::import_pkg_from_pkg; +use crate::prop::PropPath; +use 
crate::{BuiltinsResult, DalContext}; + +pub async fn migrate_test_exclusive_schema_bethesda_secret(ctx: &DalContext) -> BuiltinsResult<()> { + let name = "bethesda-secret"; + + let mut builder = PkgSpec::builder(); + + builder + .name(name) + .version("2024-03-01") + .created_by("System Initiative"); + + let identity_func_spec = IntrinsicFunc::Identity.to_spec()?; + + let scaffold_func = "function createAsset() {\ + return new AssetBuilder().build() + }"; + let fn_name = "test:scaffoldBethesdaSecretAsset"; + let authoring_schema_func = FuncSpec::builder() + .name(fn_name) + .unique_id(fn_name) + .data( + FuncSpecData::builder() + .name(fn_name) + .code_plaintext(scaffold_func) + .handler("createAsset") + .backend_kind(FuncSpecBackendKind::JsSchemaVariantDefinition) + .response_type(FuncSpecBackendResponseType::SchemaVariantDefinition) + .build()?, + ) + .build()?; + + let auth_func_code = "async function auth(secret: Input): Promise { requestStorage.setItem('fakeSecretString', secret.value); }"; + let fn_name = "test:setFakeSecretString"; + let auth_func = FuncSpec::builder() + .name(fn_name) + .unique_id(fn_name) + .data( + FuncSpecData::builder() + .name(fn_name) + .code_plaintext(auth_func_code) + .handler("auth") + .backend_kind(FuncSpecBackendKind::JsAuthentication) + .response_type(FuncSpecBackendResponseType::Json) + .build()?, + ) + .argument( + FuncArgumentSpec::builder() + .name("secret") + .kind(FuncArgumentKind::Object) + .build()?, + ) + .build()?; + + let (fake_secret_definition_prop, fake_secret_prop, fake_secret_output_socket) = + assemble_secret_definition_fake(&identity_func_spec)?; + + let ( + qualification_fake_secret_value_is_todd_func, + qualification_fake_secret_value_is_todd_leaf, + ) = assemble_qualification_fake_secret_value_is_todd()?; + + let schema = SchemaSpec::builder() + .name(name) + .data( + SchemaSpecData::builder() + .name(name) + .category("test exclusive") + .category_name(name) + .build() + .expect("build schema spec data"), + 
) + .variant( + SchemaVariantSpec::builder() + .name("v0") + .unique_id(format!("{name}_sv")) + .data( + SchemaVariantSpecData::builder() + .name("v0") + .color("#ffffff") + .func_unique_id(&authoring_schema_func.unique_id) + .build()?, + ) + .auth_func( + AuthenticationFuncSpec::builder() + .func_unique_id(&auth_func.unique_id) + .build()?, + ) + .secret_prop(fake_secret_prop) + .secret_definition_prop(fake_secret_definition_prop) + .socket(fake_secret_output_socket) + .leaf_function(qualification_fake_secret_value_is_todd_leaf) + .build()?, + ) + .build()?; + + let spec = builder + .func(identity_func_spec) + .func(authoring_schema_func) + .func(auth_func) + .func(qualification_fake_secret_value_is_todd_func) + .schema(schema) + .build()?; + + let pkg = SiPkg::load_from_spec(spec)?; + import_pkg_from_pkg( + ctx, + &pkg, + Some(crate::pkg::ImportOptions { + schemas: Some(vec![name.into()]), + ..Default::default() + }), + ) + .await?; + + Ok(()) +} + +// Mimics the "defineSecret" function in "asset_builder.ts". +fn assemble_secret_definition_fake( + identity_func_spec: &FuncSpec, +) -> BuiltinsResult<(PropSpec, PropSpec, SocketSpec)> { + let secret_definition_name = "fake"; + + // First, create the child of "/root/secret_definition" that defines our secret. + let new_secret_definition_prop = PropSpec::builder() + .name("value") + .kind(PropSpecKind::String) + .widget_kind(PropSpecWidgetKind::Password) + .build()?; + + // Second, add it as a new prop underneath "/root/secrets" object. Make sure the "secretKind" is available. + let new_secret_prop = PropSpec::builder() + .name(secret_definition_name) + .kind(PropSpecKind::String) + .widget_kind(PropSpecWidgetKind::Secret) + .widget_options(serde_json::json![ + [ + { + "label": "secretKind", + "value": secret_definition_name + } + ] + ]) + .build()?; + + // Third, add an output socket for other components to use the secret. 
+ let new_secret_output_socket = SocketSpec::builder() + .name(secret_definition_name) + .data( + SocketSpecData::builder() + .name(secret_definition_name) + .connection_annotations(serde_json::to_string(&vec![ + secret_definition_name.to_lowercase() + ])?) + .kind(SocketSpecKind::Output) + .arity(SocketSpecArity::One) + .func_unique_id(&identity_func_spec.unique_id) + .build()?, + ) + .input( + AttrFuncInputSpec::builder() + .name("identity") + .kind(AttrFuncInputSpecKind::Prop) + .prop_path(PropPath::new(["root", "secrets", secret_definition_name])) + .build()?, + ) + .build()?; + + Ok(( + new_secret_definition_prop, + new_secret_prop, + new_secret_output_socket, + )) +} + +fn assemble_qualification_fake_secret_value_is_todd() -> BuiltinsResult<(FuncSpec, LeafFunctionSpec)> +{ + let fn_code = "async function qualification(_component: Input): Promise {\ + const authCheck = requestStorage.getItem('fakeSecretString'); + if (authCheck) { + if (authCheck === 'todd') { + return { + result: 'success', + message: 'fake secret string matches expected value' + }; + } + return { + result: 'failure', + message: 'fake secret string does not match expected value' + }; + } else { + return { + result: 'failure', + message: 'fake secret string is empty' + }; + } + }"; + let fn_name = "test:qualificationFakeSecretStringIsTodd"; + let qualification_fake_secret_value_is_todd_func = FuncSpec::builder() + .name(fn_name) + .unique_id(fn_name) + .data( + FuncSpecData::builder() + .name(fn_name) + .code_plaintext(fn_code) + .handler("qualification") + .backend_kind(FuncSpecBackendKind::JsAttribute) + .response_type(FuncSpecBackendResponseType::Qualification) + .build()?, + ) + .build()?; + + let qualification_fake_secret_value_is_todd_leaf = LeafFunctionSpec::builder() + .func_unique_id(&qualification_fake_secret_value_is_todd_func.unique_id) + .leaf_kind(LeafKind::Qualification) + .inputs(vec![LeafInputLocation::Secrets]) + .build()?; + + Ok(( + 
qualification_fake_secret_value_is_todd_func, + qualification_fake_secret_value_is_todd_leaf, + )) +} diff --git a/lib/dal/src/builtins/schema/test_exclusive_schema_starfield.rs b/lib/dal/src/builtins/schema/test_exclusive_schema_starfield.rs index ff5268c25e..2d07008935 100644 --- a/lib/dal/src/builtins/schema/test_exclusive_schema_starfield.rs +++ b/lib/dal/src/builtins/schema/test_exclusive_schema_starfield.rs @@ -151,6 +151,31 @@ pub async fn migrate_test_exclusive_schema_starfield(ctx: &DalContext) -> Builti ) .build()?; + let starfield_kripke_func_code = "async function hesperus_is_phosphorus(input) { + if input.hesperus === \"hesperus\" { return \"phosphorus\"; } + else return \"not hesperus\"; + }"; + let starfield_kripke_func_name = "hesperus_is_phosphorus"; + let starfield_kripke_func = FuncSpec::builder() + .name(starfield_kripke_func_name) + .unique_id(starfield_kripke_func_name) + .data( + FuncSpecData::builder() + .name(starfield_kripke_func_name) + .code_plaintext(starfield_kripke_func_code) + .handler(starfield_kripke_func_name) + .backend_kind(FuncSpecBackendKind::JsAttribute) + .response_type(FuncSpecBackendResponseType::String) + .build()?, + ) + .argument( + FuncArgumentSpec::builder() + .name("hesperus") + .kind(FuncArgumentKind::String) + .build()?, + ) + .build()?; + let starfield_schema = SchemaSpec::builder() .name("starfield") .data( @@ -214,6 +239,84 @@ pub async fn migrate_test_exclusive_schema_starfield(ctx: &DalContext) -> Builti ) .build()?, ) + .domain_prop( + PropSpec::builder() + .name("possible_world_a") + .kind(PropKind::Object) + .entry( + PropSpec::builder() + .name("wormhole_1") + .kind(PropKind::Object) + .entry( + PropSpec::builder() + .name("wormhole_2") + .kind(PropKind::Object) + .entry( + PropSpec::builder() + .name("wormhole_3") + .kind(PropKind::Object) + .entry( + PropSpec::builder() + .kind(PropKind::String) + .name("rigid_designator") + .build()?, + ) + .build()?, + ) + .build()?, + ) + .build()?, + ) + .build()?, + 
) + .domain_prop( + PropSpec::builder() + .name("possible_world_b") + .kind(PropKind::Object) + .entry( + PropSpec::builder() + .name("wormhole_1") + .kind(PropKind::Object) + .entry( + PropSpec::builder() + .name("wormhole_2") + .kind(PropKind::Object) + .entry( + PropSpec::builder() + .name("wormhole_3") + .kind(PropKind::Object) + .entry( + PropSpec::builder() + .kind(PropKind::String) + .name("naming_and_necessity") + .func_unique_id( + &starfield_kripke_func.unique_id, + ) + .input( + AttrFuncInputSpec::builder() + .kind(AttrFuncInputSpecKind::Prop) + .name("hesperus") + .prop_path(PropPath::new([ + "root", + "domain", + "possible_world_a", + "wormhole_1", + "wormhole_2", + "wormhole_3", + "rigid_designator", + ])) + .build()?, + ) + .build()?, + ) + .build()?, + ) + .build()?, + ) + .build()?, + ) + .build()?, + ) .domain_prop( PropSpec::builder() .name("universe") @@ -301,6 +404,7 @@ pub async fn migrate_test_exclusive_schema_starfield(ctx: &DalContext) -> Builti .func(fallout_entries_to_galaxies_transform_func) .func(starfield_authoring_schema_func) .func(starfield_resource_payload_to_value_func) + .func(starfield_kripke_func) .schema(starfield_schema) .build()?; diff --git a/lib/dal/src/change_set.rs b/lib/dal/src/change_set.rs index c861df43d1..3ff31a384f 100644 --- a/lib/dal/src/change_set.rs +++ b/lib/dal/src/change_set.rs @@ -3,22 +3,23 @@ use postgres_types::ToSql; use serde::{Deserialize, Serialize}; use si_data_nats::NatsError; use si_data_pg::{PgError, PgPoolError}; -use std::collections::HashMap; use strum::{Display, EnumString}; use telemetry::prelude::*; use thiserror::Error; +use ulid::Ulid; +use crate::change_set_pointer::{ChangeSetPointer, ChangeSetPointerError}; use crate::standard_model::{object_option_from_row_option, objects_from_rows}; use crate::{ - action::ActionBag, pk, Action, ActionError, ActionId, HistoryActor, HistoryEvent, - HistoryEventError, LabelListError, StandardModelError, Tenancy, Timestamp, TransactionsError, - User, 
UserError, UserPk, Visibility, WsEvent, WsEventError, WsPayload, + pk, HistoryActor, HistoryEvent, HistoryEventError, LabelListError, StandardModelError, Tenancy, + Timestamp, TransactionsError, User, UserError, UserPk, Visibility, WsEvent, WsEventError, + WsPayload, }; -use crate::{ComponentError, DalContext, WsEventResult}; +use crate::{DalContext, WsEventResult}; const CHANGE_SET_OPEN_LIST: &str = include_str!("queries/change_set/open_list.sql"); const CHANGE_SET_GET_BY_PK: &str = include_str!("queries/change_set/get_by_pk.sql"); -const GET_ACTORS: &str = include_str!("queries/change_set/get_actors.sql"); +// const GET_ACTORS: &str = include_str!("queries/change_set/get_actors.sql"); const BEGIN_MERGE_FLOW: &str = include_str!("queries/change_set/begin_merge_flow.sql"); const CANCEL_MERGE_FLOW: &str = include_str!("queries/change_set/cancel_merge_flow.sql"); @@ -30,10 +31,8 @@ const CANCEL_ABANDON_FLOW: &str = include_str!("queries/change_set/cancel_abando #[remain::sorted] #[derive(Error, Debug)] pub enum ChangeSetError { - #[error(transparent)] - Action(#[from] ActionError), - #[error(transparent)] - Component(#[from] ComponentError), + #[error("change set pointer error: {0}")] + ChangeSetPointer(#[from] ChangeSetPointerError), #[error(transparent)] HistoryEvent(#[from] HistoryEventError), #[error("invalid user actor pk")] @@ -293,43 +292,45 @@ impl ChangeSet { Ok(change_set) } - pub async fn actions(&self, ctx: &DalContext) -> ChangeSetResult> { - let ctx = - ctx.clone_with_new_visibility(Visibility::new(self.pk, ctx.visibility().deleted_at)); - Ok(Action::order(&ctx).await?) - } - - pub async fn actors(&self, ctx: &DalContext) -> ChangeSetResult> { - let rows = ctx - .txns() - .await? 
- .pg() - .query(GET_ACTORS, &[&ctx.tenancy().workspace_pk(), &self.pk]) - .await?; - - let mut result: Vec = vec![]; - for row in rows.into_iter() { - let email: String = row.try_get("email")?; - result.push(email); - } - - Ok(result) - } + // pub async fn actions(&self, ctx: &DalContext) -> ChangeSetResult> { + // let ctx = + // ctx.clone_with_new_visibility(Visibility::new(self.pk, ctx.visibility().deleted_at)); + // Ok(Action::order(&ctx).await?) + // } + + // pub async fn actors(&self, ctx: &DalContext) -> ChangeSetResult> { + // let rows = ctx + // .txns() + // .await? + // .pg() + // .query(GET_ACTORS, &[&ctx.tenancy().workspace_pk(), &self.pk]) + // .await?; + + // let mut result: Vec = vec![]; + // for row in rows.into_iter() { + // let email: String = row.try_get("email")?; + // result.push(email); + // } + + // Ok(result) + // } pub async fn force_new(ctx: &mut DalContext) -> ChangeSetResult> { Ok(if ctx.visibility().is_head() { - let change_set = Self::new(ctx, Self::generate_name(), None).await?; - - let new_visibility = Visibility::new(change_set.pk, ctx.visibility().deleted_at); + // TODO(nick): eventually unify this logic under one interface. + let change_set = ChangeSetPointer::fork_head(ctx, Self::generate_name()).await?; + ctx.update_visibility_v2(&change_set); + ctx.update_snapshot_to_visibility().await?; - ctx.update_visibility(new_visibility); + // TODO(nick): replace this with the new change set stuff. + let fake_pk = ChangeSetPk::from(Ulid::from(change_set.id)); - WsEvent::change_set_created(ctx, change_set.pk) + WsEvent::change_set_created(ctx, fake_pk) .await? .publish_on_commit(ctx) .await?; - Some(change_set.pk) + Some(fake_pk) } else { None }) diff --git a/lib/dal/src/change_set_pointer.rs b/lib/dal/src/change_set_pointer.rs new file mode 100644 index 0000000000..c76cb6837d --- /dev/null +++ b/lib/dal/src/change_set_pointer.rs @@ -0,0 +1,349 @@ +//! The sequel to [`ChangeSets`](crate::ChangeSet). Coming to an SI instance near you! 
+ +use std::sync::{Arc, Mutex}; + +use chrono::{DateTime, Utc}; +use serde::{Deserialize, Serialize}; +use si_data_pg::{PgError, PgRow}; +use telemetry::prelude::*; +use thiserror::Error; +use ulid::{Generator, Ulid}; + +use crate::context::RebaseRequest; +use crate::workspace_snapshot::vector_clock::VectorClockId; +use crate::workspace_snapshot::WorkspaceSnapshotId; +use crate::{pk, ChangeSetStatus, DalContext, TransactionsError, Workspace, WorkspacePk}; + +#[remain::sorted] +#[derive(Debug, Error)] +pub enum ChangeSetPointerError { + #[error("change set not found")] + ChangeSetNotFound, + #[error("could not find default change set: {0}")] + DefaultChangeSetNotFound(ChangeSetPointerId), + #[error("default change set {0} has no workspace snapshot pointer")] + DefaultChangeSetNoWorkspaceSnapshotPointer(ChangeSetPointerId), + #[error("enum parse error: {0}")] + EnumParse(#[from] strum::ParseError), + #[error("ulid monotonic error: {0}")] + Monotonic(#[from] ulid::MonotonicError), + #[error("mutex error: {0}")] + Mutex(String), + #[error("Changeset {0} does not have a base change set")] + NoBaseChangeSet(ChangeSetPointerId), + #[error("no tenancy set in context")] + NoTenancySet, + #[error("Changeset {0} does not have a workspace snapshot")] + NoWorkspaceSnapshot(ChangeSetPointerId), + #[error("pg error: {0}")] + Pg(#[from] PgError), + #[error("serde json error: {0}")] + SerdeJson(#[from] serde_json::Error), + #[error("transactions error: {0}")] + Transactions(#[from] TransactionsError), + #[error("workspace error: {0}")] + Workspace(String), + #[error("workspace not found: {0}")] + WorkspaceNotFound(WorkspacePk), +} + +pub type ChangeSetPointerResult = Result; + +pk!(ChangeSetPointerId); + +#[derive(Clone, Serialize, Deserialize)] +pub struct ChangeSetPointer { + pub id: ChangeSetPointerId, + pub pk: ChangeSetPointerId, + pub created_at: DateTime, + pub updated_at: DateTime, + + pub name: String, + pub status: ChangeSetStatus, + pub base_change_set_id: Option, + pub 
workspace_snapshot_id: Option, + pub workspace_id: Option, + + #[serde(skip)] + pub generator: Arc>, +} + +impl TryFrom for ChangeSetPointer { + type Error = ChangeSetPointerError; + + fn try_from(value: PgRow) -> Result { + let status_string: String = value.try_get("status")?; + let status = ChangeSetStatus::try_from(status_string.as_str())?; + Ok(Self { + id: value.try_get("id")?, + pk: value.try_get("id")?, + created_at: value.try_get("created_at")?, + updated_at: value.try_get("updated_at")?, + name: value.try_get("name")?, + status, + base_change_set_id: value.try_get("base_change_set_id")?, + workspace_snapshot_id: value.try_get("workspace_snapshot_id")?, + workspace_id: value.try_get("workspace_id")?, + generator: Arc::new(Mutex::new(Default::default())), + }) + } +} + +impl ChangeSetPointer { + pub fn new_local() -> ChangeSetPointerResult { + let mut generator = Generator::new(); + let id = generator.generate()?; + + Ok(Self { + id: id.into(), + pk: id.into(), + created_at: Utc::now(), + updated_at: Utc::now(), + generator: Arc::new(Mutex::new(generator)), + base_change_set_id: None, + workspace_snapshot_id: None, + workspace_id: None, + name: "".to_string(), + status: ChangeSetStatus::Open, + }) + } + + pub fn editing_changeset(&self) -> ChangeSetPointerResult { + let mut new_local = Self::new_local()?; + new_local.base_change_set_id = self.base_change_set_id; + new_local.workspace_snapshot_id = self.workspace_snapshot_id; + new_local.workspace_id = self.workspace_id; + new_local.name = self.name.to_owned(); + new_local.status = self.status.to_owned(); + Ok(new_local) + } + + pub async fn new_with_id( + ctx: &DalContext, + name: impl AsRef, + id: ChangeSetPointerId, + base_change_set_id: Option, + ) -> ChangeSetPointerResult { + let workspace_id = ctx.tenancy().workspace_pk(); + let name = name.as_ref(); + let row = ctx + .txns() + .await? 
+ .pg() + .query_one( + "INSERT INTO change_set_pointers (id, name, base_change_set_id, status, workspace_id) VALUES ($1, $2, $3, $4, $5) RETURNING *", + &[&id, &name, &base_change_set_id, &ChangeSetStatus::Open.to_string(), &workspace_id], + ) + .await?; + Self::try_from(row) + } + + pub async fn new( + ctx: &DalContext, + name: impl AsRef, + base_change_set_id: Option, + ) -> ChangeSetPointerResult { + let workspace_id = ctx.tenancy().workspace_pk(); + let name = name.as_ref(); + let row = ctx + .txns() + .await? + .pg() + .query_one( + "INSERT INTO change_set_pointers (name, base_change_set_id, status, workspace_id) VALUES ($1, $2, $3, $4) RETURNING *", + &[&name, &base_change_set_id, &ChangeSetStatus::Open.to_string(), &workspace_id], + ) + .await?; + Self::try_from(row) + } + + pub async fn fork_head( + ctx: &DalContext, + name: impl AsRef, + ) -> ChangeSetPointerResult { + let workspace_pk = ctx + .tenancy() + .workspace_pk() + .ok_or(ChangeSetPointerError::NoTenancySet)?; + + let workspace = Workspace::get_by_pk(ctx, &workspace_pk) + .await + .map_err(|err| ChangeSetPointerError::Workspace(err.to_string()))? + .ok_or(ChangeSetPointerError::WorkspaceNotFound(workspace_pk))?; + + let base_change_set_pointer = + ChangeSetPointer::find(ctx, workspace.default_change_set_id()) + .await? 
+ .ok_or(ChangeSetPointerError::DefaultChangeSetNotFound( + workspace.default_change_set_id(), + ))?; + + let mut change_set_pointer = + ChangeSetPointer::new(ctx, name, Some(workspace.default_change_set_id())).await?; + + change_set_pointer + .update_pointer( + ctx, + base_change_set_pointer.workspace_snapshot_id.ok_or( + ChangeSetPointerError::DefaultChangeSetNoWorkspaceSnapshotPointer( + workspace.default_change_set_id(), + ), + )?, + ) + .await?; + + Ok(change_set_pointer) + } + + pub async fn new_head(ctx: &DalContext) -> ChangeSetPointerResult { + let name = "HEAD"; + + Self::new_with_id(ctx, name, ChangeSetPointerId::NONE, None).await + } + + /// Create a [`VectorClockId`] from the [`ChangeSetPointer`]. + pub fn vector_clock_id(&self) -> VectorClockId { + VectorClockId::from(Ulid::from(self.id)) + } + + pub fn generate_ulid(&self) -> ChangeSetPointerResult { + self.generator + .lock() + .map_err(|e| ChangeSetPointerError::Mutex(e.to_string()))? + .generate() + .map_err(Into::into) + } + + pub async fn update_workspace_id( + &mut self, + ctx: &DalContext, + workspace_id: WorkspacePk, + ) -> ChangeSetPointerResult<()> { + ctx.txns() + .await? + .pg() + .query_none( + "UPDATE change_set_pointers SET workspace_id = $2 WHERE id = $1", + &[&self.id, &workspace_id], + ) + .await?; + + self.workspace_id = Some(workspace_id); + + Ok(()) + } + + pub async fn update_pointer( + &mut self, + ctx: &DalContext, + workspace_snapshot_id: WorkspaceSnapshotId, + ) -> ChangeSetPointerResult<()> { + ctx.txns() + .await? + .pg() + .query_none( + "UPDATE change_set_pointers SET workspace_snapshot_id = $2 WHERE id = $1", + &[&self.id, &workspace_snapshot_id], + ) + .await?; + + self.workspace_snapshot_id = Some(workspace_snapshot_id); + + Ok(()) + } + + pub async fn update_status( + &mut self, + ctx: &DalContext, + status: ChangeSetStatus, + ) -> ChangeSetPointerResult<()> { + ctx.txns() + .await? 
+ .pg() + .query_none( + "UPDATE change_set_pointers SET status = $2 WHERE id = $1", + &[&self.id, &status.to_string()], + ) + .await?; + + self.status = status; + + Ok(()) + } + + #[instrument(skip_all)] + pub async fn find( + ctx: &DalContext, + change_set_pointer_id: ChangeSetPointerId, + ) -> ChangeSetPointerResult> { + let row = ctx + .txns() + .await? + .pg() + .query_opt( + "SELECT * FROM change_set_pointers WHERE id = $1", + &[&change_set_pointer_id], + ) + .await?; + + match row { + Some(row) => Ok(Some(Self::try_from(row)?)), + None => Ok(None), + } + } + + pub async fn list_open(ctx: &DalContext) -> ChangeSetPointerResult> { + let mut result = vec![]; + let rows = ctx + .txns() + .await? + .pg() + .query( + "SELECT * from change_set_pointers WHERE workspace_id = $1 AND status = $2", + &[ + &ctx.tenancy().workspace_pk(), + &ChangeSetStatus::Open.to_string(), + ], + ) + .await?; + + for row in rows { + result.push(Self::try_from(row)?); + } + + Ok(result) + } + + pub async fn apply_to_base_change_set(&self, ctx: &DalContext) -> ChangeSetPointerResult<()> { + let to_rebase_change_set_id = self + .base_change_set_id + .ok_or(ChangeSetPointerError::NoBaseChangeSet(self.id))?; + let onto_workspace_snapshot_id = self + .workspace_snapshot_id + .ok_or(ChangeSetPointerError::NoWorkspaceSnapshot(self.id))?; + let rebase_request = RebaseRequest { + onto_workspace_snapshot_id, + onto_vector_clock_id: self.vector_clock_id(), + to_rebase_change_set_id, + }; + ctx.do_rebase_request(rebase_request).await?; + + Ok(()) + } +} + +impl std::fmt::Debug for ChangeSetPointer { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + f.debug_struct("ChangeSetPointer") + .field("id", &self.id.to_string()) + .field( + "base_change_set_id", + &self.base_change_set_id.map(|bcsid| bcsid.to_string()), + ) + .field( + "workspace_snapshot_id", + &self.workspace_snapshot_id.map(|wsid| wsid.to_string()), + ) + .finish() + } +} diff --git a/lib/dal/src/change_status.rs 
b/lib/dal/src/change_status.rs index 2f495723fe..d0d00210f1 100644 --- a/lib/dal/src/change_status.rs +++ b/lib/dal/src/change_status.rs @@ -1,35 +1,26 @@ -//! This module contains [`ComponentChangeStatus`]. - use serde::Deserialize; use serde::Serialize; -use si_data_pg::{PgError, PgRow}; use strum::{AsRefStr, Display, EnumString}; -use telemetry::prelude::*; -use thiserror::Error; - -use crate::standard_model::objects_from_rows; -use crate::TransactionsError; -use crate::{ComponentId, DalContext, Edge, StandardModelError}; -const LIST_MODIFIED_COMPONENTS: &str = - include_str!("queries/change_status/list_modified_components.sql"); -const LIST_ADDED_COMPONENTS: &str = include_str!("queries/change_status/list_added_components.sql"); -const LIST_DELETED_COMPONENTS: &str = - include_str!("queries/change_status/list_deleted_components.sql"); -const LIST_DELETED_EDGES: &str = include_str!("queries/change_status/edges_list_deleted.sql"); - -#[remain::sorted] -#[derive(Error, Debug)] -pub enum ChangeStatusError { - #[error("pg error: {0}")] - Pg(#[from] PgError), - #[error("standard model error: {0}")] - StandardModel(#[from] StandardModelError), - #[error("transactions error: {0}")] - Tranactions(#[from] TransactionsError), -} - -pub type ChangeStatusResult = Result; +// const LIST_MODIFIED_COMPONENTS: &str = +// include_str!("queries/change_status/list_modified_components.sql"); +// const LIST_ADDED_COMPONENTS: &str = include_str!("queries/change_status/list_added_components.sql"); +// const LIST_DELETED_COMPONENTS: &str = +// include_str!("queries/change_status/list_deleted_components.sql"); +// const LIST_DELETED_EDGES: &str = include_str!("queries/change_status/edges_list_deleted.sql"); +// +// #[remain::sorted] +// #[derive(Error, Debug)] +// pub enum ChangeStatusError { +// #[error("pg error: {0}")] +// Pg(#[from] PgError), +// #[error("standard model error: {0}")] +// StandardModel(#[from] StandardModelError), +// #[error("transactions error: {0}")] +// 
Tranactions(#[from] TransactionsError), +// } +// +// pub type ChangeStatusResult = Result; /// An enum representing the change_status of an entity in the [`ChangeSet`](crate::ChangeSet). #[remain::sorted] @@ -45,129 +36,129 @@ pub enum ChangeStatus { Unmodified, } -/// A collection of statistics for [`Components`](crate::Component) in the current -/// [`ChangeSet`](crate::ChangeSet). -#[derive(Deserialize, Serialize, Debug, Default, Clone)] -#[serde(rename_all = "camelCase")] -pub struct ComponentChangeStatus { - stats: Vec, -} - -impl ComponentChangeStatus { - pub async fn new(ctx: &DalContext) -> ChangeStatusResult { - let component_stats = if ctx.visibility().is_head() { - Self::default() - } else { - let added = Self::list_added(ctx).await?; - let deleted = Self::list_deleted(ctx).await?; - let modified = Self::list_modified(ctx).await?; - - let mut stats = Vec::new(); - stats.extend(added); - stats.extend(deleted); - stats.extend(modified); - Self { stats } - }; - Ok(component_stats) - } - - #[instrument(level = "debug", skip_all)] - pub async fn list_added( - ctx: &DalContext, - ) -> ChangeStatusResult> { - let rows = ctx - .txns() - .await? - .pg() - .query( - LIST_ADDED_COMPONENTS, - &[ctx.tenancy(), &ctx.visibility().change_set_pk], - ) - .await?; - ComponentChangeStatusGroup::new_from_rows(rows, ChangeStatus::Added) - } - - #[instrument(level = "debug", skip_all)] - pub async fn list_deleted( - ctx: &DalContext, - ) -> ChangeStatusResult> { - let rows = ctx - .txns() - .await? - .pg() - .query( - LIST_DELETED_COMPONENTS, - &[ctx.tenancy(), &ctx.visibility().change_set_pk], - ) - .await?; - ComponentChangeStatusGroup::new_from_rows(rows, ChangeStatus::Deleted) - } - - #[instrument(level = "debug", skip_all)] - pub async fn list_modified( - ctx: &DalContext, - ) -> ChangeStatusResult> { - if ctx.visibility().is_head() { - return Ok(vec![]); - } - - let rows = ctx - .txns() - .await? 
- .pg() - .query( - LIST_MODIFIED_COMPONENTS, - &[ctx.tenancy(), &ctx.visibility().change_set_pk], - ) - .await?; - ComponentChangeStatusGroup::new_from_rows(rows, ChangeStatus::Modified) - } -} - -/// An individual unit containing metadata for each "counting" statistic. -#[derive(Deserialize, Serialize, Debug, Clone)] -#[serde(rename_all = "camelCase")] -pub struct ComponentChangeStatusGroup { - pub component_id: ComponentId, - component_name: String, - pub component_status: ChangeStatus, -} - -impl ComponentChangeStatusGroup { - pub fn new_from_rows( - rows: Vec, - component_status: ChangeStatus, - ) -> ChangeStatusResult> { - let mut result = Vec::new(); - for row in rows.into_iter() { - let component_id: ComponentId = row.try_get("component_id")?; - let component_name: Option = row.try_get("component_name")?; - let component_name = component_name.unwrap_or_else(|| "".to_owned()); - - result.push(Self { - component_id, - component_name, - component_status, - }); - } - Ok(result) - } -} - -pub struct EdgeChangeStatus; - -impl EdgeChangeStatus { - pub async fn list_deleted(ctx: &DalContext) -> ChangeStatusResult> { - let rows = ctx - .txns() - .await? - .pg() - .query( - LIST_DELETED_EDGES, - &[ctx.tenancy(), &ctx.visibility().change_set_pk], - ) - .await?; - - Ok(objects_from_rows(rows)?) - } -} +// /// A collection of statistics for [`Components`](crate::Component) in the current +// /// [`ChangeSet`](crate::ChangeSet). 
+// #[derive(Deserialize, Serialize, Debug, Default, Clone)] +// #[serde(rename_all = "camelCase")] +// pub struct ComponentChangeStatus { +// stats: Vec, +// } +// +// impl ComponentChangeStatus { +// pub async fn new(ctx: &DalContext) -> ChangeStatusResult { +// let component_stats = if ctx.visibility().is_head() { +// Self::default() +// } else { +// let added = Self::list_added(ctx).await?; +// let deleted = Self::list_deleted(ctx).await?; +// let modified = Self::list_modified(ctx).await?; +// +// let mut stats = Vec::new(); +// stats.extend(added); +// stats.extend(deleted); +// stats.extend(modified); +// Self { stats } +// }; +// Ok(component_stats) +// } +// +// #[instrument(skip_all)] +// pub async fn list_added( +// ctx: &DalContext, +// ) -> ChangeStatusResult> { +// let rows = ctx +// .txns() +// .await? +// .pg() +// .query( +// LIST_ADDED_COMPONENTS, +// &[ctx.tenancy(), &ctx.visibility().change_set_pk], +// ) +// .await?; +// ComponentChangeStatusGroup::new_from_rows(rows, ChangeStatus::Added) +// } +// +// #[instrument(skip_all)] +// pub async fn list_deleted( +// ctx: &DalContext, +// ) -> ChangeStatusResult> { +// let rows = ctx +// .txns() +// .await? +// .pg() +// .query( +// LIST_DELETED_COMPONENTS, +// &[ctx.tenancy(), &ctx.visibility().change_set_pk], +// ) +// .await?; +// ComponentChangeStatusGroup::new_from_rows(rows, ChangeStatus::Deleted) +// } +// +// #[instrument(skip_all)] +// pub async fn list_modified( +// ctx: &DalContext, +// ) -> ChangeStatusResult> { +// if ctx.visibility().is_head() { +// return Ok(vec![]); +// } +// +// let rows = ctx +// .txns() +// .await? +// .pg() +// .query( +// LIST_MODIFIED_COMPONENTS, +// &[ctx.tenancy(), &ctx.visibility().change_set_pk], +// ) +// .await?; +// ComponentChangeStatusGroup::new_from_rows(rows, ChangeStatus::Modified) +// } +// } +// +// /// An individual unit containing metadata for each "counting" statistic. 
+// #[derive(Deserialize, Serialize, Debug, Clone)] +// #[serde(rename_all = "camelCase")] +// pub struct ComponentChangeStatusGroup { +// pub component_id: ComponentId, +// component_name: String, +// pub component_status: ChangeStatus, +// } +// +// impl ComponentChangeStatusGroup { +// pub fn new_from_rows( +// rows: Vec, +// component_status: ChangeStatus, +// ) -> ChangeStatusResult> { +// let mut result = Vec::new(); +// for row in rows.into_iter() { +// let component_id: ComponentId = row.try_get("component_id")?; +// let component_name: Option = row.try_get("component_name")?; +// let component_name = component_name.unwrap_or_else(|| "".to_owned()); +// +// result.push(Self { +// component_id, +// component_name, +// component_status, +// }); +// } +// Ok(result) +// } +// } +// +// pub struct EdgeChangeStatus; +// +// impl EdgeChangeStatus { +// pub async fn list_deleted(ctx: &DalContext) -> ChangeStatusResult> { +// let rows = ctx +// .txns() +// .await? +// .pg() +// .query( +// LIST_DELETED_EDGES, +// &[ctx.tenancy(), &ctx.visibility().change_set_pk], +// ) +// .await?; +// +// Ok(objects_from_rows(rows)?) +// } +// } diff --git a/lib/dal/src/component.rs b/lib/dal/src/component.rs index 262a0403fb..cfc8b5cd7b 100644 --- a/lib/dal/src/component.rs +++ b/lib/dal/src/component.rs @@ -1,207 +1,133 @@ //! This module contains [`Component`], which is an instance of a //! [`SchemaVariant`](crate::SchemaVariant) and a _model_ of a "real world resource". 
-use std::collections::{HashMap, VecDeque}; - -use chrono::{DateTime, Utc}; use serde::{Deserialize, Serialize}; -use strum::{AsRefStr, Display, EnumIter, EnumString}; +use std::collections::{hash_map, HashMap, VecDeque}; +use std::hash::Hash; +use strum::{AsRefStr, Display, EnumDiscriminants, EnumIter, EnumString}; +use telemetry::prelude::*; use thiserror::Error; +use tokio::sync::TryLockError; +use ulid::Ulid; -use si_data_nats::NatsError; -use si_data_pg::PgError; -use telemetry::prelude::*; -pub use view::{ComponentView, ComponentViewError, ComponentViewProperties}; - -use crate::attribute::context::AttributeContextBuilder; -use crate::attribute::value::AttributeValue; -use crate::attribute::value::AttributeValueError; -use crate::code_view::CodeViewError; -use crate::diagram::summary_diagram::update_socket_summary; -use crate::edge::EdgeKind; -use crate::func::binding::FuncBindingError; -use crate::func::binding_return_value::{FuncBindingReturnValueError, FuncBindingReturnValueId}; -use crate::schema::variant::root_prop::SiPropChild; -use crate::schema::variant::{SchemaVariantError, SchemaVariantId}; -use crate::schema::SchemaVariant; -use crate::socket::{SocketEdgeKind, SocketError}; -use crate::standard_model::object_from_row; -use crate::ws_event::WsEventError; -use crate::ChangeSetPk; +use content_store::{ContentHash, Store, StoreError}; + +use crate::actor_view::ActorView; +use crate::attribute::prototype::argument::value_source::ValueSource; +use crate::attribute::prototype::argument::{ + AttributePrototypeArgument, AttributePrototypeArgumentError, AttributePrototypeArgumentId, +}; +use crate::attribute::value::{AttributeValueError, DependentValueGraph}; +use crate::change_set_pointer::ChangeSetPointerError; +use crate::history_event::HistoryEventMetadata; +use crate::job::definition::DependentValuesUpdate; +use crate::prop::{PropError, PropPath}; +use crate::provider::external::ExternalProviderError; +use 
crate::provider::internal::InternalProviderError; +use crate::qualification::QualificationError; +use crate::schema::variant::root_prop::component_type::ComponentType; +use crate::schema::variant::SchemaVariantError; +use crate::workspace_snapshot::content_address::{ContentAddress, ContentAddressDiscriminants}; +use crate::workspace_snapshot::edge_weight::{ + EdgeWeight, EdgeWeightError, EdgeWeightKind, EdgeWeightKindDiscriminants, +}; +use crate::workspace_snapshot::node_weight::attribute_prototype_argument_node_weight::ArgumentTargets; +use crate::workspace_snapshot::node_weight::category_node_weight::CategoryNodeKind; +use crate::workspace_snapshot::node_weight::{NodeWeight, NodeWeightError}; +use crate::workspace_snapshot::WorkspaceSnapshotError; use crate::{ - diagram, impl_standard_model, node::NodeId, pk, provider::internal::InternalProviderError, - standard_model, standard_model_accessor, standard_model_belongs_to, standard_model_has_many, - ActionPrototypeError, AttributeContext, AttributeContextBuilderError, AttributeContextError, - AttributePrototype, AttributePrototypeArgumentError, AttributePrototypeError, - AttributePrototypeId, AttributeReadContext, ComponentType, DalContext, EdgeError, - ExternalProviderError, FixError, FixId, Func, FuncBackendKind, FuncError, HistoryActor, - HistoryEventError, IndexMap, Node, NodeError, Prop, PropError, RootPropChild, Schema, - SchemaError, SchemaId, Socket, StandardModel, StandardModelError, Tenancy, Timestamp, - TransactionsError, UserPk, Visibility, WorkspaceError, WsEvent, WsEventResult, WsPayload, + pk, AttributeValue, AttributeValueId, ChangeSetPk, DalContext, ExternalProvider, + ExternalProviderId, InternalProvider, InternalProviderId, Prop, PropId, PropKind, + SchemaVariant, SchemaVariantId, StandardModelError, Timestamp, TransactionsError, WsEvent, + WsEventError, WsEventResult, WsPayload, }; -use crate::{AttributeValueId, QualificationError}; -use crate::{Edge, FixResolverError, NodeKind}; -pub mod code; 
-pub mod diff; -pub mod migrate; -pub mod qualification; pub mod resource; -pub mod status; -pub mod view; + +// pub mod code; +// pub mod diff; +pub mod frame; +pub mod qualification; +// pub mod status; +// pub mod validation; +// pub mod view; + +// pub use view::{ComponentView, ComponentViewError, ComponentViewProperties}; + +pub const DEFAULT_COMPONENT_X_POSITION: &str = "0"; +pub const DEFAULT_COMPONENT_Y_POSITION: &str = "0"; +pub const DEFAULT_COMPONENT_WIDTH: &str = "500"; +pub const DEFAULT_COMPONENT_HEIGHT: &str = "500"; #[remain::sorted] -#[derive(Error, Debug)] +#[derive(Debug, Error)] pub enum ComponentError { - #[error(transparent)] - ActionPrototype(#[from] ActionPrototypeError), - #[error("attribute context error: {0}")] - AttributeContext(#[from] AttributeContextError), - #[error("attribute context builder error: {0}")] - AttributeContextBuilder(#[from] AttributeContextBuilderError), - #[error(transparent)] - AttributePrototype(#[from] AttributePrototypeError), - /// Found an [`AttributePrototypeArgumentError`](crate::AttributePrototypeArgumentError). #[error("attribute prototype argument error: {0}")] AttributePrototypeArgument(#[from] AttributePrototypeArgumentError), - #[error("attribute prototype not found")] - AttributePrototypeNotFound, #[error("attribute value error: {0}")] AttributeValue(#[from] AttributeValueError), - #[error("attribute value not found")] - AttributeValueNotFound, - #[error("attribute value not found for context: {0:?}")] - AttributeValueNotFoundForContext(AttributeReadContext), - #[error("cannot update the resource tree when in a change set")] - CannotUpdateResourceTreeInChangeSet, - #[error(transparent)] - CodeView(#[from] CodeViewError), - #[error("component marked as protected: {0}")] - ComponentProtected(ComponentId), - /// No "protected" boolean was found for the appropriate - /// [`AttributeValue`](crate::AttributeValue) and [`Component`](crate::Component). 
In other - /// words, the value contained in the [`AttributeValue`](crate::AttributeValue) was "none". - #[error("component protection is none for component ({0}) and attribute value ({1}")] - ComponentProtectionIsNone(ComponentId, AttributeValueId), - /// No [`ComponentType`](crate::ComponentType) was found for the appropriate - /// [`AttributeValue`](crate::AttributeValue) and [`Component`](crate::Component). In other - /// words, the value contained in the [`AttributeValue`](crate::AttributeValue) was "none". - #[error("component type is none for component ({0}) and attribute value ({1})")] - ComponentTypeIsNone(ComponentId, AttributeValueId), - #[error(transparent)] - ComponentView(#[from] ComponentViewError), - #[error(transparent)] - ContextTransaction(#[from] TransactionsError), - #[error("edge error: {0}")] - Edge(#[from] EdgeError), - /// Found an [`ExternalProviderError`](crate::ExternalProviderError). + #[error("change set error: {0}")] + ChangeSet(#[from] ChangeSetPointerError), + #[error("component {0} has no attribute value for the root/si/color prop")] + ComponentMissingColorValue(ComponentId), + #[error("component {0} has no attribute value for the root/si/name prop")] + ComponentMissingNameValue(ComponentId), + #[error("component {0} has no attribute value for the root/si/type prop")] + ComponentMissingTypeValue(ComponentId), + #[error( + "connection destination component {0} has no attribute value for internal provider {1}" + )] + DestinationComponentMissingAttributeValueForInternalProvider(ComponentId, InternalProviderId), + #[error("edge weight error: {0}")] + EdgeWeight(#[from] EdgeWeightError), #[error("external provider error: {0}")] ExternalProvider(#[from] ExternalProviderError), - #[error("fix error: {0}")] - Fix(#[from] Box), - #[error("fix not found for id: {0}")] - FixNotFound(FixId), - #[error("fix resolver error: {0}")] - FixResolver(#[from] FixResolverError), - #[error("found child attribute value of a map without a key: {0}")] - 
FoundMapEntryWithoutKey(AttributeValueId), - #[error("unable to delete frame due to attached components")] - FrameHasAttachedComponents, - #[error("func error: {0}")] - Func(#[from] FuncError), - #[error("func binding error: {0}")] - FuncBinding(#[from] FuncBindingError), - #[error(transparent)] - FuncBindingReturnValue(#[from] FuncBindingReturnValueError), - #[error("func binding return value: {0} not found")] - FuncBindingReturnValueNotFound(FuncBindingReturnValueId), - #[error("history event error: {0}")] - HistoryEvent(#[from] HistoryEventError), - /// No "protected" boolean was found for the appropriate - #[error("component({0}) can't be restored because it's inside a deleted frame ({1})")] - InsideDeletedFrame(ComponentId, ComponentId), + #[error("external provider {0} has more than one attribute value")] + ExternalProviderTooManyAttributeValues(ExternalProviderId), #[error("internal provider error: {0}")] InternalProvider(#[from] InternalProviderError), - #[error("invalid context(s) provided for diff")] - InvalidContextForDiff, - #[error("invalid func backend kind (0:?) 
for checking validations (need validation kind)")] - InvalidFuncBackendKindForValidations(FuncBackendKind), - #[error("attribute value does not have a prototype: {0}")] - MissingAttributePrototype(AttributeValueId), - #[error("attribute prototype does not have a function: {0}")] - MissingAttributePrototypeFunction(AttributePrototypeId), - #[error("no func binding return value for leaf entry name: {0}")] - MissingFuncBindingReturnValueIdForLeafEntryName(String), - #[error("/root/si/name is unset for component {0}")] - NameIsUnset(ComponentId), - #[error("nats txn error: {0}")] - Nats(#[from] NatsError), - #[error("node error: {0}")] - NodeError(#[from] NodeError), - #[error("node not found for component: {0}")] - NodeNotFoundForComponent(ComponentId), - #[error("no schema for component {0}")] - NoSchema(ComponentId), - #[error("no schema variant for component {0}")] - NoSchemaVariant(ComponentId), - #[error("component not found: {0}")] - NotFound(ComponentId), - #[error("not found for node: {0}")] - NotFoundForNode(NodeId), - /// A parent [`AttributeValue`](crate::AttributeValue) was not found for the specified - /// [`AttributeValueId`](crate::AttributeValue). 
- #[error("parent attribute value not found for attribute value: {0}")] - ParentAttributeValueNotFound(AttributeValueId), - #[error("pg error: {0}")] - Pg(#[from] PgError), - #[error(transparent)] - PgPool(#[from] si_data_pg::PgPoolError), + #[error("internal provider {0} has more than one attribute value")] + InternalProviderTooManyAttributeValues(InternalProviderId), + #[error("component {0} missing attribute value for qualifications")] + MissingQualificationsValue(ComponentId), + #[error("found multiple parents for component: {0}")] + MultipleParentsForComponent(ComponentId), + #[error("found multiple root attribute values ({0} and {1}, at minimum) for component: {2}")] + MultipleRootAttributeValuesFound(AttributeValueId, AttributeValueId, ComponentId), + #[error("node weight error: {0}")] + NodeWeight(#[from] NodeWeightError), + #[error("object prop {0} has no ordering node")] + ObjectPropHasNoOrderingNode(PropId), #[error("prop error: {0}")] Prop(#[from] PropError), + #[error("found prop id ({0}) that is not a prop")] + PropIdNotAProp(PropId), #[error("qualification error: {0}")] Qualification(#[from] QualificationError), - #[error("qualification result for {0} on component {1} has no value")] - QualificationResultEmpty(String, ComponentId), - #[error("cannot restore non deleted component with id: {0}")] - RestoringNonDeleted(ComponentId), - #[error("schema error: {0}")] - Schema(#[from] SchemaError), + #[error("root attribute value not found for component: {0}")] + RootAttributeValueNotFound(ComponentId), #[error("schema variant error: {0}")] SchemaVariant(#[from] SchemaVariantError), - #[error("schema variant has not been finalized at least once: {0}")] - SchemaVariantNotFinalized(SchemaVariantId), - #[error("error serializing/deserializing json: {0}")] - SerdeJson(#[from] serde_json::Error), - #[error("socket error: {0}")] - Socket(#[from] SocketError), + #[error("schema variant not found for component: {0}")] + SchemaVariantNotFound(ComponentId), + 
#[error("serde_json error: {0}")] + Serde(#[from] serde_json::Error), #[error("standard model error: {0}")] - StandardModelError(#[from] StandardModelError), - #[error("summary diagram error: {0}")] - SummaryDiagram(String), - #[error("workspace error: {0}")] - Workspace(#[from] WorkspaceError), - #[error("ws event error: {0}")] + StandardModel(#[from] StandardModelError), + #[error("store error: {0}")] + Store(#[from] StoreError), + #[error("transactions error: {0}")] + Transactions(#[from] TransactionsError), + #[error("try lock error: {0}")] + TryLock(#[from] TryLockError), + #[error("workspace snapshot error: {0}")] + WorkspaceSnapshot(#[from] WorkspaceSnapshotError), + #[error("WsEvent error: {0}")] WsEvent(#[from] WsEventError), } pub type ComponentResult = Result; -const FIND_FOR_NODE: &str = include_str!("queries/component/find_for_node.sql"); -const FIND_SI_CHILD_PROP_ATTRIBUTE_VALUE: &str = - include_str!("queries/component/find_si_child_attribute_value.sql"); -const LIST_FOR_SCHEMA_VARIANT: &str = include_str!("queries/component/list_for_schema_variant.sql"); -const LIST_FOR_SCHEMA: &str = include_str!("queries/component/list_for_schema.sql"); -const LIST_SOCKETS_FOR_SOCKET_EDGE_KIND: &str = - include_str!("queries/component/list_sockets_for_socket_edge_kind.sql"); -const FIND_NAME: &str = include_str!("queries/component/find_name.sql"); -const ROOT_CHILD_ATTRIBUTE_VALUE_FOR_COMPONENT: &str = - include_str!("queries/component/root_child_attribute_value_for_component.sql"); -const LIST_INPUT_SOCKETS_FOR_ATTRIBUTE_VALUE: &str = - include_str!("queries/component/list_input_sockets_for_attribute_value.sql"); -const COMPONENT_STATUS_UPDATE_BY_PK: &str = - include_str!("queries/component/status_update_by_pk.sql"); - -pk!(ComponentPk); pk!(ComponentId); #[remain::sorted] @@ -231,1135 +157,856 @@ impl Default for ComponentKind { } } +#[derive(Clone, Debug)] +pub struct IncomingConnection { + pub attribute_prototype_argument_id: AttributePrototypeArgumentId, + 
pub to_component_id: ComponentId, + pub to_internal_provider_id: InternalProviderId, + pub from_component_id: ComponentId, + pub from_external_provider_id: ExternalProviderId, + pub created_info: HistoryEventMetadata, + pub deleted_info: Option, +} + /// A [`Component`] is an instantiation of a [`SchemaVariant`](crate::SchemaVariant). -/// -/// ## Updating "Fields" on a [`Component`] -/// -/// To learn more about updating a "field" on a [`Component`], please see the -/// [`AttributeValue module`](crate::attribute::value). #[derive(Deserialize, Serialize, Debug, Clone, PartialEq, Eq)] pub struct Component { - pk: ComponentPk, id: ComponentId, - kind: ComponentKind, - deletion_user_pk: Option, - needs_destroy: bool, - hidden: bool, - #[serde(flatten)] - tenancy: Tenancy, #[serde(flatten)] timestamp: Timestamp, - #[serde(flatten)] - visibility: Visibility, + kind: ComponentKind, + needs_destroy: bool, + x: String, + y: String, + width: Option, + height: Option, } -impl_standard_model! { - model: Component, - pk: ComponentPk, - id: ComponentId, - table_name: "components", - history_event_label_base: "component", - history_event_message_name: "Component" +#[derive(EnumDiscriminants, Serialize, Deserialize, PartialEq)] +pub enum ComponentContent { + V1(ComponentContentV1), } -impl Component { - /// The primary constructor method for creating [`Components`](Self). It returns a new - /// [`Component`] with a corresponding [`Node`](crate::Node). - /// - /// If you would like to use the default [`SchemaVariant`](crate::SchemaVariant) for - /// a [`Schema`](crate::Schema) rather than - /// a specific [`SchemaVariantId`](crate::SchemaVariant), use - /// [`Self::new_for_default_variant_from_schema()`]. 
- #[instrument(level = "info", skip(ctx, name), fields(name = name.as_ref()))] - pub async fn new( - ctx: &DalContext, - name: impl AsRef, - schema_variant_id: SchemaVariantId, - ) -> ComponentResult<(Self, Node)> { - let schema_variant = SchemaVariant::get_by_id(ctx, &schema_variant_id) - .await? - .ok_or(SchemaVariantError::NotFound(schema_variant_id))?; +#[derive(Debug, Clone, Deserialize, Serialize, PartialEq)] +pub struct ComponentContentV1 { + pub timestamp: Timestamp, + pub kind: ComponentKind, + pub needs_destroy: bool, + pub x: String, + pub y: String, + pub width: Option, + pub height: Option, +} - // Ensure components are not created unless the variant has been finalized at least once. - if !schema_variant.finalized_once() { - return Err(ComponentError::SchemaVariantNotFinalized(schema_variant_id)); +impl From for ComponentContentV1 { + fn from(value: Component) -> Self { + Self { + timestamp: value.timestamp, + kind: value.kind, + needs_destroy: value.needs_destroy, + x: value.x, + y: value.y, + width: value.width, + height: value.height, } + } +} - let schema = schema_variant - .schema(ctx) - .await? - .ok_or(SchemaVariantError::MissingSchema(schema_variant_id))?; - let actor_user_pk = match ctx.history_actor() { - HistoryActor::User(user_pk) => Some(*user_pk), - _ => None, - }; - - let row = ctx - .txns() - .await? - .pg() - .query_one( - "SELECT object FROM component_create_v4($1, $2, $3, $4, $5)", - &[ - ctx.tenancy(), - ctx.visibility(), - &actor_user_pk, - &schema.component_kind().as_ref(), - schema_variant.id(), - ], - ) - .await?; - - let component: Component = standard_model::finish_create_from_row(ctx, row).await?; - - ctx.enqueue_dependencies_update_component(*component.id()) - .await?; - - // Need to flesh out node so that the template data is also included in the node we - // persist. But it isn't, - our node is anemic. 
- let node = Node::new(ctx, &NodeKind::Configuration).await?; - node.set_component(ctx, component.id()).await?; - - for prop in Prop::validation_props(ctx, *component.id()).await? { - Prop::run_validation( - ctx, - *prop.id(), - *component.id(), - None, - serde_json::Value::Null, - ) - .await; +impl Component { + pub fn assemble(id: ComponentId, inner: ComponentContentV1) -> Self { + Self { + id, + timestamp: inner.timestamp, + kind: inner.kind, + needs_destroy: inner.needs_destroy, + x: inner.x, + y: inner.y, + width: inner.width, + height: inner.height, } - - component.set_name(ctx, Some(name.as_ref())).await?; - - // We need to make sure that *ALL* functions are run, not just those that directly - // depend on the name being set. - let component_av_ids = AttributeValue::ids_for_component(ctx, component.id).await?; - ctx.enqueue_dependent_values_update(component_av_ids) - .await?; - - diagram::summary_diagram::create_component_entry( - ctx, - &component, - &node, - &schema, - &schema_variant, - ) - .await - .map_err(|e| ComponentError::SummaryDiagram(e.to_string()))?; - - Ok((component, node)) } - pub async fn root_attribute_value(&self, ctx: &DalContext) -> ComponentResult { - let schema_variant = self - .schema_variant(ctx) - .await? - .ok_or(ComponentError::NoSchemaVariant(self.id))?; - let root_prop_id = *schema_variant - .root_prop_id() - .ok_or(PropError::NotFoundAtPath("root".into(), *ctx.visibility()))?; - - let value_context = AttributeReadContext { - prop_id: Some(root_prop_id), - component_id: Some(self.id), - ..Default::default() - }; - - Ok(AttributeValue::find_for_context(ctx, value_context) - .await? - .ok_or(AttributeValueError::NotFoundForReadContext(value_context))?) + pub fn id(&self) -> ComponentId { + self.id } - pub async fn respin( - ctx: &DalContext, - component_id: ComponentId, - schema_variant_id: SchemaVariantId, - ) -> ComponentResult { - let row = ctx - .txns() - .await? 
- .pg() - .query_one( - "SELECT object FROM component_respin_v1($1, $2, $3, $4)", - &[ - ctx.tenancy(), - ctx.visibility(), - &component_id, - &schema_variant_id, - ], - ) - .await?; - - let component: Component = standard_model::finish_create_from_row(ctx, row).await?; - // TODO: we may also need to do an update to the `has_resource` property of the summary - update_socket_summary(ctx, &component) - .await - .map_err(|err| ComponentError::SummaryDiagram(err.to_string()))?; - - Ok(component) + pub fn x(&self) -> &str { + &self.x } - /// A secondary constructor method that finds the default - /// [`SchemaVariant`](crate::SchemaVariant) for a given [`SchemaId`](crate::Schema). Once found, - /// the [`primary constructor method`](Self::new) is called. - pub async fn new_for_default_variant_from_schema( - ctx: &DalContext, - name: impl AsRef, - schema_id: SchemaId, - ) -> ComponentResult<(Self, Node)> { - let schema = Schema::get_by_id(ctx, &schema_id) - .await? - .ok_or(SchemaError::NotFound(schema_id))?; - - let schema_variant_id = schema - .default_schema_variant_id() - .ok_or(SchemaError::NoDefaultVariant(schema_id))?; - - Self::new(ctx, name, *schema_variant_id).await + pub fn y(&self) -> &str { + &self.y } - standard_model_accessor!(kind, Enum(ComponentKind), ComponentResult); - standard_model_accessor!(needs_destroy, bool, ComponentResult); - standard_model_accessor!(hidden, bool, ComponentResult); - standard_model_accessor!(deletion_user_pk, Option, ComponentResult); - - standard_model_belongs_to!( - lookup_fn: schema, - set_fn: set_schema, - unset_fn: unset_schema, - table: "component_belongs_to_schema", - model_table: "schemas", - belongs_to_id: SchemaId, - returns: Schema, - result: ComponentResult, - ); - - standard_model_belongs_to!( - lookup_fn: schema_variant, - set_fn: set_schema_variant, - unset_fn: unset_schema_variant, - table: "component_belongs_to_schema_variant", - model_table: "schema_variants", - belongs_to_id: SchemaVariantId, - returns: 
SchemaVariant, - result: ComponentResult, - ); - - standard_model_has_many!( - lookup_fn: node, - table: "node_belongs_to_component", - model_table: "nodes", - returns: Node, - result: ComponentResult, - ); - - pub fn tenancy(&self) -> &Tenancy { - &self.tenancy + pub fn width(&self) -> Option<&str> { + self.width.as_deref() } - /// List [`Sockets`](crate::Socket) with a given - /// [`SocketEdgeKind`](crate::socket::SocketEdgeKind). - pub async fn list_sockets_for_kind( - ctx: &DalContext, - component_id: ComponentId, - socket_edge_kind: SocketEdgeKind, - ) -> ComponentResult> { - let rows = ctx - .txns() - .await? - .pg() - .query( - LIST_SOCKETS_FOR_SOCKET_EDGE_KIND, - &[ - ctx.tenancy(), - ctx.visibility(), - &component_id, - &(socket_edge_kind.to_string()), - ], - ) - .await?; - Ok(standard_model::objects_from_rows(rows)?) + pub fn height(&self) -> Option<&str> { + self.height.as_deref() } - /// Find [`Self`] with a provided [`NodeId`](crate::Node). - pub async fn find_for_node(ctx: &DalContext, node_id: NodeId) -> ComponentResult> { - let row = ctx - .txns() - .await? - .pg() - .query_opt(FIND_FOR_NODE, &[ctx.tenancy(), ctx.visibility(), &node_id]) - .await?; - Ok(standard_model::object_option_from_row_option(row)?) + pub fn timestamp(&self) -> &Timestamp { + &self.timestamp } - /// Find the [`AttributeValue`](crate::AttributeValue) whose - /// [`context`](crate::AttributeContext) corresponds to the following: - /// - /// - The [`PropId`](crate::Prop) corresponding to the child [`Prop`](crate::Prop) of "/root/si" - /// whose name matches the provided - /// [`SiPropChild`](crate::schema::variant::root_prop::SiPropChild) - /// - The [`ComponentId`](Self) matching the provided [`ComponentId`](Self). - /// - /// _Note:_ if the type has never been updated, this will find the _default_ - /// [`AttributeValue`](crate::AttributeValue) where the [`ComponentId`](Self) is unset. 
- pub async fn find_si_child_attribute_value( + pub async fn materialized_view( + &self, ctx: &DalContext, - component_id: ComponentId, - schema_variant_id: SchemaVariantId, - si_prop_child: SiPropChild, - ) -> ComponentResult { - let si_child_prop_name = si_prop_child.prop_name(); - let row = ctx - .txns() - .await? - .pg() - .query_one( - FIND_SI_CHILD_PROP_ATTRIBUTE_VALUE, - &[ - ctx.tenancy(), - ctx.visibility(), - &component_id, - &schema_variant_id, - &si_child_prop_name, - ], - ) - .await?; - Ok(object_from_row(row)?) - } + ) -> ComponentResult> { + let schema_variant_id = Self::schema_variant_id(ctx, self.id()).await?; + let root_prop_id = + Prop::find_prop_id_by_path(ctx, schema_variant_id, &PropPath::new(["root"])).await?; + + let root_value_ids = Prop::attribute_values_for_prop_id(ctx, root_prop_id).await?; + for value_id in root_value_ids { + let value_component_id = AttributeValue::component_id(ctx, value_id).await?; + if value_component_id == self.id() { + let root_value = AttributeValue::get_by_id(ctx, value_id).await?; + return Ok(root_value.materialized_view(ctx).await?); + } + } - pub async fn is_in_tenancy(ctx: &DalContext, id: ComponentId) -> ComponentResult { - let row = ctx - .txns() - .await? - .pg() - .query_opt( - "SELECT id FROM components WHERE id = $1 AND in_tenancy_v1($2, components.tenancy_workspace_pk) LIMIT 1", - &[ - &id, - ctx.tenancy(), - ], - ) - .await?; - Ok(row.is_some()) + // Should this be an error? + Ok(None) } - pub async fn list_for_schema( + pub async fn new( ctx: &DalContext, - schema_id: SchemaId, - ) -> ComponentResult> { - let rows = ctx - .txns() - .await? 
- .pg() - .query( - LIST_FOR_SCHEMA, - &[ctx.tenancy(), ctx.visibility(), &schema_id], - ) - .await?; + name: impl Into, + schema_variant_id: SchemaVariantId, + component_kind: Option, + ) -> ComponentResult { + let name: String = name.into(); + let kind = match component_kind { + Some(provided_kind) => provided_kind, + None => ComponentKind::Standard, + }; - let mut results = Vec::new(); - for row in rows.into_iter() { - let json: serde_json::Value = row.try_get("object")?; - let object: Self = serde_json::from_value(json)?; - results.push(object); + let content = ComponentContentV1 { + kind, + timestamp: Timestamp::now(), + needs_destroy: false, + x: DEFAULT_COMPONENT_X_POSITION.to_string(), + y: DEFAULT_COMPONENT_Y_POSITION.to_string(), + width: None, + height: None, + }; + let hash = ctx + .content_store() + .lock() + .await + .add(&ComponentContent::V1(content.clone()))?; + + let change_set = ctx.change_set_pointer()?; + let id = change_set.generate_ulid()?; + let node_weight = NodeWeight::new_content(change_set, id, ContentAddress::Component(hash))?; + + // Attach component to category and add use edge to schema variant + { + let mut workspace_snapshot = ctx.workspace_snapshot()?.write().await; + workspace_snapshot.add_node(node_weight)?; + + // Root --> Component Category --> Component (this) + let component_category_id = + workspace_snapshot.get_category_node(None, CategoryNodeKind::Component)?; + workspace_snapshot.add_edge( + component_category_id, + EdgeWeight::new(change_set, EdgeWeightKind::Use)?, + id, + )?; + + // Component (this) --> Schema Variant + workspace_snapshot.add_edge( + id, + EdgeWeight::new(change_set, EdgeWeightKind::Use)?, + schema_variant_id, + )?; } - Ok(results) - } + let mut attribute_values = vec![]; - pub async fn list_for_schema_variant( - ctx: &DalContext, - schema_variant_id: SchemaVariantId, - ) -> ComponentResult> { - let rows = ctx - .txns() - .await? 
- .pg() - .query( - LIST_FOR_SCHEMA_VARIANT, - &[ctx.tenancy(), ctx.visibility(), &schema_variant_id], - ) - .await?; + // Create attribute values for all providers corresponding to input and output sockets. + for internal_provider_id in + InternalProvider::list_ids_for_schema_variant(ctx, schema_variant_id).await? + { + let attribute_value = + AttributeValue::new(ctx, internal_provider_id, Some(id.into()), None, None).await?; - let mut results = Vec::new(); - for row in rows.into_iter() { - let json: serde_json::Value = row.try_get("object")?; - let object: Self = serde_json::from_value(json)?; - results.push(object); + attribute_values.push(attribute_value.id()); } + for external_provider_id in + ExternalProvider::list_ids_for_schema_variant(ctx, schema_variant_id).await? + { + let attribute_value = + AttributeValue::new(ctx, external_provider_id, Some(id.into()), None, None).await?; - Ok(results) - } + attribute_values.push(attribute_value.id()); + } - /// Sets the "/root/si/name" for [`self`](Self). - pub async fn set_name( - &self, - ctx: &DalContext, - value: Option, - ) -> ComponentResult<()> { - let schema_variant_id = Self::schema_variant_id(ctx, self.id).await?; - let attribute_value = - Self::find_si_child_attribute_value(ctx, self.id, schema_variant_id, SiPropChild::Name) - .await?; + // Walk all the props for the schema variant and create attribute values for all of them + let root_prop_id = SchemaVariant::get_root_prop_id(ctx, schema_variant_id).await?; + let mut work_queue = VecDeque::from([(root_prop_id, None::, None)]); + + while let Some((prop_id, maybe_parent_attribute_value_id, key)) = work_queue.pop_front() { + // If we came in with a key, we're the child of a map. 
We should not descend deeper + // into it because the value should be governed by its prototype function and will + // create child values when that function is executed + let should_descend = key.is_none(); + + // Ensure that we are processing a prop before creating attribute values. Cache the + // prop kind for later. + let prop_kind = { + let workspace_snapshot = ctx.workspace_snapshot()?.read().await; + + workspace_snapshot + .get_node_weight_by_id(prop_id)? + .get_prop_node_weight()? + .kind() + }; - // Before we set the name, ensure that another function is not setting the name (e.g. - // something different than "unset" or "setString"). - let attribute_prototype = attribute_value - .attribute_prototype(ctx) - .await? - .ok_or_else(|| ComponentError::MissingAttributePrototype(*attribute_value.id()))?; - let prototype_func = Func::get_by_id(ctx, &attribute_prototype.func_id()) - .await? - .ok_or_else(|| { - ComponentError::MissingAttributePrototypeFunction(*attribute_prototype.id()) - })?; - let name = prototype_func.name(); - if name != "si:unset" && name != "si:setString" { - return Ok(()); + // Create an attribute value for the prop. + let attribute_value = AttributeValue::new( + ctx, + prop_id, + Some(id.into()), + maybe_parent_attribute_value_id, + key, + ) + .await?; + + attribute_values.push(attribute_value.id()); + + if should_descend { + match prop_kind { + PropKind::Object => { + let workspace_snapshot = ctx.workspace_snapshot()?.read().await; + let ordering_node_weight = workspace_snapshot + .ordering_node_for_container(prop_id)? + .ok_or(ComponentError::ObjectPropHasNoOrderingNode(prop_id))?; + + for &child_prop_id in ordering_node_weight.order() { + work_queue.push_back(( + child_prop_id.into(), + Some(attribute_value.id()), + None, + )); + } + } + PropKind::Map => { + let element_prop_id = + Prop::direct_single_child_prop_id(ctx, prop_id).await?; + + for (key, _) in Prop::prototypes_by_key(ctx, element_prop_id).await? 
{ + if key.is_some() { + work_queue.push_back(( + element_prop_id, + Some(attribute_value.id()), + key, + )) + } + } + } + _ => {} + } + } } - let attribute_context = AttributeContext::builder() - .set_component_id(self.id) - .set_prop_id(attribute_value.context.prop_id()) - .to_context()?; + let component = Self::assemble(id.into(), content); - let json_value = match value.clone() { - Some(v) => Some(serde_json::to_value(v)?), - None => None, - }; + component.set_name(ctx, &name).await?; - let parent_attribute_value = attribute_value - .parent_attribute_value(ctx) - .await? - .ok_or_else(|| ComponentError::ParentAttributeValueNotFound(*attribute_value.id()))?; - let (_, _) = AttributeValue::update_for_context( - ctx, - *attribute_value.id(), - Some(*parent_attribute_value.id()), - attribute_context, - json_value, - None, - ) + let component_graph = DependentValueGraph::for_values(ctx, attribute_values).await?; + let leaf_value_ids = component_graph.independent_values(); + for leaf_value_id in &leaf_value_ids { + // Run these concurrently in a join set? They will serialize on the lock... + AttributeValue::update_from_prototype_function(ctx, *leaf_value_id).await?; + } + ctx.enqueue_job(DependentValuesUpdate::new( + ctx.access_builder(), + *ctx.visibility(), + leaf_value_ids, + )) .await?; - Ok(()) + Ok(component) } - pub async fn set_deleted_at( + pub async fn incoming_connections( &self, ctx: &DalContext, - value: Option>, - ) -> ComponentResult>> { - let json_value = match value { - Some(v) => Some(serde_json::to_value(v)?), - None => None, - }; - - let attribute_value = Self::root_prop_child_attribute_value_for_component( - ctx, - self.id, - RootPropChild::DeletedAt, - ) - .await?; - let parent_attribute_value = attribute_value - .parent_attribute_value(ctx) - .await? 
- .ok_or_else(|| ComponentError::ParentAttributeValueNotFound(*attribute_value.id()))?; - let attribute_context = AttributeContext::builder() - .set_component_id(self.id) - .set_prop_id(attribute_value.context.prop_id()) - .to_context()?; - let (_, _) = AttributeValue::update_for_context( - ctx, - *attribute_value.id(), - Some(*parent_attribute_value.id()), - attribute_context, - json_value, - None, - ) - .await?; + ) -> ComponentResult> { + let mut incoming_edges = vec![]; + for (to_internal_provider_id, to_value_id) in + self.internal_provider_attribute_values(ctx).await? + { + let prototype_id = AttributeValue::prototype_id(ctx, to_value_id).await?; + for apa_id in + AttributePrototypeArgument::list_ids_for_prototype(ctx, prototype_id).await? + { + let apa = AttributePrototypeArgument::get_by_id(ctx, apa_id).await?; + + let created_info = { + let history_actor = ctx.history_actor(); + let actor = ActorView::from_history_actor(ctx, *history_actor).await?; + HistoryEventMetadata { + actor, + timestamp: apa.timestamp().created_at, + } + }; + + if let Some(ArgumentTargets { + source_component_id, + .. + }) = apa.targets() + { + if let Some(ValueSource::ExternalProvider(from_external_provider_id)) = + apa.value_source(ctx).await? + { + incoming_edges.push(IncomingConnection { + attribute_prototype_argument_id: apa_id, + to_component_id: self.id(), + from_component_id: source_component_id, + to_internal_provider_id, + from_external_provider_id, + created_info, + deleted_info: None, + }); + } + } + } + } - Ok(value) + Ok(incoming_edges) } - /// Return the name of the [`Component`](Self) for the provided [`ComponentId`](Self). - pub async fn find_name(ctx: &DalContext, component_id: ComponentId) -> ComponentResult { - let component_name = ComponentView::new(ctx, component_id) - .await? 
- .properties - .pointer("/si/name") - .cloned() - .unwrap_or(serde_json::Value::Null); - - let component_name: Option = serde_json::from_value(component_name)?; - let component_name = if let Some(name) = component_name { - name + pub async fn parent(&self, ctx: &DalContext) -> ComponentResult> { + let workspace_snapshot = ctx.workspace_snapshot()?.read().await; + let mut raw_sources = workspace_snapshot.incoming_sources_for_edge_weight_kind( + self.id, + EdgeWeightKindDiscriminants::FrameContains, + )?; + + let maybe_parent = if let Some(raw_parent) = raw_sources.pop() { + if !raw_sources.is_empty() { + return Err(ComponentError::MultipleParentsForComponent(self.id)); + } + Some(workspace_snapshot.get_node_weight(raw_parent)?.id().into()) } else { - let row = ctx - .txns() - .await? - .pg() - .query_one(FIND_NAME, &[ctx.tenancy(), ctx.visibility(), &component_id]) - .await?; - let component_name: serde_json::Value = row.try_get("component_name")?; - let component_name: Option = serde_json::from_value(component_name)?; - component_name.ok_or(ComponentError::NameIsUnset(component_id))? + None }; - Ok(component_name) - } - /// Calls [`Self::find_name()`] and provides the "id" off [`self`](Self). - pub async fn name(&self, ctx: &DalContext) -> ComponentResult { - Self::find_name(ctx, self.id).await + Ok(maybe_parent) } - /// Grabs the [`AttributeValue`](crate::AttributeValue) corresponding to the - /// [`RootPropChild`](crate::RootPropChild) [`Prop`](crate::Prop) for the given - /// [`Component`](Self). 
- pub async fn root_prop_child_attribute_value_for_component( + async fn get_content_with_hash( ctx: &DalContext, component_id: ComponentId, - root_prop_child: RootPropChild, - ) -> ComponentResult { - let row = ctx - .txns() + ) -> ComponentResult<(ContentHash, ComponentContentV1)> { + let workspace_snapshot = ctx.workspace_snapshot()?.read().await; + let id: Ulid = component_id.into(); + let node_index = workspace_snapshot.get_node_index_by_id(id)?; + let node_weight = workspace_snapshot.get_node_weight(node_index)?; + let hash = node_weight.content_hash(); + + let content: ComponentContent = ctx + .content_store() + .lock() + .await + .get(&hash) .await? - .pg() - .query_one( - ROOT_CHILD_ATTRIBUTE_VALUE_FOR_COMPONENT, - &[ - ctx.tenancy(), - ctx.visibility(), - &root_prop_child.as_str(), - &component_id, - ], - ) - .await?; - Ok(object_from_row(row)?) + .ok_or(WorkspaceSnapshotError::MissingContentFromStore(id))?; + + // NOTE(nick,jacob,zack): if we had a v2, then there would be migration logic here. + let ComponentContent::V1(inner) = content; + + Ok((hash, inner)) } - /// List the connected input [`Sockets`](crate::Socket) for a given [`ComponentId`](Self) and - /// [`AttributeValueId`](crate::AttributeValue) whose [`context`](crate::AttributeContext)'s - /// least specific field corresponding to a [`PropId`](crate::Prop). In other words, this is - /// the list of input [`Sockets`](crate::Socket) with incoming connections from other - /// [`Component(s)`](Self) that the given [`AttributeValue`](crate::AttributeValue) depends on. 
- /// - /// ```raw - /// ┌────────────────────────────┐ - /// │ This │ - /// │ Component │ - /// ┌───────────┐ │ ┌────────────────┐ │ - /// │ Another │ │ ┌───►│ AttributeValue │ │ - /// │ Component │ │ │ │ for Prop │ │ - /// │ │ │ │ └────────────────┘ │ - /// │ ┌────────┤ ├────┴─────────┐ │ - /// │ │ Output ├───────►│ Input │ │ - /// │ │ Socket │ │ Socket │ │ - /// │ │ │ │ (list these) │ │ - /// └──┴────────┘ └──────────────┴─────────────┘ - /// ``` - /// - /// _Warning: users of this query must ensure that the - /// [`AttributeValueId`](crate::AttributeValue) provided has a - /// [`context`](crate::AttributeContext) whose least specific field corresponds to a - /// [`PropId`](crate::Prop)._ - #[instrument(level = "debug", skip_all)] - pub async fn list_input_sockets_for_attribute_value( - ctx: &DalContext, - attribute_value_id: AttributeValueId, - component_id: ComponentId, - ) -> ComponentResult> { - let rows = ctx - .txns() - .await? - .pg() - .query( - LIST_INPUT_SOCKETS_FOR_ATTRIBUTE_VALUE, - &[ - ctx.tenancy(), - ctx.visibility(), - &attribute_value_id, - &component_id, - ], - ) + pub async fn list(ctx: &DalContext) -> ComponentResult> { + let workspace_snapshot = ctx.workspace_snapshot()?.read().await; + + let mut components = vec![]; + let component_category_node_id = + workspace_snapshot.get_category_node(None, CategoryNodeKind::Component)?; + + let component_node_indices = workspace_snapshot.outgoing_targets_for_edge_weight_kind( + component_category_node_id, + EdgeWeightKindDiscriminants::Use, + )?; + + let mut node_weights = vec![]; + let mut hashes = vec![]; + for index in component_node_indices { + let node_weight = workspace_snapshot + .get_node_weight(index)? 
+ .get_content_node_weight_of_kind(ContentAddressDiscriminants::Component)?; + hashes.push(node_weight.content_hash()); + node_weights.push(node_weight); + } + + let contents: HashMap = ctx + .content_store() + .lock() + .await + .get_bulk(hashes.as_slice()) .await?; - let mut result = Vec::new(); - for row in rows.into_iter() { - let json: serde_json::Value = row.try_get("object")?; - let object: Socket = serde_json::from_value(json)?; - let has_edge_connected: bool = row.try_get("has_edge_connected")?; - result.push((object, has_edge_connected)); + for node_weight in node_weights { + match contents.get(&node_weight.content_hash()) { + Some(content) => { + // NOTE(nick,jacob,zack): if we had a v2, then there would be migration logic here. + let ComponentContent::V1(inner) = content; + + components.push(Self::assemble(node_weight.id().into(), inner.to_owned())); + } + None => Err(WorkspaceSnapshotError::MissingContentFromStore( + node_weight.id(), + ))?, + } } - Ok(result) + + Ok(components) } - /// Find the [`SchemaVariantId`](crate::SchemaVariantId) that belongs to the provided - /// [`Component`](crate::Component). - pub async fn schema_variant_id( + pub async fn schema_variant_for_component_id( ctx: &DalContext, component_id: ComponentId, - ) -> ComponentResult { - let row = ctx - .txns() - .await? - .pg() - .query_one( - "select belongs_to_id as schema_variant_id from - component_belongs_to_schema_variant_v1($1, $2) - where object_id = $3 - ", - &[ctx.tenancy(), ctx.visibility(), &component_id], - ) - .await?; + ) -> ComponentResult { + let schema_variant_id = Self::schema_variant_id(ctx, component_id).await?; + Ok(SchemaVariant::get_by_id(ctx, schema_variant_id).await?) + } - Ok(row.try_get("schema_variant_id")?) + pub async fn schema_variant(&self, ctx: &DalContext) -> ComponentResult { + Self::schema_variant_for_component_id(ctx, self.id).await } - /// Find the [`SchemaId`](crate::SchemaId) that belongs to the provided - /// [`Component`](crate::Component). 
- pub async fn schema_id( + pub async fn schema_variant_id( ctx: &DalContext, component_id: ComponentId, - ) -> ComponentResult { - let row = ctx - .txns() - .await? - .pg() - .query_one( - "select belongs_to_id as schema_id from - component_belongs_to_schema_v1($1, $2) - where object_id = $3 - ", - &[ctx.tenancy(), ctx.visibility(), &component_id], - ) - .await?; - - Ok(row.try_get("schema_id")?) - } - - /// Gets the [`ComponentType`](crate::ComponentType) of [`self`](Self). - /// - /// Mutate this with [`Self::set_type()`]. - pub async fn get_type(&self, ctx: &DalContext) -> ComponentResult { - let schema_variant_id = Self::schema_variant_id(ctx, self.id).await?; - let type_attribute_value = - Self::find_si_child_attribute_value(ctx, self.id, schema_variant_id, SiPropChild::Type) - .await?; - let raw_value = type_attribute_value.get_value(ctx).await?.ok_or_else(|| { - ComponentError::ComponentTypeIsNone(self.id, *type_attribute_value.id()) - })?; - let component_type: ComponentType = serde_json::from_value(raw_value)?; - Ok(component_type) - } + ) -> ComponentResult { + let workspace_snapshot = ctx.workspace_snapshot()?.read().await; - /// Gets the protected attribute value of [`self`](Self). - pub async fn get_protected(&self, ctx: &DalContext) -> ComponentResult { - let schema_variant_id = Self::schema_variant_id(ctx, self.id).await?; - let protected_attribute_value = Self::find_si_child_attribute_value( - ctx, - self.id, - schema_variant_id, - SiPropChild::Protected, - ) - .await?; - let raw_value = protected_attribute_value - .get_value(ctx) - .await? 
- .ok_or_else(|| { - ComponentError::ComponentProtectionIsNone(self.id, *protected_attribute_value.id()) - })?; - let protected: bool = serde_json::from_value(raw_value)?; - Ok(protected) - } + let maybe_schema_variant_indices = workspace_snapshot + .outgoing_targets_for_edge_weight_kind( + component_id, + EdgeWeightKindDiscriminants::Use, + )?; - /// Sets the field corresponding to "/root/si/type" for the [`Component`]. Possible values - /// are limited to variants of [`ComponentType`](crate::ComponentType). - pub async fn set_type( - &self, - ctx: &DalContext, - component_type: ComponentType, - ) -> ComponentResult<()> { - //when we change the component_type we need to do 2 things: - //1. remove all symbollic edges to children of that component (for example if changing from a up/down frame) - //2. if the component has a parent, we need to create a symbollic edge between what was formerly grandparent -> child relationships - - let edges = Edge::list_for_component(ctx, self.id).await?; - let mut children_of_frame_connections = Vec::new(); - let mut maybe_grandparent_node_id: Option = None; - - for mut edge in edges { - if *edge.kind() == EdgeKind::Symbolic { - if edge.tail_component_id() == self.id { - // this node is a tail, so this edge is to a grandparent - // let's grab the edge so we can create edges between any children this component has - maybe_grandparent_node_id = Some(edge.clone()); - } else if edge.head_component_id() == self.id { - children_of_frame_connections.push(edge.clone()); - edge.delete_and_propagate(ctx).await?; + let mut schema_variant_id: Option = None; + for maybe_schema_variant_index in maybe_schema_variant_indices { + if let NodeWeight::Content(content) = + workspace_snapshot.get_node_weight(maybe_schema_variant_index)? 
+ { + let content_hash_discriminants: ContentAddressDiscriminants = + content.content_address().into(); + if let ContentAddressDiscriminants::SchemaVariant = content_hash_discriminants { + // TODO(nick): consider creating a new edge weight kind to make this easier. + // We also should use a proper error here. + schema_variant_id = match schema_variant_id { + None => Some(content.id().into()), + Some(_already_found_schema_variant_id) => { + panic!("already found a schema variant") + } + }; } } } + let schema_variant_id = + schema_variant_id.ok_or(ComponentError::SchemaVariantNotFound(component_id))?; + Ok(schema_variant_id) + } + pub async fn get_by_id(ctx: &DalContext, component_id: ComponentId) -> ComponentResult { + let (_, content) = Self::get_content_with_hash(ctx, component_id).await?; + Ok(Self::assemble(component_id, content)) + } - //lets create the new symbolic edges from grandparent -> child - for edge in children_of_frame_connections { - if let Some(parent_edge) = &maybe_grandparent_node_id { - let _new_edge = Edge::new_for_connection( - ctx, - parent_edge.head_node_id(), - parent_edge.head_socket_id(), - edge.tail_node_id(), - edge.tail_socket_id(), - EdgeKind::Symbolic, - ) - .await?; - } + pub async fn set_geometry( + self, + ctx: &DalContext, + x: impl Into, + y: impl Into, + width: Option>, + height: Option>, + ) -> ComponentResult { + let id: ComponentId = self.id; + let mut component = self; + + let before = ComponentContentV1::from(component.clone()); + component.x = x.into(); + component.y = y.into(); + component.width = width.map(|w| w.into()); + component.height = height.map(|h| h.into()); + let updated = ComponentContentV1::from(component); + + if updated != before { + let hash = ctx + .content_store() + .lock() + .await + .add(&ComponentContent::V1(updated.clone()))?; + + let mut workspace_snapshot = ctx.workspace_snapshot()?.write().await; + workspace_snapshot.update_content(ctx.change_set_pointer()?, id.into(), hash)?; } - let 
schema_variant_id = Self::schema_variant_id(ctx, self.id).await?; - let type_attribute_value = - Self::find_si_child_attribute_value(ctx, self.id, schema_variant_id, SiPropChild::Type) - .await?; - - // If we are setting the type for the first time, we will need to mutate the context to - // be component-specific. This is because the attribute value will have an unset component - // id and we will need to deviate from the schema variant default component type. - let attribute_context = if type_attribute_value.context.is_component_unset() { - AttributeContextBuilder::from(type_attribute_value.context) - .set_component_id(self.id) - .to_context()? - } else { - type_attribute_value.context - }; + Ok(Self::assemble(id, updated)) + } - let si_attribute_value = type_attribute_value - .parent_attribute_value(ctx) + async fn set_name(&self, ctx: &DalContext, name: &str) -> ComponentResult<()> { + let av_for_name = self + .attribute_values_for_prop(ctx, &["root", "si", "name"]) .await? - .ok_or_else(|| { - ComponentError::ParentAttributeValueNotFound(*type_attribute_value.id()) - })?; - AttributeValue::update_for_context( + .into_iter() + .next() + .ok_or(ComponentError::ComponentMissingNameValue(self.id()))?; + + AttributeValue::update_no_dependent_values( ctx, - *type_attribute_value.id(), - Some(*si_attribute_value.id()), - attribute_context, - Some(serde_json::to_value(component_type)?), - None, + av_for_name, + Some(serde_json::to_value(name)?), ) .await?; Ok(()) } - pub async fn delete_and_propagate(&mut self, ctx: &DalContext) -> ComponentResult<()> { - let deletion_time = Utc::now(); - - // Block deletion of frames with children - if self.get_type(ctx).await? 
!= ComponentType::Component { - let connected_children = Edge::list_children_for_component(ctx, self.id).await?; - warn!("{:?}", connected_children); - if !connected_children.is_empty() { - return Err(ComponentError::FrameHasAttachedComponents); - } - } - - self.set_deleted_at(ctx, Some(deletion_time)).await?; + pub async fn name(&self, ctx: &DalContext) -> ComponentResult { + let name_value_id = self + .attribute_values_for_prop(ctx, &["root", "si", "name"]) + .await? + .into_iter() + .next() + .ok_or(ComponentError::ComponentMissingNameValue(self.id()))?; - if self.get_protected(ctx).await? { - return Err(ComponentError::ComponentProtected(self.id)); - } + let name_av = AttributeValue::get_by_id(ctx, name_value_id).await?; - let actor_user_pk = match ctx.history_actor() { - HistoryActor::User(user_pk) => Some(*user_pk), - _ => None, - }; + Ok(match name_av.materialized_view(ctx).await? { + Some(serde_value) => serde_json::from_value(serde_value)?, + None => "".into(), + }) + } - let has_resource = self.resource(ctx).await?.payload.is_some(); - let rows = ctx - .txns() + pub async fn color(&self, ctx: &DalContext) -> ComponentResult> { + let color_value_id = self + .attribute_values_for_prop(ctx, &["root", "si", "color"]) .await? - .pg() - .query( - "SELECT * FROM component_delete_and_propagate_v3($1, $2, $3, $4, $5)", - &[ - ctx.tenancy(), - ctx.visibility(), - self.id(), - &actor_user_pk, - &has_resource, - ], - ) - .await?; - let mut attr_values: Vec = standard_model::objects_from_rows(rows)?; + .into_iter() + .next() + .ok_or(ComponentError::ComponentMissingColorValue(self.id()))?; - for attr_value in attr_values.iter_mut() { - attr_value.update_from_prototype_function(ctx).await?; - } + let color_av = AttributeValue::get_by_id(ctx, color_value_id).await?; - let ids = attr_values.iter().map(|av| *av.id()).collect(); + Ok(match color_av.materialized_view(ctx).await? 
{ + Some(serde_value) => Some(serde_json::from_value(serde_value)?), + None => None, + }) + } - ctx.enqueue_dependent_values_update(ids).await?; + pub async fn get_type(&self, ctx: &DalContext) -> ComponentResult { + let type_value_id = self + .attribute_values_for_prop(ctx, &["root", "si", "type"]) + .await? + .into_iter() + .next() + .ok_or(ComponentError::ComponentMissingTypeValue(self.id()))?; - diagram::summary_diagram::component_update( - ctx, - self.id(), - self.name(ctx).await?, - self.color(ctx).await?.unwrap_or_default(), - self.get_type(ctx).await?, - self.resource(ctx).await?.payload.is_some(), - Some(deletion_time.to_string()), - ) - .await - .map_err(|e| ComponentError::SummaryDiagram(e.to_string()))?; + let type_value = AttributeValue::get_by_id(ctx, type_value_id) + .await? + .materialized_view(ctx) + .await? + .ok_or(ComponentError::ComponentMissingTypeValue(self.id()))?; - Ok(()) + Ok(serde_json::from_value(type_value)?) } - pub async fn restore_and_propagate( + pub async fn root_attribute_value_id( ctx: &DalContext, component_id: ComponentId, - ) -> ComponentResult> { - // Check if component has deleted frame before restoring - let component = { - let ctx_with_deleted = &ctx.clone_with_delete_visibility(); - - let component = Self::get_by_id(ctx_with_deleted, &component_id) - .await? - .ok_or_else(|| ComponentError::NotFound(component_id))?; - - let sockets = Socket::list_for_component(ctx_with_deleted, component_id).await?; - - let maybe_socket_to_parent = sockets.iter().find(|socket| { - socket.name() == "Frame" - && *socket.edge_kind() == SocketEdgeKind::ConfigurationOutput - }); + ) -> ComponentResult { + let workspace_snapshot = ctx.workspace_snapshot()?.read().await; + + let mut maybe_root_attribute_value_id = None; + for target in workspace_snapshot.outgoing_targets_for_edge_weight_kind( + component_id, + EdgeWeightKindDiscriminants::Root, + )? 
{ + let target_node_weight = workspace_snapshot.get_node_weight(target)?; + if let NodeWeight::AttributeValue(_) = target_node_weight { + maybe_root_attribute_value_id = match maybe_root_attribute_value_id { + Some(already_found_root_attribute_value_id) => { + return Err(ComponentError::MultipleRootAttributeValuesFound( + target_node_weight.id().into(), + already_found_root_attribute_value_id, + component_id, + )); + } + None => Some(target_node_weight.id().into()), + }; + } + } + maybe_root_attribute_value_id + .ok_or(ComponentError::RootAttributeValueNotFound(component_id)) + } - let edges_with_deleted = Edge::list(ctx_with_deleted).await?; + pub async fn external_provider_attribute_values_for_component_id( + ctx: &DalContext, + component_id: ComponentId, + ) -> ComponentResult> { + let mut result = HashMap::new(); - let mut maybe_deleted_parent_id = None; + let socket_values = Self::values_for_all_providers(ctx, component_id).await?; - if let Some(socket_to_parent) = maybe_socket_to_parent { - for edge in &edges_with_deleted { - if edge.tail_object_id() == (*component.id()).into() - && edge.tail_socket_id() == *socket_to_parent.id() - && (edge.visibility().deleted_at.is_some() && edge.deleted_implicitly()) - { - maybe_deleted_parent_id = Some(edge.head_object_id().into()); - break; + for socket_value_id in socket_values { + if let Some(external_provider_id) = AttributeValue::is_for(ctx, socket_value_id) + .await? + .external_provider_id() + { + match result.entry(external_provider_id) { + hash_map::Entry::Vacant(entry) => { + entry.insert(socket_value_id); + } + hash_map::Entry::Occupied(_) => { + return Err(ComponentError::ExternalProviderTooManyAttributeValues( + external_provider_id, + )) } - } - }; - - if let Some(parent_id) = maybe_deleted_parent_id { - let parent_comp = Self::get_by_id(ctx_with_deleted, &parent_id) - .await? 
- .ok_or_else(|| ComponentError::NotFound(parent_id))?; - - if parent_comp.visibility().deleted_at.is_some() { - return Err(ComponentError::InsideDeletedFrame(component_id, parent_id)); } } + } - component - }; - - component.set_deleted_at(ctx, None).await?; + Ok(result) + } - let rows = ctx - .txns() - .await? - .pg() - .query( - "SELECT * FROM component_restore_and_propagate_v2($1, $2, $3)", - &[ctx.tenancy(), ctx.visibility(), &component_id], - ) - .await?; - let mut attr_values: Vec = standard_model::objects_from_rows(rows)?; + pub async fn external_provider_attribute_values( + &self, + ctx: &DalContext, + ) -> ComponentResult> { + Self::external_provider_attribute_values_for_component_id(ctx, self.id()).await + } - for attr_value in &mut attr_values { - attr_value.update_from_prototype_function(ctx).await?; - } + pub async fn attribute_values_for_prop( + &self, + ctx: &DalContext, + prop_path: &[&str], + ) -> ComponentResult> { + let mut result = vec![]; - let ids = attr_values.iter().map(|av| *av.id()).collect(); + let schema_variant_id = Self::schema_variant_id(ctx, self.id()).await?; - ctx.enqueue_dependent_values_update(ids).await?; + let prop_id = + Prop::find_prop_id_by_path(ctx, schema_variant_id, &PropPath::new(prop_path)).await?; - diagram::summary_diagram::component_update( - ctx, - &component_id, - component.name(ctx).await?, - component.color(ctx).await?.unwrap_or_default(), - component.get_type(ctx).await?, - component.resource(ctx).await?.payload.is_some(), - None, - ) - .await - .map_err(|e| ComponentError::SummaryDiagram(e.to_string()))?; + for attribute_value_id in Prop::attribute_values_for_prop_id(ctx, prop_id).await? { + let value_component_id = AttributeValue::component_id(ctx, attribute_value_id).await?; + if value_component_id == self.id() { + result.push(attribute_value_id) + } + } - Ok(Component::get_by_id(ctx, &component_id).await?) 
+ Ok(result) } - /// Finds the "color" that the [`Component`] should be in the [`Diagram`](crate::Diagram). - pub async fn color(&self, ctx: &DalContext) -> ComponentResult> { - let schema_variant_id = Self::schema_variant_id(ctx, self.id).await?; - let color_attribute_value = Component::find_si_child_attribute_value( - ctx, - self.id, - schema_variant_id, - SiPropChild::Color, - ) - .await?; - let color = color_attribute_value - .get_value(ctx) - .await? - .map(serde_json::from_value) - .transpose()?; - Ok(color) - } + async fn values_for_all_providers( + ctx: &DalContext, + component_id: ComponentId, + ) -> ComponentResult> { + let mut socket_values: Vec = vec![]; + let workspace_snapshot = ctx.workspace_snapshot()?.read().await; + + for socket_target in workspace_snapshot.outgoing_targets_for_edge_weight_kind( + component_id, + EdgeWeightKindDiscriminants::Socket, + )? { + socket_values.push( + workspace_snapshot + .get_node_weight(socket_target)? + .get_attribute_value_node_weight()? + .id() + .into(), + ); + } - /// Check if the [`Component`] has been fully destroyed. 
- pub fn is_destroyed(&self) -> bool { - self.visibility.deleted_at.is_some() && !self.needs_destroy() + Ok(socket_values) } - pub async fn clone_attributes_from( - &self, + pub async fn internal_provider_attribute_values_for_component_id( ctx: &DalContext, component_id: ComponentId, - ) -> ComponentResult<()> { - let attribute_values = - AttributeValue::find_by_attr(ctx, "attribute_context_component_id", &component_id) - .await?; - let mut my_attribute_values = - AttributeValue::find_by_attr(ctx, "attribute_context_component_id", self.id()).await?; - - let mut pasted_attribute_values_by_original = HashMap::new(); + ) -> ComponentResult> { + let mut result = HashMap::new(); - let mut work_queue: VecDeque = attribute_values.iter().cloned().collect(); - while let Some(copied_av) = work_queue.pop_front() { - let context = AttributeContextBuilder::from(copied_av.context) - .set_component_id(*self.id()) - .to_context()?; + let socket_values = Self::values_for_all_providers(ctx, component_id).await?; - // TODO: should we clone the fb and fbrv? - let mut pasted_av = if let Some(av) = my_attribute_values - .iter_mut() - .find(|av| context.check(av.context)) - { - av.set_func_binding_id(ctx, copied_av.func_binding_id()) - .await?; - av.set_func_binding_return_value_id(ctx, copied_av.func_binding_return_value_id()) - .await?; - av.set_key(ctx, copied_av.key()).await?; - av.clone() - } else { - AttributeValue::new( - ctx, - copied_av.func_binding_id(), - copied_av.func_binding_return_value_id(), - context, - copied_av.key(), - ) + for socket_value_id in socket_values { + if let Some(internal_provider_id) = AttributeValue::is_for(ctx, socket_value_id) .await? 
- }; - - pasted_av - .set_proxy_for_attribute_value_id(ctx, copied_av.proxy_for_attribute_value_id()) - .await?; - pasted_av - .set_sealed_proxy(ctx, copied_av.sealed_proxy()) - .await?; - - pasted_attribute_values_by_original.insert(*copied_av.id(), *pasted_av.id()); - - if let Some(copied_index_map) = copied_av.index_map() { - for (_, copied_id) in copied_index_map.order_as_map() { - if let Some(attribute_value) = - AttributeValue::get_by_id(ctx, &copied_id).await? - { - work_queue.push_back(attribute_value) + .internal_provider_id() + { + match result.entry(internal_provider_id) { + hash_map::Entry::Vacant(entry) => { + entry.insert(socket_value_id); + } + hash_map::Entry::Occupied(_) => { + return Err(ComponentError::InternalProviderTooManyAttributeValues( + internal_provider_id, + )) } } } } - for copied_av in &attribute_values { - if let Some(copied_index_map) = copied_av.index_map() { - let pasted_id = pasted_attribute_values_by_original - .get(copied_av.id()) - .ok_or(ComponentError::AttributeValueNotFound)?; + Ok(result) + } + + pub async fn internal_provider_attribute_values( + &self, + ctx: &DalContext, + ) -> ComponentResult> { + Self::internal_provider_attribute_values_for_component_id(ctx, self.id()).await + } - let mut index_map = IndexMap::new(); - for (key, copied_id) in copied_index_map.order_as_map() { - if let Some(pasted_id) = pasted_attribute_values_by_original.get(&copied_id) { - index_map.push(*pasted_id, Some(key)); - } - } + async fn connect_inner( + ctx: &DalContext, + source_component_id: ComponentId, + source_external_provider_id: ExternalProviderId, + destination_component_id: ComponentId, + destination_explicit_internal_provider_id: InternalProviderId, + ) -> ComponentResult<(AttributeValueId, AttributePrototypeArgumentId)> { + let destination_attribute_value_ids = + InternalProvider::attribute_values_for_internal_provider_id( + ctx, + destination_explicit_internal_provider_id, + ) + .await?; - ctx.txns() - .await? 
- .pg() - .query( - "UPDATE attribute_values av - SET index_map = $3 - FROM attribute_values_v1($1, $2) as attribute_values - WHERE attribute_values.id = $4 AND av.id = attribute_values.id", - &[ - ctx.tenancy(), - ctx.visibility(), - &serde_json::to_value(&index_map)?, - &pasted_id, - ], - ) - .await?; + // filter the value ids by destination_component_id + let mut destination_attribute_value_id: Option = None; + for value_id in destination_attribute_value_ids { + let component_id = AttributeValue::component_id(ctx, value_id).await?; + if component_id == destination_component_id { + destination_attribute_value_id = Some(value_id); + break; } } - let attribute_prototypes = - AttributePrototype::find_by_attr(ctx, "attribute_context_component_id", &component_id) - .await?; - let mut my_attribute_prototypes = - AttributePrototype::find_by_attr(ctx, "attribute_context_component_id", self.id()) - .await?; + let destination_attribute_value_id = destination_attribute_value_id.ok_or( + ComponentError::DestinationComponentMissingAttributeValueForInternalProvider( + destination_component_id, + destination_explicit_internal_provider_id, + ), + )?; - let mut pasted_attribute_prototypes_by_original = HashMap::new(); - for copied_ap in &attribute_prototypes { - let context = AttributeContextBuilder::from(copied_ap.context) - .set_component_id(*self.id()) - .to_context()?; + let destination_prototype_id = + AttributeValue::prototype_id(ctx, destination_attribute_value_id).await?; - let id = if let Some(ap) = my_attribute_prototypes - .iter_mut() - .find(|av| context.check(av.context) && av.key.as_deref() == copied_ap.key()) - { - ap.set_func_id(ctx, copied_ap.func_id()).await?; - ap.set_key(ctx, copied_ap.key()).await?; - *ap.id() - } else { - let row = ctx - .txns() - .await? 
- .pg() - .query_one( - "SELECT object FROM attribute_prototype_create_v1($1, $2, $3, $4, $5) AS ap", - &[ - ctx.tenancy(), - ctx.visibility(), - &serde_json::to_value(context)?, - &copied_ap.func_id(), - &copied_ap.key(), - ], - ) - .await?; - let object: AttributePrototype = standard_model::object_from_row(row)?; - *object.id() - }; + let attribute_prototype_argument = AttributePrototypeArgument::new_inter_component( + ctx, + source_component_id, + source_external_provider_id, + destination_component_id, + destination_prototype_id, + ) + .await?; - pasted_attribute_prototypes_by_original.insert(*copied_ap.id(), id); - } + AttributeValue::update_from_prototype_function(ctx, destination_attribute_value_id).await?; - let rows = ctx - .txns() - .await? - .pg() - .query( - "SELECT object_id, belongs_to_id - FROM attribute_value_belongs_to_attribute_value_v1($1, $2) - WHERE object_id = ANY($3) AND belongs_to_id = ANY($3)", - &[ - ctx.tenancy(), - ctx.visibility(), - &attribute_values - .iter() - .map(|av| *av.id()) - .collect::>(), - ], + Ok(( + destination_attribute_value_id, + attribute_prototype_argument.id(), + )) + } + + pub async fn connect( + ctx: &DalContext, + source_component_id: ComponentId, + source_external_provider_id: ExternalProviderId, + destination_component_id: ComponentId, + destination_explicit_internal_provider_id: InternalProviderId, + ) -> ComponentResult { + let (destination_attribute_value_id, attribute_prototype_argument_id) = + Self::connect_inner( + ctx, + source_component_id, + source_external_provider_id, + destination_component_id, + destination_explicit_internal_provider_id, ) .await?; - for row in rows { - let original_object_id: AttributeValueId = row.try_get("object_id")?; - let original_belongs_to_id: AttributeValueId = row.try_get("belongs_to_id")?; + ctx.enqueue_job(DependentValuesUpdate::new( + ctx.access_builder(), + *ctx.visibility(), + vec![destination_attribute_value_id], + )) + .await?; - let object_id = 
pasted_attribute_values_by_original - .get(&original_object_id) - .ok_or(ComponentError::AttributeValueNotFound)?; - let belongs_to_id = pasted_attribute_values_by_original - .get(&original_belongs_to_id) - .ok_or(ComponentError::AttributeValueNotFound)?; + Ok(attribute_prototype_argument_id) + } - ctx.txns() - .await? - .pg() - .query( - "INSERT INTO attribute_value_belongs_to_attribute_value - (object_id, belongs_to_id, tenancy_workspace_pk, visibility_change_set_pk) - VALUES ($1, $2, $3, $4) - ON CONFLICT (object_id, tenancy_workspace_pk, visibility_change_set_pk) - DO NOTHING", - &[ - &object_id, - &belongs_to_id, - &ctx.tenancy().workspace_pk(), - &ctx.visibility().change_set_pk, - ], - ) - .await?; + // NOTE(nick): this is probably algorithmically bad and we probably need to make it less bad. + /// Find all matching sockets for a given source [`Component`] and a given destination [`Component`]. + /// + /// This is useful when [`attaching`](frame::Frame::attach_child_to_parent) a child [`Component`] to a parent + /// frame. + pub async fn connect_all( + ctx: &DalContext, + source_component_id: ComponentId, + destination_component_id: ComponentId, + ) -> ComponentResult<()> { + let source_schema_variant_id = + Component::schema_variant_id(ctx, source_component_id).await?; + let destination_schema_variant_id = + Component::schema_variant_id(ctx, destination_component_id).await?; + + let source_external_providers = + ExternalProvider::list(ctx, source_schema_variant_id).await?; + let destination_internal_providers = + InternalProvider::list(ctx, destination_schema_variant_id).await?; + + // TODO(nick): use annotations instead of names. Also make this less bad. + let mut external_providers_by_annotation: HashMap = + HashMap::new(); + for source_external_provider in source_external_providers { + external_providers_by_annotation.insert( + source_external_provider.name().to_string(), + source_external_provider.id(), + ); } - let rows = ctx - .txns() - .await? 
- .pg() - .query( - "SELECT object_id, belongs_to_id - FROM attribute_value_belongs_to_attribute_prototype_v1($1, $2) - WHERE object_id = ANY($3) AND belongs_to_id = ANY($4)", - &[ - ctx.tenancy(), - ctx.visibility(), - &attribute_values - .iter() - .map(|av| *av.id()) - .collect::>(), - &attribute_prototypes - .iter() - .map(|av| *av.id()) - .collect::>(), - ], - ) - .await?; - - for row in rows { - let original_object_id: AttributeValueId = row.try_get("object_id")?; - let original_belongs_to_id: AttributePrototypeId = row.try_get("belongs_to_id")?; - - let object_id = pasted_attribute_values_by_original - .get(&original_object_id) - .ok_or(ComponentError::AttributeValueNotFound)?; - let belongs_to_id = pasted_attribute_prototypes_by_original - .get(&original_belongs_to_id) - .ok_or(ComponentError::AttributePrototypeNotFound)?; + let mut internal_providers_by_annotation: HashMap = + HashMap::new(); + for destination_internal_provider in destination_internal_providers { + internal_providers_by_annotation.insert( + destination_internal_provider.name().to_string(), + destination_internal_provider.id(), + ); + } - ctx - .txns() - .await? - .pg() - .query("INSERT INTO attribute_value_belongs_to_attribute_prototype - (object_id, belongs_to_id, tenancy_workspace_pk, visibility_change_set_pk) - VALUES ($1, $2, $3, $4) - ON CONFLICT (object_id, tenancy_workspace_pk, visibility_change_set_pk) DO NOTHING", - &[ - &object_id, - &belongs_to_id, - &ctx.tenancy().workspace_pk(), - &ctx.visibility().change_set_pk, - ], - ).await?; + // NOTE(nick): using the maps reliant on the name, connect all sockets we can. 
+ let mut to_enqueue = Vec::new(); + for (key, external_provider_id) in external_providers_by_annotation { + if let Some(internal_provider_id) = internal_providers_by_annotation.get(&key) { + let (attribute_value_id, _) = Self::connect_inner( + ctx, + source_component_id, + external_provider_id, + destination_component_id, + *internal_provider_id, + ) + .await?; + to_enqueue.push(attribute_value_id); + } } + // Enqueue all the values from each connection. + ctx.enqueue_job(DependentValuesUpdate::new( + ctx.access_builder(), + *ctx.visibility(), + to_enqueue, + )) + .await?; + Ok(()) } } diff --git a/lib/dal/src/component/code.rs b/lib/dal/src/component/code.rs index 263bb664ee..45949358f4 100644 --- a/lib/dal/src/component/code.rs +++ b/lib/dal/src/component/code.rs @@ -2,15 +2,13 @@ use serde::Deserialize; use serde::Serialize; use std::collections::{HashMap, HashSet}; +use super::{ComponentError, ComponentResult}; use crate::attribute::value::AttributeValue; -use crate::attribute::value::AttributeValueError; -use crate::component::ComponentResult; use crate::{ - AttributeReadContext, AttributeValueId, CodeLanguage, CodeView, ComponentError, ComponentId, - DalContext, StandardModel, WsEvent, WsPayload, + attribute::value::AttributeValueError, code_view::CodeLanguage, code_view::CodeView, + schema::variant::root_prop::RootPropChild, AttributeValueId, Component, ComponentId, + DalContext, SchemaVariant, StandardModel, WsEvent, WsEventResult, WsPayload, }; -use crate::{Component, SchemaVariant}; -use crate::{RootPropChild, WsEventResult}; #[derive(Deserialize, Debug)] struct CodeGenerationEntry { @@ -27,13 +25,8 @@ impl Component { ctx: &DalContext, component_id: ComponentId, ) -> ComponentResult<(Vec, bool)> { - let component = Self::get_by_id(ctx, &component_id) - .await? - .ok_or(ComponentError::NotFound(component_id))?; - let schema_variant = component - .schema_variant(ctx) - .await? 
- .ok_or(ComponentError::NoSchemaVariant(component_id))?; + let component = Self::get_by_id(ctx, component_id).await?; + let schema_variant = component.schema_variant(ctx).await?; // Prepare to assemble code views and access the "/root/code" prop tree. let mut code_views: Vec = Vec::new(); diff --git a/lib/dal/src/component/frame.rs b/lib/dal/src/component/frame.rs new file mode 100644 index 0000000000..d74df706ed --- /dev/null +++ b/lib/dal/src/component/frame.rs @@ -0,0 +1,113 @@ +use serde::{Deserialize, Serialize}; +use strum::{AsRefStr, Display, EnumString}; +use telemetry::prelude::*; +use thiserror::Error; +use ulid::Ulid; + +use crate::diagram::{EdgeId, NodeId}; +use crate::workspace_snapshot::edge_weight::{EdgeWeight, EdgeWeightError, EdgeWeightKind}; +use crate::workspace_snapshot::WorkspaceSnapshotError; +use crate::{ + Component, ComponentError, ComponentId, ComponentType, DalContext, TransactionsError, User, +}; + +#[remain::sorted] +#[derive(Error, Debug)] +pub enum FrameError { + #[error("component error: {0}")] + Component(#[from] ComponentError), + #[error("edge weight error: {0}")] + EdgeWeight(#[from] EdgeWeightError), + #[error("parent is not a frame (child id: {0}) (parent id: {1})")] + ParentIsNotAFrame(ComponentId, ComponentId), + #[error("transactions error: {0}")] + Transactions(#[from] TransactionsError), + #[error("workspace snapshot error: {0}")] + WorkspaceSnapshot(#[from] WorkspaceSnapshotError), +} + +pub type FrameResult = Result; + +/// A unit struct containing logic for working with frames. +pub struct Frame; + +impl Frame { + /// Provides the ability to attach a child [`Component`] to a parent frame. 
+ pub async fn attach_child_to_parent( + ctx: &DalContext, + parent_id: ComponentId, + child_id: ComponentId, + ) -> FrameResult<()> { + let parent = Component::get_by_id(ctx, parent_id).await?; + let parent_type = parent.get_type(ctx).await?; + + let (source_id, destination_id) = match parent_type { + ComponentType::AggregationFrame => { + unimplemented!("aggregation frames are untested in the new engine") + } + ComponentType::Component => { + return Err(FrameError::ParentIsNotAFrame(child_id, parent_id)) + } + ComponentType::ConfigurationFrameDown => (parent_id, child_id), + ComponentType::ConfigurationFrameUp => (child_id, parent_id), + }; + + Self::attach_child_to_parent_symbolic(ctx, parent_id, child_id).await?; + + Component::connect_all(ctx, source_id, destination_id).await?; + + Ok(()) + } + + async fn attach_child_to_parent_symbolic( + ctx: &DalContext, + parent_id: ComponentId, + child_id: ComponentId, + ) -> FrameResult<()> { + let mut workspace_snapshot = ctx.workspace_snapshot()?.write().await; + let change_set = ctx.change_set_pointer()?; + + workspace_snapshot.add_edge( + parent_id, + EdgeWeight::new(change_set, EdgeWeightKind::FrameContains)?, + child_id, + )?; + + Ok(()) + } +} + +// TODO(nick): replace once the switchover is complete. +#[remain::sorted] +#[derive( + Deserialize, Serialize, Debug, PartialEq, Eq, Clone, Display, EnumString, AsRefStr, Copy, +)] +#[serde(rename_all = "camelCase")] +#[strum(serialize_all = "camelCase")] +pub enum EdgeKind { + Configuration, + Symbolic, +} + +// TODO(nick): replace once the switchover is complete. +pub type SocketId = Ulid; + +// TODO(nick): replace once the switchover is complete. +#[derive(Deserialize, Serialize, Debug, Clone, PartialEq, Eq)] +#[serde(rename_all = "camelCase")] +pub struct Vertex { + pub node_id: NodeId, + pub socket_id: SocketId, +} + +// TODO(nick): replace once the switchover is complete. 
+#[derive(Deserialize, Serialize, Debug, Clone, PartialEq, Eq)] +#[serde(rename_all = "camelCase")] +pub struct Connection { + pub id: EdgeId, + pub classification: EdgeKind, + pub source: Vertex, + pub destination: Vertex, + pub created_by: Option, + pub deleted_by: Option, +} diff --git a/lib/dal/src/component/qualification.rs b/lib/dal/src/component/qualification.rs index 885adb4f14..40d34552e0 100644 --- a/lib/dal/src/component/qualification.rs +++ b/lib/dal/src/component/qualification.rs @@ -1,13 +1,11 @@ use serde::Deserialize; -use std::collections::HashMap; +use telemetry::prelude::*; -use crate::attribute::value::AttributeValue; -use crate::attribute::value::AttributeValueError; use crate::component::ComponentResult; use crate::qualification::{QualificationSubCheckStatus, QualificationView}; -use crate::schema::SchemaVariant; +use crate::schema::variant::root_prop::RootPropChild; use crate::ws_event::WsEvent; -use crate::{AttributeReadContext, DalContext, RootPropChild, StandardModel}; +use crate::DalContext; use crate::{Component, ComponentError, ComponentId}; // FIXME(nick): use the formal types from the new version of function authoring instead of this @@ -19,105 +17,41 @@ pub struct QualificationEntry { } impl Component { - // TODO(nick): big query potential here. + #[instrument(skip_all)] pub async fn list_qualifications( ctx: &DalContext, component_id: ComponentId, ) -> ComponentResult> { - let component = Self::get_by_id(ctx, &component_id) - .await? - .ok_or(ComponentError::NotFound(component_id))?; - let schema_variant = component - .schema_variant(ctx) - .await? - .ok_or(ComponentError::NoSchemaVariant(component_id))?; + let component = Self::get_by_id(ctx, component_id).await?; - let mut results: Vec = vec![]; let mut qualification_views = vec![]; - // Prepare to assemble qualification views and access the "/root/qualification" prop tree. - // We will use its implicit internal provider id and its corresponding prop id to do so. 
- let qualification_map_implicit_internal_provider = - SchemaVariant::find_root_child_implicit_internal_provider( + let qualification_map_value_id = component + .attribute_values_for_prop( ctx, - *schema_variant.id(), - RootPropChild::Qualification, + RootPropChild::Qualification + .prop_path() + .as_parts() + .as_slice(), ) - .await?; - - // Collect all the func binding return value ids for the child attribute values - // (map entries) for reference later. - let prop_qualification_map_attribute_read_context = AttributeReadContext { - prop_id: Some(*qualification_map_implicit_internal_provider.prop_id()), - component_id: Some(component_id), - ..AttributeReadContext::default() - }; - let prop_qualification_map_attribute_value = - AttributeValue::find_for_context(ctx, prop_qualification_map_attribute_read_context) - .await? - .ok_or(AttributeValueError::NotFoundForReadContext( - prop_qualification_map_attribute_read_context, - ))?; - - let mut entries = HashMap::new(); - for entry_attribute_value in prop_qualification_map_attribute_value - .child_attribute_values(ctx) .await? - { - let entry_attribute_value_id = *entry_attribute_value.id(); - let func_binding_return_value_id = entry_attribute_value.func_binding_return_value_id(); - let entry_prototype_func_id = entry_attribute_value - .attribute_prototype(ctx) - .await? - .ok_or(ComponentError::MissingAttributePrototype( - entry_attribute_value_id, - ))? - .func_id(); - - let entry: QualificationEntry = serde_json::from_value( - entry_attribute_value - .get_unprocessed_value(ctx) - .await? 
- .ok_or(ComponentError::QualificationResultEmpty( - entry_attribute_value - .key - .clone() - .unwrap_or("unknown".to_string()), - *component.id(), - ))?, - )?; - - let key = - entry_attribute_value - .key() - .ok_or(ComponentError::FoundMapEntryWithoutKey( - entry_attribute_value_id, - ))?; - - // We're going to get values at both contexts (component and schema variant), but we - // should prefer component level ones - if entries.contains_key(key) && entry_attribute_value.context.is_component_unset() { - continue; + .first() + .copied() + .ok_or(ComponentError::MissingQualificationsValue(component_id))?; + + let qualification_attribute_value_ids = { + let workspace_snapshot = ctx.workspace_snapshot()?.read().await; + match workspace_snapshot.ordered_children_for_node(qualification_map_value_id)? { + Some(value_ids) => value_ids, + None => return Ok(vec![]), // should probably be an error } + }; - entries.insert( - key.to_string(), - (entry, entry_prototype_func_id, func_binding_return_value_id), - ); - } - - for (key, (entry, entry_prototype_func_id, func_binding_return_value_id)) in entries.drain() - { - if let Some(qual_view) = QualificationView::new( - ctx, - &key, - entry, - entry_prototype_func_id, - func_binding_return_value_id, - ) - .await? + for qualification_attribute_value_id in qualification_attribute_value_ids { + if let Some(view) = + QualificationView::new(ctx, qualification_attribute_value_id.into()).await? { - qualification_views.push(qual_view); + qualification_views.push(view); } } @@ -127,13 +61,12 @@ impl Component { qualification_views.sort(); // We want the "all fields valid" to always be first - results.extend(qualification_views); WsEvent::checked_qualifications(ctx, component_id) .await? 
.publish_on_commit(ctx) .await?; - Ok(results) + Ok(qualification_views) } } diff --git a/lib/dal/src/component/resource.rs b/lib/dal/src/component/resource.rs index 484ef080e9..87d38b87fa 100644 --- a/lib/dal/src/component/resource.rs +++ b/lib/dal/src/component/resource.rs @@ -4,165 +4,152 @@ use serde::{Deserialize, Serialize}; use serde_json::Value; use veritech_client::ResourceStatus; -use crate::attribute::context::AttributeContextBuilder; -use crate::attribute::value::AttributeValue; -use crate::attribute::value::AttributeValueError; -use crate::component::ComponentResult; -use crate::func::binding_return_value::FuncBindingReturnValue; -use crate::ws_event::WsEvent; -use crate::{ - func::backend::js_action::ActionRunResult, ActionKind, ActionPrototype, ActionPrototypeContext, - AttributeReadContext, Component, ComponentError, ComponentId, DalContext, SchemaVariant, - StandardModel, WsPayload, -}; -use crate::{RootPropChild, WsEventResult}; - -impl Component { - /// Calls [`Self::resource_by_id`] using the [`ComponentId`](Component) off [`Component`]. - pub async fn resource(&self, ctx: &DalContext) -> ComponentResult { - Self::resource_by_id(ctx, self.id).await - } - - /// Find the object corresponding to "/root/resource". - pub async fn resource_by_id( - ctx: &DalContext, - component_id: ComponentId, - ) -> ComponentResult { - let attribute_value = Self::resource_attribute_value_by_id(ctx, component_id).await?; - - let func_binding_return_value = - FuncBindingReturnValue::get_by_id(ctx, &attribute_value.func_binding_return_value_id()) - .await? 
- .ok_or_else(|| { - ComponentError::FuncBindingReturnValueNotFound( - attribute_value.func_binding_return_value_id(), - ) - })?; - - let value = func_binding_return_value - .value() - .map(|value| { - if value == &serde_json::json!({}) { - return serde_json::json!({ - "status": "ok", - }); - } - value.clone() - }) - .unwrap_or_else(|| { - serde_json::json!({ - "status": "ok", - }) - }); - let result = ActionRunResult::deserialize(&value)?; - Ok(result) - } - - pub async fn resource_attribute_value_by_id( - ctx: &DalContext, - component_id: ComponentId, - ) -> ComponentResult { - let schema_variant_id = Self::schema_variant_id(ctx, component_id).await?; - let implicit_internal_provider = SchemaVariant::find_root_child_implicit_internal_provider( - ctx, - schema_variant_id, - RootPropChild::Resource, - ) - .await?; - - let value_context = AttributeReadContext { - internal_provider_id: Some(*implicit_internal_provider.id()), - component_id: Some(component_id), - ..AttributeReadContext::default() - }; - - let attribute_value = AttributeValue::find_for_context(ctx, value_context) - .await? - .ok_or(ComponentError::AttributeValueNotFoundForContext( - value_context, - ))?; - Ok(attribute_value) - } - - /// Sets the "string" field, "/root/resource" with a given value. After that, ensure dependent - /// [`AttributeValues`](crate::AttributeValue) are updated. - /// - /// Returns "true" if the resource tree has been updated. Returns "false" if the cached - /// value is used. 
- pub async fn set_resource( - &self, - ctx: &DalContext, - result: ActionRunResult, - ) -> ComponentResult { - self.set_resource_raw(ctx, result, true).await - } - - pub async fn set_resource_raw( - &self, - ctx: &DalContext, - result: ActionRunResult, - check_change_set: bool, - ) -> ComponentResult { - let ctx = &ctx.clone_without_deleted_visibility(); - - if check_change_set && !ctx.visibility().is_head() { - return Err(ComponentError::CannotUpdateResourceTreeInChangeSet); - } - - let resource_attribute_value = Component::root_prop_child_attribute_value_for_component( - ctx, - self.id, - RootPropChild::Resource, - ) - .await?; - - let root_attribute_value = resource_attribute_value - .parent_attribute_value(ctx) - .await? - .ok_or_else(|| AttributeValueError::ParentNotFound(*resource_attribute_value.id()))?; - - let update_attribute_context = - AttributeContextBuilder::from(resource_attribute_value.context) - .set_component_id(self.id) - .to_context()?; - - let (_, _) = AttributeValue::update_for_context( - ctx, - *resource_attribute_value.id(), - Some(*root_attribute_value.id()), - update_attribute_context, - Some(serde_json::to_value(result)?), - None, - ) - .await?; - Ok(true) - } - - pub async fn act(&self, ctx: &DalContext, action: ActionKind) -> ComponentResult<()> { - let schema_variant = self - .schema_variant(ctx) - .await? - .ok_or(ComponentError::NoSchemaVariant(self.id))?; - - let action = match ActionPrototype::find_for_context_and_kind( - ctx, - action, - ActionPrototypeContext { - schema_variant_id: *schema_variant.id(), - }, - ) - .await? - .pop() - { - Some(action) => action, - None => return Ok(()), - }; - - action.run(ctx, *self.id()).await?; - - Ok(()) - } -} +// impl Component { +// /// Calls [`Self::resource_by_id`] using the [`ComponentId`](Component) off [`Component`]. 
+// pub async fn resource(&self, ctx: &DalContext) -> ComponentResult { +// Self::resource_by_id(ctx, self.id).await +// } +// +// /// Find the object corresponding to "/root/resource". +// pub async fn resource_by_id( +// ctx: &DalContext, +// component_id: ComponentId, +// ) -> ComponentResult { +// let attribute_value = Self::resource_attribute_value_by_id(ctx, component_id).await?; +// +// let func_binding_return_value = +// FuncBindingReturnValue::get_by_id(ctx, &attribute_value.func_binding_return_value_id()) +// .await? +// .ok_or_else(|| { +// ComponentError::FuncBindingReturnValueNotFound( +// attribute_value.func_binding_return_value_id(), +// ) +// })?; +// +// let value = func_binding_return_value +// .value() +// .map(|value| { +// if value == &serde_json::json!({}) { +// return serde_json::json!({ +// "status": "ok", +// }); +// } +// value.clone() +// }) +// .unwrap_or_else(|| { +// serde_json::json!({ +// "status": "ok", +// }) +// }); +// let result = ActionRunResult::deserialize(&value)?; +// Ok(result) +// } +// +// pub async fn resource_attribute_value_by_id( +// ctx: &DalContext, +// component_id: ComponentId, +// ) -> ComponentResult { +// let schema_variant_id = Self::schema_variant_id(ctx, component_id).await?; +// let implicit_internal_provider = SchemaVariant::find_root_child_implicit_internal_provider( +// ctx, +// schema_variant_id, +// RootPropChild::Resource, +// ) +// .await?; +// +// let value_context = AttributeReadContext { +// internal_provider_id: Some(*implicit_internal_provider.id()), +// component_id: Some(component_id), +// ..AttributeReadContext::default() +// }; +// +// let attribute_value = AttributeValue::find_for_context(ctx, value_context) +// .await? +// .ok_or(ComponentError::AttributeValueNotFoundForContext( +// value_context, +// ))?; +// Ok(attribute_value) +// } +// +// /// Sets the "string" field, "/root/resource" with a given value. 
After that, ensure dependent +// /// [`AttributeValues`](crate::AttributeValue) are updated. +// /// +// /// Returns "true" if the resource tree has been updated. Returns "false" if the cached +// /// value is used. +// pub async fn set_resource( +// &self, +// ctx: &DalContext, +// result: ActionRunResult, +// ) -> ComponentResult { +// self.set_resource_raw(ctx, result, true).await +// } + +// pub async fn set_resource_raw( +// &self, +// ctx: &DalContext, +// result: ActionRunResult, +// check_change_set: bool, +// ) -> ComponentResult { +// let ctx = &ctx.clone_without_deleted_visibility(); +// +// if check_change_set && !ctx.visibility().is_head() { +// return Err(ComponentError::CannotUpdateResourceTreeInChangeSet); +// } +// +// let resource_attribute_value = Component::root_prop_child_attribute_value_for_component( +// ctx, +// self.id, +// RootPropChild::Resource, +// ) +// .await?; +// +// let root_attribute_value = resource_attribute_value +// .parent_attribute_value(ctx) +// .await? +// .ok_or_else(|| AttributeValueError::ParentNotFound(*resource_attribute_value.id()))?; +// +// let update_attribute_context = +// AttributeContextBuilder::from(resource_attribute_value.context) +// .set_component_id(self.id) +// .to_context()?; +// +// let (_, _) = AttributeValue::update_for_context( +// ctx, +// *resource_attribute_value.id(), +// Some(*root_attribute_value.id()), +// update_attribute_context, +// Some(serde_json::to_value(result)?), +// None, +// ) +// .await?; +// Ok(true) +// } +// +// pub async fn act(&self, ctx: &DalContext, action: ActionKind) -> ComponentResult<()> { +// let schema_variant = self +// .schema_variant(ctx) +// .await? +// .ok_or(ComponentError::NoSchemaVariant(self.id))?; +// +// let action = match ActionPrototype::find_for_context_and_kind( +// ctx, +// action, +// ActionPrototypeContext { +// schema_variant_id: *schema_variant.id(), +// }, +// ) +// .await? 
+// .pop() +// { +// Some(action) => action, +// None => return Ok(()), +// }; +// +// action.run(ctx, *self.id()).await?; +// +// Ok(()) +// } +// } #[derive(Deserialize, Serialize, Debug, Clone, PartialEq, Eq)] #[serde(rename_all = "camelCase")] @@ -174,45 +161,51 @@ pub struct ResourceView { pub last_synced: Option, } -impl ResourceView { - pub fn new(result: ActionRunResult) -> Self { - Self { - data: result.payload, - message: result.message, - status: result.status, - logs: result.logs, - last_synced: result.last_synced, - } - } - - pub async fn get_by_component_id( - ctx: &DalContext, - component_id: &ComponentId, - ) -> ComponentResult { - let component = Component::get_by_id(ctx, component_id) - .await? - .ok_or(ComponentError::NotFound(*component_id))?; - - let resource = Self::new(component.resource(ctx).await?); - Ok(resource) - } -} - -#[derive(Clone, Deserialize, Serialize, Debug, PartialEq, Eq)] -#[serde(rename_all = "camelCase")] -pub struct ResourceRefreshedPayload { - component_id: ComponentId, -} - -impl WsEvent { - pub async fn resource_refreshed( - ctx: &DalContext, - component_id: ComponentId, - ) -> WsEventResult { - WsEvent::new( - ctx, - WsPayload::ResourceRefreshed(ResourceRefreshedPayload { component_id }), - ) - .await - } -} +// impl ResourceView { +// pub fn new(result: ActionRunResult) -> Self { +// Self { +// data: result.payload, +// message: result.message, +// status: result.status, +// logs: result.logs, +// last_synced: result.last_synced, +// } +// } +// +// /// Generate a map of [views](Self) for all [`Components`](Component) in the workspace. +// pub async fn list_with_deleted( +// ctx: &DalContext, +// ) -> ComponentResult> { +// let ctx = &ctx.clone_with_delete_visibility(); +// let mut resources = HashMap::new(); +// for component in Component::list(ctx).await? { +// if !component.is_destroyed() { +// // Use the entry API to ensure that we do not process the same component twice, if +// // duplicates were accidentally(?) 
provided. +// resources +// .entry(*component.id()) +// .or_insert(Self::new(component.resource(ctx).await?)); +// } +// } +// Ok(resources) +// } +// } +// +// #[derive(Clone, Deserialize, Serialize, Debug, PartialEq, Eq)] +// #[serde(rename_all = "camelCase")] +// pub struct ResourceRefreshedPayload { +// component_id: ComponentId, +// } +// +// impl WsEvent { +// pub async fn resource_refreshed( +// ctx: &DalContext, +// component_id: ComponentId, +// ) -> WsEventResult { +// WsEvent::new( +// ctx, +// WsPayload::ResourceRefreshed(ResourceRefreshedPayload { component_id }), +// ) +// .await +// } +// } diff --git a/lib/dal/src/component/status.rs b/lib/dal/src/component/status.rs index bb81d2b058..592d150f17 100644 --- a/lib/dal/src/component/status.rs +++ b/lib/dal/src/component/status.rs @@ -2,7 +2,6 @@ use chrono::DateTime; use chrono::Utc; use serde::{Deserialize, Serialize}; -use crate::component::{ComponentResult, COMPONENT_STATUS_UPDATE_BY_PK}; use crate::standard_model::TypeHint; use crate::{ impl_standard_model, pk, standard_model, ComponentId, DalContext, HistoryActor, @@ -11,12 +10,6 @@ use crate::{ pk!(ComponentStatusPk); -#[derive(Deserialize, Serialize, Debug, Clone, Copy, PartialEq, Eq)] -pub struct HistoryActorTimestamp { - pub actor: HistoryActor, - pub timestamp: DateTime, -} - #[derive(Deserialize, Serialize, Debug, Clone, PartialEq, Eq)] pub struct ComponentStatus { pk: ComponentStatusPk, diff --git a/lib/dal/src/component/view/debug.rs b/lib/dal/src/component/view/debug.rs index 2890f9717d..eee929c550 100644 --- a/lib/dal/src/component/view/debug.rs +++ b/lib/dal/src/component/view/debug.rs @@ -5,14 +5,12 @@ use thiserror::Error; use crate::{ func::execution::{FuncExecution, FuncExecutionError}, - socket::{SocketEdgeKind, SocketError}, - AttributePrototype, AttributeReadContext, AttributeValue, AttributeValueError, - AttributeValueId, AttributeValuePayload, Component, ComponentError, ComponentId, DalContext, - ExternalProvider, 
ExternalProviderId, Func, FuncBinding, FuncBindingError, - FuncBindingReturnValue, FuncBindingReturnValueError, FuncError, InternalProvider, - InternalProviderError, InternalProviderId, Prop, PropError, PropId, PropKind, - SchemaVariantError, SchemaVariantId, SecretError, SecretId, Socket, SocketId, StandardModel, - StandardModelError, + socket::SocketEdgeKind, + AttributePrototype, AttributeValue, AttributeValueId, AttributeValuePayload, Component, + ComponentId, DalContext, ExternalProvider, ExternalProviderId, Func, FuncBinding, + FuncBindingError, FuncBindingReturnValue, FuncBindingReturnValueError, InternalProvider, + InternalProviderId, Prop, PropId, PropKind, SchemaVariantId, SecretError, SecretId, Socket, + SocketId, StandardModel, StandardModelError, }; type ComponentDebugViewResult = Result; diff --git a/lib/dal/src/context.rs b/lib/dal/src/context.rs index b213f2807e..9526d29ca1 100644 --- a/lib/dal/src/context.rs +++ b/lib/dal/src/context.rs @@ -1,25 +1,38 @@ use std::{collections::HashMap, collections::HashSet, mem, path::PathBuf, sync::Arc}; +use content_store::{PgStore, StoreError}; use futures::Future; +use rebaser_client::ClientError as RebaserClientError; +use rebaser_client::Config as RebaserClientConfig; +use rebaser_client::ReplyRebaseMessage; use serde::{Deserialize, Serialize}; use si_crypto::SymmetricCryptoService; use si_data_nats::{NatsClient, NatsError, NatsTxn}; use si_data_pg::{InstrumentedClient, PgError, PgPool, PgPoolError, PgPoolResult, PgTxn}; use telemetry::prelude::*; use thiserror::Error; +use tokio::sync::RwLock; use tokio::sync::{MappedMutexGuard, Mutex, MutexGuard}; +use tokio::time::Instant; +use ulid::Ulid; use veritech_client::{Client as VeritechClient, CycloneEncryptionKey}; +use crate::workspace_snapshot::conflict::Conflict; +use crate::workspace_snapshot::update::Update; +use crate::workspace_snapshot::vector_clock::VectorClockId; +use crate::workspace_snapshot::WorkspaceSnapshotId; use crate::{ + 
change_set_pointer::{ChangeSetPointer, ChangeSetPointerId}, job::{ - definition::{FixesJob, RefreshJob}, processor::{JobQueueProcessor, JobQueueProcessorError}, producer::{BlockingJobError, BlockingJobResult, JobProducer}, queue::JobQueue, }, - AttributeValueId, ChangeSetPk, ComponentId, HistoryActor, StandardModel, Tenancy, TenancyError, - Visibility, + workspace_snapshot::WorkspaceSnapshotError, + AttributeValueId, ComponentId, HistoryActor, StandardModel, Tenancy, TenancyError, Visibility, + WorkspacePk, WorkspaceSnapshot, }; +use crate::{ChangeSetPk, Workspace}; /// A context type which contains handles to common core service dependencies. /// @@ -43,6 +56,10 @@ pub struct ServicesContext { module_index_url: Option, /// A service that can encrypt and decrypt values with a set of symmetric keys symmetric_crypto_service: SymmetricCryptoService, + /// Config for the the rebaser service + rebaser_config: RebaserClientConfig, + /// Content store + content_store_pg_pool: PgPool, } impl ServicesContext { @@ -57,6 +74,8 @@ impl ServicesContext { pkgs_path: Option, module_index_url: Option, symmetric_crypto_service: SymmetricCryptoService, + rebaser_config: RebaserClientConfig, + content_store_pg_pool: PgPool, ) -> Self { Self { pg_pool, @@ -67,6 +86,8 @@ impl ServicesContext { pkgs_path, module_index_url, symmetric_crypto_service, + rebaser_config, + content_store_pg_pool, } } @@ -104,8 +125,8 @@ impl ServicesContext { } /// Get a reference to the module index url - pub fn module_index_url(&self) -> &Option { - &self.module_index_url + pub fn module_index_url(&self) -> Option<&str> { + self.module_index_url.as_deref() } /// Get a reference to the symmetric encryption service @@ -113,12 +134,34 @@ impl ServicesContext { &self.symmetric_crypto_service } + /// Gets a reference to the rebaser client configuration + pub fn rebaser_config(&self) -> &RebaserClientConfig { + &self.rebaser_config + } + + /// Gets a reference to the content store pg pool + pub fn 
content_store_pg_pool(&self) -> &PgPool { + &self.content_store_pg_pool + } + + /// Builds and returns a new [`content_store::PgStore`] + pub async fn content_store(&self) -> content_store::StoreResult { + PgStore::new(self.content_store_pg_pool().clone()).await + } + /// Builds and returns a new [`Connections`]. pub async fn connections(&self) -> PgPoolResult { let pg_conn = self.pg_pool.get().await?; let nats_conn = self.nats_conn.clone(); let job_processor = self.job_processor.clone(); - Ok(Connections::new(pg_conn, nats_conn, job_processor)) + let rebaser_config = self.rebaser_config.clone(); + + Ok(Connections::new( + pg_conn, + nats_conn, + job_processor, + rebaser_config, + )) } } @@ -162,29 +205,39 @@ impl ConnectionState { } } - async fn commit(self) -> Result { + async fn commit( + self, + tenancy: &Tenancy, + rebase_request: Option, + ) -> Result<(Self, Option), TransactionsError> { match self { Self::Connections(_) => { trace!("no active transactions present when commit was called, taking no action"); - Ok(self) + Ok((self, None)) } Self::Transactions(txns) => { - let conns = txns.commit_into_conns().await?; - Ok(Self::Connections(conns)) + let (conns, conflicts) = txns.commit_into_conns(tenancy, rebase_request).await?; + Ok((Self::Connections(conns), conflicts)) } Self::Invalid => Err(TransactionsError::TxnCommit), } } - async fn blocking_commit(self) -> Result { + async fn blocking_commit( + self, + tenancy: &Tenancy, + rebase_request: Option, + ) -> Result<(Self, Option), TransactionsError> { match self { Self::Connections(_) => { trace!("no active transactions present when commit was called, taking no action"); - Ok(self) + Ok((self, None)) } Self::Transactions(txns) => { - let conns = txns.blocking_commit_into_conns().await?; - Ok(Self::Connections(conns)) + let (conns, conflicts) = txns + .blocking_commit_into_conns(tenancy, rebase_request) + .await?; + Ok((Self::Connections(conns), conflicts)) } Self::Invalid => Err(TransactionsError::TxnCommit), 
} @@ -205,6 +258,8 @@ impl ConnectionState { } } +pub enum DalContextError {} + /// A context type which holds references to underlying services, transactions, and context for DAL objects. #[derive(Clone, Debug)] pub struct DalContext { @@ -225,6 +280,15 @@ pub struct DalContext { /// Determines if we should not enqueue dependent value update jobs for attribute updates in /// this context. Useful for builtin migrations, since we don't care about attribute values propagation then. no_dependent_values: bool, + /// The content-addressable [`store`](content_store::Store) used by the "dal". + /// + /// This should be configurable in the future, but for now, the only kind of store used is the + /// [`PgStore`](content_store::PgStore). + content_store: Arc>, + /// The workspace snapshot for this context + workspace_snapshot: Option>>, + /// The change set pointer for this context + change_set_pointer: Option, } impl DalContext { @@ -238,6 +302,124 @@ impl DalContext { } } + pub async fn get_workspace_default_change_set_id( + &self, + ) -> Result { + let workspace = Workspace::get_by_pk( + self, + &self.tenancy().workspace_pk().unwrap_or(WorkspacePk::NONE), + ) + .await + // use a proper error + .map_err(|err| TransactionsError::ChangeSet(err.to_string()))?; + + let cs_id = workspace + .map(|workspace| workspace.default_change_set_id()) + .unwrap_or(ChangeSetPointerId::NONE); + + Ok(cs_id) + } + + pub async fn update_snapshot_to_visibility(&mut self) -> Result<(), TransactionsError> { + let change_set_id = match self.change_set_id() { + ChangeSetPointerId::NONE => self.get_workspace_default_change_set_id().await?, + other => other, + }; + + let change_set_pointer = ChangeSetPointer::find(self, change_set_id) + .await + .map_err(|err| TransactionsError::ChangeSet(err.to_string()))? 
+ .ok_or(TransactionsError::ChangeSetPointerNotFound( + self.change_set_id(), + ))?; + + let workspace_snapshot = + WorkspaceSnapshot::find_for_change_set(self, change_set_pointer.id) + .await + .map_err(|err| TransactionsError::WorkspaceSnapshot(err.to_string()))?; + + self.set_change_set_pointer(change_set_pointer)?; + self.set_workspace_snapshot(workspace_snapshot); + + Ok(()) + } + + pub async fn write_snapshot(&self) -> Result, TransactionsError> { + if let Some(snapshot) = &self.workspace_snapshot { + let vector_clock_id = self.change_set_pointer()?.vector_clock_id(); + + Ok(Some( + snapshot + .write() + .await + .write(self, vector_clock_id) + .await + .map_err(|err| TransactionsError::WorkspaceSnapshot(err.to_string()))?, + )) + } else { + Ok(None) + } + } + + fn get_rebase_request( + &self, + onto_workspace_snapshot_id: WorkspaceSnapshotId, + ) -> Result { + let vector_clock_id = self.change_set_pointer()?.vector_clock_id(); + Ok(RebaseRequest { + onto_workspace_snapshot_id, + // the vector clock id of the current change set is just the id + // of the current change set + to_rebase_change_set_id: self.change_set_id(), + onto_vector_clock_id: vector_clock_id, + }) + } + + pub async fn do_rebase_request( + &self, + rebase_request: RebaseRequest, + ) -> Result, TransactionsError> { + rebase( + &self.tenancy, + self.services_context.nats_conn.clone(), + self.services_context().rebaser_config.clone(), + rebase_request, + ) + .await + } + + async fn commit_internal( + &self, + rebase_request: Option, + ) -> Result, TransactionsError> { + let conflicts = if self.blocking { + self.blocking_commit_internal(rebase_request).await? 
+ } else { + let mut guard = self.conns_state.lock().await; + let (new_guard, conflicts) = guard.take().commit(&self.tenancy, rebase_request).await?; + *guard = new_guard; + + conflicts + }; + + Ok(conflicts) + } + + async fn blocking_commit_internal( + &self, + rebase_request: Option, + ) -> Result, TransactionsError> { + let mut guard = self.conns_state.lock().await; + + let (new_guard, conflicts) = guard + .take() + .blocking_commit(&self.tenancy, rebase_request) + .await?; + *guard = new_guard; + + Ok(conflicts) + } + pub fn to_builder(&self) -> DalContextBuilder { DalContextBuilder { services_context: self.services_context.clone(), @@ -247,17 +429,70 @@ impl DalContext { } /// Consumes all inner transactions and committing all changes made within them. - pub async fn commit(&self) -> Result<(), TransactionsError> { + pub async fn commit(&self) -> Result, TransactionsError> { + let rebase_request = match self.write_snapshot().await? { + Some(workspace_snapshot_id) => Some(self.get_rebase_request(workspace_snapshot_id)?), + None => None, + }; + + Ok(if self.blocking { + self.blocking_commit_internal(rebase_request).await? + } else { + self.commit_internal(rebase_request).await? 
+ }) + } + + pub async fn commit_no_rebase(&self) -> Result<(), TransactionsError> { if self.blocking { - self.blocking_commit().await?; + self.blocking_commit_internal(None).await?; } else { - let mut guard = self.conns_state.lock().await; - *guard = guard.take().commit().await?; + self.commit_internal(None).await?; } Ok(()) } + pub fn change_set_pointer(&self) -> Result<&ChangeSetPointer, TransactionsError> { + match self.change_set_pointer.as_ref() { + Some(csp_ref) => Ok(csp_ref), + None => Err(TransactionsError::ChangeSetPointerNotSet), + } + } + + /// Fetch the change set pointer for the current change set visibility + /// Should only be called by DalContextBuilder or by ourselves if changing visibility or + /// refetching after a commit + pub fn set_change_set_pointer( + &mut self, + change_set_pointer: ChangeSetPointer, + ) -> Result<&ChangeSetPointer, TransactionsError> { + // "fork" a new change set pointer for this dal context "edit session". This gives us a new + // Ulid generator and new vector clock id so that concurrent editing conflicts can be + // resolved by the rebaser. 
This change set pointer is not persisted to the database (the + // rebaser will persist a new one if it can) + self.change_set_pointer = Some( + change_set_pointer + .editing_changeset() + .map_err(|err| TransactionsError::ChangeSet(err.to_string()))?, + ); + + self.change_set_pointer() + } + + pub fn set_workspace_snapshot(&mut self, workspace_snapshot: WorkspaceSnapshot) { + self.workspace_snapshot = Some(Arc::new(RwLock::new(workspace_snapshot))); + } + + /// Fetch the workspace snapshot for the current visibility + pub fn workspace_snapshot( + &self, + ) -> Result<&Arc>, WorkspaceSnapshotError> { + match &self.workspace_snapshot { + Some(workspace_snapshot) => Ok(workspace_snapshot), + None => Err(WorkspaceSnapshotError::WorkspaceSnapshotNotFetched), + } + } + pub fn blocking(&self) -> bool { self.blocking } @@ -276,12 +511,14 @@ impl DalContext { /// Consumes all inner transactions, committing all changes made within them, and /// blocks until all queued jobs have reported as finishing. - pub async fn blocking_commit(&self) -> Result<(), TransactionsError> { - let mut guard = self.conns_state.lock().await; - - *guard = guard.take().blocking_commit().await?; + pub async fn blocking_commit(&self) -> Result, TransactionsError> { + info!("blocking_commit"); + let rebase_request = match self.write_snapshot().await? { + Some(workspace_snapshot_id) => Some(self.get_rebase_request(workspace_snapshot_id)?), + None => None, + }; - Ok(()) + self.blocking_commit_internal(rebase_request).await } /// Rolls all inner transactions back, discarding all changes made within them. @@ -331,6 +568,14 @@ impl DalContext { self.visibility = visibility; } + /// Updates this context with a new [`Visibility`], specific to the new engine. 
+ pub fn update_visibility_v2(&mut self, change_set_v2: &ChangeSetPointer) { + self.update_visibility(Visibility::new( + ChangeSetPk::from(Ulid::from(change_set_v2.id)), + None, + )); + } + /// Runs a block of code with "deleted" [`Visibility`] DalContext using the same transactions pub async fn run_with_deleted_visibility(&self, fun: F) -> R where @@ -429,15 +674,16 @@ impl DalContext { Ok(()) } - pub async fn enqueue_fix(&self, job: Box) -> Result<(), TransactionsError> { - self.txns().await?.job_queue.enqueue_job(job).await; - Ok(()) - } + // pub async fn enqueue_fix(&self, job: Box) -> Result<(), TransactionsError> { + // self.txns().await?.job_queue.enqueue_job(job).await; + // Ok(()) + // } + // - pub async fn enqueue_refresh(&self, job: Box) -> Result<(), TransactionsError> { - self.txns().await?.job_queue.enqueue_job(job).await; - Ok(()) - } + // pub async fn enqueue_refresh(&self, job: Box) -> Result<(), TransactionsError> { + // self.txns().await?.job_queue.enqueue_job(job).await; + // Ok(()) + // } pub async fn enqueue_dependent_values_update( &self, @@ -535,6 +781,11 @@ impl DalContext { self.services_context.module_index_url.as_deref() } + /// Gets a reference to the content store. + pub fn content_store(&self) -> &Arc> { + &self.content_store + } + /// Determines if a standard model object matches the tenancy of the current context and /// is in the same visibility. pub async fn check_tenancy( @@ -553,14 +804,24 @@ impl DalContext { /// Needed to remove universal tenancy while packages aren't a thing #[instrument(level = "debug", skip_all)] pub async fn import_builtins(&self) -> Result<(), TransactionsError> { - self.txns() - .await? - .pg() - .execute("SELECT import_builtins_v1($1)", &[self.tenancy()]) - .await?; + // TODO(nick,zack,jacob): restore the ability to "import builtins" via the graph work. + // let source_workspace_pk = WorkspacePk::NONE; + // self.txns() + // .await? 
+ // .pg() + // .execute( + // "SELECT import_builtins_v1($1, $2)", + // &[self.tenancy(), &source_workspace_pk], + // ) + // .await?; Ok(()) } + // NOTE(nick,zack,jacob): likely a temporary func to get the change set id from the visibility. + pub fn change_set_id(&self) -> ChangeSetPointerId { + ChangeSetPointerId::from(Ulid::from(self.visibility.change_set_pk)) + } + pub fn access_builder(&self) -> AccessBuilder { AccessBuilder::new(self.tenancy, self.history_actor) } @@ -649,9 +910,12 @@ pub struct DalContextBuilder { } impl DalContextBuilder { - /// Contructs and returns a new [`DalContext`] using a default [`RequestContext`]. + /// Constructs and returns a new [`DalContext`] using a default [`RequestContext`]. pub async fn build_default(&self) -> Result { let conns = self.services_context.connections().await?; + // should we move this into Connections? + let content_store = self.services_context.content_store().await?; + Ok(DalContext { services_context: self.services_context.clone(), blocking: self.blocking, @@ -659,16 +923,21 @@ impl DalContextBuilder { tenancy: Tenancy::new_empty(), visibility: Visibility::new_head(false), history_actor: HistoryActor::SystemInit, + content_store: Arc::new(Mutex::new(content_store)), no_dependent_values: self.no_dependent_values, + workspace_snapshot: None, + change_set_pointer: None, }) } - /// Contructs and returns a new [`DalContext`] using a [`RequestContext`]. + /// Constructs and returns a new [`DalContext`] using a [`RequestContext`]. 
pub async fn build_head( &self, access_builder: AccessBuilder, ) -> Result { let conns = self.services_context.connections().await?; + let content_store = self.services_context.content_store().await?; + Ok(DalContext { services_context: self.services_context.clone(), blocking: self.blocking, @@ -677,16 +946,20 @@ impl DalContextBuilder { history_actor: access_builder.history_actor, visibility: Visibility::new_head(false), no_dependent_values: self.no_dependent_values, + content_store: Arc::new(Mutex::new(content_store)), + workspace_snapshot: None, + change_set_pointer: None, }) } - /// Contructs and returns a new [`DalContext`] using a [`RequestContext`]. + /// Constructs and returns a new [`DalContext`] using a [`RequestContext`]. pub async fn build( &self, request_context: RequestContext, ) -> Result { let conns = self.services_context.connections().await?; - Ok(DalContext { + let content_store = self.services_context.content_store().await?; + let mut ctx = DalContext { services_context: self.services_context.clone(), blocking: self.blocking, conns_state: Arc::new(Mutex::new(ConnectionState::new_from_conns(conns))), @@ -694,7 +967,14 @@ impl DalContextBuilder { visibility: request_context.visibility, history_actor: request_context.history_actor, no_dependent_values: self.no_dependent_values, - }) + content_store: Arc::new(Mutex::new(content_store)), + workspace_snapshot: None, + change_set_pointer: None, + }; + + ctx.update_snapshot_to_visibility().await?; + + Ok(ctx) } /// Gets a reference to the PostgreSQL connection pool. 
@@ -730,6 +1010,12 @@ impl DalContextBuilder { #[remain::sorted] #[derive(Debug, Error)] pub enum TransactionsError { + #[error("change set error: {0}")] + ChangeSet(String), + #[error("change set pointer not found for change set id: {0}")] + ChangeSetPointerNotFound(ChangeSetPointerId), + #[error("Change set pointer not set on DalContext")] + ChangeSetPointerNotSet, #[error(transparent)] JobQueueProcessor(#[from] JobQueueProcessorError), #[error(transparent)] @@ -738,16 +1024,26 @@ pub enum TransactionsError { Pg(#[from] PgError), #[error(transparent)] PgPool(#[from] PgPoolError), + #[error("rebase of snapshot {0} change set id {1} failed {2}")] + RebaseFailed(WorkspaceSnapshotId, ChangeSetPointerId, String), + #[error(transparent)] + RebaserClient(#[from] RebaserClientError), #[error(transparent)] SerdeJson(#[from] serde_json::Error), + #[error("store error: {0}")] + Store(#[from] StoreError), #[error(transparent)] Tenancy(#[from] TenancyError), + #[error("Unable to acquire lock: {0}")] + TryLock(#[from] tokio::sync::TryLockError), #[error("cannot commit transactions on invalid connections state")] TxnCommit, #[error("cannot rollback transactions on invalid connections state")] TxnRollback, #[error("cannot start transactions without connections; state={0}")] TxnStart(&'static str), + #[error("workspace snapshot error: {0}")] + WorkspaceSnapshot(String), } /// A type which holds ownership over connections that can be used to start transactions. 
@@ -755,6 +1051,7 @@ pub enum TransactionsError { pub struct Connections { pg_conn: InstrumentedClient, nats_conn: NatsClient, + rebaser_config: RebaserClientConfig, job_processor: Box, } @@ -765,10 +1062,12 @@ impl Connections { pg_conn: InstrumentedClient, nats_conn: NatsClient, job_processor: Box, + rebaser_config: RebaserClientConfig, ) -> Self { Self { pg_conn, nats_conn, + rebaser_config, job_processor, } } @@ -778,8 +1077,14 @@ impl Connections { let pg_txn = PgTxn::create(self.pg_conn).await?; let nats_txn = self.nats_conn.transaction(); let job_processor = self.job_processor; + let rebaser_config = self.rebaser_config; - Ok(Transactions::new(pg_txn, nats_txn, job_processor)) + Ok(Transactions::new( + pg_txn, + nats_txn, + job_processor, + rebaser_config, + )) } /// Gets a reference to a PostgreSQL connection. @@ -803,6 +1108,8 @@ pub struct Transactions { pg_txn: PgTxn, /// A NATS transaction. nats_txn: NatsTxn, + /// Rebaser client + rebaser_config: RebaserClientConfig, job_processor: Box, job_queue: JobQueue, #[allow(clippy::type_complexity)] @@ -810,16 +1117,77 @@ pub struct Transactions { Arc>>>, } +#[derive(Clone, Debug)] +pub struct RebaseRequest { + pub to_rebase_change_set_id: ChangeSetPointerId, + pub onto_workspace_snapshot_id: WorkspaceSnapshotId, + pub onto_vector_clock_id: VectorClockId, +} + +#[derive(Clone, Debug, Serialize, Deserialize)] +pub struct Conflicts { + conflicts_found: Vec, + updates_found_and_skipped: Vec, +} + +// TODO(nick): we need to determine the long term vision for tenancy-scoped subjects. We're leaking the tenancy into +// the connection state functions. I believe it is fine for now since rebasing is a very specific use case, but we may +// not want it long term. +async fn rebase( + tenancy: &Tenancy, + nats: NatsClient, + rebaser_config: RebaserClientConfig, + rebase_request: RebaseRequest, +) -> Result, TransactionsError> { + let start = Instant::now(); + + // TODO(nick): make this cleaner. 
+ let workspace_id = tenancy.workspace_pk().unwrap_or(WorkspacePk::NONE).into(); + let rebaser_client = rebaser_client::Client::new(nats, rebaser_config, workspace_id); + + info!("got client and requesting rebase: {:?}", start.elapsed()); + let response = rebaser_client + .request_rebase( + rebase_request.to_rebase_change_set_id.into(), + rebase_request.onto_workspace_snapshot_id.into(), + rebase_request.onto_vector_clock_id.into(), + ) + .await?; + info!("got response from rebaser: {:?}", start.elapsed()); + + match response { + ReplyRebaseMessage::Success { .. } => Ok(None), + ReplyRebaseMessage::Error { message } => Err(TransactionsError::RebaseFailed( + rebase_request.onto_workspace_snapshot_id, + rebase_request.to_rebase_change_set_id, + message, + )), + ReplyRebaseMessage::ConflictsFound { + conflicts_found, + updates_found_and_skipped, + } => { + let conflicts = Conflicts { + conflicts_found: serde_json::from_value(conflicts_found)?, + updates_found_and_skipped: serde_json::from_value(updates_found_and_skipped)?, + }; + + Ok(Some(conflicts)) + } + } +} + impl Transactions { /// Creates and returns a new `Transactions` instance. 
fn new( pg_txn: PgTxn, nats_txn: NatsTxn, job_processor: Box, + rebaser_config: RebaserClientConfig, ) -> Self { Self { pg_txn, nats_txn, + rebaser_config, job_processor, job_queue: JobQueue::new(), dependencies_update_component: Default::default(), @@ -844,14 +1212,33 @@ impl Transactions { skip_all, fields() )] - pub async fn commit_into_conns(self) -> Result { - self.run_dependencies_update_component().await?; + pub async fn commit_into_conns( + self, + tenancy: &Tenancy, + rebase_request: Option, + ) -> Result<(Connections, Option), TransactionsError> { let pg_conn = self.pg_txn.commit_into_conn().await?; let nats_conn = self.nats_txn.commit_into_conn().await?; + + let conflicts = if let Some(rebase_request) = rebase_request { + let start = Instant::now(); + let conflicts = rebase( + tenancy, + nats_conn.clone(), + self.rebaser_config.clone(), + rebase_request, + ) + .await?; + info!("rebase took: {:?}", start.elapsed()); + conflicts + } else { + None + }; + self.job_processor.process_queue(self.job_queue).await?; - let conns = Connections::new(pg_conn, nats_conn, self.job_processor); + let conns = Connections::new(pg_conn, nats_conn, self.job_processor, self.rebaser_config); - Ok(conns) + Ok((conns, conflicts)) } /// Consumes all inner transactions, committing all changes made within them, and returns @@ -862,16 +1249,33 @@ impl Transactions { skip_all, fields() )] - pub async fn blocking_commit_into_conns(self) -> Result { - self.run_dependencies_update_component().await?; + pub async fn blocking_commit_into_conns( + self, + tenancy: &Tenancy, + rebase_request: Option, + ) -> Result<(Connections, Option), TransactionsError> { let pg_conn = self.pg_txn.commit_into_conn().await?; let nats_conn = self.nats_txn.commit_into_conn().await?; + + let conflicts = if let Some(rebase_request) = rebase_request { + info!("rebase request"); + rebase( + tenancy, + nats_conn.clone(), + self.rebaser_config.clone(), + rebase_request, + ) + .await? 
+ } else { + None + }; + self.job_processor .blocking_process_queue(self.job_queue) .await?; - let conns = Connections::new(pg_conn, nats_conn, self.job_processor); + let conns = Connections::new(pg_conn, nats_conn, self.job_processor, self.rebaser_config); - Ok(conns) + Ok((conns, conflicts)) } /// Rolls all inner transactions back, discarding all changes made within them, and returns @@ -882,7 +1286,7 @@ impl Transactions { pub async fn rollback_into_conns(self) -> Result { let pg_conn = self.pg_txn.rollback_into_conn().await?; let nats_conn = self.nats_txn.rollback_into_conn().await?; - let conns = Connections::new(pg_conn, nats_conn, self.job_processor); + let conns = Connections::new(pg_conn, nats_conn, self.job_processor, self.rebaser_config); Ok(conns) } diff --git a/lib/dal/src/diagram.rs b/lib/dal/src/diagram.rs index 2db9a37e9a..68466700f8 100644 --- a/lib/dal/src/diagram.rs +++ b/lib/dal/src/diagram.rs @@ -1,46 +1,46 @@ use serde::{Deserialize, Serialize}; use si_data_pg::PgError; +use std::collections::{hash_map, HashMap}; use std::num::{ParseFloatError, ParseIntError}; -use strum::{AsRefStr, Display, EnumString}; - +use strum::{AsRefStr, Display, EnumIter, EnumString}; use thiserror::Error; -use crate::change_status::ChangeStatusError; - -use crate::diagram::summary_diagram::{SummaryDiagramComponent, SummaryDiagramEdge}; - +use crate::actor_view::ActorView; +use crate::attribute::prototype::argument::{ + AttributePrototypeArgumentError, AttributePrototypeArgumentId, +}; +use crate::attribute::value::AttributeValueError; +use crate::change_status::ChangeStatus; +use crate::component::ComponentError; +use crate::history_event::HistoryEventMetadata; use crate::provider::external::ExternalProviderError; use crate::provider::internal::InternalProviderError; use crate::schema::variant::SchemaVariantError; -use crate::socket::SocketError; +use crate::workspace_snapshot::WorkspaceSnapshotError; use crate::{ - ActionPrototypeError, AttributeContextBuilderError, 
AttributePrototypeArgumentError, - AttributeValueError, ComponentError, ComponentId, DalContext, EdgeError, NodeError, NodeId, - NodeKind, PropError, SchemaError, SocketId, StandardModelError, + AttributePrototypeId, Component, ComponentId, DalContext, ExternalProviderId, + HistoryEventError, InternalProviderId, ProviderArity, SchemaId, SchemaVariant, SchemaVariantId, + StandardModelError, }; -pub mod connection; -pub(crate) mod summary_diagram; -pub use summary_diagram::falsify_using_default_variant_for_components_of_schema; -pub use summary_diagram::{SummaryDiagramError, SummaryDiagramResult}; +//pub(crate) mod summary_diagram; + +// TODO(nick): this module eventually goes the way of the dinosaur. +// pub mod connection; #[remain::sorted] #[derive(Error, Debug)] pub enum DiagramError { - #[error("action prototype: {0}")] - ActionPrototype(#[from] ActionPrototypeError), - #[error("attribute context error: {0}")] - AttributeContextBuilder(#[from] AttributeContextBuilderError), #[error("attribute prototype argument error: {0}")] AttributePrototypeArgument(#[from] AttributePrototypeArgumentError), + #[error("attribute prototype argument targets not found for attribute prototype argument ({0}) found via external provider: {1}")] + AttributePrototypeArgumentTargetsNotFound(AttributePrototypeArgumentId, ExternalProviderId), #[error("attribute prototype not found")] AttributePrototypeNotFound, #[error("attribute value error: {0}")] AttributeValue(#[from] AttributeValueError), #[error("attribute value not found")] AttributeValueNotFound, - #[error("change status error: {0}")] - ChangeStatus(#[from] ChangeStatusError), #[error("component error: {0}")] Component(#[from] ComponentError), #[error("component not found")] @@ -49,24 +49,20 @@ pub enum DiagramError { ComponentStatusNotFound(ComponentId), #[error("deletion timestamp not found")] DeletionTimeStamp, - #[error("edge error: {0}")] - Edge(#[from] EdgeError), + #[error("destination attribute prototype not found for 
inter component attribute prototype argument: {0}")] + DestinationAttributePrototypeNotFound(AttributePrototypeArgumentId), + #[error("destination explicit internal provider not found for attribute prototype ({0}) and inter component attribute prototype argument ({1})")] + DestinationExplicitInternalProviderNotFound(AttributePrototypeId, AttributePrototypeArgumentId), #[error("edge not found")] EdgeNotFound, #[error("external provider error: {0}")] ExternalProvider(#[from] ExternalProviderError), - #[error("external provider not found for socket id: {0}")] - ExternalProviderNotFoundForSocket(SocketId), + #[error("history event error: {0}")] + HistoryEvent(#[from] HistoryEventError), #[error("internal provider error: {0}")] InternalProvider(#[from] InternalProviderError), - #[error("internal provider not found for socket id: {0}")] - InternalProviderNotFoundForSocket(SocketId), - #[error("node error: {0}")] - Node(#[from] NodeError), #[error("node not found")] NodeNotFound, - #[error("no node positions found for node ({0}) and kind ({1})")] - NoNodePositionsFound(NodeId, NodeKind), #[error(transparent)] ParseFloat(#[from] ParseFloatError), #[error(transparent)] @@ -75,70 +71,287 @@ pub enum DiagramError { Pg(#[from] PgError), #[error("position not found")] PositionNotFound, - #[error("prop error: {0}")] - Prop(#[from] PropError), - #[error("schema error: {0}")] - Schema(#[from] SchemaError), #[error("schema not found")] SchemaNotFound, - #[error(transparent)] + #[error("schema variant error: {0}")] SchemaVariant(#[from] SchemaVariantError), #[error("schema variant not found")] SchemaVariantNotFound, - #[error("socket error: {0}")] - Socket(#[from] SocketError), + #[error("serde error: {0}")] + Serde(#[from] serde_json::Error), #[error("socket not found")] SocketNotFound, #[error("standard model error: {0}")] StandardModel(#[from] StandardModelError), - #[error("summary diagram error: {0}")] - SummaryDiagram(String), + #[error("could not acquire lock: {0}")] + 
TryLock(#[from] tokio::sync::TryLockError), + #[error("workspace snapshot error: {0}")] + WorkspaceSnapshot(#[from] WorkspaceSnapshotError), } +pub type NodeId = ComponentId; +pub type EdgeId = AttributePrototypeArgumentId; + pub type DiagramResult = Result; -/// The kinds of [`Diagrams`](Diagram) available to choose between for rendering. +#[derive(Deserialize, Serialize, Debug, Clone, PartialEq, Eq)] +#[serde(rename_all = "camelCase")] +pub struct GridPoint { + pub x: isize, + pub y: isize, +} + +#[derive(Deserialize, Serialize, Debug, Clone, PartialEq, Eq)] +#[serde(rename_all = "camelCase")] +pub struct Size2D { + pub width: isize, + pub height: isize, +} + +#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq)] +#[serde(rename_all(serialize = "camelCase"))] +pub struct SummaryDiagramComponent { + pub id: ComponentId, + pub component_id: ComponentId, + pub schema_name: String, + pub schema_id: SchemaId, + pub schema_variant_id: SchemaVariantId, + pub schema_variant_name: String, + pub schema_category: String, + pub sockets: serde_json::Value, + pub node_id: NodeId, + pub display_name: String, + pub position: GridPoint, + pub size: Size2D, + pub color: String, + pub node_type: String, + pub change_status: String, + pub has_resource: bool, + pub parent_node_id: Option, + pub child_node_ids: serde_json::Value, + pub created_info: serde_json::Value, + pub updated_info: serde_json::Value, + pub deleted_info: serde_json::Value, +} + +#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq)] +#[serde(rename_all(serialize = "camelCase"))] +pub struct SummaryDiagramEdge { + pub id: EdgeId, + pub edge_id: EdgeId, + pub from_node_id: NodeId, + pub from_socket_id: ExternalProviderId, + pub to_node_id: NodeId, + pub to_socket_id: InternalProviderId, + pub change_status: String, + pub created_info: serde_json::Value, + pub deleted_info: serde_json::Value, +} + +#[derive(Deserialize, Serialize, Debug, Clone, PartialEq, Eq)] +#[serde(rename_all = "camelCase")] 
+pub struct DiagramSocket { + pub id: String, + pub label: String, + pub connection_annotations: Vec, + pub direction: DiagramSocketDirection, + pub max_connections: Option, + pub is_required: Option, + pub node_side: DiagramSocketNodeSide, +} + #[remain::sorted] #[derive( - AsRefStr, Clone, Copy, Debug, Deserialize, Display, EnumString, Eq, PartialEq, Serialize, + AsRefStr, + Clone, + Copy, + Debug, + Deserialize, + Display, + EnumIter, + EnumString, + Eq, + PartialEq, + Serialize, )] #[serde(rename_all = "camelCase")] #[strum(serialize_all = "camelCase")] -pub enum DiagramKind { - /// Represents the collection of [`Components`](crate::Component) and connections between them - /// within a [`Workspace`](crate::Workspace) - Configuration, +pub enum DiagramSocketDirection { + Bidirectional, + Input, + Output, } -/// The shape of assembled graph-related information required to render a graphical/visual diagram. +#[remain::sorted] +#[derive( + AsRefStr, + Clone, + Copy, + Debug, + Deserialize, + Display, + EnumIter, + EnumString, + Eq, + PartialEq, + Serialize, +)] +#[serde(rename_all = "camelCase")] +#[strum(serialize_all = "camelCase")] +pub enum DiagramSocketNodeSide { + Left, + Right, +} #[derive(Deserialize, Serialize, Debug, Clone, PartialEq, Eq)] #[serde(rename_all = "camelCase")] pub struct Diagram { - /// The shape of assembled [`Node`](crate::Node) information to render graphical/visual nodes. - components: Vec, - /// The shape of assembled [`Edge`](crate::Edge) information to render graphical/visual edges. - edges: Vec, + pub components: Vec, + pub edges: Vec, } impl Diagram { /// Assemble a [`Diagram`](Self) based on existing [`Nodes`](crate::Node) and /// [`Connections`](crate::Connection). 
pub async fn assemble(ctx: &DalContext) -> DiagramResult { - let components = summary_diagram::component_list(ctx) - .await - .map_err(|e| DiagramError::SummaryDiagram(e.to_string()))?; - let edges = summary_diagram::edge_list(ctx) - .await - .map_err(|e| DiagramError::SummaryDiagram(e.to_string()))?; - - Ok(Self { edges, components }) - } + let mut diagram_sockets: HashMap = HashMap::new(); + let mut diagram_edges: Vec = vec![]; - pub fn components(&self) -> &[SummaryDiagramComponent] { - &self.components - } + let components = Component::list(ctx).await?; + + let mut component_views = Vec::with_capacity(components.len()); + for component in &components { + for incoming_connection in component.incoming_connections(ctx).await? { + diagram_edges.push(SummaryDiagramEdge { + id: incoming_connection.attribute_prototype_argument_id, + edge_id: incoming_connection.attribute_prototype_argument_id, + from_node_id: incoming_connection.from_component_id, + from_socket_id: incoming_connection.from_external_provider_id, + to_node_id: incoming_connection.to_component_id, + to_socket_id: incoming_connection.to_internal_provider_id, + change_status: ChangeStatus::Added.to_string(), + created_info: serde_json::to_value(incoming_connection.created_info)?, + deleted_info: serde_json::to_value(incoming_connection.deleted_info)?, + }); + } + + let schema_variant = component.schema_variant(ctx).await?; + + let sockets = match diagram_sockets.entry(schema_variant.id()) { + hash_map::Entry::Vacant(entry) => { + let (external_providers, internal_providers) = + SchemaVariant::list_external_providers_and_explicit_internal_providers( + ctx, + schema_variant.id(), + ) + .await?; + + let mut sockets = vec![]; + + for ip in internal_providers { + sockets.push(DiagramSocket { + id: ip.id().to_string(), + label: ip.name().to_string(), + connection_annotations: vec![ip.name().to_string()], + direction: DiagramSocketDirection::Input, + max_connections: match ip.arity() { + ProviderArity::Many => 
None, + ProviderArity::One => Some(1), + }, + is_required: Some(false), + node_side: DiagramSocketNodeSide::Left, + }); + } + + for ep in external_providers { + sockets.push(DiagramSocket { + id: ep.id().to_string(), + label: ep.name().to_string(), + connection_annotations: vec![ep.name().to_string()], + direction: DiagramSocketDirection::Output, + max_connections: match ep.arity() { + ProviderArity::Many => None, + ProviderArity::One => Some(1), + }, + is_required: Some(false), + node_side: DiagramSocketNodeSide::Right, + }); + } + + let socket_value = serde_json::to_value(sockets)?; + + entry.insert(socket_value.to_owned()); + + socket_value + } + hash_map::Entry::Occupied(entry) => entry.get().to_owned(), + }; + + let schema = SchemaVariant::schema(ctx, schema_variant.id()).await?; + + let position = GridPoint { + x: component.x().parse::()?.round() as isize, + y: component.y().parse::()?.round() as isize, + }; + let size = match (component.width(), component.height()) { + (Some(h), Some(w)) => Size2D { + height: h.parse()?, + width: w.parse()?, + }, + _ => Size2D { + height: 500, + width: 500, + }, + }; + + let updated_info = { + let history_actor = ctx.history_actor(); + let actor = ActorView::from_history_actor(ctx, *history_actor).await?; + serde_json::to_value(HistoryEventMetadata { + actor, + timestamp: component.timestamp().updated_at, + })? + }; + + let created_info = { + let history_actor = ctx.history_actor(); + let actor = ActorView::from_history_actor(ctx, *history_actor).await?; + serde_json::to_value(HistoryEventMetadata { + actor, + timestamp: component.timestamp().created_at, + })? 
+ }; + + let component_view = SummaryDiagramComponent { + id: component.id(), + component_id: component.id(), + schema_name: schema.name().to_owned(), + schema_id: schema.id(), + schema_variant_id: schema_variant.id(), + schema_variant_name: schema_variant.name().to_owned(), + schema_category: schema_variant.category().to_owned(), + node_id: component.id(), + display_name: component.name(ctx).await?, + position, + size, + node_type: component.get_type(ctx).await?.to_string(), + color: component.color(ctx).await?.unwrap_or("#111111".into()), + change_status: ChangeStatus::Added.to_string(), + has_resource: false, + sockets, + parent_node_id: component.parent(ctx).await?, + child_node_ids: serde_json::to_value::>(vec![])?, + updated_info, + created_info, + deleted_info: serde_json::Value::Null, + }; + + component_views.push(component_view); + } - pub fn edges(&self) -> &[SummaryDiagramEdge] { - &self.edges + // TODO(nick): restore the ability to show edges. + Ok(Self { + edges: diagram_edges, + components: component_views, + }) } } diff --git a/lib/dal/src/diagram/summary_diagram.rs b/lib/dal/src/diagram/summary_diagram.rs index d20bc9b59a..b2ddc5e6c5 100644 --- a/lib/dal/src/diagram/summary_diagram.rs +++ b/lib/dal/src/diagram/summary_diagram.rs @@ -106,53 +106,6 @@ impl_standard_model! { history_event_message_name: "Summary Diagram Components" } -impl SummaryDiagramComponent { - pub fn has_resource(&self) -> bool { - self.has_resource - } - - pub async fn get_for_component_id( - ctx: &DalContext, - component_id: ComponentId, - ) -> SummaryDiagramResult> { - let maybe_row = ctx - .txns() - .await? - .pg() - .query_opt( - "SELECT DISTINCT ON (sdc.id) - row_to_json(sdc.*) AS object - FROM summary_diagram_components_v1($1, $2) AS sdc - WHERE component_id=$3 LIMIT 1", - &[ctx.tenancy(), ctx.visibility(), &component_id], - ) - .await?; - - Ok(standard_model::option_object_from_row(maybe_row)?) 
- } - - standard_model_accessor!(sockets, Json, SummaryDiagramResult); -} - -pub async fn update_socket_summary( - ctx: &DalContext, - component: &Component, -) -> SummaryDiagramResult<()> { - if let Some(mut summary_component) = - SummaryDiagramComponent::get_for_component_id(ctx, *component.id()).await? - { - if let Some(schema_variant) = component.schema_variant(ctx).await? { - let sockets = DiagramSocket::list(ctx, &schema_variant).await?; - summary_component - .set_sockets(ctx, serde_json::to_value(sockets)?) - .await?; - } - } - - Ok(()) -} - -#[instrument(level = "info", skip_all)] pub async fn create_component_entry( ctx: &DalContext, component: &Component, @@ -380,28 +333,6 @@ pub async fn component_list( Ok(objects) } -pk!(SummaryDiagramEdgePk); -#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq)] -#[serde(rename_all(serialize = "camelCase"))] -pub struct SummaryDiagramEdge { - pk: SummaryDiagramEdgePk, - id: EdgeId, - #[serde(flatten)] - tenancy: Tenancy, - #[serde(flatten)] - timestamp: Timestamp, - #[serde(flatten)] - visibility: Visibility, - edge_id: EdgeId, - from_node_id: NodeId, - from_socket_id: SocketId, - to_node_id: NodeId, - to_socket_id: SocketId, - change_status: String, - created_info: serde_json::Value, - deleted_info: serde_json::Value, -} - impl_standard_model! 
{ model: SummaryDiagramEdge, pk: SummaryDiagramEdgePk, @@ -594,20 +525,6 @@ pub async fn edge_list(ctx: &DalContext) -> SummaryDiagramResult = Result; + pub mod argument; pub mod backend; -pub mod before; +// pub before; pub mod binding; pub mod binding_return_value; pub mod execution; -pub mod identity; +// pub mod identity; +pub mod before; pub mod intrinsics; -pub mod variant; -pub fn is_intrinsic(name: &str) -> bool { - intrinsics::IntrinsicFunc::iter().any(|intrinsic| intrinsic.name() == name) +impl From for FuncContentV1 { + fn from(value: Func) -> Self { + Self { + timestamp: value.timestamp, + display_name: value.display_name, + description: value.description, + link: value.link, + hidden: value.hidden, + builtin: value.builtin, + backend_response_type: value.backend_response_type, + handler: value.handler, + code_base64: value.code_base64, + code_blake3: value.code_blake3, + } + } } -#[remain::sorted] -#[derive(Error, Debug)] -pub enum FuncError { - #[error("cyclone value encrypt error: {0}")] - CycloneValueEncrypt(#[from] CycloneValueEncryptError), - #[error("error decoding code_base64: {0}")] - Decode(#[from] base64::DecodeError), - #[error("utf8 encoding error: {0}")] - FromUtf8(#[from] FromUtf8Error), - #[error("func argument error: {0}")] - FuncArgument(#[from] FuncArgumentError), - #[error("func binding error: {0}")] - FuncBinding(String), - #[error("func {0} cannot be converted to frontend variant")] - FuncCannotBeTurnedIntoVariant(FuncId), - #[error("history event error: {0}")] - HistoryEvent(#[from] HistoryEventError), - /// Could not find [`FuncArgument`](crate::FuncArgument) corresponding to the identity [`Func`]. - #[error("identity func argument not found")] - IdentityFuncArgumentNotFound, - /// Could not find the identity [`Func`]. 
- #[error("identity func not found")] - IdentityFuncNotFound, - #[error("intrinsic parse error: {0} is not an intrinsic")] - IntrinsicParse(String), - #[error("intrinsic spec creation error {0}")] - IntrinsicSpecCreation(String), - #[error("Function missing expected code: {0}")] - MissingCode(FuncId), - #[error("Function missing expected handler: {0}")] - MissingHandler(FuncId), - #[error("nats txn error: {0}")] - Nats(#[from] NatsError), - #[error("could not find func by id: {0}")] - NotFound(FuncId), - #[error("could not find func by name: {0}")] - NotFoundByName(String), - #[error("pg error: {0}")] - Pg(#[from] PgError), - #[error("secret error: {0}")] - Secret(#[from] SecretError), - #[error("error serializing/deserializing json: {0}")] - SerdeJson(#[from] serde_json::Error), - #[error("standard model error: {0}")] - StandardModelError(#[from] StandardModelError), - /// When attempting to find the identity [`Func`], there were too many [`Funcs`](Func) returned. - #[error("too many funcs found when looking for identity func")] - TooManyFuncsFoundForIdentity, - #[error("transactions error: {0}")] - Transactions(#[from] TransactionsError), +#[derive(EnumDiscriminants, Serialize, Deserialize, PartialEq)] +// TODO(nick,jacob,zack): decide if this will work with postcard. 
+// #[serde(tag = "version")] +pub enum FuncContent { + V1(FuncContentV1), } -pub type FuncResult = Result; +#[derive(Debug, Clone, Deserialize, Serialize, PartialEq)] +pub struct FuncContentV1 { + pub timestamp: Timestamp, + pub display_name: Option, + pub description: Option, + pub link: Option, + pub hidden: bool, + pub builtin: bool, + pub backend_response_type: FuncBackendResponseType, + pub handler: Option, + pub code_base64: Option, + /// A hash of the code above + pub code_blake3: ContentHash, +} #[derive(Deserialize, Serialize, Debug, Clone, PartialEq, Eq)] pub struct FuncMetadataView { @@ -94,7 +108,10 @@ pub struct FuncMetadataView { pub link: Option, } -pk!(FuncPk); +pub fn is_intrinsic(name: &str) -> bool { + intrinsics::IntrinsicFunc::iter().any(|intrinsic| intrinsic.name() == name) +} + pk!(FuncId); /// A `Func` is the declaration of the existence of a function. It has a name, @@ -106,127 +123,143 @@ pk!(FuncId); /// the `handler` value should be `myValidator`. #[derive(Deserialize, Serialize, Debug, Clone, PartialEq, Eq)] pub struct Func { - pk: FuncPk, - id: FuncId, - name: String, - display_name: Option, - description: Option, - link: Option, - hidden: bool, - builtin: bool, - backend_kind: FuncBackendKind, - backend_response_type: FuncBackendResponseType, - handler: Option, - code_base64: Option, - code_sha256: String, - #[serde(flatten)] - tenancy: Tenancy, - #[serde(flatten)] - timestamp: Timestamp, - #[serde(flatten)] - visibility: Visibility, -} - -impl_standard_model! 
{ - model: Func, - pk: FuncPk, - id: FuncId, - table_name: "funcs", - history_event_label_base: "function", - history_event_message_name: "Function" + pub id: FuncId, + pub timestamp: Timestamp, + pub name: String, + pub display_name: Option, + pub description: Option, + pub link: Option, + pub hidden: bool, + pub builtin: bool, + pub backend_kind: FuncBackendKind, + pub backend_response_type: FuncBackendResponseType, + pub handler: Option, + pub code_base64: Option, + pub code_blake3: ContentHash, } impl Func { + pub fn assemble(node_weight: &FuncNodeWeight, content: &FuncContentV1) -> Self { + let content = content.to_owned(); + Self { + id: node_weight.id().into(), + timestamp: content.timestamp, + name: node_weight.name().to_owned(), + display_name: content.display_name, + description: content.description, + link: content.link, + hidden: content.hidden, + builtin: content.builtin, + backend_kind: node_weight.backend_kind(), + backend_response_type: content.backend_response_type, + handler: content.handler, + code_base64: content.code_base64, + code_blake3: content.code_blake3, + } + } + + #[allow(clippy::too_many_arguments)] pub async fn new( ctx: &DalContext, - name: impl AsRef, + name: impl Into, + display_name: Option>, + description: Option>, + link: Option>, + hidden: bool, + builtin: bool, backend_kind: FuncBackendKind, backend_response_type: FuncBackendResponseType, + handler: Option>, + code_base64: Option>, ) -> FuncResult { - let name = name.as_ref(); - let row = ctx - .txns() - .await? 
- .pg() - .query_one( - "SELECT object FROM func_create_v1($1, $2, $3, $4, $5)", - &[ - ctx.tenancy(), - ctx.visibility(), - &name, - &backend_kind.as_ref(), - &backend_response_type.as_ref(), - ], - ) - .await?; - let object = standard_model::finish_create_from_row(ctx, row).await?; - Ok(object) + let timestamp = Timestamp::now(); + let _finalized_once = false; + + let code_base64 = code_base64.map(Into::into); + + let code_blake3 = ContentHash::new(code_base64.as_deref().unwrap_or("").as_bytes()); + + let content = FuncContentV1 { + timestamp, + display_name: display_name.map(Into::into), + description: description.map(Into::into), + link: link.map(Into::into), + hidden, + builtin, + backend_response_type, + handler: handler.map(Into::into), + code_base64, + code_blake3, + }; + + let hash = ctx + .content_store() + .lock() + .await + .add(&FuncContent::V1(content.clone()))?; + + let change_set = ctx.change_set_pointer()?; + let id = change_set.generate_ulid()?; + let node_weight = NodeWeight::new_func(change_set, id, name.into(), backend_kind, hash)?; + + let mut workspace_snapshot = ctx.workspace_snapshot()?.write().await; + let _node_index = workspace_snapshot.add_node(node_weight.clone())?; + + let func_category_id = + workspace_snapshot.get_category_node(None, CategoryNodeKind::Func)?; + workspace_snapshot.add_edge( + func_category_id, + EdgeWeight::new(change_set, EdgeWeightKind::Use)?, + id, + )?; + + let func_node_weight = node_weight.get_func_node_weight()?; + + Ok(Self::assemble(&func_node_weight, &content)) } - pub async fn duplicate_with_args( - &self, - ctx: &DalContext, - name: Option, - ) -> FuncResult { - let args = FuncArgument::list_for_func(ctx, *self.id()).await?; - - let dup = self.duplicate(ctx, name).await?; - - for arg in args { - FuncArgument::new( - ctx, - arg.name(), - *arg.kind(), - arg.element_kind().map(ToOwned::to_owned), - dup.id, - ) - .await?; + pub fn metadata_view(&self) -> FuncMetadataView { + FuncMetadataView { + display_name: 
self + .display_name + .as_deref() + .unwrap_or(self.name.as_str()) + .into(), + description: self.description.as_deref().map(Into::into), + link: None, } + } - Ok(dup) + pub async fn get_by_id(ctx: &DalContext, id: FuncId) -> FuncResult { + let (node_weight, content) = Self::get_node_weight_and_content(ctx, id).await?; + Ok(Self::assemble(&node_weight, &content)) } - /// Creates a new [`Func`] from [`self`](Func). All relevant fields are duplicated, but rows - /// existing on relationship tables (e.g. "belongs_to" or "many_to_many") are not. - pub async fn duplicate(&self, ctx: &DalContext, new_name: Option) -> FuncResult { - // Generate a unique name and make sure it's not in use, unless we were - // passed in a name, in which case just use that - let mut new_unique_name; - match new_name { - Some(new_name) => { - new_unique_name = new_name; + pub async fn find_by_name( + ctx: &DalContext, + name: impl AsRef, + ) -> FuncResult> { + let workspace_snapshot = ctx.workspace_snapshot()?.read().await; + let func_category_id = + workspace_snapshot.get_category_node(None, CategoryNodeKind::Func)?; + let func_indices = workspace_snapshot.outgoing_targets_for_edge_weight_kind( + func_category_id, + EdgeWeightKindDiscriminants::Use, + )?; + let name = name.as_ref(); + for func_index in func_indices { + let node_weight = workspace_snapshot.get_node_weight(func_index)?; + if let NodeWeight::Func(inner_weight) = node_weight { + if inner_weight.name() == name { + return Ok(Some(inner_weight.id().into())); + } } - None => loop { - new_unique_name = format!("{}{}", self.name(), generate_unique_id(4)); - if Self::find_by_name(ctx, &new_unique_name).await?.is_none() { - break; - }; - }, } - - let mut new_func = Self::new( - ctx, - new_unique_name, - *self.backend_kind(), - *self.backend_response_type(), - ) - .await?; - - // Duplicate all fields on the func that do not come in through the constructor. 
- new_func.set_display_name(ctx, self.display_name()).await?; - new_func.set_description(ctx, self.description()).await?; - new_func.set_link(ctx, self.link()).await?; - new_func.set_hidden(ctx, self.hidden).await?; - new_func.set_builtin(ctx, self.builtin).await?; - new_func.set_handler(ctx, self.handler()).await?; - new_func.set_code_base64(ctx, self.code_base64()).await?; - - Ok(new_func) + Ok(None) } - #[allow(clippy::result_large_err)] pub fn code_plaintext(&self) -> FuncResult> { - Ok(match self.code_base64() { + Ok(match &self.code_base64 { Some(base64_code) => Some(String::from_utf8( general_purpose::STANDARD_NO_PAD.decode(base64_code)?, )?), @@ -234,156 +267,335 @@ impl Func { }) } - pub async fn is_builtin(&self, ctx: &DalContext) -> FuncResult { - let row = ctx - .txns() - .await? - .pg() - .query_opt( - "SELECT id FROM funcs WHERE id = $1 and tenancy_workspace_pk = $2 LIMIT 1", - &[self.id(), &WorkspacePk::NONE], - ) - .await?; - - Ok(row.is_some()) + pub async fn modify_by_id(ctx: &DalContext, id: FuncId, lambda: L) -> FuncResult + where + L: FnOnce(&mut Func) -> FuncResult<()>, + { + let func = Func::get_by_id(ctx, id).await?; + let modified_func = func.modify(ctx, lambda).await?; + Ok(modified_func) } - pub async fn set_code_plaintext( - &mut self, + pub async fn get_node_weight_and_content( ctx: &DalContext, - code: Option<&'_ str>, - ) -> FuncResult<()> { - self.set_code_base64( - ctx, - code.as_ref() - .map(|code| general_purpose::STANDARD_NO_PAD.encode(code)), - ) - .await - } + func_id: FuncId, + ) -> FuncResult<(FuncNodeWeight, FuncContentV1)> { + let (func_node_weight, hash) = Self::get_node_weight_and_content_hash(ctx, func_id).await?; - pub fn metadata_view(&self) -> FuncMetadataView { - FuncMetadataView { - display_name: self.display_name().unwrap_or_else(|| self.name()).into(), - description: self.description().map(Into::into), - link: self.description().map(Into::into), - } - } + let content: FuncContent = 
ctx.content_store().lock().await.get(&hash).await?.ok_or( + WorkspaceSnapshotError::MissingContentFromStore(func_id.into()), + )?; - pub async fn for_binding(ctx: &DalContext, func_binding: &FuncBinding) -> FuncResult { - let row = ctx - .txns() - .await? - .pg() - .query_one( - "SELECT row_to_json(funcs.*) AS object - FROM funcs_v1($1, $2) AS funcs - INNER JOIN func_binding_belongs_to_func_v1($1, $2) AS func_binding_belongs_to_func - ON funcs.id = func_binding_belongs_to_func.belongs_to_id - WHERE func_binding_belongs_to_func.object_id = $3", - &[ctx.tenancy(), ctx.visibility(), func_binding.id()], - ) - .await?; - let object = standard_model::finish_create_from_row(ctx, row).await?; - Ok(object) + // NOTE(nick,jacob,zack): if we had a v2, then there would be migration logic here. + let FuncContent::V1(inner) = content; + + Ok((func_node_weight, inner)) } - pub async fn find_by_name(ctx: &DalContext, name: &str) -> FuncResult> { - Ok(Self::find_by_attr(ctx, "name", &name).await?.pop()) + async fn get_node_weight_and_content_hash( + ctx: &DalContext, + func_id: FuncId, + ) -> FuncResult<(FuncNodeWeight, ContentHash)> { + let workspace_snapshot = ctx.workspace_snapshot()?.read().await; + let id: Ulid = func_id.into(); + let node_index = workspace_snapshot.get_node_index_by_id(id)?; + let node_weight = workspace_snapshot.get_node_weight(node_index)?; + + let hash = node_weight.content_hash(); + let func_node_weight = node_weight.get_func_node_weight()?; + Ok((func_node_weight, hash)) } - /// Returns `true` if this function is one handled internally by the `dal`, `false` if the - /// function is one that will be executed by `veritech` - pub fn is_intrinsic(&self) -> bool { - is_intrinsic(self.name()) + pub async fn modify(self, ctx: &DalContext, lambda: L) -> FuncResult + where + L: FnOnce(&mut Self) -> FuncResult<()>, + { + let mut func = self; + + let before = FuncContentV1::from(func.clone()); + lambda(&mut func)?; + + let (mut node_weight, _) = 
Func::get_node_weight_and_content_hash(ctx, func.id).await?; + + let mut workspace_snapshot = ctx.workspace_snapshot()?.write().await; + + // If both either the name or backend_kind have changed, *and* parts of the FuncContent + // have changed, this ends up updating the node for the function twice. This could be + // optimized to do it only once. + if func.name.as_str() != node_weight.name() + || func.backend_kind != node_weight.backend_kind() + { + let original_node_index = workspace_snapshot.get_node_index_by_id(func.id)?; + + node_weight + .set_name(func.name.as_str()) + .set_backend_kind(func.backend_kind); + + workspace_snapshot.add_node(NodeWeight::Func( + node_weight.new_with_incremented_vector_clock(ctx.change_set_pointer()?)?, + ))?; + + workspace_snapshot.replace_references(original_node_index)?; + } + let updated = FuncContentV1::from(func.clone()); + + if updated != before { + let hash = ctx + .content_store() + .lock() + .await + .add(&FuncContent::V1(updated.clone()))?; + workspace_snapshot.update_content(ctx.change_set_pointer()?, func.id.into(), hash)?; + } + + Ok(Func::assemble(&node_weight, &updated)) } - standard_model_accessor!(name, String, FuncResult); - standard_model_accessor!(display_name, Option, FuncResult); - standard_model_accessor!(description, Option, FuncResult); - standard_model_accessor!(link, Option, FuncResult); - standard_model_accessor!(hidden, bool, FuncResult); - standard_model_accessor!(builtin, bool, FuncResult); - standard_model_accessor!(backend_kind, Enum(FuncBackendKind), FuncResult); - standard_model_accessor!( - backend_response_type, - Enum(FuncBackendResponseType), - FuncResult - ); - standard_model_accessor!(handler, Option, FuncResult); - standard_model_accessor!(code_base64, Option, FuncResult); - standard_model_accessor_ro!(code_sha256, String); -} + pub async fn remove(ctx: &DalContext, id: FuncId) -> FuncResult<()> { + // to remove a func we must remove all incoming edges to it. 
It will then be + // garbage collected out of the graph -#[derive(Clone, Deserialize, Serialize, Debug, PartialEq, Eq)] -#[serde(rename_all = "camelCase")] -pub struct FuncCreatedPayload { - func_id: FuncId, - change_set_pk: ChangeSetPk, -} + let mut workspace_snapshot = ctx.workspace_snapshot()?.write().await; -#[derive(Clone, Deserialize, Serialize, Debug, PartialEq, Eq)] -#[serde(rename_all = "camelCase")] -pub struct FuncDeletedPayload { - func_id: FuncId, - change_set_pk: ChangeSetPk, -} + let arg_node_idx = workspace_snapshot.get_node_index_by_id(id)?; -#[derive(Clone, Deserialize, Serialize, Debug, PartialEq, Eq)] -#[serde(rename_all = "camelCase")] -pub struct FuncRevertedPayload { - func_id: FuncId, - change_set_pk: ChangeSetPk, -} + let users = workspace_snapshot + .incoming_sources_for_edge_weight_kind(id, EdgeWeightKind::Use.into())?; -#[derive(Clone, Deserialize, Serialize, Debug, PartialEq, Eq)] -#[serde(rename_all = "camelCase")] -pub struct FuncSavedPayload { - func_id: FuncId, - change_set_pk: ChangeSetPk, -} + let change_set = ctx.change_set_pointer()?; + for user in users { + workspace_snapshot.remove_edge( + change_set, + user, + arg_node_idx, + EdgeWeightKind::Use.into(), + )?; + } -impl WsEvent { - pub async fn func_created(ctx: &DalContext, func_id: FuncId) -> WsEventResult { - WsEvent::new( - ctx, - WsPayload::FuncCreated(FuncCreatedPayload { - func_id, - change_set_pk: ctx.visibility().change_set_pk, - }), - ) - .await + // Removes the actual node from the graph + workspace_snapshot.remove_node_by_id(id)?; + + Ok(()) } - pub async fn func_deleted(ctx: &DalContext, func_id: FuncId) -> WsEventResult { - WsEvent::new( - ctx, - WsPayload::FuncDeleted(FuncDeletedPayload { - func_id, - change_set_pk: ctx.visibility().change_set_pk, - }), - ) - .await + pub async fn find_intrinsic(ctx: &DalContext, intrinsic: IntrinsicFunc) -> FuncResult { + let name = intrinsic.name(); + Self::find_by_name(ctx, name) + .await? 
+ .ok_or(FuncError::IntrinsicFuncNotFound(name.to_owned())) } - pub async fn func_reverted(ctx: &DalContext, func_id: FuncId) -> WsEventResult { - WsEvent::new( - ctx, - WsPayload::FuncReverted(FuncRevertedPayload { - func_id, - change_set_pk: ctx.visibility().change_set_pk, - }), - ) - .await + pub async fn list(ctx: &DalContext) -> FuncResult> { + let workspace_snapshot = ctx.workspace_snapshot()?.read().await; + + let mut funcs = vec![]; + let func_category_id = + workspace_snapshot.get_category_node(None, CategoryNodeKind::Func)?; + + let func_node_indexes = workspace_snapshot.outgoing_targets_for_edge_weight_kind( + func_category_id, + EdgeWeightKindDiscriminants::Use, + )?; + + let mut func_node_weights = vec![]; + let mut func_content_hash = vec![]; + for index in func_node_indexes { + let node_weight = workspace_snapshot + .get_node_weight(index)? + .get_func_node_weight()?; + func_content_hash.push(node_weight.content_hash()); + func_node_weights.push(node_weight); + } + + let func_contents: HashMap = ctx + .content_store() + .lock() + .await + .get_bulk(func_content_hash.as_slice()) + .await?; + + for node_weight in func_node_weights { + match func_contents.get(&node_weight.content_hash()) { + Some(func_content) => { + // NOTE(nick,jacob,zack): if we had a v2, then there would be migration logic here. 
+ let FuncContent::V1(inner) = func_content; + + funcs.push(Func::assemble(&node_weight, inner)); + } + None => Err(WorkspaceSnapshotError::MissingContentFromStore( + node_weight.id(), + ))?, + } + } + + Ok(funcs) } - pub async fn func_saved(ctx: &DalContext, func_id: FuncId) -> WsEventResult { - WsEvent::new( - ctx, - WsPayload::FuncSaved(FuncSavedPayload { - func_id, - change_set_pk: ctx.visibility().change_set_pk, - }), - ) - .await + pub async fn list_schema_variants_for_auth_func( + ctx: &DalContext, + func_id: FuncId, + ) -> SchemaVariantResult> { + let workspace_snapshot = ctx.workspace_snapshot()?.read().await; + + let mut schema_variant_ids = vec![]; + + for node_id in workspace_snapshot.incoming_sources_for_edge_weight_kind( + func_id, + EdgeWeightKindDiscriminants::AuthenticationPrototype, + )? { + schema_variant_ids.push(workspace_snapshot.get_node_weight(node_id)?.id().into()) + } + + Ok(schema_variant_ids) } } + +// impl Func { +// #[instrument(skip_all)] +// pub async fn new( +// ctx: &DalContext, +// name: impl AsRef, +// backend_kind: FuncBackendKind, +// backend_response_type: FuncBackendResponseType, +// ) -> FuncResult { +// let name = name.as_ref(); +// let row = ctx +// .txns() +// .await? +// .pg() +// .query_one( +// "SELECT object FROM func_create_v1($1, $2, $3, $4, $5)", +// &[ +// ctx.tenancy(), +// ctx.visibility(), +// &name, +// &backend_kind.as_ref(), +// &backend_response_type.as_ref(), +// ], +// ) +// .await?; +// let object = standard_model::finish_create_from_row(ctx, row).await?; +// Ok(object) +// } + +// /// Creates a new [`Func`] from [`self`](Func). All relevant fields are duplicated, but rows +// /// existing on relationship tables (e.g. "belongs_to" or "many_to_many") are not. 
+// pub async fn duplicate(&self, ctx: &DalContext) -> FuncResult { +// // Generate a unique name and make sure it's not in use +// let mut new_unique_name; +// loop { +// new_unique_name = format!("{}{}", self.name(), generate_unique_id(4)); +// if Self::find_by_name(ctx, &new_unique_name).await?.is_none() { +// break; +// }; +// } + +// let mut new_func = Self::new( +// ctx, +// new_unique_name, +// *self.backend_kind(), +// *self.backend_response_type(), +// ) +// .await?; + +// // Duplicate all fields on the func that do not come in through the constructor. +// new_func.set_display_name(ctx, self.display_name()).await?; +// new_func.set_description(ctx, self.description()).await?; +// new_func.set_link(ctx, self.link()).await?; +// new_func.set_hidden(ctx, self.hidden).await?; +// new_func.set_builtin(ctx, self.builtin).await?; +// new_func.set_handler(ctx, self.handler()).await?; +// new_func.set_code_base64(ctx, self.code_base64()).await?; + +// Ok(new_func) +// } + +// #[allow(clippy::result_large_err)] +// pub fn code_plaintext(&self) -> FuncResult> { +// Ok(match self.code_base64() { +// Some(base64_code) => Some(String::from_utf8( +// general_purpose::STANDARD_NO_PAD.decode(base64_code)?, +// )?), +// None => None, +// }) +// } + +// pub async fn is_builtin(&self, ctx: &DalContext) -> FuncResult { +// let row = ctx +// .txns() +// .await? 
+// .pg() +// .query_opt( +// "SELECT id FROM funcs WHERE id = $1 and tenancy_workspace_pk = $2 LIMIT 1", +// &[self.id(), &WorkspacePk::NONE], +// ) +// .await?; + +// Ok(row.is_some()) +// } + +// pub async fn set_code_plaintext( +// &mut self, +// ctx: &DalContext, +// code: Option<&'_ str>, +// ) -> FuncResult<()> { +// self.set_code_base64( +// ctx, +// code.as_ref() +// .map(|code| general_purpose::STANDARD_NO_PAD.encode(code)), +// ) +// .await +// } + +// pub fn metadata_view(&self) -> FuncMetadataView { +// FuncMetadataView { +// display_name: self.display_name().unwrap_or_else(|| self.name()).into(), +// description: self.description().map(Into::into), +// link: self.description().map(Into::into), +// } +// } + +// pub async fn for_binding(ctx: &DalContext, func_binding: &FuncBinding) -> FuncResult { +// let row = ctx +// .txns() +// .await? +// .pg() +// .query_one( +// "SELECT row_to_json(funcs.*) AS object +// FROM funcs_v1($1, $2) AS funcs +// INNER JOIN func_binding_belongs_to_func_v1($1, $2) AS func_binding_belongs_to_func +// ON funcs.id = func_binding_belongs_to_func.belongs_to_id +// WHERE func_binding_belongs_to_func.object_id = $3", +// &[ctx.tenancy(), ctx.visibility(), func_binding.id()], +// ) +// .await?; +// let object = standard_model::finish_create_from_row(ctx, row).await?; +// Ok(object) +// } + +// pub async fn find_by_name(ctx: &DalContext, name: &str) -> FuncResult> { +// Ok(Self::find_by_attr(ctx, "name", &name).await?.pop()) +// } + +// /// Returns `true` if this function is one handled internally by the `dal`, `false` if the +// /// function is one that will be executed by `veritech` +// pub fn is_intrinsic(&self) -> bool { +// is_intrinsic(self.name()) +// } + +// standard_model_accessor!(name, String, FuncResult); +// standard_model_accessor!(display_name, Option, FuncResult); +// standard_model_accessor!(description, Option, FuncResult); +// standard_model_accessor!(link, Option, FuncResult); +// 
standard_model_accessor!(hidden, bool, FuncResult); +// standard_model_accessor!(builtin, bool, FuncResult); +// standard_model_accessor!(backend_kind, Enum(FuncBackendKind), FuncResult); +// standard_model_accessor!( +// backend_response_type, +// Enum(FuncBackendResponseType), +// FuncResult +// ); +// standard_model_accessor!(handler, Option, FuncResult); +// standard_model_accessor!(code_base64, Option, FuncResult); +// standard_model_accessor_ro!(code_sha256, String); +// } diff --git a/lib/dal/src/func/argument.rs b/lib/dal/src/func/argument.rs index e215fce9a9..11637177cd 100644 --- a/lib/dal/src/func/argument.rs +++ b/lib/dal/src/func/argument.rs @@ -1,31 +1,38 @@ +use content_store::{ContentHash, Store, StoreError}; use postgres_types::{FromSql, ToSql}; use serde::{Deserialize, Serialize}; -use serde_json::Value as JsonValue; +use si_pkg::FuncArgumentKind as PkgFuncArgumentKind; +use std::collections::HashMap; +use strum::EnumDiscriminants; use strum::{AsRefStr, Display, EnumIter, EnumString}; use telemetry::prelude::*; use thiserror::Error; +use ulid::Ulid; -use si_pkg::FuncArgumentKind as PkgFuncArgumentKind; - +use crate::change_set_pointer::ChangeSetPointerError; +use crate::workspace_snapshot::edge_weight::{ + EdgeWeight, EdgeWeightError, EdgeWeightKind, EdgeWeightKindDiscriminants, +}; +use crate::workspace_snapshot::node_weight::{FuncArgumentNodeWeight, NodeWeight, NodeWeightError}; +use crate::workspace_snapshot::WorkspaceSnapshotError; use crate::{ - impl_standard_model, pk, standard_model, standard_model_accessor, AttributePrototypeArgument, - AttributePrototypeArgumentError, AttributePrototypeId, DalContext, FuncId, HistoryEventError, - PropKind, StandardModel, StandardModelError, Tenancy, Timestamp, TransactionsError, Visibility, + pk, DalContext, FuncId, HistoryEventError, PropKind, StandardModelError, Timestamp, + TransactionsError, }; -const LIST_FOR_FUNC: &str = include_str!("../queries/func_argument/list_for_func.sql"); -const 
LIST_FOR_FUNC_WITH_PROTOTYPE_ARGUMENTS: &str = - include_str!("../queries/func_argument/list_for_func_with_prototype_arguments.sql"); -const FIND_BY_NAME_FOR_FUNC: &str = - include_str!("../queries/func_argument/find_by_name_for_func.sql"); - #[remain::sorted] #[derive(Debug, Error)] pub enum FuncArgumentError { - #[error("attribute prototype argument error: {0}")] - AttributePrototypeArgument(#[from] AttributePrototypeArgumentError), + #[error(transparent)] + ChangeSetPointer(#[from] ChangeSetPointerError), + #[error("edge weight error: {0}")] + EdgeWeight(#[from] EdgeWeightError), #[error("history event error: {0}")] HistoryEvent(#[from] HistoryEventError), + #[error("intrinsic func {0} ({1}) missing func argument edge")] + IntrinsicMissingFuncArgumentEdge(String, FuncId), + #[error("node weight error: {0}")] + NodeWeight(#[from] NodeWeightError), #[error("func argument not found with name {0} for Func {1}")] NotFoundByNameForFunc(String, FuncId), #[error("pg error: {0}")] @@ -34,8 +41,14 @@ pub enum FuncArgumentError { SerdeJson(#[from] serde_json::Error), #[error("standard model error: {0}")] StandardModelError(#[from] StandardModelError), + #[error("store error: {0}")] + Store(#[from] StoreError), #[error("transactions error: {0}")] Transactions(#[from] TransactionsError), + #[error("could not acquire lock: {0}")] + TryLock(#[from] tokio::sync::TryLockError), + #[error("workspace snapshot: {0}")] + WorkspaceSnapshot(#[from] WorkspaceSnapshotError), } type FuncArgumentResult = Result; @@ -112,166 +125,335 @@ pk!(FuncArgumentId); #[derive(Deserialize, Serialize, Debug, Clone, PartialEq, Eq)] pub struct FuncArgument { - pk: FuncArgumentPk, - id: FuncArgumentId, - func_id: FuncId, - name: String, - kind: FuncArgumentKind, - element_kind: Option, - shape: Option, - #[serde(flatten)] - tenancy: Tenancy, + pub id: FuncArgumentId, + pub name: String, + pub kind: FuncArgumentKind, + pub element_kind: Option, #[serde(flatten)] - timestamp: Timestamp, - #[serde(flatten)] 
- visibility: Visibility, + pub timestamp: Timestamp, } -impl_standard_model! { - model: FuncArgument, - pk: FuncArgumentPk, - id: FuncArgumentId, - table_name: "func_arguments", - history_event_label_base: "func_argument", - history_event_message_name: "Func Argument" +#[derive(EnumDiscriminants, Serialize, Deserialize, PartialEq)] +pub enum FuncArgumentContent { + V1(FuncArgumentContentV1), +} + +#[derive(Deserialize, Serialize, Debug, Clone, PartialEq, Eq)] +pub struct FuncArgumentContentV1 { + pub kind: FuncArgumentKind, + pub element_kind: Option, + pub timestamp: Timestamp, +} + +impl From for FuncArgumentContentV1 { + fn from(value: FuncArgument) -> Self { + Self { + kind: value.kind, + element_kind: value.element_kind, + timestamp: value.timestamp, + } + } } impl FuncArgument { + pub fn assemble(node_weight: &FuncArgumentNodeWeight, content: &FuncArgumentContentV1) -> Self { + let content = content.to_owned(); + + Self { + id: node_weight.id().into(), + name: node_weight.name().into(), + kind: content.kind, + element_kind: content.element_kind, + timestamp: content.timestamp, + } + } + pub async fn new( ctx: &DalContext, - name: impl AsRef, + name: impl Into, kind: FuncArgumentKind, element_kind: Option, func_id: FuncId, ) -> FuncArgumentResult { - let name = name.as_ref(); - let row = ctx - .txns() - .await? 
- .pg() - .query_one( - "SELECT object FROM func_argument_create_v1($1, $2, $3, $4, $5, $6)", - &[ - ctx.tenancy(), - ctx.visibility(), - &func_id, - &name, - &kind.as_ref(), - &element_kind.as_ref().map(|ek| ek.as_ref()), - ], - ) - .await?; + let timestamp = Timestamp::now(); + + let content = FuncArgumentContentV1 { + kind, + element_kind, + timestamp, + }; + + let hash = ctx + .content_store() + .lock() + .await + .add(&FuncArgumentContent::V1(content.clone()))?; + + let change_set = ctx.change_set_pointer()?; + let id = change_set.generate_ulid()?; + let node_weight = NodeWeight::new_func_argument(change_set, id, name.into(), hash)?; + + { + let mut workspace_snapshot = ctx.workspace_snapshot()?.write().await; + + workspace_snapshot.add_node(node_weight.clone())?; + workspace_snapshot.add_edge( + func_id, + EdgeWeight::new(change_set, EdgeWeightKind::Use)?, + id, + )?; + } + + let func_argument_node_weight = node_weight.get_func_argument_node_weight()?; - Ok(standard_model::finish_create_from_row(ctx, row).await?) + Ok(FuncArgument::assemble(&func_argument_node_weight, &content)) } - standard_model_accessor!(func_id, Pk(FuncId), FuncArgumentResult); - standard_model_accessor!(name, String, FuncArgumentResult); - standard_model_accessor!(kind, Enum(FuncArgumentKind), FuncArgumentResult); - standard_model_accessor!( - element_kind, - Option, - FuncArgumentResult - ); - standard_model_accessor!(shape, OptionJson, FuncArgumentResult); + pub async fn get_by_id(ctx: &DalContext, id: FuncArgumentId) -> FuncArgumentResult { + let workspace_snapshot = ctx.workspace_snapshot()?.read().await; + let id: ulid::Ulid = id.into(); + let node_index = workspace_snapshot.get_node_index_by_id(id)?; + let node_weight = workspace_snapshot.get_node_weight(node_index)?; + let hash = node_weight.content_hash(); - /// List all [`FuncArgument`](Self) for the provided [`FuncId`](crate::FuncId). 
- pub async fn list_for_func(ctx: &DalContext, func_id: FuncId) -> FuncArgumentResult> { - let rows = ctx - .txns() + let content: FuncArgumentContent = ctx + .content_store() + .lock() + .await + .get(&hash) .await? - .pg() - .query(LIST_FOR_FUNC, &[ctx.tenancy(), ctx.visibility(), &func_id]) - .await?; + .ok_or(WorkspaceSnapshotError::MissingContentFromStore(id))?; + + // NOTE(nick,jacob,zack): if we had a v2, then there would be migration logic here. + let FuncArgumentContent::V1(inner) = content; + + let arg_node_weight = node_weight.get_func_argument_node_weight()?; - Ok(standard_model::objects_from_rows(rows)?) + Ok(FuncArgument::assemble(&arg_node_weight, &inner)) } - /// List all [`FuncArgument`](Self) for the provided [`FuncId`](crate::FuncId) along with the - /// [`AttributePrototypeArgument`](crate::AttributePrototypeArgument) that corresponds to it - /// *if* one exists. - pub async fn list_for_func_with_prototype_arguments( + pub async fn list_ids_for_func( ctx: &DalContext, func_id: FuncId, - attribute_prototype_id: AttributePrototypeId, - ) -> FuncArgumentResult)>> { - let rows = ctx - .txns() - .await? - .pg() - .query( - LIST_FOR_FUNC_WITH_PROTOTYPE_ARGUMENTS, - &[ - ctx.tenancy(), - ctx.visibility(), - &func_id, - &attribute_prototype_id, - ], - ) + ) -> FuncArgumentResult> { + let mut func_args = vec![]; + + let workspace_snapshot = ctx.workspace_snapshot()?.read().await; + + let func_node_idx = workspace_snapshot.get_node_index_by_id(func_id)?; + + let func_arg_node_idxs = workspace_snapshot + .outgoing_targets_for_edge_weight_kind_by_index( + func_node_idx, + EdgeWeightKindDiscriminants::Use, + )?; + + for idx in func_arg_node_idxs { + let node_weight = workspace_snapshot.get_node_weight(idx)?; + func_args.push(node_weight.id().into()) + } + + Ok(func_args) + } + + /// List all [`FuncArgument`](Self) for the provided [`FuncId`](crate::FuncId). 
+ pub async fn list_for_func(ctx: &DalContext, func_id: FuncId) -> FuncArgumentResult> { + let mut func_args = vec![]; + + let workspace_snapshot = ctx.workspace_snapshot()?.read().await; + + let func_node_idx = workspace_snapshot.get_node_index_by_id(func_id)?; + + let func_arg_node_idxs = workspace_snapshot + .outgoing_targets_for_edge_weight_kind_by_index( + func_node_idx, + EdgeWeightKindDiscriminants::Use, + )?; + + let mut arg_node_weights = vec![]; + let mut arg_content_hashes = vec![]; + + for idx in func_arg_node_idxs { + let node_weight = workspace_snapshot + .get_node_weight(idx)? + .get_func_argument_node_weight()?; + + arg_content_hashes.push(node_weight.content_hash()); + arg_node_weights.push(node_weight); + } + + let arg_contents: HashMap = ctx + .content_store() + .lock() + .await + .get_bulk(arg_content_hashes.as_slice()) .await?; - let mut result = vec![]; - - for row in rows.into_iter() { - let func_argument_json: serde_json::Value = row.try_get("func_argument_object")?; - let prototype_argument_json: Option = - row.try_get("prototype_argument_object")?; - - result.push(( - serde_json::from_value(func_argument_json)?, - match prototype_argument_json { - Some(prototype_argument_json) => { - Some(serde_json::from_value(prototype_argument_json)?) - } - None => None, - }, - )); + for weight in arg_node_weights { + match arg_contents.get(&weight.content_hash()) { + Some(arg_content) => { + let FuncArgumentContent::V1(inner) = arg_content; + + func_args.push(FuncArgument::assemble(&weight, inner)); + } + None => Err(WorkspaceSnapshotError::MissingContentFromStore(weight.id()))?, + } } - Ok(result) + Ok(func_args) } pub async fn find_by_name_for_func( ctx: &DalContext, - name: &str, + name: impl AsRef, func_id: FuncId, ) -> FuncArgumentResult> { - Ok( - match ctx - .txns() - .await? - .pg() - .query_opt( - FIND_BY_NAME_FOR_FUNC, - &[ctx.tenancy(), ctx.visibility(), &name, &func_id], - ) - .await? 
- { - Some(row) => standard_model::object_from_row(row)?, - None => None, - }, - ) + let name = name.as_ref(); + + for arg in FuncArgument::list_for_func(ctx, func_id).await? { + if arg.name.as_str() == name { + return Ok(Some(arg)); + } + } + + Ok(None) } - /// Remove the [`FuncArgument`](Self) along with any [`AttributePrototypeArgument`](crate::AttributePrototypeArgument) rows that reference it. - /// This should be used instead of the [`delete_by_id`](Self::delete_by_id) method since it keeps the two tables in sync. - pub async fn remove( + pub async fn modify_by_id( ctx: &DalContext, - func_argument_id: &FuncArgumentId, - ) -> FuncArgumentResult<()> { - let mut func_arg = match FuncArgument::get_by_id(ctx, func_argument_id).await? { - Some(func_arg) => func_arg, - None => return Ok(()), + id: FuncArgumentId, + lambda: L, + ) -> FuncArgumentResult + where + L: FnOnce(&mut FuncArgument) -> FuncArgumentResult<()>, + { + let ulid: Ulid = id.into(); + + let (arg_node_idx, arg_nw) = { + let workspace_snapshot = ctx.workspace_snapshot()?.read().await; + + let arg_node_idx = workspace_snapshot.get_node_index_by_id(ulid)?; + ( + arg_node_idx, + workspace_snapshot.get_node_weight(arg_node_idx)?.to_owned(), + ) }; - for mut prototype_argument in - AttributePrototypeArgument::list_by_func_argument_id(ctx, *func_argument_id).await? - { - prototype_argument.delete_by_id(ctx).await?; + let hash = arg_nw.content_hash(); + + let content: FuncArgumentContent = ctx + .content_store() + .lock() + .await + .get(&hash) + .await? 
+ .ok_or(WorkspaceSnapshotError::MissingContentFromStore(ulid))?; + + let FuncArgumentContent::V1(inner) = content; + + let mut func_arg_node_weight = arg_nw.get_func_argument_node_weight()?; + let mut func_arg = FuncArgument::assemble(&func_arg_node_weight, &inner); + + lambda(&mut func_arg)?; + + if func_arg_node_weight.name() != func_arg.name.as_str() { + let mut new_func_arg = func_arg_node_weight + .new_with_incremented_vector_clock(ctx.change_set_pointer()?)?; + new_func_arg.set_name(&func_arg.name); + + let mut workspace_snapshot = ctx.workspace_snapshot()?.write().await; + + workspace_snapshot.add_node(NodeWeight::FuncArgument(new_func_arg.clone()))?; + workspace_snapshot.replace_references(arg_node_idx)?; + func_arg_node_weight = new_func_arg; + } + + let updated = FuncArgumentContentV1::from(func_arg.clone()); + if updated != inner { + let hash = ctx + .content_store() + .lock() + .await + .add(&FuncArgumentContent::V1(updated.clone()))?; + + let mut workspace_snapshot = ctx.workspace_snapshot()?.write().await; + workspace_snapshot.update_content(ctx.change_set_pointer()?, ulid, hash)?; } - func_arg.delete_by_id(ctx).await?; + Ok(FuncArgument::assemble(&func_arg_node_weight, &updated)) + } + + pub async fn remove(ctx: &DalContext, id: FuncArgumentId) -> FuncArgumentResult<()> { + let mut workspace_snapshot = ctx.workspace_snapshot()?.write().await; + + workspace_snapshot.remove_node_by_id(id)?; Ok(()) } + + // /// List all [`FuncArgument`](Self) for the provided [`FuncId`](crate::FuncId) along with the + // /// [`AttributePrototypeArgument`](crate::AttributePrototypeArgument) that corresponds to it + // /// *if* one exists. + // pub async fn list_for_func_with_prototype_arguments( + // ctx: &DalContext, + // func_id: FuncId, + // attribute_prototype_id: AttributePrototypeId, + // ) -> FuncArgumentResult)>> { + // let rows = ctx + // .txns() + // .await? + // .pg() + // .query( + // Ok( + // match ctx + // .txns() + // .await? 
+ // .pg() + // .query_opt( + // FIND_BY_NAME_FOR_FUNC, + // &[ctx.tenancy(), ctx.visibility(), &name, &func_id], + // ) + // .await? + // { + // Some(row) => standard_model::object_from_row(row)?, + // None => None, + // }, + // ) + // LIST_FOR_FUNC_WITH_PROTOTYPE_ARGUMENTS, + // &[ + // ctx.tenancy(), + // ctx.visibility(), + // &func_id, + // &attribute_prototype_id, + // ], + // ) + // .await?; + // + // let mut result = vec![]; + // + // for row in rows.into_iter() { + // let func_argument_json: serde_json::Value = row.try_get("func_argument_object")?; + // let prototype_argument_json: Option = + // row.try_get("prototype_argument_object")?; + // + // result.push(( + // serde_json::from_value(func_argument_json)?, + // match prototype_argument_json { + // Some(prototype_argument_json) => { + // Some(serde_json::from_value(prototype_argument_json)?) + // } + // None => None, + // }, + // )); + // } + // + // Ok(result) + // } + + // pub async fn find_by_name_for_func( + // ctx: &DalContext, + // name: &str, + // func_id: FuncId, + // ) -> FuncArgumentResult> { + // } } diff --git a/lib/dal/src/func/backend.rs b/lib/dal/src/func/backend.rs index 473ea5fd83..cf7c36fbbe 100644 --- a/lib/dal/src/func/backend.rs +++ b/lib/dal/src/func/backend.rs @@ -1,15 +1,17 @@ use async_trait::async_trait; +use tokio::sync::mpsc; + use serde::{de::DeserializeOwned, Deserialize, Serialize}; use strum::{AsRefStr, Display, EnumIter, EnumString}; use telemetry::prelude::*; use thiserror::Error; -use tokio::sync::mpsc; use veritech_client::{ ActionRunResultSuccess, BeforeFunction, Client as VeritechClient, FunctionResult, OutputStream, ResolverFunctionResponseType, }; -use crate::{label_list::ToLabelList, DalContext, Func, FuncId, PropKind, StandardModel}; +use crate::label_list::ToLabelList; +use crate::{DalContext, Func, FuncId, PropKind}; pub mod array; pub mod boolean; @@ -233,11 +235,13 @@ pub trait FuncDispatch: std::fmt::Debug { ) -> FuncBackendResult> { let args = 
Self::Args::deserialize(args)?; let code_base64 = func - .code_base64() - .ok_or_else(|| FuncBackendError::DispatchMissingBase64(*func.id()))?; + .code_base64 + .as_deref() + .ok_or_else(|| FuncBackendError::DispatchMissingBase64(func.id))?; let handler = func - .handler() - .ok_or_else(|| FuncBackendError::DispatchMissingHandler(*func.id()))?; + .handler + .as_deref() + .ok_or_else(|| FuncBackendError::DispatchMissingHandler(func.id))?; let value = Self::new(context, code_base64, handler, args, before); Ok(value) } diff --git a/lib/dal/src/func/before.rs b/lib/dal/src/func/before.rs index b4fd9d17c7..e3073a80b6 100644 --- a/lib/dal/src/func/before.rs +++ b/lib/dal/src/func/before.rs @@ -1,55 +1,165 @@ -use serde::Deserialize; -use veritech_client::{encrypt_value_tree, BeforeFunction}; +use thiserror::Error; +use veritech_client::{encrypt_value_tree, BeforeFunction, CycloneValueEncryptError}; +use crate::attribute::value::AttributeValueError; +use crate::prop::{PropError, PropPath}; +use crate::schema::variant::root_prop::RootPropChild; +use crate::schema::variant::SchemaVariantError; use crate::{ - standard_model, ComponentId, DalContext, EncryptedSecret, Func, FuncError, FuncResult, + AttributeValue, Component, ComponentError, ComponentId, DalContext, EncryptedSecret, Func, + FuncId, Prop, PropId, SchemaVariant, SecretError, SecretId, StandardModel, StandardModelError, }; -const AUTH_FUNCS_FOR_COMPONENT: &str = - include_str!("../queries/func/authentication_funcs_for_component.sql"); - -#[derive(Deserialize, Debug)] -struct EncryptedSecretAndFunc { - encrypted_secret: EncryptedSecret, - func: Func, +#[remain::sorted] +#[derive(Error, Debug)] +pub enum BeforeFuncError { + #[error("attribute value error: {0}")] + AttributeValue(#[from] AttributeValueError), + #[error("component error: {0}")] + Component(#[from] ComponentError), + #[error("cyclone value encrypt error: {0}")] + CycloneValueEncrypt(#[from] CycloneValueEncryptError), + #[error("func error: {0}")] + 
Func(String), + #[error("error deserializing json")] + JsonDeserialize(#[from] serde_json::Error), + #[error("Function missing expected code: {0}")] + MissingCode(FuncId), + #[error("Function missing expected handler: {0}")] + MissingHandler(FuncId), + #[error("no widget options on secret prop id: {0}")] + NoWidgetOptionsOnSecretProp(PropId), + #[error("prop error: {0}")] + Prop(#[from] PropError), + #[error("schema variant error: {0}")] + SchemaVariant(#[from] SchemaVariantError), + #[error("secret error: {0}")] + Secret(#[from] SecretError), + #[error("standard model error: {0}")] + StandardModel(#[from] StandardModelError), } +pub type BeforeFuncResult = Result; + pub async fn before_funcs_for_component( ctx: &DalContext, component_id: &ComponentId, -) -> FuncResult> { - let rows = ctx - .txns() - .await? - .pg() - .query( - AUTH_FUNCS_FOR_COMPONENT, - &[ctx.tenancy(), ctx.visibility(), component_id], +) -> BeforeFuncResult> { + let secret_props = { + let schema_variant = Component::schema_variant_id(ctx, *component_id).await?; + let secrets_prop = + SchemaVariant::find_root_child_prop_id(ctx, schema_variant, RootPropChild::Secrets) + .await?; + Prop::direct_child_prop_ids(ctx, secrets_prop).await? + }; + + let secret_definition_path = PropPath::new(["root", "secret_definition"]); + let secret_path = PropPath::new(["root", "secrets"]); + + let mut funcs_and_secrets = vec![]; + for secret_prop_id in secret_props { + let auth_funcs = auth_funcs_for_secret_prop_id( + ctx, + secret_prop_id, + &secret_definition_path, + &secret_path, ) .await?; + let av_ids = Prop::attribute_values_for_prop_id(ctx, secret_prop_id).await?; + let mut maybe_secret_id = None; + for av_id in av_ids { + if AttributeValue::component_id(ctx, av_id).await? 
!= *component_id { + continue; + } + + let av = AttributeValue::get_by_id(ctx, av_id).await?; + + maybe_secret_id = av.value(ctx).await?; + break; + } + + if let Some(secret_id_str) = maybe_secret_id { + let id: SecretId = serde_json::from_value(secret_id_str)?; + + funcs_and_secrets.push((id, auth_funcs)) + } + } + let mut results = vec![]; - for EncryptedSecretAndFunc { - encrypted_secret, - func, - } in standard_model::objects_from_rows(rows)? - { + for (secret_id, funcs) in funcs_and_secrets { + let encrypted_secret = EncryptedSecret::get_by_id(ctx, &secret_id) + .await? + .ok_or(SecretError::SecretNotFound(secret_id))?; + // Decrypt message from EncryptedSecret let mut arg = encrypted_secret.decrypt(ctx).await?.message().into_inner(); // Re-encrypt raw Value for transmission to Cyclone via Veritech encrypt_value_tree(&mut arg, ctx.encryption_key())?; - results.push(BeforeFunction { - handler: func - .handler - .ok_or_else(|| FuncError::MissingHandler(func.id))?, - code_base64: func - .code_base64 - .ok_or_else(|| FuncError::MissingCode(func.id))?, - arg, - }) + for func in funcs { + results.push(BeforeFunction { + handler: func + .handler + .ok_or_else(|| BeforeFuncError::MissingHandler(func.id))?, + code_base64: func + .code_base64 + .ok_or_else(|| BeforeFuncError::MissingCode(func.id))?, + arg: arg.clone(), + }) + } } Ok(results) } + +/// This _private_ method gathers the authentication functions for a given [`PropId`](Prop) underneath "/root/secrets". +async fn auth_funcs_for_secret_prop_id( + ctx: &DalContext, + secret_prop_id: PropId, + secret_definition_path: &PropPath, + secret_path: &PropPath, +) -> BeforeFuncResult> { + let secret_prop = Prop::get_by_id(ctx, secret_prop_id).await?; + + let secret_definition_name = secret_prop + .widget_options + .ok_or(BeforeFuncError::NoWidgetOptionsOnSecretProp(secret_prop_id))? + .pop() + .ok_or(BeforeFuncError::NoWidgetOptionsOnSecretProp(secret_prop_id))? 
+ .value; + + let mut auth_funcs = vec![]; + for secret_defining_sv_id in SchemaVariant::list_ids(ctx).await? { + if Prop::find_prop_id_by_path_opt(ctx, secret_defining_sv_id, secret_definition_path) + .await? + .is_none() + { + continue; + } + + let secrets_prop = Prop::find_prop_by_path(ctx, secret_defining_sv_id, secret_path).await?; + + let secret_child_prop_id = Prop::direct_single_child_prop_id(ctx, secrets_prop.id).await?; + let secret_child_prop = Prop::get_by_id(ctx, secret_child_prop_id).await?; + + if secret_child_prop.name != secret_definition_name { + continue; + } + + for auth_func_id in + SchemaVariant::list_auth_func_ids_for_schema_variant(ctx, secret_defining_sv_id).await? + { + auth_funcs.push( + Func::get_by_id(ctx, auth_func_id) + .await + .map_err(|e| BeforeFuncError::Func(e.to_string()))?, + ) + } + + break; + } + + Ok(auth_funcs) +} diff --git a/lib/dal/src/func/binding.rs b/lib/dal/src/func/binding.rs index f7e467090e..52fd116e5b 100644 --- a/lib/dal/src/func/binding.rs +++ b/lib/dal/src/func/binding.rs @@ -7,8 +7,13 @@ use thiserror::Error; use tokio::sync::mpsc; use veritech_client::{BeforeFunction, OutputStream, ResolverFunctionComponent}; +use super::FuncError; use crate::func::execution::FuncExecutionPk; -use crate::FuncError; +use crate::{ + func::backend::FuncBackendError, impl_standard_model, pk, standard_model, + standard_model_accessor, Func, FuncBackendKind, HistoryEventError, StandardModel, + StandardModelError, Timestamp, Visibility, +}; use crate::{ func::backend::{ array::FuncBackendArray, @@ -27,11 +32,6 @@ use crate::{ }, TransactionsError, WsEvent, WsEventError, WsEventResult, WsPayload, }; -use crate::{ - impl_standard_model, pk, standard_model, standard_model_accessor, standard_model_belongs_to, - Func, FuncBackendError, FuncBackendKind, HistoryEventError, StandardModel, StandardModelError, - Timestamp, Visibility, -}; use crate::{DalContext, Tenancy}; use super::{ @@ -94,9 +94,10 @@ pk!(FuncBindingId); pub struct 
FuncBinding { pk: FuncBindingPk, id: FuncBindingId, + func_id: FuncId, args: serde_json::Value, backend_kind: FuncBackendKind, - code_sha256: String, + code_blake3: String, #[serde(flatten)] tenancy: Tenancy, #[serde(flatten)] @@ -122,28 +123,25 @@ impl FuncBinding { func_id: FuncId, backend_kind: FuncBackendKind, ) -> FuncBindingResult { - let func = Func::get_by_id(ctx, &func_id) - .await? - .ok_or(FuncBindingError::FuncNotFound(FuncBindingPk::NONE))?; + let func = Func::get_by_id(ctx, func_id).await?; let row = ctx .txns() .await? .pg() .query_one( - "SELECT object FROM func_binding_create_v1($1, $2, $3, $4, $5, $6)", + "SELECT object FROM func_binding_create_v2($1, $2, $3, $4, $5, $6)", &[ ctx.tenancy(), ctx.visibility(), &args, &func_id, &backend_kind.as_ref(), - &func.code_sha256(), + &func.code_blake3, ], ) .await?; let object: FuncBinding = standard_model::finish_create_from_row(ctx, row).await?; - object.set_func(ctx, &func_id).await?; Ok(object) } @@ -153,9 +151,7 @@ impl FuncBinding { value: Option, func_id: FuncId, ) -> FuncBindingResult<(Self, FuncBindingReturnValue)> { - let func = Func::get_by_id(ctx, &func_id) - .await? - .ok_or(FuncError::NotFound(func_id))?; + let func = Func::get_by_id(ctx, func_id).await?; let func_binding = Self::new(ctx, args, func_id, func.backend_kind).await?; let func_binding_return_value = FuncBindingReturnValue::new( @@ -182,10 +178,8 @@ impl FuncBinding { func_id: FuncId, before: Vec, ) -> FuncBindingResult<(Self, FuncBindingReturnValue)> { - let func = Func::get_by_id(ctx, &func_id) - .await? 
- .ok_or(FuncError::NotFound(func_id))?; - let func_binding = Self::new(ctx, args, func_id, func.backend_kind).await?; + let func = Func::get_by_id(ctx, func_id).await?; + let func_binding = Self::new(ctx, args, func.id, func.backend_kind).await?; let func_binding_return_value: FuncBindingReturnValue = func_binding.execute(ctx, before).await?; @@ -195,18 +189,8 @@ impl FuncBinding { standard_model_accessor!(args, PlainJson, FuncBindingResult); standard_model_accessor!(backend_kind, Enum(FuncBackendKind), FuncBindingResult); - standard_model_accessor!(code_sha256, String, FuncBindingResult); - standard_model_belongs_to!( - lookup_fn: func, - set_fn: set_func, - unset_fn: unset_func, - table: "func_binding_belongs_to_func", - model_table: "funcs", - belongs_to_id: FuncId, - returns: Func, - result: FuncBindingResult, - ); - + standard_model_accessor!(code_blake3, String, FuncBindingResult); + standard_model_accessor!(func_id, Pk(FuncId), FuncBindingResult); // For a given [`FuncBinding`](Self), execute using veritech. async fn execute( &self, @@ -252,7 +236,7 @@ impl FuncBinding { }, parents: Vec::new(), }, - response_type: (*func.backend_response_type()).try_into()?, + response_type: func.backend_response_type.try_into()?, }; FuncBackendJsAttribute::create_and_execute( context, @@ -323,7 +307,7 @@ impl FuncBinding { ctx, unprocessed_value, processed_value, - *func.id(), + func.id, self.id, execution.pk(), ) @@ -348,10 +332,8 @@ impl FuncBinding { FuncDispatchContext, mpsc::Receiver, )> { - let func: Func = self - .func(ctx) - .await? 
- .ok_or(FuncBindingError::FuncNotFound(self.pk))?; + let func_id = self.func_id(); + let func = Func::get_by_id(ctx, func_id).await?; let mut execution = FuncExecution::new(ctx, &func, self).await?; diff --git a/lib/dal/src/func/binding_return_value.rs b/lib/dal/src/func/binding_return_value.rs index e75b453614..41b7f15140 100644 --- a/lib/dal/src/func/binding_return_value.rs +++ b/lib/dal/src/func/binding_return_value.rs @@ -16,9 +16,13 @@ use crate::{ Visibility, }; +use super::FuncError; + #[remain::sorted] #[derive(Error, Debug)] pub enum FuncBindingReturnValueError { + #[error("Func error: {0}")] + Func(#[from] FuncError), #[error("func binding error: {0}")] FuncBinding(String), #[error("function execution error: {0}")] @@ -168,9 +172,7 @@ impl FuncBindingReturnValue { &self, ctx: &DalContext, ) -> FuncBindingReturnValueResult { - let func = Func::get_by_id(ctx, &self.func_id) - .await? - .ok_or(FuncBindingReturnValueError::FuncNotFound(self.func_id))?; + let func = Func::get_by_id(ctx, self.func_id).await?; Ok(func.metadata_view()) } } diff --git a/lib/dal/src/func/execution.rs b/lib/dal/src/func/execution.rs index 6563b2d472..56780660e2 100644 --- a/lib/dal/src/func/execution.rs +++ b/lib/dal/src/func/execution.rs @@ -101,13 +101,13 @@ impl FuncExecution { &[ ctx.tenancy(), &FuncExecutionState::Start.to_string(), - &func.id(), + &func.id, &func_binding.id(), &func_binding.args(), &func_binding.backend_kind().to_string(), - &func.backend_response_type().to_string(), - &func.handler(), - &func.code_base64(), + &func.backend_response_type.to_string(), + &func.handler.as_deref(), + &func.code_base64.as_deref(), ], ) .await?; diff --git a/lib/dal/src/func/identity.rs b/lib/dal/src/func/identity.rs index add46d4fc0..7474bcb68b 100644 --- a/lib/dal/src/func/identity.rs +++ b/lib/dal/src/func/identity.rs @@ -3,50 +3,47 @@ //! prevalence of the identity [`Func`](crate::Func) across the library, this helper should help //! ease some friction. 
-use crate::{ - DalContext, Func, FuncArgument, FuncBinding, FuncBindingReturnValue, FuncError, FuncResult, - StandardModel, -}; +use crate::{DalContext, Func, FuncArgument, FuncBinding, FuncBindingReturnValue, StandardModel}; const IDENTITY_FUNC_NAME: &str = "si:identity"; -impl Func { - /// Returns the identity [`Func`](Self) with its corresponding - /// [`FuncBinding`](crate::FuncBinding) and - /// [`FuncBindingReturnValue`](crate::FuncBindingReturnValue). - pub async fn identity_with_binding_and_return_value( - ctx: &DalContext, - ) -> FuncResult<(Func, FuncBinding, FuncBindingReturnValue)> { - let func = Self::identity_func(ctx).await?; - let (func_binding, func_binding_return_value) = FuncBinding::create_and_execute( - ctx, - serde_json::json![{ "identity": null }], - *func.id(), - vec![], - ) - .await - .map_err(|e| FuncError::FuncBinding(e.to_string()))?; +// impl Func { +// /// Returns the identity [`Func`](Self) with its corresponding +// /// [`FuncBinding`](crate::FuncBinding) and +// /// [`FuncBindingReturnValue`](crate::FuncBindingReturnValue). +// pub async fn identity_with_binding_and_return_value( +// ctx: &DalContext, +// ) -> FuncResult<(Func, FuncBinding, FuncBindingReturnValue)> { +// let func = Self::identity_func(ctx).await?; +// let (func_binding, func_binding_return_value) = FuncBinding::create_and_execute( +// ctx, +// serde_json::json![{ "identity": null }], +// *func.id(), +// vec![], +// ) +// .await +// .map_err(|e| FuncError::FuncBinding(e.to_string()))?; - Ok((func, func_binding, func_binding_return_value)) - } +// Ok((func, func_binding, func_binding_return_value)) +// } - /// Returns the identity [`Func`](Self) with its corresponding - /// [`FuncArgument`](crate::FuncArgument). - pub async fn identity_with_argument(ctx: &DalContext) -> FuncResult<(Func, FuncArgument)> { - let func = Self::identity_func(ctx).await?; - let func_argument = FuncArgument::find_by_name_for_func(ctx, "identity", *func.id()) - .await? 
- .ok_or(FuncError::IdentityFuncArgumentNotFound)?; - Ok((func, func_argument)) - } +// /// Returns the identity [`Func`](Self) with its corresponding +// /// [`FuncArgument`](crate::FuncArgument). +// pub async fn identity_with_argument(ctx: &DalContext) -> FuncResult<(Func, FuncArgument)> { +// let func = Self::identity_func(ctx).await?; +// let func_argument = FuncArgument::find_by_name_for_func(ctx, "identity", *func.id()) +// .await? +// .ok_or(FuncError::IdentityFuncArgumentNotFound)?; +// Ok((func, func_argument)) +// } - /// Returns the identity [`Func`](Self). - pub async fn identity_func(ctx: &DalContext) -> FuncResult { - let mut found_funcs = Func::find_by_attr(ctx, "name", &IDENTITY_FUNC_NAME).await?; - let func = found_funcs.pop().ok_or(FuncError::IdentityFuncNotFound)?; - match found_funcs.is_empty() { - true => Ok(func), - false => Err(FuncError::TooManyFuncsFoundForIdentity), - } - } -} +// /// Returns the identity [`Func`](Self). +// pub async fn identity_func(ctx: &DalContext) -> FuncResult { +// let mut found_funcs = Func::find_by_attr(ctx, "name", &IDENTITY_FUNC_NAME).await?; +// let func = found_funcs.pop().ok_or(FuncError::IdentityFuncNotFound)?; +// match found_funcs.is_empty() { +// true => Ok(func), +// false => Err(FuncError::TooManyFuncsFoundForIdentity), +// } +// } +// } diff --git a/lib/dal/src/func/intrinsics.rs b/lib/dal/src/func/intrinsics.rs index 557e37fd03..f89a8f0b0b 100644 --- a/lib/dal/src/func/intrinsics.rs +++ b/lib/dal/src/func/intrinsics.rs @@ -1,12 +1,13 @@ +use chrono::DateTime; use si_pkg::{ FuncArgumentKind, FuncArgumentSpec, FuncSpec, FuncSpecBackendKind, FuncSpecBackendResponseType, FuncSpecData, PkgSpec, }; - -use super::{FuncError, FuncResult}; -use chrono::DateTime; use strum::{AsRefStr, Display, EnumIter, EnumString, IntoEnumIterator}; +use crate::func::{FuncError, FuncResult}; +use crate::PropKind; + #[remain::sorted] #[derive(AsRefStr, Display, EnumIter, EnumString, Debug, Clone, Copy, PartialEq, Eq)] pub enum 
IntrinsicFunc { @@ -71,36 +72,80 @@ impl IntrinsicFunc { .unique_id("51049a590fb64860f159972012ac2657c629479a244d6bcc4b1b73ba4b29f87f"); data_builder.backend_kind(FuncSpecBackendKind::Array); data_builder.response_type(FuncSpecBackendResponseType::Array); + builder.argument( + FuncArgumentSpec::builder() + .name("value") + .kind(FuncArgumentKind::Array) + .element_kind(FuncArgumentKind::Any) + .build() + .map_err(|e| FuncError::IntrinsicSpecCreation(e.to_string()))?, + ); } Self::SetBoolean => { builder .unique_id("577a7deea25cfad0d4b2dd1e1f3d96b86b8b1578605137b8c4128d644c86964b"); data_builder.backend_kind(FuncSpecBackendKind::Boolean); data_builder.response_type(FuncSpecBackendResponseType::Boolean); + builder.argument( + FuncArgumentSpec::builder() + .name("value") + .kind(FuncArgumentKind::Boolean) + .build() + .map_err(|e| FuncError::IntrinsicSpecCreation(e.to_string()))?, + ); } Self::SetInteger => { builder .unique_id("7d384b237852f20b8dec2fbd2e644ffc6bde901d7dc937bd77f50a0d57e642a9"); data_builder.backend_kind(FuncSpecBackendKind::Integer); data_builder.response_type(FuncSpecBackendResponseType::Integer); + builder.argument( + FuncArgumentSpec::builder() + .name("value") + .kind(FuncArgumentKind::Integer) + .build() + .map_err(|e| FuncError::IntrinsicSpecCreation(e.to_string()))?, + ); } Self::SetMap => { builder .unique_id("dea5084fbf6e7fe8328ac725852b96f4b5869b14d0fe9dd63a285fa876772496"); data_builder.backend_kind(FuncSpecBackendKind::Map); data_builder.response_type(FuncSpecBackendResponseType::Map); + builder.argument( + FuncArgumentSpec::builder() + .name("value") + .kind(FuncArgumentKind::Map) + .element_kind(FuncArgumentKind::Any) + .build() + .map_err(|e| FuncError::IntrinsicSpecCreation(e.to_string()))?, + ); } Self::SetObject => { builder .unique_id("cb9bf94739799f3a8b84bcb88495f93b27b47c31a341f8005a60ca39308909fd"); data_builder.backend_kind(FuncSpecBackendKind::Object); data_builder.response_type(FuncSpecBackendResponseType::Object); + 
builder.argument( + FuncArgumentSpec::builder() + .name("value") + .kind(FuncArgumentKind::Object) + .build() + .map_err(|e| FuncError::IntrinsicSpecCreation(e.to_string()))?, + ); } Self::SetString => { builder .unique_id("bbe86d1a2b92c3e34b72a407cca424878d3466d29ca60e56a251a52a0840bfbd"); data_builder.backend_kind(FuncSpecBackendKind::String); data_builder.response_type(FuncSpecBackendResponseType::String); + builder.argument( + FuncArgumentSpec::builder() + .name("value") + .kind(FuncArgumentKind::String) + .build() + .map_err(|e| FuncError::IntrinsicSpecCreation(e.to_string()))?, + ); } Self::Unset => { builder @@ -113,6 +158,13 @@ impl IntrinsicFunc { .unique_id("039ff70bc7922338978ab52a39156992b7d8e3390f0ef7e99d5b6ffd43141d8a"); data_builder.backend_kind(FuncSpecBackendKind::Validation); data_builder.response_type(FuncSpecBackendResponseType::Validation); + builder.argument( + FuncArgumentSpec::builder() + .name("value") + .kind(FuncArgumentKind::Any) + .build() + .map_err(|e| FuncError::IntrinsicSpecCreation(e.to_string()))?, + ); } }; @@ -157,3 +209,16 @@ impl IntrinsicFunc { }) } } + +impl From for IntrinsicFunc { + fn from(value: PropKind) -> Self { + match value { + PropKind::Array => IntrinsicFunc::SetArray, + PropKind::Boolean => IntrinsicFunc::SetBoolean, + PropKind::Integer => IntrinsicFunc::SetInteger, + PropKind::Map => IntrinsicFunc::SetMap, + PropKind::Object => IntrinsicFunc::SetObject, + PropKind::String => IntrinsicFunc::SetString, + } + } +} diff --git a/lib/dal/src/history_event.rs b/lib/dal/src/history_event.rs index 98da1eb001..622b31e163 100644 --- a/lib/dal/src/history_event.rs +++ b/lib/dal/src/history_event.rs @@ -1,3 +1,4 @@ +use chrono::{DateTime, Utc}; use serde::{Deserialize, Serialize}; use si_data_nats::NatsError; use si_data_pg::PgError; @@ -5,13 +6,10 @@ use strum::Display as StrumDisplay; use telemetry::prelude::*; use thiserror::Error; +use crate::actor_view::ActorView; use crate::{pk, DalContext, Timestamp, UserPk}; use 
crate::{Tenancy, TransactionsError}; -pub use metadata::HistoryEventMetadata; - -mod metadata; - #[remain::sorted] #[derive(Error, Debug)] pub enum HistoryEventError { @@ -21,6 +19,8 @@ pub enum HistoryEventError { Pg(#[from] PgError), #[error("error serializing/deserializing json: {0}")] SerdeJson(#[from] serde_json::Error), + #[error("standard model error: {0}")] + StandardModel(String), #[error("transactions error: {0}")] Transactions(#[from] TransactionsError), } @@ -92,3 +92,32 @@ impl HistoryEvent { Ok(object) } } + +#[derive(Deserialize, Serialize, Debug, Clone, PartialEq, Eq)] +#[serde(rename_all = "camelCase")] +pub struct HistoryEventMetadata { + pub(crate) actor: ActorView, + pub(crate) timestamp: DateTime, +} + +impl HistoryEventMetadata { + pub async fn from_history_actor_timestamp( + ctx: &DalContext, + value: HistoryActorTimestamp, + ) -> HistoryEventResult { + let actor = ActorView::from_history_actor(ctx, value.actor) + .await + .map_err(|e| HistoryEventError::StandardModel(e.to_string()))?; + + Ok(Self { + actor, + timestamp: value.timestamp, + }) + } +} + +#[derive(Deserialize, Serialize, Debug, Clone, Copy, PartialEq, Eq)] +pub struct HistoryActorTimestamp { + pub actor: HistoryActor, + pub timestamp: DateTime, +} diff --git a/lib/dal/src/installed_pkg/asset.rs b/lib/dal/src/installed_pkg/asset.rs index c923cc133c..3b9eb1c37f 100644 --- a/lib/dal/src/installed_pkg/asset.rs +++ b/lib/dal/src/installed_pkg/asset.rs @@ -2,6 +2,7 @@ use super::{InstalledPkgId, InstalledPkgResult}; use serde::{Deserialize, Serialize}; use telemetry::prelude::*; +// use crate::schema::variant::definition::SchemaVariantDefinitionId; use crate::schema::variant::definition::SchemaVariantDefinitionId; use crate::{ impl_standard_model, pk, standard_model, standard_model_accessor, DalContext, FuncId, SchemaId, @@ -119,18 +120,18 @@ impl InstalledPkgAssetTyped { } } - pub fn new_for_schema_variant_definition( - schema_variant_definition_id: SchemaVariantDefinitionId, - 
installed_pkg_id: InstalledPkgId, - hash: String, - ) -> Self { - Self::SchemaVariantDefinition { - installed_pkg_asset_id: InstalledPkgAssetId::NONE, - installed_pkg_id, - id: schema_variant_definition_id, - hash, - } - } + // pub fn new_for_schema_variant_definition( + // schema_variant_definition_id: SchemaVariantDefinitionId, + // installed_pkg_id: InstalledPkgId, + // hash: String, + // ) -> Self { + // Self::SchemaVariantDefinition { + // installed_pkg_asset_id: InstalledPkgAssetId::NONE, + // installed_pkg_id, + // id: schema_variant_definition_id, + // hash, + // } + // } pub fn new_for_func(func_id: FuncId, installed_pkg_id: InstalledPkgId, hash: String) -> Self { Self::Func { @@ -230,7 +231,6 @@ impl InstalledPkgAsset { hash, InstalledPkgAssetKind::SchemaVariantDefinition, ), - InstalledPkgAssetTyped::Func { installed_pkg_id, id, @@ -297,39 +297,39 @@ impl InstalledPkgAsset { } } - pub fn as_installed_schema_variant_definition( - &self, - ) -> InstalledPkgResult { - let typed: InstalledPkgAssetTyped = self.into(); + // pub fn as_installed_schema_variant_definition( + // &self, + // ) -> InstalledPkgResult { + // let typed: InstalledPkgAssetTyped = self.into(); - match typed { - InstalledPkgAssetTyped::SchemaVariantDefinition { .. } => Ok(typed), - InstalledPkgAssetTyped::SchemaVariant { - installed_pkg_asset_id, - .. - } => Err(super::InstalledPkgError::InstalledPkgKindMismatch( - installed_pkg_asset_id, - InstalledPkgAssetKind::SchemaVariantDefinition, - InstalledPkgAssetKind::SchemaVariant, - )), - InstalledPkgAssetTyped::Schema { - installed_pkg_asset_id, - .. - } => Err(super::InstalledPkgError::InstalledPkgKindMismatch( - installed_pkg_asset_id, - InstalledPkgAssetKind::SchemaVariantDefinition, - InstalledPkgAssetKind::Schema, - )), - InstalledPkgAssetTyped::Func { - installed_pkg_asset_id, - .. 
- } => Err(super::InstalledPkgError::InstalledPkgKindMismatch( - installed_pkg_asset_id, - InstalledPkgAssetKind::SchemaVariantDefinition, - InstalledPkgAssetKind::Func, - )), - } - } + // match typed { + // InstalledPkgAssetTyped::SchemaVariantDefinition { .. } => Ok(typed), + // InstalledPkgAssetTyped::SchemaVariant { + // installed_pkg_asset_id, + // .. + // } => Err(super::InstalledPkgError::InstalledPkgKindMismatch( + // installed_pkg_asset_id, + // InstalledPkgAssetKind::SchemaVariantDefinition, + // InstalledPkgAssetKind::SchemaVariant, + // )), + // InstalledPkgAssetTyped::Schema { + // installed_pkg_asset_id, + // .. + // } => Err(super::InstalledPkgError::InstalledPkgKindMismatch( + // installed_pkg_asset_id, + // InstalledPkgAssetKind::SchemaVariantDefinition, + // InstalledPkgAssetKind::Schema, + // )), + // InstalledPkgAssetTyped::Func { + // installed_pkg_asset_id, + // .. + // } => Err(super::InstalledPkgError::InstalledPkgKindMismatch( + // installed_pkg_asset_id, + // InstalledPkgAssetKind::SchemaVariantDefinition, + // InstalledPkgAssetKind::Func, + // )), + // } + // } pub fn as_installed_schema_variant(&self) -> InstalledPkgResult { let typed: InstalledPkgAssetTyped = self.into(); diff --git a/lib/dal/src/job/consumer.rs b/lib/dal/src/job/consumer.rs index d5e2daacbb..8a1bfbaa6d 100644 --- a/lib/dal/src/job/consumer.rs +++ b/lib/dal/src/job/consumer.rs @@ -3,86 +3,70 @@ use chrono::{DateTime, Utc}; use serde::{Deserialize, Serialize}; use serde_json::Value; use si_data_nats::NatsError; -use si_data_pg::{PgError, PgPoolError}; +use si_data_pg::PgPoolError; use thiserror::Error; use tokio::task::JoinError; -use crate::diagram::summary_diagram::SummaryDiagramError; -use crate::property_editor::values_summary::PropertyEditorValuesSummaryError; +use crate::prop::PropError; +use crate::validation::resolver::ValidationResolverError; use crate::{ - fix::FixError, func::binding_return_value::FuncBindingReturnValueError, - job::producer::BlockingJobError, 
job::producer::JobProducerError, status::StatusUpdaterError, - AccessBuilder, ActionPrototypeError, ActionPrototypeId, AttributeValueError, ComponentError, - ComponentId, DalContext, DalContextBuilder, FixBatchId, FixResolverError, PropError, - StandardModelError, TransactionsError, ValidationResolverError, Visibility, WsEventError, + attribute::value::AttributeValueError, + job::definition::dependent_values_update::DependentValueUpdateError, + job::producer::BlockingJobError, job::producer::JobProducerError, AccessBuilder, + ActionPrototypeId, DalContext, DalContextBuilder, StandardModelError, TransactionsError, + Visibility, WsEventError, }; #[remain::sorted] #[derive(Error, Debug)] pub enum JobConsumerError { - #[error("action named {0} not found for component {1}")] - ActionNotFound(String, ComponentId), - #[error(transparent)] - ActionPrototype(#[from] ActionPrototypeError), + // #[error("action named {0} not found for component {1}")] + // ActionNotFound(String, ComponentId), #[error("ActionProtoype {0} not found")] ActionPrototypeNotFound(ActionPrototypeId), #[error("arg {0:?} not found at index {1}")] ArgNotFound(JobInfo, usize), - #[error(transparent)] + #[error("attribute value error: {0}")] AttributeValue(#[from] AttributeValueError), #[error("Error blocking on job: {0}")] BlockingJob(#[from] BlockingJobError), - #[error(transparent)] - Chrono(#[from] chrono::ParseError), - #[error(transparent)] - Component(#[from] ComponentError), - #[error("component {0} is destroyed")] - ComponentIsDestroyed(ComponentId), - #[error("component {0} not found")] - ComponentNotFound(ComponentId), + // #[error("component {0} is destroyed")] + // ComponentIsDestroyed(ComponentId), + // #[error("component {0} not found")] + // ComponentNotFound(ComponentId), #[error(transparent)] CouncilClient(#[from] council_server::client::ClientError), #[error("Protocol error with council: {0}")] CouncilProtocol(String), - #[error(transparent)] - Fix(#[from] FixError), - 
#[error(transparent)] - FixResolver(#[from] FixResolverError), - #[error(transparent)] - FuncBindingReturnValue(#[from] FuncBindingReturnValueError), + #[error("dependent value update error: {0}")] + DependentValueUpdate(#[from] DependentValueUpdateError), #[error("Invalid job arguments. Expected: {0} Actual: {1:?}")] InvalidArguments(String, Vec), #[error(transparent)] Io(#[from] ::std::io::Error), #[error(transparent)] JobProducer(#[from] JobProducerError), - #[error("missing fix execution batch for id: {0}")] - MissingFixBatch(FixBatchId), + // #[error("missing fix execution batch for id: {0}")] + // MissingFixBatch(FixBatchId), #[error(transparent)] Nats(#[from] NatsError), #[error("nats is unavailable")] NatsUnavailable, - #[error("no schema found for component {0}")] - NoSchemaFound(ComponentId), - #[error("no schema variant found for component {0}")] - NoSchemaVariantFound(ComponentId), - #[error(transparent)] - PgError(#[from] PgError), + // #[error("no schema found for component {0}")] + // NoSchemaFound(ComponentId), + // #[error("no schema variant found for component {0}")] + // NoSchemaVariantFound(ComponentId), #[error(transparent)] PgPool(#[from] PgPoolError), #[error(transparent)] Prop(#[from] PropError), - #[error(transparent)] - PropertyEditorValuesSummary(#[from] PropertyEditorValuesSummaryError), + // #[error(transparent)] + // PropertyEditorValuesSummary(#[from] PropertyEditorValuesSummaryError), #[error(transparent)] SerdeJson(#[from] serde_json::Error), #[error(transparent)] StandardModel(#[from] StandardModelError), #[error(transparent)] - StatusUpdaterError(#[from] StatusUpdaterError), - #[error(transparent)] - SummaryDiagram(#[from] SummaryDiagramError), - #[error(transparent)] TokioTask(#[from] JoinError), #[error(transparent)] Transactions(#[from] TransactionsError), diff --git a/lib/dal/src/job/definition.rs b/lib/dal/src/job/definition.rs index 7b8d291e64..a4ca715436 100644 --- a/lib/dal/src/job/definition.rs +++ 
b/lib/dal/src/job/definition.rs @@ -1,7 +1,7 @@ pub mod dependent_values_update; -mod fix; -mod refresh; +//mod fix; +//mod refresh; pub use dependent_values_update::DependentValuesUpdate; -pub use fix::{FixItem, FixesJob}; -pub use refresh::RefreshJob; +//pub use fix::{FixItem, FixesJob}; +//pub use refresh::RefreshJob; diff --git a/lib/dal/src/job/definition/dependent_values_update.rs b/lib/dal/src/job/definition/dependent_values_update.rs index 4c063468b0..759f9b5975 100644 --- a/lib/dal/src/job/definition/dependent_values_update.rs +++ b/lib/dal/src/job/definition/dependent_values_update.rs @@ -1,26 +1,42 @@ +use std::{ + collections::{HashMap, HashSet}, + convert::TryFrom, +}; + use async_trait::async_trait; -use chrono::{DateTime, Utc}; -use council_server::ManagementResponse; use serde::{Deserialize, Serialize}; -use std::collections::HashSet; -use std::{collections::HashMap, convert::TryFrom}; use telemetry::prelude::*; -use tokio::task::JoinSet; +use thiserror::Error; +use tokio::task::{JoinError, JoinSet}; -use crate::property_editor; -use crate::tasks::StatusReceiverClient; -use crate::tasks::StatusReceiverRequest; -use crate::{diagram, ComponentId}; +//use crate::tasks::StatusReceiverClient; +//use crate::tasks::StatusReceiverRequest; use crate::{ + attribute::value::{ + dependent_value_graph::DependentValueGraph, AttributeValueError, PrototypeExecutionResult, + }, job::consumer::{ JobConsumer, JobConsumerError, JobConsumerMetadata, JobConsumerResult, JobInfo, }, job::producer::{JobProducer, JobProducerResult}, - AccessBuilder, AttributeValue, AttributeValueError, AttributeValueId, AttributeValueResult, - DalContext, Prop, StandardModel, StatusUpdater, ValidationResolver, ValidationStatus, - Visibility, WsEvent, + AccessBuilder, AttributeValue, AttributeValueId, DalContext, + /*WsEvent*/ + TransactionsError, /*StatusUpdater,*/ + Visibility, }; -use crate::{FuncBindingReturnValue, InternalProvider}; + +#[remain::sorted] +#[derive(Debug, Error)] +pub enum 
DependentValueUpdateError { + #[error("attribute value error: {0}")] + AttributeValue(#[from] AttributeValueError), + #[error(transparent)] + TokioTask(#[from] JoinError), + #[error(transparent)] + Transactions(#[from] TransactionsError), +} + +pub type DependentValueUpdateResult = Result; #[derive(Debug, Deserialize, Serialize)] struct DependentValuesUpdateArgs { @@ -49,9 +65,6 @@ impl DependentValuesUpdate { visibility: Visibility, attribute_values: Vec, ) -> Box { - // TODO(nick,paulo,zack,jacob): ensure we do not _have_ to force non deleted visibility in the future. - let visibility = visibility.to_non_deleted(); - Box::new(Self { attribute_values, access_builder, @@ -59,10 +72,6 @@ impl DependentValuesUpdate { job: None, }) } - - fn job_id(&self) -> Option { - self.job.as_ref().map(|j| j.id.clone()) - } } impl JobProducer for DependentValuesUpdate { @@ -98,513 +107,112 @@ impl JobConsumer for DependentValuesUpdate { ) )] async fn run(&self, ctx: &mut DalContext) -> JobConsumerResult<()> { - let jid = council_server::Id::from_string(&self.job_id().unwrap())?; - let mut council = council_server::Client::new( - ctx.nats_conn().clone(), - ctx.nats_conn() - .metadata() - .subject_prefix() - .map(|p| p.to_string()), - jid, - self.visibility().change_set_pk.into(), - ) - .await?; - let pub_council = council.clone_into_pub(); - let council_management = council_server::ManagementClient::new( - ctx.nats_conn(), - ctx.nats_conn() - .metadata() - .subject_prefix() - .map(|p| p.to_string()), - ) - .await?; - - let res = self - .inner_run(ctx, &mut council, pub_council, council_management) - .await; - - council.bye().await?; - - res + Ok(self.inner_run(ctx).await?) 
} } impl DependentValuesUpdate { - async fn inner_run( - &self, - ctx: &mut DalContext, - council: &mut council_server::Client, - pub_council: council_server::PubClient, - mut management_council: council_server::ManagementClient, - ) -> JobConsumerResult<()> { - // TODO(nick,paulo,zack,jacob): ensure we do not _have_ to do this in the future. - ctx.update_without_deleted_visibility(); - - let mut status_updater = StatusUpdater::initialize(ctx).await; + async fn inner_run(&self, ctx: &mut DalContext) -> DependentValueUpdateResult<()> { + let start = tokio::time::Instant::now(); let mut dependency_graph = - AttributeValue::dependent_value_graph(ctx, &self.attribute_values).await?; - - // The dependent_value_graph is read-only, so we can safely rollback the inner txns, to - // make sure we don't hold open txns unnecessarily - ctx.rollback().await?; - - // NOTE(nick,jacob): uncomment this for debugging. - // Save printed output to a file and execute the following: "dot -Tsvg -o .svg" - // println!("{}", dependency_graph_to_dot(ctx, &dependency_graph).await?); + DependentValueGraph::for_values(ctx, self.attribute_values.clone()).await?; - // Any of our initial inputs that aren't using one of the `si:set*`, or `si:unset` - // functions need to be evaluated, since we might be populating the initial functions of a - // `Component` during `Component` creation. - let avs_with_dynamic_functions: HashSet = HashSet::from_iter( - AttributeValue::ids_using_dynamic_functions(ctx, &self.attribute_values) - .await? 
- .iter() - .copied(), + debug!( + "DependentValueGraph calculation took: {:?}", + start.elapsed() ); - for id in &self.attribute_values { - if avs_with_dynamic_functions.contains(id) { - dependency_graph.entry(*id).or_insert_with(Vec::new); - } - } - - debug!(?dependency_graph, "Generated dependency graph"); - if dependency_graph.is_empty() { - return Ok(()); + // Remove the first set of independent_values since they should already have had their functions executed + for value in dependency_graph.independent_values() { + dependency_graph.remove_value(value); } - // Cache the original dependency graph to send the status receiver. - let original_dependency_graph = dependency_graph.clone(); - - council - .register_dependency_graph( - dependency_graph - .iter() - .map(|(key, value)| (key.into(), value.iter().map(Into::into).collect())) - .collect(), - ) - .await?; - - let mut enqueued: Vec = dependency_graph.keys().copied().collect(); - enqueued.extend(dependency_graph.values().flatten().copied()); - status_updater.values_queued(ctx, enqueued).await; - - // Status updater reads from the database and uses its own connection from the pg_pool to - // do writes - ctx.rollback().await?; - - let mut update_tasks = JoinSet::new(); - - // This is the core loop. Use both the individual and the management subscription to determine what to do next. - let needs_restart = tokio::select! { - result = self.listen(ctx, council, pub_council, dependency_graph, &mut status_updater, &mut update_tasks) => result, - management_result = self.listen_management(&mut management_council) => management_result, - }?; - - // If we need to restart, we need to stop what we are doing, tell the status update that we are stopping, and - // then re-enqueue ourselves. 
- if needs_restart { - update_tasks.abort_all(); + let mut seen_ids = HashSet::new(); + let mut task_id_to_av_id = HashMap::new(); + let mut update_join_set = JoinSet::new(); - info!(?self.attribute_values, "aborted update tasks and restarting job"); - - status_updater - .values_completed(ctx, self.attribute_values.clone()) - .await; - - ctx.enqueue_dependent_values_update(self.attribute_values.clone()) - .await?; - } - - // No matter what, we need to finish the updater - status_updater.finish(ctx).await; - - let client = StatusReceiverClient::new(ctx.nats_conn().clone()).await; - if let Err(e) = client - .publish(&StatusReceiverRequest { - visibility: *ctx.visibility(), - tenancy: *ctx.tenancy(), - dependent_graph: original_dependency_graph, - }) - .await - { - error!("could not publish status receiver request: {:?}", e); - } - - ctx.commit().await?; + let mut independent_value_ids = dependency_graph.independent_values(); - Ok(()) - } - - #[instrument( - name = "dependent_values_update.listen", - level = "info", - skip_all, - fields() - )] - async fn listen( - &self, - ctx: &DalContext, - council: &mut council_server::Client, - pub_council: council_server::PubClient, - mut dependency_graph: HashMap>, - status_updater: &mut StatusUpdater, - update_tasks: &mut JoinSet>, - ) -> JobConsumerResult { - let ctx_builder = ctx.to_builder(); - let mut needs_restart = false; - - while !dependency_graph.is_empty() { - match council.fetch_response().await? 
{ - Some(response) => match response { - council_server::Response::OkToProcess { node_ids } => { - debug!(?node_ids, job_id = ?self.job_id(), "Ok to start processing nodes"); - for node_id in node_ids { - let id = AttributeValueId::from(node_id); - - status_updater.values_running(ctx, vec![id]).await; - // Status updater reads from the database and uses its own connection - // from the pg_pool to do writes - ctx.rollback().await?; - - let task_ctx = ctx_builder - .build(self.access_builder().build(self.visibility())) - .await?; - - let attribute_value = AttributeValue::get_by_id(&task_ctx, &id) - .await? - .ok_or_else(|| { - AttributeValueError::NotFound(id, self.visibility()) - })?; - update_tasks.spawn(update_value( - task_ctx, - attribute_value, - pub_council.clone(), - Span::current(), - )); - } - } - council_server::Response::BeenProcessed { node_id } => { - debug!(?node_id, job_id = ?self.job_id(), "Node has been processed by a job"); - let id = AttributeValueId::from(node_id); - dependency_graph.remove(&id); - - // Send a completed status for this value and *remove* it from the hash - status_updater.values_completed(ctx, vec![id]).await; + loop { + if independent_value_ids.is_empty() && task_id_to_av_id.is_empty() { + break; + } - ctx.commit().await?; - } - council_server::Response::Failed { node_id } => { - debug!(?node_id, job_id = ?self.job_id(), "Node failed on another job"); - let id = AttributeValueId::from(node_id); - dependency_graph.remove(&id); + for attribute_value_id in &independent_value_ids { + let attribute_value_id = attribute_value_id.to_owned(); // release our borrow - // Send a completed status for this value and *remove* it from the hash - status_updater.values_completed(ctx, vec![id]).await; - // Status updater reads from the database and uses its own connection from - // the pg_pool to do writes - ctx.rollback().await?; - } - council_server::Response::Restart => { - info!("received response that job needs restart"); - needs_restart = 
true; - - // Ejecto seato cuz. - break; - } - council_server::Response::Shutdown => break, - }, - None => { - // FIXME(nick): reconnect. Same "FIXME" as the one found in the original listener. - warn!("subscriber has been unsubscribed or the connection has been closed"); - break; + if !seen_ids.contains(&attribute_value_id) { + let join_handle = update_join_set.spawn( + values_from_prototype_function_execution(ctx.clone(), attribute_value_id), + ); + task_id_to_av_id.insert(join_handle.id(), attribute_value_id); + seen_ids.insert(attribute_value_id); } } - ctx.commit().await?; - - // If we get `None` back from the `JoinSet` that means that there are no - // further tasks in the `JoinSet` for us to wait on. This should only happen - // after we've stopped adding new tasks to the `JoinSet`, which means either: - // * We have completely walked the initial graph, and have visited every - // node. - // * We've encountered a cycle that means we can no longer make any - // progress on walking the graph. - // In both cases, there isn't anything more we can do, so we can stop looking - // at the graph to find more work. - while let Some(future_result) = update_tasks.join_next().await { - // We get back a `Some>>`. We've already unwrapped the - // `Some`, the outermost `Result` is a `JoinError` to let us know if - // anything went wrong in joining the task. - match future_result { - // We have successfully updated a value - Ok(Ok(())) => {} - // There was an error (with our code) when updating the value - Ok(Err(err)) => { - warn!(error = ?err, "error updating value"); - return Err(err); - } - // There was a Tokio JoinSet error when joining the task back (i.e. 
likely - // I/O error) - Err(err) => { - warn!(error = ?err, "error when joining update task"); - return Err(err.into()); + // Wait for a task to finish + if let Some(join_result) = update_join_set.join_next_with_id().await { + let (task_id, execution_result) = join_result?; + if let Some(finished_value_id) = task_id_to_av_id.remove(&task_id) { + match execution_result { + Ok(execution_values) => { + match AttributeValue::set_values_from_execution_result( + ctx, + finished_value_id, + execution_values, + ) + .await + { + // Remove the value, so that any values that dependent on it will + // become independent values (once all other dependencies are removed) + Ok(_) => dependency_graph.remove_value(finished_value_id), + Err(err) => { + error!("error setting values from executed prototype function for AttributeValue {finished_value_id}: {err}"); + dependency_graph.cycle_on_self(finished_value_id); + } + } + } + Err(err) => { + // By adding an outgoing edge from the failed node to itself it will + // never appear in the `independent_values` call above since that looks for + // nodes *without* outgoing edges. Thus we will never try to re-execute + // the function for this value, nor will we execute anything in the + // dependency graph connected to this value + + error!("error executing prototype function for AttributeValue {finished_value_id}: {err}"); + dependency_graph.cycle_on_self(finished_value_id); + } } } } + + independent_value_ids = dependency_graph.independent_values(); } - Ok(needs_restart) - } + debug!("DependentValuesUpdate took: {:?}", start.elapsed()); - #[instrument( - name = "dependent_values_update.listen_management", - level = "info", - skip_all, - fields() - )] - async fn listen_management( - &self, - council_management: &mut council_server::ManagementClient, - ) -> JobConsumerResult { - let needs_restart = match council_management.fetch_response().await? 
{ - Some(management_response) => match management_response { - ManagementResponse::Restart => true, - }, - None => { - // FIXME(nick): reconnect. Same "FIXME" as the one found in the original listener. - warn!( - "management subscriber has been unsubscribed or the connection has been closed" - ); - false - } - }; - info!("received management response that job needs restart"); - Ok(needs_restart) + ctx.commit().await?; + + Ok(()) } } -/// Wrapper around `AttributeValue.update_from_prototype_function(&ctx)` to get it to +/// Wrapper around `AttributeValue.values_from_prototype_function_execution(&ctx)` to get it to /// play more nicely with being spawned into a `JoinSet`. #[instrument( - name = "dependent_values_update.update_value", - parent = &parent_span, + name = "dependent_values_update.values_from_prototype_function_execution", skip_all, level = "info", fields( - attribute_value.id = %attribute_value.id(), + attribute_value.id = %attribute_value_id, ) )] -async fn update_value( +async fn values_from_prototype_function_execution( ctx: DalContext, - mut attribute_value: AttributeValue, - council: council_server::PubClient, - parent_span: Span, -) -> JobConsumerResult<()> { - let update_result = attribute_value.update_from_prototype_function(&ctx).await; - // We don't propagate the error up, because we want the rest of the nodes in the graph to make progress - // if they are able to. - if update_result.is_err() { - error!(?update_result, attribute_value_id = %attribute_value.id(), "Error updating AttributeValue"); - council - .failed_processing_value(attribute_value.id().into()) - .await?; - ctx.rollback().await?; - } - - // If this is for an internal provider corresponding to a root prop for the schema variant of an existing component, - // then we want to update summary tables. - let value = if let Some(fbrv) = - FuncBindingReturnValue::get_by_id(&ctx, &attribute_value.func_binding_return_value_id()) - .await? 
- { - if let Some(component_value_json) = fbrv.unprocessed_value() { - if !attribute_value.context.is_component_unset() - && !attribute_value.context.is_internal_provider_unset() - && InternalProvider::is_for_root_prop( - &ctx, - attribute_value.context.internal_provider_id(), - ) - .await - .unwrap() - { - update_summary_tables( - &ctx, - component_value_json, - attribute_value.context.component_id(), - ) - .await?; - } - component_value_json.clone() - } else { - serde_json::Value::Null - } - } else { - serde_json::Value::Null - }; - - ctx.commit().await?; - - if update_result.is_ok() { - council.processed_value(attribute_value.id().into()).await?; - } - - Prop::run_validation( - &ctx, - attribute_value.context.prop_id(), - attribute_value.context.component_id(), - attribute_value.key(), - value, - ) - .await; - - ctx.commit().await?; - - Ok(()) -} - -#[instrument( - name = "dependent_values_update.update_summary_tables", - skip_all, - level = "info", - fields( - component.id = %component_id, - ) -)] -pub async fn update_summary_tables( - ctx: &DalContext, - component_value_json: &serde_json::Value, - component_id: ComponentId, -) -> JobConsumerResult<()> { - // Qualification summary table - if we add more summary tables, this should be extracted to its - // own method. 
- let mut total: i64 = 0; - let mut warned: i64 = 0; - let mut succeeded: i64 = 0; - let mut failed: i64 = 0; - let mut name: String = String::new(); - let mut color: String = String::new(); - let mut component_type: String = String::new(); - let mut has_resource: bool = false; - let mut deleted_at: Option = None; - let mut deleted_at_datetime: Option> = None; - if let Some(ref deleted_at) = deleted_at { - let deleted_at_datetime_inner: DateTime = deleted_at.parse()?; - deleted_at_datetime = Some(deleted_at_datetime_inner); - } - - if let Some(component_name) = component_value_json.pointer("/si/name") { - if let Some(component_name_str) = component_name.as_str() { - name = String::from(component_name_str); - } - } - - if let Some(component_color) = component_value_json.pointer("/si/color") { - if let Some(component_color_str) = component_color.as_str() { - color = String::from(component_color_str); - } - } - - if let Some(component_type_json) = component_value_json.pointer("/si/type") { - if let Some(component_type_str) = component_type_json.as_str() { - component_type = String::from(component_type_str); - } - } - - if let Some(_resource) = component_value_json.pointer("/resource/payload") { - has_resource = true; - } - - if let Some(deleted_at_value) = component_value_json.pointer("/deleted_at") { - if let Some(deleted_at_str) = deleted_at_value.as_str() { - deleted_at = Some(deleted_at_str.into()); - } - } - - if let Some(qualification_map_value) = component_value_json.pointer("/qualification") { - if let Some(qualification_map) = qualification_map_value.as_object() { - for qual_result_map_value in qualification_map.values() { - if let Some(qual_result_map) = qual_result_map_value.as_object() { - if let Some(qual_result) = qual_result_map.get("result") { - if let Some(qual_result_string) = qual_result.as_str() { - total += 1; - match qual_result_string { - "success" => succeeded += 1, - "warning" => warned += 1, - "failure" => failed += 1, - &_ => (), - } - } - } 
- } - } - } - } - - let mut success = None; - for resolver in ValidationResolver::find_by_attr(ctx, "component_id", &component_id).await? { - if success.is_none() { - success = Some(true); - } - - if resolver.value()?.status != ValidationStatus::Success { - success = Some(false); - } - } - - if let Some(success) = success { - total += 1; - if success { - succeeded += 1; - } else { - failed += 1; - } - } - - let _row = ctx - .txns() - .await? - .pg() - .query_one( - "SELECT object FROM summary_qualification_update_v2($1, $2, $3, $4, $5, $6, $7, $8, $9)", - &[ - ctx.tenancy(), - ctx.visibility(), - &component_id, - &name, - &total, - &warned, - &succeeded, - &failed, - &deleted_at_datetime, - ], - ) - .await?; - - diagram::summary_diagram::component_update( - ctx, - &component_id, - name, - color, - component_type, - has_resource, - deleted_at, - ) - .await?; - - property_editor::values_summary::PropertyEditorValuesSummary::create_or_update_component_entry( - ctx, - component_id, - ) - .await?; - - WsEvent::component_updated(ctx, component_id) - .await? - .publish_on_commit(ctx) - .await?; - - Ok(()) + attribute_value_id: AttributeValueId, +) -> DependentValueUpdateResult { + Ok(AttributeValue::execute_prototype_function(&ctx, attribute_value_id).await?) } impl TryFrom for DependentValuesUpdate { @@ -620,41 +228,3 @@ impl TryFrom for DependentValuesUpdate { }) } } - -#[allow(unused)] -async fn dependency_graph_to_dot( - ctx: &DalContext, - graph: &HashMap>, -) -> AttributeValueResult { - let mut node_definitions = String::new(); - for attr_val_id in graph.keys() { - let attr_val = AttributeValue::get_by_id(ctx, attr_val_id) - .await? 
- .ok_or_else(|| AttributeValueError::NotFound(*attr_val_id, *ctx.visibility()))?; - let prop_id = attr_val.context.prop_id(); - let internal_provider_id = attr_val.context.internal_provider_id(); - let external_provider_id = attr_val.context.external_provider_id(); - let component_id = attr_val.context.component_id(); - node_definitions.push_str(&format!( - "\"{attr_val_id}\"[label=\"\\lAttribute Value: {attr_val_id}\\n\\lProp: {prop_id}\\lInternal Provider: {internal_provider_id}\\lExternal Provider: {external_provider_id}\\lComponent: {component_id}\"];", - )); - } - - let mut node_graph = String::new(); - for (attr_val, inputs) in graph { - let dependencies = format!( - "{{{dep_list}}}", - dep_list = inputs - .iter() - .map(|i| format!("\"{i}\"")) - .collect::>() - .join(" ") - ); - let dependency_line = format!("{dependencies} -> \"{attr_val}\";",); - node_graph.push_str(&dependency_line); - } - - let dot_digraph = format!("digraph G {{{node_definitions}{node_graph}}}"); - - Ok(dot_digraph) -} diff --git a/lib/dal/src/job/processor/nats_processor.rs b/lib/dal/src/job/processor/nats_processor.rs index d6921287b3..bb6e57228f 100644 --- a/lib/dal/src/job/processor/nats_processor.rs +++ b/lib/dal/src/job/processor/nats_processor.rs @@ -136,6 +136,8 @@ impl JobQueueProcessor for NatsProcessor { } } + info!("processed_queue"); + if !results.is_empty() { Err(BlockingJobError::JobExecution( results diff --git a/lib/dal/src/job/queue.rs b/lib/dal/src/job/queue.rs index d6bede0c0a..3cebdbbd15 100644 --- a/lib/dal/src/job/queue.rs +++ b/lib/dal/src/job/queue.rs @@ -1,5 +1,6 @@ use super::producer::JobProducer; -use crate::{AccessBuilder, AttributeValueId, ChangeSetPk, DependentValuesUpdate, Visibility}; +use crate::job::definition::DependentValuesUpdate; +use crate::{AccessBuilder, AttributeValueId, ChangeSetPk, Visibility}; use std::{collections::HashMap, collections::HashSet, collections::VecDeque, sync::Arc}; use tokio::sync::Mutex; diff --git a/lib/dal/src/lib.rs 
b/lib/dal/src/lib.rs index e952bc9531..c59107e45e 100644 --- a/lib/dal/src/lib.rs +++ b/lib/dal/src/lib.rs @@ -5,169 +5,124 @@ use std::sync::Arc; use std::time::Duration; use rand::Rng; +use rebaser_client::Config as RebaserClientConfig; use serde_with::{DeserializeFromStr, SerializeDisplay}; use si_crypto::SymmetricCryptoService; +use si_data_nats::{NatsClient, NatsError}; +use si_data_pg::{PgError, PgPool, PgPoolError}; use strum::{Display, EnumString, EnumVariantNames}; +use telemetry::prelude::*; use thiserror::Error; use tokio::time; use tokio::time::Instant; - -pub use action::{Action, ActionError, ActionId}; -pub use action_prototype::{ - ActionKind, ActionPrototype, ActionPrototypeContext, ActionPrototypeError, ActionPrototypeId, - ActionPrototypeView, -}; -pub use actor_view::ActorView; -pub use attribute::value::view::AttributeView; -pub use attribute::{ - context::{ - AttributeContext, AttributeContextBuilder, AttributeContextBuilderError, - AttributeContextError, AttributeReadContext, - }, - prototype::argument::{ - AttributePrototypeArgument, AttributePrototypeArgumentError, AttributePrototypeArgumentId, - AttributePrototypeArgumentResult, - }, - prototype::{ - AttributePrototype, AttributePrototypeError, AttributePrototypeId, AttributePrototypeResult, - }, - value::{ - AttributeValue, AttributeValueError, AttributeValueId, AttributeValuePayload, - AttributeValueResult, - }, -}; -pub use builtins::{BuiltinsError, BuiltinsResult}; -pub use change_set::{ChangeSet, ChangeSetError, ChangeSetPk, ChangeSetStatus}; -pub use code_view::{CodeLanguage, CodeView}; -pub use component::{ - resource::ResourceView, status::ComponentStatus, status::HistoryActorTimestamp, Component, - ComponentError, ComponentId, ComponentView, ComponentViewProperties, -}; -pub use context::{ - AccessBuilder, Connections, DalContext, DalContextBuilder, RequestContext, ServicesContext, - Transactions, TransactionsError, -}; -pub use diagram::{connection::Connection, Diagram, 
DiagramError, DiagramKind}; -pub use edge::{Edge, EdgeError, EdgeResult}; -pub use fix::batch::{FixBatch, FixBatchId}; -pub use fix::resolver::{FixResolver, FixResolverError, FixResolverId}; -pub use fix::{Fix, FixCompletionStatus, FixError, FixId}; -pub use func::argument::FuncArgument; -pub use func::binding_return_value::{FuncBindingReturnValue, FuncBindingReturnValueError}; -pub use func::{ - backend::{FuncBackendError, FuncBackendKind, FuncBackendResponseType}, - binding::{FuncBinding, FuncBindingError, FuncBindingId}, - variant::FuncVariant, - Func, FuncError, FuncId, FuncResult, -}; -pub use history_event::{HistoryActor, HistoryEvent, HistoryEventError}; -pub use index_map::IndexMap; -pub use job::definition::DependentValuesUpdate; -pub use job::processor::{JobQueueProcessor, NatsProcessor}; -pub use job_failure::{JobFailure, JobFailureError, JobFailureResult}; -pub use jwt_key::JwtPublicSigningKey; -pub use key_pair::{KeyPair, KeyPairError, KeyPairResult, PublicKey}; -pub use label_list::{LabelEntry, LabelList, LabelListError}; -pub use node::NodeId; -pub use node::{Node, NodeError, NodeKind}; -pub use node_menu::NodeMenuError; -pub use prop::{Prop, PropError, PropId, PropKind, PropPk, PropResult}; -pub use prototype_context::HasPrototypeContext; -pub use prototype_list_for_func::{ - PrototypeListForFunc, PrototypeListForFuncError, PrototypeListForFuncResult, -}; -pub use provider::external::{ExternalProvider, ExternalProviderError, ExternalProviderId}; -pub use provider::internal::{InternalProvider, InternalProviderError, InternalProviderId}; -pub use qualification::{QualificationError, QualificationView}; -pub use reconciliation_prototype::{ - ReconciliationPrototype, ReconciliationPrototypeContext, ReconciliationPrototypeError, - ReconciliationPrototypeId, -}; -pub use schema::variant::leaves::LeafInput; -pub use schema::variant::leaves::LeafInputLocation; -pub use schema::variant::leaves::LeafKind; -pub use 
schema::variant::root_prop::component_type::ComponentType; -pub use schema::variant::root_prop::RootProp; -pub use schema::variant::root_prop::RootPropChild; -pub use schema::variant::SchemaVariantError; -pub use schema::{Schema, SchemaError, SchemaId, SchemaPk, SchemaVariant, SchemaVariantId}; -pub use secret::{ - DecryptedSecret, EncryptedSecret, Secret, SecretAlgorithm, SecretError, SecretId, SecretPk, - SecretResult, SecretVersion, -}; -use si_data_nats::{NatsClient, NatsError}; -use si_data_pg::{PgError, PgPool, PgPoolError}; -pub use socket::{Socket, SocketArity, SocketId}; -pub use standard_model::{StandardModel, StandardModelError, StandardModelResult}; -pub use status::{ - StatusUpdate, StatusUpdateError, StatusUpdateResult, StatusUpdater, StatusUpdaterError, -}; -use telemetry::prelude::*; -pub use tenancy::{Tenancy, TenancyError}; -pub use timestamp::{Timestamp, TimestampError}; -pub use user::{User, UserClaim, UserError, UserPk, UserResult}; -pub use validation::resolver::{ - ValidationOutput, ValidationResolver, ValidationResolverError, ValidationResolverId, - ValidationStatus, -}; use veritech_client::CycloneEncryptionKey; -pub use visibility::{Visibility, VisibilityError}; -pub use workspace::{Workspace, WorkspaceError, WorkspacePk, WorkspaceResult, WorkspaceSignup}; -pub use ws_event::{WsEvent, WsEventError, WsEventResult, WsPayload}; use crate::builtins::SelectedTestBuiltinSchemas; -pub mod action; +//pub mod action; pub mod action_prototype; pub mod actor_view; pub mod attribute; pub mod authentication_prototype; pub mod builtins; pub mod change_set; +pub mod change_set_pointer; pub mod change_status; -pub mod code_view; pub mod component; pub mod context; pub mod diagram; -pub mod edge; -pub mod fix; pub mod func; pub mod history_event; -pub mod index_map; pub mod installed_pkg; pub mod job; pub mod job_failure; pub mod jwt_key; pub mod key_pair; pub mod label_list; -pub mod node; -pub mod node_menu; pub mod pkg; pub mod prop; -pub mod prop_tree; 
pub mod property_editor; -pub mod prototype_context; -pub mod prototype_list_for_func; pub mod provider; -pub mod qualification; -pub mod reconciliation_prototype; pub mod schema; -pub mod secret; pub mod serde_impls; -pub mod socket; pub mod standard_accessors; pub mod standard_model; pub mod standard_pk; -pub mod status; -pub mod tasks; pub mod tenancy; pub mod timestamp; pub mod user; pub mod validation; pub mod visibility; pub mod workspace; +pub mod workspace_snapshot; pub mod ws_event; +// TODO(nick,jacob): this should self-destruct once the new engine is in place. +// pub mod node; +// pub mod socket; + +//pub mod code_view; +// pub mod edge; +// pub mod fix; +// pub mod index_map; +pub mod node_menu; +// pub mod prop_tree; +// pub mod prototype_context; +// pub mod prototype_list_for_func; +pub mod qualification; +// pub mod reconciliation_prototype; +pub mod secret; +// pub mod status; +//pub mod tasks; + +pub use action_prototype::{ActionKind, ActionPrototype, ActionPrototypeId}; +pub use actor_view::ActorView; +pub use attribute::{ + prototype::{AttributePrototype, AttributePrototypeId}, + value::{AttributeValue, AttributeValueId}, +}; +pub use builtins::{BuiltinsError, BuiltinsResult}; +pub use change_set::{ChangeSet, ChangeSetError, ChangeSetPk, ChangeSetStatus}; +pub use component::Component; +pub use component::ComponentError; +pub use component::ComponentId; +pub use component::ComponentKind; +pub use context::{ + AccessBuilder, Connections, DalContext, DalContextBuilder, RequestContext, ServicesContext, + Transactions, TransactionsError, +}; +pub use func::{ + backend::{FuncBackendKind, FuncBackendResponseType}, + Func, FuncId, +}; +pub use history_event::{HistoryActor, HistoryEvent, HistoryEventError}; +pub use job::processor::{JobQueueProcessor, NatsProcessor}; +pub use job_failure::{JobFailure, JobFailureError, JobFailureResult}; +pub use jwt_key::JwtPublicSigningKey; +pub use key_pair::{KeyPair, KeyPairError, KeyPairResult, PublicKey}; +pub use 
label_list::{LabelEntry, LabelList, LabelListError}; +pub use prop::{Prop, PropId, PropKind}; +pub use provider::external::{ExternalProvider, ExternalProviderId}; +pub use provider::internal::{InternalProvider, InternalProviderId}; +pub use provider::ProviderArity; +pub use provider::ProviderKind; +pub use schema::variant::root_prop::component_type::ComponentType; +pub use schema::{Schema, SchemaError, SchemaId, SchemaVariant, SchemaVariantId}; +pub use secret::Secret; +pub use secret::SecretError; +pub use secret::SecretId; +pub use secret::SecretView; +pub use secret::{EncryptedSecret, SecretAlgorithm, SecretVersion}; +pub use standard_model::{StandardModel, StandardModelError, StandardModelResult}; +pub use tenancy::{Tenancy, TenancyError}; +pub use timestamp::{Timestamp, TimestampError}; +pub use user::{User, UserClaim, UserError, UserPk, UserResult}; +pub use visibility::{Visibility, VisibilityError}; +pub use workspace::{Workspace, WorkspaceError, WorkspacePk, WorkspaceResult}; +pub use workspace_snapshot::graph::WorkspaceSnapshotGraph; +pub use workspace_snapshot::WorkspaceSnapshot; +pub use ws_event::{WsEvent, WsEventError, WsEventResult, WsPayload}; + #[remain::sorted] #[derive(Error, Debug)] pub enum InitializationError { @@ -197,14 +152,16 @@ pub enum ModelError { #[error("builtins error: {0}")] Builtins(#[from] BuiltinsError), #[error(transparent)] + ContentStorePg(#[from] content_store::StoreError), + #[error(transparent)] Migration(#[from] PgPoolError), #[error(transparent)] Nats(#[from] NatsError), #[error("database error")] PgError(#[from] PgError), - #[error("transactions error")] + #[error("transactions error: {0}")] Transactions(#[from] TransactionsError), - #[error(transparent)] + #[error("workspace error: {0}")] Workspace(#[from] WorkspaceError), } @@ -212,7 +169,11 @@ pub type ModelResult = Result; #[instrument(level = "info", skip_all)] pub async fn migrate_all(services_context: &ServicesContext) -> ModelResult<()> { - 
migrate(services_context.pg_pool()).await?; + migrate( + services_context.pg_pool(), + services_context.content_store_pg_pool(), + ) + .await?; Ok(()) } @@ -242,14 +203,16 @@ pub async fn migrate_all_with_progress(services_context: &ServicesContext) -> Mo } #[instrument(level = "info", skip_all)] -pub async fn migrate(pg: &PgPool) -> ModelResult<()> { - Ok(pg.migrate(embedded::migrations::runner()).await?) +pub async fn migrate(pg: &PgPool, content_store_pg_pool: &PgPool) -> ModelResult<()> { + content_store::PgStore::migrate(content_store_pg_pool).await?; + pg.migrate(embedded::migrations::runner()).await?; + Ok(()) } #[allow(clippy::too_many_arguments)] #[instrument(level = "info", skip_all)] pub async fn migrate_local_builtins( - pg: &PgPool, + dal_pg: &PgPool, nats: &NatsClient, job_processor: Box, veritech: veritech_client::Client, @@ -258,9 +221,11 @@ pub async fn migrate_local_builtins( pkgs_path: PathBuf, module_index_url: String, symmetric_crypto_service: &SymmetricCryptoService, + rebaser_config: RebaserClientConfig, + content_store_pg_pool: &PgPool, ) -> ModelResult<()> { let services_context = ServicesContext::new( - pg.clone(), + dal_pg.clone(), nats.clone(), job_processor, veritech, @@ -268,13 +233,16 @@ pub async fn migrate_local_builtins( Some(pkgs_path), Some(module_index_url), symmetric_crypto_service.clone(), + rebaser_config, + content_store_pg_pool.clone(), ); let dal_context = services_context.into_builder(true); let mut ctx = dal_context.build_default().await?; - let workspace = Workspace::builtin(&ctx).await?; + let workspace = Workspace::builtin(&mut ctx).await?; ctx.update_tenancy(Tenancy::new(*workspace.pk())); - ctx.blocking_commit().await?; + ctx.update_to_head(); + ctx.update_snapshot_to_visibility().await?; builtins::migrate_local(&ctx, selected_test_builtin_schemas).await?; diff --git a/lib/dal/src/migrations/U0004__standard_model.sql b/lib/dal/src/migrations/U0004__standard_model.sql index 556301a7f2..43baa7f9fb 100644 --- 
a/lib/dal/src/migrations/U0004__standard_model.sql +++ b/lib/dal/src/migrations/U0004__standard_model.sql @@ -1181,7 +1181,7 @@ BEGIN END; $$ LANGUAGE plpgsql VOLATILE; -CREATE OR REPLACE FUNCTION import_builtins_v1(destination_tenancy jsonb) +CREATE OR REPLACE FUNCTION import_builtins_v1(destination_tenancy jsonb, source_workspace_pk ident) RETURNS VOID AS $$ DECLARE @@ -1189,7 +1189,6 @@ DECLARE destination_tenancy_record tenancy_record_v1; this_table_name regclass; insert_column_names text; - source_workspace_pk ident; BEGIN destination_tenancy_record = tenancy_json_to_columns_v1(destination_tenancy); FOR standard_model IN SELECT * FROM standard_models @@ -1203,10 +1202,6 @@ BEGIN AND information_schema.columns.is_generated = 'NEVER' INTO insert_column_names; - SELECT (object ->> 'pk')::ident - INTO source_workspace_pk - FROM workspace_find_or_create_builtin_v1(); - -- No history events for this update EXECUTE format('INSERT INTO %1$I (tenancy_workspace_pk, visibility_change_set_pk, diff --git a/lib/dal/src/migrations/U0030__workspaces.sql b/lib/dal/src/migrations/U0030__workspaces.sql index 62618ae395..175f12b1c6 100644 --- a/lib/dal/src/migrations/U0030__workspaces.sql +++ b/lib/dal/src/migrations/U0030__workspaces.sql @@ -1,35 +1,12 @@ CREATE TABLE workspaces ( - pk ident primary key default ident_create_v1(), - visibility_deleted_at timestamp with time zone, - created_at timestamp with time zone NOT NULL DEFAULT CLOCK_TIMESTAMP(), - updated_at timestamp with time zone NOT NULL DEFAULT CLOCK_TIMESTAMP(), - name text NOT NULL + pk ident primary key DEFAULT ident_create_v1(), + visibility_deleted_at timestamp with time zone, + created_at timestamp with time zone NOT NULL DEFAULT CLOCK_TIMESTAMP(), + updated_at timestamp with time zone NOT NULL DEFAULT CLOCK_TIMESTAMP(), + name text NOT NULL, + default_change_set_id ident NOT NULL + -- TODO(nick): add "REFERENCES change_set_pointers (id)" to column type ); CREATE UNIQUE INDEX ON workspaces (pk); CREATE INDEX ON 
workspaces (visibility_deleted_at NULLS FIRST); - -CREATE OR REPLACE FUNCTION workspace_create_v1( - this_pk ident, - this_name text, - OUT object json) AS -$$ -DECLARE - this_new_row workspaces%ROWTYPE; -BEGIN - - INSERT INTO workspaces (pk, name) - VALUES (this_pk, this_name) - RETURNING * INTO this_new_row; - - object := row_to_json(this_new_row); -END; -$$ LANGUAGE PLPGSQL VOLATILE; - -CREATE OR REPLACE FUNCTION workspace_find_or_create_builtin_v1(OUT object json) AS -$$ -BEGIN - INSERT INTO workspaces (pk, name) VALUES (ident_nil_v1(), 'builtin') ON CONFLICT (pk) DO NOTHING; - SELECT row_to_json(workspaces.*) INTO STRICT object FROM workspaces WHERE pk = ident_nil_v1(); -END; -$$ LANGUAGE PLPGSQL VOLATILE; diff --git a/lib/dal/src/migrations/U0072__action_prototype.sql b/lib/dal/src/migrations/U0072__action_prototype.sql index 2ecf9299a3..d2bb1a252f 100644 --- a/lib/dal/src/migrations/U0072__action_prototype.sql +++ b/lib/dal/src/migrations/U0072__action_prototype.sql @@ -49,4 +49,4 @@ BEGIN object := row_to_json(this_new_row); END; -$$ LANGUAGE PLPGSQL VOLATILE; +$$ LANGUAGE PLPGSQL VOLATILE; \ No newline at end of file diff --git a/lib/dal/src/migrations/U3000__workspace_snapshots.sql b/lib/dal/src/migrations/U3000__workspace_snapshots.sql new file mode 100644 index 0000000000..33a89c2a4b --- /dev/null +++ b/lib/dal/src/migrations/U3000__workspace_snapshots.sql @@ -0,0 +1,10 @@ +CREATE TABLE workspace_snapshots +( + id ident primary key NOT NULL DEFAULT ident_create_v1(), + created_at timestamp with time zone NOT NULL DEFAULT CLOCK_TIMESTAMP(), + snapshot bytea NOT NULL + -- TODO(nick): add once workspaces are added + -- workspace_id ident REFERENCES workspaces_v2 (id) NOT NULL, + -- TODO(nick): replace the existing primary key with this once workspaces are added + -- primary key (id, workspace_id) +); \ No newline at end of file diff --git a/lib/dal/src/migrations/U3001__change_set_pointers.sql b/lib/dal/src/migrations/U3001__change_set_pointers.sql new 
file mode 100644 index 0000000000..e225e3b12f --- /dev/null +++ b/lib/dal/src/migrations/U3001__change_set_pointers.sql @@ -0,0 +1,12 @@ +CREATE TABLE change_set_pointers +( + id ident primary key NOT NULL DEFAULT ident_create_v1(), + created_at timestamp with time zone NOT NULL DEFAULT CLOCK_TIMESTAMP(), + updated_at timestamp with time zone NOT NULL DEFAULT CLOCK_TIMESTAMP(), + name text NOT NULL, + base_change_set_id ident, + status text NOT NULL, + + workspace_id ident REFERENCES workspaces (pk) DEFERRABLE, + workspace_snapshot_id ident REFERENCES workspace_snapshots (id) +); diff --git a/lib/dal/src/migrations/U3010__modify_func_bindings.sql b/lib/dal/src/migrations/U3010__modify_func_bindings.sql new file mode 100644 index 0000000000..3e5f89b891 --- /dev/null +++ b/lib/dal/src/migrations/U3010__modify_func_bindings.sql @@ -0,0 +1,47 @@ +--- we no longer use a sha256 hash on a function's code, blake3 is used instead +ALTER TABLE func_bindings + ADD COLUMN func_id ident, + ADD COLUMN code_blake3 TEXT, + DROP COLUMN code_sha256; + +DROP TABLE func_binding_belongs_to_func CASCADE; +DROP FUNCTION func_binding_create_v1; + +CREATE OR REPLACE FUNCTION func_binding_create_v2( + this_tenancy jsonb, + this_visibility jsonb, + this_args json, + this_func_id ident, + this_backend_kind text, + this_code_blake3 text, + OUT object json) AS +$$ +DECLARE + this_tenancy_record tenancy_record_v1; + this_visibility_record visibility_record_v1; + this_new_row func_bindings%ROWTYPE; +BEGIN + this_tenancy_record := tenancy_json_to_columns_v1(this_tenancy); + this_visibility_record := visibility_json_to_columns_v1(this_visibility); + + INSERT INTO func_bindings ( + tenancy_workspace_pk, + visibility_change_set_pk, + args, + backend_kind, + code_blake3, + func_id + ) + VALUES (this_tenancy_record.tenancy_workspace_pk, + this_visibility_record.visibility_change_set_pk, + this_args, + this_backend_kind, + COALESCE(this_code_blake3, '0'), + this_func_id + ) + RETURNING * INTO 
this_new_row; + + object := row_to_json(this_new_row); +END; +$$ LANGUAGE PLPGSQL VOLATILE; + diff --git a/lib/dal/src/node.rs b/lib/dal/src/node.rs index 08e4b8197a..6069f04cb4 100644 --- a/lib/dal/src/node.rs +++ b/lib/dal/src/node.rs @@ -192,13 +192,13 @@ impl Node { let total_start = std::time::Instant::now(); let ctx_with_deleted = &ctx.clone_with_delete_visibility(); - // Gather all nodes with at least one edge. - let mut edges = Edge::list_for_kind(ctx_with_deleted, EdgeKind::Configuration) - .await - .map_err(|e| NodeError::Edge(e.to_string()))?; - if shuffle_edges { - edges.shuffle(&mut thread_rng()); - } + // // Gather all nodes with at least one edge. + // let mut edges = Edge::list_for_kind(ctx_with_deleted, EdgeKind::Configuration) + // .await + // .map_err(|e| NodeError::Edge(e.to_string()))?; + // if shuffle_edges { + // edges.shuffle(&mut thread_rng()); + // } // Populate the nodes map based on all configuration edges. The "key" is every node with at // least one edge. The "value" is a set of nodes that the "key" node depends on (i.e. 
the diff --git a/lib/dal/src/node_menu.rs b/lib/dal/src/node_menu.rs index e818108c7d..57241af3e1 100644 --- a/lib/dal/src/node_menu.rs +++ b/lib/dal/src/node_menu.rs @@ -4,13 +4,12 @@ use serde::{Deserialize, Serialize}; use si_data_pg::PgError; -use std::cell::RefCell; -use std::rc::Rc; +use std::collections::HashMap; use thiserror::Error; -use crate::schema::SchemaUiMenu; -use crate::DalContext; -use crate::{SchemaError, SchemaId, StandardModel, StandardModelError}; +use crate::schema::variant::SchemaVariantError; +use crate::{DalContext, Schema, SchemaVariant}; +use crate::{SchemaError, SchemaId, StandardModelError}; #[allow(clippy::large_enum_variant)] #[remain::sorted] @@ -24,6 +23,8 @@ pub enum NodeMenuError { Pg(#[from] PgError), #[error("schema error: {0}")] Schema(#[from] SchemaError), + #[error("schema variant error: {0}")] + SchemaVariant(#[from] SchemaVariantError), #[error("error serializing/deserializing json: {0}")] SerdeJson(#[from] serde_json::Error), #[error("standard model: {0}")] @@ -35,27 +36,18 @@ pub type NodeMenuResult = Result; #[derive(Clone, Debug, Deserialize, Serialize)] pub struct Category { pub name: String, - // Whoa! I'm pretty sure this can be refactored to be - // less intense. But it's working, and I'm a tired of looking - // at it. So lets take care of that in a refactor the next - // time we find a problem with this code, eh? 
- // - // Love, - // Adam - pub items: Rc>>>>, + pub items: Vec, } impl Category { pub fn new(name: impl Into) -> Self { let name = name.into(); - let items = Rc::new(RefCell::new(Vec::new())); + let items = Vec::new(); Category { name, items } } - pub fn push(&self, menu_item: MenuItem) { - self.items - .borrow_mut() - .push(Rc::new(RefCell::new(menu_item))); + pub fn push(&mut self, menu_item: MenuItem) { + self.items.push(menu_item); } } @@ -104,217 +96,49 @@ impl MenuItem { } } -#[derive(Clone, Debug, Deserialize, Serialize)] -pub struct MenuItems { - // Same thing here - we probably need some of these, but - // likely not all of them? -- Adam - list: Rc>>>>, -} - -impl Default for MenuItems { - fn default() -> Self { - Self::new() - } -} - -impl MenuItems { - pub fn new() -> Self { - MenuItems { - list: Rc::new(RefCell::new(Vec::new())), - } - } - - /// Search the list of menu items for the path given, and return the menu item - /// when it is found. - pub fn item_for_path(&self, path: &[String]) -> NodeMenuResult>> { - let mut current_list = self.list.clone(); - let final_path_index = if path.is_empty() { 0 } else { path.len() - 1 }; - for (path_idx, path_part) in path.iter().enumerate() { - let ref_list = current_list.clone(); - if let Some(menu_item) = ref_list - .borrow() - .iter() - .find(|i| i.borrow().name() == *path_part) - { - if path_idx == final_path_index { - return Ok(menu_item.clone()); - } else { - current_list = menu_item.clone().borrow().inner_category()?.items.clone(); - } - } else { - return Err(NodeMenuError::PathDoesNotExist(path.join("."))); - }; // <- ensures the borrow above doesn't live too long - } - Err(NodeMenuError::PathDoesNotExist(path.join("."))) - } - - /// Insert a new menu item into the list, creating any categories that might not exist along - /// the way, and eventually adding the `menu_item` to the list for the correct categories. 
- pub fn insert_menu_item(&self, path: &[String], menu_item: MenuItem) -> NodeMenuResult<()> { - let fallback_path = vec![menu_item.name().to_string()]; - let path_to_check = if path.is_empty() { - path - } else { - &fallback_path - }; - match self.item_for_path(path_to_check) { - Ok(parent) => { - let pb = parent.borrow(); - let f = pb.inner_category()?; - f.push(menu_item); - } - Err(NodeMenuError::PathDoesNotExist(_)) => { - if path.is_empty() { - self.list - .borrow_mut() - .push(Rc::new(RefCell::new(menu_item))); - } else { - let mut insert_into = self.list.clone(); - for (path_idx, path_part) in path.iter().enumerate() { - match self.item_for_path(&path[0..=path_idx]) { - Ok(parent) => { - insert_into = parent.borrow().inner_category()?.items.clone(); - } - _ => { - let new_category = - Rc::new(RefCell::new(MenuItem::category(path_part.clone()))); - insert_into.borrow_mut().push(new_category.clone()); - insert_into = new_category.borrow().inner_category()?.items.clone(); - } - } - } - insert_into - .borrow_mut() - .push(Rc::new(RefCell::new(menu_item))); - } - } - _ => {} - } - Ok(()) - } - - pub fn list(&self) -> Rc>>>> { - self.list.clone() - } - - pub fn to_json_value(&self) -> NodeMenuResult { - Ok(serde_json::to_value(self.list.clone())?) - } -} - /// Used to generate a [`serde_json::Value`] of menu items. #[derive(Deserialize, Serialize, Debug)] pub struct GenerateMenuItem { - pub raw_items: Vec<(Vec, Item)>, - menu_items: MenuItems, + menu_items: Vec, } impl GenerateMenuItem { /// Generates raw items and initializes menu items as an empty vec. - pub async fn new(ctx: &DalContext, include_ui_hidden: bool) -> NodeMenuResult { - let mut item_list = Vec::new(); - - // NOTE(nick): currently, we only generate ui menus for schemas. - let mut ui_menus = SchemaUiMenu::list(ctx).await?; - - // Ensure the names and categories are alphabetically sorted. 
- ui_menus.sort_by(|a, b| a.name().cmp(b.name())); - ui_menus.sort_by(|a, b| a.category().cmp(b.category())); - - for ui_menu in ui_menus.into_iter() { - if let Some(schema) = ui_menu.schema(ctx).await? { - if !include_ui_hidden && schema.ui_hidden() { - continue; - } - item_list.push(( - ui_menu.category_path(), - Item::new(ui_menu.name(), *schema.id()), - )); + pub async fn new(ctx: &DalContext, _include_ui_hidden: bool) -> NodeMenuResult { + let mut item_map: HashMap> = HashMap::new(); + + for schema in Schema::list(ctx).await? { + for variant in SchemaVariant::list_for_schema(ctx, schema.id()).await? { + let category = variant.category().to_owned(); + + let item = MenuItem::Item(Item { + name: schema.name().to_owned(), + schema_id: schema.id(), + }); + + item_map + .entry(category) + .and_modify(|items| items.push(item.to_owned())) + .or_insert(vec![item]); } } - Ok(Self { - raw_items: item_list, - menu_items: MenuItems::new(), - }) + let mut menu_items: Vec = item_map + .into_iter() + .map(|(name, mut items)| { + items.sort_by_key(|item| item.name().to_owned()); + MenuItem::Category(Category { name, items }) + }) + .collect(); + + menu_items.sort_by_key(|item| item.name().to_owned()); + + Ok(Self { menu_items }) } /// Create a usable [`serde_json::Value`] from the raw menu items assembled from /// [`Self::new()`]. 
pub fn create_menu_json(self) -> NodeMenuResult { - for (path, item) in self.raw_items { - self.menu_items - .insert_menu_item(&path, MenuItem::Item(item))?; - } - self.menu_items.to_json_value() - } -} - -#[cfg(test)] -mod test { - use super::*; - - #[test] - fn menu_item_for_top_level_path() { - let menu_items = MenuItems::new(); - menu_items - .insert_menu_item(&Vec::new(), MenuItem::category("valorant")) - .expect("cannot insert menu item"); - let item = menu_items - .item_for_path(&["valorant".to_string()]) - .expect("cannot find valorant in menu"); - assert_eq!(item.borrow().name(), "valorant"); - } - - #[test] - fn nested_menu_items_for_top_level_path() { - let menu_items = MenuItems::new(); - menu_items - .insert_menu_item( - &["planes".to_string(), "snakes".to_string()], - MenuItem::item("ninjas", SchemaId::generate()), - ) - .expect("cannot insert menu item"); - let item = menu_items - .item_for_path(&[ - "planes".to_string(), - "snakes".to_string(), - "ninjas".to_string(), - ]) - .expect("cannot find planes.snakes in menu"); - assert_eq!(item.borrow().name(), "ninjas".to_string()); - } - - #[test] - fn multiple_nested_menu_items_for_top_level_path() { - let menu_items = MenuItems::new(); - menu_items - .insert_menu_item( - &["planes".to_string(), "snakes".to_string()], - MenuItem::item("ninjas", SchemaId::generate()), - ) - .expect("cannot insert menu item"); - menu_items - .insert_menu_item( - &["planes".to_string(), "snakes".to_string()], - MenuItem::item("dragons", SchemaId::generate()), - ) - .expect("cannot insert menu item"); - let ninjas = menu_items - .item_for_path(&[ - "planes".to_string(), - "snakes".to_string(), - "ninjas".to_string(), - ]) - .expect("cannot find planes.snakes in menu"); - assert_eq!(ninjas.borrow().name(), "ninjas".to_string()); - let dragons = menu_items - .item_for_path(&[ - "planes".to_string(), - "snakes".to_string(), - "dragons".to_string(), - ]) - .expect("cannot find planes.snakes in menu"); - 
assert_eq!(dragons.borrow().name(), "dragons".to_string()); + Ok(serde_json::to_value(self.menu_items.clone())?) } } diff --git a/lib/dal/src/pkg.rs b/lib/dal/src/pkg.rs index 89e1f567c4..56212349b1 100644 --- a/lib/dal/src/pkg.rs +++ b/lib/dal/src/pkg.rs @@ -1,197 +1,156 @@ -use std::collections::HashMap; - -use chrono::{DateTime, Utc}; use serde::{Deserialize, Serialize}; -use thiserror::Error; -use url::ParseError; - -pub use export::{get_component_type, PkgExporter}; -pub use import::{ - attach_resource_payload_to_value, import_pkg, import_pkg_from_pkg, ImportAttributeSkip, - ImportEdgeSkip, ImportOptions, ImportSkips, -}; use si_pkg::{FuncSpecBackendKind, FuncSpecBackendResponseType, SiPkgError, SpecError}; +use std::collections::HashMap; +use thiserror::Error; -use crate::authentication_prototype::AuthenticationPrototypeError; -use crate::component::migrate::ComponentMigrateError; -use crate::diagram::summary_diagram::SummaryDiagramError; -use crate::property_editor::values_summary::PropertyEditorValuesSummaryError; +use crate::action_prototype::ActionPrototypeError; +use crate::attribute::prototype::argument::AttributePrototypeArgumentError; +use crate::attribute::prototype::AttributePrototypeError; +use crate::schema::variant::SchemaVariantError; use crate::{ - component::view::debug::ComponentDebugViewError, - func::{ - argument::{FuncArgumentError, FuncArgumentId}, - binding::FuncBindingError, - }, + change_set_pointer::ChangeSetPointerError, + func::{argument::FuncArgumentError, FuncError}, installed_pkg::InstalledPkgError, - prop_tree::PropTreeError, - schema::variant::definition::{SchemaVariantDefinitionError, SchemaVariantDefinitionId}, - socket::{SocketEdgeKind, SocketError}, - ActionPrototypeError, AttributeContextBuilderError, AttributePrototypeArgumentError, - AttributePrototypeArgumentId, AttributePrototypeError, AttributePrototypeId, - AttributeReadContext, AttributeValueError, ChangeSetError, ChangeSetPk, ComponentError, - ComponentId, 
DalContext, EdgeError, ExternalProviderError, ExternalProviderId, FuncBackendKind, - FuncBackendResponseType, FuncBindingReturnValueError, FuncError, FuncId, InternalProviderError, - InternalProviderId, NodeError, PropError, PropId, PropKind, SchemaError, SchemaId, - SchemaVariantError, SchemaVariantId, StandardModelError, UserPk, WorkspaceError, WorkspacePk, - WsEvent, WsEventResult, WsPayload, + prop::PropError, + provider::external::ExternalProviderError, + provider::internal::InternalProviderError, + workspace_snapshot::WorkspaceSnapshotError, + ChangeSetPk, ExternalProviderId, FuncBackendKind, FuncBackendResponseType, SchemaError, + SchemaVariantId, }; +use crate::{FuncId, PropId, PropKind}; + +pub use import::{import_pkg, import_pkg_from_pkg, ImportOptions}; -pub mod export; -pub mod import; +mod import; + +// mod export; #[remain::sorted] #[derive(Debug, Error)] pub enum PkgError { - #[error("Action creation error: {0}")] - Action(#[from] ActionPrototypeError), - #[error(transparent)] - AttributeContextBuilder(#[from] AttributeContextBuilderError), + #[error("action prototype error: {0}")] + ActionPrototype(#[from] ActionPrototypeError), + // #[error(transparent)] + // AttributeContextBuilder(#[from] AttributeContextBuilderError), #[error("attribute function for context {0:?} has key {1} but is not setting a prop value")] - AttributeFuncForKeyMissingProp(AttributeReadContext, String), + AttributeFuncForKeyMissingProp(import::AttrFuncContext, String), #[error("attribute function for prop {0} has a key {1} but prop kind is {2} not a map)")] AttributeFuncForKeySetOnWrongKind(PropId, String, PropKind), #[error(transparent)] AttributePrototype(#[from] AttributePrototypeError), - #[error(transparent)] + #[error("attrbute prototype argument error: {0}")] AttributePrototypeArgument(#[from] AttributePrototypeArgumentError), - #[error("Missing ExternalProvider {1} for AttributePrototypeArgument {1}")] - AttributePrototypeArgumentMissingExternalProvider( - 
AttributePrototypeArgumentId, - ExternalProviderId, - ), - #[error("AttributePrototypeArgument {0} missing FuncArgument {1}")] - AttributePrototypeArgumentMissingFuncArgument(AttributePrototypeArgumentId, FuncArgumentId), - #[error("Missing InternalProvider {1} for AttributePrototypeArgument {1}")] - AttributePrototypeArgumentMissingInternalProvider( - AttributePrototypeArgumentId, - InternalProviderId, - ), - #[error(transparent)] - AttributeValue(#[from] AttributeValueError), - #[error("parent prop could not be found with path: {0}")] - AttributeValueParentPropNotFound(String), - #[error("parent value could not be found for prop path: {0} and key {1:?}, index {2:?}")] - AttributeValueParentValueNotFound(String, Option, Option), - #[error("attribute value is a proxy but there is no value to proxy")] - AttributeValueSetToProxyButNoProxyFound, - #[error("encountered an attribute value with a key or index but no parent")] - AttributeValueWithKeyOrIndexButNoParent, - #[error("Auth func creation error: {0}")] - AuthFunc(#[from] AuthenticationPrototypeError), + // #[error("Missing ExternalProvider {1} for AttributePrototypeArgument {1}")] + // AttributePrototypeArgumentMissingExternalProvider( + // AttributePrototypeArgumentId, + // ExternalProviderId, + // ), + // #[error("AttributePrototypeArgument {0} missing FuncArgument {1}")] + // AttributePrototypeArgumentMissingFuncArgument(AttributePrototypeArgumentId, FuncArgumentId), + // #[error("Missing InternalProvider {1} for AttributePrototypeArgument {1}")] + // AttributePrototypeArgumentMissingInternalProvider( + // AttributePrototypeArgumentId, + // InternalProviderId, + // ), + // #[error(transparent)] + // AttributeValue(#[from] AttributeValueError), + // #[error("parent prop could not be found with path: {0}")] + // AttributeValueParentPropNotFound(String), + // #[error("parent value could not be found for prop path: {0} and key {1:?}, index {2:?}")] + // AttributeValueParentValueNotFound(String, Option, Option), + 
// #[error("attribute value is a proxy but there is no value to proxy")] + // AttributeValueSetToProxyButNoProxyFound, + // #[error("encountered an attribute value with a key or index but no parent")] + // AttributeValueWithKeyOrIndexButNoParent, + // #[error(transparent)] + // ChangeSet(#[from] ChangeSetError), + // #[error("change set {0} not found")] + // ChangeSetNotFound(ChangeSetPk), #[error(transparent)] - ChangeSet(#[from] ChangeSetError), - #[error("change set {0} not found")] - ChangeSetNotFound(ChangeSetPk), - #[error(transparent)] - Component(#[from] ComponentError), - #[error(transparent)] - ComponentDebugView(#[from] ComponentDebugViewError), - #[error("component import can only happen during a workspace import")] - ComponentImportWithoutChangeSet, - #[error("component migration error: {0}")] - ComponentMigrate(#[from] ComponentMigrateError), - #[error("could not find schema {0} for package component {1}")] - ComponentMissingBuiltinSchema(String, String), - #[error("could not find schema {0} with variant {1} for package component {2}")] - ComponentMissingBuiltinSchemaVariant(String, String, String), - #[error("component has no node: {0}")] - ComponentMissingNode(ComponentId), - #[error("could not find schema variant {0} for package component {1}")] - ComponentMissingSchemaVariant(String, String), - #[error("could not update find schema {0} with variant {1} for package component {2}")] - ComponentMissingUpdateSchemaVariant(String, String, String), - #[error("component spec has no position")] - ComponentSpecMissingPosition, - #[error("map item prop {0} has both custom key prototypes and custom prop only prototype")] - ConflictingMapKeyPrototypes(PropId), + ChangeSetPointer(#[from] ChangeSetPointerError), #[error("expected data on an SiPkg node, but none found: {0}")] DataNotFound(String), - #[error(transparent)] - Edge(#[from] EdgeError), - #[error("edge refers to component not in export: {0}")] - EdgeRefersToMissingComponent(ComponentId), - 
#[error("Cannot find Socket for explicit InternalProvider {0}")] - ExplicitInternalProviderMissingSocket(InternalProviderId), + // #[error(transparent)] + // Edge(#[from] EdgeError), + // #[error("edge refers to component not in export: {0}")] + // EdgeRefersToMissingComponent(ComponentId), + // #[error("Cannot find Socket for explicit InternalProvider {0}")] + // ExplicitInternalProviderMissingSocket(InternalProviderId), #[error(transparent)] ExternalProvider(#[from] ExternalProviderError), - #[error("Cannot find Socket for ExternalProvider {0}")] - ExternalProviderMissingSocket(ExternalProviderId), + #[error("external provider {0} missing attribute prototype")] + ExternalProviderMissingPrototype(ExternalProviderId), + // #[error("Cannot find Socket for ExternalProvider {0}")] + // ExternalProviderMissingSocket(ExternalProviderId), #[error(transparent)] Func(#[from] FuncError), #[error(transparent)] FuncArgument(#[from] FuncArgumentError), - #[error(transparent)] - FuncBinding(#[from] FuncBindingError), - #[error(transparent)] - FuncBindingReturnValue(#[from] FuncBindingReturnValueError), - #[error(transparent)] - FuncExecution(#[from] crate::func::execution::FuncExecutionError), - #[error("Installed func id {0} does not exist")] - InstalledFuncMissing(FuncId), + #[error("func argument for {0} not found with name {1}")] + FuncArgumentNotFoundByName(FuncId, String), + #[error("func {0} could not be found by name")] + FuncNotFoundByName(String), + // #[error(transparent)] + // FuncBinding(#[from] FuncBindingError), + // #[error(transparent)] + // FuncBindingReturnValue(#[from] FuncBindingReturnValueError), + // #[error(transparent)] + // FuncExecution(#[from] crate::func::execution::FuncExecutionError), + // #[error("Installed func id {0} does not exist")] + // InstalledFuncMissing(FuncId), #[error(transparent)] InstalledPkg(#[from] InstalledPkgError), - #[error("Installed schema id {0} does not exist")] - InstalledSchemaMissing(SchemaId), - #[error("Installed 
schema variant definition {0} does not exist")] - InstalledSchemaVariantDefinitionMissing(SchemaVariantDefinitionId), - #[error("Installed schema variant {0} does not exist")] - InstalledSchemaVariantMissing(SchemaVariantId), + // #[error("Installed schema variant definition {0} does not exist")] + // InstalledSchemaVariantDefinitionMissing(SchemaVariantDefinitionId), + // #[error("Installed schema variant {0} does not exist")] + // InstalledSchemaVariantMissing(SchemaVariantId), #[error(transparent)] InternalProvider(#[from] InternalProviderError), - #[error("Missing Prop {1} for InternalProvider {1}")] - InternalProviderMissingProp(InternalProviderId, PropId), - #[error("Leaf Function {0} has invalid argument {1}")] - InvalidLeafArgument(FuncId, String), - #[error("json pointer {1} not found in {0:?}")] - JsonPointerNotFound(serde_json::Value, String), - #[error("json value is not an object: {0:?}")] - JsonValueIsNotAnObject(serde_json::Value), - #[error("Missing AttributePrototype {0} for explicit InternalProvider {1}")] - MissingAttributePrototypeForInputSocket(AttributePrototypeId, InternalProviderId), - #[error("Missing AttributePrototype {0} for ExternalProvider {1}")] - MissingAttributePrototypeForOutputSocket(AttributePrototypeId, ExternalProviderId), - #[error("Missing Func {1} for AttributePrototype {0}")] - MissingAttributePrototypeFunc(AttributePrototypeId, FuncId), - #[error("Missing value for context {0:?}")] - MissingAttributeValueForContext(AttributeReadContext), - #[error("Missing builtin schema {0}")] - MissingBuiltinSchema(String), - #[error("Missing builtin schema variant {0}")] - MissingBuiltinSchemaVariant(String), - #[error("Missing a func map for changeset {0}")] - MissingChangeSetFuncMap(ChangeSetPk), - #[error("Missing component {0} for edge from {1} to {2}")] - MissingComponentForEdge(String, String, String), - #[error("Func {0} missing from exported funcs")] - MissingExportedFunc(FuncId), + #[error("InternalProvider not found for prop 
{0}")] + InternalProviderNotFoundForProp(PropId), + // #[error("Leaf Function {0} has invalid argument {1}")] + // InvalidLeafArgument(FuncId, String), + // #[error("Missing AttributePrototype {0} for explicit InternalProvider {1}")] + // MissingAttributePrototypeForInputSocket(AttributePrototypeId, InternalProviderId), + // #[error("Missing AttributePrototype {0} for ExternalProvider {1}")] + // MissingAttributePrototypeForOutputSocket(AttributePrototypeId, ExternalProviderId), + // #[error("Missing Func {1} for AttributePrototype {0}")] + // MissingAttributePrototypeFunc(AttributePrototypeId, FuncId), + // #[error("Missing value for context {0:?}")] + // MissingAttributeValueForContext(AttributeReadContext), + // #[error("Missing a func map for changeset {0}")] + // MissingChangeSetFuncMap(ChangeSetPk), + // #[error("Missing component {0} for edge from {1} to {2}")] + // MissingComponentForEdge(String, String, String), + // #[error("Func {0} missing from exported funcs")] + // MissingExportedFunc(FuncId), #[error("Cannot find FuncArgument {0} for Func {1}")] MissingFuncArgument(String, FuncId), - #[error("Cannot find FuncArgument {0}")] - MissingFuncArgumentById(FuncArgumentId), + // #[error("Cannot find FuncArgument {0}")] + // MissingFuncArgumentById(FuncArgumentId), #[error("Package asked for a function with the unique id {0} but none could be found")] MissingFuncUniqueId(String), - #[error("Cannot find InternalProvider for Prop {0}")] - MissingInternalProviderForProp(PropId), + #[error("Cannot find InternalProvider for Prop {0} ({1})")] + MissingInternalProviderForProp(PropId, String), #[error("Cannot find InternalProvider for Socket named {0}")] MissingInternalProviderForSocketName(String), - #[error("Intrinsic function {0} not found")] - MissingIntrinsicFunc(String), - #[error("Intrinsic function (0) argument {1} not found")] - MissingIntrinsicFuncArgument(String, String), - #[error("Cannot find item prop for installed map prop {0}")] - 
MissingItemPropForMapProp(PropId), - #[error("Cannot find installed prop {0}")] - MissingProp(PropId), - #[error("Cannot find root prop for variant {0}")] - MissingRootProp(SchemaVariantId), - #[error("Cannot find schema_variant_definition {0}")] - MissingSchemaVariantDefinition(SchemaVariantId), - #[error("Cannot find socket with name {0} for edge kind {1}")] - MissingSocketName(String, SocketEdgeKind), + // #[error("Intrinsic function {0} not found")] + // MissingIntrinsicFunc(String), + // #[error("Intrinsic function (0) argument {1} not found")] + // MissingIntrinsicFuncArgument(String, String), + // #[error("Cannot find item prop for installed map prop {0}")] + // MissingItemPropForMapProp(PropId), + // #[error("Cannot find installed prop {0}")] + // MissingProp(PropId), + // #[error("Cannot find root prop for variant {0}")] + // MissingRootProp(SchemaVariantId), + // #[error("Cannot find schema_variant_definition {0}")] + // MissingSchemaVariantDefinition(SchemaVariantId), + // #[error("Cannot find socket with name {0} for edge kind {1}")] + // MissingSocketName(String, SocketEdgeKind), #[error("Unique id missing for node in workspace backup: {0}")] MissingUniqueIdForNode(String), - #[error(transparent)] - Node(#[from] NodeError), #[error("Package with that hash already installed: {0}")] PackageAlreadyInstalled(String), #[error(transparent)] @@ -200,60 +159,58 @@ pub enum PkgError { PkgSpec(#[from] SpecError), #[error(transparent)] Prop(#[from] PropError), - #[error("property editor value summary error: {0}")] - PropertyEditorValuesSummary(#[from] PropertyEditorValuesSummaryError), - #[error("prop spec structure is invalid: {0}")] - PropSpecChildrenInvalid(String), - #[error(transparent)] - PropTree(#[from] PropTreeError), - #[error("prop tree structure is invalid: {0}")] - PropTreeInvalid(String), - #[error(transparent)] + #[error("prop {0} missing attribute prototype")] + PropMissingPrototype(PropId), + // #[error("prop spec structure is invalid: {0}")] + 
// PropSpecChildrenInvalid(String), + // #[error(transparent)] + // PropTree(#[from] PropTreeError), + // #[error("prop tree structure is invalid: {0}")] + // PropTreeInvalid(String), + #[error("schema error: {0}")] Schema(#[from] SchemaError), - #[error(transparent)] + #[error("schema variant error: {0}")] SchemaVariant(#[from] SchemaVariantError), - #[error(transparent)] - SchemaVariantDefinition(#[from] SchemaVariantDefinitionError), - #[error("schema variant not found: {0}")] - SchemaVariantNotFound(SchemaVariantId), + // #[error(transparent)] + // SchemaVariantDefinition(#[from] SchemaVariantDefinitionError), + // #[error("schema variant not found: {0}")] + // SchemaVariantNotFound(SchemaVariantId), #[error("json serialization error: {0}")] SerdeJson(#[from] serde_json::Error), + // #[error(transparent)] + // Socket(#[from] SocketError), + // #[error(transparent)] + // StandardModel(#[from] StandardModelError), + // #[error("standard model relationship {0} missing belongs_to for {1} with id {2}")] + // StandardModelMissingBelongsTo(&'static str, &'static str, String), + // #[error("standard model relationship {0} found multiple belongs_to for {1} with id {2}")] + // StandardModelMultipleBelongsTo(&'static str, &'static str, String), + // #[error(transparent)] + // UlidDecode(#[from] ulid::DecodeError), + // #[error(transparent)] + // UrlParse(#[from] ParseError), + // #[error(transparent)] + // Workspace(#[from] WorkspaceError), + // #[error("Cannot find default change set \"{0}\" in workspace backup")] + // WorkspaceBackupNoDefaultChangeSet(String), + // #[error("Workspace backup missing workspace name")] + // WorkspaceNameNotInBackup, + // #[error("Workspace not found: {0}")] + // WorkspaceNotFound(WorkspacePk), + // #[error("Workspace backup missing workspace pk")] + // WorkspacePkNotInBackup, #[error(transparent)] - Socket(#[from] SocketError), - #[error(transparent)] - StandardModel(#[from] StandardModelError), - #[error("standard model relationship {0} 
missing belongs_to for {1} with id {2}")] - StandardModelMissingBelongsTo(&'static str, &'static str, String), - #[error("standard model relationship {0} found multiple belongs_to for {1} with id {2}")] - StandardModelMultipleBelongsTo(&'static str, &'static str, String), - #[error("summary diagram error: {0}")] - SummaryDiagram(#[from] SummaryDiagramError), - #[error(transparent)] - UlidDecode(#[from] ulid::DecodeError), - #[error("unable to export component: {0}")] - UnableToExportComponent(ComponentId), - #[error(transparent)] - UrlParse(#[from] ParseError), - #[error(transparent)] - Workspace(#[from] WorkspaceError), - #[error("Cannot find default change set \"{0}\" in workspace backup")] - WorkspaceBackupNoDefaultChangeSet(String), - #[error("Workspace backup missing workspace name")] - WorkspaceNameNotInBackup, - #[error("Workspace not found: {0}")] - WorkspaceNotFound(WorkspacePk), - #[error("Workspace backup missing workspace pk")] - WorkspacePkNotInBackup, + WorkspaceSnaphot(#[from] WorkspaceSnapshotError), } impl PkgError { - fn prop_tree_invalid(message: impl Into) -> Self { - Self::PropTreeInvalid(message.into()) - } + // fn prop_tree_invalid(message: impl Into) -> Self { + // Self::PropTreeInvalid(message.into()) + // } - fn prop_spec_children_invalid(message: impl Into) -> Self { - Self::PropSpecChildrenInvalid(message.into()) - } + // fn prop_spec_children_invalid(message: impl Into) -> Self { + // Self::PropSpecChildrenInvalid(message.into()) + // } } pub type PkgResult = Result; @@ -353,7 +310,6 @@ impl From for FuncBackendResponseType { /// A generic hash map of hash maps for tracking the presence of a thing in each change set. If a /// thing is asked for in a specific change set, and not found, the NONE change set will be /// checked. 
-#[derive(Debug)] pub struct ChangeSetThingMap(HashMap>); impl ChangeSetThingMap @@ -369,12 +325,8 @@ where Self(change_set_map) } - pub fn get_change_set_map(&self, change_set_pk: ChangeSetPk) -> Option<&HashMap> { - self.0.get(&change_set_pk) - } - - pub fn get(&self, change_set_pk: ChangeSetPk, key: &Key) -> Option<&Thing> { - match self.0.get(&change_set_pk) { + pub fn get(&self, change_set_pk: Option, key: &Key) -> Option<&Thing> { + match self.0.get(&change_set_pk.unwrap_or(ChangeSetPk::NONE)) { Some(change_set_map) => change_set_map.get(key).or_else(|| { self.0 .get(&ChangeSetPk::NONE) @@ -387,8 +339,16 @@ where } } - pub fn insert(&mut self, change_set_pk: ChangeSetPk, key: Key, thing: Thing) -> Option { - self.0.entry(change_set_pk).or_default().insert(key, thing) + pub fn insert( + &mut self, + change_set_pk: Option, + key: Key, + thing: Thing, + ) -> Option { + self.0 + .entry(change_set_pk.unwrap_or(ChangeSetPk::NONE)) + .or_default() + .insert(key, thing) } } @@ -407,137 +367,137 @@ pub struct ModuleImportedPayload { schema_variant_ids: Vec, } -#[derive(Clone, Deserialize, Serialize, Debug, PartialEq, Eq)] -#[serde(rename_all = "camelCase")] -pub struct WorkspaceImportPayload { - workspace_pk: Option, - user_pk: Option, -} - -#[derive(Clone, Deserialize, Serialize, Debug, PartialEq, Eq)] -#[serde(rename_all = "camelCase")] -pub struct WorkspaceExportPayload { - workspace_pk: Option, - user_pk: Option, -} - -#[derive(Clone, Deserialize, Serialize, Debug, PartialEq, Eq)] -#[serde(rename_all = "camelCase")] -pub struct ImportWorkspaceVotePayload { - workspace_pk: Option, - user_pk: UserPk, - vote: String, -} - -#[derive(Clone, Deserialize, Serialize, Debug, PartialEq, Eq)] -#[serde(rename_all = "camelCase")] -pub struct WorkspaceActorPayload { - workspace_pk: Option, - user_pk: Option, -} - -#[derive(Clone, Deserialize, Serialize, Debug, PartialEq, Eq)] -#[serde(rename_all = "camelCase")] -pub struct WorkspaceImportApprovalActorPayload { - workspace_pk: 
Option, - user_pk: Option, - created_at: DateTime, - created_by: String, - name: String, -} - -impl WsEvent { - pub async fn module_imported( - ctx: &DalContext, - schema_variant_ids: Vec, - ) -> WsEventResult { - WsEvent::new( - ctx, - WsPayload::ModuleImported(ModuleImportedPayload { schema_variant_ids }), - ) - .await - } - - pub async fn workspace_imported( - ctx: &DalContext, - workspace_pk: Option, - user_pk: Option, - ) -> WsEventResult { - WsEvent::new( - ctx, - WsPayload::WorkspaceImported(WorkspaceImportPayload { - workspace_pk, - user_pk, - }), - ) - .await - } - - pub async fn workspace_exported( - ctx: &DalContext, - workspace_pk: Option, - user_pk: Option, - ) -> WsEventResult { - WsEvent::new( - ctx, - WsPayload::WorkspaceExported(WorkspaceExportPayload { - workspace_pk, - user_pk, - }), - ) - .await - } - - pub async fn import_workspace_vote( - ctx: &DalContext, - workspace_pk: Option, - user_pk: UserPk, - vote: String, - ) -> WsEventResult { - WsEvent::new( - ctx, - WsPayload::ImportWorkspaceVote(ImportWorkspaceVotePayload { - workspace_pk, - user_pk, - vote, - }), - ) - .await - } - - pub async fn workspace_import_begin_approval_process( - ctx: &DalContext, - workspace_pk: Option, - user_pk: Option, - workspace_export_created_at: DateTime, - workspace_export_created_by: String, - workspace_export_name: String, - ) -> WsEventResult { - WsEvent::new( - ctx, - WsPayload::WorkspaceImportBeginApprovalProcess(WorkspaceImportApprovalActorPayload { - workspace_pk, - user_pk, - created_at: workspace_export_created_at, - created_by: workspace_export_created_by, - name: workspace_export_name, - }), - ) - .await - } - - pub async fn workspace_import_cancel_approval_process( - ctx: &DalContext, - workspace_pk: Option, - user_pk: Option, - ) -> WsEventResult { - WsEvent::new( - ctx, - WsPayload::WorkspaceImportCancelApprovalProcess(WorkspaceActorPayload { - workspace_pk, - user_pk, - }), - ) - .await - } -} +// #[derive(Clone, Deserialize, Serialize, Debug, 
PartialEq, Eq)] +// #[serde(rename_all = "camelCase")] +// pub struct WorkspaceImportPayload { +// workspace_pk: Option, +// user_pk: Option, +// } +// +// #[derive(Clone, Deserialize, Serialize, Debug, PartialEq, Eq)] +// #[serde(rename_all = "camelCase")] +// pub struct WorkspaceExportPayload { +// workspace_pk: Option, +// user_pk: Option, +// } +// +// #[derive(Clone, Deserialize, Serialize, Debug, PartialEq, Eq)] +// #[serde(rename_all = "camelCase")] +// pub struct ImportWorkspaceVotePayload { +// workspace_pk: Option, +// user_pk: UserPk, +// vote: String, +// } +// +// #[derive(Clone, Deserialize, Serialize, Debug, PartialEq, Eq)] +// #[serde(rename_all = "camelCase")] +// pub struct WorkspaceActorPayload { +// workspace_pk: Option, +// user_pk: Option, +// } +// +// #[derive(Clone, Deserialize, Serialize, Debug, PartialEq, Eq)] +// #[serde(rename_all = "camelCase")] +// pub struct WorkspaceImportApprovalActorPayload { +// workspace_pk: Option, +// user_pk: Option, +// created_at: DateTime, +// created_by: String, +// name: String, +// } +// +// impl WsEvent { +// pub async fn module_imported( +// ctx: &DalContext, +// schema_variant_ids: Vec, +// ) -> WsEventResult { +// WsEvent::new( +// ctx, +// WsPayload::ModuleImported(ModuleImportedPayload { schema_variant_ids }), +// ) +// .await +// } +// +// pub async fn workspace_imported( +// ctx: &DalContext, +// workspace_pk: Option, +// user_pk: Option, +// ) -> WsEventResult { +// WsEvent::new( +// ctx, +// WsPayload::WorkspaceImported(WorkspaceImportPayload { +// workspace_pk, +// user_pk, +// }), +// ) +// .await +// } +// +// pub async fn workspace_exported( +// ctx: &DalContext, +// workspace_pk: Option, +// user_pk: Option, +// ) -> WsEventResult { +// WsEvent::new( +// ctx, +// WsPayload::WorkspaceExported(WorkspaceExportPayload { +// workspace_pk, +// user_pk, +// }), +// ) +// .await +// } +// +// pub async fn import_workspace_vote( +// ctx: &DalContext, +// workspace_pk: Option, +// user_pk: UserPk, 
+// vote: String, +// ) -> WsEventResult { +// WsEvent::new( +// ctx, +// WsPayload::ImportWorkspaceVote(ImportWorkspaceVotePayload { +// workspace_pk, +// user_pk, +// vote, +// }), +// ) +// .await +// } +// +// pub async fn workspace_import_begin_approval_process( +// ctx: &DalContext, +// workspace_pk: Option, +// user_pk: Option, +// workspace_export_created_at: DateTime, +// workspace_export_created_by: String, +// workspace_export_name: String, +// ) -> WsEventResult { +// WsEvent::new( +// ctx, +// WsPayload::WorkspaceImportBeginApprovalProcess(WorkspaceImportApprovalActorPayload { +// workspace_pk, +// user_pk, +// created_at: workspace_export_created_at, +// created_by: workspace_export_created_by, +// name: workspace_export_name, +// }), +// ) +// .await +// } +// +// pub async fn workspace_import_cancel_approval_process( +// ctx: &DalContext, +// workspace_pk: Option, +// user_pk: Option, +// ) -> WsEventResult { +// WsEvent::new( +// ctx, +// WsPayload::WorkspaceImportCancelApprovalProcess(WorkspaceActorPayload { +// workspace_pk, +// user_pk, +// }), +// ) +// .await +// } +// } diff --git a/lib/dal/src/pkg/import.rs b/lib/dal/src/pkg/import.rs index fd283cc311..5092cc6164 100644 --- a/lib/dal/src/pkg/import.rs +++ b/lib/dal/src/pkg/import.rs @@ -1,68 +1,48 @@ -use std::{collections::HashMap, path::Path, str::FromStr}; - -use chrono::Utc; -use serde::{Deserialize, Serialize}; -use tokio::sync::Mutex; - use si_pkg::{ - AttributeValuePath, ComponentSpec, ComponentSpecVariant, EdgeSpec, EdgeSpecKind, - FuncArgumentSpec, FuncSpec, FuncSpecData, SchemaVariantSpecPropRoot, SiPkg, SiPkgActionFunc, - SiPkgAttrFuncInputView, SiPkgAuthFunc, SiPkgComponent, SiPkgEdge, SiPkgError, SiPkgFunc, - SiPkgKind, SiPkgLeafFunction, SiPkgMetadata, SiPkgProp, SiPkgPropData, SiPkgSchema, - SiPkgSchemaData, SiPkgSchemaVariant, SiPkgSocket, SiPkgSocketData, SocketSpecKind, + SchemaVariantSpecPropRoot, SiPkg, SiPkgActionFunc, SiPkgAttrFuncInputView, SiPkgAuthFunc, + 
SiPkgComponent, SiPkgEdge, SiPkgError, SiPkgFunc, SiPkgFuncArgument, SiPkgFuncData, SiPkgKind, + SiPkgLeafFunction, SiPkgMetadata, SiPkgProp, SiPkgPropData, SiPkgSchema, SiPkgSchemaData, + SiPkgSchemaVariant, SiPkgSocket, SiPkgSocketData, SocketSpecKind, }; +use std::{collections::HashMap, path::Path}; use telemetry::prelude::*; +use tokio::sync::Mutex; -use crate::{ - authentication_prototype::{AuthenticationPrototype, AuthenticationPrototypeContext}, - component, diagram, - property_editor::values_summary::PropertyEditorValuesSummary, +use crate::attribute::prototype::argument::{ + value_source::ValueSource, AttributePrototypeArgument, AttributePrototypeArgumentId, }; +use crate::authentication_prototype::{AuthenticationPrototype, AuthenticationPrototypeId}; +use crate::prop::PropParent; +use crate::{func::intrinsics::IntrinsicFunc, ComponentKind, ProviderKind}; use crate::{ - component::ComponentKind, - edge::EdgeKind, - func::{ - self, - argument::{FuncArgumentError, FuncArgumentKind}, - binding::FuncBinding, - binding_return_value::FuncBindingReturnValue, - }, + func::{self, argument::FuncArgument}, installed_pkg::{ InstalledPkg, InstalledPkgAsset, InstalledPkgAssetKind, InstalledPkgAssetTyped, InstalledPkgId, }, prop::PropPath, - schema::{ - variant::{ - definition::{SchemaVariantDefinition, SchemaVariantDefinitionJson}, - leaves::LeafInputLocation, - }, - SchemaUiMenu, - }, - socket::SocketEdgeKind, - ActionKind, ActionPrototype, ActionPrototypeContext, AttributeContextBuilder, - AttributePrototype, AttributePrototypeArgument, AttributePrototypeId, AttributeReadContext, - AttributeValue, AttributeValueError, ChangeSet, ChangeSetPk, Component, DalContext, Edge, - ExternalProvider, ExternalProviderId, Func, FuncArgument, FuncError, FuncId, InternalProvider, - InternalProviderError, LeafKind, Node, Prop, PropId, PropKind, Schema, SchemaId, SchemaVariant, - SchemaVariantError, SchemaVariantId, Socket, StandardModel, Tenancy, UserPk, Workspace, - WorkspacePk, + 
schema::variant::leaves::{LeafInputLocation, LeafKind}, + ActionPrototype, ChangeSetPk, DalContext, ExternalProvider, ExternalProviderId, Func, FuncId, + InternalProvider, Prop, PropId, PropKind, Schema, SchemaId, SchemaVariant, SchemaVariantId, + StandardModel, }; +use crate::{AttributePrototype, AttributePrototypeId}; use super::{PkgError, PkgResult}; #[derive(Clone, Debug)] -pub enum Thing { +pub(crate) enum Thing { ActionPrototype(ActionPrototype), AuthPrototype(AuthenticationPrototype), - AttributePrototypeArgument(AttributePrototypeArgument), - Component((Component, Node)), - Edge(Edge), + // AttributePrototypeArgument(AttributePrototypeArgument), + // Component((Component, Node)), + // Edge(Edge), Func(Func), + #[allow(dead_code)] FuncArgument(FuncArgument), Schema(Schema), SchemaVariant(SchemaVariant), - Socket(Box<(Socket, Option, Option)>), + Socket(Box<(Option, Option)>), } pub type ThingMap = super::ChangeSetThingMap; @@ -84,21 +64,23 @@ const SPECIAL_CASE_FUNCS: [&str; 2] = ["si:resourcePayloadToValue", "si:normaliz #[allow(clippy::too_many_arguments)] async fn import_change_set( ctx: &DalContext, - change_set_pk: ChangeSetPk, + change_set_pk: Option, metadata: &SiPkgMetadata, funcs: &[SiPkgFunc<'_>], schemas: &[SiPkgSchema<'_>], - components: &[SiPkgComponent<'_>], - edges: &[SiPkgEdge<'_>], + _components: &[SiPkgComponent<'_>], + _edges: &[SiPkgEdge<'_>], installed_pkg_id: Option, thing_map: &mut ThingMap, options: &ImportOptions, ) -> PkgResult<( Vec, - Vec<(String, Vec)>, - Vec, + Vec<(String, Vec)>, + Vec, )> { for func_spec in funcs { + let unique_id = func_spec.unique_id().to_string(); + // This is a hack because the hash of the intrinsics has changed from the version in the // packages. 
We also apply this to si:resourcePayloadToValue since it should be an // intrinsic but is only in our packages @@ -106,64 +88,43 @@ async fn import_change_set( || SPECIAL_CASE_FUNCS.contains(&func_spec.name()) || func_spec.is_from_builtin().unwrap_or(false) { - let hash = func_spec.hash(); - let func_spec: SiPkgFunc<'_> = func_spec.clone(); - let func_spec: FuncSpec = func_spec.try_into()?; - if let (Some(mut func), Some(data)) = ( - Func::find_by_name(ctx, &func_spec.name).await?, - &func_spec.data, - ) { - func.set_description(ctx, data.description.clone()).await?; - func.set_display_name(ctx, data.display_name.clone()) - .await?; - func.set_handler(ctx, Some(data.handler.clone())).await?; - func.set_link(ctx, data.link.clone()).await?; - func.set_hidden(ctx, data.hidden).await?; - func.set_backend_kind(ctx, data.backend_kind).await?; - func.set_backend_response_type(ctx, data.response_type) - .await?; - func.set_code_base64(ctx, Some(data.code_base64.clone())) - .await?; + if let Some(func_id) = Func::find_by_name(ctx, func_spec.name()).await? { + let func = Func::get_by_id(ctx, func_id).await?; thing_map.insert( change_set_pk, - func_spec.unique_id.to_owned(), + unique_id.to_owned(), Thing::Func(func.to_owned()), ); } else if let Some(func) = import_func( ctx, - change_set_pk, - &func_spec, - Some(hash), + None, + func_spec, installed_pkg_id, thing_map, options.is_builtin, ) .await? 
{ - let args = func_spec.arguments; + let args = func_spec.arguments()?; if !args.is_empty() { - import_func_arguments(ctx, change_set_pk, *func.id(), &args, thing_map).await?; + import_func_arguments(ctx, None, func.id, &args, thing_map).await?; } } } else { - let hash = func_spec.hash(); - let func_spec: SiPkgFunc<'_> = func_spec.clone(); - let func_spec: FuncSpec = func_spec.try_into()?; - let func = if let Some(Some(func)) = options .skip_import_funcs .as_ref() - .map(|skip_funcs| skip_funcs.get(&func_spec.unique_id)) + .map(|skip_funcs| skip_funcs.get(&unique_id)) { if let Some(installed_pkg_id) = installed_pkg_id { InstalledPkgAsset::new( ctx, InstalledPkgAssetTyped::new_for_func( - *func.id(), + func.id, installed_pkg_id, - hash.to_string(), + func_spec.hash().to_string(), ), ) .await?; @@ -172,7 +133,7 @@ async fn import_change_set( // We're not going to import this func but we need it in the map for lookups later thing_map.insert( change_set_pk, - func_spec.unique_id.to_owned(), + func_spec.unique_id().to_owned(), Thing::Func(func.to_owned()), ); @@ -181,8 +142,7 @@ async fn import_change_set( import_func( ctx, change_set_pk, - &func_spec, - Some(hash), + func_spec, installed_pkg_id, thing_map, options.is_builtin, @@ -191,10 +151,16 @@ async fn import_change_set( }; if let Some(func) = func { - let args = func_spec.arguments; + thing_map.insert( + change_set_pk, + unique_id.to_owned(), + Thing::Func(func.to_owned()), + ); + + let args = func_spec.arguments()?; if !args.is_empty() { - import_func_arguments(ctx, change_set_pk, *func.id(), &args, thing_map).await?; + import_func_arguments(ctx, change_set_pk, func.id, &args, thing_map).await?; } } }; @@ -218,414 +184,1176 @@ async fn import_change_set( metadata.name(), ); - let (_, schema_variant_ids) = import_schema( - ctx, - change_set_pk, - schema_spec, - installed_pkg_id, - thing_map, - metadata, - ) - .await?; + let (_, schema_variant_ids) = + import_schema(ctx, change_set_pk, schema_spec, 
installed_pkg_id, thing_map).await?; installed_schema_variant_ids.extend(schema_variant_ids); } - let mut component_attribute_skips = vec![]; - for component_spec in components { - let component_spec: SiPkgComponent<'_> = component_spec.clone(); - let name = component_spec.name().to_owned(); - let skips = import_component( - ctx, - change_set_pk, - component_spec.try_into()?, - thing_map, - false, - ) - .await?; - if !skips.is_empty() { - component_attribute_skips.push((name, skips)); - } - } - - let mut edge_skips = vec![]; - for edge_spec in edges { - let edge_spec: SiPkgEdge<'_> = edge_spec.clone(); - if let Some(skip) = - import_edge(ctx, change_set_pk, &edge_spec.try_into()?, thing_map).await? - { - edge_skips.push(skip); - } - } - - if !components.is_empty() { - info!("calculating cached data for imported components."); - AttributeValue::remove_dependency_summaries_for_deleted_values(ctx).await?; - for component in Component::list(ctx).await? { - info!("calculating cached data for component {:?}", component.id()); - AttributeValue::update_component_dependencies(ctx, *component.id()).await?; - PropertyEditorValuesSummary::create_or_update_component_entry(ctx, *component.id()) - .await?; - diagram::summary_diagram::update_socket_summary(ctx, &component).await?; - - // We want the serde representation of the deleted_at value since it's - // identical to what we send to the database - let deleted_at_value = match component.visibility().deleted_at { - Some(deleted_at) => Some(serde_json::to_value(deleted_at)?), - None => None, - } - .map(|v| v.to_string()); - - diagram::summary_diagram::component_update( - ctx, - component.id(), - component.name(ctx).await?, - component.color(ctx).await?.unwrap_or_default(), - component.get_type(ctx).await?, - component.resource(ctx).await?.payload.is_some(), - deleted_at_value, - ) - .await?; - } - } - - info!("Finished Imports: {}", Utc::now()); + // let mut component_attribute_skips = vec![]; + // for component_spec in 
components { + // let skips = import_component(ctx, change_set_pk, component_spec, thing_map).await?; + // if !skips.is_empty() { + // component_attribute_skips.push((component_spec.name().to_owned(), skips)); + // } + // } + + // let mut edge_skips = vec![]; + // for edge_spec in edges { + // if let Some(skip) = import_edge(ctx, change_set_pk, edge_spec, thing_map).await? { + // edge_skips.push(skip); + // } + // } + // Ok(( installed_schema_variant_ids, - component_attribute_skips, - edge_skips, + vec![], // component_attribute_skips, + vec![], // edge_skips, )) } -async fn import_edge( - ctx: &DalContext, - change_set_pk: ChangeSetPk, - edge_spec: &EdgeSpec, - thing_map: &mut ThingMap, -) -> PkgResult> { - let edge = match thing_map.get(change_set_pk, &edge_spec.unique_id.clone()) { - Some(Thing::Edge(edge)) => Some(edge.to_owned()), - _ => { - if !edge_spec.deleted { - let head_component_unique_id = edge_spec.to_component_unique_id.clone(); - let (_, head_node) = match thing_map.get(change_set_pk, &head_component_unique_id) { - Some(Thing::Component((component, node))) => (component, node), - _ => { - return Err(PkgError::MissingComponentForEdge( - head_component_unique_id, - edge_spec.from_socket_name.clone(), - edge_spec.to_socket_name.clone(), - )); - } - }; - - let tail_component_unique_id = edge_spec.from_component_unique_id.clone(); - let (_, tail_node) = match thing_map.get(change_set_pk, &tail_component_unique_id) { - Some(Thing::Component((component, node))) => (component, node), - _ => { - return Err(PkgError::MissingComponentForEdge( - tail_component_unique_id, - edge_spec.from_socket_name.clone(), - edge_spec.to_socket_name.clone(), - )); - } - }; - - let to_socket = match Socket::find_by_name_for_edge_kind_and_node( - ctx, - &edge_spec.to_socket_name, - SocketEdgeKind::ConfigurationInput, - *head_node.id(), - ) - .await? 
- { - Some(socket) => socket, - None => { - return Ok(Some(ImportEdgeSkip::MissingInputSocket( - edge_spec.to_socket_name.clone(), - ))); - } - }; - - let from_socket = match Socket::find_by_name_for_edge_kind_and_node( - ctx, - &edge_spec.from_socket_name, - SocketEdgeKind::ConfigurationOutput, - *tail_node.id(), - ) - .await? - { - Some(socket) => socket, - None => { - return Ok(Some(ImportEdgeSkip::MissingOutputSocket( - edge_spec.from_socket_name.clone(), - ))); - } - }; - - Some( - Edge::new_for_connection( - ctx, - *head_node.id(), - *to_socket.id(), - *tail_node.id(), - *from_socket.id(), - match edge_spec.edge_kind { - EdgeSpecKind::Configuration => EdgeKind::Configuration, - EdgeSpecKind::Symbolic => EdgeKind::Symbolic, - }, - ) - .await?, - ) - } else { - None - } - } - }; - - if let Some(mut edge) = edge { - let creation_user_pk = match &edge_spec.creation_user_pk { - Some(pk_str) => Some(UserPk::from_str(pk_str)?), - None => None, - }; - if creation_user_pk.as_ref() != edge.creation_user_pk() { - edge.set_creation_user_pk(ctx, creation_user_pk).await?; - } - - let deletion_user_pk = match &edge_spec.deletion_user_pk { - Some(pk_str) => Some(UserPk::from_str(pk_str)?), - None => None, - }; - - if deletion_user_pk.as_ref() != edge.deletion_user_pk() { - edge.set_deletion_user_pk(ctx, deletion_user_pk).await?; - } - - if edge.deleted_implicitly() != edge_spec.deleted_implicitly { - edge.set_deleted_implicitly(ctx, edge_spec.deleted_implicitly) - .await?; - } - - if edge.visibility().is_deleted() && !edge_spec.deleted { - Edge::restore_by_id(ctx, *edge.id()).await?; - } else if !edge.visibility().is_deleted() && edge_spec.deleted { - // ignore errors here, since they mostly occur if we've already deleted a node that - // this is an edge to - let _ = edge.delete_and_propagate(ctx).await; - } - - thing_map.insert( - change_set_pk, - edge_spec.unique_id.clone(), - Thing::Edge(edge), - ); - } - - Ok(None) -} - -async fn import_component( - ctx: &DalContext, - 
change_set_pk: ChangeSetPk, - component_spec: ComponentSpec, - thing_map: &mut ThingMap, - force_resource_patch: bool, -) -> PkgResult> { - info!("importing component {:?}", component_spec.name); - let variant = match &component_spec.variant { - ComponentSpecVariant::BuiltinVariant { - schema_name, - variant_name, - } => { - let schema = Schema::find_by_name_builtin(ctx, schema_name.as_str()) - .await? - .ok_or(PkgError::ComponentMissingBuiltinSchema( - schema_name.to_owned(), - component_spec.name.clone(), - ))?; - - schema - .find_variant_by_name(ctx, variant_name.as_str()) - .await? - .ok_or(PkgError::ComponentMissingBuiltinSchemaVariant( - schema_name.to_owned(), - variant_name.to_owned(), - component_spec.name.clone(), - ))? - } - ComponentSpecVariant::UpdateVariant { - schema_name, - variant_name, - } => { - let schema = Schema::find_by_name(ctx, schema_name.as_str()).await?; - - schema - .find_variant_by_name(ctx, variant_name.as_str()) - .await? - .ok_or(PkgError::ComponentMissingUpdateSchemaVariant( - schema_name.to_owned(), - variant_name.to_owned(), - component_spec.name.clone(), - ))? - } - ComponentSpecVariant::WorkspaceVariant { variant_unique_id } => { - match thing_map.get(change_set_pk, variant_unique_id) { - Some(Thing::SchemaVariant(variant)) => variant.to_owned(), - _ => { - return Err(PkgError::ComponentMissingSchemaVariant( - variant_unique_id.to_owned(), - component_spec.name.clone(), - )); - } - } - } - }; - - let (mut component, mut node) = match thing_map - .get(change_set_pk, &component_spec.unique_id.clone()) - { - Some(Thing::Component((existing_component, node))) => { - if Component::schema_variant_id(ctx, *existing_component.id()).await? 
!= *variant.id() { - info!( - "respining component {:?} onto variant {}", - existing_component.id(), - variant.name() - ); - // If the component exists already, but the schema variant is - // different, we need to respin the component to the change-set - // specific schema variant - ( - Component::respin(ctx, *existing_component.id(), *variant.id()).await?, - node.to_owned(), - ) - } else { - (existing_component.to_owned(), node.to_owned()) - } - } - _ => { - let (component, node) = - Component::new(ctx, component_spec.name.clone(), *variant.id()).await?; - - thing_map.insert( - change_set_pk, - component_spec.unique_id.clone(), - Thing::Component((component.to_owned(), node.to_owned())), - ); - - (component, node) - } - }; - - if component.name(ctx).await? != component_spec.name { - component - .set_name(ctx, Some(component_spec.name.clone())) - .await?; - } - - let position = component_spec.position; - if node.x() != position.x - || node.y() != position.y - || node.height() != position.height.as_deref() - || node.width() != position.width.as_deref() - { - // Use set_geometry to ensure summary diagram gets updated positioning - node.set_geometry(ctx, position.x, position.y, position.width, position.height) - .await?; - } - - let mut component_root_implicit_value = - component::migrate::build_empty_json_for_prop_tree(ctx, *variant.id()).await?; - let imported_json = component_spec.attributes[0] - .implicit_value - .as_ref() - .cloned() - .unwrap_or(serde_json::Value::Null); - component::migrate::serde_value_merge_in_place_recursive( - &mut component_root_implicit_value, - imported_json, - ); - - info!( - "component root implicit value: {:?}", - &component_root_implicit_value - ); - - if component_root_implicit_value != serde_json::Value::Null { - let root_attribute_value = component.root_attribute_value(ctx).await?; - AttributeValue::update_for_context_without_propagating_dependent_values( - ctx, - *root_attribute_value.id(), - None, - root_attribute_value.context, 
- Some(component_root_implicit_value), - None, - ) - .await?; - - component::migrate::restore_prototypes_and_implicit_values(ctx, *component.id()).await?; - } - - let mut resource_value = None; - for attribute in &component_spec.attributes { - if let AttributeValuePath::Prop { path, .. } = &attribute.path { - if path == &PropPath::new(["root", "resource"]).to_string() { - resource_value = attribute.implicit_value.clone(); - break; - } - } - } - - if component_spec.needs_destroy { - component.set_needs_destroy(ctx, true).await?; - } - - if let Some(resource_value) = resource_value { - if force_resource_patch || change_set_pk == ChangeSetPk::NONE { - if let Ok(result) = serde_json::from_value(resource_value) { - component.set_resource(ctx, result).await?; - } - } - } - - if component.visibility().is_deleted() && !component_spec.deleted { - Component::restore_and_propagate(ctx, *component.id()).await?; - } else if !component.visibility().is_deleted() && component_spec.deleted { - component.delete_and_propagate(ctx).await?; - } - - Ok(vec![]) -} - -#[derive(Debug, Clone, Deserialize, Serialize)] -#[serde(rename_all = "camelCase")] -pub struct ImportSkips { - pub change_set_pk: ChangeSetPk, - pub edge_skips: Vec, - pub attribute_skips: Vec<(String, Vec)>, -} - -#[remain::sorted] -#[derive(Debug, Clone, Serialize, Deserialize)] -#[serde(tag = "type", rename_all = "camelCase")] -pub enum ImportAttributeSkip { - #[serde(rename_all = "camelCase")] - KindMismatch { - path: PropPath, - expected_kind: PropKind, - variant_kind: PropKind, - }, - MissingInputSocket(String), - MissingOutputSocket(String), - MissingProp(PropPath), -} - -#[derive(Clone, Debug, Deserialize, Serialize)] -#[serde(tag = "type", rename_all = "camelCase")] -pub enum ImportEdgeSkip { - MissingInputSocket(String), - MissingOutputSocket(String), -} +// #[derive(Eq, PartialEq, Hash, Debug, Clone)] +// struct ValueCacheKey { +// context: AttributeContext, +// key: Option, +// index: Option, +// } + +// impl 
ValueCacheKey { +// pub fn new( +// component_id: ComponentId, +// prop_id: PropId, +// key: Option, +// index: Option, +// ) -> Self { +// let mut context_builder = AttributeContextBuilder::new(); +// context_builder +// .set_prop_id(prop_id) +// .set_component_id(component_id); + +// Self { +// context: context_builder.to_context_unchecked(), +// key, +// index, +// } +// } +// } + +// async fn import_edge( +// ctx: &DalContext, +// change_set_pk: Option, +// edge_spec: &SiPkgEdge<'_>, +// thing_map: &mut ThingMap, +// ) -> PkgResult> { +// let edge = match thing_map.get(change_set_pk, &edge_spec.unique_id().to_owned()) { +// Some(Thing::Edge(edge)) => Some(edge.to_owned()), +// _ => { +// if !edge_spec.deleted() { +// let head_component_unique_id = edge_spec.to_component_unique_id().to_owned(); +// let (_, head_node) = match thing_map.get(change_set_pk, &head_component_unique_id) { +// Some(Thing::Component((component, node))) => (component, node), +// _ => { +// return Err(PkgError::MissingComponentForEdge( +// head_component_unique_id, +// edge_spec.from_socket_name().to_owned(), +// edge_spec.to_socket_name().to_owned(), +// )) +// } +// }; + +// let tail_component_unique_id = edge_spec.from_component_unique_id().to_owned(); +// let (_, tail_node) = match thing_map.get(change_set_pk, &tail_component_unique_id) { +// Some(Thing::Component((component, node))) => (component, node), +// _ => { +// return Err(PkgError::MissingComponentForEdge( +// tail_component_unique_id, +// edge_spec.from_socket_name().to_owned(), +// edge_spec.to_socket_name().to_owned(), +// )) +// } +// }; + +// let to_socket = match Socket::find_by_name_for_edge_kind_and_node( +// ctx, +// edge_spec.to_socket_name(), +// SocketEdgeKind::ConfigurationInput, +// *head_node.id(), +// ) +// .await? 
+// { +// Some(socket) => socket, +// None => { +// return Ok(Some(ImportEdgeSkip::MissingInputSocket( +// edge_spec.to_socket_name().to_owned(), +// ))) +// } +// }; + +// let from_socket = match Socket::find_by_name_for_edge_kind_and_node( +// ctx, +// edge_spec.from_socket_name(), +// SocketEdgeKind::ConfigurationOutput, +// *tail_node.id(), +// ) +// .await? +// { +// Some(socket) => socket, +// None => { +// return Ok(Some(ImportEdgeSkip::MissingOutputSocket( +// edge_spec.from_socket_name().to_owned(), +// ))) +// } +// }; + +// Some( +// Edge::new_for_connection( +// ctx, +// *head_node.id(), +// *to_socket.id(), +// *tail_node.id(), +// *from_socket.id(), +// match edge_spec.edge_kind() { +// EdgeSpecKind::Configuration => EdgeKind::Configuration, +// EdgeSpecKind::Symbolic => EdgeKind::Symbolic, +// }, +// ) +// .await?, +// ) +// } else { +// None +// } +// } +// }; + +// if let Some(mut edge) = edge { +// let creation_user_pk = match edge_spec.creation_user_pk() { +// Some(pk_str) => Some(UserPk::from_str(pk_str)?), +// None => None, +// }; +// if creation_user_pk.as_ref() != edge.creation_user_pk() { +// edge.set_creation_user_pk(ctx, creation_user_pk).await?; +// } + +// let deletion_user_pk = match edge_spec.deletion_user_pk() { +// Some(pk_str) => Some(UserPk::from_str(pk_str)?), +// None => None, +// }; + +// if deletion_user_pk.as_ref() != edge.deletion_user_pk() { +// edge.set_deletion_user_pk(ctx, deletion_user_pk).await?; +// } + +// if edge.deleted_implicitly() != edge_spec.deleted_implicitly() { +// edge.set_deleted_implicitly(ctx, edge_spec.deleted_implicitly()) +// .await?; +// } + +// if edge.visibility().is_deleted() && !edge_spec.deleted() { +// Edge::restore_by_id(ctx, *edge.id()).await?; +// } else if !edge.visibility().is_deleted() && edge_spec.deleted() { +// edge.delete_and_propagate(ctx).await?; +// } + +// thing_map.insert( +// change_set_pk, +// edge_spec.unique_id().to_owned(), +// Thing::Edge(edge), +// ); +// } + +// Ok(None) 
+// } + +// async fn import_component( +// ctx: &DalContext, +// change_set_pk: Option, +// component_spec: &SiPkgComponent<'_>, +// thing_map: &mut ThingMap, +// ) -> PkgResult> { +// let _change_set_pk_inner = change_set_pk.ok_or(PkgError::ComponentImportWithoutChangeSet)?; + +// let variant = match component_spec.variant() { +// ComponentSpecVariant::BuiltinVariant { +// schema_name, +// variant_name, +// } => { +// let schema = Schema::find_by_name_builtin(ctx, schema_name.as_str()) +// .await? +// .ok_or(PkgError::ComponentMissingBuiltinSchema( +// schema_name.to_owned(), +// component_spec.name().into(), +// ))?; + +// schema +// .find_variant_by_name(ctx, variant_name.as_str()) +// .await? +// .ok_or(PkgError::ComponentMissingBuiltinSchemaVariant( +// schema_name.to_owned(), +// variant_name.to_owned(), +// component_spec.name().into(), +// ))? +// } +// ComponentSpecVariant::WorkspaceVariant { variant_unique_id } => { +// match thing_map.get(change_set_pk, variant_unique_id) { +// Some(Thing::SchemaVariant(variant)) => variant.to_owned(), +// _ => { +// return Err(PkgError::ComponentMissingSchemaVariant( +// variant_unique_id.to_owned(), +// component_spec.name().into(), +// )) +// } +// } +// } +// }; + +// let (mut component, mut node) = +// match thing_map.get(change_set_pk, &component_spec.unique_id().to_owned()) { +// Some(Thing::Component((existing_component, node))) => { +// (existing_component.to_owned(), node.to_owned()) +// } +// _ => { +// let (component, node) = +// Component::new(ctx, component_spec.name(), *variant.id()).await?; +// thing_map.insert( +// change_set_pk, +// component_spec.unique_id().into(), +// Thing::Component((component.to_owned(), node.to_owned())), +// ); + +// (component, node) +// } +// }; + +// if component.name(ctx).await? != component_spec.name() { +// component.set_name(ctx, Some(component_spec.name())).await?; +// } + +// let position = component_spec +// .position()? 
+// .pop() +// .ok_or(PkgError::ComponentSpecMissingPosition)?; + +// if node.x() != position.x() { +// node.set_x(ctx, position.x()).await?; +// } +// if node.y() != position.y() { +// node.set_y(ctx, position.y()).await?; +// } + +// if node.height() != position.height() { +// node.set_height(ctx, position.height().map(ToOwned::to_owned)) +// .await?; +// } +// if node.width() != position.width() { +// node.set_width(ctx, position.width().map(ToOwned::to_owned)) +// .await?; +// } + +// let mut value_cache: HashMap = HashMap::new(); +// let mut prop_cache: HashMap> = HashMap::new(); + +// let mut skips = vec![]; + +// for attribute in component_spec.attributes()? { +// if let Some(skip) = import_component_attribute( +// ctx, +// change_set_pk, +// &component, +// &variant, +// attribute, +// &mut value_cache, +// &mut prop_cache, +// thing_map, +// ) +// .await? +// { +// skips.push(skip); +// } +// } +// for attribute in component_spec.input_sockets()? { +// if let Some(skip) = import_component_attribute( +// ctx, +// change_set_pk, +// &component, +// &variant, +// attribute, +// &mut value_cache, +// &mut prop_cache, +// thing_map, +// ) +// .await? +// { +// skips.push(skip); +// } +// } +// for attribute in component_spec.output_sockets()? { +// if let Some(skip) = import_component_attribute( +// ctx, +// change_set_pk, +// &component, +// &variant, +// attribute, +// &mut value_cache, +// &mut prop_cache, +// thing_map, +// ) +// .await? 
+// { +// skips.push(skip); +// } +// } + +// if component_spec.needs_destroy() { +// component.set_needs_destroy(ctx, true).await?; +// } + +// if component.visibility().is_deleted() && !component_spec.deleted() { +// Component::restore_and_propagate(ctx, *component.id()).await?; +// } else if !component.visibility().is_deleted() && component_spec.deleted() { +// component.delete_and_propagate(ctx).await?; +// } + +// Ok(skips) +// } + +// fn get_prop_kind_for_value(value: Option<&serde_json::Value>) -> Option { +// match value { +// Some(serde_json::Value::Array(_)) => Some(PropKind::Array), +// Some(serde_json::Value::Bool(_)) => Some(PropKind::Boolean), +// Some(serde_json::Value::Number(_)) => Some(PropKind::Integer), +// Some(serde_json::Value::Object(_)) => Some(PropKind::Object), +// Some(serde_json::Value::String(_)) => Some(PropKind::String), + +// _ => None, +// } +// } + +// #[allow(clippy::too_many_arguments)] +// async fn import_component_attribute( +// ctx: &DalContext, +// change_set_pk: Option, +// component: &Component, +// variant: &SchemaVariant, +// attribute: &SiPkgAttributeValue<'_>, +// value_cache: &mut HashMap, +// prop_cache: &mut HashMap>, +// thing_map: &mut ThingMap, +// ) -> PkgResult> { +// match attribute.path() { +// AttributeValuePath::Prop { path, key, index } => { +// if attribute.parent_path().is_none() && (key.is_some() || index.is_some()) { +// return Err(PkgError::AttributeValueWithKeyOrIndexButNoParent); +// } + +// let prop = match prop_cache.get(path) { +// Some(prop) => prop.to_owned(), +// None => { +// let prop = Prop::find_prop_by_path_opt( +// ctx, +// *variant.id(), +// &PropPath::from(path.to_owned()), +// ) +// .await?; +// prop_cache.insert(path.to_owned(), prop.to_owned()); + +// prop +// } +// }; + +// struct ParentData<'a> { +// prop: Option<&'a Prop>, +// attribute_value: Option, +// default_attribute_value: Option, +// } + +// match prop { +// Some(prop) => { +// // Validate type if possible +// let 
expected_prop_kind = get_prop_kind_for_value(attribute.value()); +// if let Some(expected_kind) = expected_prop_kind { +// if expected_kind +// != match prop.kind() { +// PropKind::Map | PropKind::Object => PropKind::Object, +// other => *other, +// } +// { +// return Ok(Some(ImportAttributeSkip::KindMismatch { +// path: PropPath::from(path), +// expected_kind, +// variant_kind: *prop.kind(), +// })); +// } +// } + +// let parent_data = if let Some(AttributeValuePath::Prop { path, key, index }) = +// attribute.parent_path() +// { +// let parent_prop = prop_cache +// .get(path) +// .and_then(|p| p.as_ref()) +// .ok_or(PkgError::AttributeValueParentPropNotFound(path.to_owned()))?; + +// let parent_value_cache_key = ValueCacheKey::new( +// *component.id(), +// *parent_prop.id(), +// key.to_owned(), +// index.to_owned(), +// ); + +// let parent_av = value_cache.get(&parent_value_cache_key).ok_or( +// PkgError::AttributeValueParentValueNotFound( +// path.to_owned(), +// key.to_owned(), +// index.to_owned(), +// ), +// )?; + +// let parent_default_value_cache_key = ValueCacheKey::new( +// ComponentId::NONE, +// *parent_prop.id(), +// key.to_owned(), +// index.to_owned(), +// ); + +// let parent_default_av = +// value_cache.get(&parent_default_value_cache_key).cloned(); + +// ParentData { +// prop: Some(parent_prop), +// attribute_value: Some(parent_av.to_owned()), +// default_attribute_value: parent_default_av, +// } +// } else { +// ParentData { +// prop: None, +// attribute_value: None, +// default_attribute_value: None, +// } +// }; + +// // If we're an array element, we might already exist in the index map +// let av_id_from_index_map = match index { +// Some(index) => match parent_data.attribute_value.as_ref() { +// Some(parent_av) => { +// match parent_av +// .index_map() +// .and_then(|index_map| index_map.order().get(*index as usize)) +// { +// None => { +// let attribute_context = AttributeContext::builder() +// .set_prop_id(*prop.id()) +// 
.set_component_id(*component.id()) +// .to_context_unchecked(); + +// // This value will get updated by +// // update_attribute_value +// Some( +// AttributeValue::insert_for_context( +// ctx, +// attribute_context, +// *parent_av.id(), +// None, +// None, +// ) +// .await?, +// ) +// } +// Some(av_id) => Some(*av_id), +// } +// } +// None => None, +// }, +// None => None, +// }; + +// let default_value_cache_key = ValueCacheKey::new( +// ComponentId::NONE, +// *prop.id(), +// key.to_owned(), +// index.to_owned(), +// ); + +// let default_av = match value_cache.entry(default_value_cache_key) { +// Entry::Occupied(occupied) => Some(occupied.get().to_owned()), +// Entry::Vacant(vacant) => { +// if parent_data.default_attribute_value.is_none() +// && parent_data.prop.is_some() +// { +// None +// } else { +// let default_parent_av_id = +// parent_data.default_attribute_value.map(|av| *av.id()); + +// let default_value_context = AttributeReadContext { +// prop_id: Some(*prop.id()), +// internal_provider_id: Some(InternalProviderId::NONE), +// external_provider_id: Some(ExternalProviderId::NONE), +// component_id: None, +// }; + +// let value = AttributeValue::find_with_parent_and_key_for_context( +// ctx, +// default_parent_av_id, +// key.to_owned(), +// default_value_context, +// ) +// .await?; + +// if let Some(value) = &value { +// vacant.insert(value.to_owned()); +// } + +// value +// } +// } +// }; + +// let context = AttributeReadContext { +// prop_id: Some(*prop.id()), +// internal_provider_id: Some(InternalProviderId::NONE), +// external_provider_id: Some(ExternalProviderId::NONE), +// component_id: Some(*component.id()), +// }; + +// let parent_av_id = parent_data.attribute_value.as_ref().map(|av| *av.id()); +// let maybe_av = match av_id_from_index_map { +// Some(av_id) => Some(AttributeValue::get_by_id(ctx, &av_id).await?.ok_or( +// AttributeValueError::NotFound(av_id, ctx.visibility().to_owned()), +// )?), +// None => { +// 
AttributeValue::find_with_parent_and_key_for_context( +// ctx, +// parent_av_id, +// key.to_owned(), +// context, +// ) +// .await? +// } +// }; + +// let mut av_to_update = match maybe_av { +// Some(av) => av, +// None => { +// if index.is_some() { +// dbg!( +// "should always have an attribute value here for an indexed av" +// ); +// } +// let context = AttributeReadContext { +// prop_id: Some(*prop.id()), +// internal_provider_id: None, +// external_provider_id: None, +// component_id: None, +// }; +// let maybe_av = AttributeValue::find_with_parent_and_key_for_context( +// ctx, +// parent_av_id, +// key.to_owned(), +// context, +// ) +// .await?; + +// match maybe_av { +// Some(av) => av, +// None => { +// let parent_av_id = parent_av_id.ok_or( +// PkgError::AttributeValueParentValueNotFound( +// "in av search".into(), +// key.to_owned(), +// index.to_owned(), +// ), +// )?; + +// let attribute_context = AttributeContext::builder() +// .set_prop_id(*prop.id()) +// .set_component_id(*component.id()) +// .to_context_unchecked(); + +// if key.is_some() { +// let av_id = AttributeValue::insert_for_context( +// ctx, +// attribute_context, +// parent_av_id, +// None, +// key.to_owned(), +// ) +// .await?; + +// AttributeValue::get_by_id(ctx, &av_id).await?.ok_or( +// AttributeValueError::NotFound( +// av_id, +// ctx.visibility().to_owned(), +// ), +// )? 
+// } else { +// let (_, value) = create_attribute_value( +// ctx, +// change_set_pk, +// attribute_context, +// *component.id(), +// key, +// parent_data.attribute_value.as_ref(), +// default_av.as_ref(), +// &attribute, +// thing_map, +// ) +// .await?; + +// value +// } +// } +// } +// } +// }; + +// let updated_av = update_attribute_value( +// ctx, +// change_set_pk, +// *variant.id(), +// *component.id(), +// &attribute, +// &mut av_to_update, +// parent_data.attribute_value.as_ref(), +// default_av.as_ref(), +// thing_map, +// ) +// .await?; + +// let this_cache_key = ValueCacheKey::new( +// *component.id(), +// *prop.id(), +// key.to_owned(), +// index.to_owned(), +// ); + +// value_cache.insert(this_cache_key, updated_av); +// } +// None => { +// // collect missing props and log them +// return Ok(Some(ImportAttributeSkip::MissingProp(PropPath::from(path)))); +// } +// } +// } +// AttributeValuePath::InputSocket(socket_name) +// | AttributeValuePath::OutputSocket(socket_name) => { +// let (default_read_context, read_context, write_context) = +// if matches!(attribute.path(), AttributeValuePath::InputSocket(_)) { +// let internal_provider = +// match InternalProvider::find_explicit_for_schema_variant_and_name( +// ctx, +// *variant.id(), +// socket_name.as_str(), +// ) +// .await? 
+// { +// None => { +// return Ok(Some(ImportAttributeSkip::MissingInputSocket( +// socket_name.to_owned(), +// ))) +// } +// Some(ip) => ip, +// }; + +// let default_read_context = AttributeReadContext { +// prop_id: Some(PropId::NONE), +// internal_provider_id: Some(*internal_provider.id()), +// external_provider_id: Some(ExternalProviderId::NONE), +// component_id: None, +// }; +// let read_context = AttributeReadContext { +// prop_id: Some(PropId::NONE), +// internal_provider_id: Some(*internal_provider.id()), +// external_provider_id: Some(ExternalProviderId::NONE), +// component_id: Some(*component.id()), +// }; +// let write_context = AttributeContext::builder() +// .set_internal_provider_id(*internal_provider.id()) +// .set_component_id(*component.id()) +// .to_context_unchecked(); + +// (default_read_context, read_context, write_context) +// } else { +// let external_provider = +// match ExternalProvider::find_for_schema_variant_and_name( +// ctx, +// *variant.id(), +// socket_name.as_str(), +// ) +// .await? 
+// { +// None => { +// return Ok(Some(ImportAttributeSkip::MissingOutputSocket( +// socket_name.to_owned(), +// ))) +// } +// Some(ep) => ep, +// }; + +// let default_read_context = AttributeReadContext { +// prop_id: Some(PropId::NONE), +// internal_provider_id: Some(InternalProviderId::NONE), +// external_provider_id: Some(*external_provider.id()), +// component_id: None, +// }; +// let read_context = AttributeReadContext { +// prop_id: Some(PropId::NONE), +// internal_provider_id: Some(InternalProviderId::NONE), +// external_provider_id: Some(*external_provider.id()), +// component_id: Some(*component.id()), +// }; +// let write_context = AttributeContext::builder() +// .set_external_provider_id(*external_provider.id()) +// .set_component_id(*component.id()) +// .to_context_unchecked(); + +// (default_read_context, read_context, write_context) +// }; + +// let default_value = AttributeValue::find_for_context(ctx, default_read_context).await?; + +// match AttributeValue::find_for_context(ctx, read_context).await? { +// Some(mut existing_av) => { +// update_attribute_value( +// ctx, +// change_set_pk, +// *variant.id(), +// *component.id(), +// &attribute, +// &mut existing_av, +// None, +// default_value.as_ref(), +// thing_map, +// ) +// .await?; +// } +// None => { +// create_attribute_value( +// ctx, +// change_set_pk, +// write_context, +// *component.id(), +// &None, +// None, +// default_value.as_ref(), +// &attribute, +// thing_map, +// ) +// .await?; +// } +// } +// } +// } + +// Ok(None) +// } + +// async fn get_ip_for_input( +// ctx: &DalContext, +// schema_variant_id: SchemaVariantId, +// input: &SiPkgAttrFuncInput<'_>, +// ) -> PkgResult> { +// Ok(match input { +// SiPkgAttrFuncInput::Prop { prop_path, .. } => { +// let input_source_prop = match Prop::find_prop_by_path_opt( +// ctx, +// schema_variant_id, +// &PropPath::from(prop_path), +// ) +// .await? 
+// { +// Some(p) => p, +// None => return Ok(None), +// }; + +// let ip = InternalProvider::find_for_prop(ctx, *input_source_prop.id()) +// .await? +// .ok_or(PkgError::MissingInternalProviderForProp( +// *input_source_prop.id(), +// ))?; + +// Some(*ip.id()) +// } +// SiPkgAttrFuncInput::InputSocket { socket_name, .. } => { +// let explicit_ip = match InternalProvider::find_explicit_for_schema_variant_and_name( +// ctx, +// schema_variant_id, +// &socket_name, +// ) +// .await? +// { +// Some(ip) => ip, +// None => return Ok(None), +// }; + +// Some(*explicit_ip.id()) +// } +// SiPkgAttrFuncInput::OutputSocket { .. } => None, +// }) +// } + +// #[allow(clippy::too_many_arguments)] +// async fn create_attribute_value( +// ctx: &DalContext, +// change_set_pk: Option, +// context: AttributeContext, +// component_id: ComponentId, +// real_key: &Option, +// parent_attribute_value: Option<&AttributeValue>, +// default_attribute_value: Option<&AttributeValue>, +// attribute_spec: &SiPkgAttributeValue<'_>, +// thing_map: &mut ThingMap, +// ) -> PkgResult<(AttributePrototype, AttributeValue)> { +// let attribute_func = +// match thing_map.get(change_set_pk, &attribute_spec.func_unique_id().to_owned()) { +// Some(Thing::Func(func)) => func, +// _ => { +// return Err(PkgError::MissingFuncUniqueId(format!( +// "here, {}", +// attribute_spec.func_unique_id().to_owned() +// ))); +// } +// }; + +// let new_context = AttributeContext::builder() +// .set_prop_id(context.prop_id()) +// .set_internal_provider_id(context.internal_provider_id()) +// .set_external_provider_id(context.external_provider_id()) +// .set_component_id(component_id) +// .to_context_unchecked(); + +// let func_binding = FuncBinding::new( +// ctx, +// attribute_spec.func_binding_args().to_owned(), +// *attribute_func.id(), +// attribute_spec.backend_kind().into(), +// ) +// .await?; + +// let mut func_binding_return_value = FuncBindingReturnValue::new( +// ctx, +// attribute_spec.unprocessed_value().cloned(), 
+// attribute_spec.value().cloned(), +// *attribute_func.id(), +// *func_binding.id(), +// FuncExecutionPk::NONE, +// ) +// .await?; + +// let execution = FuncExecution::new(ctx, attribute_func, &func_binding).await?; +// // TODO: add output stream? + +// func_binding_return_value +// .set_func_execution_pk(ctx, execution.pk()) +// .await?; + +// let mut new_value = AttributeValue::new( +// ctx, +// *func_binding.id(), +// *func_binding_return_value.id(), +// new_context, +// real_key.to_owned(), +// ) +// .await?; + +// if let Some(parent_attribute_value) = parent_attribute_value.as_ref() { +// new_value +// .set_parent_attribute_value_unchecked(ctx, parent_attribute_value.id()) +// .await?; +// } + +// if attribute_spec.is_proxy() { +// let default_av = +// default_attribute_value.ok_or(PkgError::AttributeValueSetToProxyButNoProxyFound)?; + +// new_value +// .set_proxy_for_attribute_value_id(ctx, Some(*default_av.id())) +// .await?; +// } + +// let prototype_context = AttributeContext::builder() +// .set_prop_id(new_context.prop_id()) +// .set_external_provider_id(new_context.external_provider_id()) +// .set_internal_provider_id(new_context.internal_provider_id()) +// .set_component_id(if attribute_spec.component_specific() { +// new_context.component_id() +// } else { +// ComponentId::NONE +// }) +// .to_context_unchecked(); + +// let prototype = +// match AttributePrototype::find_for_context_and_key(ctx, prototype_context, real_key) +// .await? +// .pop() +// { +// Some(existing_proto) => { +// new_value +// .set_attribute_prototype(ctx, existing_proto.id()) +// .await?; + +// existing_proto +// } +// None => { +// AttributePrototype::new_with_existing_value( +// ctx, +// *attribute_func.id(), +// new_context, +// real_key.to_owned(), +// parent_attribute_value.map(|pav| *pav.id()), +// *new_value.id(), +// ) +// .await? 
+// } +// }; + +// Ok((prototype, new_value)) +// } + +// #[allow(clippy::too_many_arguments)] +// async fn update_attribute_value( +// ctx: &DalContext, +// change_set_pk: Option, +// schema_variant_id: SchemaVariantId, +// component_id: ComponentId, +// attribute_spec: &SiPkgAttributeValue<'_>, +// attribute_value: &mut AttributeValue, +// parent_attribute_value: Option<&AttributeValue>, +// default_attribute_value: Option<&AttributeValue>, +// thing_map: &mut ThingMap, +// ) -> PkgResult { +// let prototype = attribute_value +// .attribute_prototype(ctx) +// .await? +// .ok_or(AttributeValueError::MissingAttributePrototype)?; + +// let attribute_func = +// match thing_map.get(change_set_pk, &attribute_spec.func_unique_id().to_owned()) { +// Some(Thing::Func(func)) => func, +// _ => { +// return Err(PkgError::MissingFuncUniqueId(format!( +// "here, {}", +// attribute_spec.func_unique_id().to_owned() +// ))); +// } +// }; + +// let (mut prototype, value) = if prototype.context.component_id().is_none() +// && attribute_spec.component_specific() +// { +// let current_context = attribute_value.context; +// let new_context = AttributeContext::builder() +// .set_prop_id(current_context.prop_id()) +// .set_internal_provider_id(current_context.internal_provider_id()) +// .set_external_provider_id(current_context.external_provider_id()) +// .set_component_id(component_id) +// .to_context_unchecked(); + +// let func_binding = FuncBinding::new( +// ctx, +// attribute_spec.func_binding_args().to_owned(), +// *attribute_func.id(), +// attribute_spec.backend_kind().into(), +// ) +// .await?; + +// let mut func_binding_return_value = FuncBindingReturnValue::new( +// ctx, +// attribute_spec.unprocessed_value().cloned(), +// attribute_spec.value().cloned(), +// *attribute_func.id(), +// *func_binding.id(), +// FuncExecutionPk::NONE, +// ) +// .await?; + +// let execution = FuncExecution::new(ctx, attribute_func, &func_binding).await?; +// // TODO: add output stream? 
+ +// func_binding_return_value +// .set_func_execution_pk(ctx, execution.pk()) +// .await?; + +// let mut new_value = AttributeValue::new( +// ctx, +// *func_binding.id(), +// *func_binding_return_value.id(), +// new_context, +// attribute_value.key(), +// ) +// .await?; + +// if attribute_spec.is_proxy() { +// let default_av = +// default_attribute_value.ok_or(PkgError::AttributeValueSetToProxyButNoProxyFound)?; + +// new_value +// .set_proxy_for_attribute_value_id(ctx, Some(*default_av.id())) +// .await?; +// } + +// ( +// AttributePrototype::new_with_existing_value( +// ctx, +// *attribute_func.id(), +// new_context, +// attribute_value.key().map(|k| k.to_owned()), +// parent_attribute_value.map(|pav| *pav.id()), +// *new_value.id(), +// ) +// .await?, +// new_value, +// ) +// } else { +// let current_fb = FuncBinding::get_by_id(ctx, &attribute_value.func_binding_id()) +// .await? +// .ok_or(FuncBindingError::NotFound( +// attribute_value.func_binding_id(), +// ))?; + +// let current_fbrv = +// FuncBindingReturnValue::get_by_id(ctx, &attribute_value.func_binding_return_value_id()) +// .await? 
+// .ok_or(FuncBindingReturnValueError::NotFound( +// attribute_value.func_binding_return_value_id(), +// ))?; + +// if current_fb.args() != attribute_spec.func_binding_args() +// || current_fbrv.unprocessed_value() != attribute_spec.unprocessed_value() +// || current_fbrv.func_id() != attribute_func.id() +// || current_fb.code_sha256() != attribute_func.code_sha256() +// { +// let func_binding = FuncBinding::new( +// ctx, +// attribute_spec.func_binding_args().to_owned(), +// *attribute_func.id(), +// attribute_spec.backend_kind().into(), +// ) +// .await?; + +// let mut func_binding_return_value = FuncBindingReturnValue::new( +// ctx, +// attribute_spec.unprocessed_value().cloned(), +// attribute_spec.value().cloned(), +// *attribute_func.id(), +// *func_binding.id(), +// FuncExecutionPk::NONE, +// ) +// .await?; + +// let execution = FuncExecution::new(ctx, attribute_func, &func_binding).await?; +// // TODO: add output stream? + +// func_binding_return_value +// .set_func_execution_pk(ctx, execution.pk()) +// .await?; + +// attribute_value +// .set_func_binding_id(ctx, *func_binding.id()) +// .await?; + +// attribute_value +// .set_func_binding_return_value_id(ctx, *func_binding_return_value.id()) +// .await?; +// } + +// (prototype, attribute_value.to_owned()) +// }; + +// if prototype.func_id() != *attribute_func.id() { +// prototype.set_func_id(ctx, attribute_func.id()).await?; +// } + +// let inputs = attribute_spec.inputs()?; + +// let mut current_apas = +// AttributePrototypeArgument::list_for_attribute_prototype(ctx, *prototype.id()).await?; + +// if inputs.is_empty() && !current_apas.is_empty() { +// for apa in current_apas.iter_mut() { +// apa.delete_by_id(ctx).await?; +// } +// } else if !inputs.is_empty() { +// let mut processed_inputs = HashSet::new(); +// for apa in current_apas.iter_mut() { +// let func_arg = FuncArgument::get_by_id(ctx, &apa.func_argument_id()) +// .await? 
+// .ok_or(PkgError::MissingFuncArgumentById(apa.func_argument_id()))?; + +// let matching_input = inputs.iter().find(|input| input.name() == func_arg.name()); + +// match matching_input { +// Some(input) => { +// if let Some(ip_id) = get_ip_for_input(ctx, schema_variant_id, input).await? { +// if apa.internal_provider_id() != ip_id { +// apa.set_internal_provider_id(ctx, ip_id).await?; +// } +// } + +// processed_inputs.insert(input.name()); +// } +// None => apa.delete_by_id(ctx).await?, +// } +// } + +// for input in &inputs { +// let name = input.name(); + +// if processed_inputs.contains(name) { +// continue; +// } + +// let func_arg = FuncArgument::find_by_name_for_func(ctx, name, *attribute_func.id()) +// .await? +// .ok_or(PkgError::MissingFuncArgument( +// name.into(), +// *attribute_func.id(), +// ))?; + +// if let Some(ip_id) = get_ip_for_input(ctx, schema_variant_id, input).await? { +// AttributePrototypeArgument::new_for_intra_component( +// ctx, +// *prototype.id(), +// *func_arg.id(), +// ip_id, +// ) +// .await?; +// } +// } +// } + +// Ok(value) +// } + +// #[derive(Debug, Clone, Deserialize, Serialize)] +// #[serde(rename_all = "camelCase")] +// pub struct ImportSkips { +// change_set_pk: ChangeSetPk, +// edge_skips: Vec, +// attribute_skips: Vec<(String, Vec)>, +// } + +// #[remain::sorted] +// #[derive(Debug, Clone, Serialize, Deserialize)] +// #[serde(tag = "type", rename_all = "camelCase")] +// pub enum ImportAttributeSkip { +// #[serde(rename_all = "camelCase")] +// KindMismatch { +// path: PropPath, +// expected_kind: PropKind, +// variant_kind: PropKind, +// }, +// MissingInputSocket(String), +// MissingOutputSocket(String), +// MissingProp(PropPath), +// } + +// #[derive(Clone, Debug, Deserialize, Serialize)] +// #[serde(tag = "type", rename_all = "camelCase")] +// pub enum ImportEdgeSkip { +// MissingInputSocket(String), +// MissingOutputSocket(String), +// } pub async fn import_pkg_from_pkg( ctx: &DalContext, @@ -634,7 +1362,7 @@ pub 
async fn import_pkg_from_pkg( ) -> PkgResult<( Option, Vec, - Option>, + Option>, )> { // We have to write the installed_pkg row first, so that we have an id, and rely on transaction // semantics to remove the row if anything in the installation process fails @@ -664,7 +1392,7 @@ pub async fn import_pkg_from_pkg( SiPkgKind::Module => { let (installed_schema_variant_ids, _, _) = import_change_set( ctx, - ctx.visibility().change_set_pk, + None, &metadata, &pkg.funcs()?, &pkg.schemas()?, @@ -679,93 +1407,85 @@ pub async fn import_pkg_from_pkg( Ok((installed_pkg_id, installed_schema_variant_ids, None)) } SiPkgKind::WorkspaceBackup => { - let mut ctx = ctx.clone_with_new_visibility(ctx.visibility().to_head()); - - let mut import_skips = vec![]; - - let workspace_pk = WorkspacePk::from_str( - metadata - .workspace_pk() - .ok_or(PkgError::WorkspacePkNotInBackup)?, - )?; - let workspace_name = metadata - .workspace_name() - .ok_or(PkgError::WorkspaceNameNotInBackup)?; - let default_change_set_name = metadata.default_change_set().unwrap_or("head"); - - Workspace::clear_or_create_workspace(&mut ctx, workspace_pk, workspace_name).await?; - - ctx.update_tenancy(Tenancy::new(workspace_pk)); - - let change_sets = pkg.change_sets()?; - let default_change_set = change_sets - .iter() - .find(|cs| cs.name() == default_change_set_name) - .ok_or(PkgError::WorkspaceBackupNoDefaultChangeSet( - default_change_set_name.into(), - ))?; - - let (_, attribute_skips, edge_skips) = import_change_set( - &ctx, - ChangeSetPk::NONE, - &metadata, - &default_change_set.funcs()?, - &default_change_set.schemas()?, - &default_change_set.components()?, - &default_change_set.edges()?, - installed_pkg_id, - &mut change_set_things, - &options, - ) - .await?; - - import_skips.push(ImportSkips { - change_set_pk: ChangeSetPk::NONE, - attribute_skips, - edge_skips, - }); - - for change_set in change_sets { - if change_set.name() == default_change_set_name { - continue; - } - - // Revert to head to create new 
change set - let ctx = ctx.clone_with_new_visibility(ctx.visibility().to_head()); - let new_cs = ChangeSet::new(&ctx, change_set.name(), None).await?; - // Switch to new change set visibility - let ctx = ctx.clone_with_new_visibility(ctx.visibility().to_change_set(new_cs.pk)); - - let (_, attribute_skips, edge_skips) = import_change_set( - &ctx, - new_cs.pk, - &metadata, - &change_set.funcs()?, - &change_set.schemas()?, - &change_set.components()?, - &change_set.edges()?, - installed_pkg_id, - &mut change_set_things, - &options, - ) - .await?; - - import_skips.push(ImportSkips { - change_set_pk: new_cs.pk, - attribute_skips, - edge_skips, - }); - } - - Ok(( - None, - vec![], - if import_skips.is_empty() { - None - } else { - Some(import_skips) - }, - )) + // let mut ctx = ctx.clone_with_new_visibility(ctx.visibility().to_head()); + + // let mut import_skips = vec![]; + + // let workspace_pk = WorkspacePk::from_str( + // metadata + // .workspace_pk() + // .ok_or(PkgError::WorkspacePkNotInBackup)?, + // )?; + // let workspace_name = metadata + // .workspace_name() + // .ok_or(PkgError::WorkspaceNameNotInBackup)?; + // let default_change_set_name = metadata.default_change_set().unwrap_or("head"); + + // Workspace::clear_or_create_workspace(&mut ctx, workspace_pk, workspace_name).await?; + + // ctx.update_tenancy(Tenancy::new(workspace_pk)); + + // let change_sets = pkg.change_sets()?; + // let default_change_set = change_sets + // .iter() + // .find(|cs| cs.name() == default_change_set_name) + // .ok_or(PkgError::WorkspaceBackupNoDefaultChangeSet( + // default_change_set_name.into(), + // ))?; + + // let (_, attribute_skips, edge_skips) = import_change_set( + // &ctx, + // Some(ChangeSetPk::NONE), + // &metadata, + // &default_change_set.funcs()?, + // &default_change_set.schemas()?, + // &default_change_set.components()?, + // &default_change_set.edges()?, + // installed_pkg_id, + // &mut change_set_things, + // &options, + // ) + // .await?; + + // 
import_skips.push(ImportSkips { + // change_set_pk: ChangeSetPk::NONE, + // attribute_skips, + // edge_skips, + // }); + + // for change_set in change_sets { + // if change_set.name() == default_change_set_name { + // continue; + // } + + // // Revert to head to create new change set + // let ctx = ctx.clone_with_new_visibility(ctx.visibility().to_head()); + // let new_cs = ChangeSet::new(&ctx, change_set.name(), None).await?; + // // Switch to new change set visibility + // let ctx = ctx.clone_with_new_visibility(ctx.visibility().to_change_set(new_cs.pk)); + + // let (_, attribute_skips, edge_skips) = import_change_set( + // &ctx, + // Some(new_cs.pk), + // &metadata, + // &change_set.funcs()?, + // &change_set.schemas()?, + // &change_set.components()?, + // &change_set.edges()?, + // installed_pkg_id, + // &mut change_set_things, + // &options, + // ) + // .await?; + + // import_skips.push(ImportSkips { + // change_set_pk: new_cs.pk, + // attribute_skips, + // edge_skips, + // }); + // } + + Ok((None, vec![], None)) } } } @@ -779,163 +1499,147 @@ pub async fn import_pkg(ctx: &DalContext, pkg_file_path: impl AsRef) -> Pk Ok(pkg) } -async fn create_func(ctx: &DalContext, func_spec: &FuncSpec) -> PkgResult { - let name = func_spec.name.clone(); +async fn create_func( + ctx: &DalContext, + func_spec: &SiPkgFunc<'_>, + is_builtin: bool, +) -> PkgResult { + let name = func_spec.name(); let func_spec_data = func_spec - .data - .clone() - .ok_or_else(|| PkgError::DataNotFound(name.clone()))?; + .data() + .ok_or(PkgError::DataNotFound(name.into()))?; - // How to handle name conflicts? 
- let mut func = Func::new( + let func = Func::new( ctx, name, - func_spec_data.backend_kind.into(), - func_spec_data.response_type.into(), + func_spec_data + .display_name() + .map(|display_name| display_name.to_owned()), + func_spec_data.description().map(|desc| desc.to_owned()), + func_spec_data.link().map(|l| l.to_string()), + func_spec_data.hidden(), + is_builtin, + func_spec_data.backend_kind().into(), + func_spec_data.response_type().into(), + Some(func_spec_data.handler().to_owned()), + Some(func_spec_data.code_base64().to_owned()), ) .await?; - func.set_display_name(ctx, func_spec_data.display_name.clone()) - .await?; - func.set_code_base64(ctx, Some(func_spec_data.code_base64.clone())) - .await?; - func.set_description(ctx, func_spec_data.description.clone()) - .await?; - func.set_handler(ctx, Some(func_spec_data.handler.clone())) - .await?; - func.set_hidden(ctx, func_spec_data.hidden).await?; - func.set_link(ctx, func_spec_data.link.map(|l| l.to_string())) - .await?; - Ok(func) } +#[allow(dead_code)] async fn update_func( ctx: &DalContext, - func: &mut Func, - func_spec_data: &FuncSpecData, -) -> PkgResult<()> { - func.set_name(ctx, func_spec_data.name.clone()).await?; - func.set_backend_kind(ctx, func_spec_data.backend_kind) - .await?; - func.set_backend_response_type(ctx, func_spec_data.response_type) + func: Func, + func_spec_data: &SiPkgFuncData, +) -> PkgResult { + let func = func + .modify(ctx, |func| { + func.name = func_spec_data.name().to_owned(); + func.backend_kind = func_spec_data.backend_kind().into(); + func.backend_response_type = func_spec_data.response_type().into(); + func.display_name = func_spec_data + .display_name() + .map(|display_name| display_name.to_owned()); + func.code_base64 = Some(func_spec_data.code_base64().to_owned()); + func.description = func_spec_data.description().map(|desc| desc.to_owned()); + func.handler = Some(func_spec_data.handler().to_owned()); + func.hidden = func_spec_data.hidden(); + func.link = 
func_spec_data.link().map(|l| l.to_string()); + + Ok(()) + }) .await?; - func.set_display_name(ctx, func_spec_data.display_name.clone()) - .await?; - func.set_code_base64(ctx, Some(func_spec_data.code_base64.clone())) - .await?; - func.set_description(ctx, func_spec_data.description.clone()) - .await?; - func.set_handler(ctx, Some(func_spec_data.handler.clone())) - .await?; - func.set_hidden(ctx, func_spec_data.hidden).await?; - func.set_link(ctx, func_spec_data.link.clone()).await?; - Ok(()) + Ok(func) } pub async fn import_func( ctx: &DalContext, - change_set_pk: ChangeSetPk, - func_spec: &FuncSpec, - hash: Option, + change_set_pk: Option, + func_spec: &SiPkgFunc<'_>, installed_pkg_id: Option, thing_map: &mut ThingMap, is_builtin: bool, ) -> PkgResult> { - let mut func = { - let existing_func = InstalledPkgAsset::list_for_kind_and_hash( - ctx, - InstalledPkgAssetKind::Func, - &hash.map_or_else(String::new, |h| h.to_string()), - ) - .await? - .pop(); - - if let Some(installed_func_record) = existing_func { - match installed_func_record.as_installed_func()? { - InstalledPkgAssetTyped::Func { id, .. } => match Func::get_by_id(ctx, &id).await? { - Some(mut func) => { - if is_builtin { - func.set_builtin(ctx, true).await? - } - - if let (Some(installed_pkg_id), Some(hash)) = (installed_pkg_id, hash) { - InstalledPkgAsset::new( - ctx, - InstalledPkgAssetTyped::new_for_func( - *func.id(), - installed_pkg_id, - hash.to_string(), - ), - ) - .await?; - } - - thing_map.insert( - change_set_pk, - func_spec.unique_id.clone(), - Thing::Func(func.to_owned()), - ); - None + let func = match change_set_pk { + None => { + let hash = func_spec.hash().to_string(); + let existing_func = + InstalledPkgAsset::list_for_kind_and_hash(ctx, InstalledPkgAssetKind::Func, &hash) + .await? + .pop(); + + let (func, created) = match existing_func { + Some(installed_func_record) => match installed_func_record.as_installed_func()? { + InstalledPkgAssetTyped::Func { id, .. 
} => { + (Func::get_by_id(ctx, id).await?, false) } - None => return Err(PkgError::InstalledFuncMissing(id)), + _ => unimplemented!("no idea what happens here!"), }, - _ => unreachable!(), - } - } else { - let existing_func = thing_map.get(change_set_pk, &func_spec.unique_id.clone()); - - match existing_func { - Some(Thing::Func(existing_func)) => { - let mut existing_func = existing_func.to_owned(); + None => (create_func(ctx, func_spec, is_builtin).await?, true), + }; - if func_spec.deleted { - existing_func.delete_by_id(ctx).await?; + if let Some(installed_pkg_id) = installed_pkg_id { + InstalledPkgAsset::new( + ctx, + InstalledPkgAssetTyped::new_for_func(func.id, installed_pkg_id, hash), + ) + .await?; + } - None - } else { - if let Some(data) = &func_spec.data { - update_func(ctx, &mut existing_func, data).await?; - } + thing_map.insert( + change_set_pk, + func_spec.unique_id().to_owned(), + Thing::Func(func.to_owned()), + ); - Some(existing_func) - } - } - _ => { - if func_spec.deleted { - // If we're "deleted" but there is no existing function, this means we're - // deleted only in a change set. Do nothing - None - } else { - Some(create_func(ctx, func_spec).await?) - } - } + if created { + Some(func) + } else { + None } } - }; - - if let Some(func) = func.as_mut() { - if is_builtin { - func.set_builtin(ctx, true).await? 
- } - - if let (Some(installed_pkg_id), Some(hash)) = (installed_pkg_id, hash) { - InstalledPkgAsset::new( - ctx, - InstalledPkgAssetTyped::new_for_func( - *func.id(), - installed_pkg_id, - hash.to_string(), - ), - ) - .await?; + Some(_) => { + unimplemented!("workspace import not fixed"); + // let existing_func = thing_map.get(change_set_pk, &func_spec.unique_id().to_owned()); + + // match existing_func { + // Some(Thing::Func(existing_func)) => { + // let mut existing_func = existing_func.to_owned(); + + // if func_spec.deleted() { + // existing_func.delete_by_id(ctx).await?; + + // None + // } else { + // if let Some(data) = func_spec.data() { + // update_func(ctx, &mut existing_func, data).await?; + // } + + // Some(existing_func) + // } + // } + // _ => { + // if func_spec.deleted() { + // // If we're "deleted" but there is no existing function, this means we're + // // deleted only in a change set. Do nothing + // None + // } else { + // Some(create_func(ctx, func_spec).await?) + // } + // } + // } } + }; + if let Some(func) = func.as_ref() { thing_map.insert( change_set_pk, - func_spec.unique_id.clone(), + func_spec.unique_id().to_owned(), Thing::Func(func.to_owned()), ); } @@ -946,154 +1650,137 @@ pub async fn import_func( async fn create_func_argument( ctx: &DalContext, func_id: FuncId, - func_arg: &FuncArgumentSpec, + func_arg: &SiPkgFuncArgument<'_>, ) -> PkgResult { Ok(FuncArgument::new( ctx, - func_arg.name.clone(), - func_arg.kind.into(), - func_arg.element_kind.as_ref().map(|&kind| kind.into()), + func_arg.name(), + func_arg.kind().into(), + func_arg.element_kind().to_owned().map(|&kind| kind.into()), func_id, ) .await?) 
} -async fn update_func_argument( +// async fn update_func_argument( +// ctx: &DalContext, +// existing_arg: &mut FuncArgument, +// func_id: FuncId, +// func_arg: &SiPkgFuncArgument<'_>, +// ) -> PkgResult<()> { +// existing_arg.set_name(ctx, func_arg.name()).await?; +// existing_arg.set_kind(ctx, func_arg.kind()).await?; +// let element_kind: Option = func_arg.element_kind().map(|&kind| kind.into()); +// existing_arg.set_element_kind(ctx, element_kind).await?; +// existing_arg.set_func_id(ctx, func_id).await?; +// +// Ok(()) +// } + +async fn import_func_arguments( ctx: &DalContext, - existing_arg: &mut FuncArgument, + change_set_pk: Option, func_id: FuncId, - func_arg: &FuncArgumentSpec, + func_arguments: &[SiPkgFuncArgument<'_>], + _thing_map: &mut ThingMap, ) -> PkgResult<()> { - existing_arg.set_name(ctx, &func_arg.name).await?; - existing_arg.set_kind(ctx, func_arg.kind).await?; - let element_kind: Option = - func_arg.element_kind.as_ref().map(|&kind| kind.into()); - existing_arg.set_element_kind(ctx, element_kind).await?; - existing_arg.set_func_id(ctx, func_id).await?; - - Ok(()) -} - -async fn import_func_arguments( - ctx: &DalContext, - change_set_pk: ChangeSetPk, - func_id: FuncId, - func_arguments: &[FuncArgumentSpec], - thing_map: &mut ThingMap, -) -> PkgResult<()> { - for arg in func_arguments { - match arg.unique_id.as_deref().map(|unique_id| { - ( - unique_id, - thing_map.get(change_set_pk, &unique_id.to_owned()), - ) - }) { - Some((unique_id, Some(Thing::FuncArgument(existing_arg)))) => { - let mut existing_arg = existing_arg.to_owned(); - - if arg.deleted { - existing_arg.delete_by_id(ctx).await?; - } else { - update_func_argument(ctx, &mut existing_arg, func_id, arg).await?; - thing_map.insert( - change_set_pk, - unique_id.to_owned(), - Thing::FuncArgument(existing_arg.to_owned()), - ); - } - } - Some((unique_id, _)) => { - if !arg.deleted { - let new_arg = create_func_argument(ctx, func_id, arg).await?; - thing_map.insert( - change_set_pk, - 
unique_id.to_owned(), - Thing::FuncArgument(new_arg), - ); - } - } - None => { - create_func_argument(ctx, func_id, arg).await?; - } - } - } + match change_set_pk { + None => { + for arg in func_arguments { + create_func_argument(ctx, func_id, arg).await?; + } + } + Some(_) => {} // for arg in func_arguments { + // let unique_id = + // arg.unique_id() + // .ok_or(PkgError::MissingUniqueIdForNode(format!( + // "func-argument-{}", + // arg.hash() + // )))?; + // + // match thing_map.get(change_set_pk, &unique_id.to_owned()) { + // Some(Thing::FuncArgument(existing_arg)) => { + // let mut existing_arg = existing_arg.to_owned(); + // + // if arg.deleted() { + // existing_arg.delete_by_id(ctx).await?; + // } else { + // update_func_argument(ctx, &mut existing_arg, func_id, arg).await?; + // thing_map.insert( + // change_set_pk, + // unique_id.to_owned(), + // Thing::FuncArgument(existing_arg.to_owned()), + // ); + // } + // } + // _ => { + // if !arg.deleted() { + // let new_arg = create_func_argument(ctx, func_id, arg).await?; + // thing_map.insert( + // change_set_pk, + // unique_id.to_owned(), + // Thing::FuncArgument(new_arg), + // ); + // } + // } + // } + // } + // } + } Ok(()) } async fn create_schema(ctx: &DalContext, schema_spec_data: &SiPkgSchemaData) -> PkgResult { - let mut schema = Schema::new(ctx, schema_spec_data.name(), &ComponentKind::Standard).await?; - schema - .set_ui_hidden(ctx, schema_spec_data.ui_hidden()) + let schema = Schema::new(ctx, schema_spec_data.name(), ComponentKind::Standard) + .await? 
+ .modify(ctx, |schema| { + schema.ui_hidden = schema_spec_data.ui_hidden(); + Ok(()) + }) .await?; - - let ui_menu = SchemaUiMenu::new( - ctx, - schema_spec_data - .category_name() - .unwrap_or_else(|| schema_spec_data.name()), - schema_spec_data.category(), - ) - .await?; - ui_menu.set_schema(ctx, schema.id()).await?; - Ok(schema) } -async fn update_schema( - ctx: &DalContext, - schema: &mut Schema, - schema_spec_data: &SiPkgSchemaData, -) -> PkgResult<()> { - if schema_spec_data.name() != schema.name() { - schema.set_name(ctx, schema_spec_data.name()).await?; - } - - if schema_spec_data.ui_hidden() != schema.ui_hidden() { - schema - .set_ui_hidden(ctx, schema_spec_data.ui_hidden()) - .await?; - } - - if let Some(mut ui_menu) = schema.ui_menus(ctx).await?.pop() { - if let Some(category_name) = schema_spec_data.category_name() { - if category_name != ui_menu.name() { - ui_menu.set_name(ctx, category_name).await?; - } - if schema_spec_data.category() != ui_menu.category() { - ui_menu.set_name(ctx, schema_spec_data.category()).await?; - } - } - } - - Ok(()) -} +// async fn update_schema( +// ctx: &DalContext, +// schema: &mut Schema, +// schema_spec_data: &SiPkgSchemaData, +// ) -> PkgResult<()> { +// if schema_spec_data.name() != schema.name() { +// schema.set_name(ctx, schema_spec_data.name()).await?; +// } + +// if schema_spec_data.ui_hidden() != schema.ui_hidden() { +// schema +// .set_ui_hidden(ctx, schema_spec_data.ui_hidden()) +// .await?; +// } + +// if let Some(mut ui_menu) = schema.ui_menus(ctx).await?.pop() { +// if let Some(category_name) = schema_spec_data.category_name() { +// if category_name != ui_menu.name() { +// ui_menu.set_name(ctx, category_name).await?; +// } +// if schema_spec_data.category() != ui_menu.category() { +// ui_menu.set_name(ctx, schema_spec_data.category()).await?; +// } +// } +// } + +// Ok(()) +// } async fn import_schema( ctx: &DalContext, - change_set_pk: ChangeSetPk, + change_set_pk: Option, schema_spec: &SiPkgSchema<'_>, 
installed_pkg_id: Option, thing_map: &mut ThingMap, - metadata: &SiPkgMetadata, ) -> PkgResult<(Option, Vec)> { - let hash = schema_spec.hash().to_string(); - let schema = { - // If this is a workspace backup and the schema is a builtin, then it's - // in the backup *only* because it has a custom schema variant. We - // expect that the builtin schema is already imported, so we use that. - // If it's missing, we're erroring out, but what we should do is try and - // find the missing builtin schema on the remote module index, and then - // import that. - if metadata.kind() == SiPkgKind::WorkspaceBackup && schema_spec.is_builtin() { - let existing_schema = Schema::find_by_name_builtin(ctx, schema_spec.name()) - .await? - .ok_or(PkgError::MissingBuiltinSchema( - schema_spec.name().to_owned(), - ))?; - - Some(existing_schema) - } else { + let schema_and_category = match change_set_pk { + None => { + let hash = schema_spec.hash().to_string(); let existing_schema = InstalledPkgAsset::list_for_kind_and_hash( ctx, InstalledPkgAssetKind::Schema, @@ -1102,68 +1789,84 @@ async fn import_schema( .await? .pop(); - if let Some(installed_schema_record) = existing_schema { - match installed_schema_record.as_installed_schema()? { - InstalledPkgAssetTyped::Schema { id, .. } => { - match Schema::get_by_id(ctx, &id).await? { - Some(schema) => Some(schema), - None => return Err(PkgError::InstalledSchemaMissing(id)), - } - } - _ => unreachable!(), - } - } else { - match schema_spec - .unique_id() - .and_then(|unique_id| thing_map.get(change_set_pk, &unique_id.to_owned())) - { - Some(Thing::Schema(schema)) => { - let mut schema = schema.to_owned(); + let data = schema_spec + .data() + .ok_or(PkgError::DataNotFound("schema".into()))?; - if schema_spec.deleted() { - schema.delete_by_id(ctx).await?; - // delete all schema children? + // NOTE(nick): with the new engine, the category moves to the schema variant, so we need + // to pull it off here, even if we find an existing schema. 
+ let category = data.category.clone(); - None - } else { - if let Some(data) = schema_spec.data() { - update_schema(ctx, &mut schema, data).await?; - } - - Some(schema) - } - } - _ => { - if schema_spec.deleted() { - None - } else { - Some( - create_schema( - ctx, - schema_spec - .data() - .ok_or(PkgError::DataNotFound("schema".into()))?, - ) - .await?, - ) + let schema = match existing_schema { + None => create_schema(ctx, data).await?, + Some(installed_schema_record) => { + match installed_schema_record.as_installed_schema()? { + InstalledPkgAssetTyped::Schema { id, .. } => { + Schema::get_by_id(ctx, id).await? } + _ => unimplemented!("no idea what happens here!"), } } + }; + + // Even if the asset is already installed, we write a record of the asset installation so that + // we can track the installed packages that share schemas. + if let Some(installed_pkg_id) = installed_pkg_id { + InstalledPkgAsset::new( + ctx, + InstalledPkgAssetTyped::new_for_schema(schema.id(), installed_pkg_id, hash), + ) + .await?; } - } - }; - if let Some(mut schema) = schema { - // Even if the asset is already installed, we write a record of the asset - // installation so that we can track the installed packages that share schemas. - if let Some(installed_pkg_id) = installed_pkg_id { - InstalledPkgAsset::new( - ctx, - InstalledPkgAssetTyped::new_for_schema(*schema.id(), installed_pkg_id, hash), - ) - .await?; + Some((schema, category)) + } + Some(_) => { + unimplemented!("workspace import not yet implemented") + // let unique_id = schema_spec + // .unique_id() + // .ok_or(PkgError::MissingUniqueIdForNode(format!( + // "schema {}", + // schema_spec.hash() + // )))?; + // + // match thing_map.get(change_set_pk, &unique_id.to_owned()) { + // Some(Thing::Schema(schema)) => { + // let mut schema = schema.to_owned(); + // + // if schema_spec.deleted() { + // schema.delete_by_id(ctx).await?; + // // delete all schema children? 
+ // + // None + // } else { + // if let Some(data) = schema_spec.data() { + // update_schema(ctx, &mut schema, data).await?; + // } + // + // Some(schema) + // } + // } + // _ => { + // if schema_spec.deleted() { + // None + // } else { + // Some( + // create_schema( + // ctx, + // schema_spec + // .data() + // .ok_or(PkgError::DataNotFound("schema".into()))?, + // ) + // .await?, + // ) + // } + // } + // } } + }; + if let Some((mut schema, category)) = schema_and_category { if let Some(unique_id) = schema_spec.unique_id() { thing_map.insert( change_set_pk, @@ -1172,178 +1875,178 @@ async fn import_schema( ); } - let mut installed_schema_variant_ids = vec![]; + let installed_schema_variant_ids = vec![]; for variant_spec in &schema_spec.variants()? { - let variant = import_schema_variant( + let _variant = import_schema_variant( ctx, change_set_pk, &mut schema, + category.clone(), variant_spec, installed_pkg_id, thing_map, - metadata, ) .await?; - if let Some(variant) = variant { - installed_schema_variant_ids.push(*variant.id()); - - let schema_default_variant_id = schema_spec - .data() - .as_ref() - .and_then(|data| data.default_schema_variant()); - - set_default_schema_variant_id( - ctx, - &mut schema, - schema_default_variant_id, - variant_spec.unique_id(), - *variant.id(), - ) - .await?; - - if let Some(variant_spec_data) = variant_spec.data() { - let func_unique_id = variant_spec_data.func_unique_id().to_owned(); - - if let Thing::Func(asset_func) = - thing_map - .get(change_set_pk, &func_unique_id) - .ok_or(PkgError::MissingFuncUniqueId(func_unique_id.to_string()))? 
- { - create_schema_variant_definition( - ctx, - schema_spec.clone(), - installed_pkg_id, - *variant.id(), - asset_func, - ) - .await?; - } - } - } - } - - Ok((Some(*schema.id()), installed_schema_variant_ids)) + // if let Some(variant) = variant { + // installed_schema_variant_ids.push(*variant.id()); + // + // if let Some(variant_spec_data) = variant_spec.data() { + // let func_unique_id = variant_spec_data.func_unique_id().to_owned(); + // + // set_default_schema_variant_id( + // ctx, + // change_set_pk, + // &mut schema, + // schema_spec + // .data() + // .as_ref() + // .and_then(|data| data.default_schema_variant()), + // variant_spec.unique_id(), + // *variant.id(), + // ) + // .await?; + // + // if let Thing::Func(asset_func) = + // thing_map + // .get(change_set_pk, &func_unique_id) + // .ok_or(PkgError::MissingFuncUniqueId(func_unique_id.to_string()))? + // { + // create_schema_variant_definition( + // ctx, + // schema_spec.clone(), + // installed_pkg_id, + // *variant.id(), + // asset_func, + // ) + // .await?; + // } + // } + // } + } + + Ok((Some(schema.id()), installed_schema_variant_ids)) } else { Ok((None, vec![])) } } -async fn set_default_schema_variant_id( - ctx: &DalContext, - schema: &mut Schema, - spec_default_unique_id: Option<&str>, - variant_unique_id: Option<&str>, - variant_id: SchemaVariantId, -) -> PkgResult<()> { - match (variant_unique_id, spec_default_unique_id) { - (None, _) | (Some(_), None) => { - if schema.default_schema_variant_id().is_none() { - schema - .set_default_schema_variant_id(ctx, Some(variant_id)) - .await?; - } - } - (Some(variant_unique_id), Some(spec_default_unique_id)) => { - if variant_unique_id == spec_default_unique_id { - let current_default_variant_id = schema - .default_schema_variant_id() - .copied() - .unwrap_or(SchemaVariantId::NONE); - - if variant_id != current_default_variant_id { - schema - .set_default_schema_variant_id(ctx, Some(variant_id)) - .await?; - } - } - } - } - - Ok(()) -} - -async fn 
create_schema_variant_definition( - ctx: &DalContext, - schema_spec: SiPkgSchema<'_>, - installed_pkg_id: Option, - schema_variant_id: SchemaVariantId, - asset_func: &Func, -) -> PkgResult<()> { - let hash = schema_spec.hash().to_string(); - let existing_definition = InstalledPkgAsset::list_for_kind_and_hash( - ctx, - InstalledPkgAssetKind::SchemaVariantDefinition, - &hash, - ) - .await? - .pop(); - - let definition = match existing_definition { - None => { - let maybe_schema_variant_definition = - SchemaVariantDefinition::get_by_func_id(ctx, *asset_func.id()).await?; - let mut schema_variant_definition = match maybe_schema_variant_definition { - None => { - let spec = schema_spec.to_spec().await?; - let metadata = SchemaVariantDefinitionJson::metadata_from_spec(spec)?; - - let mut svd = SchemaVariantDefinition::new( - ctx, - metadata.name, - metadata.menu_name, - metadata.category, - metadata.link, - metadata.color, - metadata.component_kind, - metadata.description, - *asset_func.id(), - ) - .await?; - - svd.set_component_type(ctx, metadata.component_type).await?; - - svd - } - Some(schema_variant_definition) => schema_variant_definition, - }; - - schema_variant_definition - .set_schema_variant_id(ctx, Some(schema_variant_id)) - .await?; - - schema_variant_definition - } - Some(existing_definition) => { - match existing_definition.as_installed_schema_variant_definition()? { - InstalledPkgAssetTyped::SchemaVariantDefinition { id, .. } => { - match SchemaVariantDefinition::get_by_id(ctx, &id).await? 
{ - Some(definition) => definition, - None => return Err(PkgError::InstalledSchemaVariantDefinitionMissing(id)), - } - } - _ => unreachable!( - "we are protected by the as_installed_schema_variant_definition method" - ), - } - } - }; - - if let Some(installed_pkg_id) = installed_pkg_id { - InstalledPkgAsset::new( - ctx, - InstalledPkgAssetTyped::new_for_schema_variant_definition( - *definition.id(), - installed_pkg_id, - hash, - ), - ) - .await?; - } - - Ok(()) -} - +// async fn set_default_schema_variant_id( +// ctx: &DalContext, +// change_set_pk: Option, +// schema: &mut Schema, +// spec_default_unique_id: Option<&str>, +// variant_unique_id: Option<&str>, +// variant_id: SchemaVariantId, +// ) -> PkgResult<()> { +// match (change_set_pk, variant_unique_id, spec_default_unique_id) { +// (None, _, _) | (Some(_), None, _) | (_, Some(_), None) => { +// if schema.default_schema_variant_id().is_none() { +// schema +// .set_default_schema_variant_id(ctx, Some(variant_id)) +// .await?; +// } +// } +// (Some(_), Some(variant_unique_id), Some(spec_default_unique_id)) => { +// if variant_unique_id == spec_default_unique_id { +// let current_default_variant_id = schema +// .default_schema_variant_id() +// .copied() +// .unwrap_or(SchemaVariantId::NONE); + +// if variant_id != current_default_variant_id { +// schema +// .set_default_schema_variant_id(ctx, Some(variant_id)) +// .await?; +// } +// } +// } +// } + +// Ok(()) +// } + +// async fn create_schema_variant_definition( +// ctx: &DalContext, +// schema_spec: SiPkgSchema<'_>, +// installed_pkg_id: Option, +// schema_variant_id: SchemaVariantId, +// asset_func: &Func, +// ) -> PkgResult<()> { +// let hash = schema_spec.hash().to_string(); +// let existing_definition = InstalledPkgAsset::list_for_kind_and_hash( +// ctx, +// InstalledPkgAssetKind::SchemaVariantDefinition, +// &hash, +// ) +// .await? 
+// .pop(); + +// let definition = match existing_definition { +// None => { +// let maybe_schema_variant_definition = +// SchemaVariantDefinition::get_by_func_id(ctx, *asset_func.id()).await?; +// let mut schema_variant_definition = match maybe_schema_variant_definition { +// None => { +// let spec = schema_spec.to_spec().await?; +// let metadata = SchemaVariantDefinitionJson::metadata_from_spec(spec)?; + +// let mut svd = SchemaVariantDefinition::new( +// ctx, +// metadata.name, +// metadata.menu_name, +// metadata.category, +// metadata.link, +// metadata.color, +// metadata.component_kind, +// metadata.description, +// *asset_func.id(), +// ) +// .await?; + +// svd.set_component_type(ctx, metadata.component_type).await?; +// svd +// } +// Some(schema_variant_definition) => schema_variant_definition, +// }; + +// schema_variant_definition +// .set_schema_variant_id(ctx, Some(schema_variant_id)) +// .await?; + +// schema_variant_definition +// } +// Some(existing_definition) => { +// match existing_definition.as_installed_schema_variant_definition()? { +// InstalledPkgAssetTyped::SchemaVariantDefinition { id, .. } => { +// match SchemaVariantDefinition::get_by_id(ctx, &id).await? 
{ +// Some(definition) => definition, +// None => return Err(PkgError::InstalledSchemaVariantDefinitionMissing(id)), +// } +// } +// _ => unreachable!( +// "we are protected by the as_installed_schema_variant_definition method" +// ), +// } +// } +// }; + +// if let Some(installed_pkg_id) = installed_pkg_id { +// InstalledPkgAsset::new( +// ctx, +// InstalledPkgAssetTyped::new_for_schema_variant_definition( +// *definition.id(), +// installed_pkg_id, +// hash, +// ), +// ) +// .await?; +// } + +// Ok(()) +// } + +#[allow(dead_code)] #[derive(Clone, Debug)] struct AttrFuncInfo { func_unique_id: String, @@ -1351,6 +2054,7 @@ struct AttrFuncInfo { inputs: Vec, } +#[allow(dead_code)] #[remain::sorted] #[derive(Clone, Debug)] enum DefaultValueInfo { @@ -1374,11 +2078,12 @@ struct PropVisitContext<'a> { pub attr_funcs: Mutex>, pub default_values: Mutex>, pub map_key_funcs: Mutex>, + pub change_set_pk: Option, } async fn import_leaf_function( ctx: &DalContext, - change_set_pk: ChangeSetPk, + change_set_pk: Option, leaf_func: SiPkgLeafFunction<'_>, schema_variant_id: SchemaVariantId, thing_map: &mut ThingMap, @@ -1406,115 +2111,95 @@ async fn import_leaf_function( Ok(()) } -// TODO: cache this so we don't fetch it for every socket -async fn get_identity_func( - ctx: &DalContext, -) -> PkgResult<(Func, FuncBinding, FuncBindingReturnValue, FuncArgument)> { - let func_name = "si:identity"; - let func_argument_name = "identity"; - let func: Func = Func::find_by_name(ctx, func_name) - .await? - .ok_or_else(|| FuncError::NotFoundByName(func_name.to_string()))?; - - let func_id = *func.id(); - let (func_binding, func_binding_return_value) = FuncBinding::create_and_execute( - ctx, - serde_json::json![{ "identity": null }], - func_id, - vec![], - ) - .await?; - let func_argument = FuncArgument::find_by_name_for_func(ctx, func_argument_name, func_id) - .await? 
- .ok_or_else(|| { - PkgError::MissingIntrinsicFuncArgument( - func_name.to_string(), - func_argument_name.to_string(), - ) - })?; - - Ok((func, func_binding, func_binding_return_value, func_argument)) +async fn get_identity_func(ctx: &DalContext) -> PkgResult { + Ok(Func::find_intrinsic(ctx, IntrinsicFunc::Identity).await?) } async fn create_socket( ctx: &DalContext, data: &SiPkgSocketData, - schema_id: SchemaId, schema_variant_id: SchemaVariantId, -) -> PkgResult<(Socket, Option, Option)> { - let (identity_func, identity_func_binding, identity_fbrv, _) = get_identity_func(ctx).await?; +) -> PkgResult<(Option, Option)> { + let identity_func_id = get_identity_func(ctx).await?; - let (mut socket, ip, ep) = match data.kind() { + let (ip, ep) = match data.kind() { SocketSpecKind::Input => { - let (ip, socket) = InternalProvider::new_explicit_with_socket( + let ip = InternalProvider::new_explicit( ctx, schema_variant_id, data.name(), - *identity_func.id(), - *identity_func_binding.id(), - *identity_fbrv.id(), - data.connection_annotations(), + identity_func_id, data.arity().into(), - false, + ProviderKind::Standard, ) .await?; - (socket, Some(ip), None) + (Some(ip), None) } SocketSpecKind::Output => { - let (ep, socket) = ExternalProvider::new_with_socket( + let ep = ExternalProvider::new( ctx, - schema_id, schema_variant_id, data.name(), None, - *identity_func.id(), - *identity_func_binding.id(), - *identity_fbrv.id(), - data.connection_annotations(), + identity_func_id, data.arity().into(), - false, + ProviderKind::Standard, ) .await?; - (socket, None, Some(ep)) + (None, Some(ep)) } }; - socket.set_ui_hidden(ctx, data.ui_hidden()).await?; + // TODO: add modify_by_id to socket, ui hide frames + // socket.set_ui_hidden(ctx, data.ui_hidden()).await?; - Ok((socket, ip, ep)) + Ok((ip, ep)) } async fn import_socket( ctx: &DalContext, - change_set_pk: ChangeSetPk, + change_set_pk: Option, socket_spec: SiPkgSocket<'_>, - schema_id: SchemaId, schema_variant_id: 
SchemaVariantId, thing_map: &mut ThingMap, ) -> PkgResult<()> { - let (socket, ip, ep) = { - match socket_spec - .unique_id() - .and_then(|unique_id| thing_map.get(change_set_pk, &unique_id.to_owned())) - { - Some(Thing::Socket(socket_box)) => { - ( - socket_box.0.to_owned(), - socket_box.1.to_owned(), - socket_box.2.to_owned(), - ) - // prop trees, including sockets and providers, are created whole cloth, so - // should not have differences in change sets (currently) - } - _ => { - let data = socket_spec - .data() - .ok_or(PkgError::DataNotFound(socket_spec.name().into()))?; - - create_socket(ctx, data, schema_id, schema_variant_id).await? - } + let (ip, ep) = match change_set_pk { + None => { + let data = socket_spec + .data() + .ok_or(PkgError::DataNotFound(socket_spec.name().into()))?; + + create_socket(ctx, data, schema_variant_id).await? + } + Some(_) => { + todo!("workspace backup imports"); + // let unique_id = socket_spec + // .unique_id() + // .ok_or(PkgError::MissingUniqueIdForNode(format!( + // "socket {}", + // socket_spec.hash() + // )))?; + // + // match thing_map.get(change_set_pk, &unique_id.to_owned()) { + // Some(Thing::Socket(socket_box)) => { + // ( + // socket_box.0.to_owned(), + // socket_box.1.to_owned(), + // socket_box.2.to_owned(), + // ) + // // prop trees, including sockets and providers, are created whole cloth, so + // // should not have differences in change sets (currently) + // } + // _ => { + // let data = socket_spec + // .data() + // .ok_or(PkgError::DataNotFound(socket_spec.name().into()))?; + // + // create_socket(ctx, data, schema_id, schema_variant_id).await? 
+ // } + // } } }; @@ -1522,7 +2207,7 @@ async fn import_socket( thing_map.insert( change_set_pk, unique_id.to_owned(), - Thing::Socket(Box::new((socket, ip.to_owned(), ep.to_owned()))), + Thing::Socket(Box::new((ip.to_owned(), ep.to_owned()))), ); } @@ -1536,7 +2221,7 @@ async fn import_socket( ctx, change_set_pk, schema_variant_id, - *ep.id(), + ep.id(), func_unique_id, socket_spec.inputs()?.drain(..).map(Into::into).collect(), thing_map, @@ -1550,91 +2235,56 @@ async fn import_socket( Ok(()) } -async fn create_action_prototype( +async fn create_action_protoype( ctx: &DalContext, action_func_spec: &SiPkgActionFunc<'_>, func_id: FuncId, schema_variant_id: SchemaVariantId, ) -> PkgResult { - let mut proto = ActionPrototype::new( + let proto = ActionPrototype::new( ctx, - func_id, + action_func_spec.name(), action_func_spec.kind().into(), - ActionPrototypeContext { schema_variant_id }, + schema_variant_id, + func_id, ) .await?; - if let Some(name) = action_func_spec.name() { - proto.set_name(ctx, Some(name)).await?; - } - Ok(proto) } -async fn create_authentication_prototype( - ctx: &DalContext, - func_id: FuncId, - schema_variant_id: SchemaVariantId, -) -> PkgResult { - Ok(AuthenticationPrototype::new( - ctx, - func_id, - AuthenticationPrototypeContext { schema_variant_id }, - ) - .await?) 
-} - -async fn update_action_prototype( - ctx: &DalContext, - prototype: &mut ActionPrototype, - action_func_spec: &SiPkgActionFunc<'_>, - func_id: FuncId, - schema_variant_id: SchemaVariantId, -) -> PkgResult<()> { - if prototype.schema_variant_id() != schema_variant_id { - prototype - .set_schema_variant_id(ctx, schema_variant_id) - .await?; - } - - if prototype.name() != action_func_spec.name() { - prototype.set_name(ctx, action_func_spec.name()).await?; - } - - if prototype.func_id() != func_id { - prototype.set_func_id(ctx, func_id).await?; - } - - let kind: ActionKind = action_func_spec.kind().into(); - if *prototype.kind() != kind { - prototype.set_kind(ctx, kind).await?; - } - - Ok(()) -} - -async fn update_authentication_prototype( - ctx: &DalContext, - prototype: &mut AuthenticationPrototype, - func_id: FuncId, - schema_variant_id: SchemaVariantId, -) -> PkgResult<()> { - if prototype.schema_variant_id() != schema_variant_id { - prototype - .set_schema_variant_id(ctx, schema_variant_id) - .await?; - } - - if prototype.func_id() != func_id { - prototype.set_func_id(ctx, func_id).await?; - } - - Ok(()) -} +// async fn update_action_prototype( +// ctx: &DalContext, +// prototype: &mut ActionPrototype, +// action_func_spec: &SiPkgActionFunc<'_>, +// func_id: FuncId, +// schema_variant_id: SchemaVariantId, +// ) -> PkgResult<()> { +// if prototype.schema_variant_id() != schema_variant_id { +// prototype +// .set_schema_variant_id(ctx, schema_variant_id) +// .await?; +// } + +// if prototype.name() != action_func_spec.name() { +// prototype.set_name(ctx, action_func_spec.name()).await?; +// } + +// if prototype.func_id() != func_id { +// prototype.set_func_id(ctx, func_id).await?; +// } + +// let kind: ActionKind = action_func_spec.kind().into(); +// if *prototype.kind() != kind { +// prototype.set_kind(ctx, kind).await?; +// } + +// Ok(()) +// } async fn import_action_func( ctx: &DalContext, - change_set_pk: ChangeSetPk, + change_set_pk: Option, 
action_func_spec: &SiPkgActionFunc<'_>, schema_variant_id: SchemaVariantId, thing_map: &ThingMap, @@ -1642,34 +2292,35 @@ async fn import_action_func( let prototype = match thing_map.get(change_set_pk, &action_func_spec.func_unique_id().to_owned()) { Some(Thing::Func(func)) => { - let func_id = *func.id(); + let func_id = func.id; if let Some(unique_id) = action_func_spec.unique_id() { match thing_map.get(change_set_pk, &unique_id.to_owned()) { - Some(Thing::ActionPrototype(prototype)) => { - let mut prototype = prototype.to_owned(); - - if action_func_spec.deleted() { - prototype.delete_by_id(ctx).await?; - } else { - update_action_prototype( - ctx, - &mut prototype, - action_func_spec, - func_id, - schema_variant_id, - ) - .await?; - } - - Some(prototype) + Some(Thing::ActionPrototype(_prototype)) => { + todo!("workspace import paths not yet implemented"); + // let mut prototype = prototype.to_owned(); + // + // if action_func_spec.deleted() { + // prototype.delete_by_id(ctx).await?; + // } else { + // update_action_prototype( + // ctx, + // &mut prototype, + // action_func_spec, + // func_id, + // schema_variant_id, + // ) + // .await?; + // } + // + // Some(prototype) } _ => { if action_func_spec.deleted() { None } else { Some( - create_action_prototype( + create_action_protoype( ctx, action_func_spec, func_id, @@ -1682,7 +2333,7 @@ async fn import_action_func( } } else { Some( - create_action_prototype(ctx, action_func_spec, func_id, schema_variant_id) + create_action_protoype(ctx, action_func_spec, func_id, schema_variant_id) .await?, ) } @@ -1699,47 +2350,66 @@ async fn import_action_func( async fn import_auth_func( ctx: &DalContext, - change_set_pk: ChangeSetPk, + change_set_pk: Option, func_spec: &SiPkgAuthFunc<'_>, schema_variant_id: SchemaVariantId, thing_map: &ThingMap, ) -> PkgResult> { let prototype = match thing_map.get(change_set_pk, &func_spec.func_unique_id().to_owned()) { Some(Thing::Func(func)) => { - let func_id = *func.id(); + let func_id = 
func.id; if let Some(unique_id) = func_spec.unique_id() { match thing_map.get(change_set_pk, &unique_id.to_owned()) { - Some(Thing::AuthPrototype(prototype)) => { - let mut prototype = prototype.to_owned(); - + Some(Thing::AuthPrototype(_prototype)) => { + todo!("workspace import paths not yet implemented"); + // AuthenticationPrototype is represented by just and edge, + // Since the info that matters is only then func_id and the schema_variant_id + // Do we need to update it? + + // let mut prototype = prototype.to_owned(); + // + // if func_spec.deleted() { + // prototype.delete_by_id(ctx).await?; + // } else { + // update_authentication_prototype( + // ctx, + // &mut prototype, + // func_id, + // schema_variant_id, + // ) + // .await?; + // } + // + // Some(prototype) + } + _ => { if func_spec.deleted() { - prototype.delete_by_id(ctx).await?; + None } else { - update_authentication_prototype( + SchemaVariant::new_authentication_prototype( ctx, - &mut prototype, func_id, schema_variant_id, ) .await?; - } - Some(prototype) - } - _ => { - if func_spec.deleted() { - None - } else { - Some( - create_authentication_prototype(ctx, func_id, schema_variant_id) - .await?, - ) + Some(AuthenticationPrototype { + id: AuthenticationPrototypeId::generate(), + func_id, + schema_variant_id, + }) } } } } else { - Some(create_authentication_prototype(ctx, func_id, schema_variant_id).await?) 
+ SchemaVariant::new_authentication_prototype(ctx, func_id, schema_variant_id) + .await?; + Some(AuthenticationPrototype { + id: AuthenticationPrototypeId::generate(), + func_id, + schema_variant_id, + }) } } _ => { @@ -1761,9 +2431,7 @@ struct CreatePropsSideEffects { impl IntoIterator for CreatePropsSideEffects { type Item = CreatePropsSideEffects; - type IntoIter = std::vec::IntoIter; - fn into_iter(self) -> Self::IntoIter { vec![self].into_iter() } @@ -1781,6 +2449,7 @@ impl Extend for CreatePropsSideEffects { async fn create_props( ctx: &DalContext, + change_set_pk: Option, variant_spec: &SiPkgSchemaVariant<'_>, prop_root: SchemaVariantSpecPropRoot, prop_root_prop_id: PropId, @@ -1792,9 +2461,14 @@ async fn create_props( attr_funcs: Mutex::new(vec![]), default_values: Mutex::new(vec![]), map_key_funcs: Mutex::new(vec![]), + change_set_pk, }; - let parent_info = (prop_root_prop_id, PropPath::new(prop_root.path_parts())); + let parent_info = ParentPropInfo { + prop_id: prop_root_prop_id, + path: PropPath::new(prop_root.path_parts()), + kind: PropKind::Object, + }; variant_spec .visit_prop_tree(prop_root, create_prop, Some(parent_info), &context) @@ -1807,522 +2481,425 @@ async fn create_props( }) } -async fn update_schema_variant( - ctx: &DalContext, - schema_variant: &mut SchemaVariant, - name: &str, - schema_id: SchemaId, -) -> PkgResult<()> { - let current_schema_id = schema_variant - .schema(ctx) - .await? - .map(|schema| *schema.id()) - .ok_or(SchemaVariantError::MissingSchema(*schema_variant.id()))?; - - if schema_id != current_schema_id { - schema_variant.set_schema(ctx, &schema_id).await?; - } - - if schema_variant.name() != name { - schema_variant.set_name(ctx, name).await?; - } - - Ok(()) -} - -/// Duplicate all the functions, and return a thing_map with them included, so -/// that we can import a standalone schema variant. 
-pub async fn clone_and_import_funcs(ctx: &DalContext, funcs: Vec) -> PkgResult { - let mut thing_map = ThingMap::new(); - - for func_spec in funcs { - let func = if func::is_intrinsic(&func_spec.name) - || SPECIAL_CASE_FUNCS.contains(&func_spec.name.as_str()) - { - Func::find_by_name(ctx, &func_spec.name) - .await? - .ok_or(PkgError::MissingIntrinsicFunc(func_spec.name.to_owned()))? - } else { - let func = create_func(ctx, &func_spec).await?; - - if !func_spec.arguments.is_empty() { - import_func_arguments( - ctx, - ChangeSetPk::NONE, - *func.id(), - &func_spec.arguments, - &mut thing_map, - ) - .await?; - } - - func - }; - - thing_map.insert(ChangeSetPk::NONE, func_spec.unique_id, Thing::Func(func)); - } - - Ok(thing_map) -} - -pub async fn import_schema_variant( +// async fn update_schema_variant( +// ctx: &DalContext, +// schema_variant: &mut SchemaVariant, +// name: &str, +// schema_id: SchemaId, +// ) -> PkgResult<()> { +// let current_schema_id = schema_variant +// .schema(ctx) +// .await? +// .map(|schema| *schema.id()) +// .ok_or(SchemaVariantError::MissingSchema(*schema_variant.id()))?; + +// if schema_id != current_schema_id { +// schema_variant.set_schema(ctx, &schema_id).await?; +// } + +// if schema_variant.name() != name { +// schema_variant.set_name(ctx, name).await?; +// } + +// Ok(()) +// } + +async fn import_schema_variant( ctx: &DalContext, - change_set_pk: ChangeSetPk, + change_set_pk: Option, schema: &mut Schema, + category: String, variant_spec: &SiPkgSchemaVariant<'_>, installed_pkg_id: Option, thing_map: &mut ThingMap, - metadata: &SiPkgMetadata, ) -> PkgResult> { - // Builtin variants should already be imported, so we can just find it and - // return it. However, what we *should* do is check to see if the installed - // variant is the one we want, and if not, find the missing builtin variant - // on the remote module index, and then import that. 
- if metadata.kind() == SiPkgKind::WorkspaceBackup && variant_spec.is_builtin() { - let variant = schema - .find_variant_by_name(ctx, variant_spec.name()) + let schema_variant = match change_set_pk { + None => { + let hash = variant_spec.hash().to_string(); + let existing_schema_variant = InstalledPkgAsset::list_for_kind_and_hash( + ctx, + InstalledPkgAssetKind::SchemaVariant, + &hash, + ) .await? - .ok_or(PkgError::MissingBuiltinSchemaVariant( - variant_spec.name().to_owned(), - ))?; - - return Ok(Some(variant)); - } - - let hash = variant_spec.hash().to_string(); - let mut schema_variant = { - let existing_schema_variant = InstalledPkgAsset::list_for_kind_and_hash( - ctx, - InstalledPkgAssetKind::SchemaVariant, - &hash, - ) - .await? - .pop(); + .pop(); - if let Some(installed_sv_record) = existing_schema_variant { - match installed_sv_record.as_installed_schema_variant()? { - InstalledPkgAssetTyped::SchemaVariant { id, .. } => { - SchemaVariant::get_by_id(ctx, &id).await? + let (variant, created) = match existing_schema_variant { + Some(installed_sv_record) => { + match installed_sv_record.as_installed_schema_variant()? { + InstalledPkgAssetTyped::SchemaVariant { id, .. } => (SchemaVariant::get_by_id(ctx, id) .await?, false), + _ => unreachable!( + "the as_installed_schema_variant method ensures we cannot hit this branch" + ), + } } - _ => unreachable!( - "the as_installed_schema_variant method ensures we cannot hit this branch" + None => ( + // FIXME(nick): move category, color, and all metadata to variant or somewhere + // else. It should not be on schema. + SchemaVariant::new(ctx, schema.id(), variant_spec.name(), category) + .await? 
+ .0, + true, ), - } - } else { - match variant_spec - .unique_id() - .and_then(|unique_id| thing_map.get(change_set_pk, &unique_id.to_owned())) - { - Some(Thing::SchemaVariant(variant)) => { - let mut variant = variant.to_owned(); - update_schema_variant(ctx, &mut variant, variant_spec.name(), *schema.id()) - .await?; - - if variant_spec.deleted() { - variant.delete_by_id(ctx).await?; + }; - None - } else { - Some(variant) - } - } - _ => { - if variant_spec.deleted() { - None - } else { - let mut variant = - SchemaVariant::new(ctx, *schema.id(), variant_spec.name()) - .await? - .0; - - if matches!(metadata.kind(), SiPkgKind::Module) { - variant - .set_pkg_created_at(ctx, Some(metadata.created_at())) - .await?; - } + if let Some(installed_pkg_id) = installed_pkg_id { + InstalledPkgAsset::new( + ctx, + InstalledPkgAssetTyped::new_for_schema_variant( + variant.id(), + installed_pkg_id, + hash, + ), + ) + .await?; + } - Some(variant) - } - } + if created { + Some(variant) + } else { + None } } - }; - - if let Some(schema_variant) = schema_variant.as_mut() { - if let Some(installed_pkg_id) = installed_pkg_id { - InstalledPkgAsset::new( - ctx, - InstalledPkgAssetTyped::new_for_schema_variant( - *schema_variant.id(), - installed_pkg_id, - hash, - ), - ) - .await?; + Some(_) => { + unimplemented!("workspace import is not working at this time") + // let unique_id = variant_spec + // .unique_id() + // .ok_or(PkgError::MissingUniqueIdForNode(format!( + // "variant {}", + // variant_spec.hash() + // )))?; + // + // match thing_map.get(change_set_pk, &unique_id.to_owned()) { + // Some(Thing::SchemaVariant(variant)) => { + // let mut variant = variant.to_owned(); + // update_schema_variant(ctx, &mut variant, variant_spec.name(), *schema.id()) + // .await?; + // + // if variant_spec.deleted() { + // variant.delete_by_id(ctx).await?; + // + // None + // } else { + // Some(variant) + // } + // } + // _ => { + // if variant_spec.deleted() { + // None + // } else { + // Some( + // 
SchemaVariant::new(ctx, *schema.id(), variant_spec.name()) + // .await? + // .0, + // ) + // } + // } + // } } + }; - if let Some(unique_id) = variant_spec.unique_id() { - thing_map.insert( - change_set_pk, - unique_id.to_owned(), - Thing::SchemaVariant(schema_variant.to_owned()), - ); - } + let schema_variant = match schema_variant { + None => None, + Some(schema_variant) => { + if let Some(unique_id) = variant_spec.unique_id() { + thing_map.insert( + change_set_pk, + unique_id.to_owned(), + Thing::SchemaVariant(schema_variant.to_owned()), + ); + } - if let Some(data) = variant_spec.data() { - if let (Some(spec_color), current_color) = - (data.color(), schema_variant.color(ctx).await?) - { - if current_color.is_none() - || spec_color - != current_color.expect("is none condition ensures this won't panic") - { - schema_variant.set_color(ctx, spec_color.to_owned()).await?; + if let Some(data) = variant_spec.data() { + if let Some(color) = data.color() { + let current_color = schema_variant.get_color(ctx).await?; + if current_color.as_deref() != Some(color) { + schema_variant.set_color(ctx, color).await? + } } - } - } - let mut side_effects = CreatePropsSideEffects::default(); + schema_variant + .set_type(ctx, data.component_type().to_string()) + .await?; + } - let domain_prop_id = schema_variant - .find_prop(ctx, &["root", "domain"]) - .await? - .id() - .to_owned(); + let mut side_effects = CreatePropsSideEffects::default(); - side_effects.extend( - create_props( + let domain_prop_id = Prop::find_prop_id_by_path( ctx, - variant_spec, - SchemaVariantSpecPropRoot::Domain, - domain_prop_id, - *schema_variant.id(), + schema_variant.id(), + &PropPath::new(["root", "domain"]), ) - .await?, - ); + .await?; - let secrets_prop_id = schema_variant - .find_prop(ctx, &["root", "secrets"]) - .await? 
- .id() - .to_owned(); + side_effects.extend( + create_props( + ctx, + change_set_pk, + variant_spec, + SchemaVariantSpecPropRoot::Domain, + domain_prop_id, + schema_variant.id(), + ) + .await?, + ); - side_effects.extend( - create_props( + let resource_value_prop_id = Prop::find_prop_id_by_path( ctx, - variant_spec, - SchemaVariantSpecPropRoot::Secrets, - secrets_prop_id, - *schema_variant.id(), + schema_variant.id(), + &PropPath::new(["root", "resource_value"]), ) - .await?, - ); + .await?; - if !variant_spec.secret_definitions()?.is_empty() { - let secret_definition_prop_id = *Prop::new_without_ui_optionals( + side_effects.extend( + create_props( + ctx, + change_set_pk, + variant_spec, + SchemaVariantSpecPropRoot::ResourceValue, + resource_value_prop_id, + schema_variant.id(), + ) + .await?, + ); + + let secrets_prop_id = Prop::find_prop_id_by_path( ctx, - "secret_definition", - PropKind::Object, - *schema_variant.id(), - Some(*schema_variant.find_prop(ctx, &["root"]).await?.id()), + schema_variant.id(), + &PropPath::new(["root", "secrets"]), ) - .await? 
- .id(); + .await?; side_effects.extend( create_props( ctx, + change_set_pk, variant_spec, - SchemaVariantSpecPropRoot::SecretDefinition, - secret_definition_prop_id, - *schema_variant.id(), + SchemaVariantSpecPropRoot::Secrets, + secrets_prop_id, + schema_variant.id(), ) .await?, ); - } - match schema_variant - .find_prop(ctx, &["root", "resource_value"]) - .await - { - Ok(resource_value_prop) => { + if !variant_spec.secret_definitions()?.is_empty() { + let root_prop_id = + Prop::find_prop_id_by_path(ctx, schema_variant.id(), &PropPath::new(["root"])) + .await?; + + let secret_definition_prop = Prop::new( + ctx, + "secret_definition", + PropKind::Object, + false, + None, + None, + PropParent::OrderedProp(root_prop_id), + ) + .await?; + let secret_definition_prop_id = secret_definition_prop.id(); + side_effects.extend( create_props( ctx, + change_set_pk, variant_spec, - SchemaVariantSpecPropRoot::ResourceValue, - *resource_value_prop.id(), - *schema_variant.id(), + SchemaVariantSpecPropRoot::SecretDefinition, + secret_definition_prop_id, + schema_variant.id(), ) .await?, ); } - Err(SchemaVariantError::PropNotFoundAtPath(_, _, _)) => { - warn!("Cannot find /root/resource_value prop, so skipping creating props under the resource value. If the /root/resource_value pr has been merged, this should be an error!"); - } - Err(err) => Err(err)?, - }; - - if let Some(data) = variant_spec.data() { - schema_variant - .finalize(ctx, Some(data.component_type().into())) - .await?; - } - for action_func in &variant_spec.action_funcs()? { - let prototype = import_action_func( - ctx, - change_set_pk, - action_func, - *schema_variant.id(), - thing_map, - ) - .await?; + SchemaVariant::finalize(ctx, schema_variant.id()).await?; - if let (Some(prototype), Some(unique_id)) = (prototype, action_func.unique_id()) { - thing_map.insert( - change_set_pk, - unique_id.to_owned(), - Thing::ActionPrototype(prototype), - ); + for socket in variant_spec.sockets()? 
{ + import_socket(ctx, change_set_pk, socket, schema_variant.id(), thing_map).await?; } - } - for func in &variant_spec.auth_funcs()? { - let prototype = - import_auth_func(ctx, change_set_pk, func, *schema_variant.id(), thing_map).await?; - - if let (Some(prototype), Some(unique_id)) = (prototype, func.unique_id()) { - thing_map.insert( + for action_func in &variant_spec.action_funcs()? { + let prototype = import_action_func( + ctx, change_set_pk, - unique_id.to_owned(), - Thing::AuthPrototype(prototype), - ); - } - } - - for leaf_func in variant_spec.leaf_functions()? { - import_leaf_function( - ctx, - change_set_pk, - leaf_func, - *schema_variant.id(), - thing_map, - ) - .await?; - } - - for socket in variant_spec.sockets()? { - import_socket( - ctx, - change_set_pk, - socket, - *schema.id(), - *schema_variant.id(), - thing_map, - ) - .await?; - } - - // Default values must be set before attribute functions are configured so they don't - // override the prototypes set there - for default_value_info in side_effects.default_values { - set_default_value(ctx, default_value_info).await?; - } - - // Set a default name value for all name props, this ensures region has a name before - // the function is executed - { - let name_prop = schema_variant - .find_prop(ctx, &["root", "si", "name"]) - .await?; - let name_default_value_info = DefaultValueInfo::String { - prop_id: *name_prop.id(), - default_value: schema.name().to_lowercase(), - }; - - set_default_value(ctx, name_default_value_info).await?; - } - - for si_prop_func in variant_spec.si_prop_funcs()? { - let prop = schema_variant - .find_prop(ctx, &si_prop_func.kind().prop_path()) + action_func, + schema_variant.id(), + thing_map, + ) .await?; - import_attr_func_for_prop( - ctx, - change_set_pk, - *schema_variant.id(), - AttrFuncInfo { - func_unique_id: si_prop_func.func_unique_id().to_owned(), - prop_id: *prop.id(), - inputs: si_prop_func - .inputs()? 
- .iter() - .map(|input| input.to_owned().into()) - .collect(), - }, - None, - thing_map, - ) - .await?; - } - let mut has_resource_value_func = false; - for root_prop_func in variant_spec.root_prop_funcs()? { - if root_prop_func.prop() == SchemaVariantSpecPropRoot::ResourceValue { - has_resource_value_func = true; + if let (Some(prototype), Some(unique_id)) = (prototype, action_func.unique_id()) { + thing_map.insert( + change_set_pk, + unique_id.to_owned(), + Thing::ActionPrototype(prototype), + ); + } } - let prop = schema_variant - .find_prop(ctx, root_prop_func.prop().path_parts()) + for auth_func in &variant_spec.auth_funcs()? { + let prototype = import_auth_func( + ctx, + change_set_pk, + auth_func, + schema_variant.id(), + thing_map, + ) .await?; - import_attr_func_for_prop( - ctx, - change_set_pk, - *schema_variant.id(), - AttrFuncInfo { - func_unique_id: root_prop_func.func_unique_id().to_owned(), - prop_id: *prop.id(), - inputs: root_prop_func - .inputs()? - .iter() - .map(|input| input.to_owned().into()) - .collect(), - }, - None, - thing_map, - ) - .await?; - } - if !has_resource_value_func { - attach_resource_payload_to_value(ctx, *schema_variant.id()).await?; - } - for attr_func in side_effects.attr_funcs { - import_attr_func_for_prop( - ctx, - change_set_pk, - *schema_variant.id(), - attr_func, - None, - thing_map, - ) - .await?; - } - - for (key, map_key_func) in side_effects.map_key_funcs { - import_attr_func_for_prop( - ctx, - change_set_pk, - *schema_variant.id(), - map_key_func, - Some(key), - thing_map, - ) - .await?; - } - } - - Ok(schema_variant) -} + if let (Some(prototype), Some(unique_id)) = (prototype, auth_func.unique_id()) { + thing_map.insert( + change_set_pk, + unique_id.to_owned(), + Thing::AuthPrototype(prototype), + ); + } + } -pub async fn attach_resource_payload_to_value( - ctx: &DalContext, - schema_variant_id: SchemaVariantId, -) -> PkgResult<()> { - let func_id = *Func::find_by_name(ctx, "si:resourcePayloadToValue") - .await? 
- .ok_or(FuncError::NotFoundByName( - "si:resourcePayloadToValue".into(), - ))? - .id(); + for leaf_func in variant_spec.leaf_functions()? { + import_leaf_function( + ctx, + change_set_pk, + leaf_func, + schema_variant.id(), + thing_map, + ) + .await?; + } - let func_argument_id = *FuncArgument::find_by_name_for_func(ctx, "payload", func_id) - .await? - .ok_or(FuncArgumentError::NotFoundByNameForFunc( - "payload".into(), - func_id, - ))? - .id(); + // Default values must be set before attribute functions are configured so they don't + // override the prototypes set there + for default_value_info in side_effects.default_values { + set_default_value(ctx, default_value_info).await?; + } - let source = { - let prop = SchemaVariant::find_prop_in_tree( - ctx, - schema_variant_id, - &["root", "resource", "payload"], - ) - .await?; + // Set a default name value for all name props, this ensures region has a name before + // the function is executed + { + let name_prop_id = Prop::find_prop_id_by_path( + ctx, + schema_variant.id(), + &PropPath::new(["root", "si", "name"]), + ) + .await?; + let name_default_value_info = DefaultValueInfo::String { + prop_id: name_prop_id, + default_value: schema.name.to_owned().to_lowercase(), + }; - InternalProvider::find_for_prop(ctx, *prop.id()) - .await? - .ok_or(InternalProviderError::NotFoundForProp(*prop.id()))? - }; + set_default_value(ctx, name_default_value_info).await?; + } - let target = { - let resource_value_prop = - SchemaVariant::find_prop_in_tree(ctx, schema_variant_id, &["root", "resource_value"]) + for si_prop_func in variant_spec.si_prop_funcs()? { + let prop_id = Prop::find_prop_id_by_path( + ctx, + schema_variant.id(), + &PropPath::new(si_prop_func.kind().prop_path()), + ) + .await?; + import_attr_func_for_prop( + ctx, + change_set_pk, + schema_variant.id(), + AttrFuncInfo { + func_unique_id: si_prop_func.func_unique_id().to_owned(), + prop_id, + inputs: si_prop_func + .inputs()? 
+ .iter() + .map(|input| input.to_owned().into()) + .collect(), + }, + None, + thing_map, + ) .await?; + } - let mut prototype = AttributeValue::find_for_context( - ctx, - AttributeReadContext::default_with_prop(*resource_value_prop.id()), - ) - .await? - .ok_or(AttributeValueError::Missing)? - .attribute_prototype(ctx) - .await? - .ok_or(AttributeValueError::MissingAttributePrototype)?; + let mut has_resource_value_func = false; + for root_prop_func in variant_spec.root_prop_funcs()? { + if root_prop_func.prop() == SchemaVariantSpecPropRoot::ResourceValue { + has_resource_value_func = true; + } - prototype.set_func_id(ctx, func_id).await?; + let prop_id = Prop::find_prop_id_by_path( + ctx, + schema_variant.id(), + &PropPath::new(root_prop_func.prop().path_parts()), + ) + .await?; + import_attr_func_for_prop( + ctx, + change_set_pk, + schema_variant.id(), + AttrFuncInfo { + func_unique_id: root_prop_func.func_unique_id().to_owned(), + prop_id, + inputs: root_prop_func + .inputs()? + .iter() + .map(|input| input.to_owned().into()) + .collect(), + }, + None, + thing_map, + ) + .await?; + } + if !has_resource_value_func { + attach_resource_payload_to_value(ctx, schema_variant.id()).await?; + } - prototype - }; + for attr_func in side_effects.attr_funcs { + import_attr_func_for_prop( + ctx, + change_set_pk, + schema_variant.id(), + attr_func, + None, + thing_map, + ) + .await?; + } - match AttributePrototypeArgument::list_for_attribute_prototype(ctx, *target.id()) - .await? 
- .iter() - .find(|apa| apa.func_argument_id() == func_argument_id) - { - Some(apa) => { - if apa.internal_provider_id() != *source.id() { - let mut apa = apa.to_owned(); - apa.set_internal_provider_id(ctx, *source.id()).await?; + for (key, map_key_func) in side_effects.map_key_funcs { + import_attr_func_for_prop( + ctx, + change_set_pk, + schema_variant.id(), + map_key_func, + Some(key), + thing_map, + ) + .await?; } + + Some(schema_variant) } - None => { - AttributePrototypeArgument::new_for_intra_component( - ctx, - *target.id(), - func_argument_id, - *source.id(), - ) - .await?; - } - } - Ok(()) + }; + + Ok(schema_variant) } async fn set_default_value( ctx: &DalContext, default_value_info: DefaultValueInfo, ) -> PkgResult<()> { - let prop = match &default_value_info { + let prop_id = match &default_value_info { DefaultValueInfo::Number { prop_id, .. } | DefaultValueInfo::String { prop_id, .. } - | DefaultValueInfo::Boolean { prop_id, .. } => Prop::get_by_id(ctx, prop_id) - .await? - .ok_or(PkgError::MissingProp(*prop_id))?, + | DefaultValueInfo::Boolean { prop_id, .. } => *prop_id, }; match default_value_info { DefaultValueInfo::Boolean { default_value, .. } => { - prop.set_default_value(ctx, default_value).await? + Prop::set_default_value(ctx, prop_id, default_value).await? } DefaultValueInfo::Number { default_value, .. } => { - prop.set_default_value(ctx, default_value).await? + Prop::set_default_value(ctx, prop_id, default_value).await? } DefaultValueInfo::String { default_value, .. } => { - prop.set_default_value(ctx, default_value).await? + Prop::set_default_value(ctx, prop_id, default_value).await? 
} } @@ -2331,7 +2908,7 @@ async fn set_default_value( async fn import_attr_func_for_prop( ctx: &DalContext, - change_set_pk: ChangeSetPk, + change_set_pk: Option, schema_variant_id: SchemaVariantId, AttrFuncInfo { func_unique_id, @@ -2346,13 +2923,10 @@ async fn import_attr_func_for_prop( import_attr_func( ctx, change_set_pk, - AttributeReadContext { - prop_id: Some(prop_id), - ..Default::default() - }, + AttrFuncContext::Prop(prop_id), key, schema_variant_id, - *func.id(), + func.id, inputs, thing_map, ) @@ -2366,7 +2940,7 @@ async fn import_attr_func_for_prop( async fn import_attr_func_for_output_socket( ctx: &DalContext, - change_set_pk: ChangeSetPk, + change_set_pk: Option, schema_variant_id: SchemaVariantId, external_provider_id: ExternalProviderId, func_unique_id: &str, @@ -2378,13 +2952,10 @@ async fn import_attr_func_for_output_socket( import_attr_func( ctx, change_set_pk, - AttributeReadContext { - external_provider_id: Some(external_provider_id), - ..Default::default() - }, + AttrFuncContext::ExternalProvider(external_provider_id), None, schema_variant_id, - *func.id(), + func.id, inputs, thing_map, ) @@ -2398,78 +2969,57 @@ async fn import_attr_func_for_output_socket( async fn get_prototype_for_context( ctx: &DalContext, - context: AttributeReadContext, + context: AttrFuncContext, key: Option, -) -> PkgResult { - let value = AttributeValue::find_for_context(ctx, context) - .await? - .ok_or(AttributeValueError::Missing)?; - - let real_value = if let Some(key) = key { - let parent_prop_id = context - .prop_id() - .ok_or(PkgError::AttributeFuncForKeyMissingProp( +) -> PkgResult { + if key.is_some() { + #[allow(clippy::infallible_destructuring_match)] + let map_prop_id = match context { + AttrFuncContext::Prop(prop_id) => prop_id, + _ => Err(PkgError::AttributeFuncForKeyMissingProp( context, - key.to_owned(), - ))?; - - let parent_prop = Prop::get_by_id(ctx, &parent_prop_id) - .await? 
- .ok_or(PkgError::MissingProp(parent_prop_id))?; + key.to_owned().expect("check above ensures this is some"), + ))?, + }; + let map_prop = Prop::get_by_id(ctx, map_prop_id).await?; - if *parent_prop.kind() != PropKind::Map { + if map_prop.kind != PropKind::Map { return Err(PkgError::AttributeFuncForKeySetOnWrongKind( - parent_prop_id, - key, - *parent_prop.kind(), + map_prop_id, + key.to_owned().expect("check above ensures this is some"), + map_prop.kind, )); } - match parent_prop.child_props(ctx).await?.pop() { - Some(item_prop) => { - let item_write_context = AttributeContextBuilder::new() - .set_prop_id(*item_prop.id()) - .to_context()?; - - let item_read_context: AttributeReadContext = item_write_context.to_owned().into(); - - match AttributeValue::find_with_parent_and_key_for_context( - ctx, - Some(*value.id()), - Some(key.to_owned()), - item_read_context, - ) - .await? - { - Some(item_av) => item_av, - None => { - let item_id = AttributeValue::insert_for_context( - ctx, - item_write_context, - *value.id(), - None, - Some(key), - ) - .await?; + let element_prop_id = map_prop.element_prop_id(ctx).await?; + Ok( + match AttributePrototype::find_for_prop(ctx, element_prop_id, &key).await? { + None => { + let unset_func_id = Func::find_intrinsic(ctx, IntrinsicFunc::Unset).await?; + let prototype_id = AttributePrototype::new(ctx, unset_func_id).await?.id(); + Prop::set_prototype_id(ctx, element_prop_id, prototype_id).await?; - AttributeValue::get_by_id(ctx, &item_id) - .await? - .ok_or(AttributeValueError::MissingForId(item_id))? - } + prototype_id } + Some(prototype_id) => prototype_id, + }, + ) + } else { + Ok(match context { + AttrFuncContext::Prop(prop_id) => { + AttributePrototype::find_for_prop(ctx, prop_id, &None) + .await? + .ok_or(PkgError::PropMissingPrototype(prop_id))? 
} - None => { - return Err(PkgError::MissingItemPropForMapProp(parent_prop_id)); + AttrFuncContext::ExternalProvider(external_provider_id) => { + AttributePrototype::find_for_external_provider(ctx, external_provider_id) + .await? + .ok_or(PkgError::ExternalProviderMissingPrototype( + external_provider_id, + ))? } - } - } else { - value - }; - - Ok(real_value - .attribute_prototype(ctx) - .await? - .ok_or(AttributeValueError::MissingAttributePrototype)?) + }) + } } async fn create_attr_proto_arg( @@ -2478,7 +3028,7 @@ async fn create_attr_proto_arg( input: &SiPkgAttrFuncInputView, func_id: FuncId, schema_variant_id: SchemaVariantId, -) -> PkgResult { +) -> PkgResult { let arg = match &input { SiPkgAttrFuncInputView::Prop { name, .. } | SiPkgAttrFuncInputView::InputSocket { name, .. } @@ -2491,37 +3041,28 @@ async fn create_attr_proto_arg( Ok(match input { SiPkgAttrFuncInputView::Prop { prop_path, .. } => { - let prop = Prop::find_prop_by_path(ctx, schema_variant_id, &prop_path.into()).await?; - let prop_ip = InternalProvider::find_for_prop(ctx, *prop.id()) - .await? - .ok_or(PkgError::MissingInternalProviderForProp(*prop.id()))?; + let prop_id = + Prop::find_prop_id_by_path(ctx, schema_variant_id, &prop_path.into()).await?; + let apa = AttributePrototypeArgument::new(ctx, prototype_id, arg.id).await?; + let apa_id = apa.id(); - AttributePrototypeArgument::new_for_intra_component( - ctx, - prototype_id, - *arg.id(), - *prop_ip.id(), - ) - .await? + apa.set_value_from_prop_id(ctx, prop_id).await?; + + apa_id } SiPkgAttrFuncInputView::InputSocket { socket_name, .. } => { - let explicit_ip = InternalProvider::find_explicit_for_schema_variant_and_name( - ctx, - schema_variant_id, - &socket_name, - ) - .await? - .ok_or(PkgError::MissingInternalProviderForSocketName( - socket_name.to_owned(), - ))?; - - AttributePrototypeArgument::new_for_intra_component( - ctx, - prototype_id, - *arg.id(), - *explicit_ip.id(), - ) - .await? 
+ let explicit_ip = + InternalProvider::find_explicit_with_name(ctx, socket_name, schema_variant_id) + .await? + .ok_or(PkgError::MissingInternalProviderForSocketName( + socket_name.to_owned(), + ))?; + let apa = AttributePrototypeArgument::new(ctx, prototype_id, arg.id).await?; + let apa_id = apa.id(); + + apa.set_value_from_internal_provider_id(ctx, explicit_ip.id()) + .await?; + apa_id } _ => { // xxx: make this an error @@ -2530,138 +3071,156 @@ async fn create_attr_proto_arg( }) } -async fn update_attr_proto_arg( - ctx: &DalContext, - apa: &mut AttributePrototypeArgument, - _prototype_id: AttributePrototypeId, - input: &SiPkgAttrFuncInputView, - func_id: FuncId, - schema_variant_id: SchemaVariantId, -) -> PkgResult<()> { - let arg = match &input { - SiPkgAttrFuncInputView::Prop { name, .. } - | SiPkgAttrFuncInputView::InputSocket { name, .. } - | SiPkgAttrFuncInputView::OutputSocket { name, .. } => { - FuncArgument::find_by_name_for_func(ctx, name, func_id) - .await? - .ok_or(PkgError::MissingFuncArgument(name.to_owned(), func_id))? - } - }; - - if apa.func_argument_id() != *arg.id() { - apa.set_func_argument_id(ctx, arg.id()).await?; - } - - match input { - SiPkgAttrFuncInputView::Prop { prop_path, .. } => { - let prop = Prop::find_prop_by_path(ctx, schema_variant_id, &prop_path.into()).await?; - let prop_ip = InternalProvider::find_for_prop(ctx, *prop.id()) - .await? - .ok_or(PkgError::MissingInternalProviderForProp(*prop.id()))?; - - if apa.internal_provider_id() != *prop_ip.id() { - apa.set_internal_provider_id_safe(ctx, *prop_ip.id()) - .await?; - } - } - SiPkgAttrFuncInputView::InputSocket { socket_name, .. } => { - let explicit_ip = InternalProvider::find_explicit_for_schema_variant_and_name( - ctx, - schema_variant_id, - &socket_name, - ) - .await? 
- .ok_or(PkgError::MissingInternalProviderForSocketName( - socket_name.to_owned(), - ))?; - - if apa.internal_provider_id() != *explicit_ip.id() { - apa.set_internal_provider_id_safe(ctx, *explicit_ip.id()) - .await?; - } - } - _ => {} - } - - Ok(()) +// async fn update_attr_proto_arg( +// ctx: &DalContext, +// apa: &mut AttributePrototypeArgument, +// _prototype_id: AttributePrototypeId, +// input: &SiPkgAttrFuncInputView, +// func_id: FuncId, +// schema_variant_id: SchemaVariantId, +// ) -> PkgResult<()> { +// let arg = match &input { +// SiPkgAttrFuncInputView::Prop { name, .. } +// | SiPkgAttrFuncInputView::InputSocket { name, .. } +// | SiPkgAttrFuncInputView::OutputSocket { name, .. } => { +// FuncArgument::find_by_name_for_func(ctx, name, func_id) +// .await? +// .ok_or(PkgError::MissingFuncArgument(name.to_owned(), func_id))? +// } +// }; + +// if apa.func_argument_id() != *arg.id() { +// apa.set_func_argument_id(ctx, arg.id()).await?; +// } + +// match input { +// SiPkgAttrFuncInputView::Prop { prop_path, .. } => { +// let prop = Prop::find_prop_by_path(ctx, schema_variant_id, &prop_path.into()).await?; +// let prop_ip = InternalProvider::find_for_prop(ctx, *prop.id()) +// .await? +// .ok_or(PkgError::MissingInternalProviderForProp(*prop.id()))?; + +// if apa.internal_provider_id() != *prop_ip.id() { +// apa.set_internal_provider_id_safe(ctx, *prop_ip.id()) +// .await?; +// } +// } +// SiPkgAttrFuncInputView::InputSocket { socket_name, .. } => { +// let explicit_ip = InternalProvider::find_explicit_for_schema_variant_and_name( +// ctx, +// schema_variant_id, +// &socket_name, +// ) +// .await? 
+// .ok_or(PkgError::MissingInternalProviderForSocketName( +// socket_name.to_owned(), +// ))?; + +// if apa.internal_provider_id() != *explicit_ip.id() { +// apa.set_internal_provider_id_safe(ctx, *explicit_ip.id()) +// .await?; +// } +// } +// _ => {} +// } + +// Ok(()) +// } + +#[derive(Debug, Clone)] +pub enum AttrFuncContext { + Prop(PropId), + ExternalProvider(ExternalProviderId), } #[allow(clippy::too_many_arguments)] async fn import_attr_func( ctx: &DalContext, - change_set_pk: ChangeSetPk, - context: AttributeReadContext, + change_set_pk: Option, + context: AttrFuncContext, key: Option, schema_variant_id: SchemaVariantId, func_id: FuncId, inputs: Vec, - thing_map: &mut ThingMap, + _thing_map: &mut ThingMap, ) -> PkgResult<()> { - let mut prototype = get_prototype_for_context(ctx, context, key).await?; + let prototype_id = get_prototype_for_context(ctx, context, key).await?; + + let prototype_func_id = AttributePrototype::func_id(ctx, prototype_id).await?; - if prototype.func_id() != func_id { - prototype.set_func_id(ctx, &func_id).await?; + if prototype_func_id != func_id { + AttributePrototype::update_func_by_id(ctx, prototype_id, func_id).await?; } for input in &inputs { - let (unique_id, deleted) = match input { - SiPkgAttrFuncInputView::Prop { - unique_id, deleted, .. - } - | SiPkgAttrFuncInputView::InputSocket { - unique_id, deleted, .. - } - | SiPkgAttrFuncInputView::OutputSocket { - unique_id, deleted, .. 
- } => (unique_id, *deleted), - }; - - let apa = match unique_id - .as_deref() - .and_then(|unique_id| thing_map.get(change_set_pk, &unique_id.to_owned())) - { - Some(Thing::AttributePrototypeArgument(apa)) => { - let mut apa = apa.to_owned(); - if deleted { - apa.delete_by_id(ctx).await?; - } else { - update_attr_proto_arg( - ctx, - &mut apa, - *prototype.id(), - input, - func_id, - schema_variant_id, - ) - .await?; - } - - Some(apa) + match change_set_pk { + None => { + create_attr_proto_arg(ctx, prototype_id, input, func_id, schema_variant_id).await?; } - _ => { - if deleted { - None - } else { - Some( - create_attr_proto_arg( - ctx, - *prototype.id(), - input, - func_id, - schema_variant_id, - ) - .await?, - ) - } + Some(_) => { + todo!(); + // let (unique_id, deleted) = match input { + // SiPkgAttrFuncInputView::Prop { + // unique_id, deleted, .. + // } + // | SiPkgAttrFuncInputView::InputSocket { + // unique_id, deleted, .. + // } + // | SiPkgAttrFuncInputView::OutputSocket { + // unique_id, deleted, .. 
+ // } => ( + // unique_id + // .as_deref() + // .ok_or(PkgError::MissingUniqueIdForNode("attr-func-input".into()))?, + // *deleted, + // ), + // }; + // + // let apa = match thing_map.get(change_set_pk, &unique_id.to_owned()) { + // Some(Thing::AttributePrototypeArgument(apa)) => { + // let mut apa = apa.to_owned(); + // if deleted { + // apa.delete_by_id(ctx).await?; + // } else { + // update_attr_proto_arg( + // ctx, + // &mut apa, + // *prototype.id(), + // input, + // func_id, + // schema_variant_id, + // ) + // .await?; + // } + // + // Some(apa) + // } + // _ => { + // if deleted { + // None + // } else { + // Some( + // create_attr_proto_arg( + // ctx, + // *prototype.id(), + // input, + // func_id, + // schema_variant_id, + // ) + // .await?, + // ) + // } + // } + // }; + + // if let Some(apa) = apa { + // thing_map.insert( + // change_set_pk, + // unique_id.to_owned(), + // Thing::AttributePrototypeArgument(apa), + // ); + // } } - }; - - if let (Some(apa), Some(unique_id)) = (apa, unique_id) { - thing_map.insert( - change_set_pk, - unique_id.to_owned(), - Thing::AttributePrototypeArgument(apa), - ); } } @@ -2684,58 +3243,84 @@ async fn create_dal_prop( data: &SiPkgPropData, kind: PropKind, schema_variant_id: SchemaVariantId, - parent_prop_id: Option, + parent_prop_info: Option, ) -> PkgResult { - let mut prop = Prop::new( + let prop_parent = match parent_prop_info { + None => PropParent::SchemaVariant(schema_variant_id), + Some(parent_info) => { + if parent_info.kind.ordered() { + PropParent::OrderedProp(parent_info.prop_id) + } else { + PropParent::Prop(parent_info.prop_id) + } + } + }; + + let prop = Prop::new( ctx, &data.name, kind, - schema_variant_id, - parent_prop_id, + data.hidden, + data.doc_link.as_ref().map(|l| l.to_string()), Some(((&data.widget_kind).into(), data.widget_options.to_owned())), - data.documentation.to_owned(), - data.validation_format.to_owned(), + prop_parent, ) .await .map_err(SiPkgError::visit_prop)?; - prop.set_hidden(ctx, 
data.hidden).await?; - prop.set_doc_link(ctx, data.doc_link.as_ref().map(|l| l.to_string())) - .await?; - Ok(prop) } +#[derive(Debug, Clone)] +struct ParentPropInfo { + prop_id: PropId, + path: PropPath, + kind: PropKind, +} + async fn create_prop( spec: SiPkgProp<'_>, - parent_prop_info: Option<(PropId, PropPath)>, + parent_prop_info: Option, ctx: &PropVisitContext<'_>, -) -> PkgResult> { - let prop = { - let parent_path = parent_prop_info - .as_ref() - .map(|info| info.1.to_owned()) - .unwrap_or(PropPath::new(["root"])); +) -> PkgResult> { + let prop = match ctx.change_set_pk { + None => { + let data = spec.data().ok_or(PkgError::DataNotFound("prop".into()))?; + create_dal_prop( + ctx.ctx, + data, + prop_kind_for_pkg_prop(&spec), + ctx.schema_variant_id, + parent_prop_info, + ) + .await? + } + Some(_) => { + let parent_path = parent_prop_info + .as_ref() + .map(|info| info.path.to_owned()) + .unwrap_or(PropPath::new(["root"])); - let path = parent_path.join(&PropPath::new([spec.name()])); + let path = parent_path.join(&PropPath::new([spec.name()])); - match Prop::find_prop_by_path_opt(ctx.ctx, ctx.schema_variant_id, &path).await? { - None => { - let data = spec.data().ok_or(PkgError::DataNotFound("prop".into()))?; - create_dal_prop( - ctx.ctx, - data, - prop_kind_for_pkg_prop(&spec), - ctx.schema_variant_id, - parent_prop_info.as_ref().map(|info| info.0.to_owned()), - ) - .await? + match Prop::find_prop_id_by_path_opt(ctx.ctx, ctx.schema_variant_id, &path).await? { + None => { + let data = spec.data().ok_or(PkgError::DataNotFound("prop".into()))?; + create_dal_prop( + ctx.ctx, + data, + prop_kind_for_pkg_prop(&spec), + ctx.schema_variant_id, + parent_prop_info, + ) + .await? 
+ } + Some(prop_id) => Prop::get_by_id(ctx.ctx, prop_id).await?, } - Some(prop) => prop, } }; - let prop_id = *prop.id(); + let prop_id = prop.id(); // Both attribute functions and default values have to be set *after* the schema variant is // "finalized", so we can't do until we construct the *entire* prop tree. Hence we push work @@ -2814,5 +3399,86 @@ async fn create_prop( }); } - Ok(Some((*prop.id(), prop.path()))) + Ok(Some(ParentPropInfo { + prop_id: prop.id(), + path: prop.path(ctx.ctx).await?, + kind: prop.kind, + })) +} + +pub async fn attach_resource_payload_to_value( + ctx: &DalContext, + schema_variant_id: SchemaVariantId, +) -> PkgResult<()> { + let func_id = Func::find_by_name(ctx, "si:resourcePayloadToValue") + .await? + .ok_or(PkgError::FuncNotFoundByName( + "si:resourcePayloadToValue".into(), + ))?; + + let func_argument_id = FuncArgument::find_by_name_for_func(ctx, "payload", func_id) + .await? + .ok_or(PkgError::FuncArgumentNotFoundByName( + func_id, + "payload".into(), + ))? + .id; + + let source_prop_id = Prop::find_prop_id_by_path( + ctx, + schema_variant_id, + &PropPath::new(["root", "resource", "payload"]), + ) + .await?; + + let target_id = { + let resource_value_prop_id = Prop::find_prop_id_by_path( + ctx, + schema_variant_id, + &PropPath::new(["root", "resource_value"]), + ) + .await?; + + let prototype_id = + get_prototype_for_context(ctx, AttrFuncContext::Prop(resource_value_prop_id), None) + .await?; + + AttributePrototype::update_func_by_id(ctx, prototype_id, func_id).await?; + + prototype_id + }; + + let mut rv_input_apa_id = None; + for apa_id in AttributePrototypeArgument::list_ids_for_prototype(ctx, target_id).await? { + if func_argument_id + == AttributePrototypeArgument::func_argument_id_by_id(ctx, apa_id).await? 
+ { + rv_input_apa_id = Some(apa_id); + break; + } + } + + match rv_input_apa_id { + Some(apa_id) => { + dbg!("existing apa"); + if !{ + if let Some(ValueSource::Prop(prop_id)) = + AttributePrototypeArgument::value_source_by_id(ctx, apa_id).await? + { + prop_id == source_prop_id + } else { + false + } + } { + let apa = AttributePrototypeArgument::get_by_id(ctx, apa_id).await?; + apa.set_value_from_prop_id(ctx, source_prop_id).await?; + } + } + None => { + let apa = AttributePrototypeArgument::new(ctx, target_id, func_argument_id).await?; + apa.set_value_from_prop_id(ctx, source_prop_id).await?; + } + } + + Ok(()) } diff --git a/lib/dal/src/prop.rs b/lib/dal/src/prop.rs index b26e0915fc..c5035528e4 100644 --- a/lib/dal/src/prop.rs +++ b/lib/dal/src/prop.rs @@ -1,39 +1,174 @@ -use async_recursion::async_recursion; -use base64::{engine::general_purpose, Engine}; +use content_store::{ContentHash, Store}; +use petgraph::prelude::*; use serde::{Deserialize, Serialize}; use serde_json::Value; -use si_data_pg::PgError; use si_pkg::PropSpecKind; use std::collections::VecDeque; -use strum::{AsRefStr, Display, EnumIter, EnumString}; +use strum::{AsRefStr, Display, EnumDiscriminants, EnumIter, EnumString}; use telemetry::prelude::*; use thiserror::Error; -use tokio::sync::mpsc; -use veritech_client::FunctionResult; +use ulid::Ulid; -use crate::standard_model::{ - finish_create_from_row, object_option_from_row_option, objects_from_rows, +use crate::attribute::prototype::argument::{ + AttributePrototypeArgument, AttributePrototypeArgumentError, }; +use crate::attribute::prototype::AttributePrototypeError; +use crate::change_set_pointer::ChangeSetPointerError; +use crate::func::argument::{FuncArgument, FuncArgumentError}; +use crate::func::intrinsics::IntrinsicFunc; +use crate::func::FuncError; +use crate::workspace_snapshot::content_address::ContentAddressDiscriminants; +use crate::workspace_snapshot::edge_weight::{EdgeWeight, EdgeWeightKind}; +use 
crate::workspace_snapshot::edge_weight::{EdgeWeightError, EdgeWeightKindDiscriminants}; +use crate::workspace_snapshot::node_weight::{NodeWeight, NodeWeightError}; +use crate::workspace_snapshot::WorkspaceSnapshotError; +use crate::AttributeValueId; use crate::{ - attribute::{prototype::AttributePrototype, value::AttributeValue}, - component::ComponentViewError, - func::{ - binding::{FuncBinding, FuncBindingError}, - binding_return_value::FuncBindingReturnValueError, - }, - impl_standard_model, - job::consumer::JobConsumerError, - label_list::ToLabelList, - pk, - property_editor::schema::WidgetKind, - standard_model, standard_model_accessor, standard_model_belongs_to, standard_model_has_many, - AttributeContext, AttributeContextBuilder, AttributeContextBuilderError, - AttributePrototypeError, AttributeReadContext, ComponentId, ComponentView, DalContext, Func, - FuncError, FuncId, HistoryEventError, SchemaVariantId, StandardModel, StandardModelError, - Tenancy, Timestamp, ValidationOutput, ValidationResolver, ValidationResolverError, - ValidationStatus, Visibility, + label_list::ToLabelList, pk, property_editor::schema::WidgetKind, AttributePrototype, + AttributePrototypeId, DalContext, Func, FuncBackendResponseType, FuncId, SchemaVariantId, + Timestamp, TransactionsError, }; -use crate::{AttributeValueError, AttributeValueId, FuncBackendResponseType, TransactionsError}; + +pub const PROP_VERSION: PropContentDiscriminants = PropContentDiscriminants::V1; + +#[remain::sorted] +#[derive(Error, Debug)] +pub enum PropError { + #[error("attribute prototype error: {0}")] + AttributePrototype(#[from] AttributePrototypeError), + #[error("attribute prototype argument error: {0}")] + AttributePrototypeArgument(#[from] AttributePrototypeArgumentError), + #[error("change set error: {0}")] + ChangeSet(#[from] ChangeSetPointerError), + #[error("child prop of {0:?} not found by name: {1}")] + ChildPropNotFoundByName(NodeIndex, String), + #[error("edge weight error: {0}")] + 
EdgeWeight(#[from] EdgeWeightError), + #[error("prop {0} of kind {1} does not have an element prop")] + ElementPropNotOnKind(PropId, PropKind), + #[error("func error: {0}")] + Func(#[from] FuncError), + #[error("func argument error: {0}")] + FuncArgument(#[from] FuncArgumentError), + #[error("map or array {0} missing element prop")] + MapOrArrayMissingElementProp(PropId), + #[error("missing prototype for prop {0}")] + MissingPrototypeForProp(PropId), + #[error("missing provider for prop {0}")] + MissingProviderForProp(PropId), + #[error("node weight error: {0}")] + NodeWeight(#[from] NodeWeightError), + #[error("prop {0} is orphaned")] + PropIsOrphan(PropId), + #[error("prop {0} has a non prop or schema variant parent")] + PropParentInvalid(PropId), + #[error("serde error: {0}")] + Serde(#[from] serde_json::Error), + #[error("can only set default values for scalars (string, integer, boolean), prop {0} is {1}")] + SetDefaultForNonScalar(PropId, PropKind), + #[error("for parent prop {0}, there is a child prop {1} that has unexpected siblings: {2:?}")] + SingleChildPropHasUnexpectedSiblings(PropId, PropId, Vec), + #[error("no single child prop found for parent: {0}")] + SingleChildPropNotFound(PropId), + #[error("store error: {0}")] + Store(#[from] content_store::StoreError), + #[error("transactions error: {0}")] + Transactions(#[from] TransactionsError), + #[error("could not acquire lock: {0}")] + TryLock(#[from] tokio::sync::TryLockError), + #[error("workspace snapshot error: {0}")] + WorkspaceSnapshot(#[from] WorkspaceSnapshotError), +} + +pub type PropResult = Result; + +pk!(PropId); + +// TODO: currently we only have string values in all widget_options but we should extend this to +// support other types. However, we cannot use serde_json::Value since postcard will not +// deserialize into a serde_json::Value. 
+#[derive(Clone, Debug, Deserialize, Eq, PartialEq, Serialize)] +pub struct WidgetOption { + label: String, + pub value: String, +} + +pub type WidgetOptions = Vec; + +/// An individual "field" within the tree of a [`SchemaVariant`](crate::SchemaVariant). +#[derive(Clone, Debug, Deserialize, Eq, PartialEq, Serialize)] +pub struct Prop { + pub id: PropId, + #[serde(flatten)] + pub timestamp: Timestamp, + /// The name of the [`Prop`]. + pub name: String, + /// The kind of the [`Prop`]. + pub kind: PropKind, + /// The kind of "widget" that should be used for this [`Prop`]. + pub widget_kind: WidgetKind, + /// The configuration of the "widget". + pub widget_options: Option, + /// A link to external documentation for working with this specific [`Prop`]. + pub doc_link: Option, + /// Embedded documentation for working with this specific [`Prop`]. + pub documentation: Option, + /// A toggle for whether or not the [`Prop`] should be visually hidden. + pub hidden: bool, + /// Props can be connected to eachother to signify that they should contain the same value + /// This is useful for diffing the resource with the domain, to suggest actions if the real world changes + pub refers_to_prop_id: Option, + /// Connected props may need a custom diff function + pub diff_func_id: Option, + /// A serialized validation format JSON object for the prop. TODO: useTODO: use + pub validation_format: Option, +} + +#[derive(EnumDiscriminants, Serialize, Deserialize, PartialEq)] +pub enum PropContent { + V1(PropContentV1), +} + +#[derive(Debug, Clone, Deserialize, Serialize, PartialEq)] +pub struct PropContentV1 { + pub timestamp: Timestamp, + /// The name of the [`Prop`]. + pub name: String, + /// The kind of the [`Prop`]. + pub kind: PropKind, + /// The kind of "widget" that should be used for this [`Prop`]. + pub widget_kind: WidgetKind, + /// The configuration of the "widget". + pub widget_options: Option, + /// A link to external documentation for working with this specific [`Prop`]. 
+ pub doc_link: Option, + /// Embedded documentation for working with this specific [`Prop`]. + pub documentation: Option, + /// A toggle for whether or not the [`Prop`] should be visually hidden. + pub hidden: bool, + /// Props can be connected to eachother to signify that they should contain the same value + /// This is useful for diffing the resource with the domain, to suggest actions if the real world changes + pub refers_to_prop_id: Option, + /// Connected props may need a custom diff function + pub diff_func_id: Option, +} + +impl From for PropContentV1 { + fn from(value: Prop) -> Self { + Self { + timestamp: value.timestamp, + name: value.name, + kind: value.kind, + widget_kind: value.widget_kind, + widget_options: value.widget_options, + doc_link: value.doc_link, + documentation: value.documentation, + hidden: value.hidden, + refers_to_prop_id: value.refers_to_prop_id, + diff_func_id: value.diff_func_id, + } + } +} /// This is the separator used for the "path" column. It is a vertical tab character, which should /// not (we'll see) be able to be provided by our users in [`Prop`] names. 
@@ -122,71 +257,9 @@ impl From for PropPath { } } -const ALL_ANCESTOR_PROPS: &str = include_str!("queries/prop/all_ancestor_props.sql"); -const FIND_VALIDATIONS_FOR_COMPONENT: &str = - include_str!("queries/prop/find_validations_for_component.sql"); -const FIND_ROOT_PROP_FOR_PROP: &str = include_str!("queries/prop/root_prop_for_prop.sql"); -const FIND_PROP_IN_TREE: &str = include_str!("queries/prop/find_prop_in_tree.sql"); - -#[remain::sorted] -#[derive(Error, Debug)] -pub enum PropError { - #[error("Array prop {0} is missing element child")] - ArrayMissingElementChild(PropId), - #[error("AttributeContext error: {0}")] - AttributeContext(#[from] AttributeContextBuilderError), - #[error("AttributePrototype error: {0}")] - AttributePrototype(#[from] AttributePrototypeError), - #[error("AttributeValue error: {0}")] - AttributeValue(#[from] AttributeValueError), - #[error(transparent)] - ComponentView(#[from] Box), - #[error("default diff function not found")] - DefaultDiffFunctionNotFound, - #[error("expected child prop not found with name {0}")] - ExpectedChildNotFound(String), - #[error("Func error: {0}")] - Func(#[from] FuncError), - #[error("FuncBinding error: {0}")] - FuncBinding(#[from] FuncBindingError), - #[error("FuncBindingReturnValue error: {0}")] - FuncBindingReturnValue(#[from] FuncBindingReturnValueError), - #[error("history event error: {0}")] - HistoryEvent(#[from] HistoryEventError), - #[error(transparent)] - JobConsumer(#[from] Box), - #[error("Map prop {0} is missing element child")] - MapMissingElementChild(PropId), - #[error("missing a func: {0}")] - MissingFunc(String), - #[error("missing a func by id: {0}")] - MissingFuncById(FuncId), - #[error("prop not found: {0} ({1:?})")] - NotFound(PropId, Visibility), - #[error("prop not found at path: {0} {1:?}")] - NotFoundAtPath(String, Visibility), - #[error("parent prop kind is not \"Object\", which is required for setting default values on props (found {0})")] - 
ParentPropIsNotObjectForPropWithDefaultValue(PropKind), - #[error("pg error: {0}")] - Pg(#[from] PgError), - #[error(transparent)] - SerdeJson(#[from] serde_json::Error), - #[error("unable to set default value for non scalar prop type")] - SetDefaultForNonScalar(PropKind), - #[error("standard model error: {0}")] - StandardModel(#[from] StandardModelError), - #[error("transactions error: {0}")] - Transactions(#[from] TransactionsError), - #[error(transparent)] - ValidationResolver(#[from] Box), - #[error(transparent)] - Veritech(#[from] veritech_client::ClientError), -} - -pub type PropResult = Result; - -pk!(PropPk); -pk!(PropId); +// const ALL_ANCESTOR_PROPS: &str = include_str!("queries/prop/all_ancestor_props.sql"); +// const FIND_ROOT_PROP_FOR_PROP: &str = include_str!("queries/prop/root_prop_for_prop.sql"); +// const FIND_PROP_IN_TREE: &str = include_str!("queries/prop/find_prop_in_tree.sql"); #[remain::sorted] #[derive( @@ -213,6 +286,20 @@ pub enum PropKind { String, } +impl PropKind { + pub fn ordered(&self) -> bool { + matches!(self, PropKind::Array | PropKind::Map | PropKind::Object) + } + + pub fn empty_value(&self) -> Option { + match self { + Self::Array => Some(serde_json::json!([])), + Self::Map | Self::Object => Some(serde_json::json!({})), + _ => None, + } + } +} + impl From for PropSpecKind { fn from(prop: PropKind) -> Self { match prop { @@ -253,547 +340,876 @@ impl From for FuncBackendResponseType { } } -/// An individual "field" within the tree of a [`SchemaVariant`](crate::SchemaVariant). -#[derive(Clone, Debug, Deserialize, Eq, PartialEq, Serialize)] -pub struct Prop { - pk: PropPk, - id: PropId, - #[serde(flatten)] - tenancy: Tenancy, - #[serde(flatten)] - timestamp: Timestamp, - #[serde(flatten)] - visibility: Visibility, +pub enum PropParent { + OrderedProp(PropId), + Prop(PropId), + SchemaVariant(SchemaVariantId), +} - /// The name of the [`Prop`]. - name: String, - /// The kind of the [`Prop`]. 
- kind: PropKind, - /// The kind of "widget" that should be used for this [`Prop`]. - widget_kind: WidgetKind, - /// The configuration of the "widget". - widget_options: Option, - /// A link to external documentation for working with this specific [`Prop`]. - doc_link: Option, - /// Embedded documentation for working with this specific [`Prop`]. - documentation: Option, - /// A toggle for whether or not the [`Prop`] should be visually hidden. - hidden: bool, - /// The "path" for a given [`Prop`]. It is a concatenation of [`Prop`] names based on lineage - /// with [`PROP_PATH_SEPARATOR`] as the separator between each parent and child. +impl Prop { + pub fn assemble(id: PropId, inner: PropContentV1) -> Self { + Self { + id, + timestamp: inner.timestamp, + name: inner.name, + kind: inner.kind, + widget_kind: inner.widget_kind, + widget_options: inner.widget_options, + doc_link: inner.doc_link, + documentation: inner.documentation, + hidden: inner.hidden, + refers_to_prop_id: inner.refers_to_prop_id, + diff_func_id: inner.diff_func_id, + validation_format: None, + } + } + + pub fn id(&self) -> PropId { + self.id + } + + pub async fn parent_prop_id_by_id( + ctx: &DalContext, + prop_id: PropId, + ) -> PropResult> { + let workspace_snapshot = ctx.workspace_snapshot()?.read().await; + match workspace_snapshot + .incoming_sources_for_edge_weight_kind(prop_id, EdgeWeightKindDiscriminants::Use)? + .first() + { + Some(parent_node_idx) => Ok( + match workspace_snapshot.get_node_weight(*parent_node_idx)? 
{ + NodeWeight::Prop(prop_inner) => Some(prop_inner.id().into()), + NodeWeight::Content(content_inner) => { + let content_addr_discrim: ContentAddressDiscriminants = + content_inner.content_address().into(); + match content_addr_discrim { + ContentAddressDiscriminants::SchemaVariant => None, + _ => return Err(PropError::PropParentInvalid(prop_id)), + } + } + _ => return Err(PropError::PropParentInvalid(prop_id)), + }, + ), + None => Ok(None), + } + } + + pub async fn direct_child_prop_ids( + ctx: &DalContext, + prop_id: PropId, + ) -> PropResult> { + let workspace_snapshot = ctx.workspace_snapshot()?.read().await; + Ok(workspace_snapshot + .edges_directed_for_edge_weight_kind( + prop_id, + Outgoing, + EdgeWeightKindDiscriminants::Use, + )? + .iter() + .filter_map(|edge_ref| { + workspace_snapshot + .get_node_weight(edge_ref.target()) + .ok() + .and_then(|node_weight| node_weight.get_prop_node_weight().ok()) + .map(|prop_node| prop_node.id().into()) + }) + .collect()) + } + + /// Finds and expects a single child [`Prop`]. If zero or more than one [`Prop`] is found, an error is returned. /// - /// This is useful for finding and querying for specific [`Props`](Prop) in a - /// [`SchemaVariant`](crate::SchemaVariant)'s tree. - path: String, - /// The [`SchemaVariant`](crate::SchemaVariant) whose tree we (the [`Prop`]) reside in. - schema_variant_id: SchemaVariantId, - /// Props can be connected to eachother to signify that they should contain the same value - /// This is useful for diffing the resource with the domain, to suggest actions if the real world changes - refers_to_prop_id: Option, - /// Connected props may need a custom diff function - diff_func_id: Option, - /// A serialized validation format JSON object for the prop - validation_format: Option, -} + /// This is most useful for maps and arrays, but can also be useful for objects with single fields + /// (e.g. "/root/secrets" under certain scenarios). 
+ pub async fn direct_single_child_prop_id( + ctx: &DalContext, + prop_id: PropId, + ) -> PropResult { + let mut direct_child_prop_ids_should_only_be_one = + Self::direct_child_prop_ids(ctx, prop_id).await?; -impl_standard_model! { - model: Prop, - pk: PropPk, - id: PropId, - table_name: "props", - history_event_label_base: "prop", - history_event_message_name: "Prop" -} + let single_child_prop_id = direct_child_prop_ids_should_only_be_one + .pop() + .ok_or(PropError::SingleChildPropNotFound(prop_id))?; + + if !direct_child_prop_ids_should_only_be_one.is_empty() { + return Err(PropError::SingleChildPropHasUnexpectedSiblings( + prop_id, + single_child_prop_id, + direct_child_prop_ids_should_only_be_one, + )); + } + + Ok(single_child_prop_id) + } + + pub async fn path_by_id(ctx: &DalContext, prop_id: PropId) -> PropResult { + let name = ctx + .workspace_snapshot()? + .read() + .await + .get_node_weight_by_id(prop_id)? + .get_prop_node_weight()? + .name() + .to_owned(); + + let mut parts = VecDeque::from([name]); + let mut work_queue = VecDeque::from([prop_id]); + + while let Some(prop_id) = work_queue.pop_front() { + if let Some(prop_id) = Prop::parent_prop_id_by_id(ctx, prop_id).await? { + let workspace_snapshot = ctx.workspace_snapshot()?.read().await; + let node_idx = workspace_snapshot.get_node_index_by_id(prop_id)?; + + if let NodeWeight::Prop(inner) = workspace_snapshot.get_node_weight(node_idx)? 
{ + parts.push_front(inner.name().to_owned()); + work_queue.push_back(inner.id().into()); + } + } + } + + Ok(PropPath::new(parts)) + } + + pub async fn path(&self, ctx: &DalContext) -> PropResult { + Self::path_by_id(ctx, self.id).await + } + + pub async fn attribute_values_for_prop_id( + ctx: &DalContext, + prop_id: PropId, + ) -> PropResult> { + let mut result = vec![]; + let workspace_snapshot = ctx.workspace_snapshot()?.read().await; + + let av_sources = workspace_snapshot + .incoming_sources_for_edge_weight_kind(prop_id, EdgeWeightKindDiscriminants::Prop)?; + + for av_source_idx in av_sources { + let av_id: AttributeValueId = workspace_snapshot + .get_node_weight(av_source_idx)? + .get_attribute_value_node_weight()? + .id() + .into(); + result.push(av_id) + } + + Ok(result) + } + + pub async fn new_without_ui_optionals( + ctx: &DalContext, + name: impl AsRef, + kind: PropKind, + prop_parent: PropParent, + ) -> PropResult { + Self::new(ctx, name.as_ref(), kind, false, None, None, prop_parent).await + } -impl Prop { /// Create a new [`Prop`]. A corresponding [`AttributePrototype`] and [`AttributeValue`] will be /// created when the provided [`SchemaVariant`](crate::SchemaVariant) is /// [`finalized`](crate::SchemaVariant::finalize). 
- #[allow(clippy::too_many_arguments)] pub async fn new( ctx: &DalContext, - name: impl AsRef, + name: impl Into, kind: PropKind, - schema_variant_id: SchemaVariantId, - parent_prop_id: Option, + hidden: bool, + doc_link: Option, widget_kind_and_options: Option<(WidgetKind, Option)>, - documentation: Option, - validation_format: Option, + prop_parent: PropParent, ) -> PropResult { - let name = name.as_ref(); - let (widget_kind, widget_options) = match widget_kind_and_options { - Some((kind, options)) => (kind, options), - None => (WidgetKind::from(kind), None), + let ordered = kind.ordered(); + let name = name.into(); + + let timestamp = Timestamp::now(); + let (widget_kind, widget_options): (WidgetKind, Option) = + match widget_kind_and_options { + Some((kind, options)) => ( + kind, + match options { + Some(options) => Some(serde_json::from_value(options)?), + None => None, + }, + ), + None => (WidgetKind::from(kind), None), + }; + + let content = PropContentV1 { + timestamp, + name: name.clone(), + kind, + widget_kind, + widget_options, + doc_link, + documentation: None, + hidden, + refers_to_prop_id: None, + diff_func_id: None, + }; + let hash = ctx + .content_store() + .lock() + .await + .add(&PropContent::V1(content.clone()))?; + + let change_set = ctx.change_set_pointer()?; + let id = change_set.generate_ulid()?; + let node_weight = NodeWeight::new_prop(change_set, id, kind, name, hash)?; + let mut workspace_snapshot = ctx.workspace_snapshot()?.write().await; + let _node_index = if ordered { + workspace_snapshot.add_ordered_node(change_set, node_weight)? + } else { + workspace_snapshot.add_node(node_weight)? }; - let row = ctx - .txns() - .await? 
- .pg() - .query_one( - "SELECT object FROM prop_create_v3($1, $2, $3, $4, $5, $6, $7, $8, $9, $10)", - &[ - ctx.tenancy(), - ctx.visibility(), - &name, - &kind.as_ref(), - &widget_kind.as_ref(), - &widget_options.as_ref(), - &schema_variant_id, - &parent_prop_id, - &documentation, - &validation_format, - ], - ) - .await?; - Ok(finish_create_from_row(ctx, row).await?) + match prop_parent { + PropParent::OrderedProp(ordered_prop_id) => { + workspace_snapshot.add_ordered_edge( + change_set, + ordered_prop_id, + EdgeWeight::new(change_set, EdgeWeightKind::Use)?, + id, + )?; + } + PropParent::Prop(prop_id) => { + workspace_snapshot.add_edge( + prop_id, + EdgeWeight::new(change_set, EdgeWeightKind::Use)?, + id, + )?; + } + PropParent::SchemaVariant(schema_variant_id) => { + workspace_snapshot.add_edge( + schema_variant_id, + EdgeWeight::new(change_set, EdgeWeightKind::Use)?, + id, + )?; + } + }; + + Ok(Self::assemble(id.into(), content)) } - pub async fn new_without_ui_optionals( - ctx: &DalContext, - name: impl AsRef, - kind: PropKind, - schema_variant_id: SchemaVariantId, - parent_prop_id: Option, - ) -> PropResult { - Self::new( - ctx, - name, - kind, - schema_variant_id, - parent_prop_id, - None, - None, - None, - ) - .await - } - standard_model_accessor!(name, String, PropResult); - standard_model_accessor!(kind, Enum(PropKind), PropResult); - standard_model_accessor!(widget_kind, Enum(WidgetKind), PropResult); - standard_model_accessor!(widget_options, Option, PropResult); - standard_model_accessor!(doc_link, Option, PropResult); - standard_model_accessor!(documentation, Option, PropResult); - standard_model_accessor!(hidden, bool, PropResult); - standard_model_accessor!(refers_to_prop_id, Option, PropResult); - standard_model_accessor!(diff_func_id, Option, PropResult); - standard_model_accessor!(schema_variant_id, Pk(SchemaVariantId), PropResult); - standard_model_accessor!(validation_format, Option, PropResult); - - pub fn path(&self) -> PropPath { - 
self.path.to_owned().into() - } - - // TODO(nick): replace this table with a foreign key relationship. - standard_model_belongs_to!( - lookup_fn: parent_prop, - set_fn: set_parent_prop_do_not_use, - unset_fn: unset_parent_prop_do_not_use, - table: "prop_belongs_to_prop", - model_table: "props", - belongs_to_id: PropId, - returns: Prop, - result: PropResult, - ); - - // TODO(nick): replace this table with a foreign key relationship. - standard_model_has_many!( - lookup_fn: child_props, - table: "prop_belongs_to_prop", - model_table: "props", - returns: Prop, - result: PropResult, - ); - - pub async fn find_root_prop_for_prop( - ctx: &DalContext, - prop_id: PropId, - ) -> PropResult> { - let row = ctx - .txns() + pub async fn get_by_id(ctx: &DalContext, id: PropId) -> PropResult { + let workspace_snapshot = ctx.workspace_snapshot()?.read().await; + let ulid: ulid::Ulid = id.into(); + let node_index = workspace_snapshot.get_node_index_by_id(ulid)?; + let node_weight = workspace_snapshot.get_node_weight(node_index)?; + let hash = node_weight.content_hash(); + + let content: PropContent = ctx + .content_store() + .lock() + .await + .get(&hash) .await? - .pg() - .query_opt( - FIND_ROOT_PROP_FOR_PROP, - &[ctx.tenancy(), ctx.visibility(), &prop_id], - ) - .await?; + .ok_or(WorkspaceSnapshotError::MissingContentFromStore(ulid))?; - Ok(standard_model::object_option_from_row_option::(row)?) - } + // NOTE(nick,jacob,zack): if we had a v2, then there would be migration logic here. + let PropContent::V1(inner) = content; - /// Returns the given [`Prop`] and all ancestor [`Props`](crate::Prop) back to the root. - /// Ancestor props are ordered by depth, starting from the root prop. - pub async fn all_ancestor_props(ctx: &DalContext, prop_id: PropId) -> PropResult> { - let rows = ctx - .txns() - .await? - .pg() - .query( - ALL_ANCESTOR_PROPS, - &[ctx.tenancy(), ctx.visibility(), &prop_id], - ) - .await?; - Ok(objects_from_rows(rows)?) 
+ Ok(Prop::assemble(id, inner)) } - #[instrument(level = "debug", skip_all)] - #[async_recursion] - pub async fn ts_type(&self, ctx: &DalContext) -> PropResult { - // XXX: Hack! The payload prop kind is a string but we're actually storing arbitrary json - // there and expect it to be a JSON object in most of our code. However, the resource_value - // work is likely to remove the need for this entirely - if self.path() == PropPath::new(["root", "resource", "payload"]) { - return Ok("any".to_string()); + pub async fn element_prop_id(&self, ctx: &DalContext) -> PropResult { + if !matches!(self.kind, PropKind::Array | PropKind::Map) { + return Err(PropError::ElementPropNotOnKind(self.id, self.kind)); } - // Note: we should fix this by having propper enums as prop types - if self.path() == PropPath::new(["root", "resource", "status"]) { - return Ok("'ok' | 'warning' | 'error' | undefined | null".to_owned()); + let workspace_snapshot = ctx.workspace_snapshot()?.read().await; + for maybe_elem_node_idx in workspace_snapshot + .outgoing_targets_for_edge_weight_kind(self.id, EdgeWeightKindDiscriminants::Use)? + { + if let NodeWeight::Prop(prop_inner) = + workspace_snapshot.get_node_weight(maybe_elem_node_idx)? + { + return Ok(prop_inner.id().into()); + } } - Ok(match self.kind() { - PropKind::Array => format!( - "{}[]", - self.child_props(ctx) - .await? - .get(0) - .ok_or(PropError::ArrayMissingElementChild(self.id))? - .ts_type(ctx) - .await? - ), - PropKind::Boolean => "boolean".to_string(), - PropKind::Integer => "number".to_string(), - PropKind::Map => format!( - "Record", - self.child_props(ctx) - .await? - .get(0) - .ok_or(PropError::MapMissingElementChild(self.id))? - .ts_type(ctx) - .await? - ), - PropKind::Object => { - let mut object_type = "{\n".to_string(); - for child in self.child_props(ctx).await? 
{ - let name_value = serde_json::to_value(&child.name)?; - let name_serialized = serde_json::to_string(&name_value)?; - object_type.push_str( - format!( - "{}: {} | null | undefined;\n", - &name_serialized, - child.ts_type(ctx).await? - ) - .as_str(), - ); - } - object_type.push('}'); + Err(PropError::MapOrArrayMissingElementProp(self.id)) + } - object_type + pub async fn find_child_prop_index_by_name( + ctx: &DalContext, + node_index: NodeIndex, + child_name: impl AsRef, + ) -> PropResult { + let workspace_snapshot = ctx.workspace_snapshot()?.read().await; + + for prop_node_index in workspace_snapshot.outgoing_targets_for_edge_weight_kind_by_index( + node_index, + EdgeWeightKindDiscriminants::Use, + )? { + if let NodeWeight::Prop(prop_inner) = + workspace_snapshot.get_node_weight(prop_node_index)? + { + if prop_inner.name() == child_name.as_ref() { + return Ok(prop_node_index); + } } - PropKind::String => "string".to_string(), - }) - } + } - /// Assembles the "json_pointer" representing the full "path" to a [`Prop`] based on its - /// lineage. - /// - /// For examples, if a [`Prop`] named "poop" had a parent named "domain" and a grandparent named - /// "root", then the "json_pointer" would be "/root/domain/poop". - pub fn json_pointer(&self) -> String { - format!("/{}", self.path().as_parts().join("/")) + Err(PropError::ChildPropNotFoundByName( + node_index, + child_name.as_ref().to_string(), + )) } - /// Finds a prop by a path made up of prop names separated by - /// [`PROP_PATH_SEPARATOR`](crate::prop::PROP_PATH_SEPARATOR) for each depth level - pub async fn find_prop_by_path( + pub async fn find_prop_id_by_path_opt( ctx: &DalContext, schema_variant_id: SchemaVariantId, path: &PropPath, - ) -> PropResult { - Self::find_prop_by_path_opt(ctx, schema_variant_id, path) - .await? 
- .ok_or(PropError::NotFoundAtPath( - path.to_string(), - *ctx.visibility(), - )) + ) -> PropResult> { + match Self::find_prop_id_by_path(ctx, schema_variant_id, path).await { + Ok(prop_id) => Ok(Some(prop_id)), + Err(err) => match err { + PropError::ChildPropNotFoundByName(_, _) => Ok(None), + err => Err(err), + }, + } } - /// Finds a prop by a path made up of prop names separated by - /// [`PROP_PATH_SEPARATOR`](crate::prop::PROP_PATH_SEPARATOR) for each depth level - pub async fn find_prop_by_path_opt( + pub async fn find_prop_id_by_path( ctx: &DalContext, schema_variant_id: SchemaVariantId, path: &PropPath, - ) -> PropResult> { - let row = ctx - .txns() - .await? - .pg() - .query_opt( - FIND_PROP_IN_TREE, - &[ - ctx.tenancy(), - ctx.visibility(), - &schema_variant_id, - &path.as_str(), - ], - ) - .await?; + ) -> PropResult { + let schema_variant_node_index = { + let workspace_snapshot = ctx.workspace_snapshot()?.read().await; - Ok(object_option_from_row_option(row)?) + workspace_snapshot.get_node_index_by_id(schema_variant_id)? + }; + + let path_parts = path.as_parts(); + + let mut current_node_index = schema_variant_node_index; + for part in path_parts { + current_node_index = + Self::find_child_prop_index_by_name(ctx, current_node_index, part).await?; + } + + let workspace_snapshot = ctx.workspace_snapshot()?.read().await; + Ok(workspace_snapshot + .get_node_weight(current_node_index)? 
+ .id() + .into()) + } + + pub async fn find_prop_by_path( + ctx: &DalContext, + schema_variant_id: SchemaVariantId, + path: &PropPath, + ) -> PropResult { + let prop_id = Self::find_prop_id_by_path(ctx, schema_variant_id, path).await?; + Self::get_by_id(ctx, prop_id).await } - pub async fn create_default_prototypes_and_values( + pub async fn set_prototype_id( ctx: &DalContext, prop_id: PropId, + attribute_prototype_id: AttributePrototypeId, ) -> PropResult<()> { - #[derive(Debug)] - struct WorkItem { - maybe_parent: Option, - prop: Prop, - } - - let mut root_prop = Prop::get_by_id(ctx, &prop_id) - .await? - .ok_or_else(|| PropError::NotFound(prop_id, *ctx.visibility()))?; - - // We should make sure that we're creating AttributePrototypes & AttributeValues - // contiguously from the root. - while let Some(parent) = root_prop.parent_prop(ctx).await? { - root_prop = parent; - } + let mut workspace_snapshot = ctx.workspace_snapshot()?.write().await; + workspace_snapshot.add_edge( + prop_id, + EdgeWeight::new(ctx.change_set_pointer()?, EdgeWeightKind::Prototype(None))?, + attribute_prototype_id, + )?; - let mut work_queue: VecDeque = VecDeque::from(vec![WorkItem { - maybe_parent: None, - prop: root_prop, - }]); - - let func_name = "si:unset".to_string(); - let mut funcs = Func::find_by_attr(ctx, "name", &func_name).await?; - let func = funcs.pop().ok_or(PropError::MissingFunc(func_name))?; - - // No matter what, we need a FuncBindingReturnValueId to create a new attribute prototype. - // If the func binding was created, we execute on it to generate our value id. Otherwise, - // we try to find a value by id and then fallback to executing anyway if one was not found. 
- let (func_binding, func_binding_return_value) = - FuncBinding::create_and_execute(ctx, serde_json::json![null], *func.id(), vec![]) - .await?; - - while let Some(WorkItem { maybe_parent, prop }) = work_queue.pop_front() { - let attribute_context = AttributeContext::builder() - .set_prop_id(*prop.id()) - .to_context()?; - - let attribute_value = if let Some(attribute_value) = - AttributeValue::find_for_context(ctx, attribute_context.into()).await? - { - attribute_value - } else { - AttributePrototype::new( - ctx, - *func.id(), - *func_binding.id(), - *func_binding_return_value.id(), - attribute_context, - None, - maybe_parent, - ) - .await?; - - AttributeValue::find_for_context(ctx, attribute_context.into()) - .await? - .ok_or(AttributeValueError::NotFoundForReadContext( - attribute_context.into(), - ))? - }; + Ok(()) + } - if *prop.kind() == PropKind::Object { - let child_props = prop.child_props(ctx).await?; - if !child_props.is_empty() { - work_queue.extend(child_props.iter().map(|p| WorkItem { - maybe_parent: Some(*attribute_value.id()), - prop: p.clone(), - })); + pub async fn prototypes_by_key( + ctx: &DalContext, + prop_id: PropId, + ) -> PropResult, AttributePrototypeId)>> { + let workspace_snapshot = ctx.workspace_snapshot()?.read().await; + Ok(workspace_snapshot + .edges_directed_for_edge_weight_kind( + prop_id, + Outgoing, + EdgeWeightKindDiscriminants::Prototype, + )? 
+ .iter() + .filter_map(|edge_ref| { + match ( + edge_ref.weight().kind(), + workspace_snapshot.get_node_weight(edge_ref.target()).ok(), + ) { + (EdgeWeightKind::Prototype(key), Some(node_weight)) => { + Some((key.to_owned(), node_weight.id().into())) + } + _ => None, } - } - } + }) + .collect()) + } - Ok(()) + pub async fn prototype_id( + ctx: &DalContext, + prop_id: PropId, + ) -> PropResult { + let workspace_snapshot = ctx.workspace_snapshot()?.read().await; + let prototype_node_index = *workspace_snapshot + .outgoing_targets_for_edge_weight_kind(prop_id, EdgeWeightKindDiscriminants::Prototype)? + .first() + .ok_or(PropError::MissingPrototypeForProp(prop_id))?; + + Ok(workspace_snapshot + .get_node_weight(prototype_node_index)? + .id() + .into()) } pub async fn set_default_value( - &self, ctx: &DalContext, + prop_id: PropId, value: T, ) -> PropResult<()> { let value = serde_json::to_value(value)?; - match self.kind() { - PropKind::String | PropKind::Boolean | PropKind::Integer => { - let attribute_read_context = AttributeReadContext::default_with_prop(self.id); - let attribute_value = AttributeValue::find_for_context(ctx, attribute_read_context) - .await? - .ok_or(AttributeValueError::NotFoundForReadContext( - attribute_read_context, - ))?; - let parent_attribute_value = attribute_value - .parent_attribute_value(ctx) - .await? - .ok_or_else(|| AttributeValueError::ParentNotFound(*attribute_value.id()))?; - - // Ensure the parent project is an object. Technically, we should ensure that every - // prop in entire lineage is of kind object, but this should (hopefully) suffice - // for now. Ideally, this would be handled in a query. - let parent_prop = Prop::get_by_id(ctx, &parent_attribute_value.context.prop_id()) - .await? 
- .ok_or_else(|| { - PropError::NotFound( - parent_attribute_value.context.prop_id(), - *ctx.visibility(), - ) - })?; - if parent_prop.kind() != &PropKind::Object { - return Err(PropError::ParentPropIsNotObjectForPropWithDefaultValue( - *parent_prop.kind(), - )); - } - let context = AttributeContextBuilder::from(attribute_read_context).to_context()?; - AttributeValue::update_for_context( - ctx, - *attribute_value.id(), - Some(*parent_attribute_value.id()), - context, - Some(value), - None, - ) - .await?; - Ok(()) - } - _ => Err(PropError::SetDefaultForNonScalar(*self.kind())), + let prop = Prop::get_by_id(ctx, prop_id).await?; + if !matches!( + prop.kind, + PropKind::String | PropKind::Boolean | PropKind::Integer + ) { + return Err(PropError::SetDefaultForNonScalar(prop_id, prop.kind)); } - } - pub async fn validation_props( - ctx: &DalContext, - component_id: ComponentId, - ) -> PropResult> { - let row = ctx - .txns() + let prototype_id = Prop::prototype_id(ctx, prop_id).await?; + let intrinsic: IntrinsicFunc = prop.kind.into(); + let intrinsic_id = Func::find_intrinsic(ctx, intrinsic).await?; + let func_arg_id = *FuncArgument::list_ids_for_func(ctx, intrinsic_id) + .await? + .first() + .ok_or(FuncArgumentError::IntrinsicMissingFuncArgumentEdge( + intrinsic.name().into(), + intrinsic_id, + ))?; + + AttributePrototype::update_func_by_id(ctx, prototype_id, intrinsic_id).await?; + AttributePrototypeArgument::new(ctx, prototype_id, func_arg_id) .await? - .pg() - .query( - FIND_VALIDATIONS_FOR_COMPONENT, - &[ctx.tenancy(), ctx.visibility(), &component_id], - ) + .set_value_from_static_value(ctx, value) .await?; - Ok(standard_model::objects_from_rows(row)?) - } - - pub async fn set_default_diff(&mut self, ctx: &DalContext) -> PropResult<()> { - let func = Func::find_by_attr(ctx, "name", &"si:diff") - .await? 
- .pop() - .ok_or(PropError::DefaultDiffFunctionNotFound)?; - self.set_diff_func_id(ctx, Some(*func.id())).await + Ok(()) } - #[instrument(level = "info", skip_all)] - pub async fn run_validation( + #[allow(dead_code)] + async fn get_content( ctx: &DalContext, prop_id: PropId, - component_id: ComponentId, - key: Option<&str>, - value: serde_json::Value, - ) { - if let Err(err) = - Self::run_validation_fallible(ctx, prop_id, component_id, key, &value).await - { - error!("Unable to run validation for prop {prop_id} and component {component_id} with value {value:#?}: {err}"); - } + ) -> PropResult<(ContentHash, PropContentV1)> { + let workspace_snapshot = ctx.workspace_snapshot()?.read().await; + let id: Ulid = prop_id.into(); + let node_index = workspace_snapshot.get_node_index_by_id(id)?; + let node_weight = workspace_snapshot.get_node_weight(node_index)?; + let hash = node_weight.content_hash(); + + let content: PropContent = ctx + .content_store() + .lock() + .await + .get(&hash) + .await? + .ok_or(WorkspaceSnapshotError::MissingContentFromStore(id))?; + + // NOTE(nick,jacob,zack): if we had a v2, then there would be migration logic here. + let PropContent::V1(inner) = content; + + Ok((hash, inner)) } - async fn run_validation_fallible( - ctx: &DalContext, - prop_id: PropId, - component_id: ComponentId, - key: Option<&str>, - value: &serde_json::Value, - ) -> PropResult<()> { - if let Some(prop) = Prop::get_by_id(ctx, &prop_id).await? 
{ - if let Some(validation_format) = prop.validation_format() { - let function = "const main = (object) => { - const schema: Schema = Joi.build(JSON.parse(object.format)); - return { result: schema.validate(object.value) }; - }"; - let code_base64 = general_purpose::STANDARD_NO_PAD.encode(function); - let (output_tx, mut rx) = mpsc::channel(64); - let veritech = ctx.veritech().clone(); - let request = veritech_client::ResolverFunctionRequest { - execution_id: "vagnermoura".to_owned(), - handler: "main".to_owned(), - component: veritech_client::ResolverFunctionComponent { - data: veritech_client::ComponentView { - properties: serde_json::json!({ "format": serde_json::to_value(validation_format)?, "value": value }), - ..Default::default() - }, - parents: Vec::new(), - }, - response_type: veritech_client::ResolverFunctionResponseType::Object, - code_base64, - before: Vec::new(), - }; - let value = veritech - .execute_resolver_function(output_tx, &request) - .await?; - let mut logs = Vec::new(); - while let Some(log) = rx.recv().await { - logs.push(log); - } + pub async fn modify(self, ctx: &DalContext, lambda: L) -> PropResult + where + L: FnOnce(&mut Self) -> PropResult<()>, + { + let mut prop = self; - let value = match value { - FunctionResult::Failure(err) => ValidationOutput { - status: ValidationStatus::Failure, - message: format!("{}: {}", err.error.kind, err.error.message), - logs, - }, - FunctionResult::Success(data) - if data.data.pointer("/result/error").is_some() => - { - ValidationOutput { - status: ValidationStatus::Error, - message: data - .data - .pointer("/result/error/details/0/message") - .map(String::deserialize) - .unwrap_or_else(|| serde_json::to_string_pretty(&data.data))?, - logs, - } - } - FunctionResult::Success(_) => ValidationOutput { - status: ValidationStatus::Success, - message: "OK".to_owned(), - logs, - }, - }; - ValidationResolver::upsert(ctx, prop_id, component_id, key, &value) - .await - .map_err(Box::new)?; - } + let before = 
PropContentV1::from(prop.clone()); + lambda(&mut prop)?; + let updated = PropContentV1::from(prop.clone()); - let component_value_json = ComponentView::new(ctx, component_id) + if updated != before { + let hash = ctx + .content_store() + .lock() .await - .map_err(Box::new)? - .properties; - crate::job::definition::dependent_values_update::update_summary_tables( - ctx, - &component_value_json, - component_id, - ) - .await - .map_err(Box::new)?; + .add(&PropContent::V1(updated.clone()))?; + + let mut workspace_snapshot = ctx.workspace_snapshot()?.write().await; + workspace_snapshot.update_content(ctx.change_set_pointer()?, prop.id.into(), hash)?; } - Ok(()) + Ok(prop) } } + +// impl Prop { +// /// Create a new [`Prop`]. A corresponding [`AttributePrototype`] and [`AttributeValue`] will be +// /// created when the provided [`SchemaVariant`](crate::SchemaVariant) is +// /// [`finalized`](crate::SchemaVariant::finalize). +// #[instrument(skip_all)] +// pub async fn new( +// ctx: &DalContext, +// name: impl AsRef, +// kind: PropKind, +// widget_kind_and_options: Option<(WidgetKind, Option)>, +// schema_variant_id: SchemaVariantId, +// parent_prop_id: Option, +// documentation: Option, +// ) -> PropResult { +// let name = name.as_ref(); +// let (widget_kind, widget_options) = match widget_kind_and_options { +// Some((kind, options)) => (kind, options), +// None => (WidgetKind::from(kind), None), +// }; + +// let row = ctx +// .txns() +// .await? +// .pg() +// .query_one( +// "SELECT object FROM prop_create_v2($1, $2, $3, $4, $5, $6, $7, $8, $9)", +// &[ +// ctx.tenancy(), +// ctx.visibility(), +// &name, +// &kind.as_ref(), +// &widget_kind.as_ref(), +// &widget_options.as_ref(), +// &schema_variant_id, +// &parent_prop_id, +// &documentation, +// ], +// ) +// .await?; +// Ok(finish_create_from_row(ctx, row).await?) 
+// } + +// standard_model_accessor!(name, String, PropResult); +// standard_model_accessor!(kind, Enum(PropKind), PropResult); +// standard_model_accessor!(widget_kind, Enum(WidgetKind), PropResult); +// standard_model_accessor!(widget_options, Option, PropResult); +// standard_model_accessor!(doc_link, Option, PropResult); +// standard_model_accessor!(documentation, Option, PropResult); +// standard_model_accessor!(hidden, bool, PropResult); +// standard_model_accessor!(refers_to_prop_id, Option, PropResult); +// standard_model_accessor!(diff_func_id, Option, PropResult); +// standard_model_accessor!(schema_variant_id, Pk(SchemaVariantId), PropResult); + +// pub fn path(&self) -> PropPath { +// self.path.to_owned().into() +// } + +// // TODO(nick): replace this table with a foreign key relationship. +// standard_model_belongs_to!( +// lookup_fn: parent_prop, +// set_fn: set_parent_prop_do_not_use, +// unset_fn: unset_parent_prop_do_not_use, +// table: "prop_belongs_to_prop", +// model_table: "props", +// belongs_to_id: PropId, +// returns: Prop, +// result: PropResult, +// ); + +// // TODO(nick): replace this table with a foreign key relationship. +// standard_model_has_many!( +// lookup_fn: child_props, +// table: "prop_belongs_to_prop", +// model_table: "props", +// returns: Prop, +// result: PropResult, +// ); + +// pub async fn find_root_prop_for_prop( +// ctx: &DalContext, +// prop_id: PropId, +// ) -> PropResult> { +// let row = ctx +// .txns() +// .await? +// .pg() +// .query_opt( +// FIND_ROOT_PROP_FOR_PROP, +// &[ctx.tenancy(), ctx.visibility(), &prop_id], +// ) +// .await?; + +// Ok(standard_model::object_option_from_row_option::(row)?) +// } + +// /// Returns the given [`Prop`] and all ancestor [`Props`](crate::Prop) back to the root. +// /// Ancestor props are ordered by depth, starting from the root prop. +// pub async fn all_ancestor_props(ctx: &DalContext, prop_id: PropId) -> PropResult> { +// let rows = ctx +// .txns() +// .await? 
+// .pg() +// .query( +// ALL_ANCESTOR_PROPS, +// &[ctx.tenancy(), ctx.visibility(), &prop_id], +// ) +// .await?; +// Ok(objects_from_rows(rows)?) +// } + +// #[instrument(skip_all)] +// #[async_recursion] +// pub async fn ts_type(&self, ctx: &DalContext) -> PropResult { +// // XXX: Hack! The payload prop kind is a string but we're actually storing arbitrary json +// // there and expect it to be a JSON object in most of our code. However, the resource_value +// // work is likely to remove the need for this entirely +// if self.path() == PropPath::new(["root", "resource", "payload"]) { +// return Ok("any".to_string()); +// } + +// // Note: we should fix this by having propper enums as prop types +// if self.path() == PropPath::new(["root", "resource", "status"]) { +// return Ok("'ok' | 'warning' | 'error' | undefined | null".to_owned()); +// } + +// Ok(match self.kind() { +// PropKind::Array => format!( +// "{}[]", +// self.child_props(ctx) +// .await? +// .get(0) +// .ok_or(PropError::ArrayMissingElementChild(self.id))? +// .ts_type(ctx) +// .await? +// ), +// PropKind::Boolean => "boolean".to_string(), +// PropKind::Integer => "number".to_string(), +// PropKind::Map => format!( +// "Record", +// self.child_props(ctx) +// .await? +// .get(0) +// .ok_or(PropError::MapMissingElementChild(self.id))? +// .ts_type(ctx) +// .await? +// ), +// PropKind::Object => { +// let mut object_type = "{\n".to_string(); +// for child in self.child_props(ctx).await? { +// let name_value = serde_json::to_value(&child.name)?; +// let name_serialized = serde_json::to_string(&name_value)?; +// object_type.push_str( +// format!( +// "{}: {} | null | undefined;\n", +// &name_serialized, +// child.ts_type(ctx).await? +// ) +// .as_str(), +// ); +// } +// object_type.push('}'); + +// object_type +// } +// PropKind::String => "string".to_string(), +// }) +// } + +// /// Assembles the "json_pointer" representing the full "path" to a [`Prop`] based on its +// /// lineage. 
+// /// +// /// For examples, if a [`Prop`] named "poop" had a parent named "domain" and a grandparent named +// /// "root", then the "json_pointer" would be "/root/domain/poop". +// pub async fn json_pointer(&self, ctx: &DalContext) -> PropResult { +// // NOTE(nick,zack): if this ends up getting used frequently to manage paths corresponding +// // to attribute (and/or property editor) values, then we should consider strongly typing +// // "json_pointer". +// Ok([ +// "/".to_string(), +// Prop::all_ancestor_props(ctx, *self.id()) +// .await? +// .iter() +// .map(|prop| prop.name().to_string()) +// .collect::>() +// .join("/"), +// ] +// .join("")) +// } + +// /// Finds a prop by a path made up of prop names separated by +// /// [`PROP_PATH_SEPARATOR`](crate::prop::PROP_PATH_SEPARATOR) for each depth level +// pub async fn find_prop_by_path( +// ctx: &DalContext, +// schema_variant_id: SchemaVariantId, +// path: &PropPath, +// ) -> PropResult { +// Self::find_prop_by_path_opt(ctx, schema_variant_id, path) +// .await? +// .ok_or(PropError::NotFoundAtPath( +// path.to_string(), +// *ctx.visibility(), +// )) +// } + +// /// Finds a prop by a path made up of prop names separated by +// /// [`PROP_PATH_SEPARATOR`](crate::prop::PROP_PATH_SEPARATOR) for each depth level +// pub async fn find_prop_by_path_opt( +// ctx: &DalContext, +// schema_variant_id: SchemaVariantId, +// path: &PropPath, +// ) -> PropResult> { +// let row = ctx +// .txns() +// .await? +// .pg() +// .query_opt( +// FIND_PROP_IN_TREE, +// &[ +// ctx.tenancy(), +// ctx.visibility(), +// &schema_variant_id, +// &path.as_str(), +// ], +// ) +// .await?; + +// Ok(object_option_from_row_option(row)?) +// } + +// pub async fn create_default_prototypes_and_values( +// ctx: &DalContext, +// prop_id: PropId, +// ) -> PropResult<()> { +// #[derive(Debug)] +// struct WorkItem { +// maybe_parent: Option, +// prop: Prop, +// } + +// let mut root_prop = Prop::get_by_id(ctx, &prop_id) +// .await? 
+// .ok_or_else(|| PropError::NotFound(prop_id, *ctx.visibility()))?; + +// // We should make sure that we're creating AttributePrototypes & AttributeValues +// // contiguously from the root. +// while let Some(parent) = root_prop.parent_prop(ctx).await? { +// root_prop = parent; +// } + +// let mut work_queue: VecDeque = VecDeque::from(vec![WorkItem { +// maybe_parent: None, +// prop: root_prop, +// }]); + +// let func_name = "si:unset".to_string(); +// let mut funcs = Func::find_by_attr(ctx, "name", &func_name).await?; +// let func = funcs.pop().ok_or(PropError::MissingFunc(func_name))?; + +// // No matter what, we need a FuncBindingReturnValueId to create a new attribute prototype. +// // If the func binding was created, we execute on it to generate our value id. Otherwise, +// // we try to find a value by id and then fallback to executing anyway if one was not found. +// let (func_binding, func_binding_return_value) = +// FuncBinding::create_and_execute(ctx, serde_json::json![null], *func.id(), vec![]) +// .await?; + +// while let Some(WorkItem { maybe_parent, prop }) = work_queue.pop_front() { +// let attribute_context = AttributeContext::builder() +// .set_prop_id(*prop.id()) +// .to_context()?; + +// let attribute_value = if let Some(attribute_value) = +// AttributeValue::find_for_context(ctx, attribute_context.into()).await? +// { +// attribute_value +// } else { +// AttributePrototype::new( +// ctx, +// *func.id(), +// *func_binding.id(), +// *func_binding_return_value.id(), +// attribute_context, +// None, +// maybe_parent, +// ) +// .await?; + +// AttributeValue::find_for_context(ctx, attribute_context.into()) +// .await? +// .ok_or(AttributeValueError::NotFoundForReadContext( +// attribute_context.into(), +// ))? 
+// }; + +// if *prop.kind() == PropKind::Object { +// let child_props = prop.child_props(ctx).await?; +// if !child_props.is_empty() { +// work_queue.extend(child_props.iter().map(|p| WorkItem { +// maybe_parent: Some(*attribute_value.id()), +// prop: p.clone(), +// })); +// } +// } +// } + +// Ok(()) +// } + +// pub async fn set_default_value( +// &self, +// ctx: &DalContext, +// value: T, +// ) -> PropResult<()> { +// let value = serde_json::to_value(value)?; +// match self.kind() { +// PropKind::String | PropKind::Boolean | PropKind::Integer => { +// let attribute_read_context = AttributeReadContext::default_with_prop(self.id); +// let attribute_value = AttributeValue::find_for_context(ctx, attribute_read_context) +// .await? +// .ok_or(AttributeValueError::NotFoundForReadContext( +// attribute_read_context, +// ))?; +// let parent_attribute_value = attribute_value +// .parent_attribute_value(ctx) +// .await? +// .ok_or_else(|| AttributeValueError::ParentNotFound(*attribute_value.id()))?; + +// // Ensure the parent project is an object. Technically, we should ensure that every +// // prop in entire lineage is of kind object, but this should (hopefully) suffice +// // for now. Ideally, this would be handled in a query. +// let parent_prop = Prop::get_by_id(ctx, &parent_attribute_value.context.prop_id()) +// .await? 
+// .ok_or_else(|| { +// PropError::NotFound( +// parent_attribute_value.context.prop_id(), +// *ctx.visibility(), +// ) +// })?; +// if parent_prop.kind() != &PropKind::Object { +// return Err(PropError::ParentPropIsNotObjectForPropWithDefaultValue( +// *parent_prop.kind(), +// )); +// } + +// let context = AttributeContextBuilder::from(attribute_read_context).to_context()?; +// AttributeValue::update_for_context( +// ctx, +// *attribute_value.id(), +// Some(*parent_attribute_value.id()), +// context, +// Some(value), +// None, +// ) +// .await?; +// Ok(()) +// } +// _ => Err(PropError::SetDefaultForNonScalar(*self.kind())), +// } +// } + +// pub async fn set_default_diff(&mut self, ctx: &DalContext) -> PropResult<()> { +// let func = Func::find_by_attr(ctx, "name", &"si:diff") +// .await? +// .pop() +// .ok_or(PropError::DefaultDiffFunctionNotFound)?; +// self.set_diff_func_id(ctx, Some(*func.id())).await +// } +// } diff --git a/lib/dal/src/property_editor.rs b/lib/dal/src/property_editor.rs index d22489568c..8a0387605e 100644 --- a/lib/dal/src/property_editor.rs +++ b/lib/dal/src/property_editor.rs @@ -3,18 +3,22 @@ //! and mutating said properties. 
use serde::{Deserialize, Serialize}; -use thiserror::Error; - use si_data_pg::PgError; +use thiserror::Error; +use crate::attribute::value::AttributeValueError; +use crate::prop::PropError; +use crate::workspace_snapshot::node_weight::NodeWeightError; +use crate::workspace_snapshot::WorkspaceSnapshotError; use crate::{ - pk, schema::variant::SchemaVariantError, AttributeValueError, AttributeValueId, ComponentError, - PropError, PropId, SchemaVariantId, StandardModelError, TransactionsError, + pk, AttributeValueId, ComponentError, PropId, SchemaVariantId, StandardModelError, + TransactionsError, }; pub mod schema; pub mod values; -pub mod values_summary; + +// pub mod validations; #[remain::sorted] #[derive(Error, Debug)] @@ -25,20 +29,16 @@ pub enum PropertyEditorError { BadAttributeReadContext(String), #[error("component error: {0}")] Component(#[from] ComponentError), + #[error("component not found")] + ComponentNotFound, + #[error("node weight error: {0}")] + NodeWeight(#[from] NodeWeightError), #[error("no value(s) found for property editor prop id: {0}")] NoValuesFoundForPropertyEditorProp(PropertyEditorPropId), #[error("pg error: {0}")] Pg(#[from] PgError), #[error("prop error: {0}")] Prop(#[from] PropError), - #[error("property editor values summary: {0}")] - PropertyEditorValuesSummary(String), - #[error("prop not found for id: {0}")] - PropNotFound(PropId), - #[error("root prop not found for schema variant")] - RootPropNotFound, - #[error("schema variant: {0}")] - SchemaVariant(#[from] SchemaVariantError), #[error("schema variant not found: {0}")] SchemaVariantNotFound(SchemaVariantId), #[error("error serializing/deserializing json: {0}")] @@ -49,6 +49,10 @@ pub enum PropertyEditorError { TooManyValuesFoundForPropertyEditorProp(PropertyEditorPropId), #[error("transactions error: {0}")] Transactions(#[from] TransactionsError), + #[error("could not acquire lock: {0}")] + TryLock(#[from] tokio::sync::TryLockError), + #[error("workspace snapshot error: {0}")] 
+ WorkspaceSnapshot(#[from] WorkspaceSnapshotError), } pub type PropertyEditorResult = Result; diff --git a/lib/dal/src/property_editor/schema.rs b/lib/dal/src/property_editor/schema.rs index 56105e5f34..b33c303c19 100644 --- a/lib/dal/src/property_editor/schema.rs +++ b/lib/dal/src/property_editor/schema.rs @@ -1,18 +1,16 @@ //! This module contains the ability to construct a [`schema`](PropertyEditorSchema) for a -//! [`Component`](crate::Component)'s properties. +//! [`SchemaVariant`](crate::SchemaVariant)'s properties. use serde::{Deserialize, Serialize}; -use serde_json::Value; -use std::collections::HashMap; -use strum::{AsRefStr, Display, EnumString}; - use si_pkg::PropSpecWidgetKind; +use std::collections::{HashMap, VecDeque}; +use strum::{AsRefStr, Display, EnumString}; -use crate::property_editor::{PropertyEditorError, PropertyEditorPropId, PropertyEditorResult}; -use crate::{DalContext, Prop, PropKind, SchemaVariant, SchemaVariantId, StandardModel}; - -const PROPERTY_EDITOR_SCHEMA_FOR_SCHEMA_VARIANT: &str = - include_str!("../queries/property_editor_schema_for_schema_variant.sql"); +use crate::prop::{PropPath, WidgetOptions}; +use crate::property_editor::{PropertyEditorPropId, PropertyEditorResult}; +use crate::workspace_snapshot::edge_weight::EdgeWeightKindDiscriminants; +use crate::workspace_snapshot::node_weight::NodeWeight; +use crate::{DalContext, Prop, PropId, PropKind, SchemaVariantId}; #[derive(Clone, Debug, Serialize, Deserialize)] #[serde(rename_all = "camelCase")] @@ -23,52 +21,71 @@ pub struct PropertyEditorSchema { } impl PropertyEditorSchema { - pub async fn for_schema_variant( + pub async fn assemble( ctx: &DalContext, schema_variant_id: SchemaVariantId, ) -> PropertyEditorResult { - let schema_variant = SchemaVariant::get_by_id(ctx, &schema_variant_id) - .await? 
- .ok_or(PropertyEditorError::SchemaVariantNotFound( - schema_variant_id, - ))?; - let mut props: HashMap = HashMap::new(); - let mut child_props: HashMap> = - HashMap::new(); - - let rows = ctx - .txns() - .await? - .pg() - .query( - PROPERTY_EDITOR_SCHEMA_FOR_SCHEMA_VARIANT, - &[ctx.tenancy(), ctx.visibility(), &schema_variant.id()], - ) - .await?; - - for row in rows { - let json: Value = row.try_get("object")?; - let prop: Prop = serde_json::from_value(json)?; - // Omit any secret definition props in the result - if prop.json_pointer().starts_with("/root/secret_definition") { - continue; - } - let property_editor_prop = PropertyEditorProp::new(prop); + let mut props = HashMap::new(); + let mut child_props = HashMap::new(); + + // Get the root prop and load it into the work queue. + let root_prop_id = + Prop::find_prop_id_by_path(ctx, schema_variant_id, &PropPath::new(["root"])).await?; + let root_prop = Prop::get_by_id(ctx, root_prop_id).await?; + let root_property_editor_prop = PropertyEditorProp::new(root_prop); + let root_property_editor_prop_id = root_property_editor_prop.id; + props.insert(root_property_editor_prop_id, root_property_editor_prop); - let maybe_child_prop_ids: Option> = - row.try_get("child_prop_ids")?; - if let Some(child_prop_ids) = maybe_child_prop_ids { - child_props.insert(property_editor_prop.id, child_prop_ids); + let mut work_queue = VecDeque::from([(root_prop_id, root_property_editor_prop_id)]); + while let Some((prop_id, property_editor_prop_id)) = work_queue.pop_front() { + // Collect all child props. + let mut cache = Vec::new(); + { + let workspace_snapshot = ctx.workspace_snapshot()?.read().await; + for child_prop_node_index in workspace_snapshot + .outgoing_targets_for_edge_weight_kind( + prop_id, + EdgeWeightKindDiscriminants::Use, + )? + { + if let NodeWeight::Prop(child_prop_weight) = + workspace_snapshot.get_node_weight(child_prop_node_index)? 
+ { + let child_prop_id: PropId = child_prop_weight.id().into(); + + // Skip anything at and under "/root/secret_definition" + if prop_id == root_prop_id + && child_prop_weight.name() == "secret_definition" + { + continue; + } + cache.push(child_prop_id); + } + } } - props.insert(property_editor_prop.id, property_editor_prop); + // Now that we have the child props, prepare the property editor props and load the work queue. + let mut child_property_editor_prop_ids = Vec::new(); + for child_prop_id in cache { + // NOTE(nick): we already have the node weight, but I believe we still want to use "get_by_id" to + // get the content from the store. Perhaps, there's a more efficient way that we can do this. + let child_prop = Prop::get_by_id(ctx, child_prop_id).await?; + let child_property_editor_prop = PropertyEditorProp::new(child_prop); + + // Load the work queue with the child prop. + work_queue.push_back((child_prop_id, child_property_editor_prop.id)); + + // Cache the child property editor props to eventually insert into the child property editor props map. + child_property_editor_prop_ids.push(child_property_editor_prop.id); + + // Insert the child property editor prop into the props map. 
+ props.insert(child_property_editor_prop.id, child_property_editor_prop); + } + child_props.insert(property_editor_prop_id, child_property_editor_prop_ids); } - let root_prop_id = schema_variant - .root_prop_id() - .ok_or(PropertyEditorError::RootPropNotFound)?; Ok(PropertyEditorSchema { - root_prop_id: (*root_prop_id).into(), + root_prop_id: root_prop_id.into(), props, child_props, }) @@ -90,16 +107,16 @@ pub struct PropertyEditorProp { impl PropertyEditorProp { pub fn new(prop: Prop) -> PropertyEditorProp { PropertyEditorProp { - id: (*prop.id()).into(), - name: prop.name().into(), - kind: prop.kind().into(), + id: prop.id.into(), + name: prop.name, + kind: prop.kind.into(), widget_kind: PropertyEditorPropWidgetKind::new( - *prop.widget_kind(), - prop.widget_options().map(|v| v.to_owned()), + prop.widget_kind, + prop.widget_options.map(|v| v.to_owned()), ), - doc_link: prop.doc_link().map(Into::into), - documentation: prop.documentation().map(Into::into), - validation_format: prop.validation_format().map(Into::into), + doc_link: prop.doc_link.map(Into::into), + documentation: prop.documentation.map(Into::into), + validation_format: prop.validation_format.map(Into::into), } } } @@ -116,8 +133,8 @@ pub enum PropertyEditorPropKind { String, } -impl From<&PropKind> for PropertyEditorPropKind { - fn from(prop_kind: &PropKind) -> Self { +impl From for PropertyEditorPropKind { + fn from(prop_kind: PropKind) -> Self { match prop_kind { PropKind::Array => Self::Array, PropKind::Boolean => Self::Boolean, @@ -138,12 +155,12 @@ pub enum PropertyEditorPropWidgetKind { Checkbox, CodeEditor, Color, - ComboBox { options: Option }, + ComboBox { options: Option }, Header, Map, Password, - Secret { options: Option }, - Select { options: Option }, + Secret { options: Option }, + Select { options: Option }, Text, TextArea, } @@ -214,7 +231,7 @@ impl From<&PropSpecWidgetKind> for WidgetKind { } impl PropertyEditorPropWidgetKind { - pub fn new(widget_kind: WidgetKind, widget_options: 
Option) -> Self { + pub fn new(widget_kind: WidgetKind, widget_options: Option) -> Self { match widget_kind { WidgetKind::Array => Self::Array, WidgetKind::Checkbox => Self::Checkbox, diff --git a/lib/dal/src/property_editor/values.rs b/lib/dal/src/property_editor/values.rs index 591a97c5bc..0d6756e7bc 100644 --- a/lib/dal/src/property_editor/values.rs +++ b/lib/dal/src/property_editor/values.rs @@ -1,14 +1,19 @@ //! This module contains the ability to construct values reflecting the latest state of a //! [`Component`](crate::Component)'s properties. -use std::collections::HashMap; - +use petgraph::prelude::{EdgeRef, NodeIndex}; +use petgraph::Direction; use serde::{Deserialize, Serialize}; use serde_json::Value; +use std::collections::{HashMap, VecDeque}; -use crate::property_editor::{PropertyEditorError, PropertyEditorResult}; +use crate::property_editor::PropertyEditorResult; use crate::property_editor::{PropertyEditorPropId, PropertyEditorValueId}; -use crate::{AttributeValueId, ComponentId, DalContext, FuncId, Prop, PropId, StandardModel}; +use crate::workspace_snapshot::edge_weight::EdgeWeightKind; + +use crate::{ + AttributeValue, AttributeValueId, Component, ComponentId, DalContext, FuncId, Prop, PropId, +}; #[derive(Clone, Debug, Serialize, Deserialize)] #[serde(rename_all = "camelCase")] @@ -19,26 +24,181 @@ pub struct PropertyEditorValues { } impl PropertyEditorValues { - pub async fn for_component( + pub async fn assemble( ctx: &DalContext, component_id: ComponentId, - ) -> PropertyEditorResult { - if let Some(summary) = - super::values_summary::PropertyEditorValuesSummary::get_by_id(ctx, &component_id) - .await - .map_err(|e| PropertyEditorError::PropertyEditorValuesSummary(e.to_string()))? - .map(|v| v.property_editor_values().clone()) - { - return Ok(summary); + ) -> PropertyEditorResult { + let mut values = HashMap::new(); + let mut child_values = HashMap::new(); + + // Get the root attribute value and load it into the work queue. 
+ let root_attribute_value_id = Component::root_attribute_value_id(ctx, component_id).await?; + let root_property_editor_value_id = PropertyEditorValueId::from(root_attribute_value_id); + let root_prop_id = AttributeValue::prop(ctx, root_attribute_value_id).await?; + let root_attribute_value = AttributeValue::get_by_id(ctx, root_attribute_value_id).await?; + + let controlling_func_id = FuncId::NONE; + + values.insert( + root_property_editor_value_id, + PropertyEditorValue { + id: root_property_editor_value_id, + prop_id: root_prop_id.into(), + key: None, + value: root_attribute_value + .value(ctx) + .await? + .unwrap_or(Value::Null), + // TODO(nick): restore all these fields below. + is_from_external_source: false, + can_be_set_by_socket: false, + is_controlled_by_intrinsic_func: true, + controlling_func_id, + controlling_attribute_value_id: root_property_editor_value_id.into(), + overridden: false, + }, + ); + + let mut work_queue = + VecDeque::from([(root_attribute_value_id, root_property_editor_value_id)]); + while let Some((attribute_value_id, property_editor_value_id)) = work_queue.pop_front() { + // Collect all child attribute values. + let mut cache: Vec<(AttributeValueId, Option)> = Vec::new(); + { + let workspace_snapshot = ctx.workspace_snapshot()?.read().await; + + let child_attribute_values_with_keys: Vec<(NodeIndex, Option)> = + workspace_snapshot + .edges_directed(attribute_value_id, Direction::Outgoing)? + .filter_map(|edge_ref| { + if let EdgeWeightKind::Contain(key) = edge_ref.weight().kind() { + Some((edge_ref.target(), key.to_owned())) + } else { + None + } + }) + .collect(); + + // NOTE(nick): this entire function is likely wasteful. Zack and Jacob, have mercy on me. 
+ for (child_attribute_value_node_index, key) in child_attribute_values_with_keys { + let child_attribute_value_node_weight = + workspace_snapshot.get_node_weight(child_attribute_value_node_index)?; + let content = + child_attribute_value_node_weight.get_attribute_value_node_weight()?; + cache.push((content.id().into(), key)); + } + } + + // Now that we have the child props, prepare the property editor props and load the work queue. + let mut child_property_editor_value_ids = Vec::new(); + for (child_attribute_value_id, key) in cache { + // NOTE(nick): we already have the node weight, but I believe we still want to use "get_by_id" to + // get the content from the store. Perhaps, there's a more efficient way that we can do this. + let child_attribute_value = + AttributeValue::get_by_id(ctx, child_attribute_value_id).await?; + let prop_id_for_child_attribute_value = + AttributeValue::prop(ctx, child_attribute_value_id).await?; + let child_property_editor_value_id = + PropertyEditorValueId::from(child_attribute_value_id); + + let child_property_editor_value = PropertyEditorValue { + id: child_property_editor_value_id, + prop_id: prop_id_for_child_attribute_value.into(), + key, + value: child_attribute_value + .value(ctx) + .await? + .unwrap_or(Value::Null), + // TODO(nick): restore all the fields below. + is_from_external_source: false, + can_be_set_by_socket: false, + is_controlled_by_intrinsic_func: true, + controlling_func_id, + controlling_attribute_value_id: child_property_editor_value_id.into(), + overridden: false, + }; + + // Load the work queue with the child attribute value. + work_queue.push_back((child_attribute_value_id, child_property_editor_value.id)); + + // Cache the child property editor values to eventually insert into the child property editor values map. + child_property_editor_value_ids.push(child_property_editor_value.id); + + // Insert the child property editor value into the values map. 
+ values.insert(child_property_editor_value.id, child_property_editor_value); + } + child_values.insert(property_editor_value_id, child_property_editor_value_ids); } - // If there's no values summary, calculate it live and return it - super::values_summary::PropertyEditorValuesSummary::create_or_update_component_entry( - ctx, - component_id, - ) - .await - .map_err(|e| PropertyEditorError::PropertyEditorValuesSummary(e.to_string())) + Ok(PropertyEditorValues { + root_value_id: root_property_editor_value_id, + child_values, + values, + }) + } + + /// Finds the [`AttributeValueId`](AttributeValue) for a given [`PropId`](Prop). + /// + /// This is useful for non-maps and non-array [`Props`](Prop). + pub fn find_by_prop_id(&self, prop_id: PropId) -> Option { + self.values + .iter() + .find(|(_, property_editor_value)| property_editor_value.prop_id() == prop_id) + .map(|(_, found_property_editor_value)| { + found_property_editor_value.attribute_value_id() + }) + } + + /// Finds the [`AttributeValueId`](AttributeValue) and the [`Value`] corresponding to it for a + /// given [`PropId`](Prop). + /// + /// This is useful for non-maps and non-array [`Props`](Prop). + pub fn find_with_value_by_prop_id(&self, prop_id: PropId) -> Option<(Value, AttributeValueId)> { + self.values + .iter() + .find(|(_, property_editor_value)| property_editor_value.prop_id() == prop_id) + .map(|(_, found_property_editor_value)| { + ( + found_property_editor_value.value.to_owned(), + found_property_editor_value.attribute_value_id(), + ) + }) + } + + /// Lists the [`AttributeValueIds`](AttributeValue) for a given [`PropId`](Prop). + /// + /// This is useful for map and array [`Props`](Prop). 
+ pub fn list_by_prop_id(&self, prop_id: PropId) -> Vec { + self.values + .iter() + .filter_map(|(_, property_editor_value)| { + if property_editor_value.prop_id() == prop_id { + Some(property_editor_value.attribute_value_id()) + } else { + None + } + }) + .collect() + } + + /// Lists the [`AttributeValueIds`](AttributeValue) and the [`Values`] corresponding to them for + /// a given [`PropId`](Prop). + /// + /// This is useful for map and array [`Props`](Prop). + pub fn list_with_values_by_prop_id(&self, prop_id: PropId) -> Vec<(Value, AttributeValueId)> { + self.values + .iter() + .filter_map(|(_, property_editor_value)| { + if property_editor_value.prop_id() == prop_id { + Some(( + property_editor_value.value(), + property_editor_value.attribute_value_id(), + )) + } else { + None + } + }) + .collect() } } @@ -72,9 +232,7 @@ impl PropertyEditorValue { /// Returns the [`Prop`](crate::Prop) corresponding to the "prop_id" field. pub async fn prop(&self, ctx: &DalContext) -> PropertyEditorResult { - let prop = Prop::get_by_id(ctx, &self.prop_id.into()) - .await? - .ok_or_else(|| PropertyEditorError::PropNotFound(self.prop_id.into()))?; + let prop = Prop::get_by_id(ctx, self.prop_id.into()).await?; Ok(prop) } } diff --git a/lib/dal/src/provider.rs b/lib/dal/src/provider.rs index 654bee35b9..ff0cacc4fc 100644 --- a/lib/dal/src/provider.rs +++ b/lib/dal/src/provider.rs @@ -11,5 +11,70 @@ //! and explicit [`InternalProvider`](crate::InternalProvider) being "connected" //! 
(or we will at least have enough data to know which providers the user wants to "connect") +use serde::{Deserialize, Serialize}; +use si_pkg::SocketSpecArity; +use strum::{AsRefStr, Display, EnumIter, EnumString}; + pub mod external; pub mod internal; + +#[remain::sorted] +#[derive( + AsRefStr, + Clone, + Copy, + Debug, + Deserialize, + Display, + EnumIter, + EnumString, + Eq, + PartialEq, + Serialize, +)] +#[serde(rename_all = "camelCase")] +#[strum(serialize_all = "camelCase")] +pub enum ProviderKind { + Frame, + // NOTE(nick): this used to be "Provider" when the enum was "Socket Kind". + Standard, +} + +#[remain::sorted] +#[derive( + AsRefStr, + Copy, + Clone, + Debug, + Deserialize, + Display, + EnumIter, + EnumString, + Eq, + PartialEq, + Serialize, +)] +#[serde(rename_all = "camelCase")] +#[strum(serialize_all = "camelCase")] +pub enum ProviderArity { + Many, + One, +} + +impl From<&ProviderArity> for SocketSpecArity { + fn from(value: &ProviderArity) -> Self { + match value { + ProviderArity::One => Self::One, + ProviderArity::Many => Self::Many, + } + } +} + +impl From for ProviderArity { + fn from(value: SocketSpecArity) -> Self { + match value { + SocketSpecArity::One => Self::One, + SocketSpecArity::Many => Self::Many, + } + } +} diff --git a/lib/dal/src/provider/external.rs b/lib/dal/src/provider/external.rs index 1586260106..d1fb92638e 100644 --- a/lib/dal/src/provider/external.rs +++ b/lib/dal/src/provider/external.rs @@ -1,319 +1,517 @@ +use content_store::{ContentHash, Store}; use serde::{Deserialize, Serialize}; -use si_data_pg::PgError; use std::collections::HashMap; +use strum::EnumDiscriminants; use telemetry::prelude::*; use thiserror::Error; -use crate::func::binding::FuncBindingId; -use crate::func::binding_return_value::FuncBindingReturnValueId; -use crate::socket::{Socket, SocketArity, SocketEdgeKind, SocketError, SocketId, SocketKind}; -use crate::{ - impl_standard_model, pk, standard_model, standard_model_accessor, 
standard_model_accessor_ro, - standard_model_has_many, AttributePrototype, AttributePrototypeError, ComponentId, DiagramKind, - FuncId, HistoryEventError, InternalProviderId, StandardModel, StandardModelError, Tenancy, - Timestamp, TransactionsError, Visibility, +use crate::attribute::prototype::AttributePrototypeError; +use crate::change_set_pointer::ChangeSetPointerError; +use crate::provider::{ProviderArity, ProviderKind}; +use crate::workspace_snapshot::content_address::{ContentAddress, ContentAddressDiscriminants}; +use crate::workspace_snapshot::edge_weight::{ + EdgeWeight, EdgeWeightError, EdgeWeightKind, EdgeWeightKindDiscriminants, }; -use crate::{ - AttributeContext, AttributeContextBuilderError, AttributeContextError, AttributePrototypeId, - DalContext, SchemaId, SchemaVariantId, -}; - -const BY_SOCKET: &str = include_str!("../queries/external_provider/by_socket.sql"); -const LIST_FOR_ATTRIBUTE_PROTOTYPE_WITH_TAIL_COMPONENT_ID: &str = include_str!( - "../queries/external_provider/list_for_attribute_prototype_with_tail_component_id.sql" -); -const FIND_FOR_SCHEMA_VARIANT_AND_NAME: &str = - include_str!("../queries/external_provider/find_for_schema_variant_and_name.sql"); -const FIND_FOR_SOCKET: &str = include_str!("../queries/external_provider/find_for_socket.sql"); -const LIST_FOR_SCHEMA_VARIANT: &str = - include_str!("../queries/external_provider/list_for_schema_variant.sql"); -const LIST_FROM_INTERNAL_PROVIDER_USE: &str = - include_str!("../queries/external_provider/list_from_internal_provider_use.sql"); +use crate::workspace_snapshot::node_weight::{NodeWeight, NodeWeightError}; +use crate::workspace_snapshot::WorkspaceSnapshotError; +use crate::{pk, AttributePrototype, DalContext, FuncId, Timestamp, TransactionsError}; +use crate::{AttributeValueId, SchemaVariantId}; #[remain::sorted] #[derive(Error, Debug)] pub enum ExternalProviderError { - #[error("attribute context error: {0}")] - AttributeContext(#[from] AttributeContextError), - 
#[error("attribute context builder error: {0}")] - AttributeContextBuilder(#[from] AttributeContextBuilderError), #[error("attribute prototype error: {0}")] AttributePrototype(#[from] AttributePrototypeError), - #[error("unexpected: attribute prototype field is empty")] - EmptyAttributePrototype, - #[error("history event error: {0}")] - HistoryEvent(#[from] HistoryEventError), - #[error("not found for id: {0}")] - NotFound(ExternalProviderId), - #[error("not found for socket name: {0}")] - NotFoundForSocketName(String), - #[error("pg error: {0}")] - Pg(#[from] PgError), - #[error("schema id mismatch: {0} (self) and {1} (provided)")] - SchemaMismatch(SchemaId, SchemaId), - #[error("schema variant error: {0}")] - SchemaVariant(String), - #[error("schema variant id mismatch: {0} (self) and {1} (provided)")] - SchemaVariantMismatch(SchemaVariantId, SchemaVariantId), - #[error("serde: {0}")] - Serde(#[from] serde_json::Error), - #[error("socket error: {0}")] - Socket(#[from] SocketError), - #[error("standard model error: {0}")] - StandardModelError(#[from] StandardModelError), + #[error("change set error: {0}")] + ChangeSet(#[from] ChangeSetPointerError), + #[error("edge weight error: {0}")] + EdgeWeight(#[from] EdgeWeightError), + #[error("found two external providers ({0} and {1}) of the same name for the same schema variant: {2}")] + NameCollision(ExternalProviderId, ExternalProviderId, SchemaVariantId), + #[error("node weight error: {0}")] + NodeWeight(#[from] NodeWeightError), + #[error("store error: {0}")] + Store(#[from] content_store::StoreError), #[error("transactions error: {0}")] Transactions(#[from] TransactionsError), + #[error("could not acquire lock: {0}")] + TryLock(#[from] tokio::sync::TryLockError), + #[error("workspace snapshot error: {0}")] + WorkspaceSnapshot(#[from] WorkspaceSnapshotError), } pub type ExternalProviderResult = Result; -pk!(ExternalProviderPk); pk!(ExternalProviderId); -impl_standard_model! 
{ - model: ExternalProvider, - pk: ExternalProviderPk, - id: ExternalProviderId, - table_name: "external_providers", - history_event_label_base: "external_provider", - history_event_message_name: "External Provider" -} - /// This provider can only provide data to external [`SchemaVariants`](crate::SchemaVariant). It can /// only consume data within its own [`SchemaVariant`](crate::SchemaVariant). #[derive(Deserialize, Serialize, Debug, Clone, PartialEq, Eq)] pub struct ExternalProvider { - pk: ExternalProviderPk, id: ExternalProviderId, #[serde(flatten)] - tenancy: Tenancy, - #[serde(flatten)] - visibility: Visibility, - #[serde(flatten)] - timestamp: Timestamp, - - /// Indicates which [`Schema`](crate::Schema) this provider belongs to. - schema_id: SchemaId, - /// Indicates which [`SchemaVariant`](crate::SchemaVariant) this provider belongs to. - schema_variant_id: SchemaVariantId, - /// Indicates which transformation function should be used for "emit". - attribute_prototype_id: Option, - + pub timestamp: Timestamp, /// Name for [`Self`] that can be used for identification. name: String, /// Definition of the data type (e.g. "JSONSchema" or "Number"). type_definition: Option, + arity: ProviderArity, + kind: ProviderKind, + required: bool, + ui_hidden: bool, +} + +#[derive(EnumDiscriminants, Serialize, Deserialize, PartialEq)] +pub enum ExternalProviderContent { + V1(ExternalProviderContentV1), +} + +#[derive(Debug, Clone, Deserialize, Serialize, PartialEq)] +pub struct ExternalProviderContentV1 { + pub timestamp: Timestamp, + /// Name for [`Self`] that can be used for identification. + pub name: String, + /// Definition of the data type (e.g. "JSONSchema" or "Number"). + pub type_definition: Option, + pub arity: ProviderArity, + pub kind: ProviderKind, + pub required: bool, + pub ui_hidden: bool, } impl ExternalProvider { - /// This function will also create an _output_ [`Socket`](crate::Socket). 
- #[allow(clippy::too_many_arguments)] - pub async fn new_with_socket( + pub fn assemble(id: ExternalProviderId, inner: ExternalProviderContentV1) -> Self { + Self { + id, + timestamp: inner.timestamp, + name: inner.name, + type_definition: inner.type_definition, + arity: inner.arity, + kind: inner.kind, + ui_hidden: inner.ui_hidden, + required: inner.required, + } + } + + pub fn id(&self) -> ExternalProviderId { + self.id + } + + pub fn name(&self) -> &str { + &self.name + } + + pub fn arity(&self) -> ProviderArity { + self.arity + } + + pub fn ui_hidden(&self) -> bool { + self.ui_hidden + } + + pub fn required(&self) -> bool { + self.required + } + + pub async fn new( ctx: &DalContext, - schema_id: SchemaId, schema_variant_id: SchemaVariantId, - name: impl AsRef, + name: impl Into, type_definition: Option, func_id: FuncId, - func_binding_id: FuncBindingId, - func_binding_return_value_id: FuncBindingReturnValueId, - connection_annotations: impl AsRef, - arity: SocketArity, - frame_socket: bool, - ) -> ExternalProviderResult<(Self, Socket)> { - let row = ctx - .txns() - .await? 
- .pg() - .query_one( - "SELECT object FROM external_provider_create_v1($1, $2, $3, $4, $5, $6)", - &[ - ctx.tenancy(), - ctx.visibility(), - &schema_id, - &schema_variant_id, - &name.as_ref(), - &type_definition, - ], - ) - .await?; + arity: ProviderArity, + kind: ProviderKind, + // todo: connection_annotation + ) -> ExternalProviderResult { + let name = name.into(); + let content = ExternalProviderContentV1 { + timestamp: Timestamp::now(), + name: name.clone(), + type_definition, + arity, + kind, + required: false, + ui_hidden: false, + }; + let hash = ctx + .content_store() + .lock() + .await + .add(&ExternalProviderContent::V1(content.clone()))?; - let mut external_provider: ExternalProvider = - standard_model::finish_create_from_row(ctx, row).await?; - - let attribute_context = AttributeContext::builder() - .set_external_provider_id(external_provider.id) - .to_context()?; - let attribute_prototype = AttributePrototype::new( - ctx, - func_id, - func_binding_id, - func_binding_return_value_id, - attribute_context, - None, - None, - ) - .await?; - external_provider - .set_attribute_prototype_id(ctx, Some(*attribute_prototype.id())) - .await?; + let change_set = ctx.change_set_pointer()?; + let id = change_set.generate_ulid()?; + let node_weight = + NodeWeight::new_content(change_set, id, ContentAddress::ExternalProvider(hash))?; + { + let mut workspace_snapshot = ctx.workspace_snapshot()?.write().await; + let _node_index = workspace_snapshot.add_node(node_weight)?; + workspace_snapshot.add_edge( + schema_variant_id, + EdgeWeight::new(change_set, EdgeWeightKind::Provider)?, + id, + )?; + } - let socket = Socket::new( - ctx, - name, - connection_annotations.as_ref(), - match frame_socket { - true => SocketKind::Frame, - false => SocketKind::Provider, - }, - &SocketEdgeKind::ConfigurationOutput, - &arity, - &DiagramKind::Configuration, - Some(schema_variant_id), - ) - .await?; - socket - .set_external_provider(ctx, external_provider.id()) - .await?; + let 
attribute_prototype = AttributePrototype::new(ctx, func_id).await?; - Ok((external_provider, socket)) - } + { + let mut workspace_snapshot = ctx.workspace_snapshot()?.write().await; + workspace_snapshot.add_edge( + id, + EdgeWeight::new(change_set, EdgeWeightKind::Prototype(None))?, + attribute_prototype.id(), + )?; + } - // Immutable fields. - standard_model_accessor_ro!(schema_id, SchemaId); - standard_model_accessor_ro!(schema_variant_id, SchemaVariantId); - - // Mutable fields. - standard_model_accessor!(name, String, ExternalProviderResult); - standard_model_accessor!(type_definition, Option, ExternalProviderResult); - standard_model_accessor!( - attribute_prototype_id, - Option, - ExternalProviderResult - ); - - // This is a 1-1 relationship, so the Vec should be 1 - standard_model_has_many!( - lookup_fn: sockets, - table: "socket_belongs_to_external_provider", - model_table: "sockets", - returns: Socket, - result: ExternalProviderResult, - ); - - /// Find all [`Self`] for a given [`SchemaVariant`](crate::SchemaVariant). - pub async fn list_for_schema_variant( - ctx: &DalContext, - schema_variant_id: SchemaVariantId, - ) -> ExternalProviderResult> { - let rows = ctx - .txns() - .await? - .pg() - .query( - LIST_FOR_SCHEMA_VARIANT, - &[ctx.tenancy(), ctx.visibility(), &schema_variant_id], - ) - .await?; - Ok(standard_model::objects_from_rows(rows)?) + Ok(Self::assemble(id.into(), content)) } - /// Find [`Self`] with a provided [`SocketId`](crate::Socket). - pub async fn find_for_socket( + pub async fn attribute_values_for_external_provider_id( ctx: &DalContext, - socket_id: SocketId, - ) -> ExternalProviderResult> { - let row = ctx - .txns() - .await? - .pg() - .query_opt( - FIND_FOR_SOCKET, - &[ctx.tenancy(), ctx.visibility(), &socket_id], - ) - .await?; - Ok(standard_model::object_option_from_row_option(row)?) 
+ external_provider_id: ExternalProviderId, + ) -> ExternalProviderResult> { + let mut result = vec![]; + + let workspace_snapshot = ctx.workspace_snapshot()?.read().await; + let av_sources = workspace_snapshot.incoming_sources_for_edge_weight_kind( + external_provider_id, + EdgeWeightKindDiscriminants::Provider, + )?; + for av_source_idx in av_sources { + if let NodeWeight::AttributeValue(av_node_weight) = + workspace_snapshot.get_node_weight(av_source_idx)? + { + result.push(av_node_weight.id().into()); + } + } + + Ok(result) } - /// Find [`Self`] with a provided name, which is not only the name of [`Self`], but also of the - /// associated _output_ [`Socket`](crate::Socket). - pub async fn find_for_schema_variant_and_name( + pub async fn list_ids_for_schema_variant( ctx: &DalContext, schema_variant_id: SchemaVariantId, - name: impl AsRef, - ) -> ExternalProviderResult> { - let name = name.as_ref(); - let row = ctx - .txns() - .await? - .pg() - .query_opt( - FIND_FOR_SCHEMA_VARIANT_AND_NAME, - &[ctx.tenancy(), ctx.visibility(), &schema_variant_id, &name], - ) - .await?; - Ok(standard_model::object_option_from_row_option(row)?) - } + ) -> ExternalProviderResult> { + let workspace_snapshot = ctx.workspace_snapshot()?.read().await; - /// Find all [`Self`] for a given [`AttributePrototypeId`](crate::AttributePrototype). - pub async fn list_for_attribute_prototype_with_tail_component_id( - ctx: &DalContext, - attribute_prototype_id: AttributePrototypeId, - tail_component_id: ComponentId, - ) -> ExternalProviderResult> { - let rows = ctx - .txns() - .await? - .pg() - .query( - LIST_FOR_ATTRIBUTE_PROTOTYPE_WITH_TAIL_COMPONENT_ID, - &[ - ctx.tenancy(), - ctx.visibility(), - &attribute_prototype_id, - &tail_component_id, - ], - ) - .await?; - Ok(standard_model::objects_from_rows(rows)?) 
+ let node_indices = workspace_snapshot.outgoing_targets_for_edge_weight_kind( + schema_variant_id, + EdgeWeightKindDiscriminants::Provider, + )?; + + let mut result = vec![]; + for node_index in node_indices { + let node_weight = workspace_snapshot.get_node_weight(node_index)?; + if node_weight + .get_option_content_node_weight_of_kind( + ContentAddressDiscriminants::ExternalProvider, + ) + .is_some() + { + result.push(node_weight.id().into()) + } + } + + Ok(result) } - /// Find all [`Self`] that have - /// [`AttributePrototypeArguments`](crate::AttributePrototypeArgument) referencing the provided - /// [`InternalProviderId`](crate::InternalProvider). - pub async fn list_from_internal_provider_use( + pub async fn list( ctx: &DalContext, - internal_provider_id: InternalProviderId, + schema_variant_id: SchemaVariantId, ) -> ExternalProviderResult> { - let rows = ctx - .txns() - .await? - .pg() - .query( - LIST_FROM_INTERNAL_PROVIDER_USE, - &[ctx.tenancy(), ctx.visibility(), &internal_provider_id], - ) - .await?; - Ok(standard_model::objects_from_rows(rows)?) - } + let workspace_snapshot = ctx.workspace_snapshot()?.read().await; + + let node_indices = workspace_snapshot.outgoing_targets_for_edge_weight_kind( + schema_variant_id, + EdgeWeightKindDiscriminants::Provider, + )?; + + let mut content_hashes = Vec::new(); + let mut node_weights = Vec::new(); + for node_index in node_indices { + let node_weight = workspace_snapshot.get_node_weight(node_index)?; + if let Some(content_node_weight) = node_weight.get_option_content_node_weight_of_kind( + ContentAddressDiscriminants::ExternalProvider, + ) { + content_hashes.push(content_node_weight.content_hash()); + node_weights.push(content_node_weight); + } + } - pub async fn by_socket(ctx: &DalContext) -> ExternalProviderResult> { - let rows = ctx - .txns() - .await? 
- .pg() - .query(BY_SOCKET, &[ctx.tenancy(), ctx.visibility()]) + let content_map: HashMap = ctx + .content_store() + .lock() + .await + .get_bulk(content_hashes.as_slice()) .await?; - let mut objects: HashMap = HashMap::new(); - for row in rows.into_iter() { - let id: SocketId = row.try_get(0)?; + let mut external_providers = Vec::new(); + for node_weight in node_weights { + match content_map.get(&node_weight.content_hash()) { + Some(content) => { + // NOTE(nick,jacob,zack): if we had a v2, then there would be migration logic here. + let ExternalProviderContent::V1(inner) = content; - let object: serde_json::Value = row.try_get(1)?; - let object: Self = serde_json::from_value(object)?; + external_providers + .push(Self::assemble(node_weight.id().into(), inner.to_owned())); + } + None => Err(WorkspaceSnapshotError::MissingContentFromStore( + node_weight.id(), + ))?, + } + } + + Ok(external_providers) + } - objects.insert(id, object); + // TODO(nick): this function uses the underlying list call since it needs to perform bulk content store retrieval. + // the analogous call for explicit internal providers is "find_explicit_with_name", but has many subtle differences. + // We should likely align how both of these work in the long term. + pub async fn find_with_name( + ctx: &DalContext, + name: impl AsRef, + schema_variant_id: SchemaVariantId, + ) -> ExternalProviderResult> { + let name = name.as_ref(); + + // NOTE(nick): at the time of writing, we do not have connection annotations and we do not enforce provider + // names being unique for the same schema variant. Should we do that? That's a different question, but this + // function will ensure that we find one and only one based on the name. + let mut maybe_external_provider: Option = None; + for external_provider in Self::list(ctx, schema_variant_id).await? 
{ + if name == external_provider.name() { + match maybe_external_provider { + Some(already_found) => { + return Err(ExternalProviderError::NameCollision( + already_found.id(), + external_provider.id(), + schema_variant_id, + )) + } + None => maybe_external_provider = Some(external_provider), + } + } } - Ok(objects) + Ok(maybe_external_provider) } } + +// impl ExternalProvider { +// /// This function will also create an _output_ [`Socket`](crate::Socket). +// #[allow(clippy::too_many_arguments)] +// #[tracing::instrument(skip(ctx, name))] +// pub async fn new_with_socket( +// ctx: &DalContext, +// schema_id: SchemaId, +// schema_variant_id: SchemaVariantId, +// name: impl AsRef, +// type_definition: Option, +// func_id: FuncId, +// func_binding_id: FuncBindingId, +// func_binding_return_value_id: FuncBindingReturnValueId, +// arity: SocketArity, +// frame_socket: bool, +// ) -> ExternalProviderResult<(Self, Socket)> { +// let name = name.as_ref(); +// let row = ctx +// .txns() +// .await? 
+// .pg() +// .query_one( +// "SELECT object FROM external_provider_create_v1($1, $2, $3, $4, $5, $6)", +// &[ +// ctx.tenancy(), +// ctx.visibility(), +// &schema_id, +// &schema_variant_id, +// &name, +// &type_definition, +// ], +// ) +// .await?; + +// let mut external_provider: ExternalProvider = +// standard_model::finish_create_from_row(ctx, row).await?; + +// let attribute_context = AttributeContext::builder() +// .set_external_provider_id(external_provider.id) +// .to_context()?; +// let attribute_prototype = AttributePrototype::new( +// ctx, +// func_id, +// func_binding_id, +// func_binding_return_value_id, +// attribute_context, +// None, +// None, +// ) +// .await?; +// external_provider +// .set_attribute_prototype_id(ctx, Some(*attribute_prototype.id())) +// .await?; + +// let socket = Socket::new( +// ctx, +// name, +// match frame_socket { +// true => SocketKind::Frame, +// false => SocketKind::Provider, +// }, +// &SocketEdgeKind::ConfigurationOutput, +// &arity, +// &DiagramKind::Configuration, +// Some(schema_variant_id), +// ) +// .await?; +// socket +// .set_external_provider(ctx, external_provider.id()) +// .await?; + +// Ok((external_provider, socket)) +// } + +// // Immutable fields. +// standard_model_accessor_ro!(schema_id, SchemaId); +// standard_model_accessor_ro!(schema_variant_id, SchemaVariantId); + +// // Mutable fields. +// standard_model_accessor!(name, String, ExternalProviderResult); +// standard_model_accessor!(type_definition, Option, ExternalProviderResult); +// standard_model_accessor!( +// attribute_prototype_id, +// Option, +// ExternalProviderResult +// ); + +// // This is a 1-1 relationship, so the Vec should be 1 +// standard_model_has_many!( +// lookup_fn: sockets, +// table: "socket_belongs_to_external_provider", +// model_table: "sockets", +// returns: Socket, +// result: ExternalProviderResult, +// ); + +// /// Find all [`Self`] for a given [`SchemaVariant`](crate::SchemaVariant). 
+// #[tracing::instrument(skip(ctx))] +// pub async fn list_for_schema_variant( +// ctx: &DalContext, +// schema_variant_id: SchemaVariantId, +// ) -> ExternalProviderResult> { +// let rows = ctx +// .txns() +// .await? +// .pg() +// .query( +// LIST_FOR_SCHEMA_VARIANT, +// &[ctx.tenancy(), ctx.visibility(), &schema_variant_id], +// ) +// .await?; +// Ok(standard_model::objects_from_rows(rows)?) +// } + +// /// Find [`Self`] with a provided [`SocketId`](crate::Socket). +// #[instrument(skip_all)] +// pub async fn find_for_socket( +// ctx: &DalContext, +// socket_id: SocketId, +// ) -> ExternalProviderResult> { +// let row = ctx +// .txns() +// .await? +// .pg() +// .query_opt( +// FIND_FOR_SOCKET, +// &[ctx.tenancy(), ctx.visibility(), &socket_id], +// ) +// .await?; +// Ok(standard_model::object_option_from_row_option(row)?) +// } + +// /// Find [`Self`] with a provided name, which is not only the name of [`Self`], but also of the +// /// associated _output_ [`Socket`](crate::Socket). +// #[instrument(skip_all)] +// pub async fn find_for_schema_variant_and_name( +// ctx: &DalContext, +// schema_variant_id: SchemaVariantId, +// name: impl AsRef, +// ) -> ExternalProviderResult> { +// let name = name.as_ref(); +// let row = ctx +// .txns() +// .await? +// .pg() +// .query_opt( +// FIND_FOR_SCHEMA_VARIANT_AND_NAME, +// &[ctx.tenancy(), ctx.visibility(), &schema_variant_id, &name], +// ) +// .await?; +// Ok(standard_model::object_option_from_row_option(row)?) +// } + +// /// Find all [`Self`] for a given [`AttributePrototypeId`](crate::AttributePrototype). +// #[tracing::instrument(skip(ctx))] +// pub async fn list_for_attribute_prototype_with_tail_component_id( +// ctx: &DalContext, +// attribute_prototype_id: AttributePrototypeId, +// tail_component_id: ComponentId, +// ) -> ExternalProviderResult> { +// let rows = ctx +// .txns() +// .await? 
+// .pg() +// .query( +// LIST_FOR_ATTRIBUTE_PROTOTYPE_WITH_TAIL_COMPONENT_ID, +// &[ +// ctx.tenancy(), +// ctx.visibility(), +// &attribute_prototype_id, +// &tail_component_id, +// ], +// ) +// .await?; +// Ok(standard_model::objects_from_rows(rows)?) +// } + +// /// Find all [`Self`] that have +// /// [`AttributePrototypeArguments`](crate::AttributePrototypeArgument) referencing the provided +// /// [`InternalProviderId`](crate::InternalProvider). +// #[tracing::instrument(skip(ctx))] +// pub async fn list_from_internal_provider_use( +// ctx: &DalContext, +// internal_provider_id: InternalProviderId, +// ) -> ExternalProviderResult> { +// let rows = ctx +// .txns() +// .await? +// .pg() +// .query( +// LIST_FROM_INTERNAL_PROVIDER_USE, +// &[ctx.tenancy(), ctx.visibility(), &internal_provider_id], +// ) +// .await?; +// Ok(standard_model::objects_from_rows(rows)?) +// } + +// #[tracing::instrument(skip(ctx))] +// pub async fn by_socket(ctx: &DalContext) -> ExternalProviderResult> { +// let rows = ctx +// .txns() +// .await? +// .pg() +// .query(BY_SOCKET, &[ctx.tenancy(), ctx.visibility()]) +// .await?; + +// let mut objects: HashMap = HashMap::new(); +// for row in rows.into_iter() { +// let id: SocketId = row.try_get(0)?; + +// let object: serde_json::Value = row.try_get(1)?; +// let object: Self = serde_json::from_value(object)?; + +// objects.insert(id, object); +// } +// Ok(objects) +// } +// } diff --git a/lib/dal/src/provider/internal.rs b/lib/dal/src/provider/internal.rs index 64ae657ae4..e6288e63b7 100644 --- a/lib/dal/src/provider/internal.rs +++ b/lib/dal/src/provider/internal.rs @@ -67,129 +67,63 @@ //! This design also lets us cache the view of a [`Prop`](crate::Prop) and its children rather //! than directly observing the real time values frequently. 
+use content_store::{ContentHash, Store}; use serde::{Deserialize, Serialize}; -use thiserror::Error; - -use si_data_pg::PgError; use std::collections::HashMap; +use strum::EnumDiscriminants; use telemetry::prelude::*; +use thiserror::Error; -use crate::attribute::context::AttributeContextBuilder; -use crate::func::backend::identity::FuncBackendIdentityArgs; -use crate::func::binding::{FuncBindingError, FuncBindingId}; -use crate::func::binding_return_value::FuncBindingReturnValueId; -use crate::socket::{Socket, SocketArity, SocketEdgeKind, SocketError, SocketId, SocketKind}; -use crate::standard_model::object_option_from_row_option; -use crate::ComponentId; -use crate::{ - impl_standard_model, pk, standard_model, standard_model_accessor, standard_model_accessor_ro, - AttributeContextBuilderError, AttributePrototype, AttributePrototypeError, - AttributePrototypeId, AttributeReadContext, AttributeValueError, AttributeView, DiagramKind, - FuncError, FuncId, HistoryEventError, Prop, PropError, StandardModel, StandardModelError, - Tenancy, Timestamp, TransactionsError, Visibility, +use crate::attribute::prototype::AttributePrototypeError; +use crate::change_set_pointer::ChangeSetPointerError; +use crate::func::FuncError; +use crate::provider::{ProviderArity, ProviderKind}; +use crate::workspace_snapshot::content_address::{ContentAddress, ContentAddressDiscriminants}; +use crate::workspace_snapshot::edge_weight::{ + EdgeWeight, EdgeWeightError, EdgeWeightKind, EdgeWeightKindDiscriminants, }; +use crate::workspace_snapshot::node_weight::{NodeWeight, NodeWeightError}; +use crate::workspace_snapshot::WorkspaceSnapshotError; use crate::{ - standard_model_has_many, AttributeContext, AttributeContextError, AttributeValue, DalContext, - Func, FuncBinding, PropId, SchemaId, SchemaVariantId, + pk, AttributePrototype, AttributePrototypeId, AttributeValueId, DalContext, FuncId, PropId, + SchemaVariantId, Timestamp, TransactionsError, }; -const BY_SOCKET: &str = 
include_str!("../queries/internal_provider/by_socket.sql"); -const FIND_EXPLICIT_FOR_SCHEMA_VARIANT_AND_NAME: &str = - include_str!("../queries/internal_provider/find_explicit_for_schema_variant_and_name.sql"); -const FIND_FOR_PROP: &str = include_str!("../queries/internal_provider/find_for_prop.sql"); -const FIND_EXPLICIT_FOR_SOCKET: &str = - include_str!("../queries/internal_provider/find_explicit_for_socket.sql"); -const IS_FOR_ROOT_PROP: &str = include_str!("../queries/internal_provider/is_for_root_prop.sql"); -const LIST_FOR_SCHEMA_VARIANT: &str = - include_str!("../queries/internal_provider/list_for_schema_variant.sql"); -const LIST_EXPLICIT_FOR_SCHEMA_VARIANT: &str = - include_str!("../queries/internal_provider/list_explicit_for_schema_variant.sql"); -const LIST_FOR_ATTRIBUTE_PROTOTYPE: &str = - include_str!("../queries/internal_provider/list_for_attribute_prototype.sql"); -const LIST_FOR_INPUT_SOCKETS: &str = - include_str!("../queries/internal_provider/list_for_input_sockets_for_all_schema_variants.sql"); - #[remain::sorted] #[derive(Error, Debug)] pub enum InternalProviderError { - #[error("attribute context error: {0}")] - AttributeContext(#[from] AttributeContextError), - #[error("attribute context builder error: {0}")] - AttributeContextBuilder(#[from] AttributeContextBuilderError), #[error("attribute prototype error: {0}")] AttributePrototype(#[from] AttributePrototypeError), - #[error("attribute prototype not found for id: {0}")] - AttributePrototypeNotFound(AttributePrototypeId), - #[error("attribute value error: {0}")] - AttributeValue(#[from] AttributeValueError), - #[error("could not find attribute value for attribute context: {0:?}")] - AttributeValueNotFoundForContext(AttributeContext), - #[error("component error: {0}")] - Component(String), - #[error("component not found by id: {0}")] - ComponentNotFound(ComponentId), - #[error("unexpected: attribute prototype field is empty")] - EmptyAttributePrototype, + #[error("change set error: {0}")] + 
ChangeSet(#[from] ChangeSetPointerError), + #[error("edge weight error: {0}")] + EdgeWeight(#[from] EdgeWeightError), #[error("func error: {0}")] Func(#[from] FuncError), - #[error("func binding error: {0}")] - FuncBinding(#[from] FuncBindingError), - #[error("func not found for id: {0}")] - FuncNotFound(FuncId), - #[error("history event error: {0}")] - HistoryEvent(#[from] HistoryEventError), - #[error("not allowed to perform implicit emit as an explicit internal provider")] - ImplicitEmitForExplicitProviderNotAllowed, - #[error("missing func")] - MissingFunc(String), - #[error("provided attribute context does not specify a PropId (required for implicit emit)")] - MissingPropForImplicitEmit, - #[error("not found for id: {0}")] - NotFound(InternalProviderId), - #[error("internal provider not found for prop id: {0}")] - NotFoundForProp(PropId), - #[error("internal provider not found for socket id: {0}")] - NotFoundForSocket(SocketId), - #[error("internal provider not found for prop socket name: {0}")] - NotFoundForSocketName(String), - #[error("pg error: {0}")] - Pg(#[from] PgError), - #[error("prop error: {0}")] - Prop(#[from] PropError), - #[error("prop not found for id: {0}")] - PropNotFound(PropId), - #[error("root prop not found for schema variant: {0}")] - RootPropNotFound(SchemaVariantId), - #[error("schema id mismatch: {0} (self) and {1} (provided)")] - SchemaMismatch(SchemaId, SchemaId), - #[error("schema variant error: {0}")] - SchemaVariant(String), - #[error("schema variant id mismatch: {0} (self) and {1} (provided)")] - SchemaVariantMismatch(SchemaVariantId, SchemaVariantId), - #[error("serde_json error: {0}")] - SerdeJson(#[from] serde_json::Error), - #[error("socket error: {0}")] - Socket(#[from] SocketError), - #[error("standard model error: {0}")] - StandardModel(#[from] StandardModelError), + #[error("missing attribute prototype for explicit internal provider: {0}")] + MissingAttributePrototypeExplicit(InternalProviderId), + #[error("missing 
attribute value for explicit internal provider: {0}")] + MissingAttributeValueExplicit(InternalProviderId), + #[error("node weight error: {0}")] + NodeWeight(#[from] NodeWeightError), + #[error("Prop {0} is missing an internal provider")] + PropMissingInternalProvider(PropId), + #[error("An internal provider for prop {0} already exists")] + ProviderAlreadyExists(PropId), + #[error("store error: {0}")] + Store(#[from] content_store::StoreError), #[error("transactions error: {0}")] Transactions(#[from] TransactionsError), + #[error("could not acquire lock: {0}")] + TryLock(#[from] tokio::sync::TryLockError), + #[error("workspace snapshot error: {0}")] + WorkspaceSnapshot(#[from] WorkspaceSnapshotError), } pub type InternalProviderResult = Result; -pk!(InternalProviderPk); pk!(InternalProviderId); -impl_standard_model! { - model: InternalProvider, - pk: InternalProviderPk, - id: InternalProviderId, - table_name: "internal_providers", - history_event_label_base: "internal_provider", - history_event_message_name: "Internal Provider" -} - /// This provider can only provide data within its own [`SchemaVariant`](crate::SchemaVariant). /// /// If this provider _specifies_ a [`PropId`](crate::Prop), it provider can only consume data from @@ -201,448 +135,738 @@ impl_standard_model! { /// are called "explicit" [`InternalProviders`](Self). #[derive(Deserialize, Serialize, Debug, Clone, PartialEq, Eq)] pub struct InternalProvider { - pk: InternalProviderPk, id: InternalProviderId, #[serde(flatten)] - tenancy: Tenancy, - #[serde(flatten)] - visibility: Visibility, - #[serde(flatten)] timestamp: Timestamp, - - /// Indicates which [`Prop`](crate::Prop) this provider belongs to. This will be - /// unset if [`Self`] is "explicit". If [`Self`] is "implicit", this will always be a "set" id. - prop_id: PropId, - /// Indicates which [`SchemaVariant`](crate::SchemaVariant) this provider belongs to. 
- schema_variant_id: SchemaVariantId, - /// Indicates which transformation function should be used for "emit". - attribute_prototype_id: Option, - /// Name for [`Self`] that can be used for identification. name: String, /// Definition of the inbound type (e.g. "JSONSchema" or "Number"). inbound_type_definition: Option, /// Definition of the outbound type (e.g. "JSONSchema" or "Number"). outbound_type_definition: Option, + arity: ProviderArity, + kind: ProviderKind, + required: bool, + ui_hidden: bool, +} + +#[derive(EnumDiscriminants, Serialize, Deserialize, PartialEq)] +pub enum InternalProviderContent { + V1(InternalProviderContentV1), +} + +#[derive(Debug, Clone, Deserialize, Serialize, PartialEq)] +pub struct InternalProviderContentV1 { + pub timestamp: Timestamp, + /// Name for [`Self`] that can be used for identification. + pub name: String, + /// Definition of the inbound type (e.g. "JSONSchema" or "Number"). + pub inbound_type_definition: Option, + /// Definition of the outbound type (e.g. "JSONSchema" or "Number"). + pub outbound_type_definition: Option, + pub arity: ProviderArity, + pub kind: ProviderKind, + pub required: bool, + pub ui_hidden: bool, } impl InternalProvider { - pub async fn new_implicit( + pub async fn get_by_id( ctx: &DalContext, - prop_id: PropId, - schema_variant_id: SchemaVariantId, + id: InternalProviderId, ) -> InternalProviderResult { - // Use the prop name for the implicit internal provider name. We need an owned string that - // we then borrow for the query. - let prop = Prop::get_by_id(ctx, &prop_id) - .await? - .ok_or(InternalProviderError::PropNotFound(prop_id))?; - let name = prop.name().to_string(); + let workspace_snapshot = ctx.workspace_snapshot()?.read().await; + let node_weight = workspace_snapshot.get_node_weight_by_id(id)?; - let row = ctx - .txns() - .await? 
- .pg() - .query_one( - "SELECT object FROM internal_provider_create_v1($1, $2, $3, $4, $5, $6, $7)", - &[ - ctx.tenancy(), - ctx.visibility(), - &prop_id, - &schema_variant_id, - &name, - &Option::::None, - &Option::::None, - ], - ) - .await?; - let mut internal_provider: InternalProvider = - standard_model::finish_create_from_row(ctx, row).await?; - - let (identity_func, identity_func_binding, identity_func_binding_return_value) = - Func::identity_with_binding_and_return_value(ctx).await?; - - // The "base" AttributeContext of anything we create should be as un-specific as possible, - // and for an InternalProvider that is having only the InternalProviderId set. - let context = AttributeContext::builder() - .set_internal_provider_id(*internal_provider.id()) - .to_context()?; - - // Key and parent are unneeded because the provider exists not strictly as part of the - // schema values _and_ because implicit internal providers cannot be created for descendants - // of maps and arrays. - let attribute_prototype = AttributePrototype::new( - ctx, - *identity_func.id(), - *identity_func_binding.id(), - *identity_func_binding_return_value.id(), - context, - None, - None, - ) - .await?; - - internal_provider - .set_attribute_prototype_id(ctx, Some(*attribute_prototype.id())) - .await?; - Ok(internal_provider) + Self::get_from_node_weight(ctx, node_weight).await } - /// This function will also create an _input_ [`Socket`](crate::Socket). 
- #[allow(clippy::too_many_arguments)] - pub async fn new_explicit_with_socket( - ctx: &DalContext, - schema_variant_id: SchemaVariantId, - name: impl AsRef, - func_id: FuncId, - func_binding_id: FuncBindingId, - func_binding_return_value_id: FuncBindingReturnValueId, - connection_annotations: impl AsRef, - arity: SocketArity, - frame_socket: bool, - ) -> InternalProviderResult<(Self, Socket)> { - let name = name.as_ref(); - let prop_id = PropId::NONE; + pub fn assemble(id: InternalProviderId, inner: InternalProviderContentV1) -> Self { + Self { + id, + timestamp: inner.timestamp, + name: inner.name, + inbound_type_definition: inner.inbound_type_definition, + outbound_type_definition: inner.outbound_type_definition, + arity: inner.arity, + kind: inner.kind, + required: inner.required, + ui_hidden: inner.ui_hidden, + } + } - let row = ctx - .txns() - .await? - .pg() - .query_one( - "SELECT object FROM internal_provider_create_v1($1, $2, $3, $4, $5, $6, $7)", - &[ - ctx.tenancy(), - ctx.visibility(), - &prop_id, - &schema_variant_id, - &name, - &Option::::None, - &Option::::None, - ], - ) - .await?; + pub fn id(&self) -> InternalProviderId { + self.id + } - let mut explicit_internal_provider: InternalProvider = - standard_model::finish_create_from_row(ctx, row).await?; - - // The "base" AttributeContext of anything we create should be as un-specific as possible, - // and for an InternalProvider that is having only the InternalProviderId set. 
- let _base_attribute_context = AttributeContext::builder() - .set_internal_provider_id(explicit_internal_provider.id) - .to_context()?; - - let attribute_prototype = AttributePrototype::new( - ctx, - func_id, - func_binding_id, - func_binding_return_value_id, - explicit_internal_provider.attribute_context()?, - None, - None, - ) - .await?; - explicit_internal_provider - .set_attribute_prototype_id(ctx, Some(*attribute_prototype.id())) - .await?; + pub fn name(&self) -> &str { + &self.name + } - let socket = Socket::new( - ctx, - name, - connection_annotations.as_ref(), - match frame_socket { - true => SocketKind::Frame, - false => SocketKind::Provider, - }, - &SocketEdgeKind::ConfigurationInput, - &arity, - &DiagramKind::Configuration, - Some(schema_variant_id), - ) - .await?; - socket - .set_internal_provider(ctx, explicit_internal_provider.id()) - .await?; + pub fn arity(&self) -> ProviderArity { + self.arity + } - Ok((explicit_internal_provider, socket)) + pub fn ui_hidden(&self) -> bool { + self.ui_hidden } - // Immutable fields. - standard_model_accessor_ro!(prop_id, PropId); - standard_model_accessor_ro!(schema_variant_id, SchemaVariantId); - - // Mutable fields. - standard_model_accessor!( - attribute_prototype_id, - Option, - InternalProviderResult - ); - standard_model_accessor!(name, String, InternalProviderResult); - standard_model_accessor!( - inbound_type_definition, - Option, - InternalProviderResult - ); - standard_model_accessor!( - outbound_type_definition, - Option, - InternalProviderResult - ); - - // This is a 1-1 relationship, so the Vec should be 1 - standard_model_has_many!( - lookup_fn: sockets, - table: "socket_belongs_to_internal_provider", - model_table: "sockets", - returns: Socket, - result: InternalProviderResult, - ); - - /// If the [`PropId`](crate::Prop) field is not unset, then [`Self`] is an internal consumer. 
- pub fn is_internal_consumer(&self) -> bool { - self.prop_id != PropId::NONE + pub fn required(&self) -> bool { + self.required } - /// Consume with a provided [`AttributeContext`](crate::AttributeContext) and return the - /// resulting [`AttributeValue`](crate::AttributeValue). - /// - /// Requirements for the provided [`AttributeContext`](crate::AttributeContext): - /// - The least specific field be a [`PropId`](crate::Prop) - /// - If the [`SchemaId`](crate::Schema) is set, it must match the corresponding field on - /// [`Self`] - /// - If the [`SchemaVariantId`](crate::SchemaVariant) is set, it must match the corresponding - /// field on [`Self`] - pub async fn implicit_emit( - &self, + async fn get_from_node_weight( ctx: &DalContext, - target_attribute_value: &mut AttributeValue, - ) -> InternalProviderResult<()> { - if !self.is_internal_consumer() { - return Err(InternalProviderError::ImplicitEmitForExplicitProviderNotAllowed); - } - - // Get the func from our attribute prototype. - let attribute_prototype_id = self - .attribute_prototype_id - .ok_or(InternalProviderError::EmptyAttributePrototype)?; - let attribute_prototype = AttributePrototype::get_by_id(ctx, &attribute_prototype_id) + node_weight: &NodeWeight, + ) -> InternalProviderResult { + let content: InternalProviderContent = ctx + .content_store() + .try_lock()? + .get(&node_weight.content_hash()) .await? - .ok_or(InternalProviderError::AttributePrototypeNotFound( - attribute_prototype_id, + .ok_or(WorkspaceSnapshotError::MissingContentFromStore( + node_weight.id(), ))?; - let func_id = attribute_prototype.func_id(); - let func = Func::get_by_id(ctx, &func_id) - .await? - .ok_or(InternalProviderError::FuncNotFound(func_id))?; - - // Generate the AttributeContext that we should be sourcing our argument from. 
- let consume_attribute_context = - AttributeContextBuilder::from(target_attribute_value.context) - .unset_internal_provider_id() - .unset_external_provider_id() - .set_prop_id(self.prop_id) - .to_context()?; - - let source_attribute_value = - AttributeValue::find_for_context(ctx, consume_attribute_context.into()) - .await? - .ok_or(InternalProviderError::AttributeValueNotFoundForContext( - consume_attribute_context, - ))?; - let found_attribute_view_context = AttributeReadContext { - prop_id: None, - ..AttributeReadContext::from(consume_attribute_context) - }; - let found_attribute_view = AttributeView::new( - ctx, - found_attribute_view_context, - Some(*source_attribute_value.id()), - ) - .await?; - - let (func_binding, func_binding_return_value) = FuncBinding::create_and_execute( - ctx, - serde_json::to_value(FuncBackendIdentityArgs { - identity: Some(found_attribute_view.value().clone()), - })?, - *func.id(), - vec![], - ) - .await?; - - target_attribute_value - .set_func_binding_id(ctx, *func_binding.id()) - .await?; - target_attribute_value - .set_func_binding_return_value_id(ctx, *func_binding_return_value.id()) - .await?; + let InternalProviderContent::V1(inner) = content; - Ok(()) + Ok(Self::assemble(node_weight.id().into(), inner)) } - /// Find all [`Self`] for a given [`SchemaVariant`](crate::SchemaVariant). - pub async fn list_for_schema_variant( + pub async fn add_prototype_edge( ctx: &DalContext, - schema_variant_id: SchemaVariantId, - ) -> InternalProviderResult> { - let rows = ctx - .txns() - .await? - .pg() - .query( - LIST_FOR_SCHEMA_VARIANT, - &[ctx.tenancy(), ctx.visibility(), &schema_variant_id], - ) - .await?; - Ok(standard_model::objects_from_rows(rows)?) 
- } + internal_provider_id: InternalProviderId, + attribute_prototype_id: AttributePrototypeId, + key: &Option, + ) -> InternalProviderResult<()> { + let mut workspace_snapshot = ctx.workspace_snapshot()?.write().await; + workspace_snapshot.add_edge( + internal_provider_id, + EdgeWeight::new( + ctx.change_set_pointer()?, + EdgeWeightKind::Prototype(key.to_owned()), + )?, + attribute_prototype_id, + )?; - /// Find all [`Self`] for a given [`SchemaVariant`](crate::SchemaVariant). - pub async fn list_explicit_for_schema_variant( - ctx: &DalContext, - schema_variant_id: SchemaVariantId, - ) -> InternalProviderResult> { - let rows = ctx - .txns() - .await? - .pg() - .query( - LIST_EXPLICIT_FOR_SCHEMA_VARIANT, - &[ctx.tenancy(), ctx.visibility(), &schema_variant_id], - ) - .await?; - Ok(standard_model::objects_from_rows(rows)?) + Ok(()) } - /// Find [`Self`] with a provided name, which is not only the name of [`Self`], but also of the - /// associated _input_ [`Socket`](crate::Socket). - pub async fn find_explicit_for_schema_variant_and_name( + pub async fn find_explicit_with_name( ctx: &DalContext, - schema_variant_id: SchemaVariantId, name: impl AsRef, + schema_variant_id: SchemaVariantId, ) -> InternalProviderResult> { let name = name.as_ref(); - let row = ctx - .txns() - .await? - .pg() - .query_opt( - FIND_EXPLICIT_FOR_SCHEMA_VARIANT_AND_NAME, - &[ctx.tenancy(), ctx.visibility(), &schema_variant_id, &name], - ) - .await?; - Ok(object_option_from_row_option(row)?) - } - /// Find [`Self`] with a provided [`SocketId`](crate::Socket). - pub async fn find_explicit_for_socket( - ctx: &DalContext, - socket_id: SocketId, - ) -> InternalProviderResult> { - let row = ctx - .txns() - .await? - .pg() - .query_opt( - FIND_EXPLICIT_FOR_SOCKET, - &[ctx.tenancy(), ctx.visibility(), &socket_id], - ) - .await?; - Ok(object_option_from_row_option(row)?) 
- } + let workspace_snapshot = ctx.workspace_snapshot()?.read().await; + + for provider_idx in workspace_snapshot.outgoing_targets_for_edge_weight_kind( + schema_variant_id, + EdgeWeightKindDiscriminants::Provider, + )? { + let node_weight = workspace_snapshot.get_node_weight(provider_idx)?; + if let NodeWeight::Content(content_inner) = node_weight { + if ContentAddressDiscriminants::InternalProvider + == content_inner.content_address().into() + { + let ip = Self::get_from_node_weight(ctx, node_weight).await?; + if ip.name() == name { + return Ok(Some(ip)); + } + } + } + } - /// Find all [`Self`] for a given [`AttributePrototypeId`](crate::AttributePrototype). - pub async fn list_for_attribute_prototype( - ctx: &DalContext, - attribute_prototype_id: AttributePrototypeId, - ) -> InternalProviderResult> { - let rows = ctx - .txns() - .await? - .pg() - .query( - LIST_FOR_ATTRIBUTE_PROTOTYPE, - &[ctx.tenancy(), ctx.visibility(), &attribute_prototype_id], - ) - .await?; - Ok(standard_model::objects_from_rows(rows)?) + Ok(None) } - /// Find all [`Self`] which are also input sockets. - pub async fn list_for_input_sockets( + pub async fn new_explicit( ctx: &DalContext, - schema_variant_id: Option, - ) -> InternalProviderResult> { - let rows = ctx - .txns() - .await? - .pg() - .query( - LIST_FOR_INPUT_SOCKETS, - &[ctx.tenancy(), ctx.visibility(), &schema_variant_id], - ) - .await?; + schema_variant_id: SchemaVariantId, + name: impl Into, + func_id: FuncId, + arity: ProviderArity, + kind: ProviderKind, + // todo: connection_annotation + ) -> InternalProviderResult { + info!("creating explicit internal provider"); + let name = name.into(); + let content = InternalProviderContentV1 { + timestamp: Timestamp::now(), + name: name.clone(), + inbound_type_definition: None, + outbound_type_definition: None, + arity, + kind, + required: false, + ui_hidden: false, + }; + let hash = ctx + .content_store() + .try_lock()? 
+ .add(&InternalProviderContent::V1(content.clone()))?; + + let change_set = ctx.change_set_pointer()?; + let id = change_set.generate_ulid()?; + + { + let mut workspace_snapshot = ctx.workspace_snapshot()?.write().await; + let node_weight = + NodeWeight::new_content(change_set, id, ContentAddress::InternalProvider(hash))?; + let _node_index = workspace_snapshot.add_node(node_weight)?; + workspace_snapshot.add_edge( + schema_variant_id, + EdgeWeight::new(change_set, EdgeWeightKind::Provider)?, + id, + )?; + } - Ok(standard_model::objects_from_rows(rows)?) - } + let attribute_prototype = AttributePrototype::new(ctx, func_id).await?; + + { + let mut workspace_snapshot = ctx.workspace_snapshot()?.write().await; + workspace_snapshot.add_edge( + id, + EdgeWeight::new(change_set, EdgeWeightKind::Prototype(None))?, + attribute_prototype.id(), + )?; + } - /// Returns an [`AttributeContext`](crate::AttributeContext) corresponding to our id. - pub fn attribute_context(&self) -> InternalProviderResult { - Ok(AttributeContext::builder() - .set_internal_provider_id(self.id) - .to_context()?) + Ok(Self::assemble(id.into(), content)) } - /// Finds [`Self`] for a given [`PropId`](crate::Prop). This will only work for - /// implicit [`InternalProviders`](Self). - pub async fn find_for_prop( + pub async fn list_ids_for_schema_variant( ctx: &DalContext, - prop_id: PropId, - ) -> InternalProviderResult> { - let row = ctx - .txns() - .await? - .pg() - .query_opt(FIND_FOR_PROP, &[ctx.tenancy(), ctx.visibility(), &prop_id]) - .await?; - Ok(object_option_from_row_option(row)?) 
- } + schema_variant_id: SchemaVariantId, + ) -> InternalProviderResult> { + let workspace_snapshot = ctx.workspace_snapshot()?.read().await; + + let node_indices = workspace_snapshot.outgoing_targets_for_edge_weight_kind( + schema_variant_id, + EdgeWeightKindDiscriminants::Provider, + )?; + + let mut result = vec![]; + for node_index in node_indices { + let node_weight = workspace_snapshot.get_node_weight(node_index)?; + if node_weight + .get_option_content_node_weight_of_kind( + ContentAddressDiscriminants::InternalProvider, + ) + .is_some() + { + result.push(node_weight.id().into()); + } + } - pub async fn by_socket(ctx: &DalContext) -> InternalProviderResult> { - let rows = ctx - .txns() - .await? - .pg() - .query(BY_SOCKET, &[ctx.tenancy(), ctx.visibility()]) - .await?; + Ok(result) + } - let mut objects: HashMap = HashMap::new(); - for row in rows.into_iter() { - let id: SocketId = row.try_get(0)?; + pub async fn list( + ctx: &DalContext, + schema_variant_id: SchemaVariantId, + ) -> InternalProviderResult> { + let workspace_snapshot = ctx.workspace_snapshot()?.read().await; + + let node_indices = workspace_snapshot.outgoing_targets_for_edge_weight_kind( + schema_variant_id, + EdgeWeightKindDiscriminants::Provider, + )?; + + let mut content_hashes = Vec::new(); + let mut node_weights = Vec::new(); + for node_index in node_indices { + let node_weight = workspace_snapshot.get_node_weight(node_index)?; + if let Some(content_node_weight) = node_weight.get_option_content_node_weight_of_kind( + ContentAddressDiscriminants::InternalProvider, + ) { + content_hashes.push(content_node_weight.content_hash()); + node_weights.push(content_node_weight); + } + } - let object: serde_json::Value = row.try_get(1)?; - let object: Self = serde_json::from_value(object)?; + let content_map: HashMap = ctx + .content_store() + .try_lock()? 
+ .get_bulk(content_hashes.as_slice()) + .await?; - objects.insert(id, object); + let mut internal_providers = Vec::new(); + for node_weight in node_weights { + match content_map.get(&node_weight.content_hash()) { + Some(content) => { + // NOTE(nick,jacob,zack): if we had a v2, then there would be migration logic here. + let InternalProviderContent::V1(inner) = content; + + internal_providers + .push(Self::assemble(node_weight.id().into(), inner.to_owned())); + } + None => Err(WorkspaceSnapshotError::MissingContentFromStore( + node_weight.id(), + ))?, + } } - Ok(objects.into_iter().collect()) + Ok(internal_providers) } - /// Determines if the provided [`InternalProvider`] corresponds to a "root" [`Prop`](crate::Prop). - pub async fn is_for_root_prop( + pub async fn attribute_values_for_internal_provider_id( ctx: &DalContext, internal_provider_id: InternalProviderId, - ) -> InternalProviderResult { - let maybe_row = ctx - .txns() - .await? - .pg() - .query_opt( - IS_FOR_ROOT_PROP, - &[ctx.tenancy(), ctx.visibility(), &internal_provider_id], - ) - .await?; - Ok(maybe_row.is_some()) + ) -> InternalProviderResult> { + let mut result = vec![]; + + let workspace_snapshot = ctx.workspace_snapshot()?.read().await; + let av_sources = workspace_snapshot.incoming_sources_for_edge_weight_kind( + internal_provider_id, + EdgeWeightKindDiscriminants::Provider, + )?; + for av_source_idx in av_sources { + if let NodeWeight::AttributeValue(av_node_weight) = + workspace_snapshot.get_node_weight(av_source_idx)? + { + result.push(av_node_weight.id().into()); + } + } + + Ok(result) } } + +// impl InternalProvider { +// #[tracing::instrument(skip(ctx))] +// pub async fn new_implicit( +// ctx: &DalContext, +// prop_id: PropId, +// schema_variant_id: SchemaVariantId, +// ) -> InternalProviderResult { +// // Use the prop name for the implicit internal provider name. We need an owned string that +// // we then borrow for the query. +// let prop = Prop::get_by_id(ctx, &prop_id) +// .await? 
+// .ok_or(InternalProviderError::PropNotFound(prop_id))?; +// let name = prop.name().to_string(); + +// let row = ctx +// .txns() +// .await? +// .pg() +// .query_one( +// "SELECT object FROM internal_provider_create_v1($1, $2, $3, $4, $5, $6, $7)", +// &[ +// ctx.tenancy(), +// ctx.visibility(), +// &prop_id, +// &schema_variant_id, +// &name, +// &Option::::None, +// &Option::::None, +// ], +// ) +// .await?; +// let mut internal_provider: InternalProvider = +// standard_model::finish_create_from_row(ctx, row).await?; + +// let (identity_func, identity_func_binding, identity_func_binding_return_value) = +// Func::identity_with_binding_and_return_value(ctx).await?; + +// // The "base" AttributeContext of anything we create should be as un-specific as possible, +// // and for an InternalProvider that is having only the InternalProviderId set. +// let context = AttributeContext::builder() +// .set_internal_provider_id(*internal_provider.id()) +// .to_context()?; + +// // Key and parent are unneeded because the provider exists not strictly as part of the +// // schema values _and_ because implicit internal providers cannot be created for descendants +// // of maps and arrays. +// let attribute_prototype = AttributePrototype::new( +// ctx, +// *identity_func.id(), +// *identity_func_binding.id(), +// *identity_func_binding_return_value.id(), +// context, +// None, +// None, +// ) +// .await?; + +// internal_provider +// .set_attribute_prototype_id(ctx, Some(*attribute_prototype.id())) +// .await?; +// Ok(internal_provider) +// } + +// /// This function will also create an _input_ [`Socket`](crate::Socket). 
+// #[allow(clippy::too_many_arguments)] +// #[tracing::instrument(skip(ctx, name))] +// pub async fn new_explicit_with_socket( +// ctx: &DalContext, +// schema_variant_id: SchemaVariantId, +// name: impl AsRef, +// func_id: FuncId, +// func_binding_id: FuncBindingId, +// func_binding_return_value_id: FuncBindingReturnValueId, +// arity: SocketArity, +// frame_socket: bool, +// ) -> InternalProviderResult<(Self, Socket)> { +// let name = name.as_ref(); +// let prop_id = PropId::NONE; + +// let row = ctx +// .txns() +// .await? +// .pg() +// .query_one( +// "SELECT object FROM internal_provider_create_v1($1, $2, $3, $4, $5, $6, $7)", +// &[ +// ctx.tenancy(), +// ctx.visibility(), +// &prop_id, +// &schema_variant_id, +// &name, +// &Option::::None, +// &Option::::None, +// ], +// ) +// .await?; + +// let mut explicit_internal_provider: InternalProvider = +// standard_model::finish_create_from_row(ctx, row).await?; + +// // The "base" AttributeContext of anything we create should be as un-specific as possible, +// // and for an InternalProvider that is having only the InternalProviderId set. 
+// let _base_attribute_context = AttributeContext::builder() +// .set_internal_provider_id(explicit_internal_provider.id) +// .to_context()?; + +// let attribute_prototype = AttributePrototype::new( +// ctx, +// func_id, +// func_binding_id, +// func_binding_return_value_id, +// explicit_internal_provider.attribute_context()?, +// None, +// None, +// ) +// .await?; +// explicit_internal_provider +// .set_attribute_prototype_id(ctx, Some(*attribute_prototype.id())) +// .await?; + +// let socket = Socket::new( +// ctx, +// name, +// match frame_socket { +// true => SocketKind::Frame, +// false => SocketKind::Provider, +// }, +// &SocketEdgeKind::ConfigurationInput, +// &arity, +// &DiagramKind::Configuration, +// Some(schema_variant_id), +// ) +// .await?; +// socket +// .set_internal_provider(ctx, explicit_internal_provider.id()) +// .await?; + +// Ok((explicit_internal_provider, socket)) +// } + +// // Immutable fields. +// standard_model_accessor_ro!(prop_id, PropId); +// standard_model_accessor_ro!(schema_variant_id, SchemaVariantId); + +// // Mutable fields. +// standard_model_accessor!( +// attribute_prototype_id, +// Option, +// InternalProviderResult +// ); +// standard_model_accessor!(name, String, InternalProviderResult); +// standard_model_accessor!( +// inbound_type_definition, +// Option, +// InternalProviderResult +// ); +// standard_model_accessor!( +// outbound_type_definition, +// Option, +// InternalProviderResult +// ); + +// // This is a 1-1 relationship, so the Vec should be 1 +// standard_model_has_many!( +// lookup_fn: sockets, +// table: "socket_belongs_to_internal_provider", +// model_table: "sockets", +// returns: Socket, +// result: InternalProviderResult, +// ); + +// /// If the [`PropId`](crate::Prop) field is not unset, then [`Self`] is an internal consumer. 
+// pub fn is_internal_consumer(&self) -> bool { +// self.prop_id != PropId::NONE +// } + +// /// Consume with a provided [`AttributeContext`](crate::AttributeContext) and return the +// /// resulting [`AttributeValue`](crate::AttributeValue). +// /// +// /// Requirements for the provided [`AttributeContext`](crate::AttributeContext): +// /// - The least specific field be a [`PropId`](crate::Prop) +// /// - If the [`SchemaId`](crate::Schema) is set, it must match the corresponding field on +// /// [`Self`] +// /// - If the [`SchemaVariantId`](crate::SchemaVariant) is set, it must match the corresponding +// /// field on [`Self`] +// pub async fn implicit_emit( +// &self, +// ctx: &DalContext, +// target_attribute_value: &mut AttributeValue, +// ) -> InternalProviderResult<()> { +// if !self.is_internal_consumer() { +// return Err(InternalProviderError::ImplicitEmitForExplicitProviderNotAllowed); +// } + +// // Get the func from our attribute prototype. +// let attribute_prototype_id = self +// .attribute_prototype_id +// .ok_or(InternalProviderError::EmptyAttributePrototype)?; +// let attribute_prototype = AttributePrototype::get_by_id(ctx, &attribute_prototype_id) +// .await? +// .ok_or(InternalProviderError::AttributePrototypeNotFound( +// attribute_prototype_id, +// ))?; +// let func_id = attribute_prototype.func_id(); +// let func = Func::get_by_id(ctx, &func_id) +// .await? +// .ok_or(InternalProviderError::FuncNotFound(func_id))?; + +// // Generate the AttributeContext that we should be sourcing our argument from. +// let consume_attribute_context = +// AttributeContextBuilder::from(target_attribute_value.context) +// .unset_internal_provider_id() +// .unset_external_provider_id() +// .set_prop_id(self.prop_id) +// .to_context()?; + +// let source_attribute_value = +// AttributeValue::find_for_context(ctx, consume_attribute_context.into()) +// .await? 
+// .ok_or(InternalProviderError::AttributeValueNotFoundForContext( +// consume_attribute_context, +// ))?; +// let found_attribute_view_context = AttributeReadContext { +// prop_id: None, +// ..AttributeReadContext::from(consume_attribute_context) +// }; + +// let found_attribute_view = AttributeView::new( +// ctx, +// found_attribute_view_context, +// Some(*source_attribute_value.id()), +// ) +// .await?; + +// let (func_binding, func_binding_return_value) = FuncBinding::create_and_execute( +// ctx, +// serde_json::to_value(FuncBackendIdentityArgs { +// identity: Some(found_attribute_view.value().clone()), +// })?, +// *func.id(), +// vec![], +// ) +// .await?; + +// target_attribute_value +// .set_func_binding_id(ctx, *func_binding.id()) +// .await?; +// target_attribute_value +// .set_func_binding_return_value_id(ctx, *func_binding_return_value.id()) +// .await?; + +// if target_attribute_value.context.component_id().is_some() && self.prop_id().is_some() { +// let provider_prop = Prop::get_by_id(ctx, self.prop_id()) +// .await? +// .ok_or_else(|| InternalProviderError::PropNotFound(*self.prop_id()))?; + +// // NOTE(jhelwig): This whole block will go away once Qualifications/Validations become part of the Prop tree. +// // +// // The Root Prop won't have a parent Prop. +// if provider_prop.parent_prop(ctx).await?.is_none() { +// let ctx_deletion = &ctx.clone_with_delete_visibility(); +// let component = Component::get_by_id( +// ctx_deletion, +// &target_attribute_value.context.component_id(), +// ) +// .await? +// .ok_or_else(|| { +// InternalProviderError::ComponentNotFound( +// target_attribute_value.context.component_id(), +// ) +// })?; +// component +// .check_validations(ctx) +// .await +// .map_err(|e| InternalProviderError::Component(e.to_string()))?; +// } +// } + +// Ok(()) +// } + +// /// Find all [`Self`] for a given [`SchemaVariant`](crate::SchemaVariant). 
+// #[tracing::instrument(skip(ctx))] +// pub async fn list_for_schema_variant( +// ctx: &DalContext, +// schema_variant_id: SchemaVariantId, +// ) -> InternalProviderResult> { +// let rows = ctx +// .txns() +// .await? +// .pg() +// .query( +// LIST_FOR_SCHEMA_VARIANT, +// &[ctx.tenancy(), ctx.visibility(), &schema_variant_id], +// ) +// .await?; +// Ok(standard_model::objects_from_rows(rows)?) +// } + +// /// Find all [`Self`] for a given [`SchemaVariant`](crate::SchemaVariant). +// #[tracing::instrument(skip(ctx))] +// pub async fn list_explicit_for_schema_variant( +// ctx: &DalContext, +// schema_variant_id: SchemaVariantId, +// ) -> InternalProviderResult> { +// let rows = ctx +// .txns() +// .await? +// .pg() +// .query( +// LIST_EXPLICIT_FOR_SCHEMA_VARIANT, +// &[ctx.tenancy(), ctx.visibility(), &schema_variant_id], +// ) +// .await?; +// Ok(standard_model::objects_from_rows(rows)?) +// } + +// /// Find [`Self`] with a provided name, which is not only the name of [`Self`], but also of the +// /// associated _input_ [`Socket`](crate::Socket). +// #[instrument(skip_all)] +// pub async fn find_explicit_for_schema_variant_and_name( +// ctx: &DalContext, +// schema_variant_id: SchemaVariantId, +// name: impl AsRef, +// ) -> InternalProviderResult> { +// let name = name.as_ref(); +// let row = ctx +// .txns() +// .await? +// .pg() +// .query_opt( +// FIND_EXPLICIT_FOR_SCHEMA_VARIANT_AND_NAME, +// &[ctx.tenancy(), ctx.visibility(), &schema_variant_id, &name], +// ) +// .await?; +// Ok(object_option_from_row_option(row)?) +// } + +// /// Find [`Self`] with a provided [`SocketId`](crate::Socket). +// #[instrument(skip_all)] +// pub async fn find_explicit_for_socket( +// ctx: &DalContext, +// socket_id: SocketId, +// ) -> InternalProviderResult> { +// let row = ctx +// .txns() +// .await? +// .pg() +// .query_opt( +// FIND_EXPLICIT_FOR_SOCKET, +// &[ctx.tenancy(), ctx.visibility(), &socket_id], +// ) +// .await?; +// Ok(object_option_from_row_option(row)?) 
+// } + +// /// Find all [`Self`] for a given [`AttributePrototypeId`](crate::AttributePrototype). +// #[tracing::instrument(skip(ctx))] +// pub async fn list_for_attribute_prototype( +// ctx: &DalContext, +// attribute_prototype_id: AttributePrototypeId, +// ) -> InternalProviderResult> { +// let rows = ctx +// .txns() +// .await? +// .pg() +// .query( +// LIST_FOR_ATTRIBUTE_PROTOTYPE, +// &[ctx.tenancy(), ctx.visibility(), &attribute_prototype_id], +// ) +// .await?; +// Ok(standard_model::objects_from_rows(rows)?) +// } + +// /// Find all [`Self`] which are also input sockets. +// pub async fn list_for_input_sockets( +// ctx: &DalContext, +// schema_variant_id: Option, +// ) -> InternalProviderResult> { +// let rows = ctx +// .txns() +// .await? +// .pg() +// .query( +// LIST_FOR_INPUT_SOCKETS, +// &[ctx.tenancy(), ctx.visibility(), &schema_variant_id], +// ) +// .await?; + +// Ok(standard_model::objects_from_rows(rows)?) +// } + +// /// Returns an [`AttributeContext`](crate::AttributeContext) corresponding to our id. +// pub fn attribute_context(&self) -> InternalProviderResult { +// Ok(AttributeContext::builder() +// .set_internal_provider_id(self.id) +// .to_context()?) +// } + +// /// Finds [`Self`] for a given [`PropId`](crate::Prop). This will only work for +// /// implicit [`InternalProviders`](Self). +// pub async fn find_for_prop( +// ctx: &DalContext, +// prop_id: PropId, +// ) -> InternalProviderResult> { +// let row = ctx +// .txns() +// .await? +// .pg() +// .query_opt(FIND_FOR_PROP, &[ctx.tenancy(), ctx.visibility(), &prop_id]) +// .await?; +// Ok(object_option_from_row_option(row)?) +// } + +// #[tracing::instrument(skip(ctx))] +// pub async fn by_socket(ctx: &DalContext) -> InternalProviderResult> { +// let rows = ctx +// .txns() +// .await? 
+// .pg() +// .query(BY_SOCKET, &[ctx.tenancy(), ctx.visibility()]) +// .await?; + +// let mut objects: HashMap = HashMap::new(); +// for row in rows.into_iter() { +// let id: SocketId = row.try_get(0)?; + +// let object: serde_json::Value = row.try_get(1)?; +// let object: Self = serde_json::from_value(object)?; + +// objects.insert(id, object); +// } + +// Ok(objects.into_iter().collect()) +// } +// } diff --git a/lib/dal/src/qualification.rs b/lib/dal/src/qualification.rs index ab1d1dce44..66530b50d4 100644 --- a/lib/dal/src/qualification.rs +++ b/lib/dal/src/qualification.rs @@ -4,15 +4,18 @@ use strum::{AsRefStr, Display, EnumIter, EnumString}; use telemetry::prelude::*; use thiserror::Error; +use crate::attribute::value::AttributeValueError; use crate::component::qualification::QualificationEntry; -use crate::func::binding_return_value::FuncBindingReturnValueId; +use crate::func::FuncError; +use crate::prop::PropError; +use crate::validation::resolver::{ValidationResolver, ValidationResolverError, ValidationStatus}; use crate::{ - func::binding_return_value::{FuncBindingReturnValue, FuncBindingReturnValueError}, + func::binding_return_value::FuncBindingReturnValueError, ws_event::{WsEvent, WsPayload}, - ComponentError, ComponentId, DalContext, FuncId, Prop, StandardModel, StandardModelError, - ValidationResolver, ValidationResolverError, ValidationStatus, WsEventResult, + Component, ComponentError, ComponentId, DalContext, Prop, StandardModel, StandardModelError, + WsEventResult, }; -use crate::{standard_model, TransactionsError}; +use crate::{AttributeValue, AttributeValueId, Func}; #[derive(Deserialize, Serialize, Debug)] pub struct QualificationSummaryForComponent { @@ -43,8 +46,6 @@ pub enum QualificationSummaryError { Pg(#[from] PgError), #[error(transparent)] StandardModel(#[from] StandardModelError), - #[error(transparent)] - Transaction(#[from] TransactionsError), } pub type QualificationSummaryResult = Result; @@ -57,13 +58,47 @@ impl 
QualificationSummary { let mut components_failed = 0; let mut total = 0; - let qualification_summary_for_components: Vec = - standard_model::list(ctx, "summary_qualifications").await?; - for component_summary in qualification_summary_for_components.iter() { - components_succeeded += component_summary.succeeded; - components_warned += component_summary.warned; - components_failed += component_summary.failed; - total += 1; + let mut component_summaries = vec![]; + + for component in Component::list(ctx).await? { + let component_id = component.id(); + let qualifications = Component::list_qualifications(ctx, component_id).await?; + + let individual_total = qualifications.len() as i64; + let mut succeeded = 0; + let mut warned = 0; + let mut failed = 0; + for qualification in qualifications { + if let Some(result) = qualification.result { + match result.status { + QualificationSubCheckStatus::Success => succeeded += 1, + QualificationSubCheckStatus::Warning => warned += 1, + QualificationSubCheckStatus::Failure => failed += 1, + QualificationSubCheckStatus::Unknown => {} + } + } + } + + let individual_summary = QualificationSummaryForComponent { + component_id, + component_name: component.name(ctx).await?, + total: individual_total, + succeeded, + warned, + failed, + }; + + // Update counters for all components. 
+ if failed > 0 { + components_failed += 1; + } else if warned > 0 { + components_warned += 1; + } else { + components_succeeded += 1; + } + total += individual_total; + + component_summaries.push(individual_summary); } Ok(QualificationSummary { @@ -71,7 +106,7 @@ impl QualificationSummary { succeeded: components_succeeded, warned: components_warned, failed: components_failed, - components: qualification_summary_for_components, + components: component_summaries, }) } } @@ -79,10 +114,16 @@ impl QualificationSummary { #[remain::sorted] #[derive(Error, Debug)] pub enum QualificationError { + #[error("attribute value error: {0}")] + AttributeValue(#[from] AttributeValueError), + #[error("func error: {0}")] + Func(#[from] FuncError), #[error("function binding return value error: {0}")] FuncBindingReturnValueError(#[from] FuncBindingReturnValueError), #[error("no value returned in qualification function result")] NoValue, + #[error("prop error: {0}")] + Prop(#[from] PropError), #[error("error serializing/deserializing json: {0}")] SerdeJson(#[from] serde_json::Error), #[error(transparent)] @@ -134,27 +175,30 @@ impl Ord for QualificationView { impl QualificationView { pub async fn new( ctx: &DalContext, - qualification_name: &str, - qualification_entry: QualificationEntry, - attribute_prototype_func_id: FuncId, - func_binding_return_value_id: FuncBindingReturnValueId, + attribute_value_id: AttributeValueId, ) -> Result, QualificationError> { - let func_binding_return_value = - FuncBindingReturnValue::get_by_id(ctx, &func_binding_return_value_id) - .await? 
- .ok_or(FuncBindingReturnValueError::NotFound( - func_binding_return_value_id, - ))?; - - // If the func binding return value on this does not match the prototype func, it means - // the qualification has not yet been run - if *func_binding_return_value.func_id() != attribute_prototype_func_id { - return Ok(None); - } + let attribute_value = AttributeValue::get_by_id(ctx, attribute_value_id).await?; + let qualification_name = match attribute_value.key(ctx).await? { + Some(key) => key, + None => return Ok(None), + }; + + let func_execution = match attribute_value.func_execution(ctx).await? { + Some(func_execution) => func_execution, + None => return Ok(None), + }; + + let qualification_entry: QualificationEntry = + match attribute_value.materialized_view(ctx).await? { + Some(value) => serde_json::from_value(value)?, + None => return Ok(None), + }; - let func_metadata = func_binding_return_value.func_metadata_view(ctx).await?; + let func = Func::get_by_id(ctx, *func_execution.func_id()).await?; - let output_streams = func_binding_return_value.get_output_stream(ctx).await?; + let func_metadata = func.metadata_view(); + + let output_streams = func_execution.into_output_stream(); let output = match output_streams { Some(streams) => streams .into_iter() @@ -211,13 +255,12 @@ impl QualificationView { status = QualificationSubCheckStatus::Failure; fail_counter += 1; - if let Some(prop) = Prop::get_by_id(ctx, &resolver.prop_id()).await? 
{ - output.push(QualificationOutputStreamView { - stream: "stdout".to_owned(), - level: "log".to_owned(), - line: format!("{}: {}", prop.name(), value.message), - }); - } + let prop = Prop::get_by_id(ctx, resolver.prop_id()).await?; + output.push(QualificationOutputStreamView { + stream: "stdout".to_owned(), + level: "log".to_owned(), + line: format!("{}: {}", prop.name, value.message), + }); } } diff --git a/lib/dal/src/queries/authentication_prototype/find_for_context.sql b/lib/dal/src/queries/authentication_prototype/find_for_context.sql deleted file mode 100644 index 9e28b30521..0000000000 --- a/lib/dal/src/queries/authentication_prototype/find_for_context.sql +++ /dev/null @@ -1,3 +0,0 @@ -SELECT row_to_json(prototypes.*) AS object -FROM authentication_prototypes_v1($1, $2) AS prototypes -WHERE prototypes.schema_variant_id = $3 diff --git a/lib/dal/src/queries/authentication_prototype/find_for_context_and_func.sql b/lib/dal/src/queries/authentication_prototype/find_for_context_and_func.sql deleted file mode 100644 index 6ae596528e..0000000000 --- a/lib/dal/src/queries/authentication_prototype/find_for_context_and_func.sql +++ /dev/null @@ -1,4 +0,0 @@ -SELECT row_to_json(prototypes.*) AS object -FROM authentication_prototypes_v1($1, $2) AS prototypes -WHERE prototypes.func_id = $4 - AND prototypes.schema_variant_id = $3 diff --git a/lib/dal/src/queries/authentication_prototype/find_for_func.sql b/lib/dal/src/queries/authentication_prototype/find_for_func.sql deleted file mode 100644 index e7957804b3..0000000000 --- a/lib/dal/src/queries/authentication_prototype/find_for_func.sql +++ /dev/null @@ -1,3 +0,0 @@ -SELECT row_to_json(prototypes.*) AS object -FROM authentication_prototypes_v1($1, $2) AS prototypes -WHERE prototypes.func_id = $3 diff --git a/lib/dal/src/queries/workspace_snapshot/find_for_change_set.sql b/lib/dal/src/queries/workspace_snapshot/find_for_change_set.sql new file mode 100644 index 0000000000..1427d7b87b --- /dev/null +++ 
b/lib/dal/src/queries/workspace_snapshot/find_for_change_set.sql @@ -0,0 +1,4 @@ +SELECT * FROM workspace_snapshots +JOIN change_set_pointers + ON change_set_pointers.id = $1 + AND change_set_pointers.workspace_snapshot_id = workspace_snapshots.id \ No newline at end of file diff --git a/lib/dal/src/schema.rs b/lib/dal/src/schema.rs index c68a3a222c..9e75cdbde6 100644 --- a/lib/dal/src/schema.rs +++ b/lib/dal/src/schema.rs @@ -1,266 +1,367 @@ +use content_store::{ContentHash, Store, StoreError}; use serde::{Deserialize, Serialize}; -use si_data_nats::NatsError; -use si_data_pg::PgError; -use telemetry::prelude::*; +use std::collections::HashMap; +use strum::EnumDiscriminants; use thiserror::Error; +use tokio::sync::TryLockError; -use crate::func::binding_return_value::FuncBindingReturnValueError; -use crate::provider::external::ExternalProviderError; -use crate::provider::internal::InternalProviderError; -use crate::schema::variant::SchemaVariantError; -use crate::socket::SocketError; -use crate::standard_model::object_option_from_row_option; -use crate::{ - component::ComponentKind, func::binding::FuncBindingError, impl_standard_model, pk, - schema::ui_menu::SchemaUiMenuId, standard_model, standard_model_accessor, - standard_model_has_many, standard_model_many_to_many, AttributeContextBuilderError, - AttributePrototypeError, AttributeValueError, Component, DalContext, FuncError, - HistoryEventError, PropError, StandardModel, StandardModelError, Timestamp, Visibility, - WsEventError, +use crate::change_set_pointer::ChangeSetPointerError; +use crate::workspace_snapshot::content_address::{ContentAddress, ContentAddressDiscriminants}; +use crate::workspace_snapshot::edge_weight::{ + EdgeWeight, EdgeWeightError, EdgeWeightKind, EdgeWeightKindDiscriminants, }; -use crate::{Tenancy, TransactionsError, WorkspacePk}; +use crate::workspace_snapshot::node_weight::category_node_weight::CategoryNodeKind; +use crate::workspace_snapshot::node_weight::{NodeWeight, 
NodeWeightError}; +use crate::workspace_snapshot::WorkspaceSnapshotError; +use crate::{pk, ComponentKind, DalContext, Timestamp, TransactionsError}; pub use ui_menu::SchemaUiMenu; -pub use variant::root_prop::RootProp; pub use variant::{SchemaVariant, SchemaVariantId}; pub mod ui_menu; pub mod variant; -const FIND_SCHEMA_VARIANT_BY_NAME_FOR_SCHEMA: &str = - include_str!("./queries/find_schema_variant_for_schema_and_name.sql"); +pub const SCHEMA_VERSION: SchemaContentDiscriminants = SchemaContentDiscriminants::V1; #[remain::sorted] #[derive(Error, Debug)] pub enum SchemaError { - #[error("AttributeContextBuilder error: {0}")] - AttributeContextBuilder(#[from] AttributeContextBuilderError), - #[error("AttributePrototype error: {0}")] - AttributePrototype(#[from] AttributePrototypeError), - #[error("AttributeValue error: {0}")] - AttributeValue(#[from] AttributeValueError), - #[error("external provider error: {0}")] - ExternalProvider(#[from] ExternalProviderError), - #[error("func error: {0}")] - Func(#[from] FuncError), - #[error("func binding error: {0}")] - FuncBinding(#[from] FuncBindingError), - #[error("func binding return value error: {0}")] - FuncBindingReturnValue(#[from] FuncBindingReturnValueError), - #[error("func not found: {0}")] - FuncNotFound(String), - #[error("history event error: {0}")] - HistoryEvent(#[from] HistoryEventError), - #[error("internal provider error: {0}")] - InternalProvider(#[from] InternalProviderError), - #[error("missing a func in attribute update: {0} not found")] - MissingFunc(String), - #[error("nats txn error: {0}")] - Nats(#[from] NatsError), - #[error("no default variant for schema id: {0}")] - NoDefaultVariant(SchemaId), - #[error("schema not found: {0}")] - NotFound(SchemaId), - #[error("schema not found by name: {0}")] - NotFoundByName(String), - #[error("pg error: {0}")] - Pg(#[from] PgError), - #[error("prop error: {0}")] - Prop(#[from] PropError), - #[error("schema ui menu not found: {0}")] - 
SchemaUiMenuNotFound(SchemaUiMenuId), - #[error("error serializing/deserializing json: {0}")] - SerdeJson(#[from] serde_json::Error), - #[error("socket error: {0}")] - Socket(#[from] SocketError), - #[error("standard model error: {0}")] - StandardModel(#[from] StandardModelError), + #[error("change set error: {0}")] + ChangeSet(#[from] ChangeSetPointerError), + #[error("edge weight error: {0}")] + EdgeWeight(#[from] EdgeWeightError), + #[error("node weight error: {0}")] + NodeWeight(#[from] NodeWeightError), + #[error("store error: {0}")] + Store(#[from] StoreError), #[error("transactions error: {0}")] Transactions(#[from] TransactionsError), - #[error("schema variant error: {0}")] - Variant(#[from] SchemaVariantError), - #[error("ws event error: {0}")] - WsEvent(#[from] WsEventError), + #[error("try lock error: {0}")] + TryLock(#[from] TryLockError), + #[error("workspace snapshot error: {0}")] + WorkspaceSnapshot(#[from] WorkspaceSnapshotError), } pub type SchemaResult = Result; -pk!(SchemaPk); pk!(SchemaId); #[derive(Clone, Debug, Deserialize, Eq, PartialEq, Serialize)] pub struct Schema { - pk: SchemaPk, id: SchemaId, - name: String, - #[serde(flatten)] - tenancy: Tenancy, #[serde(flatten)] timestamp: Timestamp, - #[serde(flatten)] - visibility: Visibility, - ui_hidden: bool, + pub name: String, + pub ui_hidden: bool, + // NOTE(nick): maybe we should have a special edge for this instead or remove it altogether. default_schema_variant_id: Option, component_kind: ComponentKind, } -impl_standard_model! 
{ - model: Schema, - pk: SchemaPk, - id: SchemaId, - table_name: "schemas", - history_event_label_base: "schema", - history_event_message_name: "Schema" +#[derive(EnumDiscriminants, Serialize, Deserialize, PartialEq)] +pub enum SchemaContent { + V1(SchemaContentV1), +} + +#[derive(Debug, Clone, Deserialize, Serialize, PartialEq)] +pub struct SchemaContentV1 { + pub timestamp: Timestamp, + pub name: String, + pub ui_hidden: bool, + // NOTE(nick): maybe we should have a special edge for this instead or remove it altogether. + pub default_schema_variant_id: Option, + pub component_kind: ComponentKind, +} + +impl From for SchemaContentV1 { + fn from(value: Schema) -> Self { + Self { + timestamp: value.timestamp, + name: value.name, + ui_hidden: value.ui_hidden, + default_schema_variant_id: value.default_schema_variant_id, + component_kind: value.component_kind, + } + } } impl Schema { + pub fn assemble(id: SchemaId, inner: SchemaContentV1) -> Self { + Self { + id, + timestamp: inner.timestamp, + name: inner.name, + ui_hidden: inner.ui_hidden, + default_schema_variant_id: inner.default_schema_variant_id, + component_kind: inner.component_kind, + } + } + + pub fn id(&self) -> SchemaId { + self.id + } + + pub fn name(&self) -> &str { + &self.name + } + pub async fn new( ctx: &DalContext, - name: impl AsRef, - component_kind: &ComponentKind, + name: impl Into, + component_kind: ComponentKind, ) -> SchemaResult { - let name = name.as_ref(); - let row = ctx - .txns() - .await? 
- .pg() - .query_one( - "SELECT object FROM schema_create_v1($1, $2, $3, $4)", - &[ - ctx.tenancy(), - ctx.visibility(), - &name, - &component_kind.as_ref(), - ], - ) - .await?; - let object = standard_model::finish_create_from_row(ctx, row).await?; - Ok(object) - } + let content = SchemaContentV1 { + timestamp: Timestamp::now(), + name: name.into(), + ui_hidden: false, + default_schema_variant_id: None, + component_kind, + }; - standard_model_accessor!(name, String, SchemaResult); - standard_model_accessor!(component_kind, Enum(ComponentKind), SchemaResult); - standard_model_accessor!(ui_hidden, bool, SchemaResult); - standard_model_accessor!( - default_schema_variant_id, - Option, - SchemaResult - ); - - standard_model_has_many!( - lookup_fn: ui_menus, - table: "schema_ui_menu_belongs_to_schema", - model_table: "schema_ui_menus", - returns: SchemaUiMenu, - result: SchemaResult, - ); - - standard_model_has_many!( - lookup_fn: components, - table: "component_belongs_to_schema", - model_table: "components", - returns: Component, - result: SchemaResult, - ); - - standard_model_has_many!( - lookup_fn: variants, - table: "schema_variant_belongs_to_schema", - model_table: "schema_variants", - returns: SchemaVariant, - result: SchemaResult, - ); - - standard_model_many_to_many!( - lookup_fn: implements, - associate_fn: add_implements_schema, - disassociate_fn: remove_implements_schema, - disassociate_all_fn: remove_all_implements_schemas, - table_name: "schema_many_to_many_implements", - left_table: "schemas", - left_id: SchemaId, - right_table: "schemas", - right_id: SchemaId, - which_table_is_this: "left", - returns: Schema, - result: SchemaResult, - ); - - pub async fn default_variant(&self, ctx: &DalContext) -> SchemaResult { - match self.default_schema_variant_id() { - Some(schema_variant_id) => Ok(SchemaVariant::get_by_id(ctx, schema_variant_id) - .await? 
- .ok_or_else(|| SchemaError::NoDefaultVariant(*self.id()))?), - None => Err(SchemaError::NoDefaultVariant(*self.id())), - } + let hash = ctx + .content_store() + .lock() + .await + .add(&SchemaContent::V1(content.clone()))?; + + let change_set = ctx.change_set_pointer()?; + let id = change_set.generate_ulid()?; + let node_weight = NodeWeight::new_content(change_set, id, ContentAddress::Schema(hash))?; + + let mut workspace_snapshot = ctx.workspace_snapshot()?.write().await; + workspace_snapshot.add_node(node_weight)?; + + let schema_category_index_id = + workspace_snapshot.get_category_node(None, CategoryNodeKind::Schema)?; + workspace_snapshot.add_edge( + schema_category_index_id, + EdgeWeight::new(change_set, EdgeWeightKind::Use)?, + id, + )?; + + Ok(Self::assemble(id.into(), content)) } - pub async fn is_builtin(&self, ctx: &DalContext) -> SchemaResult { - let row = ctx - .txns() + pub async fn get_by_id(ctx: &DalContext, id: SchemaId) -> SchemaResult { + let workspace_snapshot = ctx.workspace_snapshot()?.read().await; + + let node_index = workspace_snapshot.get_node_index_by_id(id)?; + let node_weight = workspace_snapshot.get_node_weight(node_index)?; + let hash = node_weight.content_hash(); + + let content: SchemaContent = ctx + .content_store() + .lock() + .await + .get(&hash) .await? - .pg() - .query_opt( - "SELECT id FROM schemas WHERE id = $1 and tenancy_workspace_pk = $2 LIMIT 1", - &[self.id(), &WorkspacePk::NONE], - ) - .await?; + .ok_or(WorkspaceSnapshotError::MissingContentFromStore(id.into()))?; - Ok(row.is_some()) - } + // NOTE(nick,jacob,zack): if we had a v2, then there would be migration logic here. 
+ let SchemaContent::V1(inner) = content; - pub async fn find_by_name(ctx: &DalContext, name: impl AsRef) -> SchemaResult { - let name = name.as_ref(); - let schemas = Schema::find_by_attr(ctx, "name", &name).await?; - schemas - .first() - .ok_or_else(|| SchemaError::NotFoundByName(name.into())) - .cloned() + Ok(Self::assemble(id, inner)) } - pub async fn find_by_name_builtin( - ctx: &DalContext, - name: impl AsRef, - ) -> SchemaResult> { - let builtin_ctx = ctx.clone_with_new_tenancy(Tenancy::new(WorkspacePk::NONE)); + pub async fn modify(self, ctx: &DalContext, lambda: L) -> SchemaResult + where + L: FnOnce(&mut Self) -> SchemaResult<()>, + { + let mut schema = self; - let builtin_schema = Self::find_by_name(&builtin_ctx, name).await?; + let before = SchemaContentV1::from(schema.clone()); + lambda(&mut schema)?; + let updated = SchemaContentV1::from(schema.clone()); + + if updated != before { + let hash = ctx + .content_store() + .lock() + .await + .add(&SchemaContent::V1(updated.clone()))?; + + let mut workspace_snapshot = ctx.workspace_snapshot()?.write().await; + workspace_snapshot.update_content(ctx.change_set_pointer()?, schema.id.into(), hash)?; + } - Ok(Self::get_by_id(ctx, builtin_schema.id()).await?) + Ok(schema) } - pub async fn find_variant_by_name( - &self, - ctx: &DalContext, - name: impl AsRef, - ) -> SchemaResult> { - let name: &str = name.as_ref(); - let row = ctx - .txns() - .await? 
- .pg() - .query_opt( - FIND_SCHEMA_VARIANT_BY_NAME_FOR_SCHEMA, - &[ctx.tenancy(), ctx.visibility(), self.id(), &name], - ) + pub async fn list(ctx: &DalContext) -> SchemaResult> { + let workspace_snapshot = ctx.workspace_snapshot()?.read().await; + + let mut schemas = vec![]; + let schema_category_index_id = + workspace_snapshot.get_category_node(None, CategoryNodeKind::Schema)?; + + let schema_node_indices = workspace_snapshot.outgoing_targets_for_edge_weight_kind( + schema_category_index_id, + EdgeWeightKindDiscriminants::Use, + )?; + + let mut schema_node_weights = vec![]; + let mut schema_content_hashes = vec![]; + for index in schema_node_indices { + let node_weight = workspace_snapshot + .get_node_weight(index)? + .get_content_node_weight_of_kind(ContentAddressDiscriminants::Schema)?; + schema_content_hashes.push(node_weight.content_hash()); + schema_node_weights.push(node_weight); + } + + let schema_contents: HashMap = ctx + .content_store() + .lock() + .await + .get_bulk(schema_content_hashes.as_slice()) .await?; - Ok(object_option_from_row_option(row)?) + for node_weight in schema_node_weights { + match schema_contents.get(&node_weight.content_hash()) { + Some(content) => { + // NOTE(nick,jacob,zack): if we had a v2, then there would be migration logic here. + let SchemaContent::V1(inner) = content; + + schemas.push(Self::assemble(node_weight.id().into(), inner.to_owned())); + } + None => Err(WorkspaceSnapshotError::MissingContentFromStore( + node_weight.id(), + ))?, + } + } + + Ok(schemas) } - pub async fn default_schema_variant_id_for_name( + /// Lists all [`Schemas`](Schema) by ID in the workspace. 
+ pub async fn list_ids(ctx: &DalContext) -> SchemaResult> { + let workspace_snapshot = ctx.workspace_snapshot()?.read().await; + + let schema_category_index_id = + workspace_snapshot.get_category_node(None, CategoryNodeKind::Schema)?; + let schema_node_indices = workspace_snapshot.outgoing_targets_for_edge_weight_kind( + schema_category_index_id, + EdgeWeightKindDiscriminants::Use, + )?; + + let mut schema_ids = Vec::new(); + for index in schema_node_indices { + let raw_id = workspace_snapshot.get_node_weight(index)?.id(); + schema_ids.push(raw_id.into()); + } + + Ok(schema_ids) + } + + // NOTE(nick): this assumes that schema names are unique. + pub async fn find_by_name( ctx: &DalContext, name: impl AsRef, - ) -> SchemaResult { - let name = name.as_ref(); - let schemas = Schema::find_by_attr(ctx, "name", &name).await?; - let schema = schemas - .first() - .ok_or_else(|| SchemaError::NotFoundByName(name.into()))?; - let schema_variant_id = schema - .default_schema_variant_id() - .ok_or_else(|| SchemaError::NoDefaultVariant(*schema.id()))?; - - Ok(*schema_variant_id) + ) -> SchemaResult> { + let schema_node_indices = { + let workspace_snapshot = ctx.workspace_snapshot()?.read().await; + let schema_category_index_id = + workspace_snapshot.get_category_node(None, CategoryNodeKind::Schema)?; + workspace_snapshot.outgoing_targets_for_edge_weight_kind( + schema_category_index_id, + EdgeWeightKindDiscriminants::Use, + )? + }; + + // NOTE(nick): this algorithm could be better. + for schema_node_index in schema_node_indices { + let schema_node_weight = { + let workspace_snapshot = ctx.workspace_snapshot()?.read().await; + workspace_snapshot + .get_node_weight(schema_node_index)? + .get_content_node_weight_of_kind(ContentAddressDiscriminants::Schema)? 
+ }; + let schema = Self::get_by_id(ctx, schema_node_weight.id().into()).await?; + if schema.name == name.as_ref() { + return Ok(Some(schema)); + } + } + Ok(None) } } + +// impl Schema { +// pub async fn default_variant(&self, ctx: &DalContext) -> SchemaResult { +// match self.default_schema_variant_id() { +// Some(schema_variant_id) => Ok(SchemaVariant::get_by_id(ctx, schema_variant_id) +// .await? +// .ok_or_else(|| SchemaError::NoDefaultVariant(*self.id()))?), +// None => Err(SchemaError::NoDefaultVariant(*self.id())), +// } +// } +// +// pub async fn is_builtin(&self, ctx: &DalContext) -> SchemaResult { +// let row = ctx +// .txns() +// .await? +// .pg() +// .query_opt( +// "SELECT id FROM schemas WHERE id = $1 and tenancy_workspace_pk = $2 LIMIT 1", +// &[self.id(), &WorkspacePk::NONE], +// ) +// .await?; +// +// Ok(row.is_some()) +// } +// +// pub async fn find_by_name(ctx: &DalContext, name: impl AsRef) -> SchemaResult { +// let name = name.as_ref(); +// let schemas = Schema::find_by_attr(ctx, "name", &name).await?; +// schemas +// .first() +// .ok_or_else(|| SchemaError::NotFoundByName(name.into())) +// .cloned() +// } +// +// pub async fn find_by_name_builtin( +// ctx: &DalContext, +// name: impl AsRef, +// ) -> SchemaResult> { +// let name = name.as_ref(); +// +// let builtin_ctx = ctx.clone_with_new_tenancy(Tenancy::new(WorkspacePk::NONE)); +// let builtin_schema = Self::find_by_name(&builtin_ctx, name).await?; +// +// Ok(Self::get_by_id(ctx, builtin_schema.id()).await?) +// } +// +// pub async fn find_variant_by_name( +// &self, +// ctx: &DalContext, +// name: impl AsRef, +// ) -> SchemaResult> { +// let name: &str = name.as_ref(); +// let row = ctx +// .txns() +// .await? +// .pg() +// .query_opt( +// FIND_SCHEMA_VARIANT_BY_NAME_FOR_SCHEMA, +// &[ctx.tenancy(), ctx.visibility(), self.id(), &name], +// ) +// .await?; +// +// Ok(object_option_from_row_option(row)?) 
+// } +// +// pub async fn default_schema_variant_id_for_name( +// ctx: &DalContext, +// name: impl AsRef, +// ) -> SchemaResult { +// let name = name.as_ref(); +// let schemas = Schema::find_by_attr(ctx, "name", &name).await?; +// let schema = schemas +// .first() +// .ok_or_else(|| SchemaError::NotFoundByName(name.into()))?; +// let schema_variant_id = schema +// .default_schema_variant_id() +// .ok_or_else(|| SchemaError::NoDefaultVariant(*schema.id()))?; +// +// Ok(*schema_variant_id) +// } +// } diff --git a/lib/dal/src/schema/ui_menu.rs b/lib/dal/src/schema/ui_menu.rs index b6a1845893..9edbed322f 100644 --- a/lib/dal/src/schema/ui_menu.rs +++ b/lib/dal/src/schema/ui_menu.rs @@ -1,14 +1,6 @@ use serde::{Deserialize, Serialize}; -use telemetry::prelude::*; -use crate::{ - impl_standard_model, pk, standard_model, standard_model_accessor, standard_model_belongs_to, - DalContext, StandardModel, Tenancy, Timestamp, Visibility, -}; - -use super::{Schema, SchemaId, SchemaResult}; - -const FIND_FOR_SCHEMA: &str = include_str!("../queries/ui_menus_find_for_schema.sql"); +use crate::{impl_standard_model, pk, Tenancy, Timestamp, Visibility}; pk!(SchemaUiMenuPk); pk!(SchemaUiMenuId); @@ -36,68 +28,70 @@ impl_standard_model! { history_event_message_name: "Schema UI Menu" } -impl SchemaUiMenu { - pub async fn new( - ctx: &DalContext, - name: impl AsRef, - category: impl AsRef, - ) -> SchemaResult { - let name = name.as_ref(); - let category = category.as_ref(); +// impl SchemaUiMenu { +// #[instrument(skip_all)] +// pub async fn new( +// ctx: &DalContext, +// name: impl AsRef, +// category: impl AsRef, +// ) -> SchemaResult { +// let name = name.as_ref(); +// let category = category.as_ref(); - let row = ctx - .txns() - .await? 
- .pg() - .query_one( - "SELECT object FROM schema_ui_menu_create_v1($1, $2, $3, $4)", - &[ - ctx.tenancy(), - ctx.visibility(), - &(name.to_string()), - &(category.to_string()), - ], - ) - .await?; - let object = standard_model::finish_create_from_row(ctx, row).await?; - Ok(object) - } +// let row = ctx +// .txns() +// .await? +// .pg() +// .query_one( +// "SELECT object FROM schema_ui_menu_create_v1($1, $2, $3, $4)", +// &[ +// ctx.tenancy(), +// ctx.visibility(), +// &(name.to_string()), +// &(category.to_string()), +// ], +// ) +// .await?; +// let object = standard_model::finish_create_from_row(ctx, row).await?; +// Ok(object) +// } - standard_model_accessor!(name, String, SchemaResult); - standard_model_accessor!(category, String, SchemaResult); +// standard_model_accessor!(name, String, SchemaResult); +// standard_model_accessor!(category, String, SchemaResult); - standard_model_belongs_to!( - lookup_fn: schema, - set_fn: set_schema, - unset_fn: unset_schema, - table: "schema_ui_menu_belongs_to_schema", - model_table: "schemas", - belongs_to_id: SchemaId, - returns: Schema, - result: SchemaResult, - ); +// standard_model_belongs_to!( +// lookup_fn: schema, +// set_fn: set_schema, +// unset_fn: unset_schema, +// table: "schema_ui_menu_belongs_to_schema", +// model_table: "schemas", +// belongs_to_id: SchemaId, +// returns: Schema, +// result: SchemaResult, +// ); - pub async fn find_for_schema( - ctx: &DalContext, - schema_id: SchemaId, - ) -> SchemaResult> { - let maybe_row = ctx - .txns() - .await? - .pg() - .query_opt( - FIND_FOR_SCHEMA, - &[ctx.tenancy(), ctx.visibility(), &schema_id], - ) - .await?; +// #[instrument(skip_all)] +// pub async fn find_for_schema( +// ctx: &DalContext, +// schema_id: SchemaId, +// ) -> SchemaResult> { +// let maybe_row = ctx +// .txns() +// .await? 
+// .pg() +// .query_opt( +// FIND_FOR_SCHEMA, +// &[ctx.tenancy(), ctx.visibility(), &schema_id], +// ) +// .await?; - // NOTE(nick): currently, we are assuming there can only be one "schema_ui_menu" for a given - // schema id. This might not always be the case. - let maybe_object: Option = standard_model::option_object_from_row(maybe_row)?; - Ok(maybe_object) - } +// // NOTE(nick): currently, we are assuming there can only be one "schema_ui_menu" for a given +// // schema id. This might not always be the case. +// let maybe_object: Option = standard_model::option_object_from_row(maybe_row)?; +// Ok(maybe_object) +// } - pub fn category_path(&self) -> Vec { - self.category.split('.').map(|f| f.to_string()).collect() - } -} +// pub fn category_path(&self) -> Vec { +// self.category.split('.').map(|f| f.to_string()).collect() +// } +// } diff --git a/lib/dal/src/schema/variant.rs b/lib/dal/src/schema/variant.rs index 32be44e325..7445b48c2c 100644 --- a/lib/dal/src/schema/variant.rs +++ b/lib/dal/src/schema/variant.rs @@ -1,424 +1,454 @@ //! This module contains [`SchemaVariant`](crate::SchemaVariant), which is t/he "class" of a //! [`Component`](crate::Component). 
+use content_store::{ContentHash, Store}; +use petgraph::prelude::EdgeRef; +use petgraph::Direction; use serde::{Deserialize, Serialize}; +use std::collections::{HashMap, VecDeque}; +use strum::EnumDiscriminants; +use telemetry::prelude::*; use thiserror::Error; +use ulid::Ulid; -use si_data_nats::NatsError; -use si_data_pg::PgError; -use telemetry::prelude::*; +use crate::attribute::prototype::argument::{ + AttributePrototypeArgument, AttributePrototypeArgumentError, +}; +use crate::attribute::prototype::AttributePrototypeError; +use crate::change_set_pointer::ChangeSetPointerError; +use crate::func::argument::{FuncArgument, FuncArgumentError}; +use crate::func::intrinsics::IntrinsicFunc; +use crate::func::FuncError; +use crate::prop::{PropError, PropPath}; +use crate::provider::external::{ExternalProviderContent, ExternalProviderError}; +use crate::provider::internal::{InternalProviderContent, InternalProviderError}; +use crate::schema::variant::root_prop::RootProp; +use crate::workspace_snapshot::content_address::{ContentAddress, ContentAddressDiscriminants}; +use crate::workspace_snapshot::edge_weight::{ + EdgeWeight, EdgeWeightError, EdgeWeightKind, EdgeWeightKindDiscriminants, +}; +use crate::workspace_snapshot::graph::NodeIndex; -use crate::attribute::context::AttributeContextBuilder; -use crate::func::binding_return_value::FuncBindingReturnValueError; -use crate::prop::PropPath; -use crate::provider::internal::InternalProviderError; -use crate::schema::variant::definition::{SchemaVariantDefinitionError, SchemaVariantDefinitionId}; -use crate::schema::variant::root_prop::component_type::ComponentType; -use crate::schema::variant::root_prop::SiPropChild; -use crate::standard_model::{object_from_row, option_object_from_row}; -use crate::AttributePrototypeArgument; +use crate::workspace_snapshot::node_weight::{NodeWeight, NodeWeightError, PropNodeWeight}; +use crate::workspace_snapshot::WorkspaceSnapshotError; use crate::{ - func::{ - argument::{FuncArgument, 
FuncArgumentError}, - binding::FuncBindingError, - binding_return_value::FuncBindingReturnValueId, - }, - impl_standard_model, pk, - schema::{RootProp, SchemaError}, - socket::{Socket, SocketError, SocketId}, - standard_model::{self, objects_from_rows}, - standard_model_accessor, standard_model_belongs_to, standard_model_many_to_many, - AttributeContextBuilderError, AttributePrototype, AttributePrototypeArgumentError, - AttributePrototypeError, AttributeReadContext, AttributeValue, AttributeValueError, - AttributeValueId, BuiltinsError, Component, ComponentError, ComponentId, DalContext, - ExternalProvider, ExternalProviderError, Func, FuncBackendResponseType, FuncBindingReturnValue, - FuncError, FuncId, HistoryEventError, InternalProvider, Prop, PropError, PropId, PropKind, - ReconciliationPrototypeError, RootPropChild, Schema, SchemaId, SocketArity, StandardModel, - StandardModelError, Tenancy, Timestamp, TransactionsError, Visibility, WorkspacePk, - WsEventError, + pk, + schema::variant::leaves::{LeafInput, LeafInputLocation, LeafKind}, + AttributePrototype, AttributePrototypeId, ComponentId, DalContext, ExternalProvider, + ExternalProviderId, Func, FuncId, InternalProvider, Prop, PropId, PropKind, Schema, + SchemaError, SchemaId, Timestamp, TransactionsError, }; +use crate::{FuncBackendResponseType, InternalProviderId}; -use self::leaves::{LeafInput, LeafInputLocation, LeafKind}; +use self::root_prop::RootPropChild; + +// use self::leaves::{LeafInput, LeafInputLocation, LeafKind}; pub mod definition; pub mod leaves; pub mod root_prop; -const ALL_FUNCS: &str = include_str!("../queries/schema_variant/all_related_funcs.sql"); -const ALL_PROPS: &str = include_str!("../queries/schema_variant/all_props.sql"); -const FIND_ROOT_PROP: &str = include_str!("../queries/schema_variant/find_root_prop.sql"); -const FIND_LEAF_ITEM_PROP: &str = include_str!("../queries/schema_variant/find_leaf_item_prop.sql"); -const FIND_ROOT_CHILD_IMPLICIT_INTERNAL_PROVIDER: &str = - 
include_str!("../queries/schema_variant/find_root_child_implicit_internal_provider.sql"); -const LIST_ROOT_SI_CHILD_PROPS: &str = - include_str!("../queries/schema_variant/list_root_si_child_props.sql"); -const SECRET_DEFINING_SCHEMA_VARIANTS: &str = - include_str!("../queries/schema_variant/secret_defining_schema_variants.sql"); +// FIXME(nick,theo): colors should be required for all schema variants. +// There should be no default in the backend as there should always be a color. +pub const DEFAULT_SCHEMA_VARIANT_COLOR: &str = "00b0bc"; +pub const SCHEMA_VARIANT_VERSION: SchemaVariantContentDiscriminants = + SchemaVariantContentDiscriminants::V1; #[remain::sorted] #[derive(Error, Debug)] pub enum SchemaVariantError { - #[error("attribute context builder error: {0}")] - AttributeContextBuilder(#[from] AttributeContextBuilderError), #[error("attribute prototype error: {0}")] AttributePrototype(#[from] AttributePrototypeError), - #[error("attribute prototype argument error: {0}")] + #[error("attribute argument prototype error: {0}")] AttributePrototypeArgument(#[from] AttributePrototypeArgumentError), - #[error("attribute value error: {0}")] - AttributeValue(#[from] AttributeValueError), - /// Not parent [`AttributeValue`](crate::AttributeValue) was found for the specified - /// [`AttributeValueId`](crate::AttributeValue). - #[error("no parent found for attribute value: {0}")] - AttributeValueDoesNotHaveParent(AttributeValueId), - /// An [`AttributeValue`](crate::AttributeValue) could not be found for the specified - /// [`AttributeReadContext`](crate::AttributeReadContext). 
- #[error("attribute value not found for attribute read context: {0:?}")] - AttributeValueNotFoundForContext(Box), - #[error(transparent)] - Builtins(#[from] Box), - #[error(transparent)] - Component(#[from] Box), - #[error(transparent)] + #[error("change set error: {0}")] + ChangeSet(#[from] ChangeSetPointerError), + #[error("edge weight error: {0}")] + EdgeWeight(#[from] EdgeWeightError), + #[error("external provider error: {0}")] ExternalProvider(#[from] ExternalProviderError), - #[error("can neither provide children nor entry for primitive with name: ({0})")] - FoundChildrenAndEntryForPrimitive(String), - #[error("cannot provide children for array with name: ({0})")] - FoundChildrenForArray(String), - #[error("cannot provide children for primitive with name: ({0})")] - FoundChildrenForPrimitive(String), - #[error("cannot provide entry for object with name: ({0})")] - FoundEntryForObject(String), - #[error("cannot provide entry for primitive with name: ({0})")] - FoundEntryForPrimitive(String), #[error("func error: {0}")] Func(#[from] FuncError), #[error("func argument error: {0}")] FuncArgument(#[from] FuncArgumentError), - #[error("func binding error: {0}")] - FuncBinding(#[from] FuncBindingError), - #[error("func binding return value error: {0}")] - FuncBindingReturnValue(#[from] FuncBindingReturnValueError), - #[error("func binding return value not found {0}")] - FuncBindingReturnValueNotFound(FuncBindingReturnValueId), - #[error("history event error: {0}")] - HistoryEvent(#[from] HistoryEventError), #[error("internal provider error: {0}")] InternalProvider(#[from] InternalProviderError), - #[error("must provide valid schema variant, found unset schema variant id")] - InvalidSchemaVariant, - #[error("leaf function response type ({0}) must match leaf kind ({0})")] - LeafFunctionMismatch(FuncBackendResponseType, LeafKind), - #[error("leaf function ({0}) must be JsAttribute")] + #[error("Func {0} of response type {1} cannot set leaf {2:?}")] + 
LeafFunctionMismatch(FuncId, FuncBackendResponseType, LeafKind), + #[error("func {0} not a JsAttribute func, required for leaf functions")] LeafFunctionMustBeJsAttribute(FuncId), - #[error("link not found in doc links map for doc link ref: {0}")] - LinkNotFoundForDocLinkRef(String), - #[error("must provide children for object with name: ({0})")] - MissingChildrenForObject(String), - #[error("must provide entry for array with name: ({0})")] - MissingEntryForArray(String), - #[error("missing a func in attribute update: {0} not found")] - MissingFunc(String), - #[error("Schema is missing for SchemaVariant {0}")] - MissingSchema(SchemaVariantId), - #[error("cannot use doc link and doc link ref for prop definition name: ({0})")] - MultipleDocLinksProvided(String), - #[error("nats txn error: {0}")] - Nats(#[from] NatsError), - #[error("schema variant not found: {0}")] - NotFound(SchemaVariantId), - #[error("parent prop not found for prop id: {0}")] - ParentPropNotFound(PropId), - #[error("pg error: {0}")] - Pg(#[from] PgError), + #[error("Leaf map prop not found for item prop {0}")] + LeafMapPropNotFound(PropId), + #[error("node weight error: {0}")] + NodeWeight(#[from] NodeWeightError), #[error("prop error: {0}")] Prop(#[from] PropError), - /// This variant indicates that a [`Prop`](crate::Prop) or [`PropId`](crate::Prop) was not - /// found. However, it does not _describe_ the attempt to locate the object in question. The - /// "json pointer" piece is purely meant to help describe the location. 
- #[error("prop not found corresponding to the following json pointer: {0}")] - PropNotFound(&'static str), - #[error("cannot find prop at path {1} for SchemaVariant {0} and Visibility {2:?}")] - PropNotFoundAtPath(SchemaVariantId, String, Visibility), - #[error("prop not found in cache for name ({0}) and parent prop id ({1})")] - PropNotFoundInCache(String, PropId), - #[error("reconciliation prototype: {0}")] - ReconciliationPrototype(#[from] ReconciliationPrototypeError), + #[error("found prop id {0} that is not a prop")] + PropIdNotAProp(PropId), + #[error("schema variant {0} has no root node")] + RootNodeMissing(SchemaVariantId), #[error("schema error: {0}")] - Schema(#[from] Box), - #[error("schema variant definition error")] - SchemaVariantDefinition(#[from] SchemaVariantDefinitionError), - #[error("error serializing/deserializing json: {0}")] - SerdeJson(#[from] serde_json::Error), - #[error("socket error: {0}")] - Socket(#[from] SocketError), - #[error("standard model error: {0}")] - StandardModel(#[from] StandardModelError), - #[error("std error: {0}")] - Std(#[from] Box), + Schema(#[from] SchemaError), + #[error("schema not found for schema variant: {0}")] + SchemaNotFound(SchemaVariantId), + #[error("serde json error: {0}")] + Serde(#[from] serde_json::Error), + #[error("store error: {0}")] + Store(#[from] content_store::StoreError), #[error("transactions error: {0}")] Transactions(#[from] TransactionsError), - #[error("ws event error: {0}")] - WsEvent(#[from] WsEventError), + #[error("could not acquire lock: {0}")] + TryLock(#[from] tokio::sync::TryLockError), + #[error("workspace snapshot error: {0}")] + WorkspaceSnapshot(#[from] WorkspaceSnapshotError), } pub type SchemaVariantResult = Result; -pk!(SchemaVariantPk); pk!(SchemaVariantId); -type DateTimeUtc = chrono::DateTime; - #[derive(Deserialize, Serialize, Debug, Clone, PartialEq, Eq)] pub struct SchemaVariant { - pk: SchemaVariantPk, id: SchemaVariantId, #[serde(flatten)] - tenancy: Tenancy, - 
#[serde(flatten)] timestamp: Timestamp, - #[serde(flatten)] - visibility: Visibility, - ui_hidden: bool, - default_color: Option, name: String, - /// The [`RootProp`](crate::RootProp) for [`self`](Self). - root_prop_id: Option, - schema_variant_definition_id: Option, + // The [`RootProp`](crate::RootProp) for [`self`](Self). + //root_prop_id: Option, + // schema_variant_definition_id: Option, link: Option, - pkg_created_at: Option, - // NOTE(nick): we may want to replace this with a better solution. We use this to ensure - // components are not created unless the variant has been finalized at least once. finalized_once: bool, + category: String, } -impl_standard_model! { - model: SchemaVariant, - pk: SchemaVariantPk, - id: SchemaVariantId, - table_name: "schema_variants", - history_event_label_base: "schema_variant", - history_event_message_name: "Schema Variant" +#[derive(EnumDiscriminants, Serialize, Deserialize, PartialEq)] +pub enum SchemaVariantContent { + V1(SchemaVariantContentV1), +} + +#[derive(Debug, Clone, Deserialize, Serialize, PartialEq)] +pub struct SchemaVariantContentV1 { + pub timestamp: Timestamp, + pub ui_hidden: bool, + pub name: String, + // The [`RootProp`](crate::RootProp) for [`self`](Self). + // pub root_prop_id: Option, + // pub schema_variant_definition_id: Option, + pub link: Option, + pub finalized_once: bool, + pub category: String, } impl SchemaVariant { - /// Create a [`SchemaVariant`](Self) with a [`RootProp`](crate::schema::RootProp). + pub fn assemble(id: SchemaVariantId, inner: SchemaVariantContentV1) -> Self { + Self { + id, + timestamp: inner.timestamp, + name: inner.name, + link: inner.link, + ui_hidden: inner.ui_hidden, + finalized_once: inner.finalized_once, + category: inner.category, + } + } + pub async fn new( ctx: &DalContext, schema_id: SchemaId, - name: impl AsRef, + name: impl Into, + category: impl Into, ) -> SchemaVariantResult<(Self, RootProp)> { - let name = name.as_ref(); - let row = ctx - .txns() - .await? 
- .pg() - .query_one( - "SELECT object FROM schema_variant_create_v1($1, $2, $3)", - &[ctx.tenancy(), ctx.visibility(), &name], - ) - .await?; - let mut object: SchemaVariant = standard_model::finish_create_from_row(ctx, row).await?; - let root_prop = object.create_and_set_root_prop(ctx, schema_id).await?; - object.set_schema(ctx, &schema_id).await?; + info!("creating schema variant and root prop tree"); + let content = SchemaVariantContentV1 { + timestamp: Timestamp::now(), + name: name.into(), + link: None, + ui_hidden: false, + finalized_once: false, + category: category.into(), + }; + let hash = ctx + .content_store() + .lock() + .await + .add(&SchemaVariantContent::V1(content.clone()))?; + + let change_set = ctx.change_set_pointer()?; + let id = change_set.generate_ulid()?; + { + let mut workspace_snapshot = ctx.workspace_snapshot()?.write().await; + let node_weight = + NodeWeight::new_content(change_set, id, ContentAddress::SchemaVariant(hash))?; + let _node_index = workspace_snapshot.add_node(node_weight)?; + + // Schema --Use--> SchemaVariant (this) + workspace_snapshot.add_edge( + schema_id, + EdgeWeight::new(change_set, EdgeWeightKind::Use)?, + id, + )?; + } - let (identity_func, identity_func_binding, identity_func_binding_return_value) = - Func::identity_with_binding_and_return_value(ctx).await?; + let schema_variant_id: SchemaVariantId = id.into(); + let root_prop = RootProp::new(ctx, schema_variant_id).await?; + let _func_id = Func::find_intrinsic(ctx, IntrinsicFunc::Identity).await?; - // all nodes can be turned into frames therefore, they will need a frame input socket - // the UI itself will determine if this socket is available to be connected - let (_frame_internal_provider, _input_socket) = InternalProvider::new_explicit_with_socket( - ctx, - *object.id(), - "Frame", - *identity_func.id(), - *identity_func_binding.id(), - *identity_func_binding_return_value.id(), - serde_json::to_string(&vec!["Frame"])?, - SocketArity::Many, - true, - ) - 
.await?; + let schema_variant = Self::assemble(id.into(), content); + Ok((schema_variant, root_prop)) + } - let (_output_provider, _output_socket) = ExternalProvider::new_with_socket( - ctx, - schema_id, - *object.id(), - "Frame", - None, - *identity_func.id(), - *identity_func_binding.id(), - *identity_func_binding_return_value.id(), - serde_json::to_string(&vec!["Frame"])?, - SocketArity::Many, - true, - ) - .await?; + pub async fn dump_props_as_list(&self, ctx: &DalContext) -> SchemaVariantResult> { + let mut props = vec![]; - Ok((object, root_prop)) + let root_prop_id = Self::get_root_prop_id(ctx, self.id()).await?; + let mut work_queue = VecDeque::from([(root_prop_id, None::)]); + + while let Some((prop_id, maybe_parent_path)) = work_queue.pop_front() { + let workspace_snapshot = ctx.workspace_snapshot()?.read().await; + let node_weight = workspace_snapshot.get_node_weight_by_id(prop_id)?; + + match node_weight { + NodeWeight::Prop(prop_inner) => { + let name = prop_inner.name(); + + let path = match &maybe_parent_path { + Some(parent_path) => parent_path.join(&PropPath::new([name])), + None => PropPath::new([name]), + }; + + props.push(path.clone()); + + if let Some(ordering_node_idx) = workspace_snapshot + .outgoing_targets_for_edge_weight_kind( + prop_id, + EdgeWeightKindDiscriminants::Ordering, + )? + .first() + { + let ordering_node_weight = workspace_snapshot + .get_node_weight(*ordering_node_idx)? 
+ .get_ordering_node_weight()?; + + for &id in ordering_node_weight.order() { + work_queue.push_back((id.into(), Some(path.clone()))); + } + } + } + _ => return Err(SchemaVariantError::PropIdNotAProp(prop_id)), + } + } + + Ok(props) } - pub async fn is_builtin(&self, ctx: &DalContext) -> SchemaVariantResult { - let row = ctx - .txns() + pub async fn get_by_id(ctx: &DalContext, id: SchemaVariantId) -> SchemaVariantResult { + let workspace_snapshot = ctx.workspace_snapshot()?.read().await; + + let node_index = workspace_snapshot.get_node_index_by_id(id)?; + let node_weight = workspace_snapshot.get_node_weight(node_index)?; + let hash = node_weight.content_hash(); + + let content: SchemaVariantContent = ctx + .content_store() + .lock() + .await + .get(&hash) .await? - .pg() - .query_opt( - "SELECT id FROM schema_variants WHERE id = $1 and tenancy_workspace_pk = $2 LIMIT 1", - &[self.id(), &WorkspacePk::NONE], - ) - .await?; + .ok_or(WorkspaceSnapshotError::MissingContentFromStore(id.into()))?; - Ok(row.is_some()) + // NOTE(nick,jacob,zack): if we had a v2, then there would be migration logic here + let SchemaVariantContent::V1(inner) = content; + + Ok(Self::assemble(id, inner)) } - /// This _idempotent_ function "finalizes" a [`SchemaVariant`]. - /// - /// Once a [`SchemaVariant`] has had all of its [`Props`](crate::Prop) created, there are a few - /// things that need to happen before it is usable: - /// - /// * Create the default [`AttributePrototypes`](crate::AttributePrototype) and - /// [`AttributeValues`](crate::AttributeValue). - /// * Create the _internally consuming_ [`InternalProviders`](crate::InternalProvider) - /// corresponding to every [`Prop`](crate::Prop) in the [`SchemaVariant`] that is not a - /// descendant of an Array or a Map. - /// - /// This method **MUST** be called once all the [`Props`](Prop) have been created for the - /// [`SchemaVariant`]. 
It can be called multiple times while [`Props`](Prop) are being created, - /// but it must be called once after all [`Props`](Prop) have been created. - pub async fn finalize( - &mut self, + pub async fn find_root_child_prop_id( ctx: &DalContext, - component_type: Option, - ) -> SchemaVariantResult<()> { - let total_start = std::time::Instant::now(); + schema_variant_id: SchemaVariantId, + root_prop_child: RootPropChild, + ) -> SchemaVariantResult { + Ok( + Prop::find_prop_id_by_path(ctx, schema_variant_id, &root_prop_child.prop_path()) + .await?, + ) + } + + /// Lists all [`SchemaVariants`](SchemaVariant) by ID in the workspace. + pub async fn list_ids(ctx: &DalContext) -> SchemaVariantResult> { + let schema_ids = Schema::list_ids(ctx).await?; + + let mut schema_variant_ids = Vec::new(); - Self::create_default_prototypes_and_values(ctx, self.id).await?; - Self::create_implicit_internal_providers(ctx, self.id).await?; - if !self.finalized_once() { - self.set_finalized_once(ctx, true).await?; + for schema_id in schema_ids { + let workspace_snapshot = ctx.workspace_snapshot()?.read().await; + + let schema_variant_node_indices = workspace_snapshot + .outgoing_targets_for_edge_weight_kind( + schema_id, + EdgeWeightKindDiscriminants::Use, + )?; + + for schema_variant_node_index in schema_variant_node_indices { + let raw_id = workspace_snapshot + .get_node_weight(schema_variant_node_index)? + .id(); + schema_variant_ids.push(raw_id.into()); + } } - // Default to the standard "component" component type. - let component_type = match component_type { - Some(component_type) => component_type, - None => ComponentType::Component, - }; + Ok(schema_variant_ids) + } - // Find props that we need to set defaults on for _all_ schema variants. - // FIXME(nick): use the enum and create an appropriate query. - let mut maybe_type_prop_id = None; - let mut maybe_protected_prop_id = None; - for root_si_child_prop in Self::list_root_si_child_props(ctx, self.id).await? 
{ - if root_si_child_prop.name() == "type" { - maybe_type_prop_id = Some(*root_si_child_prop.id()) - } else if root_si_child_prop.name() == "protected" { - maybe_protected_prop_id = Some(*root_si_child_prop.id()) + pub async fn list_for_schema( + ctx: &DalContext, + schema_id: SchemaId, + ) -> SchemaVariantResult> { + let workspace_snapshot = ctx.workspace_snapshot()?.read().await; + + let mut schema_variants = vec![]; + let parent_index = workspace_snapshot.get_node_index_by_id(schema_id)?; + + let node_indices = workspace_snapshot.outgoing_targets_for_edge_weight_kind_by_index( + parent_index, + EdgeWeightKindDiscriminants::Use, + )?; + + let mut node_weights = vec![]; + let mut content_hashes = vec![]; + for index in node_indices { + let node_weight = workspace_snapshot + .get_node_weight(index)? + .get_content_node_weight_of_kind(ContentAddressDiscriminants::SchemaVariant)?; + content_hashes.push(node_weight.content_hash()); + node_weights.push(node_weight); + } + + let content_map: HashMap = ctx + .content_store() + .lock() + .await + .get_bulk(content_hashes.as_slice()) + .await?; + + for node_weight in node_weights { + match content_map.get(&node_weight.content_hash()) { + Some(func_content) => { + // NOTE(nick,jacob,zack): if we had a v2, then there would be migration logic here. + let SchemaVariantContent::V1(inner) = func_content; + + schema_variants.push(Self::assemble(node_weight.id().into(), inner.to_owned())); + } + None => Err(WorkspaceSnapshotError::MissingContentFromStore( + node_weight.id(), + ))?, } } - let type_prop_id = - maybe_type_prop_id.ok_or(SchemaVariantError::PropNotFound("/root/si/type"))?; - let protected_prop_id = maybe_protected_prop_id - .ok_or(SchemaVariantError::PropNotFound("/root/si/protected"))?; - // Set the default type of the schema variant. - let attribute_read_context = AttributeReadContext::default_with_prop(type_prop_id); - let attribute_value = AttributeValue::find_for_context(ctx, attribute_read_context) - .await? 
- .ok_or_else(|| { - SchemaVariantError::AttributeValueNotFoundForContext(attribute_read_context.into()) - })?; - let parent_attribute_value = attribute_value - .parent_attribute_value(ctx) - .await? - .ok_or_else(|| { - SchemaVariantError::AttributeValueDoesNotHaveParent(*attribute_value.id()) - })?; - let context = AttributeContextBuilder::from(attribute_read_context).to_context()?; - AttributeValue::update_for_context( - ctx, - *attribute_value.id(), - Some(*parent_attribute_value.id()), - context, - Some(serde_json::to_value(component_type)?), - None, - ) - .await?; + Ok(schema_variants) + } - // Ensure _all_ schema variants are not protected by default. - let attribute_read_context = AttributeReadContext::default_with_prop(protected_prop_id); - let attribute_value = AttributeValue::find_for_context(ctx, attribute_read_context) - .await? - .ok_or_else(|| { - SchemaVariantError::AttributeValueNotFoundForContext(attribute_read_context.into()) - })?; - let parent_attribute_value = attribute_value - .parent_attribute_value(ctx) - .await? - .ok_or_else(|| { - SchemaVariantError::AttributeValueDoesNotHaveParent(*attribute_value.id()) - })?; - let context = AttributeContextBuilder::from(attribute_read_context).to_context()?; - AttributeValue::update_for_context( - ctx, - *attribute_value.id(), - Some(*parent_attribute_value.id()), - context, - Some(serde_json::json![false]), - None, - ) - .await?; + pub fn id(&self) -> SchemaVariantId { + self.id + } - debug!("finalizing {:?} took {:?}", self.id, total_start.elapsed()); - Ok(()) + pub fn ui_hidden(&self) -> bool { + self.ui_hidden } - /// Create the default [`AttributePrototypes`](crate::AttributePrototype) and - /// [`AttributeValues`](crate::AttributeValue) for the [`Props`](Prop) of the - /// [`SchemaVariant`]. - /// - /// This method is idempotent, and may be safely called multiple times before - /// [`SchemaVariant.finalize(ctx)`](SchemaVariant#finalize()) is called. 
- pub async fn create_default_prototypes_and_values( + pub fn name(&self) -> &str { + &self.name + } + + pub fn category(&self) -> &str { + &self.category + } + + pub async fn get_root_prop_id( ctx: &DalContext, schema_variant_id: SchemaVariantId, - ) -> SchemaVariantResult<()> { - let root_prop = match Self::find_root_prop(ctx, schema_variant_id).await? { - Some(root_prop) => root_prop, - None => return Ok(()), - }; + ) -> SchemaVariantResult { + let root_prop_node_weight = Self::get_root_prop_node_weight(ctx, schema_variant_id).await?; + Ok(root_prop_node_weight.id().into()) + } - Ok(Prop::create_default_prototypes_and_values(ctx, *root_prop.id()).await?) + async fn get_root_prop_node_weight( + ctx: &DalContext, + schema_variant_id: SchemaVariantId, + ) -> SchemaVariantResult { + let workspace_snapshot = ctx.workspace_snapshot()?.read().await; + let edge_targets: Vec = workspace_snapshot + .edges_directed(schema_variant_id, Direction::Outgoing)? + .map(|edge_ref| edge_ref.target()) + .collect(); + + for index in edge_targets { + let node_weight = workspace_snapshot.get_node_weight(index)?; + // TODO(nick): ensure that only one prop can be under a schema variant. + if let NodeWeight::Prop(inner_weight) = node_weight { + if inner_weight.name() == "root" { + return Ok(inner_weight.clone()); + } + } + } + + Err(SchemaVariantError::RootNodeMissing(schema_variant_id)) } - /// Creates _internally consuming_ [`InternalProviders`](crate::InternalProvider) corresponding - /// to every [`Prop`](crate::Prop) in the [`SchemaVariant`] that is not a descendant of an array - /// or a map. - async fn create_implicit_internal_providers( + pub async fn create_default_prototypes( ctx: &DalContext, schema_variant_id: SchemaVariantId, ) -> SchemaVariantResult<()> { - // If no props have been created for the schema variant, there are no internal providers - // to create. - let root_prop = match Self::find_root_prop(ctx, schema_variant_id).await? 
{ - Some(root_prop) => root_prop, - None => return Ok(()), - }; - - let mut work_queue = vec![root_prop]; + info!("creating default prototypes"); + let change_set = ctx.change_set_pointer()?; + let func_id = Func::find_intrinsic(ctx, IntrinsicFunc::Unset).await?; + let root_prop_node_weight = Self::get_root_prop_node_weight(ctx, schema_variant_id).await?; + let mut work_queue: VecDeque = VecDeque::from(vec![root_prop_node_weight]); + + while let Some(prop) = work_queue.pop_front() { + // See an attribute prototype exists. + let mut found_attribute_prototype_id: Option = None; + { + let workspace_snapshot = ctx.workspace_snapshot()?.read().await; + let targets = workspace_snapshot.outgoing_targets_for_edge_weight_kind( + prop.id(), + EdgeWeightKindDiscriminants::Prototype, + )?; + for target in targets { + let node_weight = workspace_snapshot.get_node_weight(target)?; + if let Some(ContentAddressDiscriminants::AttributePrototype) = + node_weight.content_address_discriminants() + { + found_attribute_prototype_id = Some(node_weight.id().into()); + break; + } + } + } - while let Some(work) = work_queue.pop() { - let maybe_existing_implicit_internal_provider = - InternalProvider::find_for_prop(ctx, *work.id()).await?; - if maybe_existing_implicit_internal_provider.is_none() { - InternalProvider::new_implicit(ctx, *work.id(), SchemaVariantId::NONE).await?; + // Create the attribute prototype and appropriate edges if they do not exist. + if found_attribute_prototype_id.is_none() { + // We did not find a prototype, so we must create one. + let attribute_prototype = AttributePrototype::new(ctx, func_id).await?; + + // New edge Prop --Prototype--> AttributePrototype. + let mut workspace_snapshot = ctx.workspace_snapshot()?.write().await; + workspace_snapshot.add_edge( + prop.id(), + EdgeWeight::new(change_set, EdgeWeightKind::Prototype(None))?, + attribute_prototype.id(), + )?; } - // Only check for child props if the current prop is of kind object. 
- if work.kind() == &PropKind::Object { - let child_props = work.child_props(ctx).await?; - if !child_props.is_empty() { - work_queue.extend(child_props); + // Push all children onto the work queue. + let workspace_snapshot = ctx.workspace_snapshot()?.read().await; + let targets = workspace_snapshot.outgoing_targets_for_edge_weight_kind( + prop.id(), + EdgeWeightKindDiscriminants::Use, + )?; + for target in targets { + let node_weight = workspace_snapshot.get_node_weight(target)?; + if let NodeWeight::Prop(child_prop) = node_weight { + work_queue.push_back(child_prop.to_owned()) } } } @@ -426,167 +456,202 @@ impl SchemaVariant { Ok(()) } - standard_model_accessor!(default_color, Option, SchemaVariantResult); - standard_model_accessor!(pkg_created_at, Option, SchemaVariantResult); - standard_model_accessor!(ui_hidden, bool, SchemaVariantResult); - standard_model_accessor!(name, String, SchemaVariantResult); - standard_model_accessor!(root_prop_id, Option, SchemaVariantResult); - standard_model_accessor!(link, Option, SchemaVariantResult); - standard_model_accessor!(finalized_once, bool, SchemaVariantResult); - standard_model_accessor!( - schema_variant_definition_id, - Option, - SchemaVariantResult - ); - - pub async fn color(&self, ctx: &DalContext) -> SchemaVariantResult> { - if let Some(color) = self.default_color() { - return Ok(Some(color.to_owned())); - } - - let attribute_value = Component::find_si_child_attribute_value( - ctx, - ComponentId::NONE, - self.id, - SiPropChild::Color, - ) - .await - .map_err(Box::new)?; - let func_binding_return_value = - FuncBindingReturnValue::get_by_id(ctx, &attribute_value.func_binding_return_value_id()) - .await? 
- .ok_or_else(|| { - SchemaVariantError::FuncBindingReturnValueNotFound( - attribute_value.func_binding_return_value_id(), - ) - })?; - - let color = func_binding_return_value - .value() - .cloned() - .map(serde_json::from_value) - .transpose()?; + pub async fn mark_props_as_able_to_be_used_as_prototype_args( + ctx: &DalContext, + schema_variant_id: SchemaVariantId, + ) -> SchemaVariantResult<()> { + let root_prop_node_weight = Self::get_root_prop_node_weight(ctx, schema_variant_id).await?; + let root_prop_idx = { + let workspace_snapshot = ctx.workspace_snapshot()?.read().await; + workspace_snapshot.get_node_index_by_id(root_prop_node_weight.id())? + }; - if let Some(color) = color.clone() { - self.clone().set_default_color(ctx, Some(color)).await?; + let mut work_queue = VecDeque::new(); + work_queue.push_back(root_prop_idx); + + while let Some(prop_idx) = work_queue.pop_front() { + let mut workspace_snapshot = ctx.workspace_snapshot()?.write().await; + workspace_snapshot.mark_prop_as_able_to_be_used_as_prototype_arg(prop_idx)?; + + let node_weight = workspace_snapshot.get_node_weight(prop_idx)?.to_owned(); + if let NodeWeight::Prop(prop) = node_weight { + // Only descend if we are an object. 
+ if prop.kind() == PropKind::Object { + let targets = workspace_snapshot.outgoing_targets_for_edge_weight_kind( + prop.id(), + EdgeWeightKindDiscriminants::Use, + )?; + work_queue.extend(targets); + } + } } - Ok(color) + Ok(()) } - pub async fn set_color(&mut self, ctx: &DalContext, color: String) -> SchemaVariantResult<()> { - self.set_default_color(ctx, Some(color.clone())).await?; + pub async fn new_action_prototype( + ctx: &DalContext, + func_id: FuncId, + schema_variant_id: SchemaVariantId, + ) -> SchemaVariantResult<()> { + let mut workspace_snapshot = ctx.workspace_snapshot()?.write().await; + workspace_snapshot.add_edge( + schema_variant_id, + EdgeWeight::new(ctx.change_set_pointer()?, EdgeWeightKind::Use)?, + func_id, + )?; + Ok(()) + } - let attribute_value = Component::find_si_child_attribute_value( - ctx, - ComponentId::NONE, - self.id, - SiPropChild::Color, - ) - .await - .map_err(Box::new)?; - let prop = Prop::get_by_id(ctx, &attribute_value.context.prop_id()) - .await? - .ok_or(PropError::NotFound( - attribute_value.context.prop_id(), - *ctx.visibility(), - ))?; - prop.set_default_value(ctx, color).await?; + pub async fn new_authentication_prototype( + ctx: &DalContext, + func_id: FuncId, + schema_variant_id: SchemaVariantId, + ) -> SchemaVariantResult<()> { + let mut workspace_snapshot = ctx.workspace_snapshot()?.write().await; + workspace_snapshot.add_edge( + schema_variant_id, + EdgeWeight::new( + ctx.change_set_pointer()?, + EdgeWeightKind::AuthenticationPrototype, + )?, + func_id, + )?; Ok(()) } - standard_model_belongs_to!( - lookup_fn: schema, - set_fn: set_schema, - unset_fn: unset_schema, - table: "schema_variant_belongs_to_schema", - model_table: "schemas", - belongs_to_id: SchemaId, - returns: Schema, - result: SchemaVariantResult, - ); - - standard_model_many_to_many!( - lookup_fn: sockets, - associate_fn: add_socket, - disassociate_fn: remove_socket, - table_name: "socket_many_to_many_schema_variants", - left_table: "sockets", - 
left_id: SocketId, - right_table: "schema_variants", - right_id: SchemaId, - which_table_is_this: "right", - returns: Socket, - result: SchemaVariantResult, - ); - - /// List all direct child [`Props`](crate::Prop) of the [`Prop`](crate::Prop) corresponding - /// to "/root/si". - pub async fn list_root_si_child_props( + #[allow(dead_code)] + async fn get_content( ctx: &DalContext, schema_variant_id: SchemaVariantId, - ) -> SchemaVariantResult> { - let rows = ctx - .txns() + ) -> SchemaVariantResult<(ContentHash, SchemaVariantContentV1)> { + let workspace_snapshot = ctx.workspace_snapshot()?.read().await; + let id: Ulid = schema_variant_id.into(); + let node_index = workspace_snapshot.get_node_index_by_id(id)?; + let node_weight = workspace_snapshot.get_node_weight(node_index)?; + let hash = node_weight.content_hash(); + + let content: SchemaVariantContent = ctx + .content_store() + .lock() + .await + .get(&hash) .await? - .pg() - .query( - LIST_ROOT_SI_CHILD_PROPS, - &[ctx.tenancy(), ctx.visibility(), &schema_variant_id], - ) - .await?; - Ok(objects_from_rows(rows)?) + .ok_or(WorkspaceSnapshotError::MissingContentFromStore(id))?; + + // NOTE(nick,jacob,zack): if we had a v2, then there would be migration logic here. + let SchemaVariantContent::V1(inner) = content; + + Ok((hash, inner)) } - /// Find all [`Props`](crate::Prop) for a given [`SchemaVariantId`](SchemaVariant). - pub async fn all_props( + /// This _idempotent_ function "finalizes" a [`SchemaVariant`]. + /// + /// Once a [`SchemaVariant`] has had all of its [`Props`](crate::Prop) created, there are a few + /// things that need to happen before it is usable: + /// + /// * Create the default [`AttributePrototypes`](crate::AttributePrototype) + /// * Create the _internally consuming_ [`InternalProviders`](crate::InternalProvider) + /// corresponding to every [`Prop`](crate::Prop) in the [`SchemaVariant`] that is not a + /// descendant of an Array or a Map. 
+ /// + /// This method **MUST** be called once all the [`Props`](Prop) have been created for the + /// [`SchemaVariant`]. It can be called multiple times while [`Props`](Prop) are being created, + /// but it must be called once after all [`Props`](Prop) have been created. + pub async fn finalize( ctx: &DalContext, schema_variant_id: SchemaVariantId, - ) -> SchemaVariantResult> { - let rows = ctx - .txns() - .await? - .pg() - .query( - ALL_PROPS, - &[ctx.tenancy(), ctx.visibility(), &schema_variant_id], - ) - .await?; - Ok(objects_from_rows(rows)?) + ) -> SchemaVariantResult<()> { + Self::create_default_prototypes(ctx, schema_variant_id).await?; + //Self::mark_props_as_able_to_be_used_as_prototype_args(ctx, schema_variant_id)?; + + // TODO(nick,jacob,zack): if we are going to copy the existing system (which we likely will), we need to + // set "/root/si/type" and "/root/si/protected". + + Ok(()) } - pub async fn list_secret_defining(ctx: &DalContext) -> SchemaVariantResult> { - let rows = ctx - .txns() + pub async fn get_color(&self, ctx: &DalContext) -> SchemaVariantResult> { + let color_prop_id = + Prop::find_prop_id_by_path(ctx, self.id, &PropPath::new(["root", "si", "color"])) + .await?; + + let prototype_id = Prop::prototype_id(ctx, color_prop_id).await?; + + match AttributePrototypeArgument::list_ids_for_prototype(ctx, prototype_id) .await? - .pg() - .query( - SECRET_DEFINING_SCHEMA_VARIANTS, - &[ctx.tenancy(), ctx.visibility()], - ) - .await?; - Ok(objects_from_rows(rows)?) + .first() + { + None => Ok(None), + Some(apa_id) => { + match AttributePrototypeArgument::static_value_by_id(ctx, *apa_id).await? { + Some(static_value) => { + let color: String = serde_json::from_value(static_value.value)?; + Ok(Some(color)) + } + None => Ok(None), + } + } + } + } + + /// Configures the "default" value for the + /// [`AttributePrototypeArgument`](crate::attribute::prototype::argument::AttributePrototypeArgument) + /// for the /root/si/color [`Prop`](crate::Prop). 
If a prototype already + /// exists pointing to a function other than + /// [`IntrinsicFunc::SetString`](`crate::func::intrinsics::IntrinsicFunc::SetString`) + /// we will remove that edge and replace it with one pointing to + /// `SetString`. + pub async fn set_color( + &self, + ctx: &DalContext, + color: impl AsRef, + ) -> SchemaVariantResult<()> { + let color_prop_id = + Prop::find_prop_id_by_path(ctx, self.id, &PropPath::new(["root", "si", "color"])) + .await?; + + Prop::set_default_value(ctx, color_prop_id, color.as_ref()).await?; + + Ok(()) + } + + /// Configures the "default" value for the + /// [`AttributePrototypeArgument`](crate::attribute::prototype::argument::AttributePrototypeArgument) + /// for the /root/si/type [`Prop`](crate::Prop). If a prototype already + /// exists pointing to a function other than + /// [`IntrinsicFunc::SetString`](`crate::func::intrinsics::IntrinsicFunc::SetString`) + /// we will remove that edge and replace it with one pointing to + /// `SetString`. + pub async fn set_type( + &self, + ctx: &DalContext, + component_type: impl AsRef, + ) -> SchemaVariantResult<()> { + let type_prop_id = + Prop::find_prop_id_by_path(ctx, self.id, &PropPath::new(["root", "si", "type"])) + .await?; + + Prop::set_default_value(ctx, type_prop_id, component_type.as_ref()).await?; + + Ok(()) } - /// Find all [`Func`](crate::Func) objects connected to this schema variant in any way. Only - /// finds funcs connected at the schema variant context, ignoring any funcs connected to - /// directly to components. Ignores any functions that have no code (these are typically - /// intrinsics) - pub async fn all_funcs( + /// This method finds a [`leaf`](crate::schema::variant::leaves)'s entry + /// [`Prop`](crate::Prop) given a [`LeafKind`](crate::schema::variant::leaves::LeafKind). + pub async fn find_leaf_item_prop( ctx: &DalContext, schema_variant_id: SchemaVariantId, - ) -> SchemaVariantResult> { - let rows = ctx - .txns() - .await? 
- .pg() - .query( - ALL_FUNCS, - &[ctx.tenancy(), ctx.visibility(), &schema_variant_id], - ) - .await?; + leaf_kind: LeafKind, + ) -> SchemaVariantResult { + let (leaf_map_prop_name, leaf_item_prop_name) = leaf_kind.prop_names(); - Ok(objects_from_rows(rows)?) + Ok(Prop::find_prop_id_by_path( + ctx, + schema_variant_id, + &PropPath::new(["root", leaf_map_prop_name, leaf_item_prop_name]), + ) + .await?) } pub async fn upsert_leaf_function( @@ -596,101 +661,106 @@ impl SchemaVariant { leaf_kind: LeafKind, input_locations: &[LeafInputLocation], func: &Func, - ) -> SchemaVariantResult { - let leaf_prop = + ) -> SchemaVariantResult { + let leaf_item_prop_id = SchemaVariant::find_leaf_item_prop(ctx, schema_variant_id, leaf_kind).await?; - let context = match component_id { - Some(component_id) => AttributeContextBuilder::new() - .set_prop_id(*leaf_prop.id()) - .set_component_id(component_id) - .to_context()?, - None => AttributeContextBuilder::new() - .set_prop_id(*leaf_prop.id()) - .to_context()?, - }; + if component_id.is_some() { + unimplemented!("component context not supported for leaf functions"); + } + + let key = Some(func.name.to_owned()); - let key = Some(func.name().to_string()); - let mut existing_args = FuncArgument::list_for_func(ctx, *func.id()).await?; + let mut existing_args = FuncArgument::list_for_func(ctx, func.id).await?; let mut inputs = vec![]; for location in input_locations { let arg_name = location.arg_name(); - let arg = match existing_args.iter().find(|arg| arg.name() == arg_name) { + let arg = match existing_args + .iter() + .find(|arg| arg.name.as_str() == arg_name) + { Some(existing_arg) => existing_arg.clone(), None => { - FuncArgument::new(ctx, arg_name, location.arg_kind(), None, *func.id()).await? + FuncArgument::new(ctx, arg_name, location.arg_kind(), None, func.id).await? } }; inputs.push(LeafInput { location: *location, - func_argument_id: *arg.id(), + func_argument_id: arg.id, }); } - for mut existing_arg in existing_args.drain(..) 
{ + for existing_arg in existing_args.drain(..) { if !inputs.iter().any( |&LeafInput { func_argument_id, .. - }| func_argument_id == *existing_arg.id(), + }| func_argument_id == existing_arg.id, ) { - existing_arg.delete_by_id(ctx).await?; + FuncArgument::remove(ctx, existing_arg.id).await?; } } Ok( - match AttributePrototype::find_for_context_and_key(ctx, context, &key) - .await? - .pop() - { - Some(existing_proto) => { - let mut apas = AttributePrototypeArgument::list_for_attribute_prototype( - ctx, - *existing_proto.id(), - ) - .await?; + match AttributePrototype::find_for_prop(ctx, leaf_item_prop_id, &key).await? { + Some(existing_proto_id) => { + let apas = + AttributePrototypeArgument::list_ids_for_prototype(ctx, existing_proto_id) + .await?; + let mut apa_func_arg_ids = HashMap::new(); for input in &inputs { - if !apas - .iter() - .any(|apa| apa.func_argument_id() == input.func_argument_id) - { - let input_internal_provider = - Self::find_root_child_implicit_internal_provider( - ctx, - schema_variant_id, - input.location.into(), - ) - .await?; - - AttributePrototypeArgument::new_for_intra_component( + let mut exisiting_func_arg = None; + for apa_id in &apas { + let func_arg_id = + AttributePrototypeArgument::func_argument_id_by_id(ctx, *apa_id) + .await?; + apa_func_arg_ids.insert(apa_id, func_arg_id); + + if func_arg_id == input.func_argument_id { + exisiting_func_arg = Some(func_arg_id); + } + } + + if exisiting_func_arg.is_none() { + let input_prop_id = Self::find_root_child_prop_id( + ctx, + schema_variant_id, + input.location.into(), + ) + .await?; + + info!( + "adding root child func arg: {:?}, {:?}", + input_prop_id, input.location + ); + + let new_apa = AttributePrototypeArgument::new( ctx, - *existing_proto.id(), + existing_proto_id, input.func_argument_id, - *input_internal_provider.id(), ) .await?; + new_apa.set_value_from_prop_id(ctx, input_prop_id).await?; } } - for mut apa in apas.drain(..) 
{ + for (apa_id, func_arg_id) in apa_func_arg_ids { if !inputs.iter().any( |&LeafInput { func_argument_id, .. - }| { - func_argument_id == apa.func_argument_id() - }, + }| { func_argument_id == func_arg_id }, ) { - apa.delete_by_id(ctx).await?; + AttributePrototypeArgument::remove(ctx, *apa_id).await?; } } - existing_proto + existing_proto_id } None => { let (_, new_proto) = SchemaVariant::add_leaf( ctx, - *func.id(), + func.id, schema_variant_id, component_id, leaf_kind, @@ -704,146 +774,156 @@ impl SchemaVariant { ) } - /// This method finds all the functions for a particular - /// ['LeafKind'](crate::schema::variant::leaves::LeafKind) for this SchemaVariant. For example, - /// it can find all Qualification functions for the variant. - pub async fn find_leaf_item_functions( + pub async fn list_external_providers_and_explicit_internal_providers( ctx: &DalContext, schema_variant_id: SchemaVariantId, - leaf_kind: LeafKind, - ) -> SchemaVariantResult> { - let leaf_item_prop = Self::find_leaf_item_prop(ctx, schema_variant_id, leaf_kind).await?; - let backend_response_type: FuncBackendResponseType = leaf_kind.into(); - - let context = AttributeContextBuilder::new() - .set_prop_id(*leaf_item_prop.id()) - .to_context()?; - - Ok( - AttributePrototype::list_prototype_funcs_by_context_and_backend_response_type( - ctx, - context, - backend_response_type, - ) - .await?, - ) - } + ) -> SchemaVariantResult<(Vec, Vec)> { + let workspace_snapshot = ctx.workspace_snapshot()?.read().await; + + // Look for all external and explicit internal providers that the schema variant uses. + let maybe_provider_indices = workspace_snapshot.outgoing_targets_for_edge_weight_kind( + schema_variant_id, + EdgeWeightKindDiscriminants::Provider, + )?; + + // Collect the external and the explicit internal providers separately. 
+ let mut external_provider_hashes: Vec<(ExternalProviderId, ContentHash)> = Vec::new(); + let mut explicit_internal_provider_hashes: Vec<(InternalProviderId, ContentHash)> = + Vec::new(); + + for maybe_provider_index in maybe_provider_indices { + let node_weight = workspace_snapshot.get_node_weight(maybe_provider_index)?; + if let NodeWeight::Content(content_node_weight) = node_weight { + match content_node_weight.content_address() { + ContentAddress::ExternalProvider(external_provider_content_hash) => { + external_provider_hashes.push(( + content_node_weight.id().into(), + external_provider_content_hash, + )); + } + ContentAddress::InternalProvider(internal_provider_content_hash) => { + explicit_internal_provider_hashes.push(( + content_node_weight.id().into(), + internal_provider_content_hash, + )); + } + _ => {} + } + } + } - /// This method finds a [`leaf`](crate::schema::variant::leaves)'s entry - /// [`Prop`](crate::Prop) given a [`LeafKind`](crate::schema::variant::leaves::LeafKind). - pub async fn find_leaf_item_prop( - ctx: &DalContext, - schema_variant_id: SchemaVariantId, - leaf_kind: LeafKind, - ) -> SchemaVariantResult { - let (leaf_map_prop_name, leaf_item_prop_name) = leaf_kind.prop_names(); - let row = ctx - .txns() - .await? - .pg() - .query_one( - FIND_LEAF_ITEM_PROP, - &[ - ctx.tenancy(), - ctx.visibility(), - &schema_variant_id, - &leaf_map_prop_name, - &leaf_item_prop_name, - ], - ) + // Grab all the contents in bulk from the content store. + let external_provider_hashes_only: Vec = + external_provider_hashes.iter().map(|(_, h)| *h).collect(); + let external_provider_content_map: HashMap = ctx + .content_store() + .lock() + .await + .get_bulk(external_provider_hashes_only.as_slice()) .await?; - Ok(object_from_row(row)?) - } - - /// Find the implicit [`InternalProvider`](crate::InternalProvider) corresponding to a provided, - /// [`direct child`](crate::RootPropChild) of [`RootProp`](crate::RootProp). 
- pub async fn find_root_child_implicit_internal_provider( - ctx: &DalContext, - schema_variant_id: SchemaVariantId, - root_prop_child: RootPropChild, - ) -> SchemaVariantResult { - let row = ctx - .txns() - .await? - .pg() - .query_one( - FIND_ROOT_CHILD_IMPLICIT_INTERNAL_PROVIDER, - &[ - ctx.tenancy(), - ctx.visibility(), - &schema_variant_id, - &root_prop_child.as_str(), - ], - ) + let explicit_internal_provider_hashes_only: Vec = + explicit_internal_provider_hashes + .iter() + .map(|(_, h)| *h) + .collect(); + let internal_provider_content_map: HashMap = ctx + .content_store() + .lock() + .await + .get_bulk(explicit_internal_provider_hashes_only.as_slice()) .await?; - Ok(object_from_row(row)?) - } - /// Call [`Self::find_root_prop`] with the [`SchemaVariantId`](SchemaVariant) off - /// [`self`](SchemaVariant). - pub async fn root_prop(&self, ctx: &DalContext) -> SchemaVariantResult> { - Self::find_root_prop(ctx, self.id).await - } + // Assemble all external providers. + let mut external_providers = Vec::with_capacity(external_provider_hashes.len()); + for (external_provider_id, external_provider_hash) in external_provider_hashes { + let external_provider_content = external_provider_content_map + .get(&external_provider_hash) + .ok_or(WorkspaceSnapshotError::MissingContentFromStore( + external_provider_id.into(), + ))?; + + // NOTE(nick,jacob,zack): if we had a v2, then there would be migration logic here. + let ExternalProviderContent::V1(external_provider_content_inner) = + external_provider_content; + + external_providers.push(ExternalProvider::assemble( + external_provider_id, + external_provider_content_inner.to_owned(), + )); + } - /// Find the [`Prop`](crate::Prop) corresponding to "/root" for a given - /// [`SchemaVariantId`](SchemaVariant). - pub async fn find_root_prop( - ctx: &DalContext, - schema_variant_id: SchemaVariantId, - ) -> SchemaVariantResult> { - let maybe_row = ctx - .txns() - .await? 
- .pg() - .query_opt( - FIND_ROOT_PROP, - &[ctx.tenancy(), ctx.visibility(), &schema_variant_id], - ) - .await?; - Ok(option_object_from_row(maybe_row)?) + // Assemble all explicit internal providers. + let mut explicit_internal_providers = + Vec::with_capacity(explicit_internal_provider_hashes.len()); + for (internal_provider_id, internal_provider_hash) in explicit_internal_provider_hashes { + let internal_provider_content = internal_provider_content_map + .get(&internal_provider_hash) + .ok_or(WorkspaceSnapshotError::MissingContentFromStore( + internal_provider_id.into(), + ))?; + + // NOTE(nick,jacob,zack): if we had a v2, then there would be migration logic here. + let InternalProviderContent::V1(internal_provider_content_inner) = + internal_provider_content; + + explicit_internal_providers.push(InternalProvider::assemble( + internal_provider_id, + internal_provider_content_inner.to_owned(), + )); + } + + Ok((external_providers, explicit_internal_providers)) } - /// Find the [`SchemaVariant`] for a given [`PropId`](crate::Prop) that resides _anywhere_ in a - /// [`Prop`](crate::Prop) tree. - /// - /// For instance, if you have a [`PropId`](crate::Prop) corresponding to "/root/domain/poop" - /// and want to know what [`SchemaVariant`]'s [`Prop`](crate::Prop) tree it resides in, use this - /// method to find out. - pub async fn find_for_prop( + pub async fn schema( ctx: &DalContext, - prop_id: PropId, - ) -> SchemaVariantResult> { - // FIXME(nick): this is expensive and should be one query. Please WON'T SOMEBODY THINK OF - // THE CPU AND THE DATABASE??? OHHHHHHH THE HUMANITY!!!!!!! Oh well, anyway. - if let Some(root_prop) = Prop::find_root_prop_for_prop(ctx, prop_id).await? { - for schema_variant in Self::list(ctx).await? 
{ - if let Some(populated_root_prop_id) = schema_variant.root_prop_id { - if *root_prop.id() == populated_root_prop_id { - return Ok(Some(schema_variant)); + schema_variant_id: SchemaVariantId, + ) -> SchemaVariantResult { + let schema_id = { + let workspace_snapshot = ctx.workspace_snapshot()?.read().await; + let maybe_schema_indices = workspace_snapshot.incoming_sources_for_edge_weight_kind( + schema_variant_id, + EdgeWeightKindDiscriminants::Use, + )?; + + let mut schema_id: Option = None; + for index in maybe_schema_indices { + if let NodeWeight::Content(content) = workspace_snapshot.get_node_weight(index)? { + let content_hash_discriminants: ContentAddressDiscriminants = + content.content_address().into(); + if let ContentAddressDiscriminants::Schema = content_hash_discriminants { + // TODO(nick): consider creating a new edge weight kind to make this easier. + // We also should use a proper error here. + schema_id = match schema_id { + None => Some(content.id().into()), + Some(_already_found_schema_id) => { + panic!("already found a schema") + } + }; } } } - } - Ok(None) - } + schema_id.ok_or(SchemaVariantError::SchemaNotFound(schema_variant_id))? + }; - /// Calls [`Self::find_prop_in_tree`] using the ID off of [`self`](SchemaVariant). - pub async fn find_prop(&self, ctx: &DalContext, path: &[&str]) -> SchemaVariantResult { - Self::find_prop_in_tree(ctx, self.id, path).await + Ok(Schema::get_by_id(ctx, schema_id).await?) } - /// Find the [`Prop`] in a tree underneath our [`RootProp`] with a given path. 
- pub async fn find_prop_in_tree( + pub async fn list_auth_func_ids_for_schema_variant( ctx: &DalContext, - schema_variant_id: SchemaVariantId, - path: &[&str], - ) -> SchemaVariantResult { - match Prop::find_prop_by_path(ctx, schema_variant_id, &PropPath::new(path)).await { - Ok(prop) => Ok(prop), - Err(PropError::NotFoundAtPath(path, visiblity)) => Err( - SchemaVariantError::PropNotFoundAtPath(schema_variant_id, path, visiblity), - ), - Err(err) => Err(err)?, + variant_id: SchemaVariantId, + ) -> SchemaVariantResult> { + let workspace_snapshot = ctx.workspace_snapshot()?.read().await; + + let mut auth_funcs = vec![]; + + for node_id in workspace_snapshot.outgoing_targets_for_edge_weight_kind( + variant_id, + EdgeWeightKindDiscriminants::AuthenticationPrototype, + )? { + auth_funcs.push(workspace_snapshot.get_node_weight(node_id)?.id().into()) } + + Ok(auth_funcs) } } diff --git a/lib/dal/src/schema/variant/definition.rs b/lib/dal/src/schema/variant/definition.rs index ef403159fe..9aa6c9baf0 100644 --- a/lib/dal/src/schema/variant/definition.rs +++ b/lib/dal/src/schema/variant/definition.rs @@ -6,359 +6,263 @@ use serde::{Deserialize, Serialize}; use serde_json::Value; +use si_pkg::{SchemaSpec, SchemaSpecData, SchemaVariantSpecData, SiPropFuncSpecKind}; use std::collections::HashMap; -use telemetry::prelude::*; use thiserror::Error; -use url::ParseError; - -use crate::pkg::{get_component_type, PkgError}; -use crate::prop::PropPath; -use crate::schema::variant::{SchemaVariantError, SchemaVariantResult}; -use crate::{ - component::ComponentKind, impl_standard_model, pk, property_editor::schema::WidgetKind, - standard_model, standard_model_accessor, Component, ComponentError, ComponentType, DalContext, - FuncId, HistoryEventError, NatsError, PgError, PropId, PropKind, Schema, SchemaId, - SchemaVariant, SchemaVariantId, SocketArity, StandardModel, StandardModelError, Tenancy, - Timestamp, TransactionsError, Visibility, -}; -use crate::{ChangeSetPk, WsEvent, 
WsEventResult, WsPayload}; -use si_pkg::{ - AttrFuncInputSpec, MapKeyFuncSpec, PropSpec, SchemaSpec, SchemaSpecData, SchemaVariantSpec, - SchemaVariantSpecData, SiPropFuncSpec, SiPropFuncSpecKind, SocketSpec, SocketSpecArity, - SocketSpecData, SocketSpecKind, SpecError, -}; - -const DEFS_FOR_DEFAULT_VARIANTS: &str = include_str!("../../queries/defs_for_default_variants.sql"); + +use crate::property_editor::schema::WidgetKind; +use crate::schema::variant::root_prop::component_type::ComponentType; +use crate::schema::variant::DEFAULT_SCHEMA_VARIANT_COLOR; +use crate::{pk, PropKind}; +use crate::{ComponentKind, ProviderArity}; #[remain::sorted] #[derive(Error, Debug)] pub enum SchemaVariantDefinitionError { - #[error(transparent)] - Component(#[from] Box), - #[error("Could not check for default variant: {0}")] - CouldNotCheckForDefaultVariant(String), - #[error("Could not get ui menu for schema: {0}")] - CouldNotGetUiMenu(SchemaId), - #[error("error decoding code_base64: {0}")] - Decode(#[from] base64::DecodeError), - #[error("default variant {0} not found")] + // #[error("Could not check for default variant: {0}")] + // CouldNotCheckForDefaultVariant(String), + // #[error("Could not get ui menu for schema: {0}")] + // CouldNotGetUiMenu(SchemaId), + // #[error("error decoding code_base64: {0}")] + // Decode(#[from] base64::DecodeError), + #[error("default variant not found: {0}")] DefaultVariantNotFound(String), - #[error("history event error: {0}")] - HistoryEvent(#[from] HistoryEventError), - #[error("{0} is not a valid hex color string")] - InvalidHexColor(String), + // #[error("history event error: {0}")] + // HistoryEvent(#[from] HistoryEventError), + // #[error("{0} is not a valid hex color string")] + // InvalidHexColor(String), #[error("schema spec has more than one variant, which we do not yet support")] MoreThanOneVariant, - #[error("nats txn error: {0}")] - Nats(#[from] NatsError), + // #[error("nats txn error: {0}")] + // Nats(#[from] NatsError), 
#[error("schema spec has no variants")] NoVariants, - #[error("pg error: {0}")] - Pg(#[from] PgError), - #[error("pkg error: {0}")] - Pkg(#[from] Box), - #[error(transparent)] - SchemaVariant(#[from] Box), - #[error("error serializing/deserializing json: {0}")] - SerdeJson(#[from] serde_json::Error), - #[error("spec error: {0}")] - Spec(#[from] SpecError), - #[error("standard model error: {0}")] - StandardModelError(#[from] StandardModelError), - #[error("transactions error: {0}")] - Transactions(#[from] TransactionsError), - #[error("url parse error: {0}")] - Url(#[from] ParseError), + // #[error("pg error: {0}")] + // Pg(#[from] PgError), + // #[error("pkg error: {0}")] + // Pkg(#[from] Box), + // #[error(transparent)] + // SchemaVariant(#[from] Box), + // #[error("error serializing/deserializing json: {0}")] + // SerdeJson(#[from] serde_json::Error), + // #[error("spec error: {0}")] + // Spec(#[from] SpecError), + // #[error("standard model error: {0}")] + // StandardModelError(#[from] StandardModelError), + // #[error("transactions error: {0}")] + // Transactions(#[from] TransactionsError), + // #[error("url parse error: {0}")] + // Url(#[from] ParseError), } pub type SchemaVariantDefinitionResult = Result; +// +// /// A cache of [`PropIds`](crate::Prop) where the _key_ is a tuple corresponding to the +// /// [`Prop`](crate::Prop) name and the _parent_ [`PropId`](crate::Prop) who's child is the +// /// [`PropId`](crate::Prop) in the _value_ of the entry. +// /// +// /// It is recommended to start with the [`RootProp`](crate::RootProp) in order to descend into the +// /// cache. +// #[derive(Debug, Clone)] +// pub struct PropCache(HashMap<(String, PropId), PropId>); +// +// impl PropCache { +// pub fn new() -> Self { +// Self(HashMap::new()) +// } +// +// /// Attempts to retrieve the [`PropId`](crate::Prop) value for a given [`Prop`](crate::Prop) +// /// name and parent [`PropId`](crate::Prop) key tuple. An error is returned if nothing is found. 
+// pub fn get( +// &self, +// prop_name: impl AsRef, +// parent_prop_id: PropId, +// ) -> SchemaVariantResult { +// // NOTE(nick): the string handling could probably be better here. +// let prop_name = prop_name.as_ref().to_string(); +// let prop_id = *self.0.get(&(prop_name.clone(), parent_prop_id)).ok_or( +// SchemaVariantError::PropNotFoundInCache(prop_name, parent_prop_id), +// )?; +// Ok(prop_id) +// } +// +// /// Insert the [`PropId`](crate::Prop) into [`self`](Self). The returned `option` from the +// /// underlying method is ignored. +// pub fn insert(&mut self, key: (String, PropId), value: PropId) { +// self.0.insert(key, value); +// } +// } +// +// impl Default for PropCache { +// fn default() -> Self { +// Self::new() +// } +// } +// +// pk!(SchemaVariantDefinitionPk); -/// A cache of [`PropIds`](crate::Prop) where the _key_ is a tuple corresponding to the -/// [`Prop`](crate::Prop) name and the _parent_ [`PropId`](crate::Prop) who's child is the -/// [`PropId`](crate::Prop) in the _value_ of the entry. -/// -/// It is recommended to start with the [`RootProp`](crate::RootProp) in order to descend into the -/// cache. -#[derive(Debug, Clone)] -pub struct PropCache(HashMap<(String, PropId), PropId>); - -impl PropCache { - pub fn new() -> Self { - Self(HashMap::new()) - } - - /// Attempts to retrieve the [`PropId`](crate::Prop) value for a given [`Prop`](crate::Prop) - /// name and parent [`PropId`](crate::Prop) key tuple. An error is returned if nothing is found. - pub fn get( - &self, - prop_name: impl AsRef, - parent_prop_id: PropId, - ) -> SchemaVariantResult { - // NOTE(nick): the string handling could probably be better here. - let prop_name = prop_name.as_ref().to_string(); - let prop_id = *self.0.get(&(prop_name.clone(), parent_prop_id)).ok_or( - SchemaVariantError::PropNotFoundInCache(prop_name, parent_prop_id), - )?; - Ok(prop_id) - } - - /// Insert the [`PropId`](crate::Prop) into [`self`](Self). 
The returned `option` from the - /// underlying method is ignored. - pub fn insert(&mut self, key: (String, PropId), value: PropId) { - self.0.insert(key, value); - } -} - -impl Default for PropCache { - fn default() -> Self { - Self::new() - } -} - -pk!(SchemaVariantDefinitionPk); pk!(SchemaVariantDefinitionId); -#[derive(Clone, Debug, PartialEq, Eq, Deserialize, Serialize)] -pub struct SchemaVariantDefinition { - pk: SchemaVariantDefinitionPk, - id: SchemaVariantDefinitionId, - #[serde(flatten)] - tenancy: Tenancy, - #[serde(flatten)] - timestamp: Timestamp, - #[serde(flatten)] - visibility: Visibility, - - /// Name for this variant. Actually, this is the name for this [`Schema`](crate::Schema), we're - /// punting on the issue of multiple variants for the moment. - name: String, - /// Override for the UI name for this schema - menu_name: Option, - /// The category this schema variant belongs to - category: String, - /// The color for the component on the component diagram as a hex string - color: String, - component_kind: ComponentKind, - component_type: ComponentType, - link: Option, - func_id: FuncId, - description: Option, - schema_variant_id: Option, -} - -impl_standard_model! { - model: SchemaVariantDefinition, - pk: SchemaVariantDefinitionPk, - id: SchemaVariantDefinitionId, - table_name: "schema_variant_definitions", - history_event_label_base: "schema_variant_definition", - history_event_message_name: "Schema Variant Definition", -} - -impl SchemaVariantDefinition { - #[allow(clippy::too_many_arguments)] - pub async fn new( - ctx: &DalContext, - name: String, - menu_name: Option, - category: String, - link: Option, - color: String, - component_kind: ComponentKind, - description: Option, - func_id: FuncId, - ) -> SchemaVariantDefinitionResult { - let row = ctx - .txns() - .await? 
- .pg() - .query_one( - "SELECT object FROM schema_variant_definition_create_v1( - $1, - $2, - $3, - $4, - $5, - $6, - $7, - $8, - $9, - $10 - )", - &[ - ctx.tenancy(), - ctx.visibility(), - &name, - &menu_name, - &category, - &link, - &color, - &component_kind.as_ref(), - &func_id, - &description, - ], - ) - .await?; - - Ok(standard_model::finish_create_from_row(ctx, row).await?) - } - - pub async fn list_for_default_variants( - ctx: &DalContext, - ) -> SchemaVariantDefinitionResult> { - let rows = ctx - .txns() - .await? - .pg() - .query( - DEFS_FOR_DEFAULT_VARIANTS, - &[ctx.tenancy(), ctx.visibility()], - ) - .await?; - - Ok(standard_model::objects_from_rows(rows)?) - } - - pub async fn get_by_func_id( - ctx: &DalContext, - func_id: FuncId, - ) -> SchemaVariantDefinitionResult> { - let row = ctx - .txns() - .await? - .pg() - .query_opt( - "SELECT row_to_json(svd.*) AS object - FROM schema_variant_definitions_v1($1, $2) as svd - WHERE func_id = $3", - &[ctx.tenancy(), ctx.visibility(), &func_id], - ) - .await?; - - Ok(standard_model::object_option_from_row_option(row)?) - } - - pub async fn get_by_schema_variant_id( - ctx: &DalContext, - schema_variant_id: &SchemaVariantId, - ) -> SchemaVariantDefinitionResult> { - let row = ctx - .txns() - .await? - .pg() - .query_opt( - "SELECT row_to_json(svd.*) AS object - FROM schema_variant_definitions_v1($1, $2) as svd - WHERE schema_variant_id = $3", - &[ctx.tenancy(), ctx.visibility(), schema_variant_id], - ) - .await?; - - Ok(standard_model::object_option_from_row_option(row)?) - } - - pub async fn list_components( - &self, - ctx: &DalContext, - ) -> SchemaVariantDefinitionResult> { - Ok(if let Some(variant_id) = self.schema_variant_id { - Component::list_for_schema_variant(ctx, variant_id) - .await - .map_err(Box::new)? 
- } else { - vec![] - }) - } - - standard_model_accessor!(name, String, SchemaVariantDefinitionResult); - standard_model_accessor!(menu_name, Option, SchemaVariantDefinitionResult); - standard_model_accessor!(category, String, SchemaVariantDefinitionResult); - standard_model_accessor!(color, String, SchemaVariantDefinitionResult); - standard_model_accessor!( - component_kind, - Enum(ComponentKind), - SchemaVariantDefinitionResult - ); - standard_model_accessor!(link, Option, SchemaVariantDefinitionResult); - standard_model_accessor!(description, Option, SchemaVariantDefinitionResult); - standard_model_accessor!(func_id, Pk(FuncId), SchemaVariantDefinitionResult); - standard_model_accessor!( - schema_variant_id, - Option, - SchemaVariantDefinitionResult - ); - standard_model_accessor!( - component_type, - Enum(ComponentType), - SchemaVariantDefinitionResult - ); -} - -#[derive(Clone, Deserialize, Serialize, Debug, PartialEq, Eq)] -#[serde(rename_all = "camelCase")] -pub struct SchemaVariantDefinitionCreatedPayload { - schema_variant_definition_id: SchemaVariantDefinitionId, - change_set_pk: ChangeSetPk, -} - -#[derive(Clone, Deserialize, Serialize, Debug, PartialEq, Eq)] -#[serde(rename_all = "camelCase")] -pub struct SchemaVariantDefinitionClonedPayload { - schema_variant_definition_id: SchemaVariantDefinitionId, - change_set_pk: ChangeSetPk, -} - -#[derive(Clone, Deserialize, Serialize, Debug, PartialEq, Eq)] -#[serde(rename_all = "camelCase")] -pub struct SchemaVariantDefinitionSavedPayload { - schema_variant_definition_id: SchemaVariantDefinitionId, - change_set_pk: ChangeSetPk, -} - -impl WsEvent { - pub async fn schema_variant_definition_created( - ctx: &DalContext, - schema_variant_definition_id: SchemaVariantDefinitionId, - ) -> WsEventResult { - WsEvent::new( - ctx, - WsPayload::SchemaVariantDefinitionCreated(SchemaVariantDefinitionCreatedPayload { - schema_variant_definition_id, - change_set_pk: ctx.visibility().change_set_pk, - }), - ) - .await - } - - 
pub async fn schema_variant_definition_cloned( - ctx: &DalContext, - schema_variant_definition_id: SchemaVariantDefinitionId, - ) -> WsEventResult { - WsEvent::new( - ctx, - WsPayload::SchemaVariantDefinitionCloned(SchemaVariantDefinitionClonedPayload { - schema_variant_definition_id, - change_set_pk: ctx.visibility().change_set_pk, - }), - ) - .await - } - - pub async fn schema_variant_definition_saved( - ctx: &DalContext, - schema_variant_definition_id: SchemaVariantDefinitionId, - ) -> WsEventResult { - WsEvent::new( - ctx, - WsPayload::SchemaVariantDefinitionSaved(SchemaVariantDefinitionSavedPayload { - schema_variant_definition_id, - change_set_pk: ctx.visibility().change_set_pk, - }), - ) - .await - } -} - +// #[derive(Clone, Debug, PartialEq, Eq, Deserialize, Serialize)] +// pub struct SchemaVariantDefinition { +// pk: SchemaVariantDefinitionPk, +// id: SchemaVariantDefinitionId, +// #[serde(flatten)] +// tenancy: Tenancy, +// #[serde(flatten)] +// timestamp: Timestamp, +// #[serde(flatten)] +// visibility: Visibility, +// +// /// Name for this variant. Actually, this is the name for this [`Schema`](crate::Schema), we're +// /// punting on the issue of multiple variants for the moment. +// name: String, +// /// Override for the UI name for this schema +// menu_name: Option, +// /// The category this schema variant belongs to +// category: String, +// /// The color for the component on the component diagram as a hex string +// color: String, +// component_kind: ComponentKind, +// component_type: ComponentType, +// link: Option, +// func_id: FuncId, +// description: Option, +// schema_variant_id: Option, +// } + +// impl_standard_model! 
{ +// model: SchemaVariantDefinition, +// pk: SchemaVariantDefinitionPk, +// id: SchemaVariantDefinitionId, +// table_name: "schema_variant_definitions", +// history_event_label_base: "schema_variant_definition", +// history_event_message_name: "Schema Variant Definition", +// } + +// impl SchemaVariantDefinition { +// #[allow(clippy::too_many_arguments)] +// pub async fn new( +// ctx: &DalContext, +// name: String, +// menu_name: Option, +// category: String, +// link: Option, +// color: String, +// component_kind: ComponentKind, +// description: Option, +// func_id: FuncId, +// ) -> SchemaVariantDefinitionResult { +// let row = ctx +// .txns() +// .await? +// .pg() +// .query_one( +// "SELECT object FROM schema_variant_definition_create_v1( +// $1, +// $2, +// $3, +// $4, +// $5, +// $6, +// $7, +// $8, +// $9, +// $10 +// )", +// &[ +// ctx.tenancy(), +// ctx.visibility(), +// &name, +// &menu_name, +// &category, +// &link, +// &color, +// &component_kind.as_ref(), +// &func_id, +// &description, +// ], +// ) +// .await?; + +// Ok(standard_model::finish_create_from_row(ctx, row).await?) +// } + +// pub async fn get_by_func_id( +// ctx: &DalContext, +// func_id: FuncId, +// ) -> SchemaVariantDefinitionResult> { +// let row = ctx +// .txns() +// .await? +// .pg() +// .query_opt( +// "SELECT row_to_json(svd.*) AS object +// FROM schema_variant_definitions_v1($1, $2) as svd +// WHERE func_id = $3", +// &[ctx.tenancy(), ctx.visibility(), &func_id], +// ) +// .await?; + +// Ok(standard_model::object_option_from_row_option(row)?) +// } + +// pub async fn get_by_schema_variant_id( +// ctx: &DalContext, +// schema_variant_id: &SchemaVariantId, +// ) -> SchemaVariantDefinitionResult> { +// let row = ctx +// .txns() +// .await? 
+// .pg() +// .query_opt( +// "SELECT row_to_json(svd.*) AS object +// FROM schema_variant_definitions_v1($1, $2) as svd +// WHERE schema_variant_id = $3", +// &[ctx.tenancy(), ctx.visibility(), schema_variant_id], +// ) +// .await?; + +// Ok(standard_model::object_option_from_row_option(row)?) +// } + +// pub async fn list_components( +// &self, +// ctx: &DalContext, +// ) -> SchemaVariantDefinitionResult> { +// Ok(if let Some(variant_id) = self.schema_variant_id { +// Component::list_for_schema_variant(ctx, variant_id) +// .await +// .map_err(Box::new)? +// } else { +// vec![] +// }) +// } + +// standard_model_accessor!(name, String, SchemaVariantDefinitionResult); +// standard_model_accessor!(menu_name, Option, SchemaVariantDefinitionResult); +// standard_model_accessor!(category, String, SchemaVariantDefinitionResult); +// standard_model_accessor!(color, String, SchemaVariantDefinitionResult); +// standard_model_accessor!( +// component_kind, +// Enum(ComponentKind), +// SchemaVariantDefinitionResult +// ); +// standard_model_accessor!(link, Option, SchemaVariantDefinitionResult); +// standard_model_accessor!(description, Option, SchemaVariantDefinitionResult); +// standard_model_accessor!(func_id, Pk(FuncId), SchemaVariantDefinitionResult); +// standard_model_accessor!( +// schema_variant_id, +// Option, +// SchemaVariantDefinitionResult +// ); +// standard_model_accessor!( +// component_type, +// Enum(ComponentType), +// SchemaVariantDefinitionResult +// ); +// } +// #[derive(Clone, Debug, PartialEq, Eq, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] pub struct SchemaVariantDefinitionMetadataJson { @@ -380,102 +284,103 @@ pub struct SchemaVariantDefinitionMetadataJson { pub description: Option, } -impl SchemaVariantDefinitionMetadataJson { - pub fn to_spec(&self, variant: SchemaVariantSpec) -> SchemaVariantDefinitionResult { - let mut builder = SchemaSpec::builder(); - builder.name(&self.name); - let mut data_builder = SchemaSpecData::builder(); 
- data_builder.name(&self.name); - data_builder.category(&self.category); - if let Some(menu_name) = &self.menu_name { - data_builder.category_name(menu_name.as_str()); - } - builder.data(data_builder.build()?); - builder.variant(variant); - - Ok(builder.build()?) - } -} - -impl From for SchemaVariantDefinitionMetadataJson { - fn from(value: SchemaVariantDefinition) -> Self { - SchemaVariantDefinitionMetadataJson { - name: value.name, - menu_name: value.menu_name, - category: value.category, - color: value.color, - component_kind: value.component_kind, - component_type: value.component_type, - link: value.link, - description: value.description, - } - } -} - -impl SchemaVariantDefinitionMetadataJson { - #[allow(clippy::too_many_arguments)] - pub fn new( - name: &str, - menu_name: Option<&str>, - category: &str, - color: &str, - component_kind: ComponentKind, - link: Option<&str>, - description: Option<&str>, - component_type: ComponentType, - ) -> SchemaVariantDefinitionMetadataJson { - SchemaVariantDefinitionMetadataJson { - name: name.to_string(), - menu_name: menu_name.map(|s| s.to_string()), - category: category.to_string(), - color: color.to_string(), - component_kind, - component_type, - link: link.map(|l| l.to_string()), - description: description.map(|d| d.to_string()), - } - } - - pub async fn from_schema_and_variant( - ctx: &DalContext, - schema: &Schema, - variant: &SchemaVariant, - ) -> SchemaVariantDefinitionResult { - let (menu_name, category) = match schema.ui_menus(ctx).await { - Ok(ui_menus) => match ui_menus.get(0) { - Some(ui_menu) => ( - Some(ui_menu.name().to_string()), - ui_menu.category().to_string(), - ), - None => (None, "".to_string()), - }, - Err(_) => { - return Err(SchemaVariantDefinitionError::CouldNotGetUiMenu( - *schema.id(), - )); - } - }; - - Ok(SchemaVariantDefinitionMetadataJson { - name: schema.name().to_string(), - menu_name, - category, - color: variant - .color(ctx) - .await - .map_err(Box::new)? 
- .unwrap_or_else(|| "baddad".to_string()), - component_kind: *schema.component_kind(), - link: variant.link().map(|l| l.to_string()), - description: None, - component_type: get_component_type(ctx, variant) - .await - .map_err(Box::new)? - .into(), - }) - } -} - +// impl SchemaVariantDefinitionMetadataJson { +// pub fn to_spec(&self, variant: SchemaVariantSpec) -> SchemaVariantDefinitionResult { +// let mut builder = SchemaSpec::builder(); +// builder.name(&self.name); +// let mut data_builder = SchemaSpecData::builder(); +// data_builder.name(&self.name); +// data_builder.category(&self.category); +// if let Some(menu_name) = &self.menu_name { +// data_builder.category_name(menu_name.as_str()); +// } +// builder.data(data_builder.build()?); +// builder.variant(variant); + +// Ok(builder.build()?) +// } +// } + +// impl From for SchemaVariantDefinitionMetadataJson { +// fn from(value: SchemaVariantDefinition) -> Self { +// SchemaVariantDefinitionMetadataJson { +// name: value.name, +// menu_name: value.menu_name, +// category: value.category, +// color: value.color, +// component_kind: value.component_kind, +// component_type: value.component_type, +// link: value.link, +// description: value.description, +// } +// } +// } + +// impl SchemaVariantDefinitionMetadataJson { +// #[instrument(skip_all)] +// #[allow(clippy::too_many_arguments)] +// pub fn new( +// name: &str, +// menu_name: Option<&str>, +// category: &str, +// color: &str, +// component_kind: ComponentKind, +// link: Option<&str>, +// description: Option<&str>, +// component_type: ComponentType, +// ) -> SchemaVariantDefinitionMetadataJson { +// SchemaVariantDefinitionMetadataJson { +// name: name.to_string(), +// menu_name: menu_name.map(|s| s.to_string()), +// category: category.to_string(), +// color: color.to_string(), +// component_kind, +// component_type, +// link: link.map(|l| l.to_string()), +// description: description.map(|d| d.to_string()), +// } +// } + +// pub async fn 
from_schema_and_variant( +// ctx: &DalContext, +// schema: &Schema, +// variant: &SchemaVariant, +// ) -> SchemaVariantDefinitionResult { +// let (menu_name, category) = match schema.ui_menus(ctx).await { +// Ok(ui_menus) => match ui_menus.get(0) { +// Some(ui_menu) => ( +// Some(ui_menu.name().to_string()), +// ui_menu.category().to_string(), +// ), +// None => (None, "".to_string()), +// }, +// Err(_) => { +// return Err(SchemaVariantDefinitionError::CouldNotGetUiMenu( +// *schema.id(), +// )); +// } +// }; + +// Ok(SchemaVariantDefinitionMetadataJson { +// name: schema.name().to_string(), +// menu_name, +// category, +// color: variant +// .color(ctx) +// .await +// .map_err(Box::new)? +// .unwrap_or_else(|| "baddad".to_string()), +// component_kind: *schema.component_kind(), +// link: variant.link().map(|l| l.to_string()), +// description: None, +// component_type: get_component_type(ctx, variant) +// .await +// .map_err(Box::new)? +// .into(), +// }) +// } +// } +// /// The definition for a [`SchemaVariant`](crate::SchemaVariant)'s [`Prop`](crate::Prop) tree (and /// more in the future). 
#[derive(Clone, Debug, PartialEq, Eq, Deserialize, Serialize)] @@ -514,54 +419,54 @@ pub struct SchemaVariantDefinitionJson { } impl SchemaVariantDefinitionJson { - pub fn to_spec( - &self, - metadata: SchemaVariantDefinitionMetadataJson, - identity_func_unique_id: &str, - asset_func_spec_unique_id: &str, - name: &str, - ) -> SchemaVariantDefinitionResult { - let mut builder = SchemaVariantSpec::builder(); - builder.name(name); - - let mut data_builder = SchemaVariantSpecData::builder(); - - data_builder.name(name); - data_builder.color(metadata.color); - data_builder.component_type(metadata.component_type); - if let Some(link) = metadata.link { - data_builder.try_link(link.as_str())?; - } - - data_builder.func_unique_id(asset_func_spec_unique_id); - builder.data(data_builder.build()?); - - for si_prop_value_from in &self.si_prop_value_froms { - builder.si_prop_func(si_prop_value_from.to_spec(identity_func_unique_id)); - } - for prop in &self.props { - builder.domain_prop(prop.to_spec(identity_func_unique_id)?); - } - for prop in &self.secret_props { - builder.secret_prop(prop.to_spec(identity_func_unique_id)?); - } - if let Some(props) = &self.secret_definition { - for prop in props { - builder.secret_definition_prop(prop.to_spec(identity_func_unique_id)?); - } - } - for resource_prop in &self.resource_props { - builder.resource_value_prop(resource_prop.to_spec(identity_func_unique_id)?); - } - for input_socket in &self.input_sockets { - builder.socket(input_socket.to_spec(true, identity_func_unique_id)?); - } - for output_socket in &self.output_sockets { - builder.socket(output_socket.to_spec(false, identity_func_unique_id)?); - } - - Ok(builder.build()?) 
- } + // pub fn to_spec( + // &self, + // metadata: SchemaVariantDefinitionMetadataJson, + // identity_func_unique_id: &str, + // asset_func_spec_unique_id: &str, + // ) -> SchemaVariantDefinitionResult { + // let mut builder = SchemaVariantSpec::builder(); + // let name = "v0"; + // builder.name(name); + // + // let mut data_builder = SchemaVariantSpecData::builder(); + // + // data_builder.name(name); + // data_builder.color(metadata.color); + // data_builder.component_type(metadata.component_type); + // if let Some(link) = metadata.link { + // data_builder.try_link(link.as_str())?; + // } + // + // data_builder.func_unique_id(asset_func_spec_unique_id); + // builder.data(data_builder.build()?); + // + // for si_prop_value_from in &self.si_prop_value_froms { + // builder.si_prop_func(si_prop_value_from.to_spec(identity_func_unique_id)); + // } + // for prop in &self.props { + // builder.domain_prop(prop.to_spec(identity_func_unique_id)?); + // } + // for prop in &self.secret_props { + // builder.secret_prop(prop.to_spec(identity_func_unique_id)?); + // } + // if let Some(props) = &self.secret_definition { + // for prop in props { + // builder.secret_definition_prop(prop.to_spec(identity_func_unique_id)?); + // } + // } + // for resource_prop in &self.resource_props { + // builder.resource_value_prop(resource_prop.to_spec(identity_func_unique_id)?); + // } + // for input_socket in &self.input_sockets { + // builder.socket(input_socket.to_spec(true, identity_func_unique_id)?); + // } + // for output_socket in &self.output_sockets { + // builder.socket(output_socket.to_spec(false, identity_func_unique_id)?); + // } + // + // Ok(builder.build()?) 
+ // } pub fn metadata_from_spec( schema_spec: SchemaSpec, @@ -607,7 +512,7 @@ impl SchemaVariantDefinitionJson { color: variant_spec_data .color .to_owned() - .unwrap_or("000000".into()), + .unwrap_or(DEFAULT_SCHEMA_VARIANT_COLOR.into()), component_kind: ComponentKind::Standard, component_type: variant_spec_data.component_type.into(), link: variant_spec_data.link.as_ref().map(|l| l.to_string()), @@ -637,21 +542,21 @@ pub struct MapKeyFunc { pub value_from: Option, } -impl MapKeyFunc { - pub fn to_spec( - &self, - identity_func_unique_id: &str, - ) -> SchemaVariantDefinitionResult { - let mut builder = MapKeyFuncSpec::builder(); - builder.func_unique_id(identity_func_unique_id); - builder.key(&self.key); - if let Some(value_from) = &self.value_from { - builder.input(value_from.to_spec()); - }; - Ok(builder.build()?) - } -} - +// impl MapKeyFunc { +// pub fn to_spec( +// &self, +// identity_func_unique_id: &str, +// ) -> SchemaVariantDefinitionResult { +// let mut builder = MapKeyFuncSpec::builder(); +// builder.func_unique_id(identity_func_unique_id); +// builder.key(&self.key); +// if let Some(value_from) = &self.value_from { +// builder.input(value_from.to_spec()); +// }; +// Ok(builder.build()?) +// } +// } +// /// The definition for a [`Prop`](crate::Prop) in a [`SchemaVariant`](crate::SchemaVariant). 
#[derive(Debug, Clone, PartialEq, Eq, Deserialize, Serialize)] #[serde(rename_all = "camelCase")] @@ -696,62 +601,64 @@ pub struct PropDefinition { pub map_key_funcs: Option>, } -impl PropDefinition { - pub fn to_spec( - &self, - identity_func_unique_id: &str, - ) -> SchemaVariantDefinitionResult { - let mut builder = PropSpec::builder(); - builder.name(&self.name); - builder.kind(self.kind); - builder.has_data(true); - if let Some(doc_url) = &self.doc_link { - builder.try_doc_link(doc_url.as_str())?; - } - if let Some(docs) = &self.documentation { - builder.documentation(docs); - } - if let Some(default_value) = &self.default_value { - builder.default_value(default_value.to_owned()); - } - if let Some(validation_format) = &self.validation_format { - builder.validation_format(validation_format); - } - match self.kind { - PropKind::Array | PropKind::Map => { - if let Some(entry) = &self.entry { - builder.type_prop(entry.to_spec(identity_func_unique_id)?); - } - } - PropKind::Object => { - for child in &self.children { - builder.entry(child.to_spec(identity_func_unique_id)?); - } - } - _ => {} - } - if let Some(widget) = &self.widget { - builder.widget_kind(widget.kind); - if let Some(widget_options) = &widget.options { - builder.widget_options(widget_options.to_owned()); - } - } - if let Some(value_from) = &self.value_from { - builder.func_unique_id(identity_func_unique_id); - builder.input(value_from.to_spec()); - } - if let Some(hidden) = self.hidden { - builder.hidden(hidden); - } - if let Some(map_key_funcs) = &self.map_key_funcs { - for map_key_func in map_key_funcs { - builder.map_key_func(map_key_func.to_spec(identity_func_unique_id)?); - } - } - - Ok(builder.build()?) 
- } -} +// impl PropDefinition { +// pub fn to_spec( +// &self, +// identity_func_unique_id: &str, +// ) -> SchemaVariantDefinitionResult { +// let mut builder = PropSpec::builder(); +// builder.name(&self.name); +// builder.kind(self.kind); +// builder.has_data(true); +// if let Some(doc_url) = &self.doc_link { +// builder.try_doc_link(doc_url.as_str())?; +// } +// if let Some(docs) = &self.documentation { +// builder.documentation(docs); +// } +// if let Some(default_value) = &self.default_value { +// builder.default_value(default_value.to_owned()); +// } +// if let Some(validations) = &self.validations { +// for validation in validations { +// builder.validation(validation.to_owned()); +// } +// } +// match self.kind { +// PropKind::Array | PropKind::Map => { +// if let Some(entry) = &self.entry { +// builder.type_prop(entry.to_spec(identity_func_unique_id)?); +// } +// } +// PropKind::Object => { +// for child in &self.children { +// builder.entry(child.to_spec(identity_func_unique_id)?); +// } +// } +// _ => {} +// } +// if let Some(widget) = &self.widget { +// builder.widget_kind(widget.kind); +// if let Some(widget_options) = &widget.options { +// builder.widget_options(widget_options.to_owned()); +// } +// } +// if let Some(value_from) = &self.value_from { +// builder.func_unique_id(identity_func_unique_id); +// builder.input(value_from.to_spec()); +// } +// if let Some(hidden) = self.hidden { +// builder.hidden(hidden); +// } +// if let Some(map_key_funcs) = &self.map_key_funcs { +// for map_key_func in map_key_funcs { +// builder.map_key_func(map_key_func.to_spec(identity_func_unique_id)?); +// } +// } + +// Ok(builder.build()?) +// } +// } /// The definition for a [`Socket`](crate::Socket) in a [`SchemaVariant`](crate::SchemaVariant). /// A corresponding [`provider`](crate::provider) will be created as well. @@ -765,7 +672,7 @@ pub struct SocketDefinition { /// The [`arity`](https://en.wikipedia.org/wiki/Arity) of the [`Socket`](crate::Socket). 
/// Defaults to [`SocketArity::Many`](crate::SocketArity::Many) if nothing is provided. #[serde(skip_serializing_if = "Option::is_none")] - pub arity: Option, + pub arity: Option, #[serde(skip_serializing_if = "Option::is_none")] pub ui_hidden: Option, // The source of the information for the socket @@ -773,42 +680,42 @@ pub struct SocketDefinition { pub value_from: Option, } -impl SocketDefinition { - pub fn to_spec( - &self, - is_input: bool, - identity_func_unique_id: &str, - ) -> SchemaVariantDefinitionResult { - let mut builder = SocketSpec::builder(); - let mut data_builder = SocketSpecData::builder(); - builder.name(&self.name); - data_builder.name(&self.name); - data_builder.connection_annotations(&self.connection_annotations); - if is_input { - data_builder.kind(SocketSpecKind::Input); - } else { - data_builder.kind(SocketSpecKind::Output); - } - - if let Some(arity) = &self.arity { - data_builder.arity(arity); - } else { - data_builder.arity(SocketSpecArity::Many); - } - if let Some(hidden) = &self.ui_hidden { - data_builder.ui_hidden(*hidden); - } else { - data_builder.ui_hidden(false); - } - if let Some(value_from) = &self.value_from { - data_builder.func_unique_id(identity_func_unique_id); - builder.input(value_from.to_spec()); - } - builder.data(data_builder.build()?); - - Ok(builder.build()?) 
- } -} +// impl SocketDefinition { +// pub fn to_spec( +// &self, +// is_input: bool, +// identity_func_unique_id: &str, +// ) -> SchemaVariantDefinitionResult { +// let mut builder = SocketSpec::builder(); +// let mut data_builder = SocketSpecData::builder(); +// builder.name(&self.name); +// data_builder.name(&self.name); +// data_builder.connection_annotations(&self.connection_annotations); +// if is_input { +// data_builder.kind(SocketSpecKind::Input); +// } else { +// data_builder.kind(SocketSpecKind::Output); +// } + +// if let Some(arity) = &self.arity { +// data_builder.arity(arity); +// } else { +// data_builder.arity(SocketSpecArity::Many); +// } +// if let Some(hidden) = &self.ui_hidden { +// data_builder.ui_hidden(*hidden); +// } else { +// data_builder.ui_hidden(false); +// } +// if let Some(value_from) = &self.value_from { +// data_builder.func_unique_id(identity_func_unique_id); +// builder.input(value_from.to_spec()); +// } +// builder.data(data_builder.build()?); + +// Ok(builder.build()?) +// } +// } /// The definition for the source of the information for a prop or a socket in a [`SchemaVariant`](crate::SchemaVariant). 
#[derive(Debug, Clone, PartialEq, Eq, Deserialize, Serialize)] @@ -819,30 +726,30 @@ pub enum ValueFrom { Prop { prop_path: Vec }, } -impl ValueFrom { - fn to_spec(&self) -> AttrFuncInputSpec { - match self { - ValueFrom::InputSocket { socket_name } => AttrFuncInputSpec::InputSocket { - name: "identity".to_string(), - socket_name: socket_name.to_owned(), - unique_id: None, - deleted: false, - }, - ValueFrom::Prop { prop_path } => AttrFuncInputSpec::Prop { - name: "identity".to_string(), - prop_path: PropPath::new(prop_path).into(), - unique_id: None, - deleted: false, - }, - ValueFrom::OutputSocket { socket_name } => AttrFuncInputSpec::OutputSocket { - name: "identity".to_string(), - socket_name: socket_name.to_owned(), - unique_id: None, - deleted: false, - }, - } - } -} +// impl ValueFrom { +// fn to_spec(&self) -> AttrFuncInputSpec { +// match self { +// ValueFrom::InputSocket { socket_name } => AttrFuncInputSpec::InputSocket { +// name: "identity".to_string(), +// socket_name: socket_name.to_owned(), +// unique_id: None, +// deleted: false, +// }, +// ValueFrom::Prop { prop_path } => AttrFuncInputSpec::Prop { +// name: "identity".to_string(), +// prop_path: PropPath::new(prop_path).into(), +// unique_id: None, +// deleted: false, +// }, +// ValueFrom::OutputSocket { socket_name } => AttrFuncInputSpec::OutputSocket { +// name: "identity".to_string(), +// socket_name: socket_name.to_owned(), +// unique_id: None, +// deleted: false, +// }, +// } +// } +// } /// The definition for the source of the data for prop under "/root/"si" in a [`SchemaVariant`](crate::SchemaVariant). 
#[derive(Debug, Clone, PartialEq, Eq, Deserialize, Serialize)] @@ -852,14 +759,14 @@ pub struct SiPropValueFrom { value_from: ValueFrom, } -impl SiPropValueFrom { - fn to_spec(&self, identity_func_unique_id: &str) -> SiPropFuncSpec { - SiPropFuncSpec { - kind: self.kind, - func_unique_id: identity_func_unique_id.to_owned(), - inputs: vec![self.value_from.to_spec()], - unique_id: None, - deleted: false, - } - } -} +// impl SiPropValueFrom { +// fn to_spec(&self, identity_func_unique_id: &str) -> SiPropFuncSpec { +// SiPropFuncSpec { +// kind: self.kind, +// func_unique_id: identity_func_unique_id.to_owned(), +// inputs: vec![self.value_from.to_spec()], +// unique_id: None, +// deleted: false, +// } +// } +// } diff --git a/lib/dal/src/schema/variant/leaves.rs b/lib/dal/src/schema/variant/leaves.rs index 8938fe13cf..17328a45fb 100644 --- a/lib/dal/src/schema/variant/leaves.rs +++ b/lib/dal/src/schema/variant/leaves.rs @@ -4,17 +4,21 @@ use serde::{Deserialize, Serialize}; use strum::EnumIter; +use telemetry::prelude::*; -use crate::func::argument::{FuncArgumentId, FuncArgumentKind}; -use crate::schema::variant::{SchemaVariantError, SchemaVariantResult}; +use crate::attribute::prototype::argument::AttributePrototypeArgument; +use crate::workspace_snapshot::edge_weight::{EdgeWeight, EdgeWeightKind}; use crate::{ - AttributeContext, AttributePrototype, AttributePrototypeArgument, AttributeReadContext, - AttributeValue, AttributeValueError, ComponentId, DalContext, Func, FuncBackendKind, - FuncBackendResponseType, FuncError, FuncId, PropId, RootPropChild, SchemaVariant, - SchemaVariantId, StandardModel, + AttributePrototype, AttributePrototypeId, ComponentId, DalContext, Func, FuncBackendKind, + FuncBackendResponseType, FuncId, Prop, PropId, SchemaVariant, SchemaVariantId, }; use si_pkg::{LeafInputLocation as PkgLeafInputLocation, LeafKind as PkgLeafKind}; +use crate::func::argument::{FuncArgumentId, FuncArgumentKind}; +use 
crate::schema::variant::root_prop::RootPropChild; + +use super::{SchemaVariantError, SchemaVariantResult}; + /// This enum provides options for creating leaves underneath compatible subtrees of "/root" within /// a [`SchemaVariant`](crate::SchemaVariant). Each compatible subtree starts with a /// [`map`](crate::PropKind::Map) [`Prop`](crate::Prop) that can contain zero to many @@ -198,100 +202,77 @@ impl SchemaVariant { component_id: Option, leaf_kind: LeafKind, inputs: Vec, - ) -> SchemaVariantResult<(PropId, AttributePrototype)> { - if schema_variant_id.is_none() { - return Err(SchemaVariantError::InvalidSchemaVariant); - } - + ) -> SchemaVariantResult<(PropId, AttributePrototypeId)> { // Ensure the func matches what we need. - let func = Func::get_by_id(ctx, &func_id) - .await? - .ok_or(FuncError::NotFound(func_id))?; - if func.backend_kind() != &FuncBackendKind::JsAttribute { - return Err(SchemaVariantError::LeafFunctionMustBeJsAttribute( - *func.id(), - )); + let func = Func::get_by_id(ctx, func_id).await?; + if func.backend_kind != FuncBackendKind::JsAttribute { + return Err(SchemaVariantError::LeafFunctionMustBeJsAttribute(func.id)); } - if func.backend_response_type() != &leaf_kind.into() { + if func.backend_response_type != leaf_kind.into() { return Err(SchemaVariantError::LeafFunctionMismatch( - *func.backend_response_type(), + func_id, + func.backend_response_type, leaf_kind, )); } - // We only need to finalize once since we are adding a leaf to a known descendant of the - // root prop. - let mut schema_variant = SchemaVariant::get_by_id(ctx, &schema_variant_id) - .await? - .ok_or(SchemaVariantError::NotFound(schema_variant_id))?; - if !schema_variant.finalized_once() { - schema_variant.finalize(ctx, None).await?; + if component_id.is_some() { + unimplemented!("component context for leaves not yet implemented in graph version"); } - // Assemble the values we need to insert an object into the map. 
- let item_prop = + let item_prop_id = SchemaVariant::find_leaf_item_prop(ctx, schema_variant_id, leaf_kind).await?; - // NOTE(nick): we should consider getting the parent and the item at the same time. - let map_prop = item_prop - .parent_prop(ctx) + let map_prop_id = Prop::parent_prop_id_by_id(ctx, item_prop_id) .await? - .ok_or_else(|| SchemaVariantError::ParentPropNotFound(*item_prop.id()))?; - let map_attribute_read_context = - AttributeReadContext::default_with_prop_and_component_id(*map_prop.id(), component_id); - let map_attribute_value = AttributeValue::find_for_context(ctx, map_attribute_read_context) - .await? - .ok_or(AttributeValueError::NotFoundForReadContext( - map_attribute_read_context, - ))?; - let insert_attribute_context = AttributeContext::builder() - .set_prop_id(*item_prop.id()) - .set_component_id(component_id.unwrap_or(ComponentId::NONE)) - .to_context()?; + .ok_or_else(|| SchemaVariantError::LeafMapPropNotFound(item_prop_id))?; - // Insert an item into the map and setup its function. The new entry is named after the func - // name since func names must be unique for a given tenancy and visibility. If that changes, - // then this will break. - let inserted_attribute_value_id = AttributeValue::insert_for_context( - ctx, - insert_attribute_context, - *map_attribute_value.id(), - Some(serde_json::json![{}]), - Some(func.name().to_string()), - ) - .await?; - let inserted_attribute_value = AttributeValue::get_by_id(ctx, &inserted_attribute_value_id) - .await? - .ok_or_else(|| { - AttributeValueError::NotFound(inserted_attribute_value_id, *ctx.visibility()) - })?; - let mut inserted_attribute_prototype = inserted_attribute_value - .attribute_prototype(ctx) - .await? - .ok_or(AttributeValueError::MissingAttributePrototype)?; - inserted_attribute_prototype - .set_func_id(ctx, func_id) - .await?; + if let Some(prototype_id) = + AttributePrototype::find_for_prop(ctx, item_prop_id, &None).await? 
+ { + info!("removing None proto"); + AttributePrototype::remove(ctx, prototype_id).await?; + } + + let key = Some(func.name.to_owned()); + if let Some(prototype_id) = + AttributePrototype::find_for_prop(ctx, item_prop_id, &key).await? + { + info!("removing {:?} proto", &key); + AttributePrototype::remove(ctx, prototype_id).await?; + } + + let attribute_prototype_id = AttributePrototype::new(ctx, func_id).await?.id(); + + { + let mut workspace_snapshot = ctx.workspace_snapshot()?.write().await; + workspace_snapshot.add_edge( + item_prop_id, + EdgeWeight::new(ctx.change_set_pointer()?, EdgeWeightKind::Prototype(key))?, + attribute_prototype_id, + )?; + } for input in inputs { - let input_internal_provider = - SchemaVariant::find_root_child_implicit_internal_provider( - ctx, - schema_variant_id, - input.location.into(), - ) - .await?; - AttributePrototypeArgument::new_for_intra_component( + let input_prop_id = SchemaVariant::find_root_child_prop_id( + ctx, + schema_variant_id, + input.location.into(), + ) + .await?; + + let apa = AttributePrototypeArgument::new( ctx, - *inserted_attribute_prototype.id(), + attribute_prototype_id, input.func_argument_id, - *input_internal_provider.id(), ) .await?; + + apa.set_value_from_prop_id(ctx, input_prop_id).await?; } // Return the prop id for the entire map so that its implicit internal provider can be // used for intelligence functions. - Ok((*map_prop.id(), inserted_attribute_prototype)) + Ok((map_prop_id, attribute_prototype_id)) } } diff --git a/lib/dal/src/schema/variant/root_prop.rs b/lib/dal/src/schema/variant/root_prop.rs index 7155e1cc78..d1c33ca26c 100644 --- a/lib/dal/src/schema/variant/root_prop.rs +++ b/lib/dal/src/schema/variant/root_prop.rs @@ -2,14 +2,12 @@ //! to the database. 
use strum::{AsRefStr, Display as EnumDisplay, EnumIter, EnumString}; -use telemetry::prelude::*; +use crate::prop::{PropParent, PropPath}; use crate::property_editor::schema::WidgetKind; -use crate::{ - schema::variant::{leaves::LeafKind, SchemaVariantResult}, - DalContext, Func, Prop, PropId, PropKind, ReconciliationPrototype, - ReconciliationPrototypeContext, SchemaId, SchemaVariant, SchemaVariantId, StandardModel, -}; +use crate::schema::variant::leaves::LeafKind; +use crate::schema::variant::SchemaVariantResult; +use crate::{DalContext, Prop, PropId, PropKind, SchemaVariant, SchemaVariantId}; pub mod component_type; @@ -46,6 +44,10 @@ impl RootPropChild { Self::Secrets => "secrets", } } + + pub fn prop_path(&self) -> PropPath { + PropPath::new(["root", self.as_str()]) + } } /// This enum contains the subtree names for every direct child [`Prop`](crate::Prop) of "/root/si". @@ -103,63 +105,79 @@ pub struct RootProp { pub deleted_at_prop_id: PropId, } -impl SchemaVariant { +impl RootProp { /// Create and set a [`RootProp`] for the [`SchemaVariant`]. - #[instrument(level = "debug", skip_all)] - pub async fn create_and_set_root_prop( - &mut self, + pub async fn new( ctx: &DalContext, - schema_id: SchemaId, - ) -> SchemaVariantResult { - let root_prop = - Prop::new_without_ui_optionals(ctx, "root", PropKind::Object, self.id, None).await?; - let root_prop_id = *root_prop.id(); - self.set_root_prop_id(ctx, Some(root_prop_id)).await?; - - // FIXME(nick): we rely on ULID ordering for now, so the si prop tree creation has to come - // before the domain prop tree creation. Once index maps for objects are added, this - // can be moved back to its original location with the other prop tree creation methods. 
- let si_prop_id = Self::setup_si(ctx, root_prop_id, schema_id, self.id).await?; + schema_variant_id: SchemaVariantId, + ) -> SchemaVariantResult { + let root_prop = Prop::new( + ctx, + "root", + PropKind::Object, + false, + None, + None, + PropParent::SchemaVariant(schema_variant_id), + ) + .await?; + let root_prop_id = root_prop.id(); + + // info!("setting up si, domain and secrets"); + let si_prop_id = Self::setup_si(ctx, root_prop_id).await?; let domain_prop = Prop::new_without_ui_optionals( ctx, "domain", PropKind::Object, - self.id, - Some(root_prop_id), + PropParent::OrderedProp(root_prop_id), ) .await?; - let secrets_prop_id = *Prop::new_without_ui_optionals( + let secrets_prop = Prop::new_without_ui_optionals( ctx, "secrets", PropKind::Object, - self.id, - Some(root_prop_id), + PropParent::OrderedProp(root_prop_id), ) - .await? - .id(); + .await?; - let resource_prop_id = Self::setup_resource(ctx, root_prop_id, self.id).await?; - let resource_value_prop_id = Self::setup_resource_value(ctx, root_prop_id, self).await?; - let code_prop_id = Self::setup_code(ctx, root_prop_id, self.id).await?; - let qualification_prop_id = Self::setup_qualification(ctx, root_prop_id, self.id).await?; - let deleted_at_prop_id = Self::setup_deleted_at(ctx, root_prop_id, self.id).await?; + // info!("setting up resource"); + let resource_prop_id = Self::setup_resource(ctx, root_prop_id).await?; + // info!("setting up resource value"); + let resource_value_prop_id = Self::setup_resource_value(ctx, root_prop_id).await?; + // info!("setting up code"); + let code_prop_id = Self::setup_code(ctx, root_prop_id).await?; + // info!("setting up qualification"); + let qualification_prop_id = Self::setup_qualification(ctx, root_prop_id).await?; + + // info!("setting up deleted at"); + let deleted_at_prop = Prop::new( + ctx, + "deleted_at", + PropKind::String, + true, + None, + None, + PropParent::OrderedProp(root_prop_id), + ) + .await?; // Now that the structure is set up, we can populate 
default - // AttributePrototypes & AttributeValues to be updated appropriately below. - SchemaVariant::create_default_prototypes_and_values(ctx, self.id).await?; + // AttributePrototypes to be updated appropriately below. + SchemaVariant::create_default_prototypes(ctx, schema_variant_id).await?; + //SchemaVariant::mark_props_as_able_to_be_used_as_prototype_args(ctx, schema_variant_id)?; Ok(RootProp { prop_id: root_prop_id, si_prop_id, - domain_prop_id: *domain_prop.id(), + domain_prop_id: domain_prop.id(), resource_value_prop_id, resource_prop_id, - secrets_prop_id, + secrets_prop_id: secrets_prop.id(), code_prop_id, qualification_prop_id, - deleted_at_prop_id, + deleted_at_prop_id: deleted_at_prop.id(), }) } @@ -167,54 +185,54 @@ impl SchemaVariant { ctx: &DalContext, leaf_kind: LeafKind, root_prop_id: PropId, - schema_variant_id: SchemaVariantId, ) -> SchemaVariantResult<(PropId, PropId)> { let (leaf_prop_name, leaf_item_prop_name) = leaf_kind.prop_names(); - let mut leaf_prop = Prop::new_without_ui_optionals( + let leaf_prop = Prop::new( ctx, leaf_prop_name, PropKind::Map, - schema_variant_id, - Some(root_prop_id), + true, + None, + None, + PropParent::OrderedProp(root_prop_id), ) .await?; - leaf_prop.set_hidden(ctx, true).await?; - let mut leaf_item_prop = Prop::new_without_ui_optionals( + let leaf_item_prop = Prop::new( ctx, leaf_item_prop_name, PropKind::Object, - schema_variant_id, - Some(*leaf_prop.id()), + true, + None, + None, + PropParent::OrderedProp(leaf_prop.id()), ) .await?; - leaf_item_prop.set_hidden(ctx, true).await?; - Ok((*leaf_prop.id(), *leaf_item_prop.id())) + Ok((leaf_prop.id(), leaf_item_prop.id())) } - async fn setup_si( - ctx: &DalContext, - root_prop_id: PropId, - _schema_id: SchemaId, - schema_variant_id: SchemaVariantId, - ) -> SchemaVariantResult { - let si_prop = Prop::new_without_ui_optionals( + async fn setup_si(ctx: &DalContext, root_prop_id: PropId) -> SchemaVariantResult { + let si_prop = Prop::new( ctx, "si", PropKind::Object, 
- schema_variant_id, - Some(root_prop_id), + false, + None, + None, + PropParent::OrderedProp(root_prop_id), ) .await?; - let si_prop_id = *si_prop.id(); - let _si_name_prop = Prop::new_without_ui_optionals( + + let _si_name_prop = Prop::new( ctx, "name", PropKind::String, - schema_variant_id, - Some(si_prop_id), + false, + None, + None, + PropParent::OrderedProp(si_prop.id()), ) .await?; @@ -223,8 +241,7 @@ impl SchemaVariant { ctx, "protected", PropKind::Boolean, - schema_variant_id, - Some(si_prop_id), + PropParent::OrderedProp(si_prop.id()), ) .await?; @@ -235,8 +252,8 @@ impl SchemaVariant { ctx, "type", PropKind::String, - schema_variant_id, - Some(si_prop_id), + false, + None, Some(( WidgetKind::Select, Some(serde_json::json!([ @@ -258,182 +275,156 @@ impl SchemaVariant { }, ])), )), - None, - None, + PropParent::OrderedProp(si_prop.id()), ) .await?; // Override the schema variant color for nodes on the diagram. - let mut color_prop = Prop::new_without_ui_optionals( + Prop::new( ctx, "color", PropKind::String, - schema_variant_id, - Some(si_prop_id), - ) - .await?; - color_prop.set_widget_kind(ctx, WidgetKind::Color).await?; - - Ok(si_prop_id) - } - - async fn setup_resource_value( - ctx: &DalContext, - root_prop_id: PropId, - schema_variant: &mut SchemaVariant, - ) -> SchemaVariantResult { - let schema_variant_id = *schema_variant.id(); - let mut resource_value_prop = Prop::new_without_ui_optionals( - ctx, - "resource_value", - PropKind::Object, - schema_variant_id, - Some(root_prop_id), + false, + None, + Some((WidgetKind::Color, None)), + PropParent::OrderedProp(si_prop.id()), ) .await?; - resource_value_prop.set_hidden(ctx, true).await?; - - if let Some(reconciliation_func) = - Func::find_by_attr(ctx, "name", &"si:defaultReconciliation") - .await? 
- .pop() - { - ReconciliationPrototype::upsert( - ctx, - *reconciliation_func.id(), - "Reconciliation", - ReconciliationPrototypeContext::new(*schema_variant.id()), - ) - .await?; - } - - SchemaVariant::create_default_prototypes_and_values(ctx, *schema_variant.id()).await?; - SchemaVariant::create_implicit_internal_providers(ctx, *schema_variant.id()).await?; - Ok(*resource_value_prop.id()) + Ok(si_prop.id()) } - async fn setup_resource( - ctx: &DalContext, - root_prop_id: PropId, - schema_variant_id: SchemaVariantId, - ) -> SchemaVariantResult { - let mut resource_prop = Prop::new_without_ui_optionals( + async fn setup_resource(ctx: &DalContext, root_prop_id: PropId) -> SchemaVariantResult { + // /root/resource + let resource_prop = Prop::new( ctx, "resource", PropKind::Object, - schema_variant_id, - Some(root_prop_id), + true, + None, + None, + PropParent::OrderedProp(root_prop_id), ) .await?; - resource_prop.set_hidden(ctx, true).await?; - let resource_prop_id = *resource_prop.id(); - let mut resource_status_prop = Prop::new_without_ui_optionals( + // /root/resource/status + let _resource_status_prop = Prop::new( ctx, "status", PropKind::String, - schema_variant_id, - Some(resource_prop_id), + true, + None, + None, + PropParent::OrderedProp(resource_prop.id()), ) .await?; - resource_status_prop.set_hidden(ctx, true).await?; - let mut resource_message_prop = Prop::new_without_ui_optionals( + // /root/resource/message + let _resource_message_prop = Prop::new( ctx, "message", PropKind::String, - schema_variant_id, - Some(resource_prop_id), + true, + None, + None, + PropParent::OrderedProp(resource_prop.id()), ) .await?; - resource_message_prop.set_hidden(ctx, true).await?; - let mut resource_logs_prop = Prop::new_without_ui_optionals( + // /root/resource/logs + let resource_logs_prop = Prop::new( ctx, "logs", PropKind::Array, - schema_variant_id, - Some(resource_prop_id), + true, + None, + None, + PropParent::OrderedProp(resource_prop.id()), ) .await?; - 
resource_logs_prop.set_hidden(ctx, true).await?; - let mut resource_logs_log_prop = Prop::new_without_ui_optionals( + // /root/resource/logs/log + let _resource_logs_log_prop = Prop::new( ctx, "log", PropKind::String, - schema_variant_id, - Some(*resource_logs_prop.id()), + true, + None, + None, + PropParent::OrderedProp(resource_logs_prop.id()), ) .await?; - resource_logs_log_prop.set_hidden(ctx, true).await?; - let mut resource_payload_prop = Prop::new_without_ui_optionals( + // /root/resource/payload + let _resource_payload_prop = Prop::new( ctx, "payload", PropKind::String, - schema_variant_id, - Some(resource_prop_id), + true, + None, + None, + PropParent::OrderedProp(resource_prop.id()), ) .await?; - resource_payload_prop.set_hidden(ctx, true).await?; - let mut resource_last_synced_prop = Prop::new_without_ui_optionals( + // /root/resource/payload + let _resource_last_synced_prop = Prop::new( ctx, - "last_synced", + "resource_last_synced_prop", PropKind::String, - schema_variant_id, - Some(resource_prop_id), + true, + None, + None, + PropParent::OrderedProp(resource_prop.id()), ) .await?; - resource_last_synced_prop.set_hidden(ctx, true).await?; - Ok(resource_prop_id) + Ok(resource_prop.id()) } - async fn setup_code( + async fn setup_resource_value( ctx: &DalContext, root_prop_id: PropId, - schema_variant_id: SchemaVariantId, ) -> SchemaVariantResult { - let (code_map_prop_id, code_map_item_prop_id) = Self::insert_leaf_props( + let resource_value_prop = Prop::new( ctx, - LeafKind::CodeGeneration, - root_prop_id, - schema_variant_id, + "resource_value", + PropKind::Object, + true, + None, + None, + PropParent::OrderedProp(root_prop_id), ) .await?; - let mut child_code_prop = Prop::new_without_ui_optionals( - ctx, - "code", - PropKind::String, - schema_variant_id, - Some(code_map_item_prop_id), - ) - .await?; - child_code_prop.set_hidden(ctx, true).await?; + Ok(resource_value_prop.id()) + } - let mut child_message_prop = Prop::new_without_ui_optionals( + async 
fn setup_code(ctx: &DalContext, root_prop_id: PropId) -> SchemaVariantResult { + let (code_map_prop_id, code_map_item_prop_id) = + Self::insert_leaf_props(ctx, LeafKind::CodeGeneration, root_prop_id).await?; + + let _child_code_prop = Prop::new( ctx, - "message", + "code", PropKind::String, - schema_variant_id, - Some(code_map_item_prop_id), + true, + None, + None, + PropParent::OrderedProp(code_map_item_prop_id), ) .await?; - child_message_prop.set_hidden(ctx, true).await?; - let mut child_format_prop = Prop::new_without_ui_optionals( + let _child_format_prop = Prop::new( ctx, "format", PropKind::String, - schema_variant_id, - Some(code_map_item_prop_id), + true, + None, + None, + PropParent::OrderedProp(code_map_item_prop_id), ) .await?; - child_format_prop.set_hidden(ctx, true).await?; Ok(code_map_prop_id) } @@ -441,55 +432,32 @@ impl SchemaVariant { async fn setup_qualification( ctx: &DalContext, root_prop_id: PropId, - schema_variant_id: SchemaVariantId, ) -> SchemaVariantResult { - let (qualification_map_prop_id, qualification_map_item_prop_id) = Self::insert_leaf_props( - ctx, - LeafKind::Qualification, - root_prop_id, - schema_variant_id, - ) - .await?; + let (qualification_map_prop_id, qualification_map_item_prop_id) = + Self::insert_leaf_props(ctx, LeafKind::Qualification, root_prop_id).await?; - let mut child_qualified_prop = Prop::new_without_ui_optionals( + let _child_qualified_prop = Prop::new( ctx, "result", PropKind::String, - schema_variant_id, - Some(qualification_map_item_prop_id), + true, + None, + None, + PropParent::OrderedProp(qualification_map_item_prop_id), ) .await?; - child_qualified_prop.set_hidden(ctx, true).await?; - let mut child_message_prop = Prop::new_without_ui_optionals( + let _child_message_prop = Prop::new( ctx, "message", PropKind::String, - schema_variant_id, - Some(qualification_map_item_prop_id), + true, + None, + None, + PropParent::OrderedProp(qualification_map_item_prop_id), ) .await?; - 
child_message_prop.set_hidden(ctx, true).await?; Ok(qualification_map_prop_id) } - - async fn setup_deleted_at( - ctx: &DalContext, - root_prop_id: PropId, - schema_variant_id: SchemaVariantId, - ) -> SchemaVariantResult { - // This is a new prop that we will use to determine if we want to run a delete workflow - let mut deleted_at = Prop::new_without_ui_optionals( - ctx, - "deleted_at", - PropKind::String, - schema_variant_id, - Some(root_prop_id), - ) - .await?; - deleted_at.set_hidden(ctx, true).await?; - - Ok(*deleted_at.id()) - } } diff --git a/lib/dal/src/secret.rs b/lib/dal/src/secret.rs index d4bbc256e3..368b178884 100644 --- a/lib/dal/src/secret.rs +++ b/lib/dal/src/secret.rs @@ -1,21 +1,33 @@ -use std::fmt; - use base64::{engine::general_purpose, Engine}; +use content_store::{ContentHash, Store, StoreError}; use serde::{Deserialize, Serialize}; use serde_json::Value; use si_crypto::{SymmetricCryptoError, SymmetricCryptoService, SymmetricNonce}; +use si_data_pg::PgError; use si_hash::Hash; use sodiumoxide::crypto::{ box_::{PublicKey, SecretKey}, sealedbox, }; -use strum::{AsRefStr, Display, EnumString}; -use thiserror::Error; - -use si_data_pg::PgError; +use std::collections::HashMap; +use std::fmt; +use strum::{AsRefStr, Display, EnumDiscriminants, EnumString}; use telemetry::prelude::*; +use thiserror::Error; +use ulid::Ulid; use veritech_client::SensitiveContainer; +use crate::change_set_pointer::ChangeSetPointerError; +use crate::prop::{PropError, PropPath}; +use crate::schema::variant::root_prop::RootPropChild; +use crate::schema::variant::SchemaVariantError; +use crate::workspace_snapshot::content_address::{ContentAddress, ContentAddressDiscriminants}; +use crate::workspace_snapshot::edge_weight::{ + EdgeWeight, EdgeWeightError, EdgeWeightKind, EdgeWeightKindDiscriminants, +}; +use crate::workspace_snapshot::node_weight::category_node_weight::CategoryNodeKind; +use crate::workspace_snapshot::node_weight::{NodeWeight, NodeWeightError}; +use 
crate::workspace_snapshot::WorkspaceSnapshotError; use crate::{ history_event::HistoryEventMetadata, impl_standard_model, @@ -23,39 +35,51 @@ use crate::{ pk, property_editor::schema::PropertyEditorPropWidgetKind, serde_impls::{base64_bytes_serde, nonce_serde}, - standard_model::{self, objects_from_rows, TypeHint}, + standard_model::{self, TypeHint}, standard_model_accessor, standard_model_accessor_ro, ActorView, ChangeSetPk, DalContext, - HistoryActor, HistoryEvent, HistoryEventError, KeyPair, KeyPairError, StandardModel, - StandardModelError, Tenancy, Timestamp, TransactionsError, UserPk, Visibility, WsEvent, - WsEventResult, WsPayload, + HistoryActor, HistoryEvent, HistoryEventError, KeyPair, KeyPairError, Prop, PropId, + SchemaVariant, SchemaVariantId, StandardModel, StandardModelError, Tenancy, Timestamp, + TransactionsError, UserPk, Visibility, WsEvent, WsEventResult, WsPayload, }; -const LIST_SECRET_DEFINITIONS: &str = include_str!("queries/secrets/list_secret_definitions.sql"); - /// Error type for Secrets. 
#[remain::sorted] #[derive(Error, Debug)] pub enum SecretError { + #[error("change set pointer error: {0}")] + ChangeSetPointer(#[from] ChangeSetPointerError), #[error("error when decrypting crypted secret")] DecryptionFailed, #[error("error deserializing message: {0}")] DeserializeMessage(#[source] serde_json::Error), + #[error("edge weight error: {0}")] + EdgeWeight(#[from] EdgeWeightError), #[error("history event error: {0}")] HistoryEvent(#[from] HistoryEventError), #[error("key pair error: {0}")] KeyPair(#[from] KeyPairError), #[error("key pair not found for secret")] KeyPairNotFound, + #[error("node weight error: {0}")] + NodeWeight(#[from] NodeWeightError), #[error("pg error: {0}")] Pg(#[from] PgError), + #[error("prop error: {0}")] + Prop(#[from] PropError), + #[error("schema variant error: {0}")] + SchemaVariant(#[from] SchemaVariantError), #[error("secret not found: {0}")] SecretNotFound(SecretId), #[error("standard model error: {0}")] StandardModelError(#[from] StandardModelError), + #[error("content store error: {0}")] + Store(#[from] StoreError), #[error("symmetric crypto error: {0}")] SymmetricCrypto(#[from] SymmetricCryptoError), #[error("transactions error: {0}")] Transactions(#[from] TransactionsError), + #[error("workspace snapshot error: {0}")] + WorkspaceSnapshot(#[from] WorkspaceSnapshotError), } /// Result type for Secrets. @@ -70,67 +94,255 @@ pk!(SecretId); /// therefore safe to expose via external API. #[derive(Clone, Debug, Deserialize, Eq, PartialEq, Serialize)] pub struct Secret { - pk: SecretPk, id: SecretId, - name: String, + + // TODO(nick): evaluate how these three fields will work with the new engine. 
+ #[serde(flatten)] + timestamp: Timestamp, + created_by: Option, + updated_by: Option, + + pk: SecretPk, key_pair_pk: KeyPairPk, + name: String, definition: String, description: Option, - #[serde(flatten)] - tenancy: Tenancy, - #[serde(flatten)] +} + +#[derive(EnumDiscriminants, Serialize, Deserialize, PartialEq)] +pub enum SecretContent { + V1(SecretContentV1), +} + +#[derive(Debug, Clone, Deserialize, Serialize, PartialEq)] +pub struct SecretContentV1 { timestamp: Timestamp, created_by: Option, updated_by: Option, - #[serde(flatten)] - visibility: Visibility, -} -impl_standard_model! { - model: Secret, pk: SecretPk, - id: SecretId, - table_name: "secrets", - history_event_label_base: "secret", - history_event_message_name: "Secret" + key_pair_pk: KeyPairPk, + name: String, + definition: String, + description: Option, +} + +impl From for SecretContentV1 { + fn from(value: Secret) -> Self { + Self { + timestamp: value.timestamp, + created_by: value.created_by, + updated_by: value.updated_by, + pk: value.pk, + key_pair_pk: value.key_pair_pk, + name: value.name, + definition: value.definition, + description: value.description, + } + } } impl Secret { - standard_model_accessor_ro!(name, str); + pub fn assemble(id: SecretId, inner: SecretContentV1) -> Self { + Self { + id, + timestamp: inner.timestamp, + created_by: inner.created_by, + updated_by: inner.updated_by, + pk: inner.pk, + key_pair_pk: inner.key_pair_pk, + name: inner.name, + definition: inner.definition, + description: inner.description, + } + } - // Update the underlying `encrypted_secrets` table rather than attempting to update the - // `secrets` view - pub async fn set_name( - &mut self, + // TODO(nick): to maintain API compatibility with main, we need "EncryptedSecret::new" to create + // this. We may want the opposite to happen in the future. We should decide this after the + // switchover. 
Let's consume the object for now to help ensure the underlying "encrypted secret" + // is not used where it shouldn't be. + pub async fn new( ctx: &DalContext, - value: impl Into, - ) -> SecretResult<()> { - let value = value.into(); - let updated_at = standard_model::update( - ctx, - "encrypted_secrets", - "name", - self.id(), - &value, - TypeHint::Text, - ) - .await?; - let _history_event = HistoryEvent::new( - ctx, - Self::history_event_label(vec!["updated"]), - Self::history_event_message("updated"), - &serde_json::json!({"pk": self.pk, "field": "name", "value": &value}), - ) - .await?; - self.timestamp.updated_at = updated_at; - self.name = value; + secret_id: SecretId, + content: SecretContentV1, + ) -> SecretResult { + let hash = ctx + .content_store() + .lock() + .await + .add(&SecretContent::V1(content.clone()))?; + + let id = Ulid::from(secret_id); + let change_set = ctx.change_set_pointer()?; + let node_weight = NodeWeight::new_content(change_set, id, ContentAddress::Secret(hash))?; + + // Attach secret to the category. 
+ { + let mut workspace_snapshot = ctx.workspace_snapshot()?.write().await; + workspace_snapshot.add_node(node_weight)?; + + // Root --> Secret Category --> Secret (this) + let secret_category_id = + workspace_snapshot.get_category_node(None, CategoryNodeKind::Secret)?; + workspace_snapshot.add_edge( + secret_category_id, + EdgeWeight::new(change_set, EdgeWeightKind::Use)?, + id, + )?; + } + + let secret = Self::assemble(id.into(), content); + + Ok(secret) + } + + pub fn id(&self) -> SecretId { + self.id + } + + pub fn pk(&self) -> SecretPk { + self.pk + } + + pub fn name(&self) -> &str { + self.name.as_ref() + } + + pub fn definition(&self) -> &str { + self.definition.as_ref() + } + + pub fn description(&self) -> &Option { + &self.description + } + + pub async fn get_by_id(ctx: &DalContext, id: SecretId) -> SecretResult { + let workspace_snapshot = ctx.workspace_snapshot()?.read().await; + let ulid: Ulid = id.into(); + let node_index = workspace_snapshot.get_node_index_by_id(ulid)?; + let node_weight = workspace_snapshot.get_node_weight(node_index)?; + let hash = node_weight.content_hash(); + + let content: SecretContent = ctx + .content_store() + .lock() + .await + .get(&hash) + .await? + .ok_or(WorkspaceSnapshotError::MissingContentFromStore(ulid))?; + + // NOTE(nick): if we had a v2, then there would be migration logic here. 
+ let SecretContent::V1(inner) = content; + + Ok(Self::assemble(id, inner)) + } + + pub async fn list(ctx: &DalContext) -> SecretResult> { + let workspace_snapshot = ctx.workspace_snapshot()?.read().await; + + let mut secrets = vec![]; + let secret_category_node_id = + workspace_snapshot.get_category_node(None, CategoryNodeKind::Secret)?; + + let secret_node_indices = workspace_snapshot.outgoing_targets_for_edge_weight_kind( + secret_category_node_id, + EdgeWeightKindDiscriminants::Use, + )?; + + let mut node_weights = vec![]; + let mut hashes = vec![]; + for index in secret_node_indices { + let node_weight = workspace_snapshot + .get_node_weight(index)? + .get_content_node_weight_of_kind(ContentAddressDiscriminants::Secret)?; + hashes.push(node_weight.content_hash()); + node_weights.push(node_weight); + } + + let contents: HashMap = ctx + .content_store() + .lock() + .await + .get_bulk(hashes.as_slice()) + .await?; + + for node_weight in node_weights { + match contents.get(&node_weight.content_hash()) { + Some(content) => { + // NOTE(nick): if we had a v2, then there would be migration logic here. + let SecretContent::V1(inner) = content; + + secrets.push(Self::assemble(node_weight.id().into(), inner.to_owned())); + } + None => Err(WorkspaceSnapshotError::MissingContentFromStore( + node_weight.id(), + ))?, + } + } + + Ok(secrets) + } + + // TODO(nick): we need to decide the order of operations for referential secrets and encrypted ones. + pub async fn update(ctx: &DalContext, encrypted_secret: &EncryptedSecret) -> SecretResult<()> { + let raw_id = Ulid::from(encrypted_secret.id); + let mut referential_secret = Self::get_by_id(ctx, encrypted_secret.id).await?; + + let before = SecretContentV1::from(referential_secret.clone()); + + // Only update fields that are updated when encrypted secrets are updated. 
+ referential_secret.timestamp = encrypted_secret.timestamp; + referential_secret.updated_by = encrypted_secret.updated_by; + referential_secret.name = encrypted_secret.name.clone(); + referential_secret.description = Some(encrypted_secret.definition.clone()); + referential_secret.key_pair_pk = encrypted_secret.key_pair_pk; + + let after = SecretContentV1::from(referential_secret); + + if before != after { + let hash = ctx + .content_store() + .lock() + .await + .add(&SecretContent::V1(after.clone()))?; + + let mut workspace_snapshot = ctx.workspace_snapshot()?.write().await; + workspace_snapshot.update_content(ctx.change_set_pointer()?, raw_id, hash)?; + } Ok(()) } - // Once created, these object fields are to be considered immutable - standard_model_accessor_ro!(definition, String); - standard_model_accessor_ro!(description, Option); + // TODO(nick): this was only used in tests. We should decide how the referential secrets and encrypted secrets + // interfaces behave with one another in the long term. + // // Update the underlying `encrypted_secrets` table rather than attempting to update the + // // `secrets` view + // pub async fn set_name( + // &mut self, + // ctx: &DalContext, + // value: impl Into, + // ) -> SecretResult<()> { + // let value = value.into(); + // let _updated_at = standard_model::update( + // ctx, + // "encrypted_secrets", + // "name", + // &self.id(), + // &value, + // TypeHint::Text, + // ) + // .await?; + // let _history_event = HistoryEvent::new( + // ctx, + // EncryptedSecret::history_event_label(vec!["updated"]), + // EncryptedSecret::history_event_message("updated"), + // &serde_json::json!({"pk": self.pk, "field": "name", "value": &value}), + // ) + // .await?; + // self.name = value; + // + // Ok(()) + // } pub async fn key_pair(&self, ctx: &DalContext) -> SecretResult { Ok(KeyPair::get_by_pk(ctx, self.key_pair_pk).await?) 
@@ -240,11 +452,9 @@ impl From for Secret { key_pair_pk: value.key_pair_pk, definition: value.definition, description: value.description, - tenancy: value.tenancy, timestamp: value.timestamp, created_by: value.created_by, - updated_by: value.updated_by, - visibility: value.visibility, + updated_by: None, } } } @@ -308,6 +518,37 @@ impl_standard_model! { history_event_message_name: "Encrypted Secret" } +/// A transient type between [`EncryptedSecret`] and [`Secret`]. +/// +/// Like [`Secret`], this type does not contain any encrypted information nor any encryption metadata and is +/// therefore safe to expose via external API. +#[derive(Clone, Debug, Deserialize, Eq, PartialEq, Serialize)] +struct DeserializedEncryptedSecret { + pk: SecretPk, + id: SecretId, + name: String, + key_pair_pk: KeyPairPk, + definition: String, + description: Option, + #[serde(flatten)] + tenancy: Tenancy, + #[serde(flatten)] + timestamp: Timestamp, + created_by: Option, + updated_by: Option, + #[serde(flatten)] + visibility: Visibility, +} + +impl_standard_model! { + model: DeserializedEncryptedSecret, + pk: SecretPk, + id: SecretId, + table_name: "secrets", + history_event_label_base: "secret", + history_event_message_name: "Secret" +} + impl EncryptedSecret { /// Creates a new encrypted secret and returns a corresponding [`Secret`] representation. #[allow(clippy::too_many_arguments, clippy::new_ret_no_self)] @@ -329,7 +570,6 @@ impl EncryptedSecret { }; let (double_crypted, nonce, key_hash) = ctx.symmetric_crypto_service().encrypt(crypted); - let row = ctx .txns() .await? 
@@ -352,9 +592,27 @@ impl EncryptedSecret { ], ) .await?; - let object: Secret = standard_model::finish_create_from_row(ctx, row).await?; - Ok(object) + let object: DeserializedEncryptedSecret = + standard_model::finish_create_from_row(ctx, row).await?; + + let referential_secret = Secret::new( + ctx, + object.id, + SecretContentV1 { + timestamp: object.timestamp, + created_by: object.created_by, + updated_by: object.updated_by, + pk: object.pk, + key_pair_pk: object.key_pair_pk, + name: object.name, + definition: object.definition, + description: object.description, + }, + ) + .await?; + + Ok(referential_secret) } standard_model_accessor!(name, String, SecretResult); @@ -547,15 +805,62 @@ pub struct SecretDefinitionView { } impl SecretDefinitionView { - pub async fn list(ctx: &DalContext) -> SecretResult> { - let rows = ctx - .txns() - .await? - .pg() - .query(LIST_SECRET_DEFINITIONS, &[ctx.tenancy(), ctx.visibility()]) - .await?; + pub async fn list(ctx: &DalContext) -> SecretResult> { + let schema_variant_ids = SchemaVariant::list_ids(ctx).await?; + + let secret_definition_path = PropPath::new(["root", "secret_definition"]); + let mut views = Vec::new(); + + for schema_variant_id in schema_variant_ids { + let maybe_secret_definition_prop_id = + Prop::find_prop_id_by_path_opt(ctx, schema_variant_id, &secret_definition_path) + .await?; + + // We have found a schema variant with a secret definition! + if let Some(secret_definition_prop_id) = maybe_secret_definition_prop_id { + let view = + Self::assemble(ctx, schema_variant_id, secret_definition_prop_id).await?; + views.push(view); + } + } + + Ok(views) + } + + async fn assemble( + ctx: &DalContext, + schema_variant_id: SchemaVariantId, + secret_definition_prop_id: PropId, + ) -> SecretResult { + // Now, find all the fields of the definition. + let field_prop_ids = Prop::direct_child_prop_ids(ctx, secret_definition_prop_id).await?; + + // Assemble the form data views. 
+ let mut form_data_views = Vec::new(); + for field_prop_id in field_prop_ids { + let field_prop = Prop::get_by_id(ctx, field_prop_id).await?; + form_data_views.push(SecretFormDataView { + name: field_prop.name, + kind: field_prop.kind.to_string(), + widget_kind: PropertyEditorPropWidgetKind::new( + field_prop.widget_kind, + field_prop.widget_options, + ), + }); + } + + // Get the name from the (hopefully) only child of secrets prop. + let secrets_prop_id = + SchemaVariant::find_root_child_prop_id(ctx, schema_variant_id, RootPropChild::Secrets) + .await?; + + let entry_prop_id = Prop::direct_single_child_prop_id(ctx, secrets_prop_id).await?; + let entry_prop = Prop::get_by_id(ctx, entry_prop_id).await?; - Ok(objects_from_rows(rows)?) + Ok(Self { + secret_definition: entry_prop.name, + form_data: form_data_views, + }) } } diff --git a/lib/dal/src/status.rs b/lib/dal/src/status.rs index 5660f4f746..cc14aa0a61 100644 --- a/lib/dal/src/status.rs +++ b/lib/dal/src/status.rs @@ -16,9 +16,8 @@ use tokio::sync::Mutex; use crate::{ pk, schema::variant::leaves::LeafKind, standard_model::objects_from_rows, ActorView, - AttributeValue, AttributeValueError, AttributeValueId, ChangeSetPk, Component, ComponentError, - ComponentId, ComponentStatus, DalContext, ExternalProvider, ExternalProviderError, - InternalProvider, InternalProviderError, Prop, PropError, PropId, SchemaVariant, SocketId, + AttributeValue, AttributeValueId, ChangeSetPk, Component, ComponentId, ComponentStatus, + DalContext, ExternalProvider, InternalProvider, Prop, PropId, SchemaVariant, SocketId, StandardModel, StandardModelError, Tenancy, Timestamp, UserPk, WsEvent, WsEventError, WsEventResult, WsPayload, }; diff --git a/lib/dal/src/tasks/status_receiver.rs b/lib/dal/src/tasks/status_receiver.rs index 288f21dc7f..4a4e5b42e2 100644 --- a/lib/dal/src/tasks/status_receiver.rs +++ b/lib/dal/src/tasks/status_receiver.rs @@ -15,9 +15,9 @@ use thiserror::Error; use tokio::sync::broadcast; use crate::{ - 
AttributeValue, AttributeValueError, AttributeValueId, Component, ComponentId, DalContext, - DalContextBuilder, ServicesContext, StandardModel, StandardModelError, Tenancy, - TransactionsError, Visibility, WsEvent, + attribute::value::AttributeValueError, AttributeValue, AttributeValueId, Component, + ComponentId, DalContext, DalContextBuilder, ServicesContext, StandardModel, StandardModelError, + Tenancy, TransactionsError, Visibility, WsEvent, }; pub mod client; @@ -181,13 +181,8 @@ impl StatusReceiver { let mut seen_code_generation_components: HashSet = HashSet::new(); for dependent_value in flattened_dependent_graph { if code_generation_attribute_values.contains(dependent_value) { - let attribute_value = AttributeValue::get_by_id(&ctx, dependent_value) - .await? - .ok_or(AttributeValueError::NotFound( - *dependent_value, - *ctx.visibility(), - ))?; - let component_id = attribute_value.context.component_id(); + let attribute_value = AttributeValue::get_by_id(&ctx, *dependent_value)?; + let component_id = AttributeValue::component_id(&ctx, *dependent_value)?; if component_id != ComponentId::NONE && !seen_code_generation_components.contains(&component_id) { diff --git a/lib/dal/src/timestamp.rs b/lib/dal/src/timestamp.rs index 7580f14fec..2e5f756c7e 100644 --- a/lib/dal/src/timestamp.rs +++ b/lib/dal/src/timestamp.rs @@ -22,4 +22,11 @@ impl Timestamp { updated_at: now, } } + + pub fn assemble(created_at: DateTime, updated_at: DateTime) -> Self { + Self { + created_at, + updated_at, + } + } } diff --git a/lib/dal/src/visibility.rs b/lib/dal/src/visibility.rs index 0dcab5f934..d2f205a37d 100644 --- a/lib/dal/src/visibility.rs +++ b/lib/dal/src/visibility.rs @@ -1,11 +1,11 @@ -use crate::{ChangeSetPk, DalContext, TransactionsError}; use chrono::{DateTime, Utc}; use serde::{Deserialize, Serialize}; +use serde_aux::field_attributes::deserialize_number_from_string; use si_data_pg::PgError; use telemetry::prelude::*; use thiserror::Error; -use 
serde_aux::field_attributes::deserialize_number_from_string; +use crate::{ChangeSetPk, DalContext, TransactionsError}; #[remain::sorted] #[derive(Error, Debug)] diff --git a/lib/dal/src/workspace.rs b/lib/dal/src/workspace.rs index b72f0e87d5..b6c4ef2738 100644 --- a/lib/dal/src/workspace.rs +++ b/lib/dal/src/workspace.rs @@ -1,13 +1,16 @@ +use chrono::{DateTime, Utc}; use serde::{Deserialize, Serialize}; use si_data_nats::NatsError; -use si_data_pg::PgError; +use si_data_pg::{PgError, PgRow}; use telemetry::prelude::*; use thiserror::Error; +use crate::change_set_pointer::{ChangeSetPointer, ChangeSetPointerError, ChangeSetPointerId}; +use crate::workspace_snapshot::WorkspaceSnapshotError; use crate::{ pk, standard_model, standard_model_accessor_ro, DalContext, HistoryActor, HistoryEvent, - HistoryEventError, KeyPair, KeyPairError, StandardModelError, Tenancy, Timestamp, - TransactionsError, User, UserError, UserPk, + HistoryEventError, KeyPairError, StandardModelError, Tenancy, Timestamp, TransactionsError, + UserError, WorkspaceSnapshot, }; const WORKSPACE_GET_BY_PK: &str = include_str!("queries/workspace/get_by_pk.sql"); @@ -17,6 +20,8 @@ const WORKSPACE_LIST_FOR_USER: &str = include_str!("queries/workspace/list_for_u #[remain::sorted] #[derive(Error, Debug)] pub enum WorkspaceError { + #[error("change set pointer error: {0}")] + ChangeSetPointer(#[from] ChangeSetPointerError), #[error(transparent)] HistoryEvent(#[from] HistoryEventError), #[error(transparent)] @@ -35,45 +40,100 @@ pub enum WorkspaceError { Transactions(#[from] TransactionsError), #[error(transparent)] User(#[from] UserError), + #[error("workspace snapshot error: {0}")] + WorkspaceSnapshot(#[from] WorkspaceSnapshotError), } pub type WorkspaceResult = Result; pk!(WorkspacePk); - -#[derive(Deserialize, Serialize, Debug, Clone, PartialEq, Eq)] -pub struct WorkspaceSignup { - pub key_pair: KeyPair, - pub user: User, - pub workspace: Workspace, -} +pk!(WorkspaceId); #[derive(Deserialize, Serialize, 
Debug, Clone, PartialEq, Eq)] pub struct Workspace { pk: WorkspacePk, name: String, + default_change_set_id: ChangeSetPointerId, #[serde(flatten)] timestamp: Timestamp, } +impl TryFrom for Workspace { + type Error = WorkspaceError; + + fn try_from(row: PgRow) -> Result { + let created_at: DateTime = row.try_get("created_at")?; + let updated_at: DateTime = row.try_get("updated_at")?; + Ok(Self { + pk: row.try_get("pk")?, + name: row.try_get("name")?, + default_change_set_id: row.try_get("default_change_set_id")?, + timestamp: Timestamp::assemble(created_at, updated_at), + }) + } +} + impl Workspace { pub fn pk(&self) -> &WorkspacePk { &self.pk } - pub async fn builtin(ctx: &DalContext) -> WorkspaceResult { + pub fn default_change_set_id(&self) -> ChangeSetPointerId { + self.default_change_set_id + } + + /// Find or create the builtin [`Workspace`]. + #[instrument(skip_all)] + pub async fn builtin(ctx: &mut DalContext) -> WorkspaceResult { + // Check if the builtin already exists. + if let Some(found_builtin) = Self::find_builtin(ctx).await? { + return Ok(found_builtin); + } + + // If not, create the builtin workspace with a corresponding base change set and initial + // workspace snapshot. + let name = "builtin"; + let mut change_set = ChangeSetPointer::new_head(ctx).await?; + let workspace_snapshot = WorkspaceSnapshot::initial(ctx, &change_set).await?; + change_set + .update_pointer(ctx, workspace_snapshot.id()) + .await?; + let change_set_id = change_set.id; + + let head_pk = WorkspaceId::NONE; + let row = ctx .txns() .await? 
.pg() .query_one( - "SELECT object FROM workspace_find_or_create_builtin_v1()", - &[], + "INSERT INTO workspaces (pk, name, default_change_set_id) VALUES ($1, $2, $3) RETURNING *", + &[&head_pk, &name, &change_set_id], ) .await?; - let object = standard_model::object_from_row(row)?; - Ok(object) + let workspace = Self::try_from(row)?; + + change_set.update_workspace_id(ctx, *workspace.pk()).await?; + + Ok(workspace) + } + + /// This private method attempts to find the builtin [`Workspace`]. + #[instrument(skip_all)] + async fn find_builtin(ctx: &DalContext) -> WorkspaceResult> { + let head_pk = WorkspaceId::NONE; + let maybe_row = ctx + .txns() + .await? + .pg() + .query_opt("SELECT * FROM workspaces WHERE pk = $1", &[&head_pk]) + .await?; + let maybe_builtin = match maybe_row { + Some(found) => Some(Self::try_from(found)?), + None => None, + }; + Ok(maybe_builtin) } pub async fn list_for_user(ctx: &DalContext) -> WorkspaceResult> { @@ -92,11 +152,14 @@ impl Workspace { } pub async fn find_first_user_workspace(ctx: &DalContext) -> WorkspaceResult> { - let row = ctx.txns().await?.pg().query_opt( + let maybe_row = ctx.txns().await?.pg().query_opt( "SELECT row_to_json(w.*) AS object FROM workspaces AS w WHERE pk != $1 ORDER BY created_at ASC LIMIT 1", &[&WorkspacePk::NONE], ).await?; - - Ok(standard_model::option_object_from_row(row)?) + let maybe_workspace = match maybe_row { + Some(found) => Some(Self::try_from(found)?), + None => None, + }; + Ok(maybe_workspace) } pub async fn new( @@ -104,23 +167,48 @@ impl Workspace { pk: WorkspacePk, name: impl AsRef, ) -> WorkspaceResult { + // Get the default change set from the builtin workspace. + let builtin = match Self::find_builtin(ctx).await? { + Some(found_builtin) => found_builtin, + None => { + // TODO(nick,jacob): replace this with an error. + todo!("this should not happen") + } + }; + + // Create a new change set whose base is the default change set of the workspace. 
+ // Point to the snapshot that the builtin's default change set is pointing to. + let mut change_set = + ChangeSetPointer::new(ctx, "HEAD", Some(builtin.default_change_set_id)).await?; + let workspace_snapshot = + WorkspaceSnapshot::find_for_change_set(ctx, builtin.default_change_set_id).await?; + change_set + .update_pointer(ctx, workspace_snapshot.id()) + .await?; + let change_set_id = change_set.id; + let name = name.as_ref(); let row = ctx .txns() .await? .pg() .query_one( - "SELECT object FROM workspace_create_v1($1, $2)", - &[&pk, &name], + "INSERT INTO workspaces (pk, name, default_change_set_id) VALUES ($1, $2, $3) RETURNING *", + &[&pk, &name, &change_set_id], ) .await?; + let new_workspace = Self::try_from(row)?; - // Inlined `finish_create_from_row` + change_set + .update_workspace_id(ctx, *new_workspace.pk()) + .await?; - let json: serde_json::Value = row.try_get("object")?; - let object: Self = serde_json::from_value(json)?; + ctx.update_tenancy(Tenancy::new(new_workspace.pk)); - ctx.update_tenancy(Tenancy::new(object.pk)); + // TODO(nick,zack,jacob): convert visibility (or get rid of it?) to use our the new change set id. + // should set_change_set_pointer and set_workspace_snapshot happen in update_visibility? 
+ ctx.update_visibility_v2(&change_set); + ctx.update_snapshot_to_visibility().await?; let _history_event = HistoryEvent::new( ctx, @@ -129,7 +217,7 @@ impl Workspace { &serde_json::json![{ "visibility": ctx.visibility() }], ) .await?; - Ok(object) + Ok(new_workspace) } pub async fn clear(&self, ctx: &DalContext) -> WorkspaceResult<()> { @@ -162,45 +250,18 @@ impl Workspace { Ok(workspace) } - pub async fn signup( - ctx: &mut DalContext, - workspace_name: impl AsRef, - user_name: impl AsRef, - user_email: impl AsRef, - ) -> WorkspaceResult { - let workspace = Workspace::new(ctx, WorkspacePk::generate(), workspace_name).await?; - let key_pair = KeyPair::new(ctx, "default").await?; - - let user = User::new( - ctx, - UserPk::generate(), - &user_name, - &user_email, - None::<&str>, - ) - .await?; - user.associate_workspace(ctx, *workspace.pk()).await?; - - ctx.update_history_actor(HistoryActor::User(user.pk())); - - ctx.import_builtins().await?; - - Ok(WorkspaceSignup { - key_pair, - user, - workspace, - }) - } - pub async fn find_by_name(ctx: &DalContext, name: &str) -> WorkspaceResult> { - let row = ctx + let maybe_row = ctx .txns() .await? .pg() .query_opt(WORKSPACE_FIND_BY_NAME, &[&name]) .await?; - let result = standard_model::option_object_from_row(row)?; - Ok(result) + let maybe_workspace = match maybe_row { + Some(found) => Some(Self::try_from(found)?), + None => None, + }; + Ok(maybe_workspace) } pub async fn get_by_pk( diff --git a/lib/dal/src/workspace_snapshot.rs b/lib/dal/src/workspace_snapshot.rs new file mode 100644 index 0000000000..6ff274af7f --- /dev/null +++ b/lib/dal/src/workspace_snapshot.rs @@ -0,0 +1,663 @@ +//! Mostly everything is a node or an edge! 
+ +// #![warn( +// missing_debug_implementations, +// missing_docs, +// unreachable_pub, +// bad_style, +// dead_code, +// improper_ctypes, +// non_shorthand_field_patterns, +// no_mangle_generic_items, +// overflowing_literals, +// path_statements, +// patterns_in_fns_without_body, +// unconditional_recursion, +// unused, +// unused_allocation, +// unused_comparisons, +// unused_parens, +// while_true, +// clippy::missing_panics_doc +// )] + +pub mod conflict; +pub mod content_address; +pub mod edge_weight; +pub mod graph; +pub mod lamport_clock; +pub mod node_weight; +pub mod update; +pub mod vector_clock; + +use chrono::{DateTime, Utc}; +use content_store::{ContentHash, Store, StoreError}; +use petgraph::prelude::*; +use petgraph::stable_graph::Edges; +use serde::{Deserialize, Serialize}; +use si_data_pg::{PgError, PgRow}; +use telemetry::prelude::*; +use thiserror::Error; +use tokio::time::Instant; +use ulid::Ulid; + +use crate::change_set_pointer::{ChangeSetPointer, ChangeSetPointerError, ChangeSetPointerId}; +use crate::workspace_snapshot::conflict::Conflict; +use crate::workspace_snapshot::edge_weight::{ + EdgeWeight, EdgeWeightError, EdgeWeightKind, EdgeWeightKindDiscriminants, +}; +use crate::workspace_snapshot::node_weight::category_node_weight::CategoryNodeKind; +use crate::workspace_snapshot::node_weight::NodeWeight; +use crate::workspace_snapshot::update::Update; +use crate::workspace_snapshot::vector_clock::VectorClockId; +use crate::{ + pk, + workspace_snapshot::{graph::WorkspaceSnapshotGraphError, node_weight::NodeWeightError}, + DalContext, TransactionsError, WorkspaceSnapshotGraph, +}; + +use self::node_weight::{NodeWeightDiscriminants, OrderingNodeWeight}; + +const FIND_FOR_CHANGE_SET: &str = + include_str!("queries/workspace_snapshot/find_for_change_set.sql"); + +#[remain::sorted] +#[derive(Error, Debug)] +pub enum WorkspaceSnapshotError { + #[error("change set pointer error: {0}")] + ChangeSetPointer(#[from] ChangeSetPointerError), + 
#[error("edge weight error: {0}")] + EdgeWeight(#[from] EdgeWeightError), + #[error("missing content from store for id: {0}")] + MissingContentFromStore(Ulid), + #[error("monotonic error: {0}")] + Monotonic(#[from] ulid::MonotonicError), + #[error("NodeWeight error: {0}")] + NodeWeight(#[from] NodeWeightError), + #[error("si_data_pg error: {0}")] + Pg(#[from] PgError), + #[error("postcard error: {0}")] + Postcard(#[from] postcard::Error), + #[error("serde json error: {0}")] + SerdeJson(#[from] serde_json::Error), + #[error("store error: {0}")] + Store(#[from] StoreError), + #[error("transactions error: {0}")] + Transactions(#[from] TransactionsError), + #[error("could not acquire lock: {0}")] + TryLock(#[from] tokio::sync::TryLockError), + #[error("Unexpected edge source {0} for target {1} and edge weight type {0:?}")] + UnexpectedEdgeSource(Ulid, Ulid, EdgeWeightKindDiscriminants), + #[error("Unexpected edge target {0} for source {1} and edge weight type {0:?}")] + UnexpectedEdgeTarget(Ulid, Ulid, EdgeWeightKindDiscriminants), + #[error("Unexpected number of incoming edges of type {0:?} for node type {1:?} with id {2}")] + UnexpectedNumberOfIncomingEdges(EdgeWeightKindDiscriminants, NodeWeightDiscriminants, Ulid), + #[error("WorkspaceSnapshotGraph error: {0}")] + WorkspaceSnapshotGraph(#[from] WorkspaceSnapshotGraphError), + #[error("workspace snapshot graph missing")] + WorkspaceSnapshotGraphMissing, + #[error("no workspace snapshot was fetched for this dal context")] + WorkspaceSnapshotNotFetched, +} + +pub type WorkspaceSnapshotResult = Result; + +pk!(WorkspaceSnapshotId); + +#[derive(Debug, Serialize, Deserialize, Clone)] +pub struct WorkspaceSnapshot { + id: WorkspaceSnapshotId, + created_at: DateTime, + #[serde(skip_serializing)] + working_copy: WorkspaceSnapshotGraph, +} + +impl TryFrom for WorkspaceSnapshot { + type Error = WorkspaceSnapshotError; + + fn try_from(row: PgRow) -> Result { + let start = Instant::now(); + let snapshot: Vec = 
row.try_get("snapshot")?; + info!("snapshot copy into vec: {:?}", start.elapsed()); + let start = Instant::now(); + let working_copy = postcard::from_bytes(&snapshot)?; + info!("snapshot deserialize: {:?}", start.elapsed()); + Ok(Self { + id: row.try_get("id")?, + created_at: row.try_get("created_at")?, + working_copy, + }) + } +} + +#[allow(dead_code)] +pub(crate) fn serde_value_to_string_type(value: &serde_json::Value) -> String { + match value { + serde_json::Value::Array(_) => "array", + serde_json::Value::Bool(_) => "bool", + serde_json::Value::Null => "null", + serde_json::Value::Number(_) => "number", + serde_json::Value::Object(_) => "object", + serde_json::Value::String(_) => "string", + } + .into() +} + +impl WorkspaceSnapshot { + pub async fn initial( + ctx: &DalContext, + change_set: &ChangeSetPointer, + ) -> WorkspaceSnapshotResult { + let mut graph: WorkspaceSnapshotGraph = WorkspaceSnapshotGraph::new(change_set)?; + + // Create the category nodes under root. + let component_node_index = + graph.add_category_node(change_set, CategoryNodeKind::Component)?; + let func_node_index = graph.add_category_node(change_set, CategoryNodeKind::Func)?; + let schema_node_index = graph.add_category_node(change_set, CategoryNodeKind::Schema)?; + let secret_node_index = graph.add_category_node(change_set, CategoryNodeKind::Secret)?; + + // Connect them to root. + graph.add_edge( + graph.root(), + EdgeWeight::new(change_set, EdgeWeightKind::Use)?, + component_node_index, + )?; + graph.add_edge( + graph.root(), + EdgeWeight::new(change_set, EdgeWeightKind::Use)?, + func_node_index, + )?; + graph.add_edge( + graph.root(), + EdgeWeight::new(change_set, EdgeWeightKind::Use)?, + schema_node_index, + )?; + graph.add_edge( + graph.root(), + EdgeWeight::new(change_set, EdgeWeightKind::Use)?, + secret_node_index, + )?; + + // We do not care about any field other than "working_copy" because "write" will populate + // them using the assigned working copy. 
+ let mut initial = Self { + id: WorkspaceSnapshotId::NONE, + created_at: Utc::now(), + working_copy: graph, + }; + initial.write(ctx, change_set.vector_clock_id()).await?; + + Ok(initial) + } + + pub async fn write( + &mut self, + ctx: &DalContext, + vector_clock_id: VectorClockId, + ) -> WorkspaceSnapshotResult { + // Pull out the working copy and clean it up. + let working_copy = self.working_copy_mut(); + working_copy.cleanup(); + + // Mark everything left as seen. + working_copy.mark_graph_seen(vector_clock_id)?; + + // Write out to the content store. + ctx.content_store().lock().await.write().await?; + + // Stamp the new workspace snapshot. + let serialized_snapshot = postcard::to_stdvec(&working_copy)?; + let row = ctx + .txns() + .await? + .pg() + .query_one( + "INSERT INTO workspace_snapshots (snapshot) VALUES ($1) RETURNING *", + &[&serialized_snapshot], + ) + .await?; + let object = Self::try_from(row)?; + + // Reset relevant fields on self. + self.id = object.id; + self.created_at = object.created_at; + + Ok(self.id) + } + + pub fn id(&self) -> WorkspaceSnapshotId { + self.id + } + + pub fn root(&self) -> WorkspaceSnapshotResult { + Ok(self.working_copy.root()) + } + + fn working_copy_mut(&mut self) -> &mut WorkspaceSnapshotGraph { + &mut self.working_copy + } + + pub fn add_node(&mut self, node: NodeWeight) -> WorkspaceSnapshotResult { + let new_node_index = self.working_copy.add_node(node)?; + Ok(new_node_index) + } + + pub fn add_ordered_node( + &mut self, + change_set: &ChangeSetPointer, + node: NodeWeight, + ) -> WorkspaceSnapshotResult { + let new_node_index = self.working_copy.add_ordered_node(change_set, node)?; + Ok(new_node_index) + } + + pub fn update_content( + &mut self, + change_set: &ChangeSetPointer, + id: Ulid, + new_content_hash: ContentHash, + ) -> WorkspaceSnapshotResult<()> { + Ok(self + .working_copy + .update_content(change_set, id, new_content_hash)?) 
+ } + + pub fn add_edge( + &mut self, + from_node_id: impl Into, + edge_weight: EdgeWeight, + to_node_id: impl Into, + ) -> WorkspaceSnapshotResult { + let from_node_index = self.working_copy.get_node_index_by_id(from_node_id)?; + let to_node_index = self.working_copy.get_node_index_by_id(to_node_id)?; + Ok(self + .working_copy + .add_edge(from_node_index, edge_weight, to_node_index)?) + } + + // NOTE(nick): this should only be used by the rebaser and in specific scenarios where the + // indices are definitely correct. + pub fn add_edge_unchecked( + &mut self, + from_node_index: NodeIndex, + edge_weight: EdgeWeight, + to_node_index: NodeIndex, + ) -> WorkspaceSnapshotResult { + Ok(self + .working_copy + .add_edge(from_node_index, edge_weight, to_node_index)?) + } + + pub fn add_ordered_edge( + &mut self, + change_set: &ChangeSetPointer, + from_node_id: impl Into, + edge_weight: EdgeWeight, + to_node_id: impl Into, + ) -> WorkspaceSnapshotResult { + let from_node_index = self.working_copy.get_node_index_by_id(from_node_id)?; + let to_node_index = self.working_copy.get_node_index_by_id(to_node_id)?; + let (edge_index, _) = self.working_copy.add_ordered_edge( + change_set, + from_node_index, + edge_weight, + to_node_index, + )?; + Ok(edge_index) + } + + pub async fn detect_conflicts_and_updates( + &self, + to_rebase_vector_clock_id: VectorClockId, + onto_workspace_snapshot: &mut WorkspaceSnapshot, + onto_vector_clock_id: VectorClockId, + ) -> WorkspaceSnapshotResult<(Vec, Vec)> { + Ok(self.working_copy.detect_conflicts_and_updates( + to_rebase_vector_clock_id, + &onto_workspace_snapshot.working_copy, + onto_vector_clock_id, + )?) + } + + // NOTE(nick): this should only be used by the rebaser. + pub fn edge_endpoints( + &mut self, + edge_index: EdgeIndex, + ) -> WorkspaceSnapshotResult<(NodeIndex, NodeIndex)> { + Ok(self.working_copy.edge_endpoints(edge_index)?) 
+ } + + pub fn import_subgraph( + &mut self, + other: &mut Self, + root_index: NodeIndex, + ) -> WorkspaceSnapshotResult<()> { + Ok(self + .working_copy + .import_subgraph(&other.working_copy, root_index)?) + } + + /// Calls [`WorkspaceSnapshotGraph::replace_references()`] + pub fn replace_references( + &mut self, + original_node_index: NodeIndex, + ) -> WorkspaceSnapshotResult<()> { + Ok(self.working_copy.replace_references(original_node_index)?) + } + + pub fn get_node_weight_by_id( + &self, + id: impl Into, + ) -> WorkspaceSnapshotResult<&NodeWeight> { + let node_idx = self.get_node_index_by_id(id)?; + Ok(self.working_copy.get_node_weight(node_idx)?) + } + + pub fn get_node_weight(&self, node_index: NodeIndex) -> WorkspaceSnapshotResult<&NodeWeight> { + Ok(self.working_copy.get_node_weight(node_index)?) + } + + pub fn find_equivalent_node( + &self, + id: Ulid, + lineage_id: Ulid, + ) -> WorkspaceSnapshotResult> { + Ok(self.working_copy.find_equivalent_node(id, lineage_id)?) + } + + pub fn cleanup(&mut self) -> WorkspaceSnapshotResult<()> { + self.working_copy.cleanup(); + Ok(()) + } + + pub fn nodes(&self) -> WorkspaceSnapshotResult> { + Ok(self.working_copy.nodes()) + } + + pub fn edges( + &self, + ) -> WorkspaceSnapshotResult> { + Ok(self.working_copy.edges()) + } + + pub fn dot(&self) { + self.working_copy.dot(); + } + + pub fn tiny_dot_to_file(&self, suffix: Option<&str>) { + self.working_copy.tiny_dot_to_file(suffix); + } + + #[inline(always)] + pub fn get_node_index_by_id(&self, id: impl Into) -> WorkspaceSnapshotResult { + Ok(self.working_copy.get_node_index_by_id(id)?) + } + + pub fn get_latest_node_index( + &self, + node_index: NodeIndex, + ) -> WorkspaceSnapshotResult { + Ok(self.working_copy.get_latest_node_idx(node_index)?) + } + + #[instrument(skip_all)] + pub async fn find( + ctx: &DalContext, + workspace_snapshot_id: WorkspaceSnapshotId, + ) -> WorkspaceSnapshotResult { + let start = tokio::time::Instant::now(); + let row = ctx + .txns() + .await? 
+ .pg() + .query_one( + "SELECT * FROM workspace_snapshots WHERE id = $1", + &[&workspace_snapshot_id], + ) + .await?; + info!("data fetch: {:?}", start.elapsed()); + Self::try_from(row) + } + + #[instrument(skip_all)] + pub async fn find_for_change_set( + ctx: &DalContext, + change_set_pointer_id: ChangeSetPointerId, + ) -> WorkspaceSnapshotResult { + let row = ctx + .txns() + .await? + .pg() + .query_one(FIND_FOR_CHANGE_SET, &[&change_set_pointer_id]) + .await?; + Self::try_from(row) + } + + pub fn get_category_node( + &self, + source: Option, + kind: CategoryNodeKind, + ) -> WorkspaceSnapshotResult { + let (category_node_id, _) = self.working_copy.get_category_node(source, kind)?; + Ok(category_node_id) + } + + pub fn edges_directed( + &self, + id: impl Into, + direction: Direction, + ) -> WorkspaceSnapshotResult> { + let node_index = self.working_copy.get_node_index_by_id(id)?; + Ok(self.working_copy.edges_directed(node_index, direction)) + } + + pub fn edges_directed_for_edge_weight_kind( + &self, + id: impl Into, + direction: Direction, + edge_kind: EdgeWeightKindDiscriminants, + ) -> WorkspaceSnapshotResult>> { + let node_index = self.working_copy.get_node_index_by_id(id)?; + + Ok(self + .working_copy + .edges_directed_for_edge_weight_kind(node_index, direction, edge_kind)) + } + + pub fn edges_directed_by_index( + &self, + node_index: NodeIndex, + direction: Direction, + ) -> WorkspaceSnapshotResult> { + Ok(self.working_copy.edges_directed(node_index, direction)) + } + + pub fn incoming_sources_for_edge_weight_kind( + &self, + id: impl Into, + edge_weight_kind_discrim: EdgeWeightKindDiscriminants, + ) -> WorkspaceSnapshotResult> { + Ok(self + .edges_directed(id.into(), Direction::Incoming)? 
+ .filter_map(|edge_ref| { + if edge_weight_kind_discrim == edge_ref.weight().kind().into() { + Some(edge_ref.source()) + } else { + None + } + }) + .collect()) + } + + pub fn outgoing_targets_for_edge_weight_kind( + &self, + id: impl Into, + edge_weight_kind_discrim: EdgeWeightKindDiscriminants, + ) -> WorkspaceSnapshotResult> { + let id = id.into(); + Ok(self + .edges_directed(id, Direction::Outgoing)? + .filter_map(|edge_ref| { + if edge_weight_kind_discrim == edge_ref.weight().kind().into() { + Some(edge_ref.target()) + } else { + None + } + }) + .collect()) + } + + pub fn outgoing_targets_for_edge_weight_kind_by_index( + &self, + node_index: NodeIndex, + edge_weight_kind_discrim: EdgeWeightKindDiscriminants, + ) -> WorkspaceSnapshotResult> { + Ok(self + .edges_directed_by_index(node_index, Direction::Outgoing)? + .filter_map(|edge_ref| { + if edge_weight_kind_discrim == edge_ref.weight().kind().into() { + Some(edge_ref.target()) + } else { + None + } + }) + .collect()) + } + + pub fn all_outgoing_targets( + &self, + id: impl Into, + ) -> WorkspaceSnapshotResult> { + let mut result = vec![]; + let target_idxs: Vec = self + .edges_directed(id, Direction::Outgoing)? + .map(|edge_ref| edge_ref.target()) + .collect(); + + for target_idx in target_idxs { + let node_weight = self.get_node_weight(target_idx)?; + result.push(node_weight.to_owned()); + } + + Ok(result) + } + + pub fn all_incoming_sources( + &self, + id: impl Into, + ) -> WorkspaceSnapshotResult> { + let mut result = vec![]; + let source_idxs: Vec = self + .edges_directed(id, Direction::Incoming)? 
+ .map(|edge_ref| edge_ref.source()) + .collect(); + + for source_idx in source_idxs { + let node_weight = self.get_node_weight(source_idx)?; + result.push(node_weight.to_owned()); + } + + Ok(result) + } + + pub fn remove_incoming_edges_of_kind( + &mut self, + change_set: &ChangeSetPointer, + target_id: impl Into, + kind: EdgeWeightKindDiscriminants, + ) -> WorkspaceSnapshotResult<()> { + let target_id = target_id.into(); + + let sources = self.incoming_sources_for_edge_weight_kind(target_id, kind)?; + for source_node_idx in sources { + let target_node_idx = self.get_node_index_by_id(target_id)?; + self.remove_edge(change_set, source_node_idx, target_node_idx, kind)?; + } + + Ok(()) + } + + pub fn remove_node_by_id(&mut self, id: impl Into) -> WorkspaceSnapshotResult<()> { + let id: Ulid = id.into(); + let node_idx = self.get_node_index_by_id(id)?; + self.working_copy.remove_node(node_idx); + self.working_copy.remove_node_id(id); + + Ok(()) + } + + pub fn remove_edge( + &mut self, + change_set: &ChangeSetPointer, + source_node_index: NodeIndex, + target_node_index: NodeIndex, + edge_kind: EdgeWeightKindDiscriminants, + ) -> WorkspaceSnapshotResult<()> { + Ok(self.working_copy.remove_edge( + change_set, + source_node_index, + target_node_index, + edge_kind, + )?) + } + + /// Perform [`Updates`](Update) using [`self`](WorkspaceSnapshot) as the "to rebase" graph and + /// another [`snapshot`](WorkspaceSnapshot) as the "onto" graph. + pub fn perform_updates( + &mut self, + to_rebase_change_set: &ChangeSetPointer, + onto: &mut WorkspaceSnapshot, + updates: &[Update], + ) -> WorkspaceSnapshotResult<()> { + Ok(self + .working_copy + .perform_updates(to_rebase_change_set, &onto.working_copy, updates)?) + } + + /// Mark whether a prop can be used as an input to a function. Props below + /// Maps and Arrays are not valid inputs. Must only be used when + /// "finalizing" a schema variant! 
+ pub fn mark_prop_as_able_to_be_used_as_prototype_arg( + &mut self, + node_index: NodeIndex, + ) -> WorkspaceSnapshotResult<()> { + self.working_copy + .update_node_weight(node_index, |node_weight| match node_weight { + NodeWeight::Prop(prop_inner) => { + prop_inner.set_can_be_used_as_prototype_arg(true); + Ok(()) + } + _ => Err(WorkspaceSnapshotGraphError::IncompatibleNodeTypes)?, + })?; + + Ok(()) + } + + pub fn ordering_node_for_container( + &self, + id: impl Into, + ) -> WorkspaceSnapshotResult> { + let idx = self.get_node_index_by_id(id)?; + Ok(self.working_copy.ordering_node_for_container(idx)?) + } + + pub fn ordered_children_for_node( + &self, + id: impl Into, + ) -> WorkspaceSnapshotResult>> { + let idx = self.get_node_index_by_id(id.into())?; + let mut result = vec![]; + Ok( + if let Some(idxs) = self.working_copy.ordered_children_for_node(idx)? { + for idx in idxs { + let id = self.get_node_weight(idx)?.id(); + result.push(id); + } + Some(result) + } else { + None + }, + ) + } +} diff --git a/lib/dal/src/workspace_snapshot/conflict.rs b/lib/dal/src/workspace_snapshot/conflict.rs new file mode 100644 index 0000000000..659175f76e --- /dev/null +++ b/lib/dal/src/workspace_snapshot/conflict.rs @@ -0,0 +1,32 @@ +use petgraph::stable_graph::NodeIndex; +use serde::Deserialize; +use serde::Serialize; + +/// Describe the type of conflict between the given locations in a +/// workspace graph. +#[remain::sorted] +#[derive(Debug, Copy, Clone, PartialEq, Eq, Deserialize, Serialize)] +pub enum Conflict { + ChildOrder { + onto: NodeIndex, + to_rebase: NodeIndex, + }, + ModifyRemovedItem(NodeIndex), + NodeContent { + onto: NodeIndex, + to_rebase: NodeIndex, + }, + RemoveModifiedItem { + container: NodeIndex, + removed_item: NodeIndex, + }, +} + +/// The [`NodeIndex`] of the location in the graph where a conflict occurs. +#[derive(Debug, Copy, Clone)] +pub struct ConflictLocation { + /// The location of the conflict in the "base" graph of the merge. 
+ pub onto: NodeIndex, + /// The location of the conflict in the graph that is attempting to be merged into "base". + pub to_rebase: NodeIndex, +} diff --git a/lib/dal/src/workspace_snapshot/content_address.rs b/lib/dal/src/workspace_snapshot/content_address.rs new file mode 100644 index 0000000000..92fc8a75a8 --- /dev/null +++ b/lib/dal/src/workspace_snapshot/content_address.rs @@ -0,0 +1,51 @@ +use content_store::ContentHash; +use serde::{Deserialize, Serialize}; +use strum::EnumDiscriminants; + +#[remain::sorted] +#[derive( + EnumDiscriminants, Debug, Serialize, Deserialize, Copy, Clone, PartialEq, Eq, strum::Display, +)] +#[strum_discriminants(derive(strum::Display, Serialize, Deserialize))] +/// The type of the object, and the content-addressable-storage address (content hash) +/// of the object itself. +pub enum ContentAddress { + ActionPrototype(ContentHash), + AttributePrototype(ContentHash), + Component(ContentHash), + ExternalProvider(ContentHash), + Func(ContentHash), + FuncArg(ContentHash), + InternalProvider(ContentHash), + JsonValue(ContentHash), + Prop(ContentHash), + Root, + Schema(ContentHash), + SchemaVariant(ContentHash), + Secret(ContentHash), + StaticArgumentValue(ContentHash), + ValidationPrototype(ContentHash), +} + +impl ContentAddress { + pub fn content_hash(&self) -> ContentHash { + match self { + ContentAddress::Root => None, + ContentAddress::ActionPrototype(id) + | ContentAddress::AttributePrototype(id) + | ContentAddress::Component(id) + | ContentAddress::ExternalProvider(id) + | ContentAddress::FuncArg(id) + | ContentAddress::Func(id) + | ContentAddress::InternalProvider(id) + | ContentAddress::JsonValue(id) + | ContentAddress::Prop(id) + | ContentAddress::Schema(id) + | ContentAddress::SchemaVariant(id) + | ContentAddress::Secret(id) + | ContentAddress::StaticArgumentValue(id) + | ContentAddress::ValidationPrototype(id) => Some(*id), + } + .unwrap_or_default() + } +} diff --git a/lib/dal/src/workspace_snapshot/edge_weight.rs 
b/lib/dal/src/workspace_snapshot/edge_weight.rs new file mode 100644 index 0000000000..8ffb48af23 --- /dev/null +++ b/lib/dal/src/workspace_snapshot/edge_weight.rs @@ -0,0 +1,131 @@ +//! Edges + +use chrono::{DateTime, Utc}; +use serde::{Deserialize, Serialize}; +use thiserror::Error; + +use crate::change_set_pointer::ChangeSetPointer; +use crate::workspace_snapshot::vector_clock::{VectorClock, VectorClockError, VectorClockId}; +use crate::ActionKind; + +use strum::EnumDiscriminants; + +#[derive(Debug, Error)] +pub enum EdgeWeightError { + #[error("Vector Clock error: {0}")] + VectorClock(#[from] VectorClockError), +} + +pub type EdgeWeightResult = Result; + +#[remain::sorted] +#[derive(Default, Debug, Serialize, Deserialize, Clone, PartialEq, Eq, Hash, EnumDiscriminants)] +#[strum_discriminants(derive(Serialize, Deserialize))] +pub enum EdgeWeightKind { + /// A function used by a [`SchemaVariant`] to perform an action that affects its resource + ActionPrototype(ActionKind), + /// A function defined for a secret defining [`SchemaVariant`] to be executed before funcs on + /// components that have a secret of that kind + AuthenticationPrototype, + /// An [`AttributeValue`] "contained" by another [`AttributeValue`], such as an entry in an + /// array/map, or a field of an object. The optional [`String`] represents the key of the entry + /// in a map. + Contain(Option), + /// Used to indicate parentage within frames. It does not dictate data flow. That is provided via + /// [`ComponentType`](crate::ComponentType). + /// + /// This replaces "Symbolic" edges and "Frame" sockets from the old engine. + FrameContains, + /// Used to record the order that the elements of a container should be presented in. + Ordering, + /// Connects the node at the Ordering edge directly to the things it orders. + Ordinal, + /// Used to link an attribute value to the prop that it is for. 
+ Prop, + /// An edge from a [`provider`](crate::provider) to an + /// [`AttributePrototype`](crate::AttributePrototype). The optional [`String`] is used for + /// maps, arrays and relevant container types to indicate which element the prototype is for. + Prototype(Option), + /// An edge from an [`AttributePrototype`][crate::AttributePrototype] to an + /// [`AttributePrototypeArgument`][crate::AttributePrototypeArgument]. + PrototypeArgument, + /// An edge from an + /// [`AttributePrototypeArgument`][crate::AttributePrototypeArgument] to the + /// source for the value for this argument + PrototypeArgumentValue, + /// Used when the target/destination of an edge is an [`InternalProvider`], or an + /// [`ExternalProvider`]. + Provider, + Proxy, + /// Indicates the "root" [`AttributeValue`](crate::AttributeValue) for a [`Component`](crate::Component). + /// + /// TODO(nick): in the future, this should be used for the "root" [`Prop`](crate::Prop) for a + /// [`SchemaVariant`](crate::SchemaVariant) as well. + Root, + /// Edge from component to input or output Socket's attribute value + Socket, + /// Workspaces "use" functions, modules, schemas. Schemas "use" schema variants. + /// Schema variants "use" props. Props "use" functions, and other props. Modules + /// "use" functions, schemas, and eventually(?) components. 
+ #[default] + Use, +} + +#[derive(Debug, Serialize, Deserialize, Clone, PartialEq, Eq)] +pub struct EdgeWeight { + kind: EdgeWeightKind, + vector_clock_first_seen: VectorClock, + vector_clock_write: VectorClock, +} + +impl EdgeWeight { + pub fn increment_vector_clocks( + &mut self, + change_set: &ChangeSetPointer, + ) -> EdgeWeightResult<()> { + self.vector_clock_write.inc(change_set.vector_clock_id())?; + + Ok(()) + } + + pub fn kind(&self) -> &EdgeWeightKind { + &self.kind + } + + pub fn mark_seen_at(&mut self, vector_clock_id: VectorClockId, seen_at: DateTime) { + if self + .vector_clock_first_seen + .entry_for(vector_clock_id) + .is_none() + { + self.vector_clock_first_seen + .inc_to(vector_clock_id, seen_at); + } + } + + pub fn new(change_set: &ChangeSetPointer, kind: EdgeWeightKind) -> EdgeWeightResult { + Ok(Self { + kind, + vector_clock_first_seen: VectorClock::new(change_set.vector_clock_id())?, + vector_clock_write: VectorClock::new(change_set.vector_clock_id())?, + }) + } + + pub fn new_with_incremented_vector_clocks( + &self, + change_set: &ChangeSetPointer, + ) -> EdgeWeightResult { + let mut new_weight = self.clone(); + new_weight.increment_vector_clocks(change_set)?; + + Ok(new_weight) + } + + pub fn vector_clock_first_seen(&self) -> &VectorClock { + &self.vector_clock_first_seen + } + + pub fn vector_clock_write(&self) -> &VectorClock { + &self.vector_clock_write + } +} diff --git a/lib/dal/src/workspace_snapshot/graph.rs b/lib/dal/src/workspace_snapshot/graph.rs new file mode 100644 index 0000000000..64a1996801 --- /dev/null +++ b/lib/dal/src/workspace_snapshot/graph.rs @@ -0,0 +1,2134 @@ +use chrono::Utc; +use content_store::{ContentHash, Store, StoreError}; +use petgraph::stable_graph::{EdgeReference, Edges}; +use petgraph::{algo, prelude::*, visit::DfsEvent}; +use serde::{Deserialize, Serialize}; +use std::collections::{HashMap, HashSet, VecDeque}; +use std::fs::File; +use std::io::Write; + +use telemetry::prelude::*; +use thiserror::Error; 
+use ulid::Ulid; + +use crate::change_set_pointer::{ChangeSetPointer, ChangeSetPointerError}; +use crate::workspace_snapshot::vector_clock::VectorClockId; +use crate::workspace_snapshot::{ + conflict::Conflict, + content_address::ContentAddress, + edge_weight::{EdgeWeight, EdgeWeightError, EdgeWeightKind, EdgeWeightKindDiscriminants}, + node_weight::{NodeWeight, NodeWeightError, OrderingNodeWeight}, + update::Update, +}; + +use crate::workspace_snapshot::node_weight::category_node_weight::CategoryNodeKind; +use crate::workspace_snapshot::node_weight::{CategoryNodeWeight, NodeWeightDiscriminants}; + +use crate::workspace_snapshot::content_address::ContentAddressDiscriminants; +/// Ensure [`NodeIndex`] is usable by external crates. +pub use petgraph::graph::NodeIndex; +pub use petgraph::Direction; + +mod tests; + +pub type LineageId = Ulid; + +#[allow(clippy::large_enum_variant)] +#[remain::sorted] +#[derive(Debug, Error)] +pub enum WorkspaceSnapshotGraphError { + #[error("Cannot compare ordering of container elements between ordered, and un-ordered container: {0:?}, {1:?}")] + CannotCompareOrderedAndUnorderedContainers(NodeIndex, NodeIndex), + #[error("could not find category node used by node with index {0:?}")] + CategoryNodeNotFound(NodeIndex), + #[error("ChangeSet error: {0}")] + ChangeSet(#[from] ChangeSetPointerError), + #[error("Unable to retrieve content for ContentHash")] + ContentMissingForContentHash, + #[error("Content store error: {0}")] + ContentStore(#[from] StoreError), + #[error("Action would create a graph cycle")] + CreateGraphCycle, + #[error("could not find the newly imported subgraph when performing updates")] + DestinationNotUpdatedWhenImportingSubgraph, + #[error("Edge does not exist for EdgeIndex: {0:?}")] + EdgeDoesNotExist(EdgeIndex), + #[error("EdgeWeight error: {0}")] + EdgeWeight(#[from] EdgeWeightError), + #[error("EdgeWeight not found")] + EdgeWeightNotFound, + #[error("Problem during graph traversal: {0:?}")] + 
GraphTraversal(petgraph::visit::DfsEvent), + #[error("Incompatible node types")] + IncompatibleNodeTypes, + #[error("Invalid value graph")] + InvalidValueGraph, + #[error("NodeWeight error: {0}")] + NodeWeight(#[from] NodeWeightError), + #[error("node weight not found")] + NodeWeightNotFound, + #[error("Node with ID {} not found", .0.to_string())] + NodeWithIdNotFound(Ulid), + #[error("No Prop found for NodeIndex {0:?}")] + NoPropFound(NodeIndex), + #[error("NodeIndex has too many Ordering children: {0:?}")] + TooManyOrderingForNode(NodeIndex), + #[error("NodeIndex has too many Prop children: {0:?}")] + TooManyPropForNode(NodeIndex), + #[error("Workspace Snapshot has conflicts and must be rebased")] + WorkspaceNeedsRebase, + #[error("Workspace Snapshot has conflicts")] + WorkspacesConflict, +} + +pub type WorkspaceSnapshotGraphResult = Result; + +#[derive(Default, Deserialize, Serialize, Clone)] +pub struct WorkspaceSnapshotGraph { + graph: StableDiGraph, + node_index_by_id: HashMap, + node_indices_by_lineage_id: HashMap>, + root_index: NodeIndex, +} + +impl std::fmt::Debug for WorkspaceSnapshotGraph { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + f.debug_struct("WorkspaceSnapshotGraph") + .field("root_index", &self.root_index) + .field("node_index_by_id", &self.node_index_by_id) + .field("graph", &self.graph) + .finish() + } +} + +impl WorkspaceSnapshotGraph { + pub fn new(change_set: &ChangeSetPointer) -> WorkspaceSnapshotGraphResult { + let mut graph: StableDiGraph = StableDiGraph::with_capacity(1, 0); + let root_index = graph.add_node(NodeWeight::new_content( + change_set, + change_set.generate_ulid()?, + ContentAddress::Root, + )?); + + Ok(Self { + root_index, + graph, + ..Default::default() + }) + } + + pub fn root(&self) -> NodeIndex { + self.root_index + } + + pub fn get_latest_node_idx_opt( + &self, + node_idx: NodeIndex, + ) -> WorkspaceSnapshotGraphResult> { + if !self.graph.contains_node(node_idx) { + return Ok(None); + } + + 
Ok(Some(self.get_latest_node_idx(node_idx)?)) + } + + #[inline(always)] + pub fn get_latest_node_idx( + &self, + node_idx: NodeIndex, + ) -> WorkspaceSnapshotGraphResult { + let node_id = self.get_node_weight(node_idx)?.id(); + self.get_node_index_by_id(node_id) + } + + pub fn add_edge( + &mut self, + from_node_index: NodeIndex, + edge_weight: EdgeWeight, + to_node_index: NodeIndex, + ) -> WorkspaceSnapshotGraphResult { + // Temporarily add the edge to the existing tree to see if it would create a cycle. + // Configured to run only in tests because it has a major perf impact otherwise + #[cfg(test)] + { + let temp_edge = + self.graph + .update_edge(from_node_index, to_node_index, edge_weight.clone()); + + let would_create_a_cycle = !self.is_acyclic_directed(); + self.graph.remove_edge(temp_edge); + if would_create_a_cycle { + return Err(WorkspaceSnapshotGraphError::CreateGraphCycle); + } + } + + // Because outgoing edges are part of a node's identity, we create a new "from" node + // as we are effectively writing to that node (we'll need to update the merkle tree + // hash), and everything in the graph should be treated as copy-on-write. + let new_from_node_index = self.copy_node_by_index(from_node_index)?; + + // Add the new edge to the new version of the "from" node. + let new_edge_index = + self.graph + .update_edge(new_from_node_index, to_node_index, edge_weight); + self.update_merkle_tree_hash(new_from_node_index)?; + + // Update the rest of the graph to reflect the new node/edge. + self.replace_references(from_node_index)?; + + Ok(new_edge_index) + } + + pub(crate) fn remove_node_id(&mut self, id: impl Into) { + self.node_index_by_id.remove(&id.into()); + } + + pub fn add_node(&mut self, node: NodeWeight) -> WorkspaceSnapshotGraphResult { + // Cache the node id and the lineage id; + let node_id = node.id(); + let lineage_id = node.lineage_id(); + + // Create the node and cache the index. 
+ let new_node_index = self.graph.add_node(node); + + // Update the accessor maps using the new index. + self.node_index_by_id.insert(node_id, new_node_index); + self.node_indices_by_lineage_id + .entry(lineage_id) + .and_modify(|set| { + set.insert(new_node_index); + }) + .or_insert_with(|| HashSet::from([new_node_index])); + self.update_merkle_tree_hash(new_node_index)?; + + Ok(new_node_index) + } + + pub fn add_category_node( + &mut self, + change_set: &ChangeSetPointer, + kind: CategoryNodeKind, + ) -> WorkspaceSnapshotGraphResult { + let inner_weight = CategoryNodeWeight::new(change_set, kind)?; + let new_node_index = self.add_node(NodeWeight::Category(inner_weight))?; + Ok(new_node_index) + } + + pub fn get_category_node( + &self, + source: Option, + kind: CategoryNodeKind, + ) -> WorkspaceSnapshotGraphResult<(Ulid, NodeIndex)> { + let source_index = match source { + Some(provided_source) => self.get_node_index_by_id(provided_source)?, + None => self.root_index, + }; + + // TODO(nick): ensure that two target category nodes of the same kind don't exist for the + // same source node. 
+ for edgeref in self.graph.edges_directed(source_index, Outgoing) { + let maybe_category_node_index = edgeref.target(); + let maybe_category_node_weight = self + .graph + .node_weight(maybe_category_node_index) + .ok_or(WorkspaceSnapshotGraphError::NodeWeightNotFound)?; + + if let NodeWeight::Category(category_node_weight) = maybe_category_node_weight { + if category_node_weight.kind() == kind { + return Ok((category_node_weight.id(), maybe_category_node_index)); + } + } + } + + Err(WorkspaceSnapshotGraphError::CategoryNodeNotFound( + source_index, + )) + } + + pub fn edges_directed( + &self, + node_index: NodeIndex, + direction: Direction, + ) -> Edges<'_, EdgeWeight, Directed, u32> { + self.graph.edges_directed(node_index, direction) + } + + pub fn edges_directed_for_edge_weight_kind( + &self, + node_index: NodeIndex, + direction: Direction, + edge_kind: EdgeWeightKindDiscriminants, + ) -> Vec> { + self.graph + .edges_directed(node_index, direction) + .filter(|edge_ref| edge_kind == edge_ref.weight().kind().into()) + .collect() + } + + pub fn nodes(&self) -> impl Iterator { + self.graph.node_indices().filter_map(|node_idx| { + self.graph + .node_weight(node_idx) + .map(|weight| (weight, node_idx)) + }) + } + + pub fn edges(&self) -> impl Iterator { + self.graph.edge_indices().filter_map(|edge_idx| { + self.graph.edge_weight(edge_idx).and_then(|weight| { + self.graph + .edge_endpoints(edge_idx) + .map(|(source, target)| (weight, source, target)) + }) + }) + } + + // TODO(nick): fix this clippy error. 
+ #[allow(clippy::type_complexity)] + pub fn add_ordered_edge( + &mut self, + change_set: &ChangeSetPointer, + from_node_index: NodeIndex, + edge_weight: EdgeWeight, + to_node_index: NodeIndex, + ) -> WorkspaceSnapshotGraphResult<(EdgeIndex, Option<(EdgeIndex, NodeIndex, NodeIndex)>)> { + let _start = std::time::Instant::now(); + let new_edge_index = self.add_edge(from_node_index, edge_weight, to_node_index)?; + + let from_node_index = self.get_latest_node_idx(from_node_index)?; + let to_node_index = self.get_latest_node_idx(to_node_index)?; + + // Find the ordering node of the "container" if there is one, and add the thing pointed to + // by the `to_node_index` to the ordering. Also point the ordering node at the thing with + // an `Ordinal` edge, so that Ordering nodes must be touched *after* the things they order + // in a depth first search + let maybe_ordinal_edge_information = if let Some(container_ordering_node_index) = + self.ordering_node_index_for_container(from_node_index)? + { + let ordinal_edge_index = self.add_edge( + container_ordering_node_index, + EdgeWeight::new(change_set, EdgeWeightKind::Ordinal)?, + to_node_index, + )?; + + let container_ordering_node_index = + self.get_latest_node_idx(container_ordering_node_index)?; + + if let NodeWeight::Ordering(previous_container_ordering_node_weight) = self + .graph + .node_weight(container_ordering_node_index) + .ok_or(WorkspaceSnapshotGraphError::NodeWeightNotFound)? 
+ { + let element_id = self + .node_index_to_id(to_node_index) + .ok_or(WorkspaceSnapshotGraphError::NodeWeightNotFound)?; + + let mut new_container_ordering_node_weight = + previous_container_ordering_node_weight.clone(); + new_container_ordering_node_weight.push_to_order(change_set, element_id)?; + self.add_node(NodeWeight::Ordering(new_container_ordering_node_weight))?; + self.replace_references(container_ordering_node_index)?; + } + + Some(( + ordinal_edge_index, + container_ordering_node_index, + to_node_index, + )) + } else { + None + }; + + Ok((new_edge_index, maybe_ordinal_edge_information)) + } + + pub fn add_ordered_node( + &mut self, + change_set: &ChangeSetPointer, + node: NodeWeight, + ) -> WorkspaceSnapshotGraphResult { + let new_node_index = self.add_node(node)?; + let ordering_node_index = + self.add_node(NodeWeight::Ordering(OrderingNodeWeight::new(change_set)?))?; + let edge_index = self.add_edge( + new_node_index, + EdgeWeight::new(change_set, EdgeWeightKind::Ordering)?, + ordering_node_index, + )?; + let (source, _) = self.edge_endpoints(edge_index)?; + Ok(source) + } + + pub async fn attribute_value_view( + &self, + content_store: &mut impl Store, + root_index: NodeIndex, + ) -> WorkspaceSnapshotGraphResult { + let mut view = serde_json::json![{}]; + let mut nodes_to_add = VecDeque::from([(root_index, "".to_string())]); + + while let Some((current_node_index, write_location)) = nodes_to_add.pop_front() { + let current_node_weight = self.get_node_weight(current_node_index)?; + let current_node_content: serde_json::Value = content_store + .get(¤t_node_weight.content_hash()) + .await? + .ok_or(WorkspaceSnapshotGraphError::ContentMissingForContentHash)?; + // We don't need to care what kind the prop is, since assigning a value via + // `pointer_mut` completely overwrites the existing value, regardless of any + // pre-existing data types. 
+ let view_pointer = match view.pointer_mut(&write_location) { + Some(pointer) => { + *pointer = current_node_content.clone(); + pointer + } + None => { + // This is an error, and really shouldn't ever happen. + dbg!(view, write_location, current_node_content); + todo!(); + } + }; + + if current_node_content.is_null() { + // If the value we just inserted is "null", then there shouldn't be any child + // values, so don't bother looking for them in the graph to be able to add + // them to the work queue. + continue; + } + + // Find the ordering if there is one, so we can add the children in the proper order. + if let Some(child_ordering) = self.ordered_children_for_node(current_node_index)? { + for (child_position_index, &child_node_index) in child_ordering.iter().enumerate() { + // `.enumerate()` gives us 1-indexed, but we need 0-indexed. + + // We insert a JSON `Null` as a "place holder" for the write location. We need + // it to exist to be able to get a `pointer_mut` to it on the next time around, + // but we don't really care what it is, since we're going to completely + // overwrite it anyway. + for edge in self + .graph + .edges_connecting(current_node_index, child_node_index) + { + let child_position = match edge.weight().kind() { + EdgeWeightKind::Contain(Some(key)) => { + view_pointer + .as_object_mut() + .ok_or(WorkspaceSnapshotGraphError::InvalidValueGraph)? + .insert(key.clone(), serde_json::json![null]); + key.clone() + } + EdgeWeightKind::Contain(None) => { + if current_node_content.is_array() { + view_pointer + .as_array_mut() + .ok_or(WorkspaceSnapshotGraphError::InvalidValueGraph)? + .push(serde_json::json![null]); + child_position_index.to_string() + } else { + // Get prop name + if let NodeWeight::Prop(prop_weight) = self.get_node_weight( + self.prop_node_index_for_node_index(child_node_index)? + .ok_or(WorkspaceSnapshotGraphError::NoPropFound( + child_node_index, + ))?, + )? 
{ + view_pointer + .as_object_mut() + .ok_or(WorkspaceSnapshotGraphError::InvalidValueGraph)? + .insert( + prop_weight.name().to_string(), + serde_json::json![null], + ); + prop_weight.name().to_string() + } else { + return Err(WorkspaceSnapshotGraphError::InvalidValueGraph); + } + } + } + _ => continue, + }; + let child_write_location = format!("{}/{}", write_location, child_position); + nodes_to_add.push_back((child_node_index, child_write_location)); + } + } + } else { + // The child nodes aren't explicitly ordered, so we'll need to come up with one of + // our own. We'll sort the nodes by their `NodeIndex`, which means that when a + // write last happened to them (or anywhere further towards the leaves) will + // determine their sorting in oldest to most recent order. + let mut child_index_to_position = HashMap::new(); + let mut child_indexes = Vec::new(); + let outgoing_edges = self.graph.edges_directed(current_node_index, Outgoing); + for edge_ref in outgoing_edges { + match edge_ref.weight().kind() { + EdgeWeightKind::Contain(Some(key)) => { + view_pointer + .as_object_mut() + .ok_or(WorkspaceSnapshotGraphError::InvalidValueGraph)? + .insert(key.clone(), serde_json::json![null]); + child_index_to_position.insert(edge_ref.target(), key.clone()); + child_indexes.push(edge_ref.target()); + } + EdgeWeightKind::Contain(None) => { + child_indexes.push(edge_ref.target()); + if current_node_content.is_array() { + view_pointer + .as_array_mut() + .ok_or(WorkspaceSnapshotGraphError::InvalidValueGraph)? + .push(serde_json::json![null]); + } else { + // Get prop name + if let NodeWeight::Prop(prop_weight) = self.get_node_weight( + self.prop_node_index_for_node_index(edge_ref.target())? + .ok_or(WorkspaceSnapshotGraphError::NoPropFound( + edge_ref.target(), + ))?, + )? { + view_pointer + .as_object_mut() + .ok_or(WorkspaceSnapshotGraphError::InvalidValueGraph)? 
+ .insert( + prop_weight.name().to_string(), + serde_json::json![null], + ); + child_index_to_position + .insert(edge_ref.target(), prop_weight.name().to_string()); + child_indexes.push(edge_ref.target()); + } else { + return Err(WorkspaceSnapshotGraphError::InvalidValueGraph); + } + } + } + _ => continue, + } + } + child_indexes.sort(); + + for (child_position_index, child_node_index) in child_indexes.iter().enumerate() { + if let Some(key) = child_index_to_position.get(child_node_index) { + nodes_to_add + .push_back((*child_node_index, format!("{}/{}", write_location, key))); + } else { + nodes_to_add.push_back(( + *child_node_index, + format!("{}/{}", write_location, child_position_index), + )); + } + } + } + } + + Ok(view) + } + + pub fn cleanup(&mut self) { + let start = tokio::time::Instant::now(); + + // We want to remove all of the "garbage" we've accumulated while operating on the graph. + // Anything that is no longer reachable from the current `self.root_index` should be + // removed as it is no longer referenced by anything in the current version of the graph. + // Fortunately, we don't need to walk the graph to find out if something is reachable from + // the root, since `has_path_connecting` is slow (depth-first search). Any node that does + // *NOT* have any incoming edges (aside from the `self.root_index` node) is not reachable, + // by definition. Finding the list of nodes with no incoming edges is very fast. If we + // remove all nodes (that are not the `self.root_index` node) that do not have any + // incoming edges, and we keep doing this until the only one left is the `self.root_index` + // node, then all remaining nodes are reachable from `self.root_index`. 
+ let mut old_root_ids: HashSet; + loop { + old_root_ids = self + .graph + .externals(Incoming) + .filter(|node_id| *node_id != self.root_index) + .collect(); + if old_root_ids.is_empty() { + break; + } + + for stale_node_index in &old_root_ids { + self.graph.remove_node(*stale_node_index); + } + } + info!("Removed stale NodeIndex: {:?}", start.elapsed()); + + // After we retain the nodes, collect the remaining ids and indices. + let remaining_node_ids: HashSet = self.graph.node_weights().map(|n| n.id()).collect(); + info!( + "Got remaining node IDs: {} ({:?})", + remaining_node_ids.len(), + start.elapsed() + ); + let remaining_node_indices: HashSet = self.graph.node_indices().collect(); + info!( + "Got remaining NodeIndex: {} ({:?})", + remaining_node_indices.len(), + start.elapsed() + ); + + // Cleanup the node index by id map. + self.node_index_by_id + .retain(|id, _index| remaining_node_ids.contains(id)); + info!("Removed stale node_index_by_id: {:?}", start.elapsed()); + + // Cleanup the node indices by lineage id map. 
+ self.node_indices_by_lineage_id + .iter_mut() + .for_each(|(_lineage_id, node_indices)| { + node_indices.retain(|node_index| remaining_node_indices.contains(node_index)); + }); + self.node_indices_by_lineage_id + .retain(|_lineage_id, node_indices| !node_indices.is_empty()); + info!( + "Removed stale node_indices_by_lineage_id: {:?}", + start.elapsed() + ); + } + + pub fn find_equivalent_node( + &self, + id: Ulid, + lineage_id: Ulid, + ) -> WorkspaceSnapshotGraphResult> { + let maybe_equivalent_node = match self.get_node_index_by_id(id) { + Ok(node_index) => { + let node_indices = self.get_node_index_by_lineage(lineage_id); + if node_indices.contains(&node_index) { + Some(node_index) + } else { + None + } + } + Err(WorkspaceSnapshotGraphError::NodeWithIdNotFound(_)) => None, + Err(e) => return Err(e), + }; + Ok(maybe_equivalent_node) + } + + fn copy_node_by_index( + &mut self, + node_index_to_copy: NodeIndex, + ) -> WorkspaceSnapshotGraphResult { + self.add_node(self.get_node_weight(node_index_to_copy)?.clone()) + } + + pub fn detect_conflicts_and_updates( + &self, + to_rebase_vector_clock_id: VectorClockId, + onto: &WorkspaceSnapshotGraph, + onto_vector_clock_id: VectorClockId, + ) -> WorkspaceSnapshotGraphResult<(Vec, Vec)> { + let mut conflicts: Vec = Vec::new(); + let mut updates: Vec = Vec::new(); + if let Err(traversal_error) = + petgraph::visit::depth_first_search(&onto.graph, Some(onto.root_index), |event| { + self.detect_conflicts_and_updates_process_dfs_event( + to_rebase_vector_clock_id, + onto, + onto_vector_clock_id, + event, + &mut conflicts, + &mut updates, + ) + }) + { + return Err(WorkspaceSnapshotGraphError::GraphTraversal(traversal_error)); + }; + + Ok((conflicts, updates)) + } + + fn detect_conflicts_and_updates_process_dfs_event( + &self, + to_rebase_vector_clock_id: VectorClockId, + onto: &WorkspaceSnapshotGraph, + onto_vector_clock_id: VectorClockId, + event: DfsEvent, + conflicts: &mut Vec, + updates: &mut Vec, + ) -> Result, 
petgraph::visit::DfsEvent> { + match event { + DfsEvent::Discover(onto_node_index, _) => { + let onto_node_weight = onto.get_node_weight(onto_node_index).map_err(|err| { + dbg!( + "Unable to get NodeWeight for onto NodeIndex {:?}: {}", + onto_node_index, + err, + ); + event + })?; + + let mut to_rebase_node_indexes = HashSet::new(); + if onto_node_index == onto.root_index { + // There can only be one (valid/current) `ContentAddress::Root` at any + // given moment, and the `lineage_id` isn't really relevant as it's not + // globally stable (even though it is locally stable). This matters as we + // may be dealing with a `WorkspaceSnapshotGraph` that is coming to us + // externally from a module that we're attempting to import. The external + // `WorkspaceSnapshotGraph` will be `self`, and the "local" one will be + // `onto`. + to_rebase_node_indexes.insert(self.root_index); + } else { + // Only retain node indexes... or indices... if they are part of the current + // graph. There may still be garbage from previous updates to the graph + // laying around. + let mut potential_to_rebase_node_indexes = + self.get_node_index_by_lineage(onto_node_weight.lineage_id()); + potential_to_rebase_node_indexes + .retain(|node_index| self.has_path_to_root(*node_index)); + to_rebase_node_indexes.extend(potential_to_rebase_node_indexes); + + // TODO(nick): detect category nodes with a different lineage. We will likely + // need to check incoming edges in one graph and then look for outgoing edges in + // the other graph. + // // Since category nodes may be created from scratch from a different workspace, + // // they may have different lineage ids. We still want to consider the same + // // category kind as an equivalent node, even though it might have a different + // // lineage id. 
+ // if let NodeWeight::Category(onto_category_node_weight) = onto_node_weight { + // onto_category_node_weight + // } + // let category_node_kind = onto_category_node_weight.kind(); + // let (_, to_rebase_category_node_index) = + // self.get_category_node(Some(onto_category_node_weight.id()), category_node_kind).map_err(|err| { + // error!( + // "Unable to get to rebase Category node for kind {:?} from onto {:?}: {}", + // onto_category_node_weight.kind(), onto, err, + // ); + // event + // })?; + // to_rebase_node_indexes.insert(to_rebase_category_node_index); + // } + } + + // We'll lazily populate these, since we don't know if we'll need it at all, and + // we definitely don't want to be re-fetching this information inside the loop + // below, as it will be identical every time. + let mut onto_ordering_node_index = None; + + // If everything with the same `lineage_id` is identical, then we can prune the + // graph traversal, and avoid unnecessary lookups/comparisons. + let mut any_content_with_lineage_has_changed = false; + + for to_rebase_node_index in to_rebase_node_indexes { + let to_rebase_node_weight = + self.get_node_weight(to_rebase_node_index).map_err(|err| { + error!( + "Unable to get to_rebase NodeWeight for NodeIndex {:?}: {}", + to_rebase_node_index, err, + ); + event + })?; + + if onto_node_weight.merkle_tree_hash() + == to_rebase_node_weight.merkle_tree_hash() + { + // If the merkle tree hashes are the same, then the entire sub-graph is + // identical, and we don't need to check any further. + debug!( + "onto {} and to rebase {} merkle tree hashes are the same", + onto_node_weight.id(), + to_rebase_node_weight.id() + ); + continue; + } + any_content_with_lineage_has_changed = true; + + // Check if there's a difference in the node itself (and whether it is a + // conflict if there is a difference). 
+ if onto_node_weight.node_hash() != to_rebase_node_weight.node_hash() { + if to_rebase_node_weight + .vector_clock_write() + .is_newer_than(onto_node_weight.vector_clock_write()) + { + // The existing node (`to_rebase`) has changes, but has already seen + // all of the changes in `onto`. There is no conflict, and there is + // nothing to update. + } else if onto_node_weight + .vector_clock_write() + .is_newer_than(to_rebase_node_weight.vector_clock_write()) + { + // `onto` has changes, but has already seen all of the changes in + // `to_rebase`. There is no conflict, and we should update to use the + // `onto` node. + updates.push(Update::ReplaceSubgraph { + onto: onto_node_index, + to_rebase: to_rebase_node_index, + }); + } else { + // There are changes on both sides that have not + // been seen by the other side; this is a conflict. + // There may also be other conflicts in the outgoing + // relationships, the downstream nodes, or both. + + // If the nodes in question are ordering nodes, the + // conflict we care about is the ChildOrder + // conflict, and will have already been detected. + // The content on the ordering node is just the + // ordering of the edges, so what matters if there + // is a conflict in order, not if the hashes differ + // because there is an extra edge (but the rest of + // the edges are ordered the same) + if !matches!( + (onto_node_weight, to_rebase_node_weight), + ( + NodeWeight::Ordering(OrderingNodeWeight { .. }), + NodeWeight::Ordering(OrderingNodeWeight { .. 
}) + ) + ) { + conflicts.push(Conflict::NodeContent { + to_rebase: to_rebase_node_index, + onto: onto_node_index, + }); + } + } + } + + if onto_ordering_node_index.is_none() { + onto_ordering_node_index = onto + .ordering_node_index_for_container(onto_node_index) + .map_err(|_| event)?; + } + let to_rebase_ordering_node_index = self + .ordering_node_index_for_container(to_rebase_node_index) + .map_err(|_| event)?; + + match (to_rebase_ordering_node_index, onto_ordering_node_index) { + (None, None) => { + // Neither is ordered. The potential conflict could be because one + // or more elements changed, because elements were added/removed, + // or a combination of these. + // + // We need to check for all of these using the outgoing edges from + // the containers, since we can't rely on an ordering child to + // contain all the information to determine ordering/addition/removal. + // + // Eventually, this will only happen on the root node itself, since + // Objects, Maps, and Arrays should all have an ordering, for at + // least display purposes. 
+ debug!( + "Found what appears to be two unordered containers: onto {:?}, to_rebase {:?}", + onto_node_index, to_rebase_node_index, + ); + debug!( + "Comparing unordered containers: {:?}, {:?}", + onto_node_index, to_rebase_node_index + ); + + let (container_conflicts, container_updates) = self + .find_unordered_container_membership_conflicts_and_updates( + to_rebase_vector_clock_id, + to_rebase_node_index, + onto, + onto_vector_clock_id, + onto_node_index, + ) + .map_err(|err| { + error!("Unable to find unordered container membership conflicts and updates for onto container NodeIndex {:?} and to_rebase container NodeIndex {:?}: {}", onto_node_index, to_rebase_node_index, err); + event + })?; + + updates.extend(container_updates); + conflicts.extend(container_conflicts); + } + (None, Some(_)) | (Some(_), None) => { + // We're trying to compare an ordered container with an unordered one, + // which isn't something that logically makes sense, so we've likely + // started comparing incompatible things. 
+ warn!( + "Attempting to compare an ordered, and an unordered container: onto {:?}, to_rebase {:?}", + onto_node_index, to_rebase_node_index, + ); + return Err(event); + } + (Some(to_rebase_ordering_node_index), Some(onto_ordering_node_index)) => { + debug!( + "Comparing ordered containers: {:?}, {:?}", + onto_node_index, to_rebase_node_index + ); + let (container_conflicts, container_updates) = self + .find_ordered_container_membership_conflicts_and_updates( + to_rebase_vector_clock_id, + to_rebase_node_index, + to_rebase_ordering_node_index, + onto, + onto_vector_clock_id, + onto_node_index, + onto_ordering_node_index, + ) + .map_err(|_| event)?; + + updates.extend(container_updates); + conflicts.extend(container_conflicts); + + return Ok(petgraph::visit::Control::Continue); + } + } + } + + if any_content_with_lineage_has_changed { + // There was at least one thing with a merkle tree hash difference, so we need + // to examine further down the tree to see where the difference(s) are, and + // where there are conflicts, if there are any. + Ok(petgraph::visit::Control::Continue) + } else { + // Everything to be rebased is identical, so there's no need to examine the + // rest of the tree looking for differences & conflicts that won't be there. + Ok(petgraph::visit::Control::Prune) + } + } + DfsEvent::TreeEdge(_, _) + | DfsEvent::BackEdge(_, _) + | DfsEvent::CrossForwardEdge(_, _) + | DfsEvent::Finish(_, _) => { + // These events are all ignored, since we handle looking at edges as we encounter + // the node(s) the edges are coming from (Outgoing edges). + Ok(petgraph::visit::Control::Continue) + } + } + } + + #[allow(dead_code)] + pub fn dot(&self) { + // NOTE(nick): copy the output and execute this on macOS. It will create a file in the + // process and open a new tab in your browser. 
+ // ``` + // pbpaste | dot -Tsvg -o foo.svg && open foo.svg + // ``` + let current_root_weight = self.get_node_weight(self.root_index).unwrap(); + println!( + "Root Node Weight: {current_root_weight:?}\n{:?}", + petgraph::dot::Dot::with_config(&self.graph, &[petgraph::dot::Config::EdgeNoLabel]) + ); + } + + #[allow(dead_code)] + pub fn tiny_dot_to_file(&self, suffix: Option<&str>) { + let suffix = suffix.unwrap_or("dot"); + // NOTE(nick): copy the output and execute this on macOS. It will create a file in the + // process and open a new tab in your browser. + // ``` + // GRAPHFILE=; cat $GRAPHFILE.txt | dot -Tsvg -o processed-$GRAPHFILE.svg; open processed-$GRAPHFILE.svg + // ``` + let dot = petgraph::dot::Dot::with_attr_getters( + &self.graph, + &[ + petgraph::dot::Config::NodeNoLabel, + petgraph::dot::Config::EdgeNoLabel, + ], + &|_, edgeref| { + let discrim: EdgeWeightKindDiscriminants = edgeref.weight().kind().into(); + let color = match discrim { + EdgeWeightKindDiscriminants::ActionPrototype => "black", + EdgeWeightKindDiscriminants::AuthenticationPrototype => "black", + EdgeWeightKindDiscriminants::Contain => "blue", + EdgeWeightKindDiscriminants::FrameContains => "black", + EdgeWeightKindDiscriminants::Ordering => "gray", + EdgeWeightKindDiscriminants::Ordinal => "gray", + EdgeWeightKindDiscriminants::Prop => "orange", + EdgeWeightKindDiscriminants::Prototype => "green", + EdgeWeightKindDiscriminants::PrototypeArgument => "green", + EdgeWeightKindDiscriminants::PrototypeArgumentValue => "green", + EdgeWeightKindDiscriminants::Provider => "red", + EdgeWeightKindDiscriminants::Proxy => "gray", + EdgeWeightKindDiscriminants::Root => "black", + EdgeWeightKindDiscriminants::Socket => "purple", + EdgeWeightKindDiscriminants::Use => "black", + }; + + match edgeref.weight().kind() { + EdgeWeightKind::Contain(key) => { + let key = key + .as_deref() + .map(|key| format!(" ({key}")) + .unwrap_or("".into()); + format!( + "label = \"{discrim:?}{key}\"\nfontcolor = 
{color}\ncolor = {color}" + ) + } + _ => format!("label = \"{discrim:?}\"\nfontcolor = {color}\ncolor = {color}"), + } + }, + &|_, (node_index, node_weight)| { + let (label, color) = match node_weight { + NodeWeight::Content(weight) => { + let discrim = ContentAddressDiscriminants::from(weight.content_address()); + let color = match discrim { + ContentAddressDiscriminants::ActionPrototype => "green", + ContentAddressDiscriminants::AttributePrototype => "green", + ContentAddressDiscriminants::Component => "black", + ContentAddressDiscriminants::ExternalProvider => "red", + ContentAddressDiscriminants::Func => "black", + ContentAddressDiscriminants::FuncArg => "black", + ContentAddressDiscriminants::InternalProvider => "red", + ContentAddressDiscriminants::JsonValue => "fuchsia", + ContentAddressDiscriminants::Prop => "orange", + ContentAddressDiscriminants::Root => "black", + ContentAddressDiscriminants::Schema => "black", + ContentAddressDiscriminants::SchemaVariant => "black", + ContentAddressDiscriminants::Secret => "black", + ContentAddressDiscriminants::StaticArgumentValue => "green", + ContentAddressDiscriminants::ValidationPrototype => "black", + }; + (discrim.to_string(), color) + } + NodeWeight::AttributePrototypeArgument(apa) => ( + format!( + "Attribute Prototype Argument{}", + apa.targets() + .map(|targets| format!( + "\nsource: {}\nto: {}", + targets.source_component_id, targets.destination_component_id + )) + .unwrap_or("".to_string()) + ), + "green", + ), + NodeWeight::AttributeValue(_) => ("Attribute Value".to_string(), "blue"), + NodeWeight::Category(category_node_weight) => match category_node_weight.kind() + { + CategoryNodeKind::Component => { + ("Components (Category)".to_string(), "black") + } + CategoryNodeKind::Func => ("Funcs (Category)".to_string(), "black"), + CategoryNodeKind::Schema => ("Schemas (Category)".to_string(), "black"), + CategoryNodeKind::Secret => ("Secrets (Category)".to_string(), "black"), + }, + 
NodeWeight::Func(func_node_weight) => { + (format!("Func\n{}", func_node_weight.name()), "black") + } + NodeWeight::FuncArgument(func_arg_node_weight) => ( + format!("Func Arg\n{}", func_arg_node_weight.name()), + "black", + ), + NodeWeight::Ordering(_) => { + (NodeWeightDiscriminants::Ordering.to_string(), "gray") + } + NodeWeight::Prop(prop_node_weight) => { + (format!("Prop\n{}", prop_node_weight.name()), "orange") + } + }; + let color = color.to_string(); + let id = node_weight.id(); + format!( + "label = \"\n\n{label}\n{node_index:?}\n{id}\n\n\"\nfontcolor = {color}\ncolor = {color}", + ) + }, + ); + let filename_no_extension = format!("{}-{}", Ulid::new().to_string(), suffix); + let mut file = File::create(format!("/home/zacharyhamm/{filename_no_extension}.txt")) + .expect("could not create file"); + file.write_all(format!("{dot:?}").as_bytes()) + .expect("could not write file"); + println!("dot output stored in file (filename without extension: {filename_no_extension})"); + } + + #[allow(clippy::too_many_arguments)] + fn find_ordered_container_membership_conflicts_and_updates( + &self, + to_rebase_vector_clock_id: VectorClockId, + to_rebase_container_index: NodeIndex, + to_rebase_ordering_index: NodeIndex, + onto: &WorkspaceSnapshotGraph, + onto_vector_clock_id: VectorClockId, + onto_container_index: NodeIndex, + onto_ordering_index: NodeIndex, + ) -> WorkspaceSnapshotGraphResult<(Vec, Vec)> { + let mut updates = Vec::new(); + let mut conflicts = Vec::new(); + + let onto_ordering = match onto.get_node_weight(onto_ordering_index)? { + NodeWeight::Ordering(ordering) => ordering, + _ => return Err(WorkspaceSnapshotGraphError::IncompatibleNodeTypes), + }; + let to_rebase_ordering = match self.get_node_weight(to_rebase_ordering_index)? 
{ + NodeWeight::Ordering(ordering) => ordering, + _ => return Err(WorkspaceSnapshotGraphError::IncompatibleNodeTypes), + }; + + if onto_ordering.order() == to_rebase_ordering.order() { + // Both contain the same items, in the same order. No conflicts, and nothing + // to update. + return Ok((conflicts, updates)); + } else if onto_ordering + .vector_clock_write() + .is_newer_than(to_rebase_ordering.vector_clock_write()) + { + let onto_ordering_set: HashSet = onto_ordering.order().iter().copied().collect(); + let to_rebase_ordering_set: HashSet = + to_rebase_ordering.order().iter().copied().collect(); + let new_items: HashSet = onto_ordering_set + .difference(&to_rebase_ordering_set) + .copied() + .collect(); + let removed_items: HashSet = to_rebase_ordering_set + .difference(&onto_ordering_set) + .copied() + .collect(); + + // Find which `other` container items have the new ordering IDs so we can add edges + // from the `to_rebase` container to them (and create them in `to_rebase` if they don't + // already exist). + for onto_container_item_index in onto + .graph + .neighbors_directed(onto_container_index, Outgoing) + { + let onto_container_item_weight = onto.get_node_weight(onto_container_item_index)?; + if new_items.contains(&onto_container_item_weight.id()) { + for edge in onto + .graph + .edges_connecting(onto_container_index, onto_container_item_index) + { + updates.push(Update::NewEdge { + source: to_rebase_container_index, + destination: onto_container_item_index, + edge_weight: edge.weight().clone(), + }); + } + } + } + + // Remove the edges from the `to_rebase` container to the items removed in `onto`. We + // don't need to worry about removing the items themselves as they will be garbage + // collected when we drop all items that are not reachable from `to_rebase.root_index` + // if they are no longer referenced by anything. 
+ for to_rebase_container_item_index in self + .graph + .neighbors_directed(to_rebase_container_index, Outgoing) + { + let to_rebase_container_item_weight = + self.get_node_weight(to_rebase_container_item_index)?; + if removed_items.contains(&to_rebase_container_item_weight.id()) { + for edgeref in self + .graph + .edges_connecting(to_rebase_container_index, to_rebase_container_item_index) + { + updates.push(Update::RemoveEdge { + source: edgeref.source(), + destination: edgeref.target(), + edge_kind: edgeref.weight().kind().into(), + }); + } + } + } + } else if to_rebase_ordering + .vector_clock_write() + .is_newer_than(onto_ordering.vector_clock_write()) + { + // We already have everything in `onto` as part of `to_rebase`. Nothing needs + // updating, and there are no conflicts. + } else { + // Both `onto` and `to_rebase` have changes that the other has not incorporated. We + // need to find out what the changes are to see what needs to be updated, and what + // conflicts. + let onto_ordering_set: HashSet = onto_ordering.order().iter().copied().collect(); + let to_rebase_ordering_set: HashSet = + to_rebase_ordering.order().iter().copied().collect(); + + // Make sure that both `onto` and `to_rebase` have the same relative ordering for the + // nodes they have in common. If they don't, then that means that the order changed on + // at least one of them. 
+ let common_items: HashSet = onto_ordering_set + .intersection(&to_rebase_ordering_set) + .copied() + .collect(); + let common_onto_items = { + let mut items = onto_ordering.order().clone(); + items.retain(|i| common_items.contains(i)); + items + }; + let common_to_rebase_items = { + let mut items = to_rebase_ordering.order().clone(); + items.retain(|i| common_items.contains(i)); + items + }; + if common_onto_items != common_to_rebase_items { + conflicts.push(Conflict::ChildOrder { + onto: onto_ordering_index, + to_rebase: to_rebase_ordering_index, + }); + } + + let only_onto_items: HashSet = onto_ordering_set + .difference(&to_rebase_ordering_set) + .copied() + .collect(); + let only_to_rebase_items: HashSet = to_rebase_ordering_set + .difference(&onto_ordering_set) + .copied() + .collect(); + + let mut only_to_rebase_item_indexes = HashMap::new(); + for to_rebase_edgeref in self + .graph + .edges_directed(to_rebase_container_index, Outgoing) + { + let dest_node_weight = self.get_node_weight(to_rebase_edgeref.target())?; + if only_to_rebase_items.contains(&dest_node_weight.id()) { + only_to_rebase_item_indexes + .insert(dest_node_weight.id(), to_rebase_edgeref.target()); + } + } + + for only_to_rebase_item in only_to_rebase_items { + let only_to_rebase_item_index = *only_to_rebase_item_indexes + .get(&only_to_rebase_item) + .ok_or(WorkspaceSnapshotGraphError::NodeWithIdNotFound( + only_to_rebase_item, + ))?; + for to_rebase_edgeref in self + .graph + .edges_connecting(to_rebase_container_index, only_to_rebase_item_index) + { + if to_rebase_edgeref + .weight() + .vector_clock_first_seen() + .entry_for(onto_vector_clock_id) + .is_none() + { + // `only_to_rebase_item` is new: Edge in `to_rebase` does not have a "First Seen" for `onto`. + } else if self + .get_node_weight(only_to_rebase_item_index)? + .vector_clock_write() + .entry_for(to_rebase_vector_clock_id) + .is_some() + { + // Entry was deleted in `onto`. 
If we have also modified the entry, then + // there's a conflict. + conflicts.push(Conflict::ModifyRemovedItem(only_to_rebase_item_index)); + } else { + // Entry was deleted in `onto`, and has not been modified in `to_rebase`: + // Remove the edge. + updates.push(Update::RemoveEdge { + source: to_rebase_edgeref.source(), + destination: to_rebase_edgeref.target(), + edge_kind: to_rebase_edgeref.weight().kind().into(), + }); + } + } + } + + let mut only_onto_item_indexes = HashMap::new(); + for onto_edgeref in onto.graph.edges_directed(onto_container_index, Outgoing) { + let dest_node_weight = onto.get_node_weight(onto_edgeref.target())?; + if only_onto_items.contains(&dest_node_weight.id()) { + only_onto_item_indexes.insert(dest_node_weight.id(), onto_edgeref.target()); + } + } + + let onto_root_seen_as_of = self + .get_node_weight(self.root_index)? + .vector_clock_recently_seen() + .entry_for(onto_vector_clock_id); + for only_onto_item in only_onto_items { + let only_onto_item_index = *only_onto_item_indexes.get(&only_onto_item).ok_or( + WorkspaceSnapshotGraphError::NodeWithIdNotFound(only_onto_item), + )?; + for onto_edgeref in onto + .graph + .edges_connecting(onto_container_index, only_onto_item_index) + { + // `only_onto_item` is new: + // - "First seen" of edge for `onto` > "Seen As Of" on root for `onto` in + // `to_rebase`. + if let Some(onto_first_seen) = onto_edgeref + .weight() + .vector_clock_first_seen() + .entry_for(onto_vector_clock_id) + { + if let Some(root_seen_as_of) = onto_root_seen_as_of { + if onto_first_seen > root_seen_as_of { + // The edge for the item was created more recently than the last + // state we knew of from `onto`, which means that the item is + // "new". We can't have removed something that we didn't know + // existed in the first place. 
+ updates.push(Update::NewEdge { + source: to_rebase_container_index, + destination: onto_edgeref.target(), + edge_weight: onto_edgeref.weight().clone(), + }); + } + } + } else if let Ok(onto_item_node_weight) = + onto.get_node_weight(only_onto_item_index) + { + if let Some(root_seen_as_of) = onto_root_seen_as_of { + if onto_item_node_weight + .vector_clock_write() + .has_entries_newer_than(root_seen_as_of) + { + // The item removed in `to_rebase` has been modified in `onto` + // since we last knew the state of `onto`: This is a conflict, as + // we don't know if the removal is still intended given the new + // state of the item. + conflicts.push(Conflict::RemoveModifiedItem { + container: to_rebase_container_index, + removed_item: only_onto_item_index, + }); + } + } + } + } + } + } + + Ok((conflicts, updates)) + } + + fn find_unordered_container_membership_conflicts_and_updates( + &self, + to_rebase_vector_clock_id: VectorClockId, + to_rebase_container_index: NodeIndex, + onto: &WorkspaceSnapshotGraph, + onto_vector_clock_id: VectorClockId, + onto_container_index: NodeIndex, + ) -> WorkspaceSnapshotGraphResult<(Vec, Vec)> { + #[derive(Debug, Clone, Hash, PartialEq, Eq)] + struct UniqueEdgeInfo { + pub kind: EdgeWeightKind, + pub target_lineage: Ulid, + } + + #[derive(Debug, Copy, Clone)] + struct EdgeInfo { + pub source_node_index: NodeIndex, + pub target_node_index: NodeIndex, + pub edge_kind: EdgeWeightKindDiscriminants, + pub edge_index: EdgeIndex, + } + + let mut updates = Vec::new(); + let mut conflicts = Vec::new(); + + let mut to_rebase_edges = HashMap::::new(); + for edgeref in self + .graph + .edges_directed(to_rebase_container_index, Outgoing) + { + let target_node_weight = self.get_node_weight(edgeref.target())?; + to_rebase_edges.insert( + UniqueEdgeInfo { + kind: edgeref.weight().kind().clone(), + target_lineage: target_node_weight.lineage_id(), + }, + EdgeInfo { + source_node_index: edgeref.source(), + target_node_index: edgeref.target(), + 
edge_kind: edgeref.weight().kind().into(), + edge_index: edgeref.id(), + }, + ); + } + + let mut onto_edges = HashMap::::new(); + for edgeref in onto.graph.edges_directed(onto_container_index, Outgoing) { + let target_node_weight = onto.get_node_weight(edgeref.target())?; + onto_edges.insert( + UniqueEdgeInfo { + kind: edgeref.weight().kind().clone(), + target_lineage: target_node_weight.lineage_id(), + }, + EdgeInfo { + source_node_index: edgeref.source(), + target_node_index: edgeref.target(), + edge_kind: edgeref.weight().kind().into(), + edge_index: edgeref.id(), + }, + ); + } + + let only_to_rebase_edges = { + let mut unique_edges = to_rebase_edges.clone(); + for key in onto_edges.keys() { + unique_edges.remove(key); + } + unique_edges + }; + let only_onto_edges = { + let mut unique_edges = onto_edges.clone(); + for key in to_rebase_edges.keys() { + unique_edges.remove(key); + } + unique_edges + }; + + debug!("only to rebase edges: {:?}", &only_to_rebase_edges); + debug!("only onto edges: {:?}", &only_onto_edges); + + let root_seen_as_of_onto = self + .get_node_weight(self.root_index)? + .vector_clock_recently_seen() + .entry_for(onto_vector_clock_id); + + let onto_last_saw_to_rebase = onto + .get_node_weight(onto.root_index)? + .vector_clock_recently_seen() + .entry_for(to_rebase_vector_clock_id); + + for only_to_rebase_edge_info in only_to_rebase_edges.values() { + let to_rebase_edge_weight = self + .graph + .edge_weight(only_to_rebase_edge_info.edge_index) + .ok_or(WorkspaceSnapshotGraphError::EdgeWeightNotFound)?; + let to_rebase_item_weight = + self.get_node_weight(only_to_rebase_edge_info.target_node_index)?; + + // If `onto` has never seen this edge, then it's new, and there are no conflicts, and + // no updates. 
+ if to_rebase_edge_weight + .vector_clock_first_seen() + .entry_for(to_rebase_vector_clock_id) + <= onto_last_saw_to_rebase + { + if to_rebase_item_weight + .vector_clock_write() + .entry_for(to_rebase_vector_clock_id) + >= onto_last_saw_to_rebase + { + // Item has been modified in `onto` (`onto` item write vector clock > "seen as + // of" for `onto` entry in `to_rebase` root): Conflict (ModifyRemovedItem) + conflicts.push(Conflict::ModifyRemovedItem( + only_to_rebase_edge_info.target_node_index, + )) + } else { + // Item not modified & removed by `onto`: No conflict; Update::RemoveEdge + updates.push(Update::RemoveEdge { + source: only_to_rebase_edge_info.source_node_index, + destination: only_to_rebase_edge_info.target_node_index, + edge_kind: only_to_rebase_edge_info.edge_kind, + }); + } + } else { + debug!( + "edge weight entry for to rebase vector clock id {:?} is older than onto last saw {:?}", to_rebase_edge_weight.vector_clock_first_seen().entry_for(to_rebase_vector_clock_id), onto_last_saw_to_rebase); + } + } + + // - Items unique to `onto`: + for only_onto_edge_info in only_onto_edges.values() { + let onto_edge_weight = onto + .graph + .edge_weight(only_onto_edge_info.edge_index) + .ok_or(WorkspaceSnapshotGraphError::EdgeWeightNotFound)?; + let onto_item_weight = onto.get_node_weight(only_onto_edge_info.target_node_index)?; + + if let Some(onto_first_seen) = onto_edge_weight + .vector_clock_first_seen() + .entry_for(onto_vector_clock_id) + { + // From "onto_first_seen", we know "when was the first time onto saw this edge?". + match root_seen_as_of_onto { + Some(root_seen_as_of) if onto_first_seen <= root_seen_as_of => {} + _ => { + // Edge first seen by `onto` > "seen as of" on `to_rebase` graph for `onto`'s entry on + // root node: Item is new. + // Other case where item is new: the `to_rebase` has never seen anything from + // the `onto` change set. All the items are new. 
+ updates.push(Update::NewEdge { + source: to_rebase_container_index, + destination: only_onto_edge_info.target_node_index, + edge_weight: onto_edge_weight.clone(), + }); + } + } + } else if let Some(root_seen_as_of) = root_seen_as_of_onto { + if onto_item_weight + .vector_clock_write() + .has_entries_newer_than(root_seen_as_of) + { + // Item write vector clock has entries > "seen as of" on `to_rebase` graph for + // `onto`'s entry on root node: Conflict (RemoveModifiedItem) + conflicts.push(Conflict::RemoveModifiedItem { + container: to_rebase_container_index, + removed_item: only_onto_edge_info.target_node_index, + }); + } + } + // Item removed by `to_rebase`: No conflict & no update necessary. + } + + // - Sets same: No conflicts/updates + Ok((conflicts, updates)) + } + + #[inline(always)] + pub(crate) fn get_node_index_by_id( + &self, + id: impl Into, + ) -> WorkspaceSnapshotGraphResult { + let id = id.into(); + + self.node_index_by_id + .get(&id) + .copied() + .ok_or(WorkspaceSnapshotGraphError::NodeWithIdNotFound(id)) + } + + fn get_node_index_by_lineage(&self, lineage_id: Ulid) -> HashSet { + self.node_indices_by_lineage_id + .get(&lineage_id) + .cloned() + .unwrap_or_default() + } + + pub fn node_index_to_id(&self, node_idx: NodeIndex) -> Option { + self.graph + .node_weight(node_idx) + .map(|node_weight| node_weight.id()) + } + + pub fn get_node_weight( + &self, + node_index: NodeIndex, + ) -> WorkspaceSnapshotGraphResult<&NodeWeight> { + self.graph + .node_weight(node_index) + .ok_or(WorkspaceSnapshotGraphError::NodeWeightNotFound) + } + + fn get_node_weight_mut( + &mut self, + node_index: NodeIndex, + ) -> WorkspaceSnapshotGraphResult<&mut NodeWeight> { + self.graph + .node_weight_mut(node_index) + .ok_or(WorkspaceSnapshotGraphError::NodeWeightNotFound) + } + + fn has_path_to_root(&self, node: NodeIndex) -> bool { + algo::has_path_connecting(&self.graph, self.root_index, node, None) + } + + pub fn import_subgraph( + &mut self, + other: 
&WorkspaceSnapshotGraph, + root_index: NodeIndex, + ) -> WorkspaceSnapshotGraphResult<()> { + let mut dfs = petgraph::visit::DfsPostOrder::new(&other.graph, root_index); + while let Some(node_index_to_copy) = dfs.next(&other.graph) { + let node_weight_to_copy = other.get_node_weight(node_index_to_copy)?.clone(); + let node_weight_id = node_weight_to_copy.id(); + let node_weight_lineage_id = node_weight_to_copy.lineage_id(); + + // The following assumes there are no conflicts between "self" and "other". If there + // are conflicts between them, we shouldn't be running updates. + let node_index = if let Some(equivalent_node_index) = + self.find_equivalent_node(node_weight_id, node_weight_lineage_id)? + { + let equivalent_node_weight = self.get_node_weight(equivalent_node_index)?; + if equivalent_node_weight + .vector_clock_write() + .is_newer_than(node_weight_to_copy.vector_clock_write()) + { + equivalent_node_index + } else { + let new_node_index = self.add_node(node_weight_to_copy)?; + + self.replace_references(equivalent_node_index)?; + self.get_latest_node_idx(new_node_index)? + } + } else { + self.add_node(node_weight_to_copy)? + }; + + for edge in other.graph.edges_directed(node_index_to_copy, Outgoing) { + let target_id = other.get_node_weight(edge.target())?.id(); + let latest_target = self.get_node_index_by_id(target_id)?; + self.graph + .update_edge(node_index, latest_target, edge.weight().clone()); + } + } + + Ok(()) + } + + #[allow(dead_code)] + fn is_acyclic_directed(&self) -> bool { + // Using this because "is_cyclic_directed" is recursive. 
+ algo::toposort(&self.graph, None).is_ok() + } + + #[allow(dead_code)] + fn is_on_path_between(&self, start: NodeIndex, end: NodeIndex, node: NodeIndex) -> bool { + algo::has_path_connecting(&self.graph, start, node, None) + && algo::has_path_connecting(&self.graph, node, end, None) + } + + pub fn mark_graph_seen( + &mut self, + vector_clock_id: VectorClockId, + ) -> WorkspaceSnapshotGraphResult<()> { + let seen_at = Utc::now(); + for edge in self.graph.edge_weights_mut() { + edge.mark_seen_at(vector_clock_id, seen_at); + } + for node in self.graph.node_weights_mut() { + node.mark_seen_at(vector_clock_id, seen_at); + } + + Ok(()) + } + + pub fn node_count(&self) -> usize { + self.graph.node_count() + } + + /// Returns an `Option>`. If there is an ordering node, then the return will be a + /// [`Some`], where the [`Vec`] is populated with the [`NodeIndex`] of the nodes specified by + /// the ordering node, in the order defined by the ordering node. If there is not an ordering + /// node, then the return will be [`None`]. + pub fn ordered_children_for_node( + &self, + container_node_index: NodeIndex, + ) -> WorkspaceSnapshotGraphResult>> { + let mut ordered_child_indexes = Vec::new(); + if let Some(container_ordering_index) = + self.ordering_node_index_for_container(container_node_index)? + { + if let NodeWeight::Ordering(ordering_weight) = + self.get_node_weight(container_ordering_index)? + { + for ordered_id in ordering_weight.order() { + ordered_child_indexes.push( + *self + .node_index_by_id + .get(ordered_id) + .ok_or(WorkspaceSnapshotGraphError::NodeWithIdNotFound(*ordered_id))?, + ); + } + } + } else { + return Ok(None); + } + + Ok(Some(ordered_child_indexes)) + } + + pub fn ordering_node_for_container( + &self, + container_node_index: NodeIndex, + ) -> WorkspaceSnapshotGraphResult> { + Ok( + if let Some(NodeWeight::Ordering(ordering_node)) = self + .ordering_node_index_for_container(container_node_index)? 
+ .and_then(|node_index| self.graph.node_weight(node_index)) + { + Some(ordering_node.clone()) + } else { + None + }, + ) + } + + pub fn ordering_node_index_for_container( + &self, + container_node_index: NodeIndex, + ) -> WorkspaceSnapshotGraphResult> { + let onto_ordering_node_indexes = + ordering_node_indexes_for_node_index(self, container_node_index); + if onto_ordering_node_indexes.len() > 1 { + error!( + "Too many ordering nodes found for container NodeIndex {:?}", + container_node_index + ); + return Err(WorkspaceSnapshotGraphError::TooManyOrderingForNode( + container_node_index, + )); + } + Ok(onto_ordering_node_indexes.first().copied()) + } + + pub fn prop_node_index_for_node_index( + &self, + node_index: NodeIndex, + ) -> WorkspaceSnapshotGraphResult> { + let prop_node_indexes = prop_node_indexes_for_node_index(self, node_index); + if prop_node_indexes.len() > 1 { + error!("Too many prop nodes found for NodeIndex {:?}", node_index); + return Err(WorkspaceSnapshotGraphError::TooManyPropForNode(node_index)); + } + Ok(prop_node_indexes.first().copied()) + } + + pub(crate) fn remove_node(&mut self, node_index: NodeIndex) { + self.graph.remove_node(node_index); + } + + /// [`StableGraph`] guarantees the stability of [`NodeIndex`] across removals, however there + /// are **NO** guarantees around the stability of [`EdgeIndex`] across removals. If + /// [`Self::cleanup()`] has been called, then any [`EdgeIndex`] found before + /// [`Self::cleanup()`] has run should be considered invalid. 
+ pub(crate) fn remove_edge( + &mut self, + change_set: &ChangeSetPointer, + source_node_index: NodeIndex, + target_node_index: NodeIndex, + edge_kind: EdgeWeightKindDiscriminants, + ) -> WorkspaceSnapshotGraphResult<()> { + let source_node_index = self.get_latest_node_idx(source_node_index)?; + let target_node_index = self.get_latest_node_idx(target_node_index)?; + + self.copy_node_by_index(source_node_index)?; + self.replace_references(source_node_index)?; + // replace references may copy the node again to a new index + let source_node_index = self.get_latest_node_idx(source_node_index)?; + + self.inner_remove_edge(source_node_index, target_node_index, edge_kind); + + if let Some(previous_container_ordering_node_index) = + self.ordering_node_index_for_container(source_node_index)? + { + let element_id = self + .node_index_to_id(target_node_index) + .ok_or(WorkspaceSnapshotGraphError::NodeWeightNotFound)?; + + if let NodeWeight::Ordering(previous_container_ordering_node_weight) = self + .graph + .node_weight(previous_container_ordering_node_index) + .ok_or(WorkspaceSnapshotGraphError::NodeWeightNotFound)? + { + let mut new_container_ordering_node_weight = + previous_container_ordering_node_weight.clone(); + + // We only want to update the ordering of the container if we removed an edge to + // one of the ordered relationships. + if new_container_ordering_node_weight.remove_from_order(change_set, element_id)? 
{ + self.inner_remove_edge( + previous_container_ordering_node_index, + target_node_index, + EdgeWeightKindDiscriminants::Ordinal, + ); + + self.add_node(NodeWeight::Ordering(new_container_ordering_node_weight))?; + self.replace_references(previous_container_ordering_node_index)?; + } + } + } + + let source_node_index = self.get_latest_node_idx(source_node_index)?; + let mut work_queue = VecDeque::from([source_node_index]); + + while let Some(node_index) = work_queue.pop_front() { + self.update_merkle_tree_hash( + // If we updated the ordering node, that means we've invalidated the container's + // NodeIndex (new_source_node_index), so we need to find the new NodeIndex to be able + // to update the container's merkle tree hash. + node_index, + )?; + + for edge_ref in self.graph.edges_directed(node_index, Incoming) { + work_queue.push_back(edge_ref.source()); + } + } + + Ok(()) + } + + fn inner_remove_edge( + &mut self, + source_node_index: NodeIndex, + target_node_index: NodeIndex, + edge_kind: EdgeWeightKindDiscriminants, + ) { + let mut edges_to_remove = vec![]; + for edgeref in self + .graph + .edges_connecting(source_node_index, target_node_index) + { + if edge_kind == edgeref.weight().kind().into() { + edges_to_remove.push(edgeref.id()); + } + } + for edge_to_remove in edges_to_remove { + self.graph.remove_edge(edge_to_remove); + } + } + + pub fn edge_endpoints( + &self, + edge_index: EdgeIndex, + ) -> WorkspaceSnapshotGraphResult<(NodeIndex, NodeIndex)> { + let (source, destination) = self + .graph + .edge_endpoints(edge_index) + .ok_or(WorkspaceSnapshotGraphError::EdgeDoesNotExist(edge_index))?; + Ok((source, destination)) + } + + /// Replace references should be called when a node has been changed and copied into the graph. + /// It will use the original_node_index to find the most up to date version of the new node, + /// and replace all edges that point to that old node with edges pointing to the new node. 
+ /// Because the graph is treated as an immutable, copy-on-write structure, this means walking + /// up the graph to the root and copying all nodes that have edges that point to the + /// original_node_index, and all nodes that have edges that point to *those* parent nodes, + /// etc, until we've processed the entire parent tree of the original node. + pub fn replace_references( + &mut self, + original_node_index: NodeIndex, + ) -> WorkspaceSnapshotGraphResult<()> { + // Climb from the original node, up to root, rewriting outgoing edges along the way. But we + // have to be sure to climb to root once for each sibling node that we encounter as we + // walk up to root. + let mut outer_queue = VecDeque::from([original_node_index]); + + while let Some(old_node_index) = outer_queue.pop_front() { + let mut work_queue = VecDeque::from([old_node_index]); + + while let Some(old_node_index) = work_queue.pop_front() { + for edge_ref in self.edges_directed(old_node_index, Direction::Incoming) { + work_queue.push_back(edge_ref.source()); + outer_queue.push_back(edge_ref.source()) + } + + let latest_node_idx = self.get_latest_node_idx(old_node_index)?; + let new_node_index = if latest_node_idx != old_node_index { + latest_node_idx + } else { + self.copy_node_by_index(old_node_index)? + }; + + // Find all outgoing edges weights and find the edge targets. + let mut edges_to_create = Vec::new(); + for edge_ref in self.graph.edges_directed(old_node_index, Outgoing) { + edges_to_create.push(( + edge_ref.weight().clone(), + edge_ref.target(), + edge_ref.id(), + )); + } + + // Make copies of these edges where the source is the new node index and the + // destination is one of the following... + // - If an entry exists in `old_to_new_node_indices` for the destination node index, + // use the value of the entry (the destination was affected by the replacement, + // and needs to use the new node index to reflect this). 
+ // - There is no entry in `old_to_new_node_indices`; use the same destination node + // index as the old edge (the destination was *NOT* affected by the replacement, + // and does not have any new information to reflect). + for (edge_weight, destination_node_index, edge_idx) in edges_to_create { + // Need to directly add the edge, without going through `self.add_edge` to avoid + // infinite recursion, and because we're the place doing all the book keeping + // that we'd be interested in happening from `self.add_edge`. + let destination_node_index = + self.get_latest_node_idx(destination_node_index)?; + + self.graph.remove_edge(edge_idx); + + self.graph + .update_edge(new_node_index, destination_node_index, edge_weight); + } + + self.update_merkle_tree_hash(new_node_index)?; + } + } + + // Use the new version of the old root node as our root node. + self.root_index = self.get_latest_node_idx(self.root_index)?; + + Ok(()) + } + + pub fn update_content( + &mut self, + change_set: &ChangeSetPointer, + id: Ulid, + new_content_hash: ContentHash, + ) -> WorkspaceSnapshotGraphResult<()> { + let original_node_index = self.get_node_index_by_id(id)?; + let new_node_index = self.copy_node_by_index(original_node_index)?; + let node_weight = self.get_node_weight_mut(new_node_index)?; + node_weight.increment_vector_clock(change_set)?; + node_weight.new_content_hash(new_content_hash)?; + + self.replace_references(original_node_index)?; + Ok(()) + } + + pub fn update_order( + &mut self, + change_set: &ChangeSetPointer, + container_id: Ulid, + new_order: Vec, + ) -> WorkspaceSnapshotGraphResult<()> { + let original_node_index = self + .ordering_node_index_for_container(self.get_node_index_by_id(container_id)?)? 
+ .ok_or(WorkspaceSnapshotGraphError::NodeWeightNotFound)?; + let new_node_index = self.copy_node_by_index(original_node_index)?; + let node_weight = self.get_node_weight_mut(new_node_index)?; + node_weight.set_order(change_set, new_order)?; + + self.replace_references(original_node_index)?; + Ok(()) + } + + fn update_merkle_tree_hash( + &mut self, + node_index_to_update: NodeIndex, + ) -> WorkspaceSnapshotGraphResult<()> { + let mut hasher = ContentHash::hasher(); + hasher.update( + self.get_node_weight(node_index_to_update)? + .node_hash() + .to_string() + .as_bytes(), + ); + + // Need to make sure that ordered containers have their ordered children in the + // order specified by the ordering graph node. + let explicitly_ordered_children = self + .ordered_children_for_node(node_index_to_update)? + .unwrap_or_default(); + + // Need to make sure the unordered neighbors are added to the hash in a stable order to + // ensure the merkle tree hash is identical for identical trees. + let mut unordered_neighbors = Vec::new(); + for neighbor_node in self + .graph + .neighbors_directed(node_index_to_update, Outgoing) + { + // Only add the neighbor if it's not one of the ones with an explicit ordering. + if !explicitly_ordered_children.contains(&neighbor_node) { + let neighbor_id = self.get_node_weight(neighbor_node)?.id(); + unordered_neighbors.push((neighbor_id, neighbor_node)); + } + } + // We'll sort the neighbors by the ID in the NodeWeight, as that will result in more stable + // results than if we sorted by the NodeIndex itself. + unordered_neighbors.sort_by_cached_key(|(id, _index)| *id); + // It's not important whether the explicitly ordered children are first or last, as long as + // they are always in that position, and are always in the sequence specified by the + // container's Ordering node. 
+ let mut ordered_neighbors = + Vec::with_capacity(explicitly_ordered_children.len() + unordered_neighbors.len()); + ordered_neighbors.extend(explicitly_ordered_children); + ordered_neighbors.extend::>( + unordered_neighbors + .iter() + .map(|(_id, index)| *index) + .collect(), + ); + + for neighbor_node in ordered_neighbors { + hasher.update( + self.graph + .node_weight(neighbor_node) + .ok_or(WorkspaceSnapshotGraphError::NodeWeightNotFound)? + .merkle_tree_hash() + .to_string() + .as_bytes(), + ); + + // The edge(s) between `node_index_to_update`, and `neighbor_node` potentially encode + // important information related to the "identity" of `node_index_to_update`. + for connecting_edgeref in self + .graph + .edges_connecting(node_index_to_update, neighbor_node) + { + match connecting_edgeref.weight().kind() { + // This is the key for an entry in a map. + EdgeWeightKind::Contain(Some(key)) => hasher.update(key.as_bytes()), + + // This is the kind of the action. + EdgeWeightKind::ActionPrototype(kind) => { + hasher.update(kind.to_string().as_bytes()) + } + + // This is the key representing an element in a container type corresponding + // to an AttributePrototype + EdgeWeightKind::Prototype(Some(key)) => hasher.update(key.as_bytes()), + + // Nothing to do, as these EdgeWeightKind do not encode extra information + // in the edge itself. 
+ EdgeWeightKind::AuthenticationPrototype + | EdgeWeightKind::Contain(None) + | EdgeWeightKind::FrameContains + | EdgeWeightKind::PrototypeArgument + | EdgeWeightKind::PrototypeArgumentValue + | EdgeWeightKind::Provider + | EdgeWeightKind::Ordering + | EdgeWeightKind::Ordinal + | EdgeWeightKind::Prop + | EdgeWeightKind::Prototype(None) + | EdgeWeightKind::Proxy + | EdgeWeightKind::Root + | EdgeWeightKind::Socket + | EdgeWeightKind::Use => {} + } + } + } + + let new_node_weight = self + .graph + .node_weight_mut(node_index_to_update) + .ok_or(WorkspaceSnapshotGraphError::NodeWeightNotFound)?; + new_node_weight.set_merkle_tree_hash(hasher.finalize()); + + Ok(()) + } + + /// Perform [`Updates`](Update) using [`self`](WorkspaceSnapshotGraph) as the "to rebase" graph + /// and a provided graph as the "onto" graph. + pub fn perform_updates( + &mut self, + to_rebase_change_set: &ChangeSetPointer, + onto: &WorkspaceSnapshotGraph, + updates: &[Update], + ) -> WorkspaceSnapshotGraphResult<()> { + for update in updates { + match update { + Update::NewEdge { + source, + destination, + edge_weight, + } => { + let updated_source = self.get_latest_node_idx(*source)?; + let destination = self.find_in_self_or_create_using_onto(*destination, onto)?; + + self.add_edge(updated_source, edge_weight.clone(), destination)?; + } + Update::RemoveEdge { + source, + destination, + edge_kind, + } => { + let updated_source = self.get_latest_node_idx(*source)?; + let destination = self.get_latest_node_idx(*destination)?; + self.remove_edge( + to_rebase_change_set, + updated_source, + destination, + *edge_kind, + )?; + } + Update::ReplaceSubgraph { + onto: onto_subgraph_root, + to_rebase: to_rebase_subgraph_root, + } => { + let updated_to_rebase = self.get_latest_node_idx(*to_rebase_subgraph_root)?; + self.find_in_self_or_create_using_onto(*onto_subgraph_root, onto)?; + self.replace_references(updated_to_rebase)?; + } + } + } + Ok(()) + } + + /// Update node weight in place with a lambda. 
Use with caution. Generally + /// we treat node weights as immutable and replace them by creating a new + /// node with a new node weight and replacing references to point to the new + /// node. + pub(crate) fn update_node_weight( + &mut self, + node_idx: NodeIndex, + lambda: L, + ) -> WorkspaceSnapshotGraphResult<()> + where + L: FnOnce(&mut NodeWeight) -> WorkspaceSnapshotGraphResult<()>, + { + let node_weight = self + .graph + .node_weight_mut(node_idx) + .ok_or(WorkspaceSnapshotGraphError::NodeWeightNotFound)?; + + lambda(node_weight)?; + + Ok(()) + } + + /// Given the node index for a node in other, find if a node exists in self that has the same + /// id as the node found in other. + fn find_latest_idx_in_self_from_other_idx( + &mut self, + other: &WorkspaceSnapshotGraph, + other_idx: NodeIndex, + ) -> WorkspaceSnapshotGraphResult> { + let other_id = other.get_node_weight(other_idx)?.id(); + + Ok(self.get_node_index_by_id(other_id).ok()) + } + + /// Find in self where self is the "to rebase" side or create using "onto". + fn find_in_self_or_create_using_onto( + &mut self, + unchecked: NodeIndex, + onto: &WorkspaceSnapshotGraph, + ) -> WorkspaceSnapshotGraphResult { + let unchecked_node_weight = onto.get_node_weight(unchecked)?; + + let found_or_created = { + let equivalent_node = if let Some(found) = + self.find_latest_idx_in_self_from_other_idx(onto, unchecked)? + { + Some(found) + } else { + self.find_equivalent_node( + unchecked_node_weight.id(), + unchecked_node_weight.lineage_id(), + )? + }; + + match equivalent_node { + Some(found_equivalent_node) => { + let found_equivalent_node_weight = + self.get_node_weight(found_equivalent_node)?; + if found_equivalent_node_weight.merkle_tree_hash() + != unchecked_node_weight.merkle_tree_hash() + { + self.import_subgraph(onto, unchecked)?; + self.find_latest_idx_in_self_from_other_idx(onto, unchecked)? + .ok_or(WorkspaceSnapshotGraphError::NodeWeightNotFound)? 
+ } else { + found_equivalent_node + } + } + None => { + self.import_subgraph(onto, unchecked)?; + self.find_latest_idx_in_self_from_other_idx(onto, unchecked)? + .ok_or(WorkspaceSnapshotGraphError::NodeWeightNotFound)? + } + } + }; + Ok(found_or_created) + } +} + +fn ordering_node_indexes_for_node_index( + snapshot: &WorkspaceSnapshotGraph, + node_index: NodeIndex, +) -> Vec { + snapshot + .graph + .edges_directed(node_index, Outgoing) + .filter_map(|edge_reference| { + if edge_reference.weight().kind() == &EdgeWeightKind::Ordering + && matches!( + snapshot.get_node_weight(edge_reference.target()), + Ok(NodeWeight::Ordering(_)) + ) + { + return Some(edge_reference.target()); + } + + None + }) + .collect() +} + +fn prop_node_indexes_for_node_index( + snapshot: &WorkspaceSnapshotGraph, + node_index: NodeIndex, +) -> Vec { + snapshot + .graph + .edges_directed(node_index, Outgoing) + .filter_map(|edge_reference| { + if edge_reference.weight().kind() == &EdgeWeightKind::Prop + && matches!( + snapshot.get_node_weight(edge_reference.target()), + Ok(NodeWeight::Prop(_)) + ) + { + return Some(edge_reference.target()); + } + None + }) + .collect() +} diff --git a/lib/dal/src/workspace_snapshot/graph/tests.rs b/lib/dal/src/workspace_snapshot/graph/tests.rs new file mode 100644 index 0000000000..acf3a7aaea --- /dev/null +++ b/lib/dal/src/workspace_snapshot/graph/tests.rs @@ -0,0 +1,5678 @@ +mod rebase; + +#[cfg(test)] +mod test { + use content_store::ContentHash; + use petgraph::graph::NodeIndex; + use petgraph::visit::EdgeRef; + use petgraph::Outgoing; + use pretty_assertions_sorted::assert_eq; + use std::collections::HashMap; + use std::collections::HashSet; + + use crate::change_set_pointer::ChangeSetPointer; + use crate::workspace_snapshot::conflict::Conflict; + use crate::workspace_snapshot::content_address::ContentAddress; + use crate::workspace_snapshot::edge_weight::{ + EdgeWeight, EdgeWeightKind, EdgeWeightKindDiscriminants, + }; + use 
crate::workspace_snapshot::node_weight::NodeWeight; + use crate::workspace_snapshot::update::Update; + use crate::WorkspaceSnapshotGraph; + use crate::{ComponentId, FuncId, PropId, PropKind, SchemaId, SchemaVariantId}; + + #[derive(Debug, PartialEq)] + struct ConflictsAndUpdates { + conflicts: Vec, + updates: Vec, + } + + #[test] + fn new() { + let change_set = ChangeSetPointer::new_local().expect("Unable to create ChangeSet"); + let change_set = &change_set; + let graph = WorkspaceSnapshotGraph::new(change_set) + .expect("Unable to create WorkspaceSnapshotGraph"); + assert!(graph.is_acyclic_directed()); + } + + #[test] + fn multiply_parented_nodes() { + // All edges are outgoing from top to bottom except e to u + // + // root node---->t--->u--->v + // | ^ + // | | + // r ------ | + // / \ | | + // a b | | + // \ / \ | | + // c | | | + // / | | | | + // | d <- | | + // | | | | + // ->e<------ | + // | | + // ---------------- + // + // Edge from e to u mimics a function edge from a prop through a prototype to a function + // There are a few other edges to "u" that are not represented in the drawing above. + // + + let nodes = ["r", "t", "u", "v", "a", "b", "c", "d", "e"]; + let edges = [ + (None, "r"), + (None, "t"), + (Some("t"), "u"), + (Some("u"), "v"), + (Some("r"), "a"), + (Some("r"), "b"), + (Some("r"), "e"), + (Some("a"), "c"), + (Some("b"), "c"), + (Some("c"), "d"), + (Some("b"), "d"), + (Some("d"), "e"), + (Some("c"), "e"), + (Some("e"), "u"), + (Some("c"), "u"), + (Some("a"), "u"), + (Some("a"), "b"), + ]; + + let change_set = ChangeSetPointer::new_local().expect("Unable to create ChangeSet"); + let change_set = &change_set; + let mut graph = WorkspaceSnapshotGraph::new(change_set) + .expect("Unable to create WorkspaceSnapshotGraph"); + + let mut node_id_map = HashMap::new(); + + for node in nodes { + // "props" here are just nodes that are easy to create and render the name on the dot + // output. there is no domain modeling in this test. 
+ let node_id = change_set.generate_ulid().expect("Unable to generate Ulid"); + let prop_node_weight = NodeWeight::new_prop( + change_set, + node_id, + PropKind::Object, + node, + ContentHash::new(node.as_bytes()), + ) + .expect("create prop node weight"); + graph + .add_node(prop_node_weight) + .expect("Unable to add prop"); + + node_id_map.insert(node, node_id); + } + + for (source, target) in edges { + let source = match source { + None => graph.root_index, + Some(node) => graph + .get_node_index_by_id( + node_id_map + .get(node) + .copied() + .expect("source node should have an id"), + ) + .expect("get node index by id"), + }; + + let target = graph + .get_node_index_by_id( + node_id_map + .get(target) + .copied() + .expect("target node should have an id"), + ) + .expect("get node index by id"); + + graph + .add_edge( + source, + EdgeWeight::new(change_set, EdgeWeightKind::Use).expect("create edge weight"), + target, + ) + .expect("add edge"); + } + + graph.cleanup(); + + for (source, target) in edges { + let source_idx = match source { + None => graph.root_index, + Some(node) => graph + .get_node_index_by_id( + node_id_map + .get(node) + .copied() + .expect("source node should have an id"), + ) + .expect("get node index by id"), + }; + + let target_idx = graph + .get_node_index_by_id( + node_id_map + .get(target) + .copied() + .expect("target node should have an id"), + ) + .expect("get node index by id"); + + assert!( + graph + .edges_directed(source_idx, Outgoing) + .any(|edge_ref| edge_ref.target() == target_idx), + "An edge from {} to {} should exist", + source.unwrap_or("root"), + target + ); + } + + for (_, id) in node_id_map.iter() { + let idx_for_node = graph + .get_node_index_by_id(*id) + .expect("able to get idx by id"); + graph + .get_node_weight(idx_for_node) + .expect("node with weight in graph"); + } + } + + #[test] + fn add_nodes_and_edges() { + let change_set = ChangeSetPointer::new_local().expect("Unable to create ChangeSet"); + let change_set 
= &change_set; + let mut graph = WorkspaceSnapshotGraph::new(change_set) + .expect("Unable to create WorkspaceSnapshotGraph"); + + let schema_id = change_set.generate_ulid().expect("Unable to generate Ulid"); + let schema_index = graph + .add_node( + NodeWeight::new_content( + change_set, + schema_id, + ContentAddress::Schema(ContentHash::new( + SchemaId::generate().to_string().as_bytes(), + )), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add schema"); + let schema_variant_id = change_set.generate_ulid().expect("Unable to generate Ulid"); + let schema_variant_index = graph + .add_node( + NodeWeight::new_content( + change_set, + schema_variant_id, + ContentAddress::SchemaVariant(ContentHash::new( + SchemaVariantId::generate().to_string().as_bytes(), + )), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add schema variant"); + let component_id = change_set.generate_ulid().expect("Unable to generate Ulid"); + let component_index = graph + .add_node( + NodeWeight::new_content( + change_set, + component_id, + ContentAddress::Component(ContentHash::new( + ComponentId::generate().to_string().as_bytes(), + )), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add component"); + + graph + .add_edge( + graph.root_index, + EdgeWeight::new(change_set, EdgeWeightKind::Use) + .expect("Unable to create EdgeWeight"), + component_index, + ) + .expect("Unable to add root -> component edge"); + graph + .add_edge( + graph.root_index, + EdgeWeight::new(change_set, EdgeWeightKind::Use) + .expect("Unable to create EdgeWeight"), + schema_index, + ) + .expect("Unable to add root -> schema edge"); + graph + .add_edge( + graph + .get_node_index_by_id(schema_id) + .expect("Unable to get NodeIndex"), + EdgeWeight::new(change_set, EdgeWeightKind::Use) + .expect("Unable to create EdgeWeight"), + schema_variant_index, + ) + .expect("Unable to add schema -> schema variant edge"); + graph + .add_edge( + graph + 
.get_node_index_by_id(component_id) + .expect("Unable to get NodeIndex"), + EdgeWeight::new(change_set, EdgeWeightKind::Use) + .expect("Unable to create EdgeWeight"), + graph + .get_node_index_by_id(schema_variant_id) + .expect("Unable to get NodeIndex"), + ) + .expect("Unable to add component -> schema variant edge"); + + let func_id = change_set.generate_ulid().expect("Unable to generate Ulid"); + let func_index = graph + .add_node( + NodeWeight::new_content( + change_set, + func_id, + ContentAddress::Func(ContentHash::new( + FuncId::generate().to_string().as_bytes(), + )), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add func"); + let prop_id = change_set.generate_ulid().expect("Unable to generate Ulid"); + let prop_index = graph + .add_node( + NodeWeight::new_content( + change_set, + prop_id, + ContentAddress::Prop(ContentHash::new( + PropId::generate().to_string().as_bytes(), + )), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add prop"); + + graph + .add_edge( + graph.root_index, + EdgeWeight::new(change_set, EdgeWeightKind::Use) + .expect("Unable to create EdgeWeight"), + func_index, + ) + .expect("Unable to add root -> func edge"); + graph + .add_edge( + graph + .get_node_index_by_id(schema_variant_id) + .expect("Unable to get NodeIndex"), + EdgeWeight::new(change_set, EdgeWeightKind::Use) + .expect("Unable to create EdgeWeight"), + prop_index, + ) + .expect("Unable to add schema variant -> prop edge"); + graph + .add_edge( + graph + .get_node_index_by_id(prop_id) + .expect("Unable to get NodeIndex"), + EdgeWeight::new(change_set, EdgeWeightKind::Use) + .expect("Unable to create EdgeWeight"), + graph + .get_node_index_by_id(func_id) + .expect("Unable to get NodeIndex"), + ) + .expect("Unable to add prop -> func edge"); + + assert!(graph.is_acyclic_directed()); + } + + #[test] + fn cyclic_failure() { + let change_set = ChangeSetPointer::new_local().expect("Unable to create ChangeSet"); + let change_set = 
&change_set; + let mut graph = WorkspaceSnapshotGraph::new(change_set) + .expect("Unable to create WorkspaceSnapshotGraph"); + + let schema_id = change_set.generate_ulid().expect("Unable to generate Ulid"); + let initial_schema_node_index = graph + .add_node( + NodeWeight::new_content( + change_set, + schema_id, + ContentAddress::Schema(ContentHash::new( + SchemaId::generate().to_string().as_bytes(), + )), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add schema"); + let schema_variant_id = change_set.generate_ulid().expect("Unable to generate Ulid"); + let initial_schema_variant_node_index = graph + .add_node( + NodeWeight::new_content( + change_set, + schema_variant_id, + ContentAddress::SchemaVariant(ContentHash::new( + SchemaVariantId::generate().to_string().as_bytes(), + )), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add schema variant"); + let component_id = change_set.generate_ulid().expect("Unable to generate Ulid"); + let initial_component_node_index = graph + .add_node( + NodeWeight::new_content( + change_set, + component_id, + ContentAddress::Component(ContentHash::new( + ComponentId::generate().to_string().as_bytes(), + )), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add component"); + + graph + .add_edge( + graph.root_index, + EdgeWeight::new(change_set, EdgeWeightKind::Use) + .expect("Unable to create EdgeWeight"), + initial_component_node_index, + ) + .expect("Unable to add root -> component edge"); + graph + .add_edge( + graph.root_index, + EdgeWeight::new(change_set, EdgeWeightKind::Use) + .expect("Unable to create EdgeWeight"), + initial_schema_node_index, + ) + .expect("Unable to add root -> schema edge"); + graph + .add_edge( + graph + .get_node_index_by_id(schema_id) + .expect("Unable to find NodeIndex"), + EdgeWeight::new(change_set, EdgeWeightKind::Use) + .expect("Unable to create EdgeWeight"), + initial_schema_variant_node_index, + ) + .expect("Unable to add 
schema -> schema variant edge"); + graph + .add_edge( + graph + .get_node_index_by_id(component_id) + .expect("Unable to find NodeIndex"), + EdgeWeight::new(change_set, EdgeWeightKind::Use) + .expect("Unable to create EdgeWeight"), + graph + .get_node_index_by_id(schema_variant_id) + .expect("Unable to find NodeIndex"), + ) + .expect("Unable to add component -> schema variant edge"); + + let pre_cycle_root_index = graph.root_index; + + // This should cause a cycle. + graph + .add_edge( + graph + .get_node_index_by_id(schema_variant_id) + .expect("Unable to find NodeIndex"), + EdgeWeight::new(change_set, EdgeWeightKind::Use) + .expect("Unable to create EdgeWeight"), + graph + .get_node_index_by_id(component_id) + .expect("Unable to find NodeIndex"), + ) + .expect_err("Created a cycle"); + + assert_eq!(pre_cycle_root_index, graph.root_index,); + } + + #[test] + fn update_content() { + let change_set = ChangeSetPointer::new_local().expect("Unable to create ChangeSet"); + let change_set = &change_set; + let mut graph = WorkspaceSnapshotGraph::new(change_set) + .expect("Unable to create WorkspaceSnapshotGraph"); + + let schema_id = change_set.generate_ulid().expect("Unable to generate Ulid"); + let schema_index = graph + .add_node( + NodeWeight::new_content( + change_set, + schema_id, + ContentAddress::Schema(ContentHash::from("Constellation")), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add schema"); + let schema_variant_id = change_set.generate_ulid().expect("Unable to generate Ulid"); + let schema_variant_index = graph + .add_node( + NodeWeight::new_content( + change_set, + schema_variant_id, + ContentAddress::SchemaVariant(ContentHash::new( + "Freestar Collective".as_bytes(), + )), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add schema variant"); + let component_id = change_set.generate_ulid().expect("Unable to generate Ulid"); + let component_index = graph + .add_node( + NodeWeight::new_content( + change_set, + 
component_id, + ContentAddress::Component(ContentHash::from("Crimson Fleet")), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add component"); + + graph + .add_edge( + graph.root_index, + EdgeWeight::new(change_set, EdgeWeightKind::Use) + .expect("Unable to create EdgeWeight"), + component_index, + ) + .expect("Unable to add root -> component edge"); + graph + .add_edge( + graph.root_index, + EdgeWeight::new(change_set, EdgeWeightKind::Use) + .expect("Unable to create EdgeWeight"), + schema_index, + ) + .expect("Unable to add root -> schema edge"); + graph + .add_edge( + graph + .get_node_index_by_id(schema_id) + .expect("Unable to get NodeIndex"), + EdgeWeight::new(change_set, EdgeWeightKind::Use) + .expect("Unable to create EdgeWeight"), + schema_variant_index, + ) + .expect("Unable to add schema -> schema variant edge"); + graph + .add_edge( + graph + .get_node_index_by_id(component_id) + .expect("Unable to get NodeIndex"), + EdgeWeight::new(change_set, EdgeWeightKind::Use) + .expect("Unable to create EdgeWeight"), + graph + .get_node_index_by_id(schema_variant_id) + .expect("Unable to get NodeIndex"), + ) + .expect("Unable to add component -> schema variant edge"); + + // Ensure that the root node merkle tree hash looks as we expect before the update. 
+ let pre_update_root_node_merkle_tree_hash: ContentHash = + serde_json::from_value(serde_json::json![ + "66e2b07b8a9a5f94a0ea18618a57b3264c850ea6cfeb81f5c9a42c4397f2f49d" + ]) + .expect("could not deserialize"); + assert_eq!( + pre_update_root_node_merkle_tree_hash, // expected + graph + .get_node_weight(graph.root_index) + .expect("could not get node weight") + .merkle_tree_hash(), // actual + ); + + let updated_content_hash = ContentHash::from("new_content"); + graph + .update_content(change_set, component_id, updated_content_hash) + .expect("Unable to update Component content hash"); + + let post_update_root_node_merkle_tree_hash: ContentHash = + serde_json::from_value(serde_json::json![ + "0b9b79be9c1b4107bd32dc9fb7accde544dc10171e37847e53c4d16a9efd2da1" + ]) + .expect("could not deserialize"); + assert_eq!( + post_update_root_node_merkle_tree_hash, // expected + graph + .get_node_weight(graph.root_index) + .expect("could not get node weight") + .merkle_tree_hash(), // actual + ); + assert_eq!( + updated_content_hash, // expected + graph + .get_node_weight( + graph + .get_node_index_by_id(component_id) + .expect("could not get node index by id") + ) + .expect("could not get node weight") + .content_hash(), // actual + ); + + graph.cleanup(); + + // Ensure that there are not more nodes than the ones that should be in use. + assert_eq!(4, graph.node_count()); + + // The hashes must not change upon cleanup. 
+ assert_eq!( + post_update_root_node_merkle_tree_hash, // expected + graph + .get_node_weight(graph.root_index) + .expect("could not get node weight") + .merkle_tree_hash(), // actual + ); + assert_eq!( + updated_content_hash, // expected + graph + .get_node_weight( + graph + .get_node_index_by_id(component_id) + .expect("could not get node index by id") + ) + .expect("could not get node weight") + .content_hash(), // actual + ); + } + + #[test] + fn detect_conflicts_and_updates_simple_no_conflicts_no_updates_in_base() { + let initial_change_set = ChangeSetPointer::new_local().expect("Unable to create ChangeSet"); + let initial_change_set = &initial_change_set; + let mut initial_graph = WorkspaceSnapshotGraph::new(initial_change_set) + .expect("Unable to create WorkspaceSnapshotGraph"); + + let schema_id = initial_change_set + .generate_ulid() + .expect("Unable to generate Ulid"); + let schema_index = initial_graph + .add_node( + NodeWeight::new_content( + initial_change_set, + schema_id, + ContentAddress::Schema(ContentHash::from("Schema A")), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add Schema A"); + let schema_variant_id = initial_change_set + .generate_ulid() + .expect("Unable to generate Ulid"); + let schema_variant_index = initial_graph + .add_node( + NodeWeight::new_content( + initial_change_set, + schema_variant_id, + ContentAddress::SchemaVariant(ContentHash::from("Schema Variant A")), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add Schema Variant A"); + + initial_graph + .add_edge( + initial_graph.root_index, + EdgeWeight::new(initial_change_set, EdgeWeightKind::Use) + .expect("Unable to create EdgeWeight"), + schema_index, + ) + .expect("Unable to add root -> schema edge"); + initial_graph + .add_edge( + initial_graph + .get_node_index_by_id(schema_id) + .expect("Unable to get NodeIndex"), + EdgeWeight::new(initial_change_set, EdgeWeightKind::Use) + .expect("Unable to create EdgeWeight"), + 
schema_variant_index, + ) + .expect("Unable to add schema -> schema variant edge"); + + initial_graph.dot(); + + let new_change_set = ChangeSetPointer::new_local().expect("Unable to create ChangeSet"); + let new_change_set = &new_change_set; + let mut new_graph = initial_graph.clone(); + + let component_id = new_change_set + .generate_ulid() + .expect("Unable to generate Ulid"); + let component_index = new_graph + .add_node( + NodeWeight::new_content( + new_change_set, + component_id, + ContentAddress::Schema(ContentHash::from("Component A")), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add Component A"); + new_graph + .add_edge( + new_graph.root_index, + EdgeWeight::new(new_change_set, EdgeWeightKind::Use) + .expect("Unable to create EdgeWeight"), + component_index, + ) + .expect("Unable to add root -> component edge"); + new_graph + .add_edge( + new_graph + .get_node_index_by_id(component_id) + .expect("Unable to get NodeIndex"), + EdgeWeight::new(new_change_set, EdgeWeightKind::Use) + .expect("Unable to create EdgeWeight"), + new_graph + .get_node_index_by_id(schema_variant_id) + .expect("Unable to get NodeIndex"), + ) + .expect("Unable to add component -> schema variant edge"); + + new_graph.dot(); + + let (conflicts, updates) = new_graph + .detect_conflicts_and_updates( + new_change_set.vector_clock_id(), + &initial_graph, + initial_change_set.vector_clock_id(), + ) + .expect("Unable to detect conflicts and updates"); + + assert_eq!(Vec::::new(), conflicts); + assert_eq!(Vec::::new(), updates); + } + + #[test] + fn detect_conflicts_and_updates_simple_no_conflicts_with_purely_new_content_in_base() { + let initial_change_set = ChangeSetPointer::new_local().expect("Unable to create ChangeSet"); + let base_change_set = &initial_change_set; + let mut base_graph = WorkspaceSnapshotGraph::new(base_change_set) + .expect("Unable to create WorkspaceSnapshotGraph"); + + let schema_id = base_change_set + .generate_ulid() + .expect("Unable to 
generate Ulid"); + let schema_index = base_graph + .add_node( + NodeWeight::new_content( + base_change_set, + schema_id, + ContentAddress::Schema(ContentHash::from("Schema A")), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add Schema A"); + let schema_variant_id = base_change_set + .generate_ulid() + .expect("Unable to generate Ulid"); + let schema_variant_index = base_graph + .add_node( + NodeWeight::new_content( + base_change_set, + schema_variant_id, + ContentAddress::SchemaVariant(ContentHash::from("Schema Variant A")), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add Schema Variant A"); + + base_graph + .add_edge( + base_graph.root_index, + EdgeWeight::new(base_change_set, EdgeWeightKind::Use) + .expect("Unable to create EdgeWeight"), + schema_index, + ) + .expect("Unable to add root -> schema edge"); + base_graph + .add_edge( + base_graph + .get_node_index_by_id(schema_id) + .expect("Unable to get NodeIndex"), + EdgeWeight::new(base_change_set, EdgeWeightKind::Use) + .expect("Unable to create EdgeWeight"), + schema_variant_index, + ) + .expect("Unable to add schema -> schema variant edge"); + + println!("Initial base graph (Root {:?}):", base_graph.root_index); + base_graph.dot(); + + let new_change_set = ChangeSetPointer::new_local().expect("Unable to create ChangeSet"); + let new_change_set = &new_change_set; + let new_graph = base_graph.clone(); + + let new_onto_component_id = base_change_set + .generate_ulid() + .expect("Unable to generate Ulid"); + let new_onto_component_index = base_graph + .add_node( + NodeWeight::new_content( + base_change_set, + new_onto_component_id, + ContentAddress::Component(ContentHash::from("Component B")), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add Component B"); + let _new_onto_root_component_edge_index = base_graph + .add_edge( + base_graph.root_index, + EdgeWeight::new(base_change_set, EdgeWeightKind::Use) + .expect("Unable to create 
EdgeWeight"), + new_onto_component_index, + ) + .expect("Unable to add root -> component edge"); + base_graph + .add_edge( + base_graph + .get_node_index_by_id(new_onto_component_id) + .expect("Unable to get NodeIndex"), + EdgeWeight::new(base_change_set, EdgeWeightKind::Use) + .expect("Unable to create EdgeWeight"), + base_graph + .get_node_index_by_id(schema_variant_id) + .expect("Unable to get NodeIndex"), + ) + .expect("Unable to add component -> schema variant edge"); + + println!("Updated base graph (Root: {:?}):", base_graph.root_index); + base_graph.dot(); + + let (conflicts, updates) = new_graph + .detect_conflicts_and_updates( + new_change_set.vector_clock_id(), + &base_graph, + base_change_set.vector_clock_id(), + ) + .expect("Unable to detect conflicts and updates"); + + assert_eq!(Vec::::new(), conflicts); + + let new_onto_component_index = base_graph + .get_node_index_by_id(new_onto_component_id) + .expect("Unable to get NodeIndex"); + match updates.as_slice() { + [Update::NewEdge { + source, + destination, + edge_weight, + }] => { + assert_eq!(new_graph.root_index, *source); + assert_eq!(new_onto_component_index, *destination); + assert_eq!(&EdgeWeightKind::Use, edge_weight.kind()); + } + other => panic!("Unexpected updates: {:?}", other), + } + } + + #[test] + fn detect_conflicts_and_updates_with_purely_new_content_in_new_graph() { + let initial_change_set = ChangeSetPointer::new_local().expect("Unable to create ChangeSet"); + let base_change_set = &initial_change_set; + let mut base_graph = WorkspaceSnapshotGraph::new(base_change_set) + .expect("Unable to create WorkspaceSnapshotGraph"); + + let component_id = base_change_set + .generate_ulid() + .expect("Unable to generate Ulid"); + let component_index = base_graph + .add_node( + NodeWeight::new_content( + base_change_set, + component_id, + ContentAddress::Component(ContentHash::from("Component A")), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add Schema A"); + 
base_graph + .add_edge( + base_graph.root_index, + EdgeWeight::new(base_change_set, EdgeWeightKind::Use) + .expect("Unable to create EdgeWeight"), + component_index, + ) + .expect("Unable to add root -> component edge"); + + base_graph.cleanup(); + println!("Initial base graph (Root {:?}):", base_graph.root_index); + base_graph.dot(); + + let new_change_set = ChangeSetPointer::new_local().expect("Unable to create ChangeSet"); + let new_change_set = &new_change_set; + let mut new_graph = base_graph.clone(); + + let new_component_id = new_change_set + .generate_ulid() + .expect("Unable to generate Ulid"); + let new_component_index = new_graph + .add_node( + NodeWeight::new_content( + new_change_set, + new_component_id, + ContentAddress::Component(ContentHash::from("Component B")), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add Component B"); + new_graph + .add_edge( + new_graph.root_index, + EdgeWeight::new(new_change_set, EdgeWeightKind::Use) + .expect("Unable to create EdgeWeight"), + new_component_index, + ) + .expect("Unable to add root -> component edge"); + + new_graph.cleanup(); + println!("Updated new graph (Root: {:?}):", new_graph.root_index); + new_graph.dot(); + + let (conflicts, updates) = new_graph + .detect_conflicts_and_updates( + new_change_set.vector_clock_id(), + &base_graph, + base_change_set.vector_clock_id(), + ) + .expect("Unable to detect conflicts and updates"); + + assert!(updates.is_empty()); + assert!(conflicts.is_empty()); + + let (conflicts, updates) = base_graph + .detect_conflicts_and_updates( + base_change_set.vector_clock_id(), + &new_graph, + new_change_set.vector_clock_id(), + ) + .expect("Unable to detect conflicts and updates"); + + assert!(conflicts.is_empty()); + + match updates.as_slice() { + [Update::NewEdge { + source, + destination, + edge_weight, + }] => { + assert_eq!(base_graph.root_index, *source); + assert_eq!(new_component_index, *destination); + assert_eq!(&EdgeWeightKind::Use, 
edge_weight.kind()); + } + other => panic!("Unexpected updates: {:?}", other), + } + } + + #[test] + fn detect_conflicts_and_updates_simple_no_conflicts_with_updates_on_both_sides() { + let initial_change_set = ChangeSetPointer::new_local().expect("Unable to create ChangeSet"); + let base_change_set = &initial_change_set; + let mut base_graph = WorkspaceSnapshotGraph::new(base_change_set) + .expect("Unable to create WorkspaceSnapshotGraph"); + + let schema_id = base_change_set + .generate_ulid() + .expect("Unable to generate Ulid"); + let schema_index = base_graph + .add_node( + NodeWeight::new_content( + base_change_set, + schema_id, + ContentAddress::Schema(ContentHash::from("Schema A")), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add Schema A"); + let schema_variant_id = base_change_set + .generate_ulid() + .expect("Unable to generate Ulid"); + let schema_variant_index = base_graph + .add_node( + NodeWeight::new_content( + base_change_set, + schema_variant_id, + ContentAddress::SchemaVariant(ContentHash::from("Schema Variant A")), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add Schema Variant A"); + + base_graph + .add_edge( + base_graph.root_index, + EdgeWeight::new(base_change_set, EdgeWeightKind::Use) + .expect("Unable to create EdgeWeight"), + schema_index, + ) + .expect("Unable to add root -> schema edge"); + base_graph + .add_edge( + base_graph + .get_node_index_by_id(schema_id) + .expect("Unable to get NodeIndex"), + EdgeWeight::new(base_change_set, EdgeWeightKind::Use) + .expect("Unable to create EdgeWeight"), + schema_variant_index, + ) + .expect("Unable to add schema -> schema variant edge"); + + println!("Initial base graph (Root {:?}):", base_graph.root_index); + base_graph.dot(); + + let new_change_set = ChangeSetPointer::new_local().expect("Unable to create ChangeSet"); + let new_change_set = &new_change_set; + let mut new_graph = base_graph.clone(); + + let component_id = new_change_set + 
.generate_ulid() + .expect("Unable to generate Ulid"); + let component_index = new_graph + .add_node( + NodeWeight::new_content( + new_change_set, + component_id, + ContentAddress::Component(ContentHash::from("Component A")), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add Component A"); + new_graph + .add_edge( + new_graph.root_index, + EdgeWeight::new(new_change_set, EdgeWeightKind::Use) + .expect("Unable to create EdgeWeight"), + component_index, + ) + .expect("Unable to add root -> component edge"); + new_graph + .add_edge( + new_graph + .get_node_index_by_id(component_id) + .expect("Unable to get NodeIndex"), + EdgeWeight::new(new_change_set, EdgeWeightKind::Use) + .expect("Unable to create EdgeWeight"), + new_graph + .get_node_index_by_id(schema_variant_id) + .expect("Unable to get NodeIndex"), + ) + .expect("Unable to add component -> schema variant edge"); + + println!("new graph (Root {:?}):", new_graph.root_index); + new_graph.dot(); + + let new_onto_component_id = base_change_set + .generate_ulid() + .expect("Unable to generate Ulid"); + let new_onto_component_index = base_graph + .add_node( + NodeWeight::new_content( + base_change_set, + new_onto_component_id, + ContentAddress::Component(ContentHash::from("Component B")), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add Component B"); + base_graph + .add_edge( + base_graph.root_index, + EdgeWeight::new(base_change_set, EdgeWeightKind::Use) + .expect("Unable to create EdgeWeight"), + new_onto_component_index, + ) + .expect("Unable to add root -> component edge"); + base_graph + .add_edge( + base_graph + .get_node_index_by_id(new_onto_component_id) + .expect("Unable to get NodeIndex"), + EdgeWeight::new(base_change_set, EdgeWeightKind::Use) + .expect("Unable to create EdgeWeight"), + base_graph + .get_node_index_by_id(schema_variant_id) + .expect("Unable to get NodeIndex"), + ) + .expect("Unable to add component -> schema variant edge"); + + 
println!("Updated base graph (Root: {:?}):", base_graph.root_index); + base_graph.dot(); + + let (conflicts, updates) = new_graph + .detect_conflicts_and_updates( + new_change_set.vector_clock_id(), + &base_graph, + base_change_set.vector_clock_id(), + ) + .expect("Unable to detect conflicts and updates"); + + assert_eq!(Vec::::new(), conflicts); + + let new_onto_component_index = base_graph + .get_node_index_by_id(new_onto_component_id) + .expect("Unable to get NodeIndex"); + match updates.as_slice() { + [Update::NewEdge { + source, + destination, + edge_weight, + }] => { + assert_eq!(new_graph.root_index, *source); + assert_eq!(new_onto_component_index, *destination); + assert_eq!(&EdgeWeightKind::Use, edge_weight.kind()); + } + other => panic!("Unexpected updates: {:?}", other), + } + } + + #[test] + fn detect_conflicts_and_updates_simple_with_content_conflict() { + let initial_change_set = ChangeSetPointer::new_local().expect("Unable to create ChangeSet"); + let base_change_set = &initial_change_set; + let mut base_graph = WorkspaceSnapshotGraph::new(base_change_set) + .expect("Unable to create WorkspaceSnapshotGraph"); + + let schema_id = base_change_set + .generate_ulid() + .expect("Unable to generate Ulid"); + let schema_index = base_graph + .add_node( + NodeWeight::new_content( + base_change_set, + schema_id, + ContentAddress::Schema(ContentHash::from("Schema A")), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add Schema A"); + let schema_variant_id = base_change_set + .generate_ulid() + .expect("Unable to generate Ulid"); + let schema_variant_index = base_graph + .add_node( + NodeWeight::new_content( + base_change_set, + schema_variant_id, + ContentAddress::SchemaVariant(ContentHash::from("Schema Variant A")), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add Schema Variant A"); + + base_graph + .add_edge( + base_graph.root_index, + EdgeWeight::new(base_change_set, EdgeWeightKind::Use) + .expect("Unable to 
create EdgeWeight"), + schema_index, + ) + .expect("Unable to add root -> schema edge"); + base_graph + .add_edge( + base_graph + .get_node_index_by_id(schema_id) + .expect("Unable to get NodeIndex"), + EdgeWeight::new(base_change_set, EdgeWeightKind::Use) + .expect("Unable to create EdgeWeight"), + schema_variant_index, + ) + .expect("Unable to add schema -> schema variant edge"); + + let component_id = base_change_set + .generate_ulid() + .expect("Unable to generate Ulid"); + let component_index = base_graph + .add_node( + NodeWeight::new_content( + base_change_set, + component_id, + ContentAddress::Component(ContentHash::from("Component A")), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add Component A"); + base_graph + .add_edge( + base_graph.root_index, + EdgeWeight::new(base_change_set, EdgeWeightKind::Use) + .expect("Unable to create EdgeWeight"), + component_index, + ) + .expect("Unable to add root -> component edge"); + base_graph + .add_edge( + base_graph + .get_node_index_by_id(component_id) + .expect("Unable to get NodeIndex"), + EdgeWeight::new(base_change_set, EdgeWeightKind::Use) + .expect("Unable to create EdgeWeight"), + base_graph + .get_node_index_by_id(schema_variant_id) + .expect("Unable to get NodeIndex"), + ) + .expect("Unable to add component -> schema variant edge"); + + base_graph.cleanup(); + println!("Initial base graph (Root {:?}):", base_graph.root_index); + base_graph.dot(); + + let new_change_set = ChangeSetPointer::new_local().expect("Unable to create ChangeSet"); + let new_change_set = &new_change_set; + let mut new_graph = base_graph.clone(); + + new_graph + .update_content( + new_change_set, + component_id, + ContentHash::from("Updated Component A"), + ) + .expect("Unable to update Component A"); + + new_graph.cleanup(); + println!("new graph (Root {:?}):", new_graph.root_index); + new_graph.dot(); + + base_graph + .update_content( + base_change_set, + component_id, + ContentHash::from("Base Updated 
Component A"), + ) + .expect("Unable to update Component A"); + + base_graph.cleanup(); + println!("Updated base graph (Root: {:?}):", base_graph.root_index); + base_graph.dot(); + + let (conflicts, updates) = new_graph + .detect_conflicts_and_updates( + new_change_set.vector_clock_id(), + &base_graph, + base_change_set.vector_clock_id(), + ) + .expect("Unable to detect conflicts and updates"); + + assert_eq!( + vec![Conflict::NodeContent { + onto: base_graph + .get_node_index_by_id(component_id) + .expect("Unable to get component NodeIndex"), + to_rebase: new_graph + .get_node_index_by_id(component_id) + .expect("Unable to get component NodeIndex"), + }], + conflicts + ); + assert_eq!(Vec::::new(), updates); + } + + #[test] + fn detect_conflicts_and_updates_simple_with_modify_removed_item_conflict() { + let initial_change_set = ChangeSetPointer::new_local().expect("Unable to create ChangeSet"); + let base_change_set = &initial_change_set; + let mut base_graph = WorkspaceSnapshotGraph::new(base_change_set) + .expect("Unable to create WorkspaceSnapshotGraph"); + + let schema_id = base_change_set + .generate_ulid() + .expect("Unable to generate Ulid"); + let schema_index = base_graph + .add_node( + NodeWeight::new_content( + base_change_set, + schema_id, + ContentAddress::Schema(ContentHash::from("Schema A")), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add Schema A"); + let schema_variant_id = base_change_set + .generate_ulid() + .expect("Unable to generate Ulid"); + let schema_variant_index = base_graph + .add_node( + NodeWeight::new_content( + base_change_set, + schema_variant_id, + ContentAddress::SchemaVariant(ContentHash::from("Schema Variant A")), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add Schema Variant A"); + + base_graph + .add_edge( + base_graph.root_index, + EdgeWeight::new(base_change_set, EdgeWeightKind::Use) + .expect("Unable to create EdgeWeight"), + schema_index, + ) + .expect("Unable to add 
root -> schema edge"); + base_graph + .add_edge( + base_graph + .get_node_index_by_id(schema_id) + .expect("Unable to get NodeIndex"), + EdgeWeight::new(base_change_set, EdgeWeightKind::Use) + .expect("Unable to create EdgeWeight"), + schema_variant_index, + ) + .expect("Unable to add schema -> schema variant edge"); + + let component_id = base_change_set + .generate_ulid() + .expect("Unable to generate Ulid"); + let component_index = base_graph + .add_node( + NodeWeight::new_content( + base_change_set, + component_id, + ContentAddress::Component(ContentHash::from("Component A")), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add Component A"); + base_graph + .add_edge( + base_graph.root_index, + EdgeWeight::new(base_change_set, EdgeWeightKind::Use) + .expect("Unable to create EdgeWeight"), + component_index, + ) + .expect("Unable to add root -> component edge"); + base_graph + .add_edge( + base_graph + .get_node_index_by_id(component_id) + .expect("Unable to get NodeIndex"), + EdgeWeight::new(base_change_set, EdgeWeightKind::Use) + .expect("Unable to create EdgeWeight"), + base_graph + .get_node_index_by_id(schema_variant_id) + .expect("Unable to get NodeIndex"), + ) + .expect("Unable to add component -> schema variant edge"); + + base_graph.cleanup(); + println!("Initial base graph (Root {:?}):", base_graph.root_index); + base_graph.dot(); + + let new_change_set = ChangeSetPointer::new_local().expect("Unable to create ChangeSet"); + let new_change_set = &new_change_set; + let mut new_graph = base_graph.clone(); + + base_graph + .remove_edge( + base_change_set, + base_graph.root_index, + base_graph + .get_node_index_by_id(component_id) + .expect("Unable to get NodeIndex"), + EdgeWeightKindDiscriminants::Use, + ) + .expect("Unable to remove Component A"); + + base_graph.cleanup(); + println!("Updated base graph (Root: {:?}):", base_graph.root_index); + base_graph.dot(); + + new_graph + .update_content( + new_change_set, + component_id, + 
ContentHash::from("Updated Component A"), + ) + .expect("Unable to update Component A"); + + new_graph.cleanup(); + println!("new graph (Root {:?}):", new_graph.root_index); + new_graph.dot(); + + let (conflicts, updates) = new_graph + .detect_conflicts_and_updates( + new_change_set.vector_clock_id(), + &base_graph, + base_change_set.vector_clock_id(), + ) + .expect("Unable to detect conflicts and updates"); + + assert_eq!( + vec![Conflict::ModifyRemovedItem( + new_graph + .get_node_index_by_id(component_id) + .expect("Unable to get NodeIndex") + )], + conflicts + ); + assert_eq!(Vec::::new(), updates); + } + + #[test] + fn detect_conflicts_and_updates_complex() { + let initial_change_set = ChangeSetPointer::new_local().expect("Unable to create ChangeSet"); + let base_change_set = &initial_change_set; + let mut base_graph = WorkspaceSnapshotGraph::new(base_change_set) + .expect("Unable to create WorkspaceSnapshotGraph"); + + // Docker Image Schema + let docker_image_schema_id = base_change_set + .generate_ulid() + .expect("Unable to generate Ulid"); + let docker_image_schema_index = base_graph + .add_node( + NodeWeight::new_content( + base_change_set, + docker_image_schema_id, + ContentAddress::Schema(ContentHash::from("first")), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add Schema A"); + base_graph + .add_edge( + base_graph.root_index, + EdgeWeight::new(base_change_set, EdgeWeightKind::Use) + .expect("Unable to create EdgeWeight"), + docker_image_schema_index, + ) + .expect("Unable to add root -> schema edge"); + + // Docker Image Schema Variant + let docker_image_schema_variant_id = base_change_set + .generate_ulid() + .expect("Unable to generate Ulid"); + let docker_image_schema_variant_index = base_graph + .add_node( + NodeWeight::new_content( + base_change_set, + docker_image_schema_variant_id, + ContentAddress::SchemaVariant(ContentHash::from("first")), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add 
Schema Variant A"); + base_graph + .add_edge( + base_graph + .get_node_index_by_id(docker_image_schema_id) + .expect("Unable to get NodeIndex"), + EdgeWeight::new(base_change_set, EdgeWeightKind::Use) + .expect("Unable to create EdgeWeight"), + docker_image_schema_variant_index, + ) + .expect("Unable to add schema -> schema variant edge"); + + // Nginx Docker Image Component + let nginx_docker_image_component_id = base_change_set + .generate_ulid() + .expect("Unable to generate Ulid"); + let nginx_docker_image_component_index = base_graph + .add_node( + NodeWeight::new_content( + base_change_set, + nginx_docker_image_component_id, + ContentAddress::Component(ContentHash::from("first")), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add Component A"); + base_graph + .add_edge( + base_graph.root_index, + EdgeWeight::new(base_change_set, EdgeWeightKind::Use) + .expect("Unable to create EdgeWeight"), + nginx_docker_image_component_index, + ) + .expect("Unable to add root -> component edge"); + base_graph + .add_edge( + base_graph + .get_node_index_by_id(nginx_docker_image_component_id) + .expect("Unable to get NodeIndex"), + EdgeWeight::new(base_change_set, EdgeWeightKind::Use) + .expect("Unable to create EdgeWeight"), + base_graph + .get_node_index_by_id(docker_image_schema_variant_id) + .expect("Unable to get NodeIndex"), + ) + .expect("Unable to add component -> schema variant edge"); + + // Alpine Component + let alpine_component_id = base_change_set + .generate_ulid() + .expect("Unable to generate Ulid"); + let alpine_component_index = base_graph + .add_node( + NodeWeight::new_content( + base_change_set, + alpine_component_id, + ContentAddress::Component(ContentHash::from("first")), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add Component A"); + base_graph + .add_edge( + base_graph.root_index, + EdgeWeight::new(base_change_set, EdgeWeightKind::Use) + .expect("Unable to create EdgeWeight"), + 
alpine_component_index, + ) + .expect("Unable to add root -> component edge"); + base_graph + .add_edge( + base_graph + .get_node_index_by_id(alpine_component_id) + .expect("Unable to get NodeIndex"), + EdgeWeight::new(base_change_set, EdgeWeightKind::Use) + .expect("Unable to create EdgeWeight"), + base_graph + .get_node_index_by_id(docker_image_schema_variant_id) + .expect("Unable to get NodeIndex"), + ) + .expect("Unable to add component -> schema variant edge"); + + // Butane Schema + let butane_schema_id = base_change_set + .generate_ulid() + .expect("Unable to generate Ulid"); + let butane_schema_index = base_graph + .add_node( + NodeWeight::new_content( + base_change_set, + butane_schema_id, + ContentAddress::Schema(ContentHash::from("first")), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add Schema A"); + base_graph + .add_edge( + base_graph.root_index, + EdgeWeight::new(base_change_set, EdgeWeightKind::Use) + .expect("Unable to create EdgeWeight"), + butane_schema_index, + ) + .expect("Unable to add root -> schema edge"); + + // Butane Schema Variant + let butane_schema_variant_id = base_change_set + .generate_ulid() + .expect("Unable to generate Ulid"); + let butane_schema_variant_index = base_graph + .add_node( + NodeWeight::new_content( + base_change_set, + butane_schema_variant_id, + ContentAddress::SchemaVariant(ContentHash::from("first")), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add Schema Variant A"); + base_graph + .add_edge( + base_graph + .get_node_index_by_id(butane_schema_id) + .expect("Unable to get NodeIndex"), + EdgeWeight::new(base_change_set, EdgeWeightKind::Use) + .expect("Unable to create EdgeWeight"), + butane_schema_variant_index, + ) + .expect("Unable to add schema -> schema variant edge"); + + // Nginx Butane Component + let nginx_butane_component_id = base_change_set + .generate_ulid() + .expect("Unable to generate Ulid"); + let nginx_butane_node_index = base_graph + 
.add_node( + NodeWeight::new_content( + base_change_set, + nginx_butane_component_id, + ContentAddress::Component(ContentHash::from("first")), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add Schema Variant A"); + base_graph + .add_edge( + base_graph.root_index, + EdgeWeight::new(base_change_set, EdgeWeightKind::Use) + .expect("Unable to create EdgeWeight"), + nginx_butane_node_index, + ) + .expect("Unable to add root -> component edge"); + base_graph + .add_edge( + base_graph + .get_node_index_by_id(nginx_butane_component_id) + .expect("Unable to get NodeIndex"), + EdgeWeight::new(base_change_set, EdgeWeightKind::Use) + .expect("Unable to create EdgeWeight"), + base_graph + .get_node_index_by_id(butane_schema_variant_id) + .expect("Unable to get NodeIndex"), + ) + .expect("Unable to add component -> schema variant edge"); + + base_graph.cleanup(); + println!("Initial base graph (Root {:?}):", base_graph.root_index); + base_graph.dot(); + + // Create a new change set to cause some problems! + let new_change_set = ChangeSetPointer::new_local().expect("Unable to create ChangeSet"); + let new_change_set = &new_change_set; + let mut new_graph = base_graph.clone(); + + // Create a modify removed item conflict. + base_graph + .remove_edge( + base_change_set, + base_graph.root_index, + base_graph + .get_node_index_by_id(nginx_butane_component_id) + .expect("Unable to get NodeIndex"), + EdgeWeightKindDiscriminants::Use, + ) + .expect("Unable to update the component"); + new_graph + .update_content( + new_change_set, + nginx_butane_component_id, + ContentHash::from("second"), + ) + .expect("Unable to update the component"); + + // Create a node content conflict. 
+ base_graph + .update_content( + base_change_set, + docker_image_schema_variant_id, + ContentHash::from("oopsie"), + ) + .expect("Unable to update the component"); + new_graph + .update_content( + new_change_set, + docker_image_schema_variant_id, + ContentHash::from("poopsie"), + ) + .expect("Unable to update the component"); + + // Create a pure update. + base_graph + .update_content( + base_change_set, + docker_image_schema_id, + ContentHash::from("bg3"), + ) + .expect("Unable to update the schema"); + + let (conflicts, updates) = new_graph + .detect_conflicts_and_updates( + new_change_set.vector_clock_id(), + &base_graph, + base_change_set.vector_clock_id(), + ) + .expect("Unable to detect conflicts and updates"); + + println!("base graph current root: {:?}", base_graph.root_index); + base_graph.dot(); + println!("new graph current root: {:?}", new_graph.root_index); + new_graph.dot(); + + let expected_conflicts = vec![ + Conflict::ModifyRemovedItem( + new_graph + .get_node_index_by_id(nginx_butane_component_id) + .expect("Unable to get component NodeIndex"), + ), + Conflict::NodeContent { + onto: base_graph + .get_node_index_by_id(docker_image_schema_variant_id) + .expect("Unable to get component NodeIndex"), + to_rebase: new_graph + .get_node_index_by_id(docker_image_schema_variant_id) + .expect("Unable to get component NodeIndex"), + }, + ]; + let expected_updates = vec![Update::ReplaceSubgraph { + onto: base_graph + .get_node_index_by_id(docker_image_schema_id) + .expect("Unable to get NodeIndex"), + to_rebase: new_graph + .get_node_index_by_id(docker_image_schema_id) + .expect("Unable to get NodeIndex"), + }]; + + assert_eq!( + ConflictsAndUpdates { + conflicts: expected_conflicts, + updates: expected_updates, + }, + ConflictsAndUpdates { conflicts, updates }, + ); + } + + #[test] + fn add_ordered_node() { + let change_set = ChangeSetPointer::new_local().expect("Unable to create ChangeSet"); + let change_set = &change_set; + let mut graph = 
WorkspaceSnapshotGraph::new(change_set) + .expect("Unable to create WorkspaceSnapshotGraph"); + + let schema_id = change_set.generate_ulid().expect("Unable to generate Ulid"); + let schema_index = graph + .add_node( + NodeWeight::new_content( + change_set, + schema_id, + ContentAddress::Schema(ContentHash::new( + SchemaId::generate().to_string().as_bytes(), + )), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add schema"); + let schema_variant_id = change_set.generate_ulid().expect("Unable to generate Ulid"); + let schema_variant_index = graph + .add_node( + NodeWeight::new_content( + change_set, + schema_variant_id, + ContentAddress::SchemaVariant(ContentHash::new( + SchemaVariantId::generate().to_string().as_bytes(), + )), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add schema variant"); + + graph + .add_edge( + graph.root_index, + EdgeWeight::new(change_set, EdgeWeightKind::Use) + .expect("Unable to create EdgeWeight"), + schema_index, + ) + .expect("Unable to add root -> schema edge"); + graph + .add_edge( + graph + .get_node_index_by_id(schema_id) + .expect("Unable to get NodeIndex"), + EdgeWeight::new(change_set, EdgeWeightKind::Use) + .expect("Unable to create EdgeWeight"), + schema_variant_index, + ) + .expect("Unable to add schema -> schema variant edge"); + + let func_id = change_set.generate_ulid().expect("Unable to generate Ulid"); + let func_index = graph + .add_node( + NodeWeight::new_content( + change_set, + func_id, + ContentAddress::Func(ContentHash::new( + FuncId::generate().to_string().as_bytes(), + )), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add func"); + graph + .add_edge( + graph.root_index, + EdgeWeight::new(change_set, EdgeWeightKind::Use) + .expect("Unable to create EdgeWeight"), + func_index, + ) + .expect("Unable to add root -> func edge"); + + let prop_id = change_set.generate_ulid().expect("Unable to generate Ulid"); + let prop_index = graph + 
.add_ordered_node( + change_set, + NodeWeight::new_content( + change_set, + prop_id, + ContentAddress::Prop(ContentHash::new( + PropId::generate().to_string().as_bytes(), + )), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add prop"); + graph + .add_edge( + graph + .get_node_index_by_id(schema_variant_id) + .expect("Unable to get NodeIndex"), + EdgeWeight::new(change_set, EdgeWeightKind::Use) + .expect("Unable to create EdgeWeight"), + prop_index, + ) + .expect("Unable to add schema variant -> prop edge"); + graph + .add_edge( + graph + .get_node_index_by_id(prop_id) + .expect("Unable to get NodeIndex"), + EdgeWeight::new(change_set, EdgeWeightKind::Use) + .expect("Unable to create EdgeWeight"), + graph + .get_node_index_by_id(func_id) + .expect("Unable to get NodeIndex"), + ) + .expect("Unable to add prop -> func edge"); + graph.cleanup(); + graph.dot(); + + let ordered_prop_1_id = change_set.generate_ulid().expect("Unable to generate Ulid"); + let ordered_prop_1_index = graph + .add_node( + NodeWeight::new_content( + change_set, + ordered_prop_1_id, + ContentAddress::Prop(ContentHash::new( + ordered_prop_1_id.to_string().as_bytes(), + )), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add ordered prop"); + graph + .add_ordered_edge( + change_set, + graph + .get_node_index_by_id(prop_id) + .expect("Unable to get NodeWeight for prop"), + EdgeWeight::new(change_set, EdgeWeightKind::Use) + .expect("Unable to create uses edge weight"), + ordered_prop_1_index, + ) + .expect("Unable to add prop -> ordered_prop_1 edge"); + + let ordered_prop_2_id = change_set.generate_ulid().expect("Unable to generate Ulid"); + let ordered_prop_2_index = graph + .add_node( + NodeWeight::new_content( + change_set, + ordered_prop_2_id, + ContentAddress::Prop(ContentHash::new( + ordered_prop_2_id.to_string().as_bytes(), + )), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add ordered prop"); + graph + 
.add_ordered_edge( + change_set, + graph + .get_node_index_by_id(prop_id) + .expect("Unable to get NodeWeight for prop"), + EdgeWeight::new(change_set, EdgeWeightKind::Use) + .expect("Unable to create uses edge weight"), + ordered_prop_2_index, + ) + .expect("Unable to add prop -> ordered_prop_2 edge"); + + let ordered_prop_3_id = change_set.generate_ulid().expect("Unable to generate Ulid"); + let ordered_prop_3_index = graph + .add_node( + NodeWeight::new_content( + change_set, + ordered_prop_3_id, + ContentAddress::Prop(ContentHash::new( + ordered_prop_3_id.to_string().as_bytes(), + )), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add ordered prop"); + graph + .add_ordered_edge( + change_set, + graph + .get_node_index_by_id(prop_id) + .expect("Unable to get NodeWeight for prop"), + EdgeWeight::new(change_set, EdgeWeightKind::Use) + .expect("Unable to create uses edge weight"), + ordered_prop_3_index, + ) + .expect("Unable to add prop -> ordered_prop_3 edge"); + graph.cleanup(); + graph.dot(); + + assert_eq!( + vec![ + ordered_prop_1_index, + ordered_prop_2_index, + ordered_prop_3_index, + ], + graph + .ordered_children_for_node( + graph + .get_node_index_by_id(prop_id) + .expect("Unable to get prop NodeIndex") + ) + .expect("Unable to find ordered children for node") + .expect("Node is not an ordered node") + ); + } + + #[test] + fn reorder_ordered_node() { + let change_set = ChangeSetPointer::new_local().expect("Unable to create ChangeSet"); + let change_set = &change_set; + let mut graph = WorkspaceSnapshotGraph::new(change_set) + .expect("Unable to create WorkspaceSnapshotGraph"); + + let schema_id = change_set.generate_ulid().expect("Unable to generate Ulid"); + let schema_index = graph + .add_node( + NodeWeight::new_content( + change_set, + schema_id, + ContentAddress::Schema(ContentHash::new( + SchemaId::generate().to_string().as_bytes(), + )), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add schema"); + 
let schema_variant_id = change_set.generate_ulid().expect("Unable to generate Ulid"); + let schema_variant_index = graph + .add_node( + NodeWeight::new_content( + change_set, + schema_variant_id, + ContentAddress::SchemaVariant(ContentHash::new( + SchemaVariantId::generate().to_string().as_bytes(), + )), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add schema variant"); + + graph + .add_edge( + graph.root_index, + EdgeWeight::new(change_set, EdgeWeightKind::Use) + .expect("Unable to create EdgeWeight"), + schema_index, + ) + .expect("Unable to add root -> schema edge"); + graph + .add_edge( + graph + .get_node_index_by_id(schema_id) + .expect("Unable to get NodeIndex"), + EdgeWeight::new(change_set, EdgeWeightKind::Use) + .expect("Unable to create EdgeWeight"), + schema_variant_index, + ) + .expect("Unable to add schema -> schema variant edge"); + + let func_id = change_set.generate_ulid().expect("Unable to generate Ulid"); + let func_index = graph + .add_node( + NodeWeight::new_content( + change_set, + func_id, + ContentAddress::Func(ContentHash::new( + FuncId::generate().to_string().as_bytes(), + )), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add func"); + graph + .add_edge( + graph.root_index, + EdgeWeight::new(change_set, EdgeWeightKind::Use) + .expect("Unable to create EdgeWeight"), + func_index, + ) + .expect("Unable to add root -> func edge"); + + let prop_id = change_set.generate_ulid().expect("Unable to generate Ulid"); + let prop_index = graph + .add_ordered_node( + change_set, + NodeWeight::new_content( + change_set, + prop_id, + ContentAddress::Prop(ContentHash::new( + PropId::generate().to_string().as_bytes(), + )), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add prop"); + graph + .add_edge( + graph + .get_node_index_by_id(schema_variant_id) + .expect("Unable to get NodeIndex"), + EdgeWeight::new(change_set, EdgeWeightKind::Use) + .expect("Unable to create EdgeWeight"), + 
prop_index, + ) + .expect("Unable to add schema variant -> prop edge"); + graph + .add_edge( + graph + .get_node_index_by_id(prop_id) + .expect("Unable to get NodeIndex"), + EdgeWeight::new(change_set, EdgeWeightKind::Use) + .expect("Unable to create EdgeWeight"), + graph + .get_node_index_by_id(func_id) + .expect("Unable to get NodeIndex"), + ) + .expect("Unable to add prop -> func edge"); + graph.cleanup(); + graph.dot(); + + let ordered_prop_1_id = change_set.generate_ulid().expect("Unable to generate Ulid"); + let ordered_prop_1_index = graph + .add_node( + NodeWeight::new_content( + change_set, + ordered_prop_1_id, + ContentAddress::Prop(ContentHash::new( + ordered_prop_1_id.to_string().as_bytes(), + )), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add ordered prop"); + graph + .add_ordered_edge( + change_set, + graph + .get_node_index_by_id(prop_id) + .expect("Unable to get NodeWeight for prop"), + EdgeWeight::new(change_set, EdgeWeightKind::Use) + .expect("Unable to create uses edge weight"), + ordered_prop_1_index, + ) + .expect("Unable to add prop -> ordered_prop_1 edge"); + + let ordered_prop_2_id = change_set.generate_ulid().expect("Unable to generate Ulid"); + let ordered_prop_2_index = graph + .add_node( + NodeWeight::new_content( + change_set, + ordered_prop_2_id, + ContentAddress::Prop(ContentHash::new( + ordered_prop_2_id.to_string().as_bytes(), + )), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add ordered prop"); + graph + .add_ordered_edge( + change_set, + graph + .get_node_index_by_id(prop_id) + .expect("Unable to get NodeWeight for prop"), + EdgeWeight::new(change_set, EdgeWeightKind::Use) + .expect("Unable to create uses edge weight"), + ordered_prop_2_index, + ) + .expect("Unable to add prop -> ordered_prop_2 edge"); + + let ordered_prop_3_id = change_set.generate_ulid().expect("Unable to generate Ulid"); + let ordered_prop_3_index = graph + .add_node( + NodeWeight::new_content( + 
change_set, + ordered_prop_3_id, + ContentAddress::Prop(ContentHash::new( + ordered_prop_3_id.to_string().as_bytes(), + )), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add ordered prop"); + graph + .add_ordered_edge( + change_set, + graph + .get_node_index_by_id(prop_id) + .expect("Unable to get NodeWeight for prop"), + EdgeWeight::new(change_set, EdgeWeightKind::Use) + .expect("Unable to create uses edge weight"), + ordered_prop_3_index, + ) + .expect("Unable to add prop -> ordered_prop_3 edge"); + + let ordered_prop_4_id = change_set.generate_ulid().expect("Unable to generate Ulid"); + let ordered_prop_4_index = graph + .add_node( + NodeWeight::new_content( + change_set, + ordered_prop_4_id, + ContentAddress::Prop(ContentHash::new( + ordered_prop_4_id.to_string().as_bytes(), + )), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add ordered prop"); + graph + .add_ordered_edge( + change_set, + graph + .get_node_index_by_id(prop_id) + .expect("Unable to get NodeWeight for prop"), + EdgeWeight::new(change_set, EdgeWeightKind::Use) + .expect("Unable to create uses edge weight"), + ordered_prop_4_index, + ) + .expect("Unable to add prop -> ordered_prop_4 edge"); + + graph.cleanup(); + graph.dot(); + + assert_eq!( + vec![ + ordered_prop_1_index, + ordered_prop_2_index, + ordered_prop_3_index, + ordered_prop_4_index, + ], + graph + .ordered_children_for_node( + graph + .get_node_index_by_id(prop_id) + .expect("Unable to get prop NodeIndex") + ) + .expect("Unable to find ordered children for node") + .expect("Node is not an ordered node") + ); + + let new_order = vec![ + ordered_prop_2_id, + ordered_prop_1_id, + ordered_prop_4_id, + ordered_prop_3_id, + ]; + + graph + .update_order(change_set, prop_id, new_order) + .expect("Unable to update order of prop's children"); + + assert_eq!( + vec![ + ordered_prop_2_index, + ordered_prop_1_index, + ordered_prop_4_index, + ordered_prop_3_index, + ], + graph + 
.ordered_children_for_node( + graph + .get_node_index_by_id(prop_id) + .expect("Unable to get prop NodeIndex") + ) + .expect("Unable to find ordered children for node") + .expect("Node is not an ordered node") + ); + } + + #[test] + fn remove_unordered_node_and_detect_edge_removal() { + let initial_change_set = ChangeSetPointer::new_local().expect("Unable to create ChangeSet"); + let initial_change_set = &initial_change_set; + let mut graph = WorkspaceSnapshotGraph::new(initial_change_set) + .expect("Unable to create WorkspaceSnapshotGraph"); + + let schema_id = initial_change_set + .generate_ulid() + .expect("Unable to generate Ulid"); + let schema_index = graph + .add_node( + NodeWeight::new_content( + initial_change_set, + schema_id, + ContentAddress::Schema(ContentHash::new( + SchemaId::generate().to_string().as_bytes(), + )), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add schema"); + let schema_variant_id = initial_change_set + .generate_ulid() + .expect("Unable to generate Ulid"); + let schema_variant_index = graph + .add_node( + NodeWeight::new_content( + initial_change_set, + schema_variant_id, + ContentAddress::SchemaVariant(ContentHash::new( + SchemaVariantId::generate().to_string().as_bytes(), + )), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add schema variant"); + + graph + .add_edge( + graph.root_index, + EdgeWeight::new(initial_change_set, EdgeWeightKind::Use) + .expect("Unable to create EdgeWeight"), + schema_index, + ) + .expect("Unable to add root -> schema edge"); + graph + .add_edge( + graph + .get_node_index_by_id(schema_id) + .expect("Unable to get NodeIndex"), + EdgeWeight::new(initial_change_set, EdgeWeightKind::Use) + .expect("Unable to create EdgeWeight"), + schema_variant_index, + ) + .expect("Unable to add schema -> schema variant edge"); + + let schema_variant_2_id = initial_change_set + .generate_ulid() + .expect("Unable to generate Ulid"); + let schema_variant_2_index = graph + 
.add_node( + NodeWeight::new_content( + initial_change_set, + schema_variant_2_id, + ContentAddress::SchemaVariant(ContentHash::new( + SchemaVariantId::generate().to_string().as_bytes(), + )), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add schema variant"); + + graph + .add_edge( + graph + .get_node_index_by_id(schema_id) + .expect("Unable to get NodeIndex"), + EdgeWeight::new(initial_change_set, EdgeWeightKind::Use) + .expect("Unable to create EdgeWeight"), + schema_variant_2_index, + ) + .expect("Unable to add schema -> schema variant edge"); + + let expected_edges = HashSet::from([schema_variant_2_index, schema_variant_index]); + + let existing_edges: HashSet = graph + .edges_directed( + graph + .get_node_index_by_id(schema_id) + .expect("Unable to get NodeIndex for schema"), + Outgoing, + ) + .map(|edge_ref| edge_ref.target()) + .collect(); + + assert_eq!( + expected_edges, existing_edges, + "confirm edges are there before deleting" + ); + + graph + .mark_graph_seen(initial_change_set.vector_clock_id()) + .expect("Unable to mark initial graph as seen"); + + let mut graph_with_deleted_edge = graph.clone(); + let new_change_set = ChangeSetPointer::new_local().expect("Unable to create ChangeSet"); + let new_change_set = &new_change_set; + + graph_with_deleted_edge.dot(); + + graph_with_deleted_edge + .remove_edge( + new_change_set, + graph_with_deleted_edge + .get_node_index_by_id(schema_id) + .expect("Unable to get NodeIndex for schema"), + schema_variant_2_index, + EdgeWeightKindDiscriminants::Use, + ) + .expect("Edge removal failed"); + + graph_with_deleted_edge.dot(); + + let existing_edges: Vec = graph_with_deleted_edge + .edges_directed( + graph_with_deleted_edge + .get_node_index_by_id(schema_id) + .expect("Unable to get NodeIndex for schema"), + Outgoing, + ) + .map(|edge_ref| edge_ref.target()) + .collect(); + + assert_eq!( + vec![schema_variant_index], + existing_edges, + "confirm edges after deletion" + ); + + 
graph_with_deleted_edge + .mark_graph_seen(new_change_set.vector_clock_id()) + .expect("Unable to mark new graph as seen"); + + let (conflicts, updates) = graph + .detect_conflicts_and_updates( + initial_change_set.vector_clock_id(), + &graph_with_deleted_edge, + new_change_set.vector_clock_id(), + ) + .expect("Failed to detect conflicts and updates"); + + assert!(conflicts.is_empty()); + dbg!(&updates); + assert_eq!(1, updates.len()); + + assert!(matches!( + updates.first().expect("should be there"), + Update::RemoveEdge { .. } + )); + } + + #[test] + fn remove_unordered_node() { + let change_set = ChangeSetPointer::new_local().expect("Unable to create ChangeSet"); + let change_set = &change_set; + let mut graph = WorkspaceSnapshotGraph::new(change_set) + .expect("Unable to create WorkspaceSnapshotGraph"); + + let schema_id = change_set.generate_ulid().expect("Unable to generate Ulid"); + let schema_index = graph + .add_node( + NodeWeight::new_content( + change_set, + schema_id, + ContentAddress::Schema(ContentHash::new( + SchemaId::generate().to_string().as_bytes(), + )), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add schema"); + let schema_variant_id = change_set.generate_ulid().expect("Unable to generate Ulid"); + let schema_variant_index = graph + .add_node( + NodeWeight::new_content( + change_set, + schema_variant_id, + ContentAddress::SchemaVariant(ContentHash::new( + SchemaVariantId::generate().to_string().as_bytes(), + )), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add schema variant"); + + graph + .add_edge( + graph.root_index, + EdgeWeight::new(change_set, EdgeWeightKind::Use) + .expect("Unable to create EdgeWeight"), + schema_index, + ) + .expect("Unable to add root -> schema edge"); + graph + .add_edge( + graph + .get_node_index_by_id(schema_id) + .expect("Unable to get NodeIndex"), + EdgeWeight::new(change_set, EdgeWeightKind::Use) + .expect("Unable to create EdgeWeight"), + schema_variant_index, 
+ ) + .expect("Unable to add schema -> schema variant edge"); + + let schema_variant_2_id = change_set.generate_ulid().expect("Unable to generate Ulid"); + let schema_variant_2_index = graph + .add_node( + NodeWeight::new_content( + change_set, + schema_variant_2_id, + ContentAddress::SchemaVariant(ContentHash::new( + SchemaVariantId::generate().to_string().as_bytes(), + )), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add schema variant"); + + graph + .add_edge( + graph + .get_node_index_by_id(schema_id) + .expect("Unable to get NodeIndex"), + EdgeWeight::new(change_set, EdgeWeightKind::Use) + .expect("Unable to create EdgeWeight"), + schema_variant_2_index, + ) + .expect("Unable to add schema -> schema variant edge"); + + let expected_edges = HashSet::from([schema_variant_2_index, schema_variant_index]); + + let existing_edges: HashSet = graph + .edges_directed( + graph + .get_node_index_by_id(schema_id) + .expect("Unable to get NodeIndex for schema"), + Outgoing, + ) + .map(|edge_ref| edge_ref.target()) + .collect(); + + assert_eq!( + expected_edges, existing_edges, + "confirm edges are there before deleting" + ); + + graph + .remove_edge( + change_set, + graph + .get_node_index_by_id(schema_id) + .expect("Unable to get NodeIndex for schema"), + schema_variant_2_index, + EdgeWeightKindDiscriminants::Use, + ) + .expect("Edge removal failed"); + + let existing_edges: Vec = graph + .edges_directed( + graph + .get_node_index_by_id(schema_id) + .expect("Unable to get NodeIndex for schema"), + Outgoing, + ) + .map(|edge_ref| edge_ref.target()) + .collect(); + + assert_eq!( + vec![schema_variant_index], + existing_edges, + "confirm edges after deletion" + ); + } + + #[test] + fn remove_ordered_node() { + let change_set = ChangeSetPointer::new_local().expect("Unable to create ChangeSet"); + let change_set = &change_set; + let mut graph = WorkspaceSnapshotGraph::new(change_set) + .expect("Unable to create WorkspaceSnapshotGraph"); + + let 
schema_id = change_set.generate_ulid().expect("Unable to generate Ulid"); + let schema_index = graph + .add_node( + NodeWeight::new_content( + change_set, + schema_id, + ContentAddress::Schema(ContentHash::new( + SchemaId::generate().to_string().as_bytes(), + )), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add schema"); + let schema_variant_id = change_set.generate_ulid().expect("Unable to generate Ulid"); + let schema_variant_index = graph + .add_node( + NodeWeight::new_content( + change_set, + schema_variant_id, + ContentAddress::SchemaVariant(ContentHash::new( + SchemaVariantId::generate().to_string().as_bytes(), + )), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add schema variant"); + + graph + .add_edge( + graph.root_index, + EdgeWeight::new(change_set, EdgeWeightKind::Use) + .expect("Unable to create EdgeWeight"), + schema_index, + ) + .expect("Unable to add root -> schema edge"); + graph + .add_edge( + graph + .get_node_index_by_id(schema_id) + .expect("Unable to get NodeIndex"), + EdgeWeight::new(change_set, EdgeWeightKind::Use) + .expect("Unable to create EdgeWeight"), + schema_variant_index, + ) + .expect("Unable to add schema -> schema variant edge"); + + let func_id = change_set.generate_ulid().expect("Unable to generate Ulid"); + let func_index = graph + .add_node( + NodeWeight::new_content( + change_set, + func_id, + ContentAddress::Func(ContentHash::new( + FuncId::generate().to_string().as_bytes(), + )), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add func"); + graph + .add_edge( + graph.root_index, + EdgeWeight::new(change_set, EdgeWeightKind::Use) + .expect("Unable to create EdgeWeight"), + func_index, + ) + .expect("Unable to add root -> func edge"); + + let root_prop_id = change_set.generate_ulid().expect("Unable to generate Ulid"); + let root_prop_index = graph + .add_ordered_node( + change_set, + NodeWeight::new_content( + change_set, + root_prop_id, + 
ContentAddress::Prop(ContentHash::new( + PropId::generate().to_string().as_bytes(), + )), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add prop"); + graph + .add_edge( + graph + .get_node_index_by_id(schema_variant_id) + .expect("Unable to get NodeIndex"), + EdgeWeight::new(change_set, EdgeWeightKind::Use) + .expect("Unable to create EdgeWeight"), + root_prop_index, + ) + .expect("Unable to add schema variant -> prop edge"); + graph + .add_edge( + graph + .get_node_index_by_id(root_prop_id) + .expect("Unable to get NodeIndex"), + EdgeWeight::new(change_set, EdgeWeightKind::Use) + .expect("Unable to create EdgeWeight"), + graph + .get_node_index_by_id(func_id) + .expect("Unable to get NodeIndex"), + ) + .expect("Unable to add prop -> func edge"); + graph.cleanup(); + graph.dot(); + + let ordered_prop_1_id = change_set.generate_ulid().expect("Unable to generate Ulid"); + let ordered_prop_1_index = graph + .add_node( + NodeWeight::new_content( + change_set, + ordered_prop_1_id, + ContentAddress::Prop(ContentHash::new( + ordered_prop_1_id.to_string().as_bytes(), + )), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add ordered prop"); + graph + .add_ordered_edge( + change_set, + graph + .get_node_index_by_id(root_prop_id) + .expect("Unable to get NodeWeight for prop"), + EdgeWeight::new(change_set, EdgeWeightKind::Use) + .expect("Unable to create uses edge weight"), + ordered_prop_1_index, + ) + .expect("Unable to add prop -> ordered_prop_1 edge"); + + let ordered_prop_2_id = change_set.generate_ulid().expect("Unable to generate Ulid"); + let ordered_prop_2_index = graph + .add_node( + NodeWeight::new_content( + change_set, + ordered_prop_2_id, + ContentAddress::Prop(ContentHash::new( + ordered_prop_2_id.to_string().as_bytes(), + )), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add ordered prop"); + graph + .add_ordered_edge( + change_set, + graph + .get_node_index_by_id(root_prop_id) + 
.expect("Unable to get NodeWeight for prop"), + EdgeWeight::new(change_set, EdgeWeightKind::Use) + .expect("Unable to create uses edge weight"), + ordered_prop_2_index, + ) + .expect("Unable to add prop -> ordered_prop_2 edge"); + + let ordered_prop_3_id = change_set.generate_ulid().expect("Unable to generate Ulid"); + let ordered_prop_3_index = graph + .add_node( + NodeWeight::new_content( + change_set, + ordered_prop_3_id, + ContentAddress::Prop(ContentHash::new( + ordered_prop_3_id.to_string().as_bytes(), + )), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add ordered prop"); + graph + .add_ordered_edge( + change_set, + graph + .get_node_index_by_id(root_prop_id) + .expect("Unable to get NodeWeight for prop"), + EdgeWeight::new(change_set, EdgeWeightKind::Use) + .expect("Unable to create uses edge weight"), + ordered_prop_3_index, + ) + .expect("Unable to add prop -> ordered_prop_3 edge"); + + let ordered_prop_4_id = change_set.generate_ulid().expect("Unable to generate Ulid"); + let ordered_prop_4_index = graph + .add_node( + NodeWeight::new_content( + change_set, + ordered_prop_4_id, + ContentAddress::Prop(ContentHash::new( + ordered_prop_4_id.to_string().as_bytes(), + )), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add ordered prop"); + graph + .add_ordered_edge( + change_set, + graph + .get_node_index_by_id(root_prop_id) + .expect("Unable to get NodeWeight for prop"), + EdgeWeight::new(change_set, EdgeWeightKind::Use) + .expect("Unable to create uses edge weight"), + ordered_prop_4_index, + ) + .expect("Unable to add prop -> ordered_prop_4 edge"); + + graph.cleanup(); + graph.dot(); + + assert_eq!( + vec![ + ordered_prop_1_index, + ordered_prop_2_index, + ordered_prop_3_index, + ordered_prop_4_index, + ], + graph + .ordered_children_for_node( + graph + .get_node_index_by_id(root_prop_id) + .expect("Unable to get prop NodeIndex") + ) + .expect("Unable to find ordered children for node") + .expect("Node is 
not an ordered node") + ); + + graph + .remove_edge( + change_set, + graph + .get_node_index_by_id(root_prop_id) + .expect("Unable to get NodeIndex for prop"), + ordered_prop_2_index, + EdgeWeightKindDiscriminants::Use, + ) + .expect("Unable to remove prop -> ordered_prop_2 edge"); + + assert_eq!( + vec![ + ordered_prop_1_index, + ordered_prop_3_index, + ordered_prop_4_index, + ], + graph + .ordered_children_for_node( + graph + .get_node_index_by_id(root_prop_id) + .expect("Unable to get prop NodeIndex") + ) + .expect("Unable to find ordered children for node") + .expect("Node is not an ordered node") + ); + if let NodeWeight::Ordering(ordering_weight) = graph + .get_node_weight( + graph + .ordering_node_index_for_container( + graph + .get_node_index_by_id(root_prop_id) + .expect("Unable to find ordering node for prop"), + ) + .expect("Error getting ordering NodeIndex for prop") + .expect("Unable to find ordering NodeIndex"), + ) + .expect("Unable to get ordering NodeWeight for ordering node") + { + assert_eq!( + &vec![ordered_prop_1_id, ordered_prop_3_id, ordered_prop_4_id], + ordering_weight.order() + ); + } else { + panic!("Unable to destructure ordering node weight"); + } + } + + #[test] + fn detect_conflicts_and_updates_simple_ordering_no_conflicts_no_updates_in_base() { + let initial_change_set = ChangeSetPointer::new_local().expect("Unable to create ChangeSet"); + let initial_change_set = &initial_change_set; + let mut initial_graph = WorkspaceSnapshotGraph::new(initial_change_set) + .expect("Unable to create WorkspaceSnapshotGraph"); + + let schema_id = initial_change_set + .generate_ulid() + .expect("Unable to generate Ulid"); + let schema_index = initial_graph + .add_node( + NodeWeight::new_content( + initial_change_set, + schema_id, + ContentAddress::Schema(ContentHash::from("Schema A")), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add Schema A"); + let schema_variant_id = initial_change_set + .generate_ulid() + .expect("Unable 
to generate Ulid"); + let schema_variant_index = initial_graph + .add_node( + NodeWeight::new_content( + initial_change_set, + schema_variant_id, + ContentAddress::SchemaVariant(ContentHash::from("Schema Variant A")), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add Schema Variant A"); + + initial_graph + .add_edge( + initial_graph.root_index, + EdgeWeight::new(initial_change_set, EdgeWeightKind::Use) + .expect("Unable to create EdgeWeight"), + schema_index, + ) + .expect("Unable to add root -> schema edge"); + initial_graph + .add_edge( + initial_graph + .get_node_index_by_id(schema_id) + .expect("Unable to get NodeIndex"), + EdgeWeight::new(initial_change_set, EdgeWeightKind::Use) + .expect("Unable to create EdgeWeight"), + schema_variant_index, + ) + .expect("Unable to add schema -> schema variant edge"); + + let container_prop_id = initial_change_set + .generate_ulid() + .expect("Unable to generate Ulid"); + let container_prop_index = initial_graph + .add_ordered_node( + initial_change_set, + NodeWeight::new_content( + initial_change_set, + container_prop_id, + ContentAddress::Prop(ContentHash::new( + container_prop_id.to_string().as_bytes(), + )), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add container prop"); + initial_graph + .add_edge( + initial_graph + .get_node_index_by_id(schema_variant_id) + .expect("Unable to get NodeIndex"), + EdgeWeight::new(initial_change_set, EdgeWeightKind::Use) + .expect("Unable to create EdgeWeight"), + container_prop_index, + ) + .expect("Unable to add schema variant -> container prop edge"); + + let ordered_prop_1_id = initial_change_set + .generate_ulid() + .expect("Unable to generate Ulid"); + let ordered_prop_1_index = initial_graph + .add_node( + NodeWeight::new_content( + initial_change_set, + ordered_prop_1_id, + ContentAddress::Prop(ContentHash::new( + ordered_prop_1_id.to_string().as_bytes(), + )), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable 
to add ordered prop 1"); + initial_graph + .add_ordered_edge( + initial_change_set, + initial_graph + .get_node_index_by_id(container_prop_id) + .expect("Unable to get NodeIndex"), + EdgeWeight::new(initial_change_set, EdgeWeightKind::Use) + .expect("Unable to create EdgeWeight"), + ordered_prop_1_index, + ) + .expect("Unable to add container prop -> ordered prop 1 edge"); + + let ordered_prop_2_id = initial_change_set + .generate_ulid() + .expect("Unable to generate Ulid"); + let ordered_prop_2_index = initial_graph + .add_node( + NodeWeight::new_content( + initial_change_set, + ordered_prop_2_id, + ContentAddress::Prop(ContentHash::new( + ordered_prop_2_id.to_string().as_bytes(), + )), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add ordered prop 2"); + initial_graph + .add_ordered_edge( + initial_change_set, + initial_graph + .get_node_index_by_id(container_prop_id) + .expect("Unable to get NodeIndex"), + EdgeWeight::new(initial_change_set, EdgeWeightKind::Use) + .expect("Unable to create EdgeWeight"), + ordered_prop_2_index, + ) + .expect("Unable to add container prop -> ordered prop 2 edge"); + + let ordered_prop_3_id = initial_change_set + .generate_ulid() + .expect("Unable to generate Ulid"); + let ordered_prop_3_index = initial_graph + .add_node( + NodeWeight::new_content( + initial_change_set, + ordered_prop_3_id, + ContentAddress::Prop(ContentHash::new( + ordered_prop_3_id.to_string().as_bytes(), + )), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add ordered prop 3"); + initial_graph + .add_ordered_edge( + initial_change_set, + initial_graph + .get_node_index_by_id(container_prop_id) + .expect("Unable to get NodeIndex"), + EdgeWeight::new(initial_change_set, EdgeWeightKind::Use) + .expect("Unable to create EdgeWeight"), + ordered_prop_3_index, + ) + .expect("Unable to add container prop -> ordered prop 3 edge"); + + let ordered_prop_4_id = initial_change_set + .generate_ulid() + .expect("Unable to 
generate Ulid"); + let ordered_prop_4_index = initial_graph + .add_node( + NodeWeight::new_content( + initial_change_set, + ordered_prop_4_id, + ContentAddress::Prop(ContentHash::new( + ordered_prop_4_id.to_string().as_bytes(), + )), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add ordered prop 4"); + initial_graph + .add_ordered_edge( + initial_change_set, + initial_graph + .get_node_index_by_id(container_prop_id) + .expect("Unable to get NodeIndex"), + EdgeWeight::new(initial_change_set, EdgeWeightKind::Use) + .expect("Unable to create EdgeWeight"), + ordered_prop_4_index, + ) + .expect("Unable to add container prop -> ordered prop 4 edge"); + + initial_graph.cleanup(); + initial_graph.dot(); + + let new_change_set = ChangeSetPointer::new_local().expect("Unable to create ChangeSet"); + let new_change_set = &new_change_set; + let mut new_graph = initial_graph.clone(); + + let ordered_prop_5_id = new_change_set + .generate_ulid() + .expect("Unable to generate Ulid"); + let ordered_prop_5_index = new_graph + .add_node( + NodeWeight::new_content( + new_change_set, + ordered_prop_5_id, + ContentAddress::Prop(ContentHash::new( + ordered_prop_5_id.to_string().as_bytes(), + )), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add ordered prop 5"); + new_graph + .add_ordered_edge( + new_change_set, + new_graph + .get_node_index_by_id(container_prop_id) + .expect("Unable to get NodeIndex"), + EdgeWeight::new(new_change_set, EdgeWeightKind::Use) + .expect("Unable to create EdgeWeight"), + ordered_prop_5_index, + ) + .expect("Unable to add container prop -> ordered prop 5 edge"); + + new_graph.cleanup(); + new_graph.dot(); + + let (conflicts, updates) = new_graph + .detect_conflicts_and_updates( + new_change_set.vector_clock_id(), + &initial_graph, + initial_change_set.vector_clock_id(), + ) + .expect("Unable to detect conflicts and updates"); + + assert_eq!(Vec::::new(), conflicts); + assert_eq!(Vec::::new(), updates); + } + + 
#[test] + fn detect_conflicts_and_updates_simple_ordering_no_conflicts_with_updates_in_base() { + let initial_change_set = ChangeSetPointer::new_local().expect("Unable to create ChangeSet"); + let initial_change_set = &initial_change_set; + let mut initial_graph = WorkspaceSnapshotGraph::new(initial_change_set) + .expect("Unable to create WorkspaceSnapshotGraph"); + + let schema_id = initial_change_set + .generate_ulid() + .expect("Unable to generate Ulid"); + let schema_index = initial_graph + .add_node( + NodeWeight::new_content( + initial_change_set, + schema_id, + ContentAddress::Schema(ContentHash::from("Schema A")), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add Schema A"); + let schema_variant_id = initial_change_set + .generate_ulid() + .expect("Unable to generate Ulid"); + let schema_variant_index = initial_graph + .add_node( + NodeWeight::new_content( + initial_change_set, + schema_variant_id, + ContentAddress::SchemaVariant(ContentHash::from("Schema Variant A")), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add Schema Variant A"); + + initial_graph + .add_edge( + initial_graph.root_index, + EdgeWeight::new(initial_change_set, EdgeWeightKind::Use) + .expect("Unable to create EdgeWeight"), + schema_index, + ) + .expect("Unable to add root -> schema edge"); + initial_graph + .add_edge( + initial_graph + .get_node_index_by_id(schema_id) + .expect("Unable to get NodeIndex"), + EdgeWeight::new(initial_change_set, EdgeWeightKind::Use) + .expect("Unable to create EdgeWeight"), + schema_variant_index, + ) + .expect("Unable to add schema -> schema variant edge"); + + let container_prop_id = initial_change_set + .generate_ulid() + .expect("Unable to generate Ulid"); + let container_prop_index = initial_graph + .add_ordered_node( + initial_change_set, + NodeWeight::new_content( + initial_change_set, + container_prop_id, + ContentAddress::Prop(ContentHash::new( + container_prop_id.to_string().as_bytes(), + )), + ) 
+ .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add container prop"); + initial_graph + .add_edge( + initial_graph + .get_node_index_by_id(schema_variant_id) + .expect("Unable to get NodeIndex"), + EdgeWeight::new(initial_change_set, EdgeWeightKind::Use) + .expect("Unable to create EdgeWeight"), + container_prop_index, + ) + .expect("Unable to add schema variant -> container prop edge"); + + let ordered_prop_1_id = initial_change_set + .generate_ulid() + .expect("Unable to generate Ulid"); + let ordered_prop_1_index = initial_graph + .add_node( + NodeWeight::new_content( + initial_change_set, + ordered_prop_1_id, + ContentAddress::Prop(ContentHash::new( + ordered_prop_1_id.to_string().as_bytes(), + )), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add ordered prop 1"); + initial_graph + .add_ordered_edge( + initial_change_set, + initial_graph + .get_node_index_by_id(container_prop_id) + .expect("Unable to get NodeIndex"), + EdgeWeight::new(initial_change_set, EdgeWeightKind::Use) + .expect("Unable to create EdgeWeight"), + ordered_prop_1_index, + ) + .expect("Unable to add container prop -> ordered prop 1 edge"); + + let ordered_prop_2_id = initial_change_set + .generate_ulid() + .expect("Unable to generate Ulid"); + let ordered_prop_2_index = initial_graph + .add_node( + NodeWeight::new_content( + initial_change_set, + ordered_prop_2_id, + ContentAddress::Prop(ContentHash::new( + ordered_prop_2_id.to_string().as_bytes(), + )), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add ordered prop 2"); + initial_graph + .add_ordered_edge( + initial_change_set, + initial_graph + .get_node_index_by_id(container_prop_id) + .expect("Unable to get NodeIndex"), + EdgeWeight::new(initial_change_set, EdgeWeightKind::Use) + .expect("Unable to create EdgeWeight"), + ordered_prop_2_index, + ) + .expect("Unable to add container prop -> ordered prop 2 edge"); + + let ordered_prop_3_id = initial_change_set + 
.generate_ulid() + .expect("Unable to generate Ulid"); + let ordered_prop_3_index = initial_graph + .add_node( + NodeWeight::new_content( + initial_change_set, + ordered_prop_3_id, + ContentAddress::Prop(ContentHash::new( + ordered_prop_3_id.to_string().as_bytes(), + )), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add ordered prop 3"); + initial_graph + .add_ordered_edge( + initial_change_set, + initial_graph + .get_node_index_by_id(container_prop_id) + .expect("Unable to get NodeIndex"), + EdgeWeight::new(initial_change_set, EdgeWeightKind::Use) + .expect("Unable to create EdgeWeight"), + ordered_prop_3_index, + ) + .expect("Unable to add container prop -> ordered prop 3 edge"); + + let ordered_prop_4_id = initial_change_set + .generate_ulid() + .expect("Unable to generate Ulid"); + let ordered_prop_4_index = initial_graph + .add_node( + NodeWeight::new_content( + initial_change_set, + ordered_prop_4_id, + ContentAddress::Prop(ContentHash::new( + ordered_prop_4_id.to_string().as_bytes(), + )), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add ordered prop 4"); + initial_graph + .add_ordered_edge( + initial_change_set, + initial_graph + .get_node_index_by_id(container_prop_id) + .expect("Unable to get NodeIndex"), + EdgeWeight::new(initial_change_set, EdgeWeightKind::Use) + .expect("Unable to create EdgeWeight"), + ordered_prop_4_index, + ) + .expect("Unable to add container prop -> ordered prop 4 edge"); + + initial_graph.dot(); + + let new_change_set = ChangeSetPointer::new_local().expect("Unable to create ChangeSet"); + let new_change_set = &new_change_set; + let new_graph = initial_graph.clone(); + + let ordered_prop_5_id = initial_change_set + .generate_ulid() + .expect("Unable to generate Ulid"); + let ordered_prop_5_index = initial_graph + .add_node( + NodeWeight::new_content( + initial_change_set, + ordered_prop_5_id, + ContentAddress::Prop(ContentHash::new( + ordered_prop_5_id.to_string().as_bytes(), + 
)), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add ordered prop 5"); + let new_edge_weight = EdgeWeight::new(initial_change_set, EdgeWeightKind::Use) + .expect("Unable to create EdgeWeight"); + let (_, maybe_ordinal_edge_information) = initial_graph + .add_ordered_edge( + initial_change_set, + initial_graph + .get_node_index_by_id(container_prop_id) + .expect("Unable to get NodeIndex"), + new_edge_weight.clone(), + ordered_prop_5_index, + ) + .expect("Unable to add container prop -> ordered prop 5 edge"); + let ( + ordinal_edge_index, + source_node_index_for_ordinal_edge, + destination_node_index_for_ordinal_edge, + ) = maybe_ordinal_edge_information.expect("ordinal edge information not found"); + let ordinal_edge_weight = initial_graph + .graph + .edge_weight(ordinal_edge_index) + .expect("could not get edge weight for index") + .to_owned(); + let source_node_id_for_ordinal_edge = initial_graph + .get_node_weight(source_node_index_for_ordinal_edge) + .expect("could not get node weight") + .id(); + let destination_node_id_for_ordinal_edge = initial_graph + .get_node_weight(destination_node_index_for_ordinal_edge) + .expect("could not get node weight") + .id(); + + new_graph.dot(); + + let (conflicts, updates) = new_graph + .detect_conflicts_and_updates( + new_change_set.vector_clock_id(), + &initial_graph, + initial_change_set.vector_clock_id(), + ) + .expect("Unable to detect conflicts and updates"); + + assert_eq!(Vec::::new(), conflicts); + assert_eq!( + vec![ + Update::NewEdge { + source: new_graph + .get_node_index_by_id(container_prop_id) + .expect("Unable to get NodeIndex"), + destination: initial_graph + .get_node_index_by_id(ordered_prop_5_id) + .expect("Unable to get NodeIndex"), + edge_weight: new_edge_weight, + }, + Update::ReplaceSubgraph { + onto: initial_graph + .ordering_node_index_for_container( + initial_graph + .get_node_index_by_id(container_prop_id) + .expect("Unable to get container NodeIndex") + ) + .expect("Unable 
to get new ordering NodeIndex") + .expect("Ordering NodeIndex not found"), + to_rebase: new_graph + .ordering_node_index_for_container( + new_graph + .get_node_index_by_id(container_prop_id) + .expect("Unable to get container NodeIndex") + ) + .expect("Unable to get old ordering NodeIndex") + .expect("Ordering NodeIndex not found"), + }, + Update::NewEdge { + source: new_graph + .get_node_index_by_id(source_node_id_for_ordinal_edge) + .expect("could not get node index by id"), + destination: initial_graph + .get_node_index_by_id(destination_node_id_for_ordinal_edge) + .expect("could not get node index by id"), + edge_weight: ordinal_edge_weight, + } + ], + updates + ); + } + + #[test] + fn detect_conflicts_and_updates_simple_ordering_with_conflicting_ordering_updates() { + let initial_change_set = ChangeSetPointer::new_local().expect("Unable to create ChangeSet"); + let initial_change_set = &initial_change_set; + let mut initial_graph = WorkspaceSnapshotGraph::new(initial_change_set) + .expect("Unable to create WorkspaceSnapshotGraph"); + + let schema_id = initial_change_set + .generate_ulid() + .expect("Unable to generate Ulid"); + let schema_index = initial_graph + .add_node( + NodeWeight::new_content( + initial_change_set, + schema_id, + ContentAddress::Schema(ContentHash::from("Schema A")), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add Schema A"); + let schema_variant_id = initial_change_set + .generate_ulid() + .expect("Unable to generate Ulid"); + let schema_variant_index = initial_graph + .add_node( + NodeWeight::new_content( + initial_change_set, + schema_variant_id, + ContentAddress::SchemaVariant(ContentHash::from("Schema Variant A")), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add Schema Variant A"); + + initial_graph + .add_edge( + initial_graph.root_index, + EdgeWeight::new(initial_change_set, EdgeWeightKind::Use) + .expect("Unable to create EdgeWeight"), + schema_index, + ) + .expect("Unable to 
add root -> schema edge"); + initial_graph + .add_edge( + initial_graph + .get_node_index_by_id(schema_id) + .expect("Unable to get NodeIndex"), + EdgeWeight::new(initial_change_set, EdgeWeightKind::Use) + .expect("Unable to create EdgeWeight"), + schema_variant_index, + ) + .expect("Unable to add schema -> schema variant edge"); + + let container_prop_id = initial_change_set + .generate_ulid() + .expect("Unable to generate Ulid"); + let container_prop_index = initial_graph + .add_ordered_node( + initial_change_set, + NodeWeight::new_content( + initial_change_set, + container_prop_id, + ContentAddress::Prop(ContentHash::new( + container_prop_id.to_string().as_bytes(), + )), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add container prop"); + initial_graph + .add_edge( + initial_graph + .get_node_index_by_id(schema_variant_id) + .expect("Unable to get NodeIndex"), + EdgeWeight::new(initial_change_set, EdgeWeightKind::Use) + .expect("Unable to create EdgeWeight"), + container_prop_index, + ) + .expect("Unable to add schema variant -> container prop edge"); + + let ordered_prop_1_id = initial_change_set + .generate_ulid() + .expect("Unable to generate Ulid"); + let ordered_prop_1_index = initial_graph + .add_node( + NodeWeight::new_content( + initial_change_set, + ordered_prop_1_id, + ContentAddress::Prop(ContentHash::new( + ordered_prop_1_id.to_string().as_bytes(), + )), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add ordered prop 1"); + initial_graph + .add_ordered_edge( + initial_change_set, + initial_graph + .get_node_index_by_id(container_prop_id) + .expect("Unable to get NodeIndex"), + EdgeWeight::new(initial_change_set, EdgeWeightKind::Use) + .expect("Unable to create EdgeWeight"), + ordered_prop_1_index, + ) + .expect("Unable to add container prop -> ordered prop 1 edge"); + + let ordered_prop_2_id = initial_change_set + .generate_ulid() + .expect("Unable to generate Ulid"); + let ordered_prop_2_index = 
initial_graph + .add_node( + NodeWeight::new_content( + initial_change_set, + ordered_prop_2_id, + ContentAddress::Prop(ContentHash::new( + ordered_prop_2_id.to_string().as_bytes(), + )), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add ordered prop 2"); + initial_graph + .add_ordered_edge( + initial_change_set, + initial_graph + .get_node_index_by_id(container_prop_id) + .expect("Unable to get NodeIndex"), + EdgeWeight::new(initial_change_set, EdgeWeightKind::Use) + .expect("Unable to create EdgeWeight"), + ordered_prop_2_index, + ) + .expect("Unable to add container prop -> ordered prop 2 edge"); + + let ordered_prop_3_id = initial_change_set + .generate_ulid() + .expect("Unable to generate Ulid"); + let ordered_prop_3_index = initial_graph + .add_node( + NodeWeight::new_content( + initial_change_set, + ordered_prop_3_id, + ContentAddress::Prop(ContentHash::new( + ordered_prop_3_id.to_string().as_bytes(), + )), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add ordered prop 3"); + initial_graph + .add_ordered_edge( + initial_change_set, + initial_graph + .get_node_index_by_id(container_prop_id) + .expect("Unable to get NodeIndex"), + EdgeWeight::new(initial_change_set, EdgeWeightKind::Use) + .expect("Unable to create EdgeWeight"), + ordered_prop_3_index, + ) + .expect("Unable to add container prop -> ordered prop 3 edge"); + + let ordered_prop_4_id = initial_change_set + .generate_ulid() + .expect("Unable to generate Ulid"); + let ordered_prop_4_index = initial_graph + .add_node( + NodeWeight::new_content( + initial_change_set, + ordered_prop_4_id, + ContentAddress::Prop(ContentHash::new( + ordered_prop_4_id.to_string().as_bytes(), + )), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add ordered prop 4"); + initial_graph + .add_ordered_edge( + initial_change_set, + initial_graph + .get_node_index_by_id(container_prop_id) + .expect("Unable to get NodeIndex"), + 
EdgeWeight::new(initial_change_set, EdgeWeightKind::Use) + .expect("Unable to create EdgeWeight"), + ordered_prop_4_index, + ) + .expect("Unable to add container prop -> ordered prop 4 edge"); + + initial_graph.dot(); + + let new_change_set = ChangeSetPointer::new_local().expect("Unable to create ChangeSet"); + let new_change_set = &new_change_set; + let mut new_graph = initial_graph.clone(); + + let new_order = vec![ + ordered_prop_2_id, + ordered_prop_1_id, + ordered_prop_4_id, + ordered_prop_3_id, + ]; + new_graph + .update_order(new_change_set, container_prop_id, new_order) + .expect("Unable to update order of container prop's children"); + + let ordered_prop_5_id = initial_change_set + .generate_ulid() + .expect("Unable to generate Ulid"); + let ordered_prop_5_index = initial_graph + .add_node( + NodeWeight::new_content( + initial_change_set, + ordered_prop_5_id, + ContentAddress::Prop(ContentHash::new( + ordered_prop_5_id.to_string().as_bytes(), + )), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add ordered prop 5"); + let new_edge_weight = EdgeWeight::new(initial_change_set, EdgeWeightKind::Use) + .expect("Unable to create EdgeWeight"); + let (_, maybe_ordinal_edge_information) = initial_graph + .add_ordered_edge( + initial_change_set, + initial_graph + .get_node_index_by_id(container_prop_id) + .expect("Unable to get NodeIndex"), + new_edge_weight.clone(), + ordered_prop_5_index, + ) + .expect("Unable to add container prop -> ordered prop 5 edge"); + let ( + ordinal_edge_index, + source_node_index_for_ordinal_edge, + destination_node_index_for_ordinal_edge, + ) = maybe_ordinal_edge_information.expect("ordinal edge information not found"); + let ordinal_edge_weight = initial_graph + .graph + .edge_weight(ordinal_edge_index) + .expect("could not get edge weight for index") + .to_owned(); + let source_node_id_for_ordinal_edge = initial_graph + .get_node_weight(source_node_index_for_ordinal_edge) + .expect("could not get node weight") 
+ .id(); + let destination_node_id_for_ordinal_edge = initial_graph + .get_node_weight(destination_node_index_for_ordinal_edge) + .expect("could not get node weight") + .id(); + + new_graph.dot(); + + let (conflicts, updates) = new_graph + .detect_conflicts_and_updates( + new_change_set.vector_clock_id(), + &initial_graph, + initial_change_set.vector_clock_id(), + ) + .expect("Unable to detect conflicts and updates"); + + assert_eq!( + vec![Conflict::ChildOrder { + onto: initial_graph + .ordering_node_index_for_container( + initial_graph + .get_node_index_by_id(container_prop_id) + .expect("Unable to get container NodeIndex") + ) + .expect("Unable to get ordering NodeIndex") + .expect("Ordering NodeIndex not found"), + to_rebase: new_graph + .ordering_node_index_for_container( + new_graph + .get_node_index_by_id(container_prop_id) + .expect("Unable to get container NodeIndex") + ) + .expect("Unable to get ordering NodeIndex") + .expect("Ordering NodeIndex not found"), + }], + conflicts + ); + assert_eq!( + vec![ + Update::NewEdge { + source: new_graph + .get_node_index_by_id(container_prop_id) + .expect("Unable to get new_graph container NodeIndex"), + destination: initial_graph + .get_node_index_by_id(ordered_prop_5_id) + .expect("Unable to get ordered prop 5 NodeIndex"), + edge_weight: new_edge_weight, + }, + Update::NewEdge { + source: new_graph + .get_node_index_by_id(source_node_id_for_ordinal_edge) + .expect("could not get node index by id"), + destination: initial_graph + .get_node_index_by_id(destination_node_id_for_ordinal_edge) + .expect("could not get node index by id"), + edge_weight: ordinal_edge_weight, + } + ], + updates + ); + } + + #[test] + fn detect_conflicts_and_updates_simple_ordering_with_no_conflicts_add_in_onto_remove_in_to_rebase( + ) { + let initial_change_set = ChangeSetPointer::new_local().expect("Unable to create ChangeSet"); + let initial_change_set = &initial_change_set; + let mut initial_graph = 
WorkspaceSnapshotGraph::new(initial_change_set) + .expect("Unable to create WorkspaceSnapshotGraph"); + + let schema_id = initial_change_set + .generate_ulid() + .expect("Unable to generate Ulid"); + let schema_index = initial_graph + .add_node( + NodeWeight::new_content( + initial_change_set, + schema_id, + ContentAddress::Schema(ContentHash::from("Schema A")), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add Schema A"); + let schema_variant_id = initial_change_set + .generate_ulid() + .expect("Unable to generate Ulid"); + let schema_variant_index = initial_graph + .add_node( + NodeWeight::new_content( + initial_change_set, + schema_variant_id, + ContentAddress::SchemaVariant(ContentHash::from("Schema Variant A")), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add Schema Variant A"); + + initial_graph + .add_edge( + initial_graph.root_index, + EdgeWeight::new(initial_change_set, EdgeWeightKind::Use) + .expect("Unable to create EdgeWeight"), + schema_index, + ) + .expect("Unable to add root -> schema edge"); + initial_graph + .add_edge( + initial_graph + .get_node_index_by_id(schema_id) + .expect("Unable to get NodeIndex"), + EdgeWeight::new(initial_change_set, EdgeWeightKind::Use) + .expect("Unable to create EdgeWeight"), + schema_variant_index, + ) + .expect("Unable to add schema -> schema variant edge"); + + let container_prop_id = initial_change_set + .generate_ulid() + .expect("Unable to generate Ulid"); + let container_prop_index = initial_graph + .add_ordered_node( + initial_change_set, + NodeWeight::new_content( + initial_change_set, + container_prop_id, + ContentAddress::Prop(ContentHash::new( + container_prop_id.to_string().as_bytes(), + )), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add container prop"); + initial_graph + .add_edge( + initial_graph + .get_node_index_by_id(schema_variant_id) + .expect("Unable to get NodeIndex"), + EdgeWeight::new(initial_change_set, 
EdgeWeightKind::Use) + .expect("Unable to create EdgeWeight"), + container_prop_index, + ) + .expect("Unable to add schema variant -> container prop edge"); + + let ordered_prop_1_id = initial_change_set + .generate_ulid() + .expect("Unable to generate Ulid"); + let ordered_prop_1_index = initial_graph + .add_node( + NodeWeight::new_content( + initial_change_set, + ordered_prop_1_id, + ContentAddress::Prop(ContentHash::new( + ordered_prop_1_id.to_string().as_bytes(), + )), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add ordered prop 1"); + initial_graph + .add_ordered_edge( + initial_change_set, + initial_graph + .get_node_index_by_id(container_prop_id) + .expect("Unable to get NodeIndex"), + EdgeWeight::new(initial_change_set, EdgeWeightKind::Use) + .expect("Unable to create EdgeWeight"), + ordered_prop_1_index, + ) + .expect("Unable to add container prop -> ordered prop 1 edge"); + + let ordered_prop_2_id = initial_change_set + .generate_ulid() + .expect("Unable to generate Ulid"); + let ordered_prop_2_index = initial_graph + .add_node( + NodeWeight::new_content( + initial_change_set, + ordered_prop_2_id, + ContentAddress::Prop(ContentHash::new( + ordered_prop_2_id.to_string().as_bytes(), + )), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add ordered prop 2"); + initial_graph + .add_ordered_edge( + initial_change_set, + initial_graph + .get_node_index_by_id(container_prop_id) + .expect("Unable to get NodeIndex"), + EdgeWeight::new(initial_change_set, EdgeWeightKind::Use) + .expect("Unable to create EdgeWeight"), + ordered_prop_2_index, + ) + .expect("Unable to add container prop -> ordered prop 2 edge"); + + let ordered_prop_3_id = initial_change_set + .generate_ulid() + .expect("Unable to generate Ulid"); + let ordered_prop_3_index = initial_graph + .add_node( + NodeWeight::new_content( + initial_change_set, + ordered_prop_3_id, + ContentAddress::Prop(ContentHash::new( + 
ordered_prop_3_id.to_string().as_bytes(), + )), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add ordered prop 3"); + initial_graph + .add_ordered_edge( + initial_change_set, + initial_graph + .get_node_index_by_id(container_prop_id) + .expect("Unable to get NodeIndex"), + EdgeWeight::new(initial_change_set, EdgeWeightKind::Use) + .expect("Unable to create EdgeWeight"), + ordered_prop_3_index, + ) + .expect("Unable to add container prop -> ordered prop 3 edge"); + + let ordered_prop_4_id = initial_change_set + .generate_ulid() + .expect("Unable to generate Ulid"); + let ordered_prop_4_index = initial_graph + .add_node( + NodeWeight::new_content( + initial_change_set, + ordered_prop_4_id, + ContentAddress::Prop(ContentHash::new( + ordered_prop_4_id.to_string().as_bytes(), + )), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add ordered prop 4"); + initial_graph + .add_ordered_edge( + initial_change_set, + initial_graph + .get_node_index_by_id(container_prop_id) + .expect("Unable to get NodeIndex"), + EdgeWeight::new(initial_change_set, EdgeWeightKind::Use) + .expect("Unable to create EdgeWeight"), + ordered_prop_4_index, + ) + .expect("Unable to add container prop -> ordered prop 4 edge"); + + initial_graph.cleanup(); + initial_graph + .mark_graph_seen(initial_change_set.vector_clock_id()) + .expect("Unable to update recently seen information"); + // initial_graph.dot(); + + let new_change_set = ChangeSetPointer::new_local().expect("Unable to create ChangeSet"); + let new_change_set = &new_change_set; + let mut new_graph = initial_graph.clone(); + + new_graph + .remove_edge( + new_change_set, + new_graph + .get_node_index_by_id(container_prop_id) + .expect("Unable to get container NodeIndex"), + ordered_prop_2_index, + EdgeWeightKindDiscriminants::Use, + ) + .expect("Unable to remove container prop -> prop 2 edge"); + + let ordered_prop_5_id = initial_change_set + .generate_ulid() + .expect("Unable to generate Ulid"); 
+ let ordered_prop_5_index = initial_graph + .add_node( + NodeWeight::new_content( + initial_change_set, + ordered_prop_5_id, + ContentAddress::Prop(ContentHash::new( + ordered_prop_5_id.to_string().as_bytes(), + )), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add ordered prop 5"); + + let new_edge_weight = EdgeWeight::new(initial_change_set, EdgeWeightKind::Use) + .expect("Unable to create EdgeWeight"); + let (_, maybe_ordinal_edge_information) = initial_graph + .add_ordered_edge( + initial_change_set, + initial_graph + .get_node_index_by_id(container_prop_id) + .expect("Unable to get NodeIndex"), + new_edge_weight.clone(), + ordered_prop_5_index, + ) + .expect("Unable to add container prop -> ordered prop 5 edge"); + let ( + ordinal_edge_index, + source_node_index_for_ordinal_edge, + destination_node_index_for_ordinal_edge, + ) = maybe_ordinal_edge_information.expect("ordinal edge information not found"); + let ordinal_edge_weight = initial_graph + .graph + .edge_weight(ordinal_edge_index) + .expect("could not get edge weight for index") + .to_owned(); + let source_node_id_for_ordinal_edge = initial_graph + .get_node_weight(source_node_index_for_ordinal_edge) + .expect("could not get node weight") + .id(); + let destination_node_id_for_ordinal_edge = initial_graph + .get_node_weight(destination_node_index_for_ordinal_edge) + .expect("could not get node weight") + .id(); + + initial_graph.cleanup(); + initial_graph.dot(); + + new_graph.cleanup(); + new_graph.dot(); + + let (conflicts, updates) = new_graph + .detect_conflicts_and_updates( + new_change_set.vector_clock_id(), + &initial_graph, + initial_change_set.vector_clock_id(), + ) + .expect("Unable to detect conflicts and updates"); + + assert_eq!(Vec::::new(), conflicts); + assert_eq!( + vec![ + Update::NewEdge { + source: new_graph + .get_node_index_by_id(container_prop_id) + .expect("Unable to get new_graph container NodeIndex"), + destination: initial_graph + 
.get_node_index_by_id(ordered_prop_5_id) + .expect("Unable to get ordered prop 5 NodeIndex"), + edge_weight: new_edge_weight, + }, + Update::NewEdge { + source: new_graph + .get_node_index_by_id(source_node_id_for_ordinal_edge) + .expect("could not get node index by id"), + destination: initial_graph + .get_node_index_by_id(destination_node_id_for_ordinal_edge) + .expect("could not get node index by id"), + edge_weight: ordinal_edge_weight, + } + ], + updates + ); + } + + #[tokio::test] + #[cfg(ignore)] + async fn attribute_value_build_view() { + let change_set = ChangeSetPointer::new_local().expect("Unable to create ChangeSet"); + let change_set = &change_set; + let mut graph = WorkspaceSnapshotGraph::new(change_set) + .expect("Unable to create WorkspaceSnapshotGraph"); + let mut content_store = content_store::LocalStore::default(); + + let schema_id = change_set.generate_ulid().expect("Unable to generate Ulid"); + let schema_content_hash = content_store + .add(&serde_json::json!("Schema A")) + .expect("Unable to add to content store"); + let schema_node_index = graph + .add_node( + NodeWeight::new_content( + change_set, + schema_id, + ContentAddress::Schema(schema_content_hash), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add schema"); + graph + .add_edge( + graph.root_index, + EdgeWeight::new(change_set, EdgeWeightKind::Use) + .expect("Unable to create EdgeWeight"), + schema_node_index, + ) + .expect("Unable to add root -> schema edge"); + + let schema_variant_id = change_set.generate_ulid().expect("Unable to generate Ulid"); + let schema_variant_content_hash = content_store + .add(&serde_json::json!("Schema Variant A")) + .expect("Unable to add to content store"); + let schema_variant_node_index = graph + .add_node( + NodeWeight::new_content( + change_set, + schema_variant_id, + ContentAddress::SchemaVariant(schema_variant_content_hash), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add schema variant"); + 
graph + .add_edge( + graph + .get_node_index_by_id(schema_id) + .expect("Unable to get NodeIndex"), + EdgeWeight::new(change_set, EdgeWeightKind::Use) + .expect("Unable to create EdgeWeight"), + schema_variant_node_index, + ) + .expect("Unable to add schema -> schema variant edge"); + + let root_prop_id = change_set.generate_ulid().expect("Unable to generate Ulid"); + let root_prop_content_hash = content_store + .add(&serde_json::json!("Root prop")) + .expect("Unable to add to content store"); + let root_prop_node_index = graph + .add_node( + NodeWeight::new_prop( + change_set, + root_prop_id, + PropKind::Object, + "root", + root_prop_content_hash, + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add root prop"); + graph + .add_edge( + graph + .get_node_index_by_id(schema_variant_id) + .expect("Unable to get NodeIndex"), + EdgeWeight::new(change_set, EdgeWeightKind::Use) + .expect("Unable to create EdgeWeight"), + root_prop_node_index, + ) + .expect("Unable to add schema variant -> root prop edge"); + + let si_prop_id = change_set.generate_ulid().expect("Unable to generate Ulid"); + let si_prop_content_hash = content_store + .add(&serde_json::json!("SI Prop Content")) + .expect("Unable to add to content store"); + let si_prop_node_index = graph + .add_node( + NodeWeight::new_prop( + change_set, + si_prop_id, + PropKind::Object, + "si", + si_prop_content_hash, + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add si prop"); + graph + .add_edge( + graph + .get_node_index_by_id(root_prop_id) + .expect("Unable to get NodeIndex"), + EdgeWeight::new(change_set, EdgeWeightKind::Use) + .expect("Unable to create EdgeWeight"), + si_prop_node_index, + ) + .expect("Unable to add root prop -> si prop edge"); + + let name_prop_id = change_set.generate_ulid().expect("Unable to generate Ulid"); + let name_prop_content_hash = content_store + .add(&serde_json::json!("Name Prop Content")) + .expect("Unable to add to content store"); + let 
name_prop_node_index = graph + .add_node( + NodeWeight::new_prop( + change_set, + name_prop_id, + PropKind::Object, + "name", + name_prop_content_hash, + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add name prop"); + graph + .add_edge( + graph + .get_node_index_by_id(si_prop_id) + .expect("Unable to get NodeIndex"), + EdgeWeight::new(change_set, EdgeWeightKind::Use) + .expect("Unable to create EdgeWeight"), + name_prop_node_index, + ) + .expect("Unable to add si prop -> name prop edge"); + + let component_id = change_set.generate_ulid().expect("Unable to generate Ulid"); + let component_content_hash = content_store + .add(&serde_json::json!("Component Content")) + .expect("Unable to add to content store"); + let component_node_index = graph + .add_node( + NodeWeight::new_content( + change_set, + component_id, + ContentAddress::Component(component_content_hash), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add component"); + graph + .add_edge( + graph.root_index, + EdgeWeight::new(change_set, EdgeWeightKind::Use) + .expect("Unable to create EdgeWeight"), + component_node_index, + ) + .expect("Unable to add root -> component edge"); + graph + .add_edge( + graph + .get_node_index_by_id(component_id) + .expect("Unable to get NodeIndex"), + EdgeWeight::new(change_set, EdgeWeightKind::Use) + .expect("Unable to create EdgeWeight"), + graph + .get_node_index_by_id(schema_variant_id) + .expect("Unable to get NodeIndex"), + ) + .expect("Unable to add component -> schema variant edge"); + + let root_av_id = change_set.generate_ulid().expect("Unable to generate Ulid"); + let root_av_content_hash = content_store + .add(&serde_json::json!({})) + .expect("Unable to add to content store"); + let root_av_node_index = graph + .add_node( + NodeWeight::new_content( + change_set, + root_av_id, + ContentAddress::AttributeValue(root_av_content_hash), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add root av"); + 
graph + .add_edge( + graph + .get_node_index_by_id(component_id) + .expect("Unable to get NodeIndex"), + EdgeWeight::new(change_set, EdgeWeightKind::Use) + .expect("Unable to create EdgeWeight"), + root_av_node_index, + ) + .expect("Unable to add component -> root av edge"); + graph + .add_edge( + graph + .get_node_index_by_id(root_av_id) + .expect("Unable to get NodeIndex"), + EdgeWeight::new(change_set, EdgeWeightKind::Prop) + .expect("Unable to create EdgeWeight"), + graph + .get_node_index_by_id(root_prop_id) + .expect("Unable to get NodeIndex"), + ) + .expect("Unable to add root av -> root prop edge"); + + let si_av_id = change_set.generate_ulid().expect("Unable to generate Ulid"); + let si_av_content_hash = content_store + .add(&serde_json::json!({})) + .expect("Unable to add to content store"); + let si_av_node_index = graph + .add_node( + NodeWeight::new_content( + change_set, + si_av_id, + ContentAddress::AttributeValue(si_av_content_hash), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add si av"); + graph + .add_edge( + graph + .get_node_index_by_id(root_av_id) + .expect("Unable to get NodeIndex"), + EdgeWeight::new(change_set, EdgeWeightKind::Contain(None)) + .expect("Unable to create EdgeWeight"), + si_av_node_index, + ) + .expect("Unable to add root av -> si av edge"); + graph + .add_edge( + graph + .get_node_index_by_id(si_av_id) + .expect("Unable to get NodeIndex"), + EdgeWeight::new(change_set, EdgeWeightKind::Prop) + .expect("Unable to create EdgeWeight"), + graph + .get_node_index_by_id(si_prop_id) + .expect("Unable to get NodeIndex"), + ) + .expect("Unable to add si av -> si prop edge"); + + let name_av_id = change_set.generate_ulid().expect("Unable to generate Ulid"); + let name_av_content_hash = content_store + .add(&serde_json::json!("component name")) + .expect("Unable to add to content store"); + let name_av_node_index = graph + .add_node( + NodeWeight::new_content( + change_set, + name_av_id, + 
ContentAddress::AttributeValue(name_av_content_hash), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add name av"); + graph + .add_edge( + graph + .get_node_index_by_id(si_av_id) + .expect("Unable to get NodeWeight"), + EdgeWeight::new(change_set, EdgeWeightKind::Contain(None)) + .expect("Unable to create EdgeWeight"), + name_av_node_index, + ) + .expect("Unable to add si av -> name av edge"); + graph + .add_edge( + graph + .get_node_index_by_id(name_av_id) + .expect("Unable to get NodeIndex"), + EdgeWeight::new(change_set, EdgeWeightKind::Prop) + .expect("Unable to create EdgeWeight"), + graph + .get_node_index_by_id(name_prop_id) + .expect("Unable to get NodeIndex"), + ) + .expect("Unable to create name av -> name prop edge"); + + graph.cleanup(); + graph.dot(); + + assert_eq!( + serde_json::json![{"si": {"name": "component name"}}], + graph + .attribute_value_view( + &mut content_store, + graph + .get_node_index_by_id(root_av_id) + .expect("Unable to get NodeIndex"), + ) + .await + .expect("Unable to generate attribute value view"), + ); + } + + #[tokio::test] + #[cfg(ignore)] + async fn attribute_value_build_view_unordered_object() { + let change_set = ChangeSetPointer::new_local().expect("Unable to create ChangeSet"); + let change_set = &change_set; + let mut graph = WorkspaceSnapshotGraph::new(change_set) + .expect("Unable to create WorkspaceSnapshotGraph"); + let mut content_store = content_store::LocalStore::default(); + + let schema_id = change_set.generate_ulid().expect("Unable to generate Ulid"); + let schema_content_hash = content_store + .add(&serde_json::json!("Schema A")) + .expect("Unable to add to content store"); + let schema_node_index = graph + .add_node( + NodeWeight::new_content( + change_set, + schema_id, + ContentAddress::Schema(schema_content_hash), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add schema"); + graph + .add_edge( + graph.root_index, + EdgeWeight::new(change_set, 
EdgeWeightKind::Use) + .expect("Unable to create EdgeWeight"), + schema_node_index, + ) + .expect("Unable to add root -> schema edge"); + + let schema_variant_id = change_set.generate_ulid().expect("Unable to generate Ulid"); + let schema_variant_content_hash = content_store + .add(&serde_json::json!("Schema Variant A")) + .expect("Unable to add to content store"); + let schema_variant_node_index = graph + .add_node( + NodeWeight::new_content( + change_set, + schema_variant_id, + ContentAddress::SchemaVariant(schema_variant_content_hash), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add schema variant"); + graph + .add_edge( + graph + .get_node_index_by_id(schema_id) + .expect("Unable to get NodeIndex"), + EdgeWeight::new(change_set, EdgeWeightKind::Use) + .expect("Unable to create EdgeWeight"), + schema_variant_node_index, + ) + .expect("Unable to add schema -> schema variant edge"); + + let root_prop_id = change_set.generate_ulid().expect("Unable to generate Ulid"); + let root_prop_content_hash = content_store + .add(&serde_json::json!("Root prop")) + .expect("Unable to add to content store"); + let root_prop_node_index = graph + .add_node( + NodeWeight::new_prop( + change_set, + root_prop_id, + PropKind::Object, + "root", + root_prop_content_hash, + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add root prop"); + graph + .add_edge( + graph + .get_node_index_by_id(schema_variant_id) + .expect("Unable to get NodeIndex"), + EdgeWeight::new(change_set, EdgeWeightKind::Use) + .expect("Unable to create EdgeWeight"), + root_prop_node_index, + ) + .expect("Unable to add schema variant -> root prop edge"); + + let si_prop_id = change_set.generate_ulid().expect("Unable to generate Ulid"); + let si_prop_content_hash = content_store + .add(&serde_json::json!("SI Prop Content")) + .expect("Unable to add to content store"); + let si_prop_node_index = graph + .add_node( + NodeWeight::new_prop( + change_set, + si_prop_id, + 
PropKind::Object, + "si", + si_prop_content_hash, + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add si prop"); + graph + .add_edge( + graph + .get_node_index_by_id(root_prop_id) + .expect("Unable to get NodeIndex"), + EdgeWeight::new(change_set, EdgeWeightKind::Use) + .expect("Unable to create EdgeWeight"), + si_prop_node_index, + ) + .expect("Unable to add root prop -> si prop edge"); + + let name_prop_id = change_set.generate_ulid().expect("Unable to generate Ulid"); + let name_prop_content_hash = content_store + .add(&serde_json::json!("Name Prop Content")) + .expect("Unable to add to content store"); + let name_prop_node_index = graph + .add_node( + NodeWeight::new_prop( + change_set, + name_prop_id, + PropKind::Object, + "name", + name_prop_content_hash, + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add name prop"); + graph + .add_edge( + graph + .get_node_index_by_id(si_prop_id) + .expect("Unable to get NodeIndex"), + EdgeWeight::new(change_set, EdgeWeightKind::Use) + .expect("Unable to create EdgeWeight"), + name_prop_node_index, + ) + .expect("Unable to add si prop -> name prop edge"); + + let description_prop_id = change_set.generate_ulid().expect("Unable to generate Ulid"); + let description_prop_content_hash = content_store + .add(&serde_json::json!("Description Prop Content")) + .expect("Unable to add to content store"); + let description_prop_node_index = graph + .add_node( + NodeWeight::new_prop( + change_set, + description_prop_id, + PropKind::String, + "description", + description_prop_content_hash, + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add description prop"); + graph + .add_edge( + graph + .get_node_index_by_id(si_prop_id) + .expect("Unable to get NodeIndex"), + EdgeWeight::new(change_set, EdgeWeightKind::Use) + .expect("Unable to create EdgeWeight"), + description_prop_node_index, + ) + .expect("Unable to add si prop -> description prop edge"); + + let component_id 
= change_set.generate_ulid().expect("Unable to generate Ulid"); + let component_content_hash = content_store + .add(&serde_json::json!("Component Content")) + .expect("Unable to add to content store"); + let component_node_index = graph + .add_node( + NodeWeight::new_content( + change_set, + component_id, + ContentAddress::Component(component_content_hash), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add component"); + graph + .add_edge( + graph.root_index, + EdgeWeight::new(change_set, EdgeWeightKind::Use) + .expect("Unable to create EdgeWeight"), + component_node_index, + ) + .expect("Unable to add root -> component edge"); + graph + .add_edge( + graph + .get_node_index_by_id(component_id) + .expect("Unable to get NodeIndex"), + EdgeWeight::new(change_set, EdgeWeightKind::Use) + .expect("Unable to create EdgeWeight"), + graph + .get_node_index_by_id(schema_variant_id) + .expect("Unable to get NodeIndex"), + ) + .expect("Unable to add component -> schema variant edge"); + + let root_av_id = change_set.generate_ulid().expect("Unable to generate Ulid"); + let root_av_content_hash = content_store + .add(&serde_json::json!({})) + .expect("Unable to add to content store"); + let root_av_node_index = graph + .add_node( + NodeWeight::new_content( + change_set, + root_av_id, + ContentAddress::AttributeValue(root_av_content_hash), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add root av"); + graph + .add_edge( + graph + .get_node_index_by_id(component_id) + .expect("Unable to get NodeIndex"), + EdgeWeight::new(change_set, EdgeWeightKind::Use) + .expect("Unable to create EdgeWeight"), + root_av_node_index, + ) + .expect("Unable to add component -> root av edge"); + graph + .add_edge( + graph + .get_node_index_by_id(root_av_id) + .expect("Unable to get NodeIndex"), + EdgeWeight::new(change_set, EdgeWeightKind::Prop) + .expect("Unable to create EdgeWeight"), + graph + .get_node_index_by_id(root_prop_id) + .expect("Unable to 
get NodeIndex"), + ) + .expect("Unable to add root av -> root prop edge"); + + let si_av_id = change_set.generate_ulid().expect("Unable to generate Ulid"); + let si_av_content_hash = content_store + .add(&serde_json::json!({})) + .expect("Unable to add to content store"); + let si_av_node_index = graph + .add_node( + NodeWeight::new_content( + change_set, + si_av_id, + ContentAddress::AttributeValue(si_av_content_hash), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add si av"); + graph + .add_edge( + graph + .get_node_index_by_id(root_av_id) + .expect("Unable to get NodeIndex"), + EdgeWeight::new(change_set, EdgeWeightKind::Contain(None)) + .expect("Unable to create EdgeWeight"), + si_av_node_index, + ) + .expect("Unable to add root av -> si av edge"); + graph + .add_edge( + graph + .get_node_index_by_id(si_av_id) + .expect("Unable to get NodeIndex"), + EdgeWeight::new(change_set, EdgeWeightKind::Prop) + .expect("Unable to create EdgeWeight"), + graph + .get_node_index_by_id(si_prop_id) + .expect("Unable to get NodeIndex"), + ) + .expect("Unable to add si av -> si prop edge"); + + let name_av_id = change_set.generate_ulid().expect("Unable to generate Ulid"); + let name_av_content_hash = content_store + .add(&serde_json::json!("component name")) + .expect("Unable to add to content store"); + let name_av_node_index = graph + .add_node( + NodeWeight::new_content( + change_set, + name_av_id, + ContentAddress::AttributeValue(name_av_content_hash), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add name av"); + graph + .add_edge( + graph + .get_node_index_by_id(si_av_id) + .expect("Unable to get NodeIndex"), + EdgeWeight::new(change_set, EdgeWeightKind::Contain(None)) + .expect("Unable to create EdgeWeight"), + name_av_node_index, + ) + .expect("Unable to add si av -> name av edge"); + graph + .add_edge( + graph + .get_node_index_by_id(name_av_id) + .expect("Unable to get NodeIndex"), + EdgeWeight::new(change_set, 
EdgeWeightKind::Prop) + .expect("Unable to create EdgeWeight"), + graph + .get_node_index_by_id(name_prop_id) + .expect("Unable to get NodeIndex"), + ) + .expect("Unable to create name av -> name prop edge"); + + let description_av_id = change_set.generate_ulid().expect("Unable to generate Ulid"); + let description_av_content_hash = content_store + .add(&serde_json::json!("Component description")) + .expect("Unable to add to content store"); + let description_av_node_index = graph + .add_node( + NodeWeight::new_content( + change_set, + description_av_id, + ContentAddress::AttributeValue(description_av_content_hash), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add description av"); + graph + .add_edge( + graph + .get_node_index_by_id(si_av_id) + .expect("Unable to get NodeIndex"), + EdgeWeight::new(change_set, EdgeWeightKind::Contain(None)) + .expect("Unable to create EdgeWeight"), + description_av_node_index, + ) + .expect("Unable to add si av -> description av edge"); + graph + .add_edge( + graph + .get_node_index_by_id(description_av_id) + .expect("Unable to get NodeIndex"), + EdgeWeight::new(change_set, EdgeWeightKind::Prop) + .expect("Unable to create EdgeWeight"), + graph + .get_node_index_by_id(description_prop_id) + .expect("Unable to get NodeIndex"), + ) + .expect("Unable to add description av -> description prop edge"); + + graph.cleanup(); + graph.dot(); + + assert_eq!( + serde_json::json![{ + "si": { + "description": "Component description", + "name": "component name", + } + }], + graph + .attribute_value_view( + &mut content_store, + graph + .get_node_index_by_id(root_av_id) + .expect("Unable to get NodeIndex"), + ) + .await + .expect("Unable to generate attribute value view"), + ); + } + + #[tokio::test] + #[cfg(ignore)] + async fn attribute_value_build_view_ordered_array() { + let change_set = ChangeSetPointer::new_local().expect("Unable to create ChangeSet"); + let change_set = &change_set; + let mut graph = 
WorkspaceSnapshotGraph::new(change_set) + .expect("Unable to create WorkspaceSnapshotGraph"); + let mut content_store = content_store::LocalStore::default(); + + let schema_id = change_set.generate_ulid().expect("Unable to generate Ulid"); + let schema_content_hash = content_store + .add(&serde_json::json!("Schema A")) + .expect("Unable to add to content store"); + let schema_node_index = graph + .add_node( + NodeWeight::new_content( + change_set, + schema_id, + ContentAddress::Schema(schema_content_hash), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add schema"); + graph + .add_edge( + graph.root_index, + EdgeWeight::new(change_set, EdgeWeightKind::Use) + .expect("Unable to create EdgeWeight"), + schema_node_index, + ) + .expect("Unable to add root -> schema edge"); + + let schema_variant_id = change_set.generate_ulid().expect("Unable to generate Ulid"); + let schema_variant_content_hash = content_store + .add(&serde_json::json!("Schema Variant A")) + .expect("Unable to add to content store"); + let schema_variant_node_index = graph + .add_node( + NodeWeight::new_content( + change_set, + schema_variant_id, + ContentAddress::SchemaVariant(schema_variant_content_hash), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add schema variant"); + graph + .add_edge( + graph + .get_node_index_by_id(schema_id) + .expect("Unable to get NodeIndex"), + EdgeWeight::new(change_set, EdgeWeightKind::Use) + .expect("Unable to create EdgeWeight"), + schema_variant_node_index, + ) + .expect("Unable to add schema -> schema variant edge"); + + let root_prop_id = change_set.generate_ulid().expect("Unable to generate Ulid"); + let root_prop_content_hash = content_store + .add(&serde_json::json!("Root prop")) + .expect("Unable to add to content store"); + let root_prop_node_index = graph + .add_node( + NodeWeight::new_prop( + change_set, + root_prop_id, + PropKind::Object, + "root", + root_prop_content_hash, + ) + .expect("Unable to create 
NodeWeight"), + ) + .expect("Unable to add root prop"); + graph + .add_edge( + graph + .get_node_index_by_id(schema_variant_id) + .expect("Unable to get NodeIndex"), + EdgeWeight::new(change_set, EdgeWeightKind::Use) + .expect("Unable to create EdgeWeight"), + root_prop_node_index, + ) + .expect("Unable to add schema variant -> root prop edge"); + + let domain_prop_id = change_set.generate_ulid().expect("Unable to generate Ulid"); + let domain_prop_content_hash = content_store + .add(&serde_json::json!("domain Prop Content")) + .expect("Unable to add to content store"); + let domain_prop_node_index = graph + .add_node( + NodeWeight::new_prop( + change_set, + domain_prop_id, + PropKind::Object, + "domain", + domain_prop_content_hash, + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add domain prop"); + graph + .add_edge( + graph + .get_node_index_by_id(root_prop_id) + .expect("Unable to get NodeIndex"), + EdgeWeight::new(change_set, EdgeWeightKind::Use) + .expect("Unable to create EdgeWeight"), + domain_prop_node_index, + ) + .expect("Unable to add root prop -> domain prop edge"); + + let ports_prop_id = change_set.generate_ulid().expect("Unable to generate Ulid"); + let ports_prop_content_hash = content_store + .add(&serde_json::json!("ports Prop Content")) + .expect("Unable to add to content store"); + let ports_prop_node_index = graph + .add_node( + NodeWeight::new_prop( + change_set, + ports_prop_id, + PropKind::Array, + "ports", + ports_prop_content_hash, + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add ports prop"); + graph + .add_edge( + graph + .get_node_index_by_id(domain_prop_id) + .expect("Unable to get NodeIndex"), + EdgeWeight::new(change_set, EdgeWeightKind::Use) + .expect("Unable to create EdgeWeight"), + ports_prop_node_index, + ) + .expect("Unable to add domain prop -> ports prop edge"); + + let port_prop_id = change_set.generate_ulid().expect("Unable to generate Ulid"); + let port_prop_content_hash = 
content_store + .add(&serde_json::json!("port Prop Content")) + .expect("Unable to add to content store"); + let port_prop_node_index = graph + .add_node( + NodeWeight::new_prop( + change_set, + port_prop_id, + PropKind::String, + "port", + port_prop_content_hash, + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add port prop"); + graph + .add_edge( + graph + .get_node_index_by_id(ports_prop_id) + .expect("Unable to get NodeIndex"), + EdgeWeight::new(change_set, EdgeWeightKind::Use) + .expect("Unable to create EdgeWeight"), + port_prop_node_index, + ) + .expect("Unable to add ports prop -> port prop edge"); + + let component_id = change_set.generate_ulid().expect("Unable to generate Ulid"); + let component_content_hash = content_store + .add(&serde_json::json!("Component Content")) + .expect("Unable to add to content store"); + let component_node_index = graph + .add_node( + NodeWeight::new_content( + change_set, + component_id, + ContentAddress::Component(component_content_hash), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add component"); + graph + .add_edge( + graph.root_index, + EdgeWeight::new(change_set, EdgeWeightKind::Use) + .expect("Unable to create EdgeWeight"), + component_node_index, + ) + .expect("Unable to add root -> component edge"); + graph + .add_edge( + graph + .get_node_index_by_id(component_id) + .expect("Unable to get NodeIndex"), + EdgeWeight::new(change_set, EdgeWeightKind::Use) + .expect("Unable to create EdgeWeight"), + graph + .get_node_index_by_id(schema_variant_id) + .expect("Unable to get NodeIndex"), + ) + .expect("Unable to add component -> schema variant edge"); + + let root_av_id = change_set.generate_ulid().expect("Unable to generate Ulid"); + let root_av_content_hash = content_store + .add(&serde_json::json!({})) + .expect("Unable to add to content store"); + let root_av_node_index = graph + .add_node( + NodeWeight::new_content( + change_set, + root_av_id, + 
ContentAddress::AttributeValue(root_av_content_hash), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add root av"); + graph + .add_edge( + graph + .get_node_index_by_id(component_id) + .expect("Unable to get NodeIndex"), + EdgeWeight::new(change_set, EdgeWeightKind::Use) + .expect("Unable to create EdgeWeight"), + root_av_node_index, + ) + .expect("Unable to add component -> root av edge"); + graph + .add_edge( + graph + .get_node_index_by_id(root_av_id) + .expect("Unable to get NodeIndex"), + EdgeWeight::new(change_set, EdgeWeightKind::Prop) + .expect("Unable to create EdgeWeight"), + graph + .get_node_index_by_id(root_prop_id) + .expect("Unable to get NodeIndex"), + ) + .expect("Unable to add root av -> root prop edge"); + + let domain_av_id = change_set.generate_ulid().expect("Unable to generate Ulid"); + let domain_av_content_hash = content_store + .add(&serde_json::json!({})) + .expect("Unable to add to content store"); + let domain_av_node_index = graph + .add_node( + NodeWeight::new_content( + change_set, + domain_av_id, + ContentAddress::AttributeValue(domain_av_content_hash), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add domain av"); + graph + .add_edge( + graph + .get_node_index_by_id(root_av_id) + .expect("Unable to get NodeIndex"), + EdgeWeight::new(change_set, EdgeWeightKind::Contain(None)) + .expect("Unable to create EdgeWeight"), + domain_av_node_index, + ) + .expect("Unable to add root av -> domain av edge"); + graph + .add_edge( + graph + .get_node_index_by_id(domain_av_id) + .expect("Unable to get NodeIndex"), + EdgeWeight::new(change_set, EdgeWeightKind::Prop) + .expect("Unable to create EdgeWeight"), + graph + .get_node_index_by_id(domain_prop_id) + .expect("Unable to get NodeIndex"), + ) + .expect("Unable to add domain av -> domain prop edge"); + + let ports_av_id = change_set.generate_ulid().expect("Unable to generate Ulid"); + let ports_av_content_hash = content_store + 
.add(&serde_json::json!([])) + .expect("Unable to add to content store"); + let ports_av_node_index = graph + .add_ordered_node( + change_set, + NodeWeight::new_content( + change_set, + ports_av_id, + ContentAddress::AttributeValue(ports_av_content_hash), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add ports av"); + graph + .add_edge( + graph + .get_node_index_by_id(domain_av_id) + .expect("Unable to get NodeIndex"), + EdgeWeight::new(change_set, EdgeWeightKind::Contain(None)) + .expect("Unable to create EdgeWeight"), + ports_av_node_index, + ) + .expect("Unable to add domain av -> ports av edge"); + graph + .add_edge( + graph + .get_node_index_by_id(ports_av_id) + .expect("Unable to get NodeIndex"), + EdgeWeight::new(change_set, EdgeWeightKind::Prop) + .expect("Unable to create EdgeWeight"), + graph + .get_node_index_by_id(ports_prop_id) + .expect("Unable to get NodeIndex"), + ) + .expect("Unable to create ports av -> ports prop edge"); + + let port1_av_id = change_set.generate_ulid().expect("Unable to generate Ulid"); + let port1_av_content_hash = content_store + .add(&serde_json::json!("Port 1")) + .expect("Unable to add to content store"); + let port1_av_node_index = graph + .add_node( + NodeWeight::new_content( + change_set, + port1_av_id, + ContentAddress::AttributeValue(port1_av_content_hash), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add port 1 av"); + graph + .add_ordered_edge( + change_set, + graph + .get_node_index_by_id(ports_av_id) + .expect("Unable to get NodeIndex"), + EdgeWeight::new(change_set, EdgeWeightKind::Contain(None)) + .expect("Unable to create EdgeWeight"), + port1_av_node_index, + ) + .expect("Unable to add ports av -> port 1 av edge"); + graph + .add_edge( + graph + .get_node_index_by_id(port1_av_id) + .expect("Unable to get NodeIndex"), + EdgeWeight::new(change_set, EdgeWeightKind::Prop) + .expect("Unable to create EdgeWeight"), + graph + .get_node_index_by_id(port_prop_id) + 
.expect("Unable to get NodeIndex"), + ) + .expect("Unable to add port 1 av -> port prop edge"); + + let port2_av_id = change_set.generate_ulid().expect("Unable to generate Ulid"); + let port2_av_content_hash = content_store + .add(&serde_json::json!("Port 2")) + .expect("Unable to add to content store"); + let port2_av_node_index = graph + .add_node( + NodeWeight::new_content( + change_set, + port2_av_id, + ContentAddress::AttributeValue(port2_av_content_hash), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add port 2 av"); + graph + .add_ordered_edge( + change_set, + graph + .get_node_index_by_id(ports_av_id) + .expect("Unable to get NodeIndex"), + EdgeWeight::new(change_set, EdgeWeightKind::Contain(None)) + .expect("Unable to create EdgeWeight"), + port2_av_node_index, + ) + .expect("Unable to add ports av -> port 2 av edge"); + graph + .add_edge( + graph + .get_node_index_by_id(port2_av_id) + .expect("Unable to get NodeIndex"), + EdgeWeight::new(change_set, EdgeWeightKind::Prop) + .expect("Unable to create EdgeWeight"), + graph + .get_node_index_by_id(port_prop_id) + .expect("Unable to get NodeIndex"), + ) + .expect("Unable to add port 2 av -> port prop edge"); + + let port3_av_id = change_set.generate_ulid().expect("Unable to generate Ulid"); + let port3_av_content_hash = content_store + .add(&serde_json::json!("Port 3")) + .expect("Unable to add to content store"); + let port3_av_node_index = graph + .add_node( + NodeWeight::new_content( + change_set, + port3_av_id, + ContentAddress::AttributeValue(port3_av_content_hash), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add port 3 av"); + graph + .add_ordered_edge( + change_set, + graph + .get_node_index_by_id(ports_av_id) + .expect("Unable to get NodeIndex"), + EdgeWeight::new(change_set, EdgeWeightKind::Contain(None)) + .expect("Unable to create EdgeWeight"), + port3_av_node_index, + ) + .expect("Unable to add ports av -> port 3 av edge"); + graph + .add_edge( + 
graph + .get_node_index_by_id(port3_av_id) + .expect("Unable to get NodeIndex"), + EdgeWeight::new(change_set, EdgeWeightKind::Prop) + .expect("Unable to create EdgeWeight"), + graph + .get_node_index_by_id(port_prop_id) + .expect("Unable to get NodeIndex"), + ) + .expect("Unable to add port 3 av -> port prop edge"); + + let port4_av_id = change_set.generate_ulid().expect("Unable to generate Ulid"); + let port4_av_content_hash = content_store + .add(&serde_json::json!("Port 4")) + .expect("Unable to add to content store"); + let port4_av_node_index = graph + .add_node( + NodeWeight::new_content( + change_set, + port4_av_id, + ContentAddress::AttributeValue(port4_av_content_hash), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add port 4 av"); + graph + .add_ordered_edge( + change_set, + graph + .get_node_index_by_id(ports_av_id) + .expect("Unable to get NodeIndex"), + EdgeWeight::new(change_set, EdgeWeightKind::Contain(None)) + .expect("Unable to create EdgeWeight"), + port4_av_node_index, + ) + .expect("Unable to add ports av -> port 4 av edge"); + graph + .add_edge( + graph + .get_node_index_by_id(port4_av_id) + .expect("Unable to get NodeIndex"), + EdgeWeight::new(change_set, EdgeWeightKind::Prop) + .expect("Unable to create EdgeWeight"), + graph + .get_node_index_by_id(port_prop_id) + .expect("Unable to get NodeIndex"), + ) + .expect("Unable to add port 4 av -> port prop edge"); + + graph.cleanup(); + graph.dot(); + + assert_eq!( + serde_json::json![{ + "domain": { + "ports": [ + "Port 1", + "Port 2", + "Port 3", + "Port 4", + ], + } + }], + graph + .attribute_value_view( + &mut content_store, + graph + .get_node_index_by_id(root_av_id) + .expect("Unable to get NodeIndex"), + ) + .await + .expect("Unable to generate attribute value view"), + ); + + let new_order = vec![port3_av_id, port1_av_id, port4_av_id, port2_av_id]; + graph + .update_order(change_set, ports_av_id, new_order) + .expect("Unable to update order of ports attribute 
value's children"); + assert_eq!( + serde_json::json![{ + "domain": { + "ports": [ + "Port 3", + "Port 1", + "Port 4", + "Port 2", + ] + } + }], + graph + .attribute_value_view( + &mut content_store, + graph + .get_node_index_by_id(root_av_id) + .expect("Unable to get NodeIndex"), + ) + .await + .expect("Unable to generate attribute value view"), + ); + + let port5_av_id = change_set.generate_ulid().expect("Unable to generate Ulid"); + let port5_av_content_hash = content_store + .add(&serde_json::json!("Port 5")) + .expect("Unable to add to content store"); + let port5_av_node_index = graph + .add_node( + NodeWeight::new_content( + change_set, + port5_av_id, + ContentAddress::AttributeValue(port5_av_content_hash), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add port 5 av"); + graph + .add_ordered_edge( + change_set, + graph + .get_node_index_by_id(ports_av_id) + .expect("Unable to get NodeIndex"), + EdgeWeight::new(change_set, EdgeWeightKind::Contain(None)) + .expect("Unable to create EdgeWeight"), + port5_av_node_index, + ) + .expect("Unable to add ports av -> port 5 av edge"); + graph + .add_edge( + graph + .get_node_index_by_id(port5_av_id) + .expect("Unable to get NodeIndex"), + EdgeWeight::new(change_set, EdgeWeightKind::Prop) + .expect("Unable to create EdgeWeight"), + graph + .get_node_index_by_id(port_prop_id) + .expect("Unable to get NodeIndex"), + ) + .expect("Unable to add port 5 av -> port prop edge"); + + assert_eq!( + serde_json::json![{ + "domain": { + "ports": [ + "Port 3", + "Port 1", + "Port 4", + "Port 2", + "Port 5", + ] + } + }], + graph + .attribute_value_view( + &mut content_store, + graph + .get_node_index_by_id(root_av_id) + .expect("Unable to get NodeIndex"), + ) + .await + .expect("Unable to generate attribute value view"), + ); + } + + #[tokio::test] + #[cfg(ignore)] + async fn attribute_value_build_view_ordered_map() { + let change_set = ChangeSetPointer::new_local().expect("Unable to create ChangeSet"); + let 
change_set = &change_set; + let mut graph = WorkspaceSnapshotGraph::new(change_set) + .expect("Unable to create WorkspaceSnapshotGraph"); + let mut content_store = content_store::LocalStore::default(); + + let schema_id = change_set.generate_ulid().expect("Unable to generate Ulid"); + let schema_content_hash = content_store + .add(&serde_json::json!("Schema A")) + .expect("Unable to add to content store"); + let schema_node_index = graph + .add_node( + NodeWeight::new_content( + change_set, + schema_id, + ContentAddress::Schema(schema_content_hash), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add schema"); + graph + .add_edge( + graph.root_index, + EdgeWeight::new(change_set, EdgeWeightKind::Use) + .expect("Unable to create EdgeWeight"), + schema_node_index, + ) + .expect("Unable to add root -> schema edge"); + + let schema_variant_id = change_set.generate_ulid().expect("Unable to generate Ulid"); + let schema_variant_content_hash = content_store + .add(&serde_json::json!("Schema Variant A")) + .expect("Unable to add to content store"); + let schema_variant_node_index = graph + .add_node( + NodeWeight::new_content( + change_set, + schema_variant_id, + ContentAddress::SchemaVariant(schema_variant_content_hash), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add schema variant"); + graph + .add_edge( + graph + .get_node_index_by_id(schema_id) + .expect("Unable to get NodeIndex"), + EdgeWeight::new(change_set, EdgeWeightKind::Use) + .expect("Unable to create EdgeWeight"), + schema_variant_node_index, + ) + .expect("Unable to add schema -> schema variant edge"); + + let root_prop_id = change_set.generate_ulid().expect("Unable to generate Ulid"); + let root_prop_content_hash = content_store + .add(&serde_json::json!("Root prop")) + .expect("Unable to add to content store"); + let root_prop_node_index = graph + .add_node( + NodeWeight::new_prop( + change_set, + root_prop_id, + PropKind::Object, + "root", + 
root_prop_content_hash, + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add root prop"); + graph + .add_edge( + graph + .get_node_index_by_id(schema_variant_id) + .expect("Unable to get NodeIndex"), + EdgeWeight::new(change_set, EdgeWeightKind::Use) + .expect("Unable to create EdgeWeight"), + root_prop_node_index, + ) + .expect("Unable to add schema variant -> root prop edge"); + + let domain_prop_id = change_set.generate_ulid().expect("Unable to generate Ulid"); + let domain_prop_content_hash = content_store + .add(&serde_json::json!("domain Prop Content")) + .expect("Unable to add to content store"); + let domain_prop_node_index = graph + .add_node( + NodeWeight::new_prop( + change_set, + domain_prop_id, + PropKind::Object, + "domain", + domain_prop_content_hash, + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add domain prop"); + graph + .add_edge( + graph + .get_node_index_by_id(root_prop_id) + .expect("Unable to get NodeIndex"), + EdgeWeight::new(change_set, EdgeWeightKind::Use) + .expect("Unable to create EdgeWeight"), + domain_prop_node_index, + ) + .expect("Unable to add root prop -> domain prop edge"); + + let environment_prop_id = change_set.generate_ulid().expect("Unable to generate Ulid"); + let environment_prop_content_hash = content_store + .add(&serde_json::json!("environment Prop Content")) + .expect("Unable to add to content store"); + let environment_prop_node_index = graph + .add_node( + NodeWeight::new_prop( + change_set, + environment_prop_id, + PropKind::Array, + "environment", + environment_prop_content_hash, + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add environment prop"); + graph + .add_edge( + graph + .get_node_index_by_id(domain_prop_id) + .expect("Unable to get NodeIndex"), + EdgeWeight::new(change_set, EdgeWeightKind::Use) + .expect("Unable to create EdgeWeight"), + environment_prop_node_index, + ) + .expect("Unable to add domain prop -> environment prop edge"); 
+ + let env_var_prop_id = change_set.generate_ulid().expect("Unable to generate Ulid"); + let env_var_prop_content_hash = content_store + .add(&serde_json::json!("port Prop Content")) + .expect("Unable to add to content store"); + let env_var_prop_node_index = graph + .add_node( + NodeWeight::new_prop( + change_set, + env_var_prop_id, + PropKind::String, + "port", + env_var_prop_content_hash, + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add env var prop"); + graph + .add_edge( + graph + .get_node_index_by_id(environment_prop_id) + .expect("Unable to get NodeIndex"), + EdgeWeight::new(change_set, EdgeWeightKind::Use) + .expect("Unable to create EdgeWeight"), + env_var_prop_node_index, + ) + .expect("Unable to add environment prop -> env var prop edge"); + + let component_id = change_set.generate_ulid().expect("Unable to generate Ulid"); + let component_content_hash = content_store + .add(&serde_json::json!("Component Content")) + .expect("Unable to add to content store"); + let component_node_index = graph + .add_node( + NodeWeight::new_content( + change_set, + component_id, + ContentAddress::Component(component_content_hash), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add component"); + graph + .add_edge( + graph.root_index, + EdgeWeight::new(change_set, EdgeWeightKind::Use) + .expect("Unable to create EdgeWeight"), + component_node_index, + ) + .expect("Unable to add root -> component edge"); + graph + .add_edge( + graph + .get_node_index_by_id(component_id) + .expect("Unable to get NodeIndex"), + EdgeWeight::new(change_set, EdgeWeightKind::Use) + .expect("Unable to create EdgeWeight"), + graph + .get_node_index_by_id(schema_variant_id) + .expect("Unable to get NodeIndex"), + ) + .expect("Unable to add component -> schema variant edge"); + + let root_av_id = change_set.generate_ulid().expect("Unable to generate Ulid"); + let root_av_content_hash = content_store + .add(&serde_json::json!({})) + .expect("Unable 
to add to content store"); + let root_av_node_index = graph + .add_node( + NodeWeight::new_attribute_value(change_set, root_av_id, None, None, None) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add root av"); + graph + .add_edge( + graph + .get_node_index_by_id(component_id) + .expect("Unable to get NodeIndex"), + EdgeWeight::new(change_set, EdgeWeightKind::Use) + .expect("Unable to create EdgeWeight"), + root_av_node_index, + ) + .expect("Unable to add component -> root av edge"); + graph + .add_edge( + graph + .get_node_index_by_id(root_av_id) + .expect("Unable to get NodeIndex"), + EdgeWeight::new(change_set, EdgeWeightKind::Prop) + .expect("Unable to create EdgeWeight"), + graph + .get_node_index_by_id(root_prop_id) + .expect("Unable to get NodeIndex"), + ) + .expect("Unable to add root av -> root prop edge"); + + let domain_av_id = change_set.generate_ulid().expect("Unable to generate Ulid"); + let domain_av_content_hash = content_store + .add(&serde_json::json!({})) + .expect("Unable to add to content store"); + let domain_av_node_index = graph + .add_node( + NodeWeight::new_content( + change_set, + domain_av_id, + ContentAddress::AttributeValue(domain_av_content_hash), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add domain av"); + graph + .add_edge( + graph + .get_node_index_by_id(root_av_id) + .expect("Unable to get NodeIndex"), + EdgeWeight::new(change_set, EdgeWeightKind::Contain(None)) + .expect("Unable to create EdgeWeight"), + domain_av_node_index, + ) + .expect("Unable to add root av -> domain av edge"); + graph + .add_edge( + graph + .get_node_index_by_id(domain_av_id) + .expect("Unable to get NodeIndex"), + EdgeWeight::new(change_set, EdgeWeightKind::Prop) + .expect("Unable to create EdgeWeight"), + graph + .get_node_index_by_id(domain_prop_id) + .expect("Unable to get NodeIndex"), + ) + .expect("Unable to add domain av -> domain prop edge"); + + let envrionment_av_id = 
change_set.generate_ulid().expect("Unable to generate Ulid"); + let ports_av_content_hash = content_store + .add(&serde_json::json!({})) + .expect("Unable to add to content store"); + let environment_av_node_index = graph + .add_ordered_node( + change_set, + NodeWeight::new_content( + change_set, + envrionment_av_id, + ContentAddress::AttributeValue(ports_av_content_hash), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add environment av"); + graph + .add_edge( + graph + .get_node_index_by_id(domain_av_id) + .expect("Unable to get NodeIndex"), + EdgeWeight::new(change_set, EdgeWeightKind::Contain(None)) + .expect("Unable to create EdgeWeight"), + environment_av_node_index, + ) + .expect("Unable to add domain av -> environment av edge"); + graph + .add_edge( + graph + .get_node_index_by_id(envrionment_av_id) + .expect("Unable to get NodeIndex"), + EdgeWeight::new(change_set, EdgeWeightKind::Prop) + .expect("Unable to create EdgeWeight"), + graph + .get_node_index_by_id(environment_prop_id) + .expect("Unable to get NodeIndex"), + ) + .expect("Unable to create environment av -> environment prop edge"); + + let env_var1_av_id = change_set.generate_ulid().expect("Unable to generate Ulid"); + let env_var1_av_content_hash = content_store + .add(&serde_json::json!("1111")) + .expect("Unable to add to content store"); + let port1_av_node_index = graph + .add_node( + NodeWeight::new_content( + change_set, + env_var1_av_id, + ContentAddress::AttributeValue(env_var1_av_content_hash), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add env_var 1 av"); + graph + .add_ordered_edge( + change_set, + graph + .get_node_index_by_id(envrionment_av_id) + .expect("Unable to get NodeIndex"), + EdgeWeight::new( + change_set, + EdgeWeightKind::Contain(Some("PORT_1".to_string())), + ) + .expect("Unable to create EdgeWeight"), + port1_av_node_index, + ) + .expect("Unable to add environment av -> env var 1 av edge"); + graph + .add_edge( + graph + 
.get_node_index_by_id(env_var1_av_id) + .expect("Unable to get NodeIndex"), + EdgeWeight::new(change_set, EdgeWeightKind::Prop) + .expect("Unable to create EdgeWeight"), + graph + .get_node_index_by_id(env_var_prop_id) + .expect("Unable to get NodeIndex"), + ) + .expect("Unable to add env var 1 av -> env var prop edge"); + + let env_var2_av_id = change_set.generate_ulid().expect("Unable to generate Ulid"); + let env_var2_av_content_hash = content_store + .add(&serde_json::json!("2222")) + .expect("Unable to add to content store"); + let env_var2_av_node_index = graph + .add_node( + NodeWeight::new_content( + change_set, + env_var2_av_id, + ContentAddress::AttributeValue(env_var2_av_content_hash), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add env var 2 av"); + graph + .add_ordered_edge( + change_set, + graph + .get_node_index_by_id(envrionment_av_id) + .expect("Unable to get NodeIndex"), + EdgeWeight::new( + change_set, + EdgeWeightKind::Contain(Some("PORT_2".to_string())), + ) + .expect("Unable to create EdgeWeight"), + env_var2_av_node_index, + ) + .expect("Unable to add environment av -> env var 2 av edge"); + graph + .add_edge( + graph + .get_node_index_by_id(env_var2_av_id) + .expect("Unable to get NodeIndex"), + EdgeWeight::new(change_set, EdgeWeightKind::Prop) + .expect("Unable to create EdgeWeight"), + graph + .get_node_index_by_id(env_var_prop_id) + .expect("Unable to get NodeIndex"), + ) + .expect("Unable to add env var 2 av -> env var prop edge"); + + let env_var3_av_id = change_set.generate_ulid().expect("Unable to generate Ulid"); + let env_var3_av_content_hash = content_store + .add(&serde_json::json!("3333")) + .expect("Unable to add to content store"); + let port3_av_node_index = graph + .add_node( + NodeWeight::new_content( + change_set, + env_var3_av_id, + ContentAddress::AttributeValue(env_var3_av_content_hash), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add env var 3 av"); + graph + 
.add_ordered_edge( + change_set, + graph + .get_node_index_by_id(envrionment_av_id) + .expect("Unable to get NodeIndex"), + EdgeWeight::new( + change_set, + EdgeWeightKind::Contain(Some("PORT_3".to_string())), + ) + .expect("Unable to create EdgeWeight"), + port3_av_node_index, + ) + .expect("Unable to add environment av -> env var 3 av edge"); + graph + .add_edge( + graph + .get_node_index_by_id(env_var3_av_id) + .expect("Unable to get NodeIndex"), + EdgeWeight::new(change_set, EdgeWeightKind::Prop) + .expect("Unable to create EdgeWeight"), + graph + .get_node_index_by_id(env_var_prop_id) + .expect("Unable to get NodeIndex"), + ) + .expect("Unable to add env var 3 av -> env var prop edge"); + + let env_var4_av_id = change_set.generate_ulid().expect("Unable to generate Ulid"); + let env_var4_av_content_hash = content_store + .add(&serde_json::json!("4444")) + .expect("Unable to add to content store"); + let env_var4_av_node_index = graph + .add_node( + NodeWeight::new_content( + change_set, + env_var4_av_id, + ContentAddress::AttributeValue(env_var4_av_content_hash), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add env var 4 av"); + graph + .add_ordered_edge( + change_set, + graph + .get_node_index_by_id(envrionment_av_id) + .expect("Unable to get NodeIndex"), + EdgeWeight::new( + change_set, + EdgeWeightKind::Contain(Some("PORT_4".to_string())), + ) + .expect("Unable to create EdgeWeight"), + env_var4_av_node_index, + ) + .expect("Unable to add environment av -> env var 4 av edge"); + graph + .add_edge( + graph + .get_node_index_by_id(env_var4_av_id) + .expect("Unable to get NodeIndex"), + EdgeWeight::new(change_set, EdgeWeightKind::Prop) + .expect("Unable to create EdgeWeight"), + graph + .get_node_index_by_id(env_var_prop_id) + .expect("Unable to get NodeIndex"), + ) + .expect("Unable to add env var 4 av -> env var prop edge"); + + graph.cleanup(); + graph.dot(); + + assert_eq!( + serde_json::json![{ + "domain": { + "environment": { + 
"PORT_1": "1111", + "PORT_2": "2222", + "PORT_3": "3333", + "PORT_4": "4444", + }, + } + }], + graph + .attribute_value_view( + &mut content_store, + graph + .get_node_index_by_id(root_av_id) + .expect("Unable to get NodeIndex"), + ) + .await + .expect("Unable to generate attribute value view"), + ); + + let new_order = vec![ + env_var3_av_id, + env_var1_av_id, + env_var4_av_id, + env_var2_av_id, + ]; + graph + .update_order(change_set, envrionment_av_id, new_order) + .expect("Unable to update order of environment attribute value's children"); + assert_eq!( + serde_json::json![{ + "domain": { + "environment": { + "PORT_3": "3333", + "PORT_1": "1111", + "PORT_4": "4444", + "PORT_2": "2222", + }, + } + }], + graph + .attribute_value_view( + &mut content_store, + graph + .get_node_index_by_id(root_av_id) + .expect("Unable to get NodeIndex"), + ) + .await + .expect("Unable to generate attribute value view"), + ); + + let env_var5_av_id = change_set.generate_ulid().expect("Unable to generate Ulid"); + let env_var5_av_content_hash = content_store + .add(&serde_json::json!("5555")) + .expect("Unable to add to content store"); + let env_var5_av_node_index = graph + .add_node( + NodeWeight::new_content( + change_set, + env_var5_av_id, + ContentAddress::AttributeValue(env_var5_av_content_hash), + ) + .expect("Unable to create NodeWeight"), + ) + .expect("Unable to add env var 5 av"); + graph + .add_ordered_edge( + change_set, + graph + .get_node_index_by_id(envrionment_av_id) + .expect("Unable to get NodeIndex"), + EdgeWeight::new( + change_set, + EdgeWeightKind::Contain(Some("PORT_5".to_string())), + ) + .expect("Unable to create EdgeWeight"), + env_var5_av_node_index, + ) + .expect("Unable to add environment av -> env var 5 av edge"); + graph + .add_edge( + graph + .get_node_index_by_id(env_var5_av_id) + .expect("Unable to get NodeIndex"), + EdgeWeight::new(change_set, EdgeWeightKind::Prop) + .expect("Unable to create EdgeWeight"), + graph + 
.get_node_index_by_id(env_var_prop_id) + .expect("Unable to get NodeIndex"), + ) + .expect("Unable to add env var 5 av -> env var prop edge"); + + assert_eq!( + serde_json::json![{ + "domain": { + "environment": { + "PORT_3": "3333", + "PORT_1": "1111", + "PORT_4": "4444", + "PORT_2": "2222", + "PORT_5": "5555", + }, + } + }], + graph + .attribute_value_view( + &mut content_store, + graph + .get_node_index_by_id(root_av_id) + .expect("Unable to get NodeIndex"), + ) + .await + .expect("Unable to generate attribute value view"), + ); + } +} diff --git a/lib/dal/src/workspace_snapshot/graph/tests/rebase.rs b/lib/dal/src/workspace_snapshot/graph/tests/rebase.rs new file mode 100644 index 0000000000..54227cec05 --- /dev/null +++ b/lib/dal/src/workspace_snapshot/graph/tests/rebase.rs @@ -0,0 +1,180 @@ +#[cfg(test)] +mod test { + use content_store::ContentHash; + use pretty_assertions_sorted::assert_eq; + + use crate::change_set_pointer::ChangeSetPointer; + use crate::workspace_snapshot::content_address::ContentAddress; + use crate::workspace_snapshot::edge_weight::{EdgeWeight, EdgeWeightKind}; + use crate::workspace_snapshot::node_weight::category_node_weight::CategoryNodeKind; + use crate::workspace_snapshot::node_weight::NodeWeight; + use crate::workspace_snapshot::node_weight::{ContentNodeWeight, FuncNodeWeight}; + use crate::FuncBackendKind; + use crate::WorkspaceSnapshotGraph; + + #[test] + fn simulate_rebase() { + let to_rebase_change_set = + ChangeSetPointer::new_local().expect("Unable to create ChangeSet"); + let to_rebase_change_set = &to_rebase_change_set; + let mut to_rebase = WorkspaceSnapshotGraph::new(to_rebase_change_set) + .expect("Unable to create WorkspaceSnapshotGraph"); + + // Set up the to rebase graph. 
+ let schema_category_node_index = to_rebase + .add_category_node(to_rebase_change_set, CategoryNodeKind::Schema) + .expect("could not add category node"); + to_rebase + .add_edge( + to_rebase.root_index, + EdgeWeight::new(to_rebase_change_set, EdgeWeightKind::Use) + .expect("could not create edge weight"), + schema_category_node_index, + ) + .expect("could not add edge"); + let func_category_node_index = to_rebase + .add_category_node(to_rebase_change_set, CategoryNodeKind::Func) + .expect("could not add category node"); + to_rebase + .add_edge( + to_rebase.root_index, + EdgeWeight::new(to_rebase_change_set, EdgeWeightKind::Use) + .expect("could not create edge weight"), + func_category_node_index, + ) + .expect("could not add edge"); + + // Create the onto graph from the to rebase graph. + let onto_change_set = ChangeSetPointer::new_local().expect("Unable to create ChangeSet"); + let onto_change_set = &onto_change_set; + let mut onto = to_rebase.clone(); + + // FuncCategory --Use--> Func + let func_id = onto_change_set + .generate_ulid() + .expect("could not generate ulid"); + let func_node_weight = FuncNodeWeight::new( + onto_change_set, + func_id, + ContentAddress::Func(ContentHash::from("foo")), + "foo".to_string(), + FuncBackendKind::String, + ) + .expect("could not create func node weight"); + let func_node_index = onto + .add_node(NodeWeight::Func(func_node_weight)) + .expect("could not add node"); + onto.add_edge( + func_category_node_index, + EdgeWeight::new(onto_change_set, EdgeWeightKind::Use) + .expect("could not create edge weight"), + func_node_index, + ) + .expect("could not add edge"); + + // SchemaCategory --Use--> Schema + let schema_node_weight = ContentNodeWeight::new( + onto_change_set, + onto_change_set + .generate_ulid() + .expect("could not generate ulid"), + ContentAddress::Schema(ContentHash::from("foo")), + ) + .expect("could not create func node weight"); + let schema_node_index = onto + 
.add_node(NodeWeight::Content(schema_node_weight)) + .expect("could not add node"); + onto.add_edge( + schema_category_node_index, + EdgeWeight::new(onto_change_set, EdgeWeightKind::Use) + .expect("could not create edge weight"), + schema_node_index, + ) + .expect("could not add edge"); + + // Schema --Use--> SchemaVariant + let schema_variant_node_weight = ContentNodeWeight::new( + onto_change_set, + onto_change_set + .generate_ulid() + .expect("could not generate ulid"), + ContentAddress::SchemaVariant(ContentHash::from("foo")), + ) + .expect("could not create func node weight"); + let schema_variant_node_index = onto + .add_node(NodeWeight::Content(schema_variant_node_weight)) + .expect("could not add node"); + onto.add_edge( + schema_node_index, + EdgeWeight::new(onto_change_set, EdgeWeightKind::Use) + .expect("could not create edge weight"), + schema_variant_node_index, + ) + .expect("could not add edge"); + + // SchemaVariant --Use--> Func + let func_node_index = onto + .get_node_index_by_id(func_id) + .expect("could not get node index by id"); + onto.add_edge( + schema_variant_node_index, + EdgeWeight::new(onto_change_set, EdgeWeightKind::Use) + .expect("could not create edge weight"), + func_node_index, + ) + .expect("could not add edge"); + + // Before cleanup, detect conflicts and updates. + let (before_cleanup_conflicts, before_cleanup_updates) = to_rebase + .detect_conflicts_and_updates( + to_rebase_change_set.vector_clock_id(), + &onto, + onto_change_set.vector_clock_id(), + ) + .expect("could not detect conflicts and updates"); + + // Cleanup and check node count. + onto.cleanup(); + to_rebase.cleanup(); + assert_eq!( + 6, // expected + onto.node_count() // actual + ); + + // Detect conflicts and updates. Ensure cleanup did not affect the results. 
+ let (conflicts, updates) = to_rebase + .detect_conflicts_and_updates( + to_rebase_change_set.vector_clock_id(), + &onto, + onto_change_set.vector_clock_id(), + ) + .expect("could not detect conflicts and updates"); + assert!(conflicts.is_empty()); + assert_eq!( + 2, // expected + updates.len() // actual + ); + assert_eq!( + before_cleanup_conflicts, // expected + conflicts // actual + ); + assert_eq!( + before_cleanup_updates, // expected + updates // actual + ); + + // Ensure that we do not have duplicate updates. + let mut deduped_updates = updates.clone(); + deduped_updates.dedup(); + assert_eq!( + deduped_updates.len(), // expected + updates.len() // actual + ); + + // Perform the updates. In the future, we may want to see if the onto and resulting to + // rebase graphs are logically equivalent after updates are performed. + to_rebase + .perform_updates(to_rebase_change_set, &onto, &updates) + .expect("could not perform updates"); + } +} diff --git a/lib/dal/src/workspace_snapshot/lamport_clock.rs b/lib/dal/src/workspace_snapshot/lamport_clock.rs new file mode 100644 index 0000000000..4b77144236 --- /dev/null +++ b/lib/dal/src/workspace_snapshot/lamport_clock.rs @@ -0,0 +1,68 @@ +//! 
Lamport Clocks + +use chrono::{DateTime, Utc}; +use serde::{Deserialize, Serialize}; +use thiserror::Error; + +use crate::workspace_snapshot::ChangeSetPointerError; + +#[derive(Debug, Error)] +pub enum LamportClockError { + #[error("Change Set error: {0}")] + ChangeSet(#[from] ChangeSetPointerError), +} + +pub type LamportClockResult<T> = Result<T, LamportClockError>; + +#[derive(Clone, Copy, Deserialize, Serialize)] +pub struct LamportClock { + #[serde(with = "chrono::serde::ts_nanoseconds")] + pub counter: DateTime<Utc>, +} + +impl LamportClock { + pub fn new() -> LamportClockResult<LamportClock> { + let counter = Utc::now(); + Ok(LamportClock { counter }) + } + + pub fn new_with_value(new_value: DateTime<Utc>) -> Self { + LamportClock { counter: new_value } + } + + pub fn inc(&mut self) -> LamportClockResult<()> { + self.counter = Utc::now(); + + Ok(()) + } + + pub fn inc_to(&mut self, new_value: DateTime<Utc>) { + self.counter = new_value; + } + + pub fn merge(&mut self, other: &LamportClock) { + if self.counter < other.counter { + self.counter = other.counter; + } + } +} + +impl std::fmt::Debug for LamportClock { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(f, "LamportClock({})", &self.counter.to_string()) + } +} + +impl Eq for LamportClock {} + +impl PartialEq for LamportClock { + fn eq(&self, other: &Self) -> bool { + self.counter == other.counter + } +} + +impl PartialOrd for LamportClock { + fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> { + self.counter.partial_cmp(&other.counter) + } +} diff --git a/lib/dal/src/workspace_snapshot/node_weight.rs b/lib/dal/src/workspace_snapshot/node_weight.rs new file mode 100644 index 0000000000..978c71b5c7 --- /dev/null +++ b/lib/dal/src/workspace_snapshot/node_weight.rs @@ -0,0 +1,571 @@ +use chrono::{DateTime, Utc}; +use content_store::ContentHash; +use serde::{Deserialize, Serialize}; +use strum::EnumDiscriminants; +use thiserror::Error; +use ulid::Ulid; + +use crate::func::execution::FuncExecutionPk; +use
crate::workspace_snapshot::vector_clock::VectorClockId; +use crate::FuncBackendKind; +use crate::{ + change_set_pointer::{ChangeSetPointer, ChangeSetPointerError}, + workspace_snapshot::{ + content_address::ContentAddress, + vector_clock::{VectorClock, VectorClockError}, + }, + PropKind, +}; + +pub use attribute_prototype_argument_node_weight::AttributePrototypeArgumentNodeWeight; +pub use attribute_value_node_weight::AttributeValueNodeWeight; +pub use category_node_weight::CategoryNodeWeight; +pub use content_node_weight::ContentNodeWeight; +pub use func_argument_node_weight::FuncArgumentNodeWeight; +pub use func_node_weight::FuncNodeWeight; +pub use ordering_node_weight::OrderingNodeWeight; +pub use prop_node_weight::PropNodeWeight; + +use self::attribute_prototype_argument_node_weight::ArgumentTargets; + +use super::content_address::ContentAddressDiscriminants; + +pub mod attribute_prototype_argument_node_weight; +pub mod attribute_value_node_weight; +pub mod category_node_weight; +pub mod content_node_weight; +pub mod func_argument_node_weight; +pub mod func_node_weight; +pub mod ordering_node_weight; +pub mod prop_node_weight; + +#[derive(Debug, Error)] +pub enum NodeWeightError { + #[error("Cannot set content hash directly on node weight kind")] + CannotSetContentHashOnKind, + #[error("Cannot set content order directly on node weight kind")] + CannotSetOrderOnKind, + #[error("Cannot update root node's content hash")] + CannotUpdateRootNodeContentHash, + #[error("ChangeSet error: {0}")] + ChangeSet(#[from] ChangeSetPointerError), + #[error("Incompatible node weights")] + IncompatibleNodeWeightVariants, + #[error("Invalid ContentAddress variant ({0}) for NodeWeight variant ({1})")] + InvalidContentAddressForWeightKind(String, String), + #[error("Unexpected content address variant: {1} expected {0}")] + UnexpectedContentAddressVariant(ContentAddressDiscriminants, ContentAddressDiscriminants), + #[error("Unexpected node weight variant. 
Got {1} but expected {0}")] + UnexpectedNodeWeightVariant(NodeWeightDiscriminants, NodeWeightDiscriminants), + #[error("Vector Clock error: {0}")] + VectorClock(#[from] VectorClockError), +} + +pub type NodeWeightResult<T> = Result<T, NodeWeightError>; + +#[derive(Debug, Serialize, Deserialize, Clone, EnumDiscriminants)] +#[strum_discriminants(derive(strum::Display, Serialize, Deserialize))] +pub enum NodeWeight { + AttributePrototypeArgument(AttributePrototypeArgumentNodeWeight), + AttributeValue(AttributeValueNodeWeight), + Category(CategoryNodeWeight), + Content(ContentNodeWeight), + Func(FuncNodeWeight), + FuncArgument(FuncArgumentNodeWeight), + Ordering(OrderingNodeWeight), + Prop(PropNodeWeight), +} + +impl NodeWeight { + pub fn content_hash(&self) -> ContentHash { + match self { + NodeWeight::AttributePrototypeArgument(weight) => weight.content_hash(), + NodeWeight::AttributeValue(weight) => weight.content_hash(), + NodeWeight::Category(weight) => weight.content_hash(), + NodeWeight::Content(weight) => weight.content_hash(), + NodeWeight::Func(weight) => weight.content_hash(), + NodeWeight::FuncArgument(weight) => weight.content_hash(), + NodeWeight::Ordering(weight) => weight.content_hash(), + NodeWeight::Prop(weight) => weight.content_hash(), + } + } + + pub fn content_address_discriminants(&self) -> Option<ContentAddressDiscriminants> { + match self { + NodeWeight::Content(weight) => Some(weight.content_address().into()), + NodeWeight::AttributePrototypeArgument(_) + | NodeWeight::AttributeValue(_) + | NodeWeight::Category(_) + | NodeWeight::Func(_) + | NodeWeight::FuncArgument(_) + | NodeWeight::Ordering(_) + | NodeWeight::Prop(_) => None, + } + } + + pub fn id(&self) -> Ulid { + match self { + NodeWeight::AttributePrototypeArgument(weight) => weight.id(), + NodeWeight::AttributeValue(weight) => weight.id(), + NodeWeight::Category(weight) => weight.id(), + NodeWeight::Content(weight) => weight.id(), + NodeWeight::Func(weight) => weight.id(), + NodeWeight::FuncArgument(weight) => weight.id(), + 
NodeWeight::Ordering(weight) => weight.id(), + NodeWeight::Prop(weight) => weight.id(), + } + } + + pub fn increment_vector_clock( + &mut self, + change_set: &ChangeSetPointer, + ) -> NodeWeightResult<()> { + match self { + NodeWeight::AttributePrototypeArgument(weight) => { + weight.increment_vector_clock(change_set) + } + NodeWeight::AttributeValue(weight) => weight.increment_vector_clock(change_set), + NodeWeight::Category(weight) => weight.increment_vector_clock(change_set), + NodeWeight::Content(weight) => weight.increment_vector_clock(change_set), + NodeWeight::Func(weight) => weight.increment_vector_clock(change_set), + NodeWeight::FuncArgument(weight) => weight.increment_vector_clock(change_set), + NodeWeight::Ordering(weight) => weight.increment_vector_clock(change_set), + NodeWeight::Prop(weight) => weight.increment_vector_clock(change_set), + } + } + + pub fn lineage_id(&self) -> Ulid { + match self { + NodeWeight::AttributePrototypeArgument(weight) => weight.lineage_id(), + NodeWeight::AttributeValue(weight) => weight.lineage_id(), + NodeWeight::Category(weight) => weight.lineage_id(), + NodeWeight::Content(weight) => weight.lineage_id(), + NodeWeight::Func(weight) => weight.lineage_id(), + NodeWeight::FuncArgument(weight) => weight.lineage_id(), + NodeWeight::Ordering(weight) => weight.lineage_id(), + NodeWeight::Prop(weight) => weight.lineage_id(), + } + } + + pub fn mark_seen_at(&mut self, vector_clock_id: VectorClockId, seen_at: DateTime) { + match self { + NodeWeight::AttributePrototypeArgument(weight) => { + weight.mark_seen_at(vector_clock_id, seen_at) + } + NodeWeight::AttributeValue(weight) => weight.mark_seen_at(vector_clock_id, seen_at), + NodeWeight::Category(weight) => weight.mark_seen_at(vector_clock_id, seen_at), + NodeWeight::Content(weight) => weight.mark_seen_at(vector_clock_id, seen_at), + NodeWeight::Func(weight) => weight.mark_seen_at(vector_clock_id, seen_at), + NodeWeight::FuncArgument(weight) => 
weight.mark_seen_at(vector_clock_id, seen_at), + NodeWeight::Ordering(weight) => weight.mark_seen_at(vector_clock_id, seen_at), + NodeWeight::Prop(weight) => weight.mark_seen_at(vector_clock_id, seen_at), + } + } + + pub fn merge_clocks( + &mut self, + change_set: &ChangeSetPointer, + other: &NodeWeight, + ) -> NodeWeightResult<()> { + match (self, other) { + ( + NodeWeight::AttributePrototypeArgument(self_weight), + NodeWeight::AttributePrototypeArgument(other_weight), + ) => self_weight.merge_clocks(change_set, other_weight), + (NodeWeight::AttributeValue(self_weight), NodeWeight::AttributeValue(other_weight)) => { + self_weight.merge_clocks(change_set, other_weight) + } + (NodeWeight::Category(self_weight), NodeWeight::Category(other_weight)) => { + self_weight.merge_clocks(change_set, other_weight) + } + (NodeWeight::Content(self_weight), NodeWeight::Content(other_weight)) => { + self_weight.merge_clocks(change_set, other_weight) + } + (NodeWeight::Func(self_weight), NodeWeight::Func(other_weight)) => { + self_weight.merge_clocks(change_set, other_weight) + } + (NodeWeight::FuncArgument(self_weight), NodeWeight::FuncArgument(other_weight)) => { + self_weight.merge_clocks(change_set, other_weight) + } + (NodeWeight::Ordering(self_weight), NodeWeight::Ordering(other_weight)) => { + self_weight.merge_clocks(change_set, other_weight) + } + (NodeWeight::Prop(self_weight), NodeWeight::Prop(other_weight)) => { + self_weight.merge_clocks(change_set, other_weight) + } + _ => Err(NodeWeightError::IncompatibleNodeWeightVariants), + } + } + + pub fn merkle_tree_hash(&self) -> ContentHash { + match self { + NodeWeight::AttributePrototypeArgument(weight) => weight.merkle_tree_hash(), + NodeWeight::AttributeValue(weight) => weight.merkle_tree_hash(), + NodeWeight::Category(weight) => weight.merkle_tree_hash(), + NodeWeight::Content(weight) => weight.merkle_tree_hash(), + NodeWeight::Func(weight) => weight.merkle_tree_hash(), + NodeWeight::FuncArgument(weight) => 
weight.merkle_tree_hash(), + NodeWeight::Ordering(weight) => weight.merkle_tree_hash(), + NodeWeight::Prop(weight) => weight.merkle_tree_hash(), + } + } + + pub fn new_content_hash(&mut self, content_hash: ContentHash) -> NodeWeightResult<()> { + match self { + NodeWeight::Content(weight) => weight.new_content_hash(content_hash), + NodeWeight::Func(weight) => weight.new_content_hash(content_hash), + NodeWeight::FuncArgument(weight) => weight.new_content_hash(content_hash), + NodeWeight::Prop(weight) => weight.new_content_hash(content_hash), + NodeWeight::AttributePrototypeArgument(_) + | NodeWeight::AttributeValue(_) + | NodeWeight::Category(_) + | NodeWeight::Ordering(_) => Err(NodeWeightError::CannotSetContentHashOnKind), + } + } + + pub fn new_with_incremented_vector_clock( + &self, + change_set: &ChangeSetPointer, + ) -> NodeWeightResult { + let new_weight = match self { + NodeWeight::AttributePrototypeArgument(weight) => { + NodeWeight::AttributePrototypeArgument( + weight.new_with_incremented_vector_clock(change_set)?, + ) + } + NodeWeight::AttributeValue(weight) => { + NodeWeight::AttributeValue(weight.new_with_incremented_vector_clock(change_set)?) + } + NodeWeight::Category(weight) => { + NodeWeight::Category(weight.new_with_incremented_vector_clock(change_set)?) + } + NodeWeight::Content(weight) => { + NodeWeight::Content(weight.new_with_incremented_vector_clock(change_set)?) + } + NodeWeight::Func(weight) => { + NodeWeight::Func(weight.new_with_incremented_vector_clock(change_set)?) + } + NodeWeight::FuncArgument(weight) => { + NodeWeight::FuncArgument(weight.new_with_incremented_vector_clock(change_set)?) + } + NodeWeight::Ordering(weight) => { + NodeWeight::Ordering(weight.new_with_incremented_vector_clock(change_set)?) + } + NodeWeight::Prop(weight) => { + NodeWeight::Prop(weight.new_with_incremented_vector_clock(change_set)?) 
+ } + }; + + Ok(new_weight) + } + + /// The node hash is used to compare nodes directly, and should be computed based on the data + /// that is specific to the node weight, *and* the content_hash, so that changes are detected + /// between nodes whether the content has changed or just the node weight has changed. + pub fn node_hash(&self) -> ContentHash { + match self { + NodeWeight::AttributePrototypeArgument(weight) => weight.node_hash(), + NodeWeight::AttributeValue(weight) => weight.node_hash(), + NodeWeight::Category(weight) => weight.node_hash(), + NodeWeight::Content(weight) => weight.node_hash(), + NodeWeight::Func(weight) => weight.node_hash(), + NodeWeight::FuncArgument(weight) => weight.node_hash(), + NodeWeight::Ordering(weight) => weight.node_hash(), + NodeWeight::Prop(weight) => weight.node_hash(), + } + } + + pub fn set_merkle_tree_hash(&mut self, new_hash: ContentHash) { + match self { + NodeWeight::AttributePrototypeArgument(weight) => weight.set_merkle_tree_hash(new_hash), + NodeWeight::AttributeValue(weight) => weight.set_merkle_tree_hash(new_hash), + NodeWeight::Category(weight) => weight.set_merkle_tree_hash(new_hash), + NodeWeight::Content(weight) => weight.set_merkle_tree_hash(new_hash), + NodeWeight::Func(weight) => weight.set_merkle_tree_hash(new_hash), + NodeWeight::FuncArgument(weight) => weight.set_merkle_tree_hash(new_hash), + NodeWeight::Ordering(weight) => weight.set_merkle_tree_hash(new_hash), + NodeWeight::Prop(weight) => weight.set_merkle_tree_hash(new_hash), + } + } + + pub fn set_order( + &mut self, + change_set: &ChangeSetPointer, + order: Vec, + ) -> NodeWeightResult<()> { + match self { + NodeWeight::Ordering(ordering_weight) => ordering_weight.set_order(change_set, order), + + NodeWeight::AttributePrototypeArgument(_) + | NodeWeight::AttributeValue(_) + | NodeWeight::Category(_) + | NodeWeight::Content(_) + | NodeWeight::Func(_) + | NodeWeight::FuncArgument(_) + | NodeWeight::Prop(_) => 
Err(NodeWeightError::CannotSetOrderOnKind), + } + } + + pub fn set_vector_clock_recently_seen_to( + &mut self, + change_set: &ChangeSetPointer, + new_val: DateTime, + ) { + match self { + NodeWeight::AttributePrototypeArgument(weight) => { + weight.set_vector_clock_recently_seen_to(change_set, new_val) + } + NodeWeight::AttributeValue(weight) => { + weight.set_vector_clock_recently_seen_to(change_set, new_val) + } + NodeWeight::Category(weight) => { + weight.set_vector_clock_recently_seen_to(change_set, new_val) + } + NodeWeight::Content(weight) => { + weight.set_vector_clock_recently_seen_to(change_set, new_val) + } + NodeWeight::Func(weight) => { + weight.set_vector_clock_recently_seen_to(change_set, new_val) + } + NodeWeight::FuncArgument(weight) => { + weight.set_vector_clock_recently_seen_to(change_set, new_val) + } + NodeWeight::Ordering(weight) => { + weight.set_vector_clock_recently_seen_to(change_set, new_val) + } + NodeWeight::Prop(weight) => { + weight.set_vector_clock_recently_seen_to(change_set, new_val) + } + } + } + + pub fn vector_clock_first_seen(&self) -> &VectorClock { + match self { + NodeWeight::AttributePrototypeArgument(weight) => weight.vector_clock_first_seen(), + NodeWeight::AttributeValue(weight) => weight.vector_clock_first_seen(), + NodeWeight::Category(weight) => weight.vector_clock_first_seen(), + NodeWeight::Content(weight) => weight.vector_clock_first_seen(), + NodeWeight::Func(weight) => weight.vector_clock_first_seen(), + NodeWeight::FuncArgument(weight) => weight.vector_clock_first_seen(), + NodeWeight::Ordering(weight) => weight.vector_clock_first_seen(), + NodeWeight::Prop(weight) => weight.vector_clock_first_seen(), + } + } + + pub fn vector_clock_recently_seen(&self) -> &VectorClock { + match self { + NodeWeight::AttributePrototypeArgument(weight) => weight.vector_clock_recently_seen(), + NodeWeight::AttributeValue(weight) => weight.vector_clock_recently_seen(), + NodeWeight::Category(weight) => 
weight.vector_clock_recently_seen(), + NodeWeight::Content(weight) => weight.vector_clock_recently_seen(), + NodeWeight::Func(weight) => weight.vector_clock_recently_seen(), + NodeWeight::FuncArgument(weight) => weight.vector_clock_recently_seen(), + NodeWeight::Ordering(weight) => weight.vector_clock_recently_seen(), + NodeWeight::Prop(weight) => weight.vector_clock_recently_seen(), + } + } + + pub fn vector_clock_write(&self) -> &VectorClock { + match self { + NodeWeight::AttributePrototypeArgument(weight) => weight.vector_clock_write(), + NodeWeight::AttributeValue(weight) => weight.vector_clock_write(), + NodeWeight::Category(weight) => weight.vector_clock_write(), + NodeWeight::Content(weight) => weight.vector_clock_write(), + NodeWeight::Func(weight) => weight.vector_clock_write(), + NodeWeight::FuncArgument(weight) => weight.vector_clock_write(), + NodeWeight::Ordering(weight) => weight.vector_clock_write(), + NodeWeight::Prop(weight) => weight.vector_clock_write(), + } + } + + pub fn get_attribute_prototype_argument_node_weight( + &self, + ) -> NodeWeightResult { + match self { + NodeWeight::AttributePrototypeArgument(inner) => Ok(inner.to_owned()), + other => Err(NodeWeightError::UnexpectedNodeWeightVariant( + NodeWeightDiscriminants::AttributePrototypeArgument, + other.into(), + )), + } + } + + pub fn get_attribute_value_node_weight(&self) -> NodeWeightResult { + match self { + NodeWeight::AttributeValue(inner) => Ok(inner.to_owned()), + other => Err(NodeWeightError::UnexpectedNodeWeightVariant( + NodeWeightDiscriminants::AttributeValue, + other.into(), + )), + } + } + + pub fn get_prop_node_weight(&self) -> NodeWeightResult { + match self { + NodeWeight::Prop(inner) => Ok(inner.to_owned()), + other => Err(NodeWeightError::UnexpectedNodeWeightVariant( + NodeWeightDiscriminants::Prop, + other.into(), + )), + } + } + + pub fn get_func_node_weight(&self) -> NodeWeightResult { + match self { + NodeWeight::Func(inner) => Ok(inner.to_owned()), + other => 
Err(NodeWeightError::UnexpectedNodeWeightVariant( + NodeWeightDiscriminants::Func, + other.into(), + )), + } + } + + pub fn get_func_argument_node_weight(&self) -> NodeWeightResult { + match self { + NodeWeight::FuncArgument(inner) => Ok(inner.to_owned()), + other => Err(NodeWeightError::UnexpectedNodeWeightVariant( + NodeWeightDiscriminants::FuncArgument, + other.into(), + )), + } + } + + pub fn get_ordering_node_weight(&self) -> NodeWeightResult { + match self { + NodeWeight::Ordering(inner) => Ok(inner.to_owned()), + other => Err(NodeWeightError::UnexpectedNodeWeightVariant( + NodeWeightDiscriminants::Ordering, + other.into(), + )), + } + } + + pub fn get_content_node_weight_of_kind( + &self, + content_addr_discrim: ContentAddressDiscriminants, + ) -> NodeWeightResult { + match self { + NodeWeight::Content(inner) => { + let inner_addr_discrim: ContentAddressDiscriminants = + inner.content_address().into(); + if inner_addr_discrim != content_addr_discrim { + return Err(NodeWeightError::UnexpectedContentAddressVariant( + content_addr_discrim, + inner_addr_discrim, + )); + } + + Ok(inner.to_owned()) + } + other => Err(NodeWeightError::UnexpectedNodeWeightVariant( + NodeWeightDiscriminants::Content, + other.into(), + )), + } + } + + pub fn get_option_content_node_weight_of_kind( + &self, + content_addr_discrim: ContentAddressDiscriminants, + ) -> Option { + match self { + NodeWeight::Content(inner) => { + let inner_addr_discrim: ContentAddressDiscriminants = + inner.content_address().into(); + if inner_addr_discrim != content_addr_discrim { + return None; + } + Some(inner.to_owned()) + } + _other => None, + } + } + + pub fn new_content( + change_set: &ChangeSetPointer, + content_id: Ulid, + kind: ContentAddress, + ) -> NodeWeightResult { + Ok(NodeWeight::Content(ContentNodeWeight::new( + change_set, content_id, kind, + )?)) + } + + pub fn new_attribute_value( + change_set: &ChangeSetPointer, + attribute_value_id: Ulid, + unprocessed_value: Option, + value: Option, + 
materialized_view: Option, + func_execution_pk: Option, + ) -> NodeWeightResult { + Ok(NodeWeight::AttributeValue(AttributeValueNodeWeight::new( + change_set, + attribute_value_id, + unprocessed_value, + value, + materialized_view, + func_execution_pk, + )?)) + } + + pub fn new_prop( + change_set: &ChangeSetPointer, + prop_id: Ulid, + prop_kind: PropKind, + name: impl AsRef, + content_hash: ContentHash, + ) -> NodeWeightResult { + Ok(NodeWeight::Prop(PropNodeWeight::new( + change_set, + prop_id, + ContentAddress::Prop(content_hash), + prop_kind, + name.as_ref().to_string(), + )?)) + } + + pub fn new_func( + change_set: &ChangeSetPointer, + func_id: Ulid, + name: impl AsRef, + backend_kind: FuncBackendKind, + content_hash: ContentHash, + ) -> NodeWeightResult { + Ok(NodeWeight::Func(FuncNodeWeight::new( + change_set, + func_id, + ContentAddress::Func(content_hash), + name.as_ref().to_string(), + backend_kind, + )?)) + } + + pub fn new_func_argument( + change_set: &ChangeSetPointer, + func_arg_id: Ulid, + name: impl AsRef, + content_hash: ContentHash, + ) -> NodeWeightResult { + Ok(NodeWeight::FuncArgument(FuncArgumentNodeWeight::new( + change_set, + func_arg_id, + ContentAddress::Func(content_hash), + name.as_ref().to_string(), + )?)) + } + + pub fn new_attribute_prototype_argument( + change_set: &ChangeSetPointer, + attribute_prototype_argument_id: Ulid, + targets: Option, + ) -> NodeWeightResult { + Ok(NodeWeight::AttributePrototypeArgument( + AttributePrototypeArgumentNodeWeight::new( + change_set, + attribute_prototype_argument_id, + targets, + )?, + )) + } +} diff --git a/lib/dal/src/workspace_snapshot/node_weight/attribute_prototype_argument_node_weight.rs b/lib/dal/src/workspace_snapshot/node_weight/attribute_prototype_argument_node_weight.rs new file mode 100644 index 0000000000..0c31edfadc --- /dev/null +++ b/lib/dal/src/workspace_snapshot/node_weight/attribute_prototype_argument_node_weight.rs @@ -0,0 +1,185 @@ +use chrono::{DateTime, Utc}; +use 
content_store::ContentHash; +use serde::{Deserialize, Serialize}; +use ulid::Ulid; + +use crate::{ + change_set_pointer::ChangeSetPointer, + workspace_snapshot::{ + graph::LineageId, node_weight::NodeWeightResult, vector_clock::VectorClock, + }, + ComponentId, Timestamp, +}; + +use crate::workspace_snapshot::vector_clock::VectorClockId; + +/// When this `AttributePrototypeArgument` represents a connection between two +/// components, we need to know which components are being connected. +#[derive(Copy, Clone, Serialize, Deserialize, Debug, PartialEq, Eq)] +pub struct ArgumentTargets { + pub source_component_id: ComponentId, + pub destination_component_id: ComponentId, +} + +#[derive(Clone, Serialize, Deserialize)] +pub struct AttributePrototypeArgumentNodeWeight { + id: Ulid, + lineage_id: LineageId, + merkle_tree_hash: ContentHash, + vector_clock_first_seen: VectorClock, + vector_clock_recently_seen: VectorClock, + vector_clock_write: VectorClock, + targets: Option, + timestamp: Timestamp, +} + +impl AttributePrototypeArgumentNodeWeight { + pub fn new( + change_set: &ChangeSetPointer, + id: Ulid, + targets: Option, + ) -> NodeWeightResult { + Ok(Self { + id, + lineage_id: change_set.generate_ulid()?, + merkle_tree_hash: ContentHash::default(), + targets, + vector_clock_first_seen: VectorClock::new(change_set.vector_clock_id())?, + vector_clock_recently_seen: VectorClock::new(change_set.vector_clock_id())?, + vector_clock_write: VectorClock::new(change_set.vector_clock_id())?, + timestamp: Timestamp::now(), + }) + } + + pub fn timestamp(&self) -> &Timestamp { + &self.timestamp + } + + pub fn content_hash(&self) -> ContentHash { + let target_string = self + .targets + .map(|targets| { + format!( + "{}{}", + targets.source_component_id, targets.destination_component_id + ) + }) + .unwrap_or("".into()); + + ContentHash::new(target_string.as_bytes()) + } + + pub fn node_hash(&self) -> ContentHash { + self.content_hash() + } + + pub fn id(&self) -> Ulid { + self.id + } 
+ + pub fn increment_vector_clock( + &mut self, + change_set: &ChangeSetPointer, + ) -> NodeWeightResult<()> { + self.vector_clock_write.inc(change_set.vector_clock_id())?; + self.vector_clock_recently_seen + .inc(change_set.vector_clock_id())?; + + Ok(()) + } + + pub fn lineage_id(&self) -> Ulid { + self.lineage_id + } + + pub fn mark_seen_at(&mut self, vector_clock_id: VectorClockId, seen_at: DateTime) { + self.vector_clock_recently_seen + .inc_to(vector_clock_id, seen_at); + if self + .vector_clock_first_seen + .entry_for(vector_clock_id) + .is_none() + { + self.vector_clock_first_seen + .inc_to(vector_clock_id, seen_at); + } + } + + pub fn merge_clocks( + &mut self, + change_set: &ChangeSetPointer, + other: &Self, + ) -> NodeWeightResult<()> { + self.vector_clock_write + .merge(change_set.vector_clock_id(), &other.vector_clock_write)?; + self.vector_clock_first_seen + .merge(change_set.vector_clock_id(), &other.vector_clock_first_seen)?; + self.vector_clock_recently_seen.merge( + change_set.vector_clock_id(), + &other.vector_clock_recently_seen, + )?; + + Ok(()) + } + + pub fn merkle_tree_hash(&self) -> ContentHash { + self.merkle_tree_hash + } + + pub fn targets(&self) -> Option { + self.targets + } + + pub fn new_with_incremented_vector_clock( + &self, + change_set: &ChangeSetPointer, + ) -> NodeWeightResult { + let mut new_node_weight = self.clone(); + new_node_weight.increment_vector_clock(change_set)?; + + Ok(new_node_weight) + } + + pub fn set_merkle_tree_hash(&mut self, new_hash: ContentHash) { + self.merkle_tree_hash = new_hash; + } + + pub fn set_vector_clock_recently_seen_to( + &mut self, + change_set: &ChangeSetPointer, + new_val: DateTime, + ) { + self.vector_clock_recently_seen + .inc_to(change_set.vector_clock_id(), new_val); + } + + pub fn vector_clock_first_seen(&self) -> &VectorClock { + &self.vector_clock_first_seen + } + + pub fn vector_clock_recently_seen(&self) -> &VectorClock { + &self.vector_clock_recently_seen + } + + pub fn 
vector_clock_write(&self) -> &VectorClock { + &self.vector_clock_write + } +} + +impl std::fmt::Debug for AttributePrototypeArgumentNodeWeight { + fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { + f.debug_struct("AttributePrototypeArgumentNodeWeight") + .field("id", &self.id().to_string()) + .field("lineage_id", &self.lineage_id.to_string()) + .field("targets", &self.targets) + .field("node_hash", &self.node_hash()) + .field("merkle_tree_hash", &self.merkle_tree_hash) + .field("vector_clock_first_seen", &self.vector_clock_first_seen) + .field( + "vector_clock_recently_seen", + &self.vector_clock_recently_seen, + ) + .field("vector_clock_write", &self.vector_clock_write) + .finish() + } +} diff --git a/lib/dal/src/workspace_snapshot/node_weight/attribute_value_node_weight.rs b/lib/dal/src/workspace_snapshot/node_weight/attribute_value_node_weight.rs new file mode 100644 index 0000000000..29a4ebebd7 --- /dev/null +++ b/lib/dal/src/workspace_snapshot/node_weight/attribute_value_node_weight.rs @@ -0,0 +1,213 @@ +use chrono::{DateTime, Utc}; +use content_store::ContentHash; +use serde::{Deserialize, Serialize}; +use ulid::Ulid; + +use crate::{ + change_set_pointer::ChangeSetPointer, + func::execution::FuncExecutionPk, + workspace_snapshot::{ + content_address::ContentAddress, + graph::LineageId, + node_weight::NodeWeightResult, + vector_clock::{VectorClock, VectorClockId}, + }, +}; + +#[derive(Clone, Serialize, Deserialize)] +pub struct AttributeValueNodeWeight { + id: Ulid, + lineage_id: LineageId, + merkle_tree_hash: ContentHash, + vector_clock_first_seen: VectorClock, + vector_clock_recently_seen: VectorClock, + vector_clock_write: VectorClock, + /// The unprocessed return value is the "real" result, unprocessed for any other behavior. + /// This is potentially-maybe-only-kinda-sort-of(?) useful for non-scalar values. + /// Example: a populated array. + unprocessed_value: Option, + /// The processed return value. + /// Example: empty array. 
+ value: Option, + /// A cached representation of this value and all of its child values. + materialized_view: Option, + /// The id of the func execution that produced the values for this value + func_execution_pk: Option, +} + +impl AttributeValueNodeWeight { + pub fn new( + change_set: &ChangeSetPointer, + id: Ulid, + unprocessed_value: Option, + value: Option, + materialized_view: Option, + func_execution_pk: Option, + ) -> NodeWeightResult { + Ok(Self { + id, + lineage_id: change_set.generate_ulid()?, + merkle_tree_hash: ContentHash::default(), + vector_clock_first_seen: VectorClock::new(change_set.vector_clock_id())?, + vector_clock_recently_seen: VectorClock::new(change_set.vector_clock_id())?, + vector_clock_write: VectorClock::new(change_set.vector_clock_id())?, + + unprocessed_value, + value, + materialized_view, + func_execution_pk, + }) + } + + pub fn id(&self) -> Ulid { + self.id + } + + pub fn unprocessed_value(&self) -> Option { + self.unprocessed_value + } + + pub fn set_unprocessed_value(&mut self, unprocessed_value: Option) { + self.unprocessed_value = unprocessed_value + } + + pub fn value(&self) -> Option { + self.value + } + + pub fn set_value(&mut self, value: Option) { + self.value = value + } + + pub fn materialized_view(&self) -> Option { + self.materialized_view + } + + pub fn set_materialized_view(&mut self, materialized_view: Option) { + self.materialized_view = materialized_view + } + + pub fn set_func_execution_pk(&mut self, func_execution_pk: Option) { + self.func_execution_pk = func_execution_pk + } + + pub fn func_execution_pk(&self) -> Option { + self.func_execution_pk + } + + pub fn increment_vector_clock( + &mut self, + change_set: &ChangeSetPointer, + ) -> NodeWeightResult<()> { + self.vector_clock_write.inc(change_set.vector_clock_id())?; + self.vector_clock_recently_seen + .inc(change_set.vector_clock_id())?; + + Ok(()) + } + + pub fn lineage_id(&self) -> Ulid { + self.lineage_id + } + + pub fn mark_seen_at(&mut self, 
vector_clock_id: VectorClockId, seen_at: DateTime) { + self.vector_clock_recently_seen + .inc_to(vector_clock_id, seen_at); + if self + .vector_clock_first_seen + .entry_for(vector_clock_id) + .is_none() + { + self.vector_clock_first_seen + .inc_to(vector_clock_id, seen_at); + } + } + + pub fn merge_clocks( + &mut self, + change_set: &ChangeSetPointer, + other: &Self, + ) -> NodeWeightResult<()> { + self.vector_clock_write + .merge(change_set.vector_clock_id(), &other.vector_clock_write)?; + self.vector_clock_first_seen + .merge(change_set.vector_clock_id(), &other.vector_clock_first_seen)?; + self.vector_clock_recently_seen.merge( + change_set.vector_clock_id(), + &other.vector_clock_recently_seen, + )?; + + Ok(()) + } + + pub fn merkle_tree_hash(&self) -> ContentHash { + self.merkle_tree_hash + } + + pub fn new_with_incremented_vector_clock( + &self, + change_set: &ChangeSetPointer, + ) -> NodeWeightResult { + let mut new_node_weight = self.clone(); + new_node_weight.increment_vector_clock(change_set)?; + + Ok(new_node_weight) + } + + pub fn content_hash(&self) -> ContentHash { + self.node_hash() + } + + pub fn node_hash(&self) -> ContentHash { + ContentHash::from(&serde_json::json![{ + "unprocessed_value": self.unprocessed_value, + "value": self.value, + "materialized_view": self.materialized_view, + }]) + } + + pub fn set_merkle_tree_hash(&mut self, new_hash: ContentHash) { + self.merkle_tree_hash = new_hash; + } + + pub fn set_vector_clock_recently_seen_to( + &mut self, + change_set: &ChangeSetPointer, + new_val: DateTime, + ) { + self.vector_clock_recently_seen + .inc_to(change_set.vector_clock_id(), new_val); + } + + pub fn vector_clock_first_seen(&self) -> &VectorClock { + &self.vector_clock_first_seen + } + + pub fn vector_clock_recently_seen(&self) -> &VectorClock { + &self.vector_clock_recently_seen + } + + pub fn vector_clock_write(&self) -> &VectorClock { + &self.vector_clock_write + } +} + +impl std::fmt::Debug for AttributeValueNodeWeight { + fn 
fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { + f.debug_struct("AttributeValueNodeWeight") + .field("id", &self.id().to_string()) + .field("lineage_id", &self.lineage_id.to_string()) + .field("value", &self.value) + .field("unprocessed_value", &self.unprocessed_value) + .field("materialized_view", &self.materialized_view) + .field("node_hash", &self.node_hash()) + .field("merkle_tree_hash", &self.merkle_tree_hash) + .field("vector_clock_first_seen", &self.vector_clock_first_seen) + .field( + "vector_clock_recently_seen", + &self.vector_clock_recently_seen, + ) + .field("vector_clock_write", &self.vector_clock_write) + .finish() + } +} diff --git a/lib/dal/src/workspace_snapshot/node_weight/category_node_weight.rs b/lib/dal/src/workspace_snapshot/node_weight/category_node_weight.rs new file mode 100644 index 0000000000..cabe964c99 --- /dev/null +++ b/lib/dal/src/workspace_snapshot/node_weight/category_node_weight.rs @@ -0,0 +1,167 @@ +use chrono::{DateTime, Utc}; +use content_store::ContentHash; +use serde::{Deserialize, Serialize}; +use strum::Display; +use ulid::Ulid; + +use crate::change_set_pointer::ChangeSetPointer; +use crate::workspace_snapshot::vector_clock::VectorClockId; +use crate::workspace_snapshot::{node_weight::NodeWeightResult, vector_clock::VectorClock}; + +#[derive(Debug, Clone, Copy, Serialize, Deserialize, PartialEq, Eq, Display)] +pub enum CategoryNodeKind { + Component, + Func, + Schema, + Secret, +} + +#[derive(Clone, Serialize, Deserialize)] +pub struct CategoryNodeWeight { + id: Ulid, + lineage_id: Ulid, + kind: CategoryNodeKind, + content_hash: ContentHash, + merkle_tree_hash: ContentHash, + vector_clock_first_seen: VectorClock, + vector_clock_recently_seen: VectorClock, + vector_clock_write: VectorClock, +} + +impl CategoryNodeWeight { + pub fn content_hash(&self) -> ContentHash { + self.content_hash + } + + pub fn id(&self) -> Ulid { + self.id + } + + pub fn kind(&self) -> CategoryNodeKind { + self.kind + } + + pub fn 
increment_seen_vector_clock( + &mut self, + change_set: &ChangeSetPointer, + ) -> NodeWeightResult<()> { + self.vector_clock_first_seen + .inc(change_set.vector_clock_id())?; + + Ok(()) + } + + pub fn increment_vector_clock( + &mut self, + change_set: &ChangeSetPointer, + ) -> NodeWeightResult<()> { + self.vector_clock_write + .inc(change_set.vector_clock_id()) + .map_err(Into::into) + } + + pub fn lineage_id(&self) -> Ulid { + self.lineage_id + } + + pub fn mark_seen_at(&mut self, vector_clock_id: VectorClockId, seen_at: DateTime) { + self.vector_clock_recently_seen + .inc_to(vector_clock_id, seen_at); + if self + .vector_clock_first_seen + .entry_for(vector_clock_id) + .is_none() + { + self.vector_clock_first_seen + .inc_to(vector_clock_id, seen_at); + } + } + + pub fn merge_clocks( + &mut self, + change_set: &ChangeSetPointer, + other: &CategoryNodeWeight, + ) -> NodeWeightResult<()> { + self.vector_clock_write + .merge(change_set.vector_clock_id(), other.vector_clock_write())?; + self.vector_clock_first_seen.merge( + change_set.vector_clock_id(), + other.vector_clock_first_seen(), + )?; + + Ok(()) + } + + pub fn merkle_tree_hash(&self) -> ContentHash { + self.merkle_tree_hash + } + + pub fn new(change_set: &ChangeSetPointer, kind: CategoryNodeKind) -> NodeWeightResult { + Ok(Self { + id: change_set.generate_ulid()?, + lineage_id: change_set.generate_ulid()?, + kind, + vector_clock_write: VectorClock::new(change_set.vector_clock_id())?, + vector_clock_first_seen: VectorClock::new(change_set.vector_clock_id())?, + content_hash: ContentHash::from(&serde_json::json![kind]), + merkle_tree_hash: Default::default(), + vector_clock_recently_seen: Default::default(), + }) + } + + pub fn new_with_incremented_vector_clock( + &self, + change_set: &ChangeSetPointer, + ) -> NodeWeightResult { + let mut new_ordering_weight = self.clone(); + new_ordering_weight.increment_vector_clock(change_set)?; + + Ok(new_ordering_weight) + } + + pub fn node_hash(&self) -> ContentHash { + 
use chrono::{DateTime, Utc};
use content_store::ContentHash;
use serde::{Deserialize, Serialize};
use ulid::Ulid;

use crate::workspace_snapshot::vector_clock::VectorClockId;
use crate::{
    change_set_pointer::ChangeSetPointer,
    workspace_snapshot::{
        content_address::ContentAddress,
        graph::LineageId,
        node_weight::{NodeWeightError, NodeWeightResult},
        vector_clock::VectorClock,
    },
};

/// Node weight for "plain" content nodes: everything about the node other than its
/// identity and clocks lives in the content store, addressed by `content_address`.
#[derive(Clone, Serialize, Deserialize)]
pub struct ContentNodeWeight {
    /// The stable local ID of the object in question. Mainly used by external things like
    /// the UI to be able to say "do X to _this_ thing" since the `NodeIndex` is an
    /// internal implementation detail, and the content ID wrapped by the
    /// [`NodeWeightKind`] changes whenever something about the node itself changes (for
    /// example, the name, or type of a [`Prop`].)
    id: Ulid,
    /// Globally stable ID for tracking the "lineage" of a thing to determine whether it
    /// should be trying to receive updates.
    lineage_id: LineageId,
    /// What type of thing is this node representing, and what is the content hash used to
    /// retrieve the data for this specific node.
    content_address: ContentAddress,
    /// [Merkle tree](https://en.wikipedia.org/wiki/Merkle_tree) hash for the graph
    /// starting with this node as the root. Mainly useful in quickly determining "has
    /// something changed anywhere in this (sub)graph".
    merkle_tree_hash: ContentHash,
    /// The first time a [`ChangeSetPointer`] has "seen" this content. This is useful for determining
    /// whether the absence of this content on one side or the other of a rebase/merge is because
    /// the content is new, or because one side deleted it.
    vector_clock_first_seen: VectorClock,
    // Most recent time each change set observed this node (bumped by writes and by
    // `mark_seen_at`).
    vector_clock_recently_seen: VectorClock,
    // Incremented on every write a change set performs against this node.
    vector_clock_write: VectorClock,
}

impl ContentNodeWeight {
    /// Create a new weight for `id`, minting a fresh lineage id from the change set and
    /// seeding all three vector clocks with the change set's clock id.
    pub fn new(
        change_set: &ChangeSetPointer,
        id: Ulid,
        content_address: ContentAddress,
    ) -> NodeWeightResult<Self> {
        Ok(Self {
            id,
            lineage_id: change_set.generate_ulid()?,
            content_address,
            merkle_tree_hash: ContentHash::default(),
            vector_clock_first_seen: VectorClock::new(change_set.vector_clock_id())?,
            vector_clock_recently_seen: VectorClock::new(change_set.vector_clock_id())?,
            vector_clock_write: VectorClock::new(change_set.vector_clock_id())?,
        })
    }

    /// The typed content-store address for this node.
    pub fn content_address(&self) -> ContentAddress {
        self.content_address
    }

    /// Hash of the node's content, extracted from the content address.
    pub fn content_hash(&self) -> ContentHash {
        self.content_address.content_hash()
    }

    pub fn id(&self) -> Ulid {
        self.id
    }

    /// Bump the write clock and the recently-seen clock for `change_set`.
    pub fn increment_vector_clock(
        &mut self,
        change_set: &ChangeSetPointer,
    ) -> NodeWeightResult<()> {
        self.vector_clock_write.inc(change_set.vector_clock_id())?;
        self.vector_clock_recently_seen
            .inc(change_set.vector_clock_id())?;

        Ok(())
    }

    pub fn lineage_id(&self) -> Ulid {
        self.lineage_id
    }

    /// Record that `vector_clock_id` has seen this node at `seen_at`. Always advances
    /// recently-seen; first-seen is only set if there is no existing entry, preserving
    /// the original first-seen timestamp.
    pub fn mark_seen_at(&mut self, vector_clock_id: VectorClockId, seen_at: DateTime<Utc>) {
        self.vector_clock_recently_seen
            .inc_to(vector_clock_id, seen_at);
        if self
            .vector_clock_first_seen
            .entry_for(vector_clock_id)
            .is_none()
        {
            self.vector_clock_first_seen
                .inc_to(vector_clock_id, seen_at);
        }
    }

    /// Merge all three clocks from `other` into `self`, incrementing the merging change
    /// set's entry in each (see `VectorClock::merge`).
    pub fn merge_clocks(
        &mut self,
        change_set: &ChangeSetPointer,
        other: &Self,
    ) -> NodeWeightResult<()> {
        self.vector_clock_write
            .merge(change_set.vector_clock_id(), &other.vector_clock_write)?;
        self.vector_clock_first_seen
            .merge(change_set.vector_clock_id(), &other.vector_clock_first_seen)?;
        self.vector_clock_recently_seen.merge(
            change_set.vector_clock_id(),
            &other.vector_clock_recently_seen,
        )?;

        Ok(())
    }

    pub fn merkle_tree_hash(&self) -> ContentHash {
        self.merkle_tree_hash
    }

    /// Replace the hash inside the content address, preserving its variant.
    ///
    /// Errors for `Prop` (props use a dedicated weight type) and for `Root` (the root's
    /// hash cannot be updated this way).
    pub fn new_content_hash(&mut self, content_hash: ContentHash) -> NodeWeightResult<()> {
        let new_address = match &self.content_address {
            ContentAddress::ActionPrototype(_) => ContentAddress::ActionPrototype(content_hash),
            ContentAddress::AttributePrototype(_) => {
                ContentAddress::AttributePrototype(content_hash)
            }
            ContentAddress::Component(_) => ContentAddress::Component(content_hash),
            ContentAddress::ExternalProvider(_) => ContentAddress::ExternalProvider(content_hash),
            ContentAddress::FuncArg(_) => ContentAddress::FuncArg(content_hash),
            ContentAddress::Func(_) => ContentAddress::Func(content_hash),
            ContentAddress::InternalProvider(_) => ContentAddress::InternalProvider(content_hash),
            ContentAddress::JsonValue(_) => ContentAddress::JsonValue(content_hash),
            ContentAddress::Prop(_) => {
                return Err(NodeWeightError::InvalidContentAddressForWeightKind(
                    "Prop".to_string(),
                    "Content".to_string(),
                ));
            }
            ContentAddress::Root => return Err(NodeWeightError::CannotUpdateRootNodeContentHash),
            ContentAddress::Schema(_) => ContentAddress::Schema(content_hash),
            ContentAddress::SchemaVariant(_) => ContentAddress::SchemaVariant(content_hash),
            ContentAddress::Secret(_) => ContentAddress::Secret(content_hash),
            ContentAddress::StaticArgumentValue(_) => {
                ContentAddress::StaticArgumentValue(content_hash)
            }
            ContentAddress::ValidationPrototype(_) => {
                ContentAddress::ValidationPrototype(content_hash)
            }
        };

        self.content_address = new_address;

        Ok(())
    }

    /// Clone `self` with its write/recently-seen clocks incremented for `change_set`.
    pub fn new_with_incremented_vector_clock(
        &self,
        change_set: &ChangeSetPointer,
    ) -> NodeWeightResult<Self> {
        let mut new_node_weight = self.clone();
        new_node_weight.increment_vector_clock(change_set)?;

        Ok(new_node_weight)
    }

    /// Hash identifying this node for graph comparison; for content nodes this is just
    /// the content hash.
    pub fn node_hash(&self) -> ContentHash {
        self.content_hash()
    }

    pub fn set_merkle_tree_hash(&mut self, new_hash: ContentHash) {
        self.merkle_tree_hash = new_hash;
    }

    /// Force the recently-seen entry for `change_set` to exactly `new_val`.
    pub fn set_vector_clock_recently_seen_to(
        &mut self,
        change_set: &ChangeSetPointer,
        new_val: DateTime<Utc>,
    ) {
        self.vector_clock_recently_seen
            .inc_to(change_set.vector_clock_id(), new_val);
    }

    pub fn vector_clock_first_seen(&self) -> &VectorClock {
        &self.vector_clock_first_seen
    }

    pub fn vector_clock_recently_seen(&self) -> &VectorClock {
        &self.vector_clock_recently_seen
    }

    pub fn vector_clock_write(&self) -> &VectorClock {
        &self.vector_clock_write
    }
}

impl std::fmt::Debug for ContentNodeWeight {
    fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
        f.debug_struct("ContentNodeWeight")
            .field("id", &self.id.to_string())
            .field("lineage_id", &self.lineage_id.to_string())
            .field("content_address", &self.content_address)
            .field("merkle_tree_hash", &self.merkle_tree_hash)
            .field("vector_clock_first_seen", &self.vector_clock_first_seen)
            .field(
                "vector_clock_recently_seen",
                &self.vector_clock_recently_seen,
            )
            .field("vector_clock_write", &self.vector_clock_write)
            .finish()
    }
}
VectorClock, + vector_clock_recently_seen: VectorClock, + vector_clock_write: VectorClock, + name: String, +} + +impl FuncArgumentNodeWeight { + pub fn new( + change_set: &ChangeSetPointer, + id: Ulid, + content_address: ContentAddress, + name: String, + ) -> NodeWeightResult { + Ok(Self { + id, + lineage_id: change_set.generate_ulid()?, + content_address, + merkle_tree_hash: ContentHash::default(), + name, + vector_clock_first_seen: VectorClock::new(change_set.vector_clock_id())?, + vector_clock_recently_seen: VectorClock::new(change_set.vector_clock_id())?, + vector_clock_write: VectorClock::new(change_set.vector_clock_id())?, + }) + } + + pub fn content_address(&self) -> ContentAddress { + self.content_address + } + + pub fn content_hash(&self) -> ContentHash { + self.content_address.content_hash() + } + + pub fn id(&self) -> Ulid { + self.id + } + + pub fn increment_vector_clock( + &mut self, + change_set: &ChangeSetPointer, + ) -> NodeWeightResult<()> { + self.vector_clock_write.inc(change_set.vector_clock_id())?; + self.vector_clock_recently_seen + .inc(change_set.vector_clock_id())?; + + Ok(()) + } + + pub fn lineage_id(&self) -> Ulid { + self.lineage_id + } + + pub fn mark_seen_at(&mut self, vector_clock_id: VectorClockId, seen_at: DateTime) { + self.vector_clock_recently_seen + .inc_to(vector_clock_id, seen_at); + if self + .vector_clock_first_seen + .entry_for(vector_clock_id) + .is_none() + { + self.vector_clock_first_seen + .inc_to(vector_clock_id, seen_at); + } + } + + pub fn merge_clocks( + &mut self, + change_set: &ChangeSetPointer, + other: &Self, + ) -> NodeWeightResult<()> { + self.vector_clock_write + .merge(change_set.vector_clock_id(), &other.vector_clock_write)?; + self.vector_clock_first_seen + .merge(change_set.vector_clock_id(), &other.vector_clock_first_seen)?; + self.vector_clock_recently_seen.merge( + change_set.vector_clock_id(), + &other.vector_clock_recently_seen, + )?; + + Ok(()) + } + + pub fn merkle_tree_hash(&self) -> ContentHash 
{ + self.merkle_tree_hash + } + + pub fn name(&self) -> &str { + &self.name + } + + pub fn set_name(&mut self, name: impl Into) -> &mut Self { + self.name = name.into(); + self + } + + pub fn new_content_hash(&mut self, content_hash: ContentHash) -> NodeWeightResult<()> { + let new_address = match &self.content_address { + ContentAddress::FuncArg(_) => ContentAddress::FuncArg(content_hash), + other => { + return Err(NodeWeightError::InvalidContentAddressForWeightKind( + Into::::into(other).to_string(), + ContentAddressDiscriminants::FuncArg.to_string(), + )); + } + }; + + self.content_address = new_address; + + Ok(()) + } + + pub fn new_with_incremented_vector_clock( + &self, + change_set: &ChangeSetPointer, + ) -> NodeWeightResult { + let mut new_node_weight = self.clone(); + new_node_weight.increment_vector_clock(change_set)?; + + Ok(new_node_weight) + } + + pub fn node_hash(&self) -> ContentHash { + ContentHash::from(&serde_json::json![{ + "content_address": self.content_address, + "name": self.name, + }]) + } + + pub fn set_merkle_tree_hash(&mut self, new_hash: ContentHash) { + self.merkle_tree_hash = new_hash; + } + + pub fn set_vector_clock_recently_seen_to( + &mut self, + change_set: &ChangeSetPointer, + new_val: DateTime, + ) { + self.vector_clock_recently_seen + .inc_to(change_set.vector_clock_id(), new_val); + } + + pub fn vector_clock_first_seen(&self) -> &VectorClock { + &self.vector_clock_first_seen + } + + pub fn vector_clock_recently_seen(&self) -> &VectorClock { + &self.vector_clock_recently_seen + } + + pub fn vector_clock_write(&self) -> &VectorClock { + &self.vector_clock_write + } +} + +impl std::fmt::Debug for FuncArgumentNodeWeight { + fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { + f.debug_struct("FuncNodeWeight") + .field("id", &self.id().to_string()) + .field("lineage_id", &self.lineage_id.to_string()) + .field("name", &self.name) + .field("content_hash", &self.content_hash()) + .field("merkle_tree_hash", 
&self.merkle_tree_hash) + .field("vector_clock_first_seen", &self.vector_clock_first_seen) + .field( + "vector_clock_recently_seen", + &self.vector_clock_recently_seen, + ) + .field("vector_clock_write", &self.vector_clock_write) + .finish() + } +} diff --git a/lib/dal/src/workspace_snapshot/node_weight/func_node_weight.rs b/lib/dal/src/workspace_snapshot/node_weight/func_node_weight.rs new file mode 100644 index 0000000000..ef234774ad --- /dev/null +++ b/lib/dal/src/workspace_snapshot/node_weight/func_node_weight.rs @@ -0,0 +1,209 @@ +use chrono::{DateTime, Utc}; +use content_store::ContentHash; +use serde::{Deserialize, Serialize}; +use ulid::Ulid; + +use crate::workspace_snapshot::content_address::ContentAddressDiscriminants; +use crate::workspace_snapshot::vector_clock::VectorClockId; +use crate::FuncBackendKind; +use crate::{ + change_set_pointer::ChangeSetPointer, + workspace_snapshot::{ + content_address::ContentAddress, + graph::LineageId, + node_weight::{NodeWeightError, NodeWeightResult}, + vector_clock::VectorClock, + }, +}; + +#[derive(Clone, Serialize, Deserialize)] +pub struct FuncNodeWeight { + id: Ulid, + lineage_id: LineageId, + content_address: ContentAddress, + merkle_tree_hash: ContentHash, + vector_clock_first_seen: VectorClock, + vector_clock_recently_seen: VectorClock, + vector_clock_write: VectorClock, + name: String, + backend_kind: FuncBackendKind, +} + +impl FuncNodeWeight { + pub fn new( + change_set: &ChangeSetPointer, + id: Ulid, + content_address: ContentAddress, + name: String, + backend_kind: FuncBackendKind, + ) -> NodeWeightResult { + Ok(Self { + id, + lineage_id: change_set.generate_ulid()?, + content_address, + merkle_tree_hash: ContentHash::default(), + name, + backend_kind, + vector_clock_first_seen: VectorClock::new(change_set.vector_clock_id())?, + vector_clock_recently_seen: VectorClock::new(change_set.vector_clock_id())?, + vector_clock_write: VectorClock::new(change_set.vector_clock_id())?, + }) + } + + pub fn 
use chrono::{DateTime, Utc};
use content_store::ContentHash;
use serde::{Deserialize, Serialize};
use ulid::Ulid;

use crate::workspace_snapshot::content_address::ContentAddressDiscriminants;
use crate::workspace_snapshot::vector_clock::VectorClockId;
use crate::FuncBackendKind;
use crate::{
    change_set_pointer::ChangeSetPointer,
    workspace_snapshot::{
        content_address::ContentAddress,
        graph::LineageId,
        node_weight::{NodeWeightError, NodeWeightResult},
        vector_clock::VectorClock,
    },
};

/// Node weight for a function. Carries `name` and `backend_kind` on the weight itself
/// (both participate in `node_hash`), rather than only in the content store.
#[derive(Clone, Serialize, Deserialize)]
pub struct FuncNodeWeight {
    // Stable local id of the function.
    id: Ulid,
    // Globally stable lineage id used for update tracking.
    lineage_id: LineageId,
    // Content-store address; must always be a `ContentAddress::Func`.
    content_address: ContentAddress,
    // Merkle hash of the subgraph rooted at this node.
    merkle_tree_hash: ContentHash,
    vector_clock_first_seen: VectorClock,
    vector_clock_recently_seen: VectorClock,
    vector_clock_write: VectorClock,
    // Function name; part of the node hash.
    name: String,
    // Execution backend for this function; part of the node hash.
    backend_kind: FuncBackendKind,
}

impl FuncNodeWeight {
    /// Create a new weight, minting a fresh lineage id and seeding all three vector
    /// clocks with the change set's clock id.
    pub fn new(
        change_set: &ChangeSetPointer,
        id: Ulid,
        content_address: ContentAddress,
        name: String,
        backend_kind: FuncBackendKind,
    ) -> NodeWeightResult<Self> {
        Ok(Self {
            id,
            lineage_id: change_set.generate_ulid()?,
            content_address,
            merkle_tree_hash: ContentHash::default(),
            name,
            backend_kind,
            vector_clock_first_seen: VectorClock::new(change_set.vector_clock_id())?,
            vector_clock_recently_seen: VectorClock::new(change_set.vector_clock_id())?,
            vector_clock_write: VectorClock::new(change_set.vector_clock_id())?,
        })
    }

    pub fn content_address(&self) -> ContentAddress {
        self.content_address
    }

    pub fn content_hash(&self) -> ContentHash {
        self.content_address.content_hash()
    }

    pub fn id(&self) -> Ulid {
        self.id
    }

    /// Bump the write clock and the recently-seen clock for `change_set`.
    pub fn increment_vector_clock(
        &mut self,
        change_set: &ChangeSetPointer,
    ) -> NodeWeightResult<()> {
        self.vector_clock_write.inc(change_set.vector_clock_id())?;
        self.vector_clock_recently_seen
            .inc(change_set.vector_clock_id())?;

        Ok(())
    }

    pub fn lineage_id(&self) -> Ulid {
        self.lineage_id
    }

    /// Record that `vector_clock_id` has seen this node at `seen_at`. Always advances
    /// recently-seen; first-seen is only set when there is no existing entry.
    pub fn mark_seen_at(&mut self, vector_clock_id: VectorClockId, seen_at: DateTime<Utc>) {
        self.vector_clock_recently_seen
            .inc_to(vector_clock_id, seen_at);
        if self
            .vector_clock_first_seen
            .entry_for(vector_clock_id)
            .is_none()
        {
            self.vector_clock_first_seen
                .inc_to(vector_clock_id, seen_at);
        }
    }

    /// Merge all three clocks from `other` into `self`, incrementing the merging change
    /// set's entry in each.
    pub fn merge_clocks(
        &mut self,
        change_set: &ChangeSetPointer,
        other: &Self,
    ) -> NodeWeightResult<()> {
        self.vector_clock_write
            .merge(change_set.vector_clock_id(), &other.vector_clock_write)?;
        self.vector_clock_first_seen
            .merge(change_set.vector_clock_id(), &other.vector_clock_first_seen)?;
        self.vector_clock_recently_seen.merge(
            change_set.vector_clock_id(),
            &other.vector_clock_recently_seen,
        )?;

        Ok(())
    }

    pub fn merkle_tree_hash(&self) -> ContentHash {
        self.merkle_tree_hash
    }

    pub fn name(&self) -> &str {
        &self.name
    }

    /// Set the function name; returns `&mut Self` for chaining.
    pub fn set_name(&mut self, name: impl Into<String>) -> &mut Self {
        self.name = name.into();
        self
    }

    pub fn backend_kind(&self) -> FuncBackendKind {
        self.backend_kind
    }

    /// Set the backend kind; returns `&mut Self` for chaining.
    pub fn set_backend_kind(&mut self, backend_kind: FuncBackendKind) -> &mut Self {
        self.backend_kind = backend_kind;
        self
    }

    /// Replace the content hash; errors unless the address is `Func`.
    pub fn new_content_hash(&mut self, content_hash: ContentHash) -> NodeWeightResult<()> {
        let new_address = match &self.content_address {
            ContentAddress::Func(_) => ContentAddress::Func(content_hash),
            other => {
                return Err(NodeWeightError::InvalidContentAddressForWeightKind(
                    Into::<ContentAddressDiscriminants>::into(other).to_string(),
                    ContentAddressDiscriminants::Func.to_string(),
                ));
            }
        };

        self.content_address = new_address;

        Ok(())
    }

    /// Clone `self` with its write/recently-seen clocks incremented for `change_set`.
    pub fn new_with_incremented_vector_clock(
        &self,
        change_set: &ChangeSetPointer,
    ) -> NodeWeightResult<Self> {
        let mut new_node_weight = self.clone();
        new_node_weight.increment_vector_clock(change_set)?;

        Ok(new_node_weight)
    }

    /// Hash identifying this node for graph comparison: content address, name, and
    /// backend kind, so renames and backend changes are visible as node changes.
    pub fn node_hash(&self) -> ContentHash {
        ContentHash::from(&serde_json::json![{
            "content_address": self.content_address,
            "name": self.name,
            "backend_kind": self.backend_kind,
        }])
    }

    pub fn set_merkle_tree_hash(&mut self, new_hash: ContentHash) {
        self.merkle_tree_hash = new_hash;
    }

    /// Force the recently-seen entry for `change_set` to exactly `new_val`.
    pub fn set_vector_clock_recently_seen_to(
        &mut self,
        change_set: &ChangeSetPointer,
        new_val: DateTime<Utc>,
    ) {
        self.vector_clock_recently_seen
            .inc_to(change_set.vector_clock_id(), new_val);
    }

    pub fn vector_clock_first_seen(&self) -> &VectorClock {
        &self.vector_clock_first_seen
    }

    pub fn vector_clock_recently_seen(&self) -> &VectorClock {
        &self.vector_clock_recently_seen
    }

    pub fn vector_clock_write(&self) -> &VectorClock {
        &self.vector_clock_write
    }
}

impl std::fmt::Debug for FuncNodeWeight {
    fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
        f.debug_struct("FuncNodeWeight")
            .field("id", &self.id().to_string())
            .field("lineage_id", &self.lineage_id.to_string())
            .field("name", &self.name)
            .field("backend_kind", &self.backend_kind)
            .field("content_hash", &self.content_hash())
            .field("merkle_tree_hash", &self.merkle_tree_hash)
            .field("vector_clock_first_seen", &self.vector_clock_first_seen)
            .field(
                "vector_clock_recently_seen",
                &self.vector_clock_recently_seen,
            )
            .field("vector_clock_write", &self.vector_clock_write)
            .finish()
    }
}
use chrono::{DateTime, Utc};
use content_store::ContentHash;
use serde::{Deserialize, Serialize};
use ulid::Ulid;

use crate::change_set_pointer::ChangeSetPointer;
use crate::workspace_snapshot::vector_clock::VectorClockId;
use crate::workspace_snapshot::{node_weight::NodeWeightResult, vector_clock::VectorClock};

/// Node weight that records the ordering of a container's children. Unlike content
/// weights, its `content_hash` is computed locally from the `order` vec rather than
/// addressed in the content store.
#[derive(Clone, Serialize, Deserialize, Default)]
pub struct OrderingNodeWeight {
    id: Ulid,
    lineage_id: Ulid,
    /// The `id` of the items, in the order that they should appear in the container.
    order: Vec<Ulid>,
    // Hash over the space-joined string form of `order`; see `update_content_hash`.
    content_hash: ContentHash,
    merkle_tree_hash: ContentHash,
    vector_clock_first_seen: VectorClock,
    vector_clock_recently_seen: VectorClock,
    vector_clock_write: VectorClock,
}

impl OrderingNodeWeight {
    pub fn content_hash(&self) -> ContentHash {
        self.content_hash
    }

    pub fn id(&self) -> Ulid {
        self.id
    }

    /// Bump only the first-seen clock for `change_set`.
    pub fn increment_seen_vector_clock(
        &mut self,
        change_set: &ChangeSetPointer,
    ) -> NodeWeightResult<()> {
        self.vector_clock_first_seen
            .inc(change_set.vector_clock_id())?;

        Ok(())
    }

    /// Bump the write clock for `change_set`.
    ///
    /// NOTE(review): unlike the other node weights, this does not also bump
    /// `vector_clock_recently_seen` — confirm this asymmetry is intentional.
    pub fn increment_vector_clock(
        &mut self,
        change_set: &ChangeSetPointer,
    ) -> NodeWeightResult<()> {
        self.vector_clock_write
            .inc(change_set.vector_clock_id())
            .map_err(Into::into)
    }

    pub fn lineage_id(&self) -> Ulid {
        self.lineage_id
    }

    /// Record that `vector_clock_id` has seen this node at `seen_at`. Always advances
    /// recently-seen; first-seen is only set when there is no existing entry.
    pub fn mark_seen_at(&mut self, vector_clock_id: VectorClockId, seen_at: DateTime<Utc>) {
        self.vector_clock_recently_seen
            .inc_to(vector_clock_id, seen_at);
        if self
            .vector_clock_first_seen
            .entry_for(vector_clock_id)
            .is_none()
        {
            self.vector_clock_first_seen
                .inc_to(vector_clock_id, seen_at);
        }
    }

    /// Merge the write and first-seen clocks from `other` into `self`.
    ///
    /// NOTE(review): the sibling node weights also merge
    /// `vector_clock_recently_seen` here; this one does not — confirm intentional.
    pub fn merge_clocks(
        &mut self,
        change_set: &ChangeSetPointer,
        other: &OrderingNodeWeight,
    ) -> NodeWeightResult<()> {
        self.vector_clock_write
            .merge(change_set.vector_clock_id(), other.vector_clock_write())?;
        self.vector_clock_first_seen.merge(
            change_set.vector_clock_id(),
            other.vector_clock_first_seen(),
        )?;

        Ok(())
    }

    pub fn merkle_tree_hash(&self) -> ContentHash {
        self.merkle_tree_hash
    }

    /// Create an empty ordering weight; `order` starts empty and
    /// `vector_clock_recently_seen` starts as a default (empty) clock.
    pub fn new(change_set: &ChangeSetPointer) -> NodeWeightResult<Self> {
        Ok(Self {
            id: change_set.generate_ulid()?,
            lineage_id: change_set.generate_ulid()?,
            vector_clock_write: VectorClock::new(change_set.vector_clock_id())?,
            vector_clock_first_seen: VectorClock::new(change_set.vector_clock_id())?,
            ..Default::default()
        })
    }

    /// Clone `self` with its write clock incremented for `change_set`.
    pub fn new_with_incremented_vector_clock(
        &self,
        change_set: &ChangeSetPointer,
    ) -> NodeWeightResult<Self> {
        let mut new_ordering_weight = self.clone();
        new_ordering_weight.increment_vector_clock(change_set)?;

        Ok(new_ordering_weight)
    }

    /// Hash identifying this node for graph comparison; derived from `order`.
    pub fn node_hash(&self) -> ContentHash {
        self.content_hash()
    }

    pub fn order(&self) -> &Vec<Ulid> {
        &self.order
    }

    pub fn set_merkle_tree_hash(&mut self, new_hash: ContentHash) {
        self.merkle_tree_hash = new_hash;
    }

    /// Replace the ordering, recompute the content hash, and bump the write clock.
    pub fn set_order(
        &mut self,
        change_set: &ChangeSetPointer,
        order: Vec<Ulid>,
    ) -> NodeWeightResult<()> {
        self.order = order;
        self.update_content_hash();
        self.increment_vector_clock(change_set)?;

        Ok(())
    }

    /// Force the recently-seen entry for `change_set` to exactly `new_val`.
    pub fn set_vector_clock_recently_seen_to(
        &mut self,
        change_set: &ChangeSetPointer,
        new_val: DateTime<Utc>,
    ) {
        self.vector_clock_recently_seen
            .inc_to(change_set.vector_clock_id(), new_val);
    }

    // Recompute `content_hash` as the hash of the ids in `order`, stringified and
    // joined with single spaces, so any reorder/add/remove changes the hash.
    fn update_content_hash(&mut self) {
        let mut content_hasher = ContentHash::hasher();
        let concat_elements = self
            .order
            .iter()
            .map(|e| e.to_string())
            .collect::<Vec<String>>()
            .join(" ");
        let content_bytes = concat_elements.as_bytes();
        content_hasher.update(content_bytes);

        self.content_hash = content_hasher.finalize();
    }

    pub fn vector_clock_first_seen(&self) -> &VectorClock {
        &self.vector_clock_first_seen
    }

    pub fn vector_clock_recently_seen(&self) -> &VectorClock {
        &self.vector_clock_recently_seen
    }

    pub fn vector_clock_write(&self) -> &VectorClock {
        &self.vector_clock_write
    }

    /// Append `id` to the end of the ordering (hash and clocks updated via
    /// `set_order`).
    pub fn push_to_order(
        &mut self,
        change_set: &ChangeSetPointer,
        id: Ulid,
    ) -> NodeWeightResult<()> {
        let mut order = self.order().to_owned();
        order.push(id);
        self.set_order(change_set, order)
    }

    /// Remove every occurrence of `id` from the ordering. Returns `true` if anything
    /// was removed (and the order/hash/clocks were updated), `false` otherwise.
    pub fn remove_from_order(
        &mut self,
        change_set: &ChangeSetPointer,
        id: Ulid,
    ) -> NodeWeightResult<bool> {
        let mut order = self.order.to_owned();
        order.retain(|&item_id| item_id != id);
        if order.len() != self.order().len() {
            self.set_order(change_set, order)?;
            Ok(true)
        } else {
            Ok(false)
        }
    }
}

impl std::fmt::Debug for OrderingNodeWeight {
    fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
        f.debug_struct("OrderingNodeWeight")
            .field("id", &self.id.to_string())
            .field("lineage_id", &self.lineage_id.to_string())
            .field(
                "order",
                &self
                    .order
                    .iter()
                    .map(|id| id.to_string())
                    .collect::<Vec<String>>(),
            )
            .field("content_hash", &self.content_hash)
            .field("merkle_tree_hash", &self.merkle_tree_hash)
            .field("vector_clock_first_seen", &self.vector_clock_first_seen)
            .field(
                "vector_clock_recently_seen",
                &self.vector_clock_recently_seen,
            )
            .field("vector_clock_write", &self.vector_clock_write)
            .finish()
    }
}
change_set_pointer::ChangeSetPointer, + workspace_snapshot::{ + content_address::ContentAddress, + graph::LineageId, + node_weight::{NodeWeightError, NodeWeightResult}, + vector_clock::VectorClock, + }, + PropKind, +}; + +#[derive(Clone, Serialize, Deserialize)] +pub struct PropNodeWeight { + id: Ulid, + lineage_id: LineageId, + content_address: ContentAddress, + merkle_tree_hash: ContentHash, + kind: PropKind, + name: String, + can_be_used_as_prototype_arg: bool, + vector_clock_first_seen: VectorClock, + vector_clock_recently_seen: VectorClock, + vector_clock_write: VectorClock, +} + +impl PropNodeWeight { + pub fn new( + change_set: &ChangeSetPointer, + id: Ulid, + content_address: ContentAddress, + kind: PropKind, + name: String, + ) -> NodeWeightResult { + Ok(Self { + id, + lineage_id: change_set.generate_ulid()?, + content_address, + merkle_tree_hash: ContentHash::default(), + kind, + name, + can_be_used_as_prototype_arg: false, + vector_clock_first_seen: VectorClock::new(change_set.vector_clock_id())?, + vector_clock_recently_seen: VectorClock::new(change_set.vector_clock_id())?, + vector_clock_write: VectorClock::new(change_set.vector_clock_id())?, + }) + } + + pub fn kind(&self) -> PropKind { + self.kind + } + + pub fn content_address(&self) -> ContentAddress { + self.content_address + } + + pub fn content_hash(&self) -> ContentHash { + self.content_address.content_hash() + } + + pub fn can_be_used_as_prototype_arg(&self) -> bool { + self.can_be_used_as_prototype_arg + } + + pub fn set_can_be_used_as_prototype_arg(&mut self, can_be_used: bool) { + self.can_be_used_as_prototype_arg = can_be_used; + } + + pub fn id(&self) -> Ulid { + self.id + } + + pub fn increment_vector_clock( + &mut self, + change_set: &ChangeSetPointer, + ) -> NodeWeightResult<()> { + self.vector_clock_write.inc(change_set.vector_clock_id())?; + self.vector_clock_recently_seen + .inc(change_set.vector_clock_id())?; + + Ok(()) + } + + pub fn lineage_id(&self) -> Ulid { + self.lineage_id + 
} + + pub fn mark_seen_at(&mut self, vector_clock_id: VectorClockId, seen_at: DateTime) { + self.vector_clock_recently_seen + .inc_to(vector_clock_id, seen_at); + if self + .vector_clock_first_seen + .entry_for(vector_clock_id) + .is_none() + { + self.vector_clock_first_seen + .inc_to(vector_clock_id, seen_at); + } + } + + pub fn merge_clocks( + &mut self, + change_set: &ChangeSetPointer, + other: &Self, + ) -> NodeWeightResult<()> { + self.vector_clock_write + .merge(change_set.vector_clock_id(), &other.vector_clock_write)?; + self.vector_clock_first_seen + .merge(change_set.vector_clock_id(), &other.vector_clock_first_seen)?; + self.vector_clock_recently_seen.merge( + change_set.vector_clock_id(), + &other.vector_clock_recently_seen, + )?; + + Ok(()) + } + + pub fn merkle_tree_hash(&self) -> ContentHash { + self.merkle_tree_hash + } + + pub fn name(&self) -> &str { + &self.name + } + + pub fn new_content_hash(&mut self, content_hash: ContentHash) -> NodeWeightResult<()> { + let new_address = match &self.content_address { + ContentAddress::Prop(_) => ContentAddress::Prop(content_hash), + other => { + return Err(NodeWeightError::InvalidContentAddressForWeightKind( + Into::::into(other).to_string(), + ContentAddressDiscriminants::Prop.to_string(), + )); + } + }; + + self.content_address = new_address; + + Ok(()) + } + + pub fn new_with_incremented_vector_clock( + &self, + change_set: &ChangeSetPointer, + ) -> NodeWeightResult { + let mut new_node_weight = self.clone(); + new_node_weight.increment_vector_clock(change_set)?; + + Ok(new_node_weight) + } + + pub fn node_hash(&self) -> ContentHash { + ContentHash::from(&serde_json::json![{ + "content_address": self.content_address, + "kind": self.kind, + "name": self.name, + }]) + } + + pub fn set_merkle_tree_hash(&mut self, new_hash: ContentHash) { + self.merkle_tree_hash = new_hash; + } + + pub fn set_vector_clock_recently_seen_to( + &mut self, + change_set: &ChangeSetPointer, + new_val: DateTime, + ) { + 
self.vector_clock_recently_seen + .inc_to(change_set.vector_clock_id(), new_val); + } + + pub fn vector_clock_first_seen(&self) -> &VectorClock { + &self.vector_clock_first_seen + } + + pub fn vector_clock_recently_seen(&self) -> &VectorClock { + &self.vector_clock_recently_seen + } + + pub fn vector_clock_write(&self) -> &VectorClock { + &self.vector_clock_write + } +} + +impl std::fmt::Debug for PropNodeWeight { + fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { + f.debug_struct("PropNodeWeight") + .field("id", &self.id().to_string()) + .field("lineage_id", &self.lineage_id.to_string()) + .field("kind", &self.kind) + .field("name", &self.name) + .field("content_hash", &self.content_hash()) + .field("merkle_tree_hash", &self.merkle_tree_hash) + .field("vector_clock_first_seen", &self.vector_clock_first_seen) + .field( + "vector_clock_recently_seen", + &self.vector_clock_recently_seen, + ) + .field("vector_clock_write", &self.vector_clock_write) + .finish() + } +} diff --git a/lib/dal/src/workspace_snapshot/update.rs b/lib/dal/src/workspace_snapshot/update.rs new file mode 100644 index 0000000000..f27cf2cc29 --- /dev/null +++ b/lib/dal/src/workspace_snapshot/update.rs @@ -0,0 +1,28 @@ +use petgraph::prelude::*; + +use super::edge_weight::{EdgeWeight, EdgeWeightKindDiscriminants}; +use serde::{Deserialize, Serialize}; + +#[remain::sorted] +#[derive(Debug, Clone, PartialEq, Eq, Deserialize, Serialize)] +pub enum Update { + NewEdge { + source: NodeIndex, + // Check if already exists in "onto" (source). Grab node weight from "to_rebase" + // (destination) and see if there is an equivalent node (id and lineage) in "onto". + // If not, use "import_subgraph". + destination: NodeIndex, + edge_weight: EdgeWeight, + }, + RemoveEdge { + source: NodeIndex, + destination: NodeIndex, + edge_kind: EdgeWeightKindDiscriminants, + }, + ReplaceSubgraph { + onto: NodeIndex, + // Check if already exists in "onto". 
use petgraph::prelude::*;

use super::edge_weight::{EdgeWeight, EdgeWeightKindDiscriminants};
use serde::{Deserialize, Serialize};

/// A single action the rebaser applies to bring one graph in line with another.
#[remain::sorted]
#[derive(Debug, Clone, PartialEq, Eq, Deserialize, Serialize)]
pub enum Update {
    NewEdge {
        source: NodeIndex,
        // Check if already exists in "onto" (source). Grab node weight from "to_rebase"
        // (destination) and see if there is an equivalent node (id and lineage) in "onto".
        // If not, use "import_subgraph".
        destination: NodeIndex,
        edge_weight: EdgeWeight,
    },
    RemoveEdge {
        source: NodeIndex,
        destination: NodeIndex,
        edge_kind: EdgeWeightKindDiscriminants,
    },
    ReplaceSubgraph {
        onto: NodeIndex,
        // Check if already exists in "onto". Grab node weight from "to_rebase" and see if there is
        // an equivalent node (id and lineage) in "onto". If not, use "import_subgraph".
        to_rebase: NodeIndex,
    },
}

//! Vector Clocks

use std::collections::HashMap;

use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize};
use thiserror::Error;

use crate::pk;
use crate::workspace_snapshot::lamport_clock::{LamportClock, LamportClockError};

#[derive(Debug, Error)]
pub enum VectorClockError {
    #[error("Lamport Clock Error: {0}")]
    LamportClock(#[from] LamportClockError),
}

pub type VectorClockResult<T> = Result<T, VectorClockError>;

pk!(VectorClockId);

/// Map from clock id (one per change set / actor) to that actor's Lamport clock.
#[derive(Default, Serialize, Deserialize, PartialEq, Eq, Clone)]
pub struct VectorClock {
    entries: HashMap<VectorClockId, LamportClock>,
}

impl VectorClock {
    /// Create a new [`VectorClock`] with an entry for [`VectorClockId`].
    pub fn new(vector_clock_id: VectorClockId) -> VectorClockResult<VectorClock> {
        let lamport_clock = LamportClock::new()?;
        let mut entries = HashMap::new();
        entries.insert(vector_clock_id, lamport_clock);

        Ok(VectorClock { entries })
    }

    /// The Lamport clock for `vector_clock_id`, if this clock has seen it.
    pub fn entry_for(&self, vector_clock_id: VectorClockId) -> Option<LamportClock> {
        self.entries.get(&vector_clock_id).copied()
    }

    /// True if any entry in this clock is strictly newer than `clock_stamp`.
    pub fn has_entries_newer_than(&self, clock_stamp: LamportClock) -> bool {
        self.entries.values().any(|v| *v > clock_stamp)
    }

    /// Advance the entry for `vector_clock_id` to `new_clock_value`, inserting an entry
    /// at that value if there wasn't one.
    pub fn inc_to(&mut self, vector_clock_id: VectorClockId, new_clock_value: DateTime<Utc>) {
        if let Some(lamport_clock) = self.entries.get_mut(&vector_clock_id) {
            lamport_clock.inc_to(new_clock_value);
        } else {
            self.entries.insert(
                vector_clock_id,
                LamportClock::new_with_value(new_clock_value),
            );
        }
    }

    /// Increment the entry for [`VectorClockId`], adding one if there wasn't one already.
    pub fn inc(&mut self, vector_clock_id: VectorClockId) -> VectorClockResult<()> {
        if let Some(lamport_clock) = self.entries.get_mut(&vector_clock_id) {
            lamport_clock.inc()?;
        } else {
            self.entries.insert(vector_clock_id, LamportClock::new()?);
        }

        Ok(())
    }

    /// Add all entries in `other` to `self`, taking the most recent value if the entry already
    /// exists in `self`, then increment the entry for [`VectorClockId`] (adding one if it is not
    /// already there).
    pub fn merge(
        &mut self,
        vector_clock_id: VectorClockId,
        other: &VectorClock,
    ) -> VectorClockResult<()> {
        for (other_vector_clock_id, other_lamport_clock) in other.entries.iter() {
            if let Some(lamport_clock) = self.entries.get_mut(other_vector_clock_id) {
                lamport_clock.merge(other_lamport_clock);
            } else {
                self.entries
                    .insert(*other_vector_clock_id, *other_lamport_clock);
            }
        }
        self.inc(vector_clock_id)?;

        Ok(())
    }

    /// Return a new [`VectorClock`] with the entry for [`VectorClockId`] incremented.
    pub fn fork(&self, vector_clock_id: VectorClockId) -> VectorClockResult<VectorClock> {
        let mut forked = self.clone();
        forked.inc(vector_clock_id)?;

        Ok(forked)
    }

    /// Returns true if all entries in `other` are present in `self`, and `<=` the entry in
    /// `self`, meaning that `self` has already seen/incorporated all of the information
    /// in `other`.
    pub fn is_newer_than(&self, other: &VectorClock) -> bool {
        for (other_vector_clock_id, other_lamport_clock) in &other.entries {
            if let Some(my_clock) = self.entries.get(other_vector_clock_id) {
                if other_lamport_clock > my_clock {
                    return false;
                }
            } else {
                // `other` has information from an actor we have never seen.
                return false;
            }
        }

        true
    }
}

impl std::fmt::Debug for VectorClock {
    fn fmt(&self, fmt: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        fmt.debug_map()
            .entries(self.entries.iter().map(|(k, v)| (k.to_string(), v)))
            .finish()
    }
}
SchemaVariantId, SocketId, StandardModelError, - TransactionsError, WorkspacePk, + func::binding::LogLinePayload, pkg::ModuleImportedPayload, user::CursorPayload, ChangeSetPk, + DalContext, PropId, StandardModelError, TransactionsError, WorkspacePk, }; #[remain::sorted] @@ -55,8 +40,8 @@ pub type WsEventResult = Result; #[serde(tag = "kind", content = "data")] #[allow(clippy::large_enum_variant)] pub enum WsPayload { - ActionAdded(ActionAddedPayload), - ActionRemoved(ActionRemovedPayload), + // ActionAdded(ActionAddedPayload), + // ActionRemoved(ActionRemovedPayload), AsyncError(ErrorPayload), AsyncFinish(FinishPayload), ChangeSetAbandoned(ChangeSetActorPayload), @@ -71,33 +56,29 @@ pub enum WsPayload { ChangeSetMergeVote(ChangeSetMergeVotePayload), ChangeSetWritten(ChangeSetPk), CheckedQualifications(QualificationCheckPayload), - CodeGenerated(CodeGeneratedPayload), + // CodeGenerated(CodeGeneratedPayload), ComponentCreated(ComponentCreatedPayload), ComponentUpdated(ComponentUpdatedPayload), Cursor(CursorPayload), - FixBatchReturn(FixBatchReturn), - FixReturn(FixReturn), - FuncCreated(FuncCreatedPayload), - FuncDeleted(FuncDeletedPayload), - FuncReverted(FuncRevertedPayload), - FuncSaved(FuncSavedPayload), - ImportWorkspaceVote(ImportWorkspaceVotePayload), + // FixBatchReturn(FixBatchReturn), + // FixReturn(FixReturn), + // ImportWorkspaceVote(ImportWorkspaceVotePayload), LogLine(LogLinePayload), ModuleImported(ModuleImportedPayload), Online(OnlinePayload), - ResourceRefreshed(ResourceRefreshedPayload), - SchemaCreated(SchemaPk), - SchemaVariantDefinitionCloned(SchemaVariantDefinitionClonedPayload), - SchemaVariantDefinitionCreated(SchemaVariantDefinitionCreatedPayload), - SchemaVariantDefinitionFinished(FinishSchemaVariantDefinitionPayload), - SchemaVariantDefinitionSaved(SchemaVariantDefinitionSavedPayload), + // ResourceRefreshed(ResourceRefreshedPayload), + // SchemaCreated(SchemaPk), + // SchemaVariantDefinitionCloned(SchemaVariantDefinitionClonedPayload), + 
// SchemaVariantDefinitionCreated(SchemaVariantDefinitionCreatedPayload), + // SchemaVariantDefinitionFinished(FinishSchemaVariantDefinitionPayload), + // SchemaVariantDefinitionSaved(SchemaVariantDefinitionSavedPayload), SecretCreated(SecretCreatedPayload), SecretUpdated(SecretUpdatedPayload), - StatusUpdate(StatusMessage), - WorkspaceExported(WorkspaceExportPayload), - WorkspaceImportBeginApprovalProcess(WorkspaceImportApprovalActorPayload), - WorkspaceImportCancelApprovalProcess(WorkspaceActorPayload), - WorkspaceImported(WorkspaceImportPayload), + // StatusUpdate(StatusMessage), + // WorkspaceExported(WorkspaceExportPayload), + // WorkspaceImportBeginApprovalProcess(WorkspaceImportApprovalActorPayload), + // WorkspaceImportCancelApprovalProcess(WorkspaceActorPayload), + // WorkspaceImported(WorkspaceImportPayload), } #[remain::sorted] @@ -106,33 +87,35 @@ pub enum WsPayload { pub enum StatusValueKind { Attribute(PropId), CodeGen, - InputSocket(SocketId), + // TODO(nick): sockets are no more, so replace this with the provider id. + // InputSocket(SocketId), Internal, - OutputSocket(SocketId), + // TODO(nick): sockets are no more, so replace this with the provider id. 
+ // OutputSocket(SocketId), Qualification, } -#[derive(Deserialize, Serialize, Debug, Clone, Copy, Eq, Hash, PartialEq)] -#[serde(rename_all = "camelCase")] -pub struct AttributeValueStatusUpdate { - value_id: AttributeValueId, - component_id: ComponentId, - value_kind: StatusValueKind, -} - -impl AttributeValueStatusUpdate { - pub fn new( - value_id: AttributeValueId, - component_id: ComponentId, - value_kind: StatusValueKind, - ) -> Self { - Self { - value_id, - component_id, - value_kind, - } - } -} +// #[derive(Deserialize, Serialize, Debug, Clone, Copy, Eq, Hash, PartialEq)] +// #[serde(rename_all = "camelCase")] +// pub struct AttributeValueStatusUpdate { +// value_id: AttributeValueId, +// component_id: ComponentId, +// value_kind: StatusValueKind, +// } + +// impl AttributeValueStatusUpdate { +// pub fn new( +// value_id: AttributeValueId, +// component_id: ComponentId, +// value_kind: StatusValueKind, +// ) -> Self { +// Self { +// value_id, +// component_id, +// value_kind, +// } +// } +// } #[derive(Deserialize, Serialize, Debug, Clone, Eq, PartialEq)] pub struct WsEvent { @@ -195,7 +178,6 @@ impl WsEvent { .nats() .publish_immediately(self.workspace_subject(), &self) .await?; - Ok(()) } } @@ -222,38 +204,38 @@ impl WsEvent { } } -#[remain::sorted] -#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq)] -#[serde(tag = "type", content = "data")] -pub enum AttributePrototypeContextKind { - ExternalProvider { name: String }, - Prop { path: String, kind: PropKind }, -} - -#[derive(Clone, Deserialize, Serialize, Debug, PartialEq, Eq)] -#[serde(rename_all = "camelCase")] -pub struct AttributePrototypeView { - pub id: AttributePrototypeId, - pub func_id: FuncId, - pub func_name: String, - pub variant: Option, - pub key: Option, - pub context: AttributePrototypeContextKind, -} - -#[derive(Clone, Deserialize, Serialize, Debug, PartialEq, Eq)] -#[serde(rename_all = "camelCase")] -pub struct FinishSchemaVariantDefinitionPayload { - pub task_id: Ulid, - pub 
schema_variant_id: SchemaVariantId, - pub detached_attribute_prototypes: Vec, -} - -impl WsEvent { - pub async fn schema_variant_definition_finish( - ctx: &DalContext, - payload: FinishSchemaVariantDefinitionPayload, - ) -> WsEventResult { - WsEvent::new(ctx, WsPayload::SchemaVariantDefinitionFinished(payload)).await - } -} +// #[remain::sorted] +// #[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq)] +// #[serde(tag = "type", content = "data")] +// pub enum AttributePrototypeContextKind { +// ExternalProvider { name: String }, +// Prop { path: String, kind: PropKind }, +// } +// +// #[derive(Clone, Deserialize, Serialize, Debug, PartialEq, Eq)] +// #[serde(rename_all = "camelCase")] +// pub struct AttributePrototypeView { +// pub id: AttributePrototypeId, +// pub func_id: FuncId, +// pub func_name: String, +// pub variant: Option, +// pub key: Option, +// pub context: AttributePrototypeContextKind, +// } +// +// #[derive(Clone, Deserialize, Serialize, Debug, PartialEq, Eq)] +// #[serde(rename_all = "camelCase")] +// pub struct FinishSchemaVariantDefinitionPayload { +// pub task_id: Ulid, +// pub schema_variant_id: SchemaVariantId, +// pub detached_attribute_prototypes: Vec, +// } +// +// impl WsEvent { +// pub async fn schema_variant_definition_finish( +// ctx: &DalContext, +// payload: FinishSchemaVariantDefinitionPayload, +// ) -> WsEventResult { +// WsEvent::new(ctx, WsPayload::SchemaVariantDefinitionFinished(payload)).await +// } +// } diff --git a/lib/dal/tests/integration.rs b/lib/dal/tests/integration.rs index 0ad4379906..c1e017a6fc 100644 --- a/lib/dal/tests/integration.rs +++ b/lib/dal/tests/integration.rs @@ -1,3 +1,4 @@ const TEST_PG_DBNAME: &str = "si_test_dal"; +const TEST_CONTENT_STORE_PG_DBNAME: &str = "si_test_content_store"; mod integration_test; diff --git a/lib/dal/tests/integration_test/internal/key_pair.rs b/lib/dal/tests/integration_test/internal/key_pair.rs index a5e4c8b0e8..c9d22a0312 100644 --- 
a/lib/dal/tests/integration_test/internal/key_pair.rs +++ b/lib/dal/tests/integration_test/internal/key_pair.rs @@ -1,8 +1,5 @@ -use dal::{key_pair::PublicKey, DalContext, KeyPair, Tenancy}; -use dal_test::{ - test, - test_harness::{create_key_pair, create_workspace}, -}; +use dal::{key_pair::PublicKey, DalContext, KeyPair, Tenancy, Workspace, WorkspacePk}; +use dal_test::{test, test_harness::create_key_pair}; #[test] async fn new(ctx: &DalContext) { @@ -13,7 +10,9 @@ async fn new(ctx: &DalContext) { #[test] async fn belongs_to(ctx: &mut DalContext) { - let workspace = create_workspace(ctx).await; + let workspace = Workspace::new(ctx, WorkspacePk::generate(), "new") + .await + .expect("cannot create workspace"); ctx.update_tenancy(Tenancy::new(*workspace.pk())); let key_pair = create_key_pair(ctx).await; @@ -26,7 +25,9 @@ async fn belongs_to(ctx: &mut DalContext) { #[test] async fn public_key_get_current(ctx: &mut DalContext) { - let workspace = create_workspace(ctx).await; + let workspace = Workspace::new(ctx, WorkspacePk::generate(), "new") + .await + .expect("cannot create workspace"); ctx.update_tenancy(Tenancy::new(*workspace.pk())); let first_key_pair = create_key_pair(ctx).await; diff --git a/lib/dal/tests/integration_test/internal/mod.rs b/lib/dal/tests/integration_test/internal/mod.rs index b6ea6381d8..8cda84cb95 100644 --- a/lib/dal/tests/integration_test/internal/mod.rs +++ b/lib/dal/tests/integration_test/internal/mod.rs @@ -1,27 +1,30 @@ -mod action_prototype; -mod attribute; -mod change_set; -mod component; -mod diagram; -mod edge; -mod func; -mod func_execution; -mod graph; -mod history_event; -mod key_pair; -mod node; -mod node_menu; -mod pkg; -mod prop; -mod prop_tree; -mod property_editor; -mod provider; -mod schema; +// mod action_prototype; +// mod attribute; +// mod change_set; +// mod component; +// mod diagram; +// mod edge; +// mod func; +// mod func_execution; +// mod graph; +// mod history_event; +// mod key_pair; +mod new_engine; +// mod 
node; +// mod node_menu; +// mod pkg; +// mod prop; +// mod prop_tree; +// mod property_editor; +// mod provider; +// mod schema; mod secret; -mod socket; -mod standard_model; -mod status_update; -mod tenancy; -mod user; -mod visibility; -mod workspace; +// mod socket; +// mod standard_model; +// mod status_update; +// mod tenancy; +// mod user; +// mod validation_prototype; +// mod validation_resolver; +// mod visibility; +// mod workspace; diff --git a/lib/dal/tests/integration_test/internal/new_engine.rs b/lib/dal/tests/integration_test/internal/new_engine.rs new file mode 100644 index 0000000000..623250feac --- /dev/null +++ b/lib/dal/tests/integration_test/internal/new_engine.rs @@ -0,0 +1,15 @@ +//! This is a temporary module to co-locate all tests for the new engine layer. Once everything is +//! working, this module will go away and the tests will be moved or removed. +//! +//! For all tests in this module, provide "SI_TEST_BUILTIN_SCHEMAS=none" or "SI_TEST_BUILTIN_SCHEMAS=test" as an +//! environment variable. 
+ +mod before_funcs; +mod builtins; +mod component; +mod connection; +mod frame; +mod prop; +mod property_editor; +mod rebaser; +mod sdf_mock; diff --git a/lib/dal/tests/integration_test/internal/new_engine/before_funcs.rs b/lib/dal/tests/integration_test/internal/new_engine/before_funcs.rs new file mode 100644 index 0000000000..5dba5d742f --- /dev/null +++ b/lib/dal/tests/integration_test/internal/new_engine/before_funcs.rs @@ -0,0 +1,232 @@ +use dal::prop::PropPath; +use dal::property_editor::values::PropertyEditorValues; +use dal::{ + AttributeValue, Component, DalContext, EncryptedSecret, ExternalProvider, Prop, Schema, + SchemaVariant, +}; +use dal_test::test_harness::encrypt_message; +use dal_test::{test, WorkspaceSignup}; + +/// Run with the following environment variable: +/// ``` +/// SI_TEST_BUILTIN_SCHEMAS=test +/// ``` +#[test] +async fn secret_definition_works_with_dummy_qualification( + ctx: &mut DalContext, + nw: &WorkspaceSignup, +) { + let secret_definition_schema = Schema::find_by_name(ctx, "bethesda-secret") + .await + .expect("could not find schema") + .expect("schema not found"); + let secret_definition_schema_variant = + SchemaVariant::list_for_schema(ctx, secret_definition_schema.id()) + .await + .expect("failed listing schema variants") + .pop() + .expect("no schema variant found"); + let secret_definition_schema_variant_id = secret_definition_schema_variant.id(); + + let secret_definition_component = Component::new( + ctx, + "secret-definition", + secret_definition_schema_variant_id, + None, + ) + .await + .expect("could not create component"); + let secret_definition_component_id = secret_definition_component.id(); + + // This is the name of the secret definition from the "BethesdaSecret" test exclusive schema. + let secret_definition_name = "fake"; + + // Cache the output socket that will contain the secret id. 
+ let output_socket = ExternalProvider::find_with_name( + ctx, + secret_definition_name, + secret_definition_schema_variant_id, + ) + .await + .expect("could not perform find with name") + .expect("output socket not found"); + + // Cache the prop we need for attribute value update. + let reference_to_secret_prop = Prop::find_prop_by_path( + ctx, + secret_definition_schema_variant_id, + &PropPath::new(["root", "secrets", secret_definition_name]), + ) + .await + .expect("could not find prop by path"); + + // First scenario: create and use a secret that will fail the qualification. + { + // Create a secret with a value that will fail the qualification. + let encrypted_message_that_will_fail_the_qualification = encrypt_message( + ctx, + nw.key_pair.pk(), + &serde_json::json![{"value": "howard"}], + ) + .await; + let secret_that_will_fail_the_qualification = EncryptedSecret::new( + ctx, + "secret that will fail the qualification", + secret_definition_name.to_string(), + None, + &encrypted_message_that_will_fail_the_qualification, + nw.key_pair.pk(), + Default::default(), + Default::default(), + ) + .await + .expect("cannot create secret"); + + // Commit and update snapshot to visibility. + let conflicts = ctx.blocking_commit().await.expect("unable to commit"); + assert!(conflicts.is_none()); + ctx.update_snapshot_to_visibility() + .await + .expect("unable to update snapshot to visibility"); + + // Update the reference to secret prop with the secret it that will fail the qualification. 
+ let property_values = PropertyEditorValues::assemble(ctx, secret_definition_component_id) + .await + .expect("unable to list prop values"); + let reference_to_secret_attribute_value_id = property_values + .find_by_prop_id(reference_to_secret_prop.id) + .expect("unable to find attribute value"); + + let fail_value = + serde_json::json!(secret_that_will_fail_the_qualification.id().to_string()); + AttributeValue::update( + ctx, + reference_to_secret_attribute_value_id, + Some(fail_value.clone()), + ) + .await + .expect("unable to perform attribute value update"); + + // Commit and update snapshot to visibility. + let conflicts = ctx.blocking_commit().await.expect("unable to commit"); + assert!(conflicts.is_none()); + ctx.update_snapshot_to_visibility() + .await + .expect("unable to update snapshot to visibility"); + + // Check that the output socket value looks correct. + let mut output_socket_attribute_value_ids = + ExternalProvider::attribute_values_for_external_provider_id(ctx, output_socket.id()) + .await + .expect("could not perform attribute values for external provider id"); + let output_socket_attribute_value_id = output_socket_attribute_value_ids + .pop() + .expect("no output attribute value found"); + assert!(output_socket_attribute_value_ids.is_empty()); + let output_socket_attribute_value = + AttributeValue::get_by_id(ctx, output_socket_attribute_value_id) + .await + .expect("could not get attribute value by id") + .value(ctx) + .await + .expect("could not get value") + .expect("no value found"); + assert_eq!(fail_value, output_socket_attribute_value); + + // TODO(nick): restore the qualification check. + // // Check that the qualification fails. 
+ // let mut qualifications = + // Component::list_qualifications(ctx, secret_definition_component_id) + // .await + // .expect("could not list qualifications"); + // let qualification = qualifications.pop().expect("no qualifications found"); + // assert!(qualifications.is_empty()); + // assert_eq!( + // QualificationSubCheckStatus::Failure, // expected + // qualification.result.expect("no result found").status // actual + // ); + } + + // Second scenario: create and use a secret that will pass the qualification. + { + // Create a secret with a value that will pass the qualification. + let encrypted_message_that_will_pass_the_qualification = + encrypt_message(ctx, nw.key_pair.pk(), &serde_json::json![{"value": "todd"}]).await; + let secret_that_will_pass_the_qualification = EncryptedSecret::new( + ctx, + "secret that will pass the qualification", + secret_definition_name.to_string(), + None, + &encrypted_message_that_will_pass_the_qualification, + nw.key_pair.pk(), + Default::default(), + Default::default(), + ) + .await + .expect("cannot create secret"); + + // Commit and update snapshot to visibility. + let conflicts = ctx.blocking_commit().await.expect("unable to commit"); + assert!(conflicts.is_none()); + ctx.update_snapshot_to_visibility() + .await + .expect("unable to update snapshot to visibility"); + + // Update the reference to secret prop with the secret it that will pass the qualification. 
+ let property_values = PropertyEditorValues::assemble(ctx, secret_definition_component_id) + .await + .expect("unable to list prop values"); + let reference_to_secret_attribute_value_id = property_values + .find_by_prop_id(reference_to_secret_prop.id) + .expect("could not find attribute value"); + + let success_value = + serde_json::json!(secret_that_will_pass_the_qualification.id().to_string()); + AttributeValue::update( + ctx, + reference_to_secret_attribute_value_id, + Some(success_value.clone()), + ) + .await + .expect("unable to perform attribute value update"); + + // Commit and update snapshot to visibility. + let conflicts = ctx.blocking_commit().await.expect("unable to commit"); + assert!(conflicts.is_none()); + ctx.update_snapshot_to_visibility() + .await + .expect("unable to update snapshot to visibility"); + + // Check that the output socket value looks correct. + let mut output_socket_attribute_value_ids = + ExternalProvider::attribute_values_for_external_provider_id(ctx, output_socket.id()) + .await + .expect("could not perform attribute values for external provider id"); + let output_socket_attribute_value_id = output_socket_attribute_value_ids + .pop() + .expect("no output attribute value found"); + assert!(output_socket_attribute_value_ids.is_empty()); + let output_socket_attribute_value = + AttributeValue::get_by_id(ctx, output_socket_attribute_value_id) + .await + .expect("could not get attribute value by id") + .value(ctx) + .await + .expect("could not get value") + .expect("no value found"); + assert_eq!(success_value, output_socket_attribute_value); + + // TODO(nick): restore the qualification check. + // // Check that the qualification passes. 
+ // let mut qualifications = + // Component::list_qualifications(ctx, secret_definition_component_id) + // .await + // .expect("could not list qualifications"); + // let qualification = qualifications.pop().expect("no qualifications found"); + // assert!(qualifications.is_empty()); + // assert_eq!( + // QualificationSubCheckStatus::Success, // expected + // qualification.result.expect("no result found").status // actual + // ); + } +} diff --git a/lib/dal/tests/integration_test/internal/new_engine/builtins.rs b/lib/dal/tests/integration_test/internal/new_engine/builtins.rs new file mode 100644 index 0000000000..605784cd4e --- /dev/null +++ b/lib/dal/tests/integration_test/internal/new_engine/builtins.rs @@ -0,0 +1,77 @@ +use dal::workspace_snapshot::edge_weight::EdgeWeightKindDiscriminants; +use dal::{func::intrinsics::IntrinsicFunc, DalContext, Func, Schema, SchemaVariant}; +use dal_test::test; +use petgraph::prelude::*; +use strum::IntoEnumIterator; + +#[test] +async fn docker_image_has_one_qualfiication_map_prop(ctx: &DalContext) { + let docker_image = Schema::list(ctx) + .await + .expect("list schemas") + .iter() + .find(|schema| schema.name() == "Docker Image") + .expect("docker image does not exist") + .to_owned(); + + let variant = SchemaVariant::list_for_schema(ctx, docker_image.id()) + .await + .expect("get schema variants") + .pop() + .expect("get default variant"); + + let root_prop_id = SchemaVariant::get_root_prop_id(ctx, variant.id()) + .await + .expect("get root prop for variant"); + + let workspace_snapshot = ctx.workspace_snapshot().expect("get snap").read().await; + + let child_prop_targets = workspace_snapshot + .outgoing_targets_for_edge_weight_kind(root_prop_id, EdgeWeightKindDiscriminants::Use) + .expect("get all child prop targets of root"); + + let qualification_props: Vec<&NodeIndex> = child_prop_targets + .iter() + .filter(|&child_prop_target| { + let node_weight = workspace_snapshot + .get_node_weight(*child_prop_target) + .expect("get 
node weight") + .get_prop_node_weight() + .expect("should be prop") + .to_owned(); + + node_weight.name() == "qualification" + }) + .collect(); + + assert_eq!(1, qualification_props.len()); +} + +#[test] +async fn builtin_funcs_and_schemas_are_not_empty(ctx: &DalContext) { + let funcs: Vec = Func::list(ctx) + .await + .expect("list funcs should work") + .iter() + .map(|f| f.name.to_owned()) + .collect(); + + // Check that the funcs at least contain all intrinsics. + let intrinsics: Vec = IntrinsicFunc::iter() + .map(|intrinsic| intrinsic.name().to_owned()) + .collect(); + for intrinsic in intrinsics { + assert!(funcs.contains(&intrinsic)); + } + + // Ensure that we have at least one schema variant for every schema and that we have at least + // one schema. + let schemas: Vec = Schema::list(ctx).await.expect("could not list schemas"); + assert!(!schemas.is_empty()); + for schema in schemas { + let schema_variants: Vec = SchemaVariant::list_for_schema(ctx, schema.id()) + .await + .expect("could not list schema variants"); + assert!(!schema_variants.is_empty()); + } +} diff --git a/lib/dal/tests/integration_test/internal/new_engine/component.rs b/lib/dal/tests/integration_test/internal/new_engine/component.rs new file mode 100644 index 0000000000..6e34108515 --- /dev/null +++ b/lib/dal/tests/integration_test/internal/new_engine/component.rs @@ -0,0 +1,527 @@ +use dal::attribute::value::DependentValueGraph; +use dal::component::{DEFAULT_COMPONENT_HEIGHT, DEFAULT_COMPONENT_WIDTH}; +use dal::diagram::Diagram; +use dal::prop::{Prop, PropPath}; +use dal::property_editor::values::PropertyEditorValues; +use dal::{AttributeValue, AttributeValueId}; +use dal::{Component, DalContext, Schema, SchemaVariant}; +use dal_test::test; +use pretty_assertions_sorted::assert_eq; + +#[test] +async fn update_and_insert_and_update(ctx: &mut DalContext) { + let docker_image = Schema::list(ctx) + .await + .expect("list schemas") + .iter() + .find(|schema| schema.name() == "Docker Image") + 
.expect("docker image does not exist") + .to_owned(); + + let variant = SchemaVariant::list_for_schema(ctx, docker_image.id()) + .await + .expect("get schema variants") + .pop() + .expect("get default variant"); + + let name = "a tulip in a cup"; + + let component = Component::new(ctx, name, variant.id(), None) + .await + .expect("able to create component"); + + let property_values = PropertyEditorValues::assemble(ctx, component.id()) + .await + .expect("able to list prop values"); + + let image_prop_id = Prop::find_prop_id_by_path( + ctx, + variant.id(), + &PropPath::new(["root", "domain", "image"]), + ) + .await + .expect("able to find image prop"); + + let exposed_ports_prop_id = Prop::find_prop_id_by_path( + ctx, + variant.id(), + &PropPath::new(["root", "domain", "ExposedPorts"]), + ) + .await + .expect("able to find exposed ports prop"); + + let exposed_ports_elem_prop_id = Prop::find_prop_id_by_path( + ctx, + variant.id(), + &PropPath::new(["root", "domain", "ExposedPorts", "ExposedPort"]), + ) + .await + .expect("able to find exposed ports element prop"); + + // Update image + let image_av_id = property_values + .find_by_prop_id(image_prop_id) + .expect("can't find default attribute value for ExposedPorts"); + + let image_value = serde_json::json!("fiona/apple"); + AttributeValue::update(ctx, image_av_id, Some(image_value.clone())) + .await + .expect("able to update image prop with 'fiona/apple'"); + + let exposed_port_attribute_value_id = property_values + .find_by_prop_id(exposed_ports_prop_id) + .expect("can't find default attribute value for ExposedPorts"); + + // Insert it unset first (to mimick frontend) + let inserted_av_id = AttributeValue::insert(ctx, exposed_port_attribute_value_id, None, None) + .await + .expect("able to insert"); + + // Before sending to the rebaser, confirm the value is there and it's the only one for the + // ExposedPorts prop + let property_values = PropertyEditorValues::assemble(ctx, component.id()) + .await + .expect("able 
to list prop values"); + + let (fetched_image_value, image_av_id_again) = property_values + .find_with_value_by_prop_id(image_prop_id) + .expect("able to get image av id from pvalues"); + + assert_eq!(image_av_id, image_av_id_again); + assert_eq!(image_value, fetched_image_value); + + let mut inserted_attribute_values: Vec = + property_values.list_by_prop_id(exposed_ports_elem_prop_id); + + assert_eq!(1, inserted_attribute_values.len()); + let pvalues_inserted_attribute_value_id = + inserted_attribute_values.pop().expect("get our av id"); + assert_eq!(inserted_av_id, pvalues_inserted_attribute_value_id); + + // Rebase! + let conflicts = ctx.blocking_commit().await.expect("unable to commit"); + assert!(conflicts.is_none()); + + ctx.update_snapshot_to_visibility() + .await + .expect("unable to update snapshot to visiblity"); + + dbg!(component + .materialized_view(ctx) + .await + .expect("materialized_view for component")); + + // Confirm after rebase + let property_values = PropertyEditorValues::assemble(ctx, component.id()) + .await + .expect("able to list prop values"); + + let (fetched_image_value, image_av_id_again) = property_values + .find_with_value_by_prop_id(image_prop_id) + .expect("able to get image av id from pvalues"); + + assert_eq!(image_av_id, image_av_id_again); + assert_eq!(image_value, fetched_image_value); + + let mut inserted_attribute_values = + property_values.list_with_values_by_prop_id(exposed_ports_elem_prop_id); + assert_eq!(1, inserted_attribute_values.len()); + let (inserted_value, pvalues_inserted_attribute_value_id) = + inserted_attribute_values.pop().expect("get our av id"); + assert_eq!(inserted_av_id, pvalues_inserted_attribute_value_id); + assert_eq!(inserted_value, serde_json::Value::Null); + + let value = serde_json::json!("i ran out of white doves feathers"); + + // Update the value we inserted + AttributeValue::update(ctx, inserted_av_id, Some(value.clone())) + .await + .expect("able to update"); + + // Confirm again before 
rebase + let property_values = PropertyEditorValues::assemble(ctx, component.id()) + .await + .expect("able to list prop values"); + + let mut inserted_attribute_values = + property_values.list_with_values_by_prop_id(exposed_ports_elem_prop_id); + assert_eq!(1, inserted_attribute_values.len()); + let (inserted_value, pvalues_inserted_attribute_value_id) = + inserted_attribute_values.pop().expect("get our av id"); + assert_eq!(inserted_av_id, pvalues_inserted_attribute_value_id); + assert_eq!(inserted_value, value.clone()); + + // Rebase again! + let conflicts = ctx.commit().await.expect("unable to commit"); + assert!(conflicts.is_none()); + + ctx.update_snapshot_to_visibility() + .await + .expect("unable to update snapshot to visiblity"); + + let property_values = PropertyEditorValues::assemble(ctx, component.id()) + .await + .expect("able to list prop values"); + + let mut inserted_attribute_values = + property_values.list_with_values_by_prop_id(exposed_ports_elem_prop_id); + assert_eq!(1, inserted_attribute_values.len()); + let (inserted_value, pvalues_inserted_attribute_value_id) = + inserted_attribute_values.pop().expect("get our av id"); + assert_eq!(inserted_av_id, pvalues_inserted_attribute_value_id); + assert_eq!(inserted_value, value.clone()); +} + +#[test] +async fn create_and_determine_lineage(ctx: &DalContext) { + // List all schemas in the workspace. Pick the first one alphabetically. + let mut schemas: Vec = Schema::list(ctx).await.expect("could not list schemas"); + schemas.sort_by(|a, b| a.name.cmp(&b.name)); + let schema = schemas.pop().expect("schemas are empty"); + + // Ensure we can get it by id. + let found_schema = Schema::get_by_id(ctx, schema.id()) + .await + .expect("could not get schema by id"); + assert_eq!( + schema.id(), // expected + found_schema.id() // actual + ); + + // Pick a schema variant. 
+ let mut schema_variants = SchemaVariant::list_for_schema(ctx, found_schema.id()) + .await + .expect("could not list schema variants for schema"); + let schema_variant = schema_variants.pop().expect("schemas are empty"); + let schema_variant_id = schema_variant.id(); + + // Create a component and set geometry. + let name = "fsu not top four"; + let component = Component::new(ctx, name, schema_variant_id, None) + .await + .expect("could not create component"); + let component = component + .set_geometry( + ctx, + "1", + "-1", + Some(DEFAULT_COMPONENT_WIDTH), + Some(DEFAULT_COMPONENT_HEIGHT), + ) + .await + .expect("could not set geometry"); + + // Determine the schema variant from the component. Ensure it is the same as before. + let post_creation_schema_variant = component + .schema_variant(ctx) + .await + .expect("could not get schema variant for component"); + assert_eq!( + schema_variant_id, // expected + post_creation_schema_variant.id() // actual + ); + + // Determine the schema from the schema variant. Ensure it is the same as before. + let post_creation_schema = SchemaVariant::schema(ctx, post_creation_schema_variant.id()) + .await + .expect("could not get schema for schema variant"); + assert_eq!( + schema.id(), // expected + post_creation_schema.id() // actual + ); + + // Assemble the diagram just to make sure it works. 
+ let _diagram = Diagram::assemble(ctx) + .await + .expect("could not assemble diagram"); +} + +#[test] +async fn through_the_wormholes(ctx: &mut DalContext) { + let starfield_schema = Schema::list(ctx) + .await + .expect("list schemas") + .iter() + .find(|schema| schema.name() == "starfield") + .expect("starfield does not exist") + .to_owned(); + + let variant = SchemaVariant::list_for_schema(ctx, starfield_schema.id()) + .await + .expect("get schema variants") + .pop() + .expect("get default variant"); + + let name = "across the universe"; + + let component = Component::new(ctx, name, variant.id(), None) + .await + .expect("able to create component"); + + ctx.blocking_commit() + .await + .expect("blocking commit after component creation"); + + ctx.update_snapshot_to_visibility() + .await + .expect("update_snapshot_to_visibility"); + + let rigid_designator_prop_id = Prop::find_prop_id_by_path( + ctx, + variant.id(), + &PropPath::new([ + "root", + "domain", + "possible_world_a", + "wormhole_1", + "wormhole_2", + "wormhole_3", + "rigid_designator", + ]), + ) + .await + .expect("able to find 'rigid_designator' prop"); + + let rigid_designator_values = Prop::attribute_values_for_prop_id(ctx, rigid_designator_prop_id) + .await + .expect("able to get attribute value for universe prop"); + + assert_eq!(1, rigid_designator_values.len()); + + let rigid_designator_value_id = rigid_designator_values + .first() + .copied() + .expect("get first value id"); + + assert_eq!( + component.id(), + AttributeValue::component_id(ctx, rigid_designator_value_id) + .await + .expect("able to get component id for universe value") + ); + + let naming_and_necessity_prop_id = Prop::find_prop_id_by_path( + ctx, + variant.id(), + &PropPath::new([ + "root", + "domain", + "possible_world_b", + "wormhole_1", + "wormhole_2", + "wormhole_3", + "naming_and_necessity", + ]), + ) + .await + .expect("able to find 'naming_and_necessity' prop"); + + let naming_and_necessity_value_id = + 
Prop::attribute_values_for_prop_id(ctx, naming_and_necessity_prop_id) + .await + .expect("able to get values for naming_and_necessity") + .first() + .copied() + .expect("get first value id"); + + let update_graph = DependentValueGraph::for_values(ctx, vec![rigid_designator_value_id]) + .await + .expect("able to generate update graph"); + + assert!( + update_graph.contains_value(naming_and_necessity_value_id), + "update graph has the value we aren't setting but which depends on the value we are setting" + ); + + assert!(update_graph + .direct_dependencies_of(naming_and_necessity_value_id) + .iter() + .any(|&id| id == rigid_designator_value_id), + "update graph declares that `naming_and_necessity` value depends on `rigid_designator` value" + ); + + let rigid_designation = serde_json::json!("hesperus"); + + AttributeValue::update( + ctx, + rigid_designator_value_id, + Some(rigid_designation.to_owned()), + ) + .await + .expect("able to set universe value"); + + let materialized_view = AttributeValue::get_by_id(ctx, rigid_designator_value_id) + .await + .expect("get av") + .materialized_view(ctx) + .await + .expect("get view") + .expect("has a view"); + + assert_eq!(rigid_designation, materialized_view); + + ctx.blocking_commit().await.expect("commit"); + + ctx.update_snapshot_to_visibility() + .await + .expect("unable to update snapshot to visiblity"); + + let naming_and_necessity_view = AttributeValue::get_by_id(ctx, naming_and_necessity_value_id) + .await + .expect("able to get attribute value for `naming_and_necessity_value_id`") + .materialized_view(ctx) + .await + .expect("able to get materialized_view for `naming_and_necessity_value_id`") + .expect("naming and necessity has a value"); + + // hesperus is phosphorus (the attr func on naming_and_necessity_value_id will return + // phosphorus if it receives hesperus) + assert_eq!("phosphorus", naming_and_necessity_view); + + let root_prop_id = Prop::find_prop_id_by_path(ctx, variant.id(), &PropPath::new(["root"])) + 
.await + .expect("able to find root prop"); + + let root_value_id = Prop::attribute_values_for_prop_id(ctx, root_prop_id) + .await + .expect("get root prop value id") + .first() + .copied() + .expect("a value exists for the root prop"); + + let root_value = AttributeValue::get_by_id(ctx, root_value_id) + .await + .expect("able to get the value for the root prop attriburte value id"); + + let root_view = root_value + .materialized_view(ctx) + .await + .expect("able to fetch materialized_view for root value") + .expect("there is a value for the root value materialized_view"); + + assert_eq!( + serde_json::json!({ + "si": { "name": name, "color": "#ffffff", "type": "component" }, + "resource": {}, + "resource_value": {}, + "domain": { + "name": name, + "possible_world_a": { + "wormhole_1": { + "wormhole_2": { + "wormhole_3": { + "rigid_designator": rigid_designation + } + } + } + }, + "possible_world_b": { + "wormhole_1": { + "wormhole_2": { + "wormhole_3": { + "naming_and_necessity": "phosphorus" + } + } + } + }, + "universe": { "galaxies": [] }, + } + } + ), + root_view + ); +} +#[test] +async fn set_the_universe(ctx: &mut DalContext) { + let starfield_schema = Schema::list(ctx) + .await + .expect("list schemas") + .iter() + .find(|schema| schema.name() == "starfield") + .expect("starfield does not exist") + .to_owned(); + + let variant = SchemaVariant::list_for_schema(ctx, starfield_schema.id()) + .await + .expect("get schema variants") + .pop() + .expect("get default variant"); + + let name = "across the universe"; + + let component = Component::new(ctx, name, variant.id(), None) + .await + .expect("able to create component"); + + ctx.blocking_commit() + .await + .expect("blocking commit after component creation"); + + ctx.update_snapshot_to_visibility() + .await + .expect("update_snapshot_to_visibility"); + + let universe_prop_id = Prop::find_prop_id_by_path( + ctx, + variant.id(), + &PropPath::new(["root", "domain", "universe"]), + ) + .await + .expect("able to 
find 'root/domain/universe' prop"); + + let universe_values = Prop::attribute_values_for_prop_id(ctx, universe_prop_id) + .await + .expect("able to get attribute value for universe prop"); + + assert_eq!(1, universe_values.len()); + + let universe_value_id = universe_values + .first() + .copied() + .expect("get first value id"); + + assert_eq!( + component.id(), + AttributeValue::component_id(ctx, universe_value_id) + .await + .expect("able to get component id for universe value") + ); + + let universe_json = serde_json::json!({ + "galaxies": [ + { "sun": "sol", "planets": 9 }, + { "sun": "champagne supernova", "planets": 9000 }, + { "sun": "black hole", "planets": 0 } + ] + }); + + AttributeValue::update(ctx, universe_value_id, Some(universe_json.to_owned())) + .await + .expect("able to set universe value"); + + let materialized_view = AttributeValue::get_by_id(ctx, universe_value_id) + .await + .expect("get av") + .materialized_view(ctx) + .await + .expect("get view") + .expect("has a view"); + + assert_eq!(universe_json, materialized_view); + + ctx.blocking_commit().await.expect("commit"); + + ctx.update_snapshot_to_visibility() + .await + .expect("unable to update snapshot to visiblity"); + + let materialized_view = AttributeValue::get_by_id(ctx, universe_value_id) + .await + .expect("get av") + .materialized_view(ctx) + .await + .expect("get view") + .expect("has a view"); + + assert_eq!(universe_json, materialized_view); +} diff --git a/lib/dal/tests/integration_test/internal/new_engine/connection.rs b/lib/dal/tests/integration_test/internal/new_engine/connection.rs new file mode 100644 index 0000000000..4fd448819f --- /dev/null +++ b/lib/dal/tests/integration_test/internal/new_engine/connection.rs @@ -0,0 +1,170 @@ +use dal::diagram::Diagram; +use dal::{ + AttributeValue, Component, DalContext, ExternalProvider, InternalProvider, Schema, + SchemaVariant, +}; +use dal_test::test; + +#[test] +async fn connect_components(ctx: &mut DalContext) { + // Get the 
source schema variant id. + let docker_image_schema = Schema::find_by_name(ctx, "Docker Image") + .await + .expect("could not perform find by name") + .expect("no schema found"); + let mut docker_image_schema_variants = + SchemaVariant::list_for_schema(ctx, docker_image_schema.id()) + .await + .expect("could not list schema variants for schema"); + let docker_image_schema_variant = docker_image_schema_variants + .pop() + .expect("schema variants are empty"); + let docker_image_schema_variant_id = docker_image_schema_variant.id(); + + // Get the destination schema variant id. + let butane_schema = Schema::find_by_name(ctx, "Butane") + .await + .expect("could not perform find by name") + .expect("no schema found"); + let mut butane_schema_variants = SchemaVariant::list_for_schema(ctx, butane_schema.id()) + .await + .expect("could not list schema variants for schema"); + let butane_schema_variant = butane_schema_variants + .pop() + .expect("schema variants are empty"); + let butane_schema_variant_id = butane_schema_variant.id(); + + // Find the providers we want to use. 
+ let docker_image_external_providers = + ExternalProvider::list(ctx, docker_image_schema_variant_id) + .await + .expect("could not list external providers"); + let external_provider = docker_image_external_providers + .iter() + .find(|e| e.name() == "Container Image") + .expect("could not find external provider"); + let butane_explicit_internal_providers = InternalProvider::list(ctx, butane_schema_variant_id) + .await + .expect("could not list explicit internal providers"); + let explicit_internal_provider = butane_explicit_internal_providers + .iter() + .find(|e| e.name() == "Container Image") + .expect("could not find explicit internal provider"); + + // Create a component for both the source and the destination + let oysters_component = Component::new( + ctx, + "oysters in my pocket", + docker_image_schema_variant_id, + None, + ) + .await + .expect("could not create component"); + + ctx.blocking_commit() + .await + .expect("blocking commit after component creation"); + + ctx.update_snapshot_to_visibility() + .await + .expect("update_snapshot_to_visibility"); + + // Create a second component for a second source + let lunch_component = Component::new( + ctx, + "were saving for lunch", + docker_image_schema_variant_id, + None, + ) + .await + .expect("could not create component"); + + ctx.blocking_commit() + .await + .expect("blocking commit after component 2 creation"); + + ctx.update_snapshot_to_visibility() + .await + .expect("update_snapshot_to_visibility"); + + let royel_component = Component::new(ctx, "royel otis", butane_schema_variant_id, None) + .await + .expect("could not create component"); + + ctx.blocking_commit() + .await + .expect("blocking commit after butane component creation"); + + ctx.update_snapshot_to_visibility() + .await + .expect("update_snapshot_to_visibility"); + + // Connect the components! 
+ let _inter_component_attribute_prototype_argument_id = Component::connect( + ctx, + oysters_component.id(), + external_provider.id(), + royel_component.id(), + explicit_internal_provider.id(), + ) + .await + .expect("could not connect components"); + + ctx.blocking_commit().await.expect("blocking commit failed"); + + ctx.update_snapshot_to_visibility() + .await + .expect("update_snapshot_to_visibility"); + + // Connect component 2 + let _inter_component_attribute_prototype_argument_id = Component::connect( + ctx, + lunch_component.id(), + external_provider.id(), + royel_component.id(), + explicit_internal_provider.id(), + ) + .await + .expect("could not connect components"); + + ctx.blocking_commit().await.expect("blocking commit failed"); + + ctx.update_snapshot_to_visibility() + .await + .expect("update_snapshot_to_visibility"); + + //dbg!(royel_component.incoming_connections(ctx).await.expect("ok")); + + let units_value_id = royel_component + .attribute_values_for_prop(ctx, &["root", "domain", "systemd", "units"]) + .await + .expect("able to get values for units") + .first() + .copied() + .expect("has a value"); + + let materialized_view = AttributeValue::get_by_id(ctx, units_value_id) + .await + .expect("value exists") + .materialized_view(ctx) + .await + .expect("able to get units materialized_view") + .expect("units has a materialized_view"); + + dbg!(lunch_component + .materialized_view(ctx) + .await + .expect("get docker image materialized_view")); + + assert!(matches!(materialized_view, serde_json::Value::Array(_))); + + if let serde_json::Value::Array(units_array) = materialized_view { + assert_eq!(2, units_array.len()) + } + + // Assemble the diagram and check the edges. 
+ let diagram = Diagram::assemble(ctx) + .await + .expect("could not assemble the diagram"); + assert_eq!(2, diagram.edges.len()); +} diff --git a/lib/dal/tests/integration_test/internal/new_engine/frame.rs b/lib/dal/tests/integration_test/internal/new_engine/frame.rs new file mode 100644 index 0000000000..4eccb06c44 --- /dev/null +++ b/lib/dal/tests/integration_test/internal/new_engine/frame.rs @@ -0,0 +1,874 @@ +use dal::component::frame::{Frame, FrameError}; +use dal::diagram::{Diagram, DiagramResult, EdgeId, SummaryDiagramComponent, SummaryDiagramEdge}; +use dal::{ + AttributeValue, Component, DalContext, ExternalProvider, InternalProvider, Schema, + SchemaVariant, +}; +use dal_test::test; +use pretty_assertions_sorted::assert_eq; +use std::collections::HashMap; + +#[test] +async fn convert_component_to_frame_and_attach_no_nesting(ctx: &mut DalContext) { + let starfield_schema = Schema::find_by_name(ctx, "starfield") + .await + .expect("could not perform find by name") + .expect("schema not found by name"); + let fallout_schema = Schema::find_by_name(ctx, "fallout") + .await + .expect("could not perform find by name") + .expect("schema not found by name"); + + // Create components using the test exclusive schemas. Neither of them should be frames. 
+ let starfield_schema_variant = SchemaVariant::list_for_schema(ctx, starfield_schema.id()) + .await + .expect("could not list schema variants") + .pop() + .expect("no schema variants found"); + let fallout_schema_variant = SchemaVariant::list_for_schema(ctx, fallout_schema.id()) + .await + .expect("could not list schema variants") + .pop() + .expect("no schema variants found"); + let starfield_component = Component::new(ctx, "parent", starfield_schema_variant.id(), None) + .await + .expect("could not create component"); + let fallout_component = Component::new(ctx, "child", fallout_schema_variant.id(), None) + .await + .expect("could not create component"); + + // Attempt to attach a child to a parent that is a not a frame. + match Frame::attach_child_to_parent(ctx, starfield_component.id(), fallout_component.id()).await + { + Ok(()) => panic!("attaching child to parent should fail if parent is not a frame"), + Err(FrameError::ParentIsNotAFrame(..)) => {} + Err(other_error) => panic!("unexpected error: {0}", other_error), + } + + // Change the parent to become a frame. + let type_attribute_value_id = starfield_component + .attribute_values_for_prop(ctx, &["root", "si", "type"]) + .await + .expect("could not find attribute values for prop") + .into_iter() + .next() + .expect("could not get type attribute value id"); + + AttributeValue::update( + ctx, + type_attribute_value_id, + Some(serde_json::json!["ConfigurationFrameDown"]), + ) + .await + .expect("could not update attribute value"); + + ctx.blocking_commit() + .await + .expect("could not perform blocking commit"); + + // Now that the parent is a frame, attempt to attach the child. + Frame::attach_child_to_parent(ctx, starfield_component.id(), fallout_component.id()) + .await + .expect("could not attach child to parent"); + + ctx.blocking_commit() + .await + .expect("could not perform blocking commit"); + + // Assemble the diagram and ensure we see the right number of components. 
+ let diagram = Diagram::assemble(ctx) + .await + .expect("could not assemble diagram"); + assert_eq!(2, diagram.components.len()); + + // Collect the parent ids for the components on the diagram. + let mut starfield_parent_node_id = None; + let mut fallout_parent_node_id = None; + for component in diagram.components { + match component.schema_name.as_str() { + "starfield" => starfield_parent_node_id = Some(component.parent_node_id), + "fallout" => fallout_parent_node_id = Some(component.parent_node_id), + schema_name => panic!( + "unexpected schema name for diagram component: {0}", + schema_name + ), + } + } + let starfield_parent_node_id = + starfield_parent_node_id.expect("could not find starfield parent node id"); + let fallout_parent_node_id = + fallout_parent_node_id.expect("could not find fallout parent node id"); + + // Ensure the frame does not have a parent and the child's parent is the frame. + assert!(starfield_parent_node_id.is_none()); + assert_eq!( + starfield_component.id(), + fallout_parent_node_id.expect("no parent node id for fallout component") + ); +} + +#[test] +async fn multiple_frames_with_complex_connections_no_nesting(ctx: &mut DalContext) { + let region_schema = Schema::find_by_name(ctx, "Region") + .await + .expect("could not perform find by name") + .expect("schema not found by name"); + let ec2_schema = Schema::find_by_name(ctx, "EC2 Instance") + .await + .expect("could not perform find by name") + .expect("schema not found by name"); + let ami_schema = Schema::find_by_name(ctx, "AMI") + .await + .expect("could not perform find by name") + .expect("schema not found by name"); + + // Collect schema variants. 
+ let region_schema_variant_id = SchemaVariant::list_for_schema(ctx, region_schema.id()) + .await + .expect("could not list schema variants") + .pop() + .expect("no schema variants found") + .id(); + let ec2_schema_variant_id = SchemaVariant::list_for_schema(ctx, ec2_schema.id()) + .await + .expect("could not list schema variants") + .pop() + .expect("no schema variants found") + .id(); + let ami_schema_variant_id = SchemaVariant::list_for_schema(ctx, ami_schema.id()) + .await + .expect("could not list schema variants") + .pop() + .expect("no schema variants found") + .id(); + + // Scenario 1: create an AWS region frame. + let first_region_frame_name = "first region frame"; + let first_region_frame = + Component::new(ctx, first_region_frame_name, region_schema_variant_id, None) + .await + .expect("could not create component"); + + // Validate Scenario 1 + { + let diagram = DiagramByKey::assemble(ctx) + .await + .expect("could not assemble diagram"); + assert_eq!( + 1, // expected + diagram.components.len() // actual + ); + assert!(diagram.edges.is_empty()); + + let first_region_frame_assembled = diagram + .components + .get(first_region_frame_name) + .expect("could not get component by name"); + + assert_eq!( + first_region_frame.id(), // expected + first_region_frame_assembled.component_id // actual + ); + assert!(first_region_frame_assembled.parent_node_id.is_none()); + } + + // Scenario 2: create an AMI and attach to region frame + let first_ami_component_name = "first ami component"; + let first_ami_component = + Component::new(ctx, first_ami_component_name, ami_schema_variant_id, None) + .await + .expect("could not create component"); + Frame::attach_child_to_parent(ctx, first_region_frame.id(), first_ami_component.id()) + .await + .expect("could not attach child to parent"); + + // Validate Scenario 2 + { + let diagram = DiagramByKey::assemble(ctx) + .await + .expect("could not assemble diagram"); + assert_eq!( + 2, // expected + diagram.components.len() // 
actual + ); + assert_eq!( + 1, // expected + diagram.edges.len() // actual + ); + + let first_region_frame_assembled = diagram + .components + .get(first_region_frame_name) + .expect("could not get component by name"); + let first_ami_component_assembled = diagram + .components + .get(first_ami_component_name) + .expect("could not get component by name"); + + assert_eq!( + first_region_frame.id(), // expected + first_region_frame_assembled.component_id // actual + ); + assert_eq!( + first_ami_component.id(), // expected + first_ami_component_assembled.component_id // actual + ); + + assert!(first_region_frame_assembled.parent_node_id.is_none()); + assert_eq!( + first_region_frame.id(), // expected + first_ami_component_assembled + .parent_node_id + .expect("no parent node id") // actual + ); + } + + // Scenario 3: add another aws region frame on its own. + let second_region_frame_name = "second region frame"; + let second_region_frame = Component::new( + ctx, + second_region_frame_name, + region_schema_variant_id, + None, + ) + .await + .expect("could not create component"); + + // Validate Scenario 3 + { + let diagram = DiagramByKey::assemble(ctx) + .await + .expect("could not assemble diagram"); + assert_eq!( + 3, // expected + diagram.components.len() // actual + ); + assert_eq!( + 1, // expected + diagram.edges.len() // actual + ); + + let first_region_frame_assembled = diagram + .components + .get(first_region_frame_name) + .expect("could not get component by name"); + let first_ami_component_assembled = diagram + .components + .get(first_ami_component_name) + .expect("could not get component by name"); + let second_region_frame_assembled = diagram + .components + .get(second_region_frame_name) + .expect("could not get component by name"); + + assert_eq!( + first_region_frame.id(), // expected + first_region_frame_assembled.component_id // actual + ); + assert_eq!( + first_ami_component.id(), // expected + first_ami_component_assembled.component_id // actual + 
); + assert_eq!( + second_region_frame.id(), // expected + second_region_frame_assembled.component_id // actual + ); + + assert!(first_region_frame_assembled.parent_node_id.is_none()); + assert!(second_region_frame_assembled.parent_node_id.is_none()); + assert_eq!( + first_region_frame.id(), // expected + first_ami_component_assembled + .parent_node_id + .expect("no parent node id") // actual + ); + } + + // Scenarios 4 and 5: create another ami, but place it outside of both frames. Then, drag it onto the second region + // frame. Since we are working with dal integration tests and not sdf routes, we combine these two scenarios. + let second_ami_component_name = "second ami component"; + let second_ami_component = + Component::new(ctx, second_ami_component_name, ami_schema_variant_id, None) + .await + .expect("could not create component"); + Frame::attach_child_to_parent(ctx, second_region_frame.id(), second_ami_component.id()) + .await + .expect("could not attach child to parent"); + + // Validate Scenarios 4 and 5 + { + let diagram = DiagramByKey::assemble(ctx) + .await + .expect("could not assemble diagram"); + assert_eq!( + 4, // expected + diagram.components.len() // actual + ); + assert_eq!( + 2, // expected + diagram.edges.len() // actual + ); + + let first_region_frame_assembled = diagram + .components + .get(first_region_frame_name) + .expect("could not get component by name"); + let first_ami_component_assembled = diagram + .components + .get(first_ami_component_name) + .expect("could not get component by name"); + let second_region_frame_assembled = diagram + .components + .get(second_region_frame_name) + .expect("could not get component by name"); + let second_ami_component_assembled = diagram + .components + .get(second_ami_component_name) + .expect("could not get component by name"); + + assert_eq!( + first_region_frame.id(), // expected + first_region_frame_assembled.component_id // actual + ); + assert_eq!( + first_ami_component.id(), // expected + 
first_ami_component_assembled.component_id // actual + ); + assert_eq!( + second_region_frame.id(), // expected + second_region_frame_assembled.component_id // actual + ); + assert_eq!( + second_ami_component.id(), // expected + second_ami_component_assembled.component_id // actual + ); + + assert!(first_region_frame_assembled.parent_node_id.is_none()); + assert!(second_region_frame_assembled.parent_node_id.is_none()); + assert_eq!( + first_region_frame.id(), // expected + first_ami_component_assembled + .parent_node_id + .expect("no parent node id") // actual + ); + assert_eq!( + second_region_frame.id(), // expected + second_ami_component_assembled + .parent_node_id + .expect("no parent node id") // actual + ); + } + + // Scenarios 6 and 7: create an ec2 instance, but place it outside of both frames. Then, drag it onto the first + // region frame. Since we are working with dal integration tests and not sdf routes, we combine these two scenarios. + let first_ec2_instance_component_name = "first ec2 instance component"; + let first_ec2_instance_component = Component::new( + ctx, + first_ec2_instance_component_name, + ec2_schema_variant_id, + None, + ) + .await + .expect("could not create component"); + Frame::attach_child_to_parent( + ctx, + first_region_frame.id(), + first_ec2_instance_component.id(), + ) + .await + .expect("could not attach child to parent"); + + // Validate Scenarios 6 and 7 + { + let diagram = DiagramByKey::assemble(ctx) + .await + .expect("could not assemble diagram"); + assert_eq!( + 5, // expected + diagram.components.len() // actual + ); + assert_eq!( + 3, // expected + diagram.edges.len() // actual + ); + + let first_region_frame_assembled = diagram + .components + .get(first_region_frame_name) + .expect("could not get component by name"); + let first_ami_component_assembled = diagram + .components + .get(first_ami_component_name) + .expect("could not get component by name"); + let second_region_frame_assembled = diagram + .components + 
.get(second_region_frame_name) + .expect("could not get component by name"); + let second_ami_component_assembled = diagram + .components + .get(second_ami_component_name) + .expect("could not get component by name"); + let first_ec2_instance_component_assembled = diagram + .components + .get(first_ec2_instance_component_name) + .expect("could not get component by name"); + + assert_eq!( + first_region_frame.id(), // expected + first_region_frame_assembled.component_id // actual + ); + assert_eq!( + first_ami_component.id(), // expected + first_ami_component_assembled.component_id // actual + ); + assert_eq!( + second_region_frame.id(), // expected + second_region_frame_assembled.component_id // actual + ); + assert_eq!( + second_ami_component.id(), // expected + second_ami_component_assembled.component_id // actual + ); + assert_eq!( + first_ec2_instance_component.id(), // expected + first_ec2_instance_component_assembled.component_id // actual + ); + + assert!(first_region_frame_assembled.parent_node_id.is_none()); + assert!(second_region_frame_assembled.parent_node_id.is_none()); + assert_eq!( + first_region_frame.id(), // expected + first_ami_component_assembled + .parent_node_id + .expect("no parent node id") // actual + ); + assert_eq!( + second_region_frame.id(), // expected + second_ami_component_assembled + .parent_node_id + .expect("no parent node id") // actual + ); + assert_eq!( + first_region_frame.id(), // expected + first_ec2_instance_component_assembled + .parent_node_id + .expect("no parent node id") // actual + ); + } + + // Scenario 8: draw an edge between the first ami and the first ec2 using the "Image ID" sockets. Both should exist + // within the first region frame. 
+ let image_id_socket_name = "Image ID"; + let image_id_ami_external_provider_id = + ExternalProvider::find_with_name(ctx, image_id_socket_name, ami_schema_variant_id) + .await + .expect("could not perform find by name") + .expect("no external provider found") + .id(); + let image_id_ec2_instance_internal_provider_id = + InternalProvider::find_explicit_with_name(ctx, image_id_socket_name, ec2_schema_variant_id) + .await + .expect("could not perform find by name") + .expect("no internal provider found") + .id(); + let image_id_ami_to_ec2_instance_attribute_prototype_argument_id = Component::connect( + ctx, + first_ami_component.id(), + image_id_ami_external_provider_id, + first_ec2_instance_component.id(), + image_id_ec2_instance_internal_provider_id, + ) + .await + .expect("could not perform connection"); + + // Validate Scenario 8 + { + let diagram = DiagramByKey::assemble(ctx) + .await + .expect("could not assemble diagram"); + assert_eq!( + 5, // expected + diagram.components.len() // actual + ); + assert_eq!( + 4, // expected + diagram.edges.len() // actual + ); + + let first_region_frame_assembled = diagram + .components + .get(first_region_frame_name) + .expect("could not get component by name"); + let first_ami_component_assembled = diagram + .components + .get(first_ami_component_name) + .expect("could not get component by name"); + let second_region_frame_assembled = diagram + .components + .get(second_region_frame_name) + .expect("could not get component by name"); + let second_ami_component_assembled = diagram + .components + .get(second_ami_component_name) + .expect("could not get component by name"); + let first_ec2_instance_component_assembled = diagram + .components + .get(first_ec2_instance_component_name) + .expect("could not get component by name"); + + assert_eq!( + first_region_frame.id(), // expected + first_region_frame_assembled.component_id // actual + ); + assert_eq!( + first_ami_component.id(), // expected + 
first_ami_component_assembled.component_id // actual + ); + assert_eq!( + second_region_frame.id(), // expected + second_region_frame_assembled.component_id // actual + ); + assert_eq!( + second_ami_component.id(), // expected + second_ami_component_assembled.component_id // actual + ); + assert_eq!( + first_ec2_instance_component.id(), // expected + first_ec2_instance_component_assembled.component_id // actual + ); + + assert!(first_region_frame_assembled.parent_node_id.is_none()); + assert!(second_region_frame_assembled.parent_node_id.is_none()); + assert_eq!( + first_region_frame.id(), // expected + first_ami_component_assembled + .parent_node_id + .expect("no parent node id") // actual + ); + assert_eq!( + second_region_frame.id(), // expected + second_ami_component_assembled + .parent_node_id + .expect("no parent node id") // actual + ); + assert_eq!( + first_region_frame.id(), // expected + first_ec2_instance_component_assembled + .parent_node_id + .expect("no parent node id") // actual + ); + + let image_id_ami_to_ec2_instance_edge_assembled = diagram + .edges + .get(&image_id_ami_to_ec2_instance_attribute_prototype_argument_id) + .expect("could not get edge by id"); + assert_eq!( + first_ami_component.id(), // expected + image_id_ami_to_ec2_instance_edge_assembled.from_node_id // actual + ); + assert_eq!( + image_id_ami_external_provider_id, // expected + image_id_ami_to_ec2_instance_edge_assembled.from_socket_id // actual + ); + assert_eq!( + first_ec2_instance_component.id(), // expected + image_id_ami_to_ec2_instance_edge_assembled.to_node_id // actual + ); + assert_eq!( + image_id_ec2_instance_internal_provider_id, // expected + image_id_ami_to_ec2_instance_edge_assembled.to_socket_id // actual + ); + } + + // Scenario 9: create a third AMI outside of both frames. 
+ let third_ami_component_name = "third ami component"; + let third_ami_component = + Component::new(ctx, third_ami_component_name, ami_schema_variant_id, None) + .await + .expect("could not create component"); + + // Validate Scenario 9 + { + let diagram = DiagramByKey::assemble(ctx) + .await + .expect("could not assemble diagram"); + assert_eq!( + 6, // expected + diagram.components.len() // actual + ); + assert_eq!( + 4, // expected + diagram.edges.len() // actual + ); + + let first_region_frame_assembled = diagram + .components + .get(first_region_frame_name) + .expect("could not get component by name"); + let first_ami_component_assembled = diagram + .components + .get(first_ami_component_name) + .expect("could not get component by name"); + let second_region_frame_assembled = diagram + .components + .get(second_region_frame_name) + .expect("could not get component by name"); + let second_ami_component_assembled = diagram + .components + .get(second_ami_component_name) + .expect("could not get component by name"); + let first_ec2_instance_component_assembled = diagram + .components + .get(first_ec2_instance_component_name) + .expect("could not get component by name"); + let third_ami_component_assembled = diagram + .components + .get(third_ami_component_name) + .expect("could not get component by name"); + + assert_eq!( + first_region_frame.id(), // expected + first_region_frame_assembled.component_id // actual + ); + assert_eq!( + first_ami_component.id(), // expected + first_ami_component_assembled.component_id // actual + ); + assert_eq!( + second_region_frame.id(), // expected + second_region_frame_assembled.component_id // actual + ); + assert_eq!( + second_ami_component.id(), // expected + second_ami_component_assembled.component_id // actual + ); + assert_eq!( + first_ec2_instance_component.id(), // expected + first_ec2_instance_component_assembled.component_id // actual + ); + assert_eq!( + third_ami_component.id(), // expected + 
third_ami_component_assembled.component_id // actual + ); + + assert!(first_region_frame_assembled.parent_node_id.is_none()); + assert!(second_region_frame_assembled.parent_node_id.is_none()); + assert!(third_ami_component_assembled.parent_node_id.is_none()); + assert_eq!( + first_region_frame.id(), // expected + first_ami_component_assembled + .parent_node_id + .expect("no parent node id") // actual + ); + assert_eq!( + second_region_frame.id(), // expected + second_ami_component_assembled + .parent_node_id + .expect("no parent node id") // actual + ); + assert_eq!( + first_region_frame.id(), // expected + first_ec2_instance_component_assembled + .parent_node_id + .expect("no parent node id") // actual + ); + + let image_id_ami_to_ec2_instance_edge_assembled = diagram + .edges + .get(&image_id_ami_to_ec2_instance_attribute_prototype_argument_id) + .expect("could not get edge by id"); + assert_eq!( + first_ami_component.id(), // expected + image_id_ami_to_ec2_instance_edge_assembled.from_node_id // actual + ); + assert_eq!( + image_id_ami_external_provider_id, // expected + image_id_ami_to_ec2_instance_edge_assembled.from_socket_id // actual + ); + assert_eq!( + first_ec2_instance_component.id(), // expected + image_id_ami_to_ec2_instance_edge_assembled.to_node_id // actual + ); + assert_eq!( + image_id_ec2_instance_internal_provider_id, // expected + image_id_ami_to_ec2_instance_edge_assembled.to_socket_id // actual + ); + } + + // Scenario 10: draw an edge (do not drag the component or place it onto a frame) between the "Region" socket of the + // second region frame and the "Region" socket of the third ami. 
+ let region_socket_name = "Region"; + let region_region_external_provider_id = + ExternalProvider::find_with_name(ctx, region_socket_name, region_schema_variant_id) + .await + .expect("could not perform find by name") + .expect("no external provider found") + .id(); + let region_ami_internal_provider_id = + InternalProvider::find_explicit_with_name(ctx, region_socket_name, ami_schema_variant_id) + .await + .expect("could not perform find by name") + .expect("no internal provider found") + .id(); + let region_region_to_ami_attribute_prototype_argument_id = Component::connect( + ctx, + second_region_frame.id(), + region_region_external_provider_id, + third_ami_component.id(), + region_ami_internal_provider_id, + ) + .await + .expect("could not perform connection"); + + // Validate Scenario 10 + { + let diagram = DiagramByKey::assemble(ctx) + .await + .expect("could not assemble diagram"); + assert_eq!( + 6, // expected + diagram.components.len() // actual + ); + assert_eq!( + 5, // expected + diagram.edges.len() // actual + ); + + let first_region_frame_assembled = diagram + .components + .get(first_region_frame_name) + .expect("could not get component by name"); + let first_ami_component_assembled = diagram + .components + .get(first_ami_component_name) + .expect("could not get component by name"); + let second_region_frame_assembled = diagram + .components + .get(second_region_frame_name) + .expect("could not get component by name"); + let second_ami_component_assembled = diagram + .components + .get(second_ami_component_name) + .expect("could not get component by name"); + let first_ec2_instance_component_assembled = diagram + .components + .get(first_ec2_instance_component_name) + .expect("could not get component by name"); + let third_ami_component_assembled = diagram + .components + .get(third_ami_component_name) + .expect("could not get component by name"); + + assert_eq!( + first_region_frame.id(), // expected + first_region_frame_assembled.component_id // 
actual + ); + assert_eq!( + first_ami_component.id(), // expected + first_ami_component_assembled.component_id // actual + ); + assert_eq!( + second_region_frame.id(), // expected + second_region_frame_assembled.component_id // actual + ); + assert_eq!( + second_ami_component.id(), // expected + second_ami_component_assembled.component_id // actual + ); + assert_eq!( + first_ec2_instance_component.id(), // expected + first_ec2_instance_component_assembled.component_id // actual + ); + assert_eq!( + third_ami_component.id(), // expected + third_ami_component_assembled.component_id // actual + ); + + assert!(first_region_frame_assembled.parent_node_id.is_none()); + assert!(second_region_frame_assembled.parent_node_id.is_none()); + assert!(third_ami_component_assembled.parent_node_id.is_none()); + assert_eq!( + first_region_frame.id(), // expected + first_ami_component_assembled + .parent_node_id + .expect("no parent node id") // actual + ); + assert_eq!( + second_region_frame.id(), // expected + second_ami_component_assembled + .parent_node_id + .expect("no parent node id") // actual + ); + assert_eq!( + first_region_frame.id(), // expected + first_ec2_instance_component_assembled + .parent_node_id + .expect("no parent node id") // actual + ); + + let image_id_ami_to_ec2_instance_edge_assembled = diagram + .edges + .get(&image_id_ami_to_ec2_instance_attribute_prototype_argument_id) + .expect("could not get edge by id"); + assert_eq!( + first_ami_component.id(), // expected + image_id_ami_to_ec2_instance_edge_assembled.from_node_id // actual + ); + assert_eq!( + image_id_ami_external_provider_id, // expected + image_id_ami_to_ec2_instance_edge_assembled.from_socket_id // actual + ); + assert_eq!( + first_ec2_instance_component.id(), // expected + image_id_ami_to_ec2_instance_edge_assembled.to_node_id // actual + ); + assert_eq!( + image_id_ec2_instance_internal_provider_id, // expected + image_id_ami_to_ec2_instance_edge_assembled.to_socket_id // actual + ); + + let 
region_region_to_ami_edge_assembled = diagram + .edges + .get(&region_region_to_ami_attribute_prototype_argument_id) + .expect("could not get edge by id"); + assert_eq!( + second_region_frame.id(), // expected + region_region_to_ami_edge_assembled.from_node_id // actual + ); + assert_eq!( + region_region_external_provider_id, // expected + region_region_to_ami_edge_assembled.from_socket_id // actual + ); + assert_eq!( + third_ami_component.id(), // expected + region_region_to_ami_edge_assembled.to_node_id // actual + ); + assert_eq!( + region_ami_internal_provider_id, // expected + region_region_to_ami_edge_assembled.to_socket_id // actual + ); + } +} + +struct DiagramByKey { + pub components: HashMap, + pub edges: HashMap, +} + +impl DiagramByKey { + pub async fn assemble(ctx: &DalContext) -> DiagramResult { + let diagram = Diagram::assemble(ctx).await?; + + let mut components = HashMap::new(); + for component in &diagram.components { + components.insert(component.display_name.clone(), component.to_owned()); + } + + let mut edges = HashMap::new(); + for edge in &diagram.edges { + edges.insert(edge.edge_id, edge.to_owned()); + } + + Ok(Self { components, edges }) + } +} diff --git a/lib/dal/tests/integration_test/internal/new_engine/prop.rs b/lib/dal/tests/integration_test/internal/new_engine/prop.rs new file mode 100644 index 0000000000..5cbf2b5ea5 --- /dev/null +++ b/lib/dal/tests/integration_test/internal/new_engine/prop.rs @@ -0,0 +1,29 @@ +use dal::{prop::PropPath, DalContext, Prop, Schema, SchemaVariant}; +use dal_test::test; + +#[test] +async fn prop_path(ctx: &DalContext) { + let starfield_schema = Schema::list(ctx) + .await + .expect("list schemas") + .iter() + .find(|schema| schema.name() == "starfield") + .expect("starfield does not exist") + .to_owned(); + + let variant = SchemaVariant::list_for_schema(ctx, starfield_schema.id()) + .await + .expect("get schema variants") + .pop() + .expect("get default variant"); + + let name_path = PropPath::new(["root",
"si", "name"]); + let name_id = Prop::find_prop_id_by_path(ctx, variant.id(), &name_path) + .await + .expect("get name prop id"); + let fetched_name_path = Prop::path_by_id(ctx, name_id) + .await + .expect("get prop path by id"); + + assert_eq!(name_path, fetched_name_path); +} diff --git a/lib/dal/tests/integration_test/internal/new_engine/property_editor.rs b/lib/dal/tests/integration_test/internal/new_engine/property_editor.rs new file mode 100644 index 0000000000..bbdead3370 --- /dev/null +++ b/lib/dal/tests/integration_test/internal/new_engine/property_editor.rs @@ -0,0 +1,34 @@ +use dal::property_editor::schema::PropertyEditorSchema; +use dal::property_editor::values::PropertyEditorValues; +use dal::{Component, DalContext, Schema, SchemaVariant}; +use dal_test::test; + +#[test] +async fn assemble(ctx: &DalContext) { + // List all schemas in the workspace. Pick the first one alphabetically. + let mut schemas: Vec = Schema::list(ctx).await.expect("could not list schemas"); + schemas.sort_by(|a, b| a.name.cmp(&b.name)); + let schema = schemas.pop().expect("schemas are empty"); + + // Pick a schema variant. + let mut schema_variants = SchemaVariant::list_for_schema(ctx, schema.id()) + .await + .expect("could not list schema variants for schema"); + let schema_variant = schema_variants.pop().expect("schemas are empty"); + let schema_variant_id = schema_variant.id(); + + // Create a component and set geometry. + let name = "steam deck"; + let component = Component::new(ctx, name, schema_variant_id, None) + .await + .expect("could not create component"); + + // Assemble both property editor blobs. 
+ let property_editor_schema = PropertyEditorSchema::assemble(ctx, schema_variant_id) + .await + .expect("could not assemble property editor schema"); + let property_editor_values = PropertyEditorValues::assemble(ctx, component.id()) + .await + .expect("could not assemble property editor schema"); + dbg!(property_editor_schema, property_editor_values); +} diff --git a/lib/dal/tests/integration_test/internal/new_engine/rebaser.rs b/lib/dal/tests/integration_test/internal/new_engine/rebaser.rs new file mode 100644 index 0000000000..4df0a71010 --- /dev/null +++ b/lib/dal/tests/integration_test/internal/new_engine/rebaser.rs @@ -0,0 +1,490 @@ +use base64::{engine::general_purpose, Engine}; +use dal::func::argument::{FuncArgument, FuncArgumentKind}; +use dal::{DalContext, Func, FuncBackendKind, FuncBackendResponseType}; +use dal_test::test; +use pretty_assertions_sorted::assert_eq; + +#[test] +async fn modify_func_node(ctx: &mut DalContext) { + let code_base64 = general_purpose::STANDARD_NO_PAD.encode("this is code"); + + let func = Func::new( + ctx, + "test", + None::, + None::, + None::, + false, + false, + FuncBackendKind::JsAttribute, + FuncBackendResponseType::Boolean, + None::, + Some(code_base64.clone()), + ) + .await + .expect("able to make a func"); + + ctx.blocking_commit().await.expect("unable to commit"); + + ctx.update_snapshot_to_visibility() + .await + .expect("unable to update snapshot to visiblity"); + + Func::get_by_id(ctx, func.id) + .await + .expect("able to get func by id"); + + assert_eq!(Some(code_base64), func.code_base64); + + let new_code_base64 = general_purpose::STANDARD_NO_PAD.encode("this is new code"); + + let func = func + .modify(ctx, |f| { + f.name = "i changed this".into(); + + Ok(()) + }) + .await + .expect("able to modify func"); + + let conflicts = ctx.commit().await.expect("unable to commit"); + assert!(conflicts.is_none()); + + let refetched_func = Func::get_by_id(ctx, func.id) + .await + .expect("able to fetch func"); + + 
assert_eq!("i changed this", refetched_func.name.as_str()); + + let func = func + .modify(ctx, |f| { + f.code_base64 = Some(new_code_base64.clone()); + + Ok(()) + }) + .await + .expect("able to modify func"); + + let conflicts = ctx.blocking_commit().await.expect("unable to commit"); + assert!(conflicts.is_none()); + + ctx.update_snapshot_to_visibility() + .await + .expect("unable to update snapshot to visibility again"); + + let modified_func = Func::get_by_id(ctx, func.id) + .await + .expect("able to get func by id again"); + + assert_eq!( + Some(new_code_base64.as_str()), + modified_func.code_base64.as_deref() + ); + + let funcs = Func::list(ctx).await.expect("able to list funcs"); + let modified_func_again = funcs + .iter() + .find(|f| f.id == modified_func.id) + .expect("func should be in list"); + + assert_eq!(Some(new_code_base64), modified_func_again.code_base64); +} + +#[test] +async fn func_node_with_arguments(ctx: &mut DalContext) { + let code_base64 = general_purpose::STANDARD_NO_PAD.encode("this is code"); + + let func = Func::new( + ctx, + "test", + None::, + None::, + None::, + false, + false, + FuncBackendKind::JsAttribute, + FuncBackendResponseType::Boolean, + None::, + Some(code_base64), + ) + .await + .expect("able to make a func"); + + ctx.commit().await.expect("unable to commit"); + + ctx.update_snapshot_to_visibility() + .await + .expect("unable to update snapshot to visiblity"); + + Func::get_by_id(ctx, func.id) + .await + .expect("able to get func by id"); + + let new_code_base64 = general_purpose::STANDARD_NO_PAD.encode("this is new code"); + + // modify func + let func = func + .modify(ctx, |f| { + f.name = "test:modified".into(); + f.code_base64 = Some(new_code_base64.clone()); + + Ok(()) + }) + .await + .expect("able to modify func"); + + // create func arguments + let arg_1 = FuncArgument::new(ctx, "argle bargle", FuncArgumentKind::Object, None, func.id) + .await + .expect("able to create func argument"); + FuncArgument::new(ctx, "argy 
bargy", FuncArgumentKind::Object, None, func.id) + .await + .expect("able to create func argument 2"); + + let conflicts = ctx.commit().await.expect("unable to commit"); + assert!(conflicts.is_none()); + + ctx.update_snapshot_to_visibility() + .await + .expect("unable to update snapshot to visiblity again"); + + let modified_func = Func::get_by_id(ctx, func.id) + .await + .expect("able to get func by id again"); + + assert_eq!( + Some(new_code_base64).as_deref(), + modified_func.code_base64.as_deref() + ); + assert_eq!("test:modified", modified_func.name.as_str()); + + let args = FuncArgument::list_for_func(ctx, modified_func.id) + .await + .expect("able to list args"); + + assert_eq!(2, args.len()); + + // Modify func argument + FuncArgument::modify_by_id(ctx, arg_1.id, |arg| { + arg.name = "bargle argle".into(); + + Ok(()) + }) + .await + .expect("able to modify func"); + + let func_arg_refetch = FuncArgument::get_by_id(ctx, arg_1.id) + .await + .expect("get func arg"); + + assert_eq!( + "bargle argle", + func_arg_refetch.name.as_str(), + "refetch should have updated func arg name" + ); + + let conflicts = ctx.commit().await.expect("unable to commit"); + assert!(conflicts.is_none()); + + ctx.update_snapshot_to_visibility() + .await + .expect("unable to update snapshot to visiblity again"); + let args = FuncArgument::list_for_func(ctx, func.id) + .await + .expect("able to list args again"); + + assert_eq!(2, args.len()); + + let modified_arg = args + .iter() + .find(|a| a.id == arg_1.id) + .expect("able to get modified func arg"); + + assert_eq!( + "bargle argle", + modified_arg.name.as_str(), + "modified func arg should have new name after rebase" + ); +} + +#[test] +async fn delete_func_node(ctx: &mut DalContext) { + let code_base64 = general_purpose::STANDARD_NO_PAD.encode("this is code"); + + let func = Func::new( + ctx, + "test", + None::, + None::, + None::, + false, + false, + FuncBackendKind::JsAttribute, + FuncBackendResponseType::Boolean, + None::, + 
Some(code_base64), + ) + .await + .expect("able to make a func"); + + ctx.commit().await.expect("unable to commit"); + + ctx.update_snapshot_to_visibility() + .await + .expect("unable to update snapshot to visiblity"); + + let snapshot_id_before_deletion = { + ctx.workspace_snapshot() + .expect("get snap") + .read() + .await + .id() + }; + + Func::get_by_id(ctx, func.id) + .await + .expect("able to get func by id"); + + Func::remove(ctx, func.id) + .await + .expect("able to remove func"); + + assert!(Func::get_by_id(ctx, func.id).await.is_err()); + + let conflicts = ctx.commit().await.expect("unable to commit"); + assert!(conflicts.is_none()); + + ctx.update_snapshot_to_visibility() + .await + .expect("unable to update snapshot to visiblity"); + + let snapshot_id_after_deletion = { + ctx.workspace_snapshot() + .expect("get snap") + .read() + .await + .id() + }; + + // A sanity check + assert_ne!(snapshot_id_before_deletion, snapshot_id_after_deletion); + + let result = Func::get_by_id(ctx, func.id).await; + assert!(result.is_err()); +} + +// #[test] +// async fn pure_update_and_single_conflict(ctx: &mut DalContext) { +// ctx.update_visibility(Visibility::new_head(false)); +// ctx.update_tenancy(Tenancy::new(WorkspacePk::NONE)); + +// // Start with the base change set and the initial snapshot. +// let mut base_change_set = ChangeSetPointer::new(ctx, "main") +// .await +// .expect("could not create change set"); +// let base_change_set = &mut base_change_set; +// let mut snapshot = WorkspaceSnapshot::initial(ctx, &base_change_set) +// .await +// .expect("could not create workspace snapshot"); + +// // Add a new node, write and update the pointer. 
+// let olivia_rodrigo_id = base_change_set +// .generate_ulid() +// .expect("could not generate id"); +// let olivia_rodrigo_node_index = snapshot +// .add_node( +// NodeWeight::new_content( +// base_change_set, +// olivia_rodrigo_id, +// ContentAddress::Component(ContentHash::from("lacy - olivia rodrigo")), +// ) +// .expect("could not create node weight"), +// ) +// .expect("could not add node"); +// snapshot +// .add_edge_from_root(base_change_set, olivia_rodrigo_node_index) +// .expect("could not add edge"); +// snapshot +// .write(ctx, base_change_set.vector_clock_id()) +// .await +// .expect("could not write snapshot"); +// base_change_set +// .update_pointer(ctx, snapshot.id()) +// .await +// .expect("could not update change set"); + +// // Create another change set and update the snapshot. +// let mut forked_change_set = ChangeSetPointer::new(ctx, "fork") +// .await +// .expect("could not create change set"); +// let forked_change_set = &mut forked_change_set; +// let mut forked_snapshot = WorkspaceSnapshot::find_for_change_set(ctx, base_change_set.id) +// .await +// .expect("could not find snapshot"); +// let victoria_monet_id = forked_change_set +// .generate_ulid() +// .expect("could not generate id"); +// let victoria_monet_node_index = forked_snapshot +// .add_node( +// NodeWeight::new_content( +// forked_change_set, +// victoria_monet_id, +// ContentAddress::Component(ContentHash::from("i'm the one - victoria monét")), +// ) +// .expect("could not create node weight"), +// ) +// .expect("could not add node"); +// let victoria_monet_edge_index = forked_snapshot +// .add_edge_from_root(forked_change_set, victoria_monet_node_index) +// .expect("could not add edge"); +// forked_snapshot +// .write(ctx, forked_change_set.vector_clock_id()) +// .await +// .expect("could not write snapshot"); +// forked_change_set +// .update_pointer(ctx, forked_snapshot.id()) +// .await +// .expect("could not update change set"); + +// // Commit all changes made so that 
the rebaser can access them. +// ctx.blocking_commit().await.expect("could not commit"); + +// // Create a rebaser client and open a change set loop. +// let mut client = Client::new().await.expect("could not build client"); +// let _ = client +// .open_stream_for_change_set(base_change_set.id.into()) +// .await +// .expect("could not send management"); + +// // Cache expected updates and then perform a rebase. +// let expected_updates = [Update::NewEdge { +// source: snapshot.root().expect("could not get root"), +// destination: forked_snapshot +// .get_node_index_by_id(victoria_monet_id) +// .expect("could not get node index"), +// edge_weight: forked_snapshot +// .get_edge_by_index_stableish(victoria_monet_edge_index) +// .expect("could not find edge by index"), +// }]; +// let response = client +// .request_rebase( +// base_change_set.id.into(), +// forked_snapshot.id().into(), +// forked_change_set.vector_clock_id().into(), +// ) +// .await +// .expect("could not send"); + +// // Ensure the rebase was successful and no updates needed to be performed. +// match response { +// ChangeSetReplyMessage::Success { updates_performed } => { +// let actual_updates: Vec = +// serde_json::from_value(updates_performed).expect("could not deserialize"); +// assert_eq!( +// &expected_updates, // expected +// actual_updates.as_slice() // actual +// ); +// } +// ChangeSetReplyMessage::ConflictsFound { +// conflicts_found, +// updates_found_and_skipped: _, +// } => { +// let conflicts: Vec = +// serde_json::from_value(conflicts_found).expect("could not deserialize"); +// panic!("unexpected conflicts: {conflicts:?}"); +// } +// ChangeSetReplyMessage::Error { message } => { +// panic!("unexpected error: {message}"); +// } +// } + +// // Now, create a conflict. 
+// let mut snapshot = WorkspaceSnapshot::find_for_change_set(ctx, base_change_set.id) +// .await +// .expect("could not find snapshot"); +// snapshot +// .update_content( +// base_change_set, +// olivia_rodrigo_id, +// ContentHash::from("onto updated"), +// ) +// .expect("could not update content"); +// snapshot +// .write(ctx, base_change_set.vector_clock_id()) +// .await +// .expect("could not write snapshot"); +// base_change_set +// .update_pointer(ctx, snapshot.id()) +// .await +// .expect("could not update change set"); +// let mut forked_snapshot = WorkspaceSnapshot::find_for_change_set(ctx, forked_change_set.id) +// .await +// .expect("could not find snapshot"); +// forked_snapshot +// .update_content( +// forked_change_set, +// olivia_rodrigo_id, +// ContentHash::from("to rebase updated"), +// ) +// .expect("could not update content"); +// forked_snapshot +// .write(ctx, forked_change_set.vector_clock_id()) +// .await +// .expect("could not write snapshot"); +// forked_change_set +// .update_pointer(ctx, forked_snapshot.id()) +// .await +// .expect("could not update change set"); + +// // Commit all changes made so that the rebaser can access them. +// ctx.blocking_commit().await.expect("could not commit"); + +// // Cache the expected conflict and perform the rebase with the conflict. +// let expected_conflicts = [Conflict::NodeContent { +// onto: forked_snapshot +// .get_node_index_by_id(olivia_rodrigo_id) +// .expect("could not get node index by id"), +// to_rebase: snapshot +// .get_node_index_by_id(olivia_rodrigo_id) +// .expect("could not get node index by id"), +// }]; +// let response = client +// .request_rebase( +// base_change_set.id.into(), +// forked_snapshot.id().into(), +// forked_change_set.vector_clock_id().into(), +// ) +// .await +// .expect("could not send"); + +// // Ensure we see the conflict. 
+// match response { +// ChangeSetReplyMessage::Success { updates_performed } => { +// let updates_performed: Vec = +// serde_json::from_value(updates_performed).expect("could not deserialize"); +// panic!("unexpected success: {updates_performed:?}") +// } +// ChangeSetReplyMessage::ConflictsFound { +// conflicts_found, +// updates_found_and_skipped, +// } => { +// let actual_conflicts: Vec = +// serde_json::from_value(conflicts_found).expect("could not deserialize"); +// assert_eq!( +// &expected_conflicts, // expected +// actual_conflicts.as_slice() // actual +// ); +// let updates_found_and_skipped: Vec = +// serde_json::from_value(updates_found_and_skipped).expect("could not deserialize"); +// assert!(updates_found_and_skipped.is_empty()); +// } +// ChangeSetReplyMessage::Error { message } => { +// panic!("unexpected error: {message}"); +// } +// } + +// // TODO(nick): move cleanup to the test harness. +// let _ = client +// .close_stream_for_change_set(base_change_set.id.into()) +// .await; +// } diff --git a/lib/dal/tests/integration_test/internal/new_engine/sdf_mock.rs b/lib/dal/tests/integration_test/internal/new_engine/sdf_mock.rs new file mode 100644 index 0000000000..8c2a77aaf2 --- /dev/null +++ b/lib/dal/tests/integration_test/internal/new_engine/sdf_mock.rs @@ -0,0 +1,99 @@ +use dal::{ + DalContext, ExternalProviderId, InternalProviderId, Schema, SchemaId, SchemaVariant, + SchemaVariantId, +}; +use dal_test::test; + +#[test] +async fn list_schema_variant_views(ctx: &DalContext) { + let mut schema_variant_views: Vec = Vec::new(); + + let schemas = Schema::list(ctx).await.expect("could not list schemas"); + for schema in schemas { + if schema.ui_hidden { + continue; + } + + let schema_variants = SchemaVariant::list_for_schema(ctx, schema.id()) + .await + .expect("could not list schema variants for schema"); + for schema_variant in schema_variants { + if schema_variant.ui_hidden() { + continue; + } + + let mut input_sockets = Vec::new(); + let mut 
output_sockets = Vec::new(); + + let (external_providers, explicit_internal_providers) = + SchemaVariant::list_external_providers_and_explicit_internal_providers( + ctx, + schema_variant.id(), + ) + .await + .expect("could not list external providers and explicit internal providers"); + + for explicit_internal_provider in explicit_internal_providers { + input_sockets.push(InputSocketView { + id: explicit_internal_provider.id(), + name: explicit_internal_provider.name().to_owned(), + }) + } + + for external_provider in external_providers { + output_sockets.push(OutputSocketView { + id: external_provider.id(), + name: external_provider.name().to_owned(), + }) + } + + schema_variant_views.push(SchemaVariantView { + id: schema_variant.id(), + // FIXME(nick): use the real value here + builtin: true, + // builtin: schema_variant.is_builtin(ctx).await?, + name: schema_variant.name().to_owned(), + schema_id: schema.id(), + schema_name: schema.name.to_owned(), + color: schema_variant + .get_color(ctx) + .await + .expect("could not get color") + .unwrap_or("#0F0F0F".into()), + category: schema_variant.category().to_owned(), + input_sockets, + output_sockets, + }); + } + } + + dbg!(schema_variant_views); +} + +#[allow(dead_code)] +#[derive(Debug)] +pub struct OutputSocketView { + id: ExternalProviderId, + name: String, +} + +#[allow(dead_code)] +#[derive(Debug)] +pub struct InputSocketView { + id: InternalProviderId, + name: String, +} + +#[allow(dead_code)] +#[derive(Debug)] +pub struct SchemaVariantView { + id: SchemaVariantId, + builtin: bool, + name: String, + schema_name: String, + schema_id: SchemaId, + color: String, + category: String, + input_sockets: Vec, + output_sockets: Vec, +} diff --git a/lib/dal/tests/integration_test/internal/secret.rs b/lib/dal/tests/integration_test/internal/secret.rs index da8292c29f..007ee4c368 100644 --- a/lib/dal/tests/integration_test/internal/secret.rs +++ b/lib/dal/tests/integration_test/internal/secret.rs @@ -1,10 +1,8 @@ -use 
dal::{ - DalContext, EncryptedSecret, Secret, SecretAlgorithm, SecretVersion, StandardModel, - WorkspaceSignup, -}; +use dal::{DalContext, EncryptedSecret, Secret, SecretAlgorithm, SecretVersion, StandardModel}; use dal_test::{ test, test_harness::{create_secret, generate_fake_name}, + WorkspaceSignup, }; #[test] @@ -41,8 +39,7 @@ async fn secret_get_by_id(ctx: &DalContext, nw: &WorkspaceSignup) { let secret = Secret::get_by_id(ctx, og_secret.id()) .await - .expect("failed to get secret") - .expect("failed to find secret in current tenancy and visibility"); + .expect("failed to get secret"); assert_eq!(secret, og_secret); } @@ -50,12 +47,12 @@ async fn secret_get_by_id(ctx: &DalContext, nw: &WorkspaceSignup) { async fn encrypted_secret_get_by_id(ctx: &DalContext, nw: &WorkspaceSignup) { let secret = create_secret(ctx, nw.key_pair.pk()).await; - let encrypted_secret = EncryptedSecret::get_by_id(ctx, secret.id()) + let encrypted_secret = EncryptedSecret::get_by_id(ctx, &secret.id()) .await .expect("failed to get encrypted secret") .expect("failed to find encrypted secret in current tenancy and visibility"); - assert_eq!(secret.id(), encrypted_secret.id()); - assert_eq!(secret.pk(), encrypted_secret.pk()); + assert_eq!(secret.id(), *encrypted_secret.id()); + assert_eq!(secret.pk(), *encrypted_secret.pk()); assert_eq!(secret.name(), encrypted_secret.name()); assert_eq!( secret.description().as_deref(), @@ -64,19 +61,20 @@ async fn encrypted_secret_get_by_id(ctx: &DalContext, nw: &WorkspaceSignup) { assert_eq!(secret.definition(), encrypted_secret.definition()); } -#[test] -async fn secret_update_name(ctx: &DalContext, nw: &WorkspaceSignup) { - let mut secret = create_secret(ctx, nw.key_pair.pk()).await; - - let original_name = secret.name().to_string(); - secret - .set_name(ctx, "even-more-secret") - .await - .expect("failed to set name"); - - assert_ne!(secret.name(), original_name); - assert_eq!(secret.name(), "even-more-secret"); -} +// TODO(nick): this is unused in 
sdf. +// #[test] +// async fn secret_update_name(ctx: &DalContext, nw: &WorkspaceSignup) { +// let mut secret = create_secret(ctx, nw.key_pair.pk()).await; +// +// let original_name = secret.name().to_string(); +// secret +// .set_name(ctx, "even-more-secret") +// .await +// .expect("failed to set name"); +// +// assert_ne!(secret.name(), original_name); +// assert_eq!(secret.name(), "even-more-secret"); +// } #[test] async fn encrypt_decrypt_round_trip(ctx: &DalContext, nw: &WorkspaceSignup) { @@ -102,7 +100,7 @@ async fn encrypt_decrypt_round_trip(ctx: &DalContext, nw: &WorkspaceSignup) { .await .expect("failed to create encrypted secret"); - let decrypted = EncryptedSecret::get_by_id(ctx, secret.id()) + let decrypted = EncryptedSecret::get_by_id(ctx, &secret.id()) .await .expect("failed to fetch encrypted secret") .expect("failed to find encrypted secret for tenancy and/or visibility") diff --git a/lib/dal/tests/integration_test/mod.rs b/lib/dal/tests/integration_test/mod.rs index e5377e54db..581065c3cd 100644 --- a/lib/dal/tests/integration_test/mod.rs +++ b/lib/dal/tests/integration_test/mod.rs @@ -1,6 +1,6 @@ /// Contains tests that will become part of individual package testing (i.e. testing that a "Docker /// Image" connects and works as intended with a "Butane Container"). -mod external; +// mod external; /// Contains tests that test SI directly and use test-exclusive builtins. All tests in this module /// should (eventually) pass with `SI_TEST_BUILTIN_SCHEMAS=test`. 
mod internal; diff --git a/lib/pinga-server/BUCK b/lib/pinga-server/BUCK index ae13882f48..e7ae22f4f0 100644 --- a/lib/pinga-server/BUCK +++ b/lib/pinga-server/BUCK @@ -4,8 +4,10 @@ rust_library( name = "pinga-server", deps = [ "//lib/buck2-resources:buck2-resources", + "//lib/content-store:content-store", "//lib/dal:dal", "//lib/nats-subscriber:nats-subscriber", + "//lib/rebaser-client:rebaser-client", "//lib/si-crypto:si-crypto", "//lib/si-data-nats:si-data-nats", "//lib/si-data-pg:si-data-pg", diff --git a/lib/pinga-server/Cargo.toml b/lib/pinga-server/Cargo.toml index 69da8c9c91..09d68f489c 100644 --- a/lib/pinga-server/Cargo.toml +++ b/lib/pinga-server/Cargo.toml @@ -7,6 +7,7 @@ publish = false [dependencies] buck2-resources = { path = "../../lib/buck2-resources" } +content-store = { path = "../../lib/content-store" } dal = { path = "../../lib/dal" } derive_builder = { workspace = true } futures = { workspace = true } @@ -14,6 +15,7 @@ nats-subscriber = { path = "../../lib/nats-subscriber" } remain = { workspace = true } serde = { workspace = true } serde_json = { workspace = true } +rebaser-client = { path = "../../lib/rebaser-client" } si-crypto = { path = "../../lib/si-crypto" } si-data-nats = { path = "../../lib/si-data-nats" } si-data-pg = { path = "../../lib/si-data-pg" } diff --git a/lib/pinga-server/src/config.rs b/lib/pinga-server/src/config.rs index 21337b8fac..c09ec0f95b 100644 --- a/lib/pinga-server/src/config.rs +++ b/lib/pinga-server/src/config.rs @@ -1,6 +1,7 @@ use std::{env, path::Path}; use buck2_resources::Buck2Resources; +use content_store::PgStoreTools; use derive_builder::Builder; use serde::{Deserialize, Serialize}; use si_crypto::{CryptoConfig, SymmetricCryptoServiceConfig, SymmetricCryptoServiceConfigFile}; @@ -9,10 +10,9 @@ use si_data_pg::PgPoolConfig; use si_std::CanonicalFileError; use telemetry::prelude::*; use thiserror::Error; +use ulid::Ulid; -pub use si_crypto::CycloneKeyPair; pub use si_settings::{StandardConfig, 
StandardConfigFile}; -use ulid::Ulid; const DEFAULT_CONCURRENCY_LIMIT: usize = 5; @@ -56,6 +56,9 @@ pub struct Config { #[builder(default = "SymmetricCryptoServiceConfig::default()")] symmetric_crypto_service: SymmetricCryptoServiceConfig, + + #[builder(default = "PgStoreTools::default_pool_config()")] + content_store_pg_pool: PgPoolConfig, } impl StandardConfig for Config { @@ -99,12 +102,20 @@ impl Config { pub fn instance_id(&self) -> &str { self.instance_id.as_ref() } + + /// Gets a reference to the config's content store pg pool. + #[must_use] + pub fn content_store_pg_pool(&self) -> &PgPoolConfig { + &self.content_store_pg_pool + } } #[derive(Clone, Debug, Deserialize, Serialize)] pub struct ConfigFile { #[serde(default)] pg: PgPoolConfig, + #[serde(default = "PgStoreTools::default_pool_config")] + content_store_pg: PgPoolConfig, #[serde(default)] nats: NatsConfig, #[serde(default)] @@ -121,6 +132,7 @@ impl Default for ConfigFile { fn default() -> Self { Self { pg: Default::default(), + content_store_pg: PgStoreTools::default_pool_config(), nats: Default::default(), concurrency_limit: default_concurrency_limit(), crypto: Default::default(), @@ -142,6 +154,7 @@ impl TryFrom for Config { let mut config = Config::builder(); config.pg_pool(value.pg); + config.content_store_pg_pool(value.content_store_pg); config.nats(value.nats); config.crypto(value.crypto); config.concurrency(value.concurrency_limit); @@ -210,7 +223,8 @@ fn buck2_development(config: &mut ConfigFile) -> Result<()> { active_key_base64: None, extra_keys: vec![], }; - config.pg.certificate_path = Some(postgres_key.try_into()?); + config.pg.certificate_path = Some(postgres_key.clone().try_into()?); + config.content_store_pg.certificate_path = Some(postgres_key.try_into()?); Ok(()) } @@ -242,7 +256,8 @@ fn cargo_development(dir: String, config: &mut ConfigFile) -> Result<()> { active_key_base64: None, extra_keys: vec![], }; - config.pg.certificate_path = Some(postgres_key.try_into()?); + 
config.pg.certificate_path = Some(postgres_key.clone().try_into()?); + config.content_store_pg.certificate_path = Some(postgres_key.try_into()?); Ok(()) } diff --git a/lib/pinga-server/src/server.rs b/lib/pinga-server/src/server.rs index c5f8c6db48..18f5270863 100644 --- a/lib/pinga-server/src/server.rs +++ b/lib/pinga-server/src/server.rs @@ -3,14 +3,15 @@ use std::{io, sync::Arc}; use dal::{ job::{ consumer::{JobConsumer, JobConsumerError, JobInfo}, - definition::{FixesJob, RefreshJob}, + definition::DependentValuesUpdate, producer::BlockingJobError, }, - DalContext, DalContextBuilder, DependentValuesUpdate, InitializationError, JobFailure, - JobFailureError, JobQueueProcessor, NatsProcessor, ServicesContext, TransactionsError, + DalContext, DalContextBuilder, InitializationError, JobFailure, JobFailureError, + JobQueueProcessor, NatsProcessor, ServicesContext, TransactionsError, }; use futures::{FutureExt, Stream, StreamExt}; use nats_subscriber::{Request, SubscriberError}; +use rebaser_client::Config as RebaserClientConfig; use si_crypto::{ CryptoConfig, SymmetricCryptoError, SymmetricCryptoService, SymmetricCryptoServiceConfig, }; @@ -106,10 +107,12 @@ impl Server { let encryption_key = Self::load_encryption_key(config.crypto().clone()).await?; let nats = Self::connect_to_nats(config.nats()).await?; let pg_pool = Self::create_pg_pool(config.pg_pool()).await?; + let content_store_pg_pool = Self::create_pg_pool(config.content_store_pg_pool()).await?; let veritech = Self::create_veritech_client(nats.clone()); let job_processor = Self::create_job_processor(nats.clone()); let symmetric_crypto_service = Self::create_symmetric_crypto_service(config.symmetric_crypto_service()).await?; + let rebaser_config = RebaserClientConfig::default(); let services_context = ServicesContext::new( pg_pool, @@ -120,6 +123,8 @@ impl Server { None, None, symmetric_crypto_service, + rebaser_config, + content_store_pg_pool, ); Self::from_services( @@ -459,18 +464,17 @@ async fn 
execute_job(mut ctx_builder: DalContextBuilder, job_info: JobInfo) -> R ctx_builder.set_blocking(); } - let job = - match job_info.kind.as_str() { - stringify!(DependentValuesUpdate) => { - Box::new(DependentValuesUpdate::try_from(job_info.clone())?) - as Box - } - stringify!(FixesJob) => Box::new(FixesJob::try_from(job_info.clone())?) - as Box, - stringify!(RefreshJob) => Box::new(RefreshJob::try_from(job_info.clone())?) - as Box, - kind => return Err(ServerError::UnknownJobKind(kind.to_owned())), - }; + let job = match job_info.kind.as_str() { + stringify!(DependentValuesUpdate) => { + Box::new(DependentValuesUpdate::try_from(job_info.clone())?) + as Box + } + // stringify!(FixesJob) => Box::new(FixesJob::try_from(job_info.clone())?) + // as Box, + // stringify!(RefreshJob) => Box::new(RefreshJob::try_from(job_info.clone())?) + // as Box, + kind => return Err(ServerError::UnknownJobKind(kind.to_owned())), + }; info!("Processing job"); @@ -484,6 +488,7 @@ async fn execute_job(mut ctx_builder: DalContextBuilder, job_info: JobInfo) -> R Ok(()) } +#[allow(dead_code)] async fn record_job_failure( ctx_builder: DalContextBuilder, job: Box, diff --git a/lib/rebaser-client/BUCK b/lib/rebaser-client/BUCK new file mode 100644 index 0000000000..62f19380f9 --- /dev/null +++ b/lib/rebaser-client/BUCK @@ -0,0 +1,19 @@ +load("@prelude-si//:macros.bzl", "rust_library") + +rust_library( + name = "rebaser-client", + deps = [ + "//lib/rebaser-core:rebaser-core", + "//lib/si-data-nats:si-data-nats", + "//lib/telemetry-rs:telemetry", + "//third-party/rust:futures", + "//third-party/rust:remain", + "//third-party/rust:serde", + "//third-party/rust:serde_json", + "//third-party/rust:thiserror", + "//third-party/rust:ulid", + ], + srcs = glob([ + "src/**/*.rs", + ]), +) diff --git a/lib/rebaser-client/Cargo.toml b/lib/rebaser-client/Cargo.toml new file mode 100644 index 0000000000..3744ff8aee --- /dev/null +++ b/lib/rebaser-client/Cargo.toml @@ -0,0 +1,17 @@ +[package] +name = 
"rebaser-client" +version = "0.1.0" +edition = "2021" +publish = false + +[dependencies] +rebaser-core = { path = "../../lib/rebaser-core" } +si-data-nats = { path = "../../lib/si-data-nats" } +telemetry = { path = "../../lib/telemetry-rs" } + +futures = { workspace = true } +remain = { workspace = true } +serde = { workspace = true } +serde_json = { workspace = true } +thiserror = { workspace = true } +ulid = { workspace = true } diff --git a/lib/rebaser-client/src/lib.rs b/lib/rebaser-client/src/lib.rs new file mode 100644 index 0000000000..aa1772c97b --- /dev/null +++ b/lib/rebaser-client/src/lib.rs @@ -0,0 +1,141 @@ +//! This crate provides the rebaser [`Client`], which is used for communicating with a running +//! rebaser [`Server`](rebaser_server::Server). + +#![warn( + bad_style, + clippy::missing_panics_doc, + clippy::panic, + clippy::panic_in_result_fn, + clippy::unwrap_in_result, + clippy::unwrap_used, + dead_code, + improper_ctypes, + missing_debug_implementations, + missing_docs, + no_mangle_generic_items, + non_shorthand_field_patterns, + overflowing_literals, + path_statements, + patterns_in_fns_without_body, + rust_2018_idioms, + unconditional_recursion, + unreachable_pub, + unused, + unused_allocation, + unused_comparisons, + unused_parens, + while_true +)] + +use futures::StreamExt; +use rebaser_core::{RebaserMessagingConfig, RequestRebaseMessage, SubjectGenerator}; +use si_data_nats::jetstream::{Context, JetstreamError}; +use si_data_nats::subject::ToSubject; +use si_data_nats::NatsClient; +use telemetry::prelude::error; +use thiserror::Error; +use ulid::Ulid; + +// The client does yet need to have its own config, so it uses the messaging config. 
+pub use rebaser_core::RebaserMessagingConfig as Config; +pub use rebaser_core::ReplyRebaseMessage; + +#[allow(missing_docs)] +#[remain::sorted] +#[derive(Debug, Error)] +pub enum ClientError { + #[error("jetstream error: {0}")] + Jetstream(#[from] JetstreamError), + #[error("nats error: {0}")] + Nats(#[from] si_data_nats::Error), + #[error("serde json error: {0}")] + SerdeJson(#[from] serde_json::Error), + #[error("unexpected empty stream when subscribing to subject: {0}")] + UnexpectedEmptyStream(String), +} + +#[allow(missing_docs)] +pub type ClientResult = Result; + +/// A tenant-scoped client used for communicating with 1:N rebaser servers. +#[derive(Debug)] +pub struct Client { + jetstream_ctx: Context, + nats: NatsClient, + subject_prefix: Option, + workspace_id: Ulid, +} + +impl Client { + /// Creates a new [`Client`]. + pub fn new( + nats: NatsClient, + messaging_config: RebaserMessagingConfig, + workspace_id: Ulid, + ) -> Self { + Self { + jetstream_ctx: nats.clone().to_jetstream_ctx(), + nats, + subject_prefix: messaging_config.subject_prefix().map(ToOwned::to_owned), + workspace_id, + } + } + + /// Publishes a rebase requester to the rebaser stream. + pub async fn request_rebase( + &self, + to_rebase_change_set_id: Ulid, + onto_workspace_snapshot_id: Ulid, + onto_vector_clock_id: Ulid, + ) -> ClientResult { + let subject = SubjectGenerator::request( + self.workspace_id, + to_rebase_change_set_id, + self.subject_prefix.as_ref(), + ); + + let serialized_messaged = serde_json::to_vec(&RequestRebaseMessage { + to_rebase_change_set_id, + onto_workspace_snapshot_id, + onto_vector_clock_id, + })?; + + let reply_subject = self + .jetstream_ctx + .publish_with_reply_mailbox_and_immediately_ack( + &self.nats, + subject, + serialized_messaged.into(), + ) + .await?; + + // NOTE(nick): we may want to add a timeout in the future when waiting for a reply. 
+ self.wait_for_reply(reply_subject).await + } + + async fn wait_for_reply( + &self, + reply_subject: impl ToSubject, + ) -> ClientResult { + let reply_subject = reply_subject.to_subject(); + + let mut subscriber = self.nats.subscribe(reply_subject.clone()).await?; + + // Get the first immediate message (there should only ever be one) and deserialize it. + let message: ReplyRebaseMessage = if let Some(serialized_message) = subscriber.next().await + { + serde_json::from_slice(serialized_message.payload().to_vec().as_slice())? + } else { + return Err(ClientError::UnexpectedEmptyStream( + reply_subject.to_string(), + )); + }; + + // Attempt to unsubscribe. + if let Err(err) = subscriber.unsubscribe().await { + error!(error = ?err, %reply_subject, "error when unsubscribing"); + } + + Ok(message) + } +} diff --git a/lib/rebaser-core/BUCK b/lib/rebaser-core/BUCK new file mode 100644 index 0000000000..3ff014b557 --- /dev/null +++ b/lib/rebaser-core/BUCK @@ -0,0 +1,13 @@ +load("@prelude-si//:macros.bzl", "rust_library") + +rust_library( + name = "rebaser-core", + deps = [ + "//third-party/rust:serde", + "//third-party/rust:serde_json", + "//third-party/rust:ulid", + ], + srcs = glob([ + "src/**/*.rs", + ]), +) diff --git a/lib/rebaser-core/Cargo.toml b/lib/rebaser-core/Cargo.toml new file mode 100644 index 0000000000..4a4cbf5dd2 --- /dev/null +++ b/lib/rebaser-core/Cargo.toml @@ -0,0 +1,10 @@ +[package] +name = "rebaser-core" +version = "0.1.0" +edition = "2021" +publish = false + +[dependencies] +serde = { workspace = true } +serde_json = { workspace = true } +ulid = { workspace = true } diff --git a/lib/rebaser-core/src/lib.rs b/lib/rebaser-core/src/lib.rs new file mode 100644 index 0000000000..7af430585d --- /dev/null +++ b/lib/rebaser-core/src/lib.rs @@ -0,0 +1,77 @@ +//! This library exists to ensure that crate "rebaser-client" crate does not depend on the "rebaser-server" crate and +//! vice versa. 
Keeping the dependency chain intact is important because "rebaser-server" depends on the +//! dal. The dal, and any crate other than "rebaser-server" and this crate, must be able to use the "rebaser-client". + +#![warn( + bad_style, + clippy::missing_panics_doc, + clippy::panic, + clippy::panic_in_result_fn, + clippy::unwrap_in_result, + clippy::unwrap_used, + dead_code, + improper_ctypes, + missing_debug_implementations, + missing_docs, + no_mangle_generic_items, + non_shorthand_field_patterns, + overflowing_literals, + path_statements, + patterns_in_fns_without_body, + rust_2018_idioms, + unconditional_recursion, + unreachable_pub, + unused, + unused_allocation, + unused_comparisons, + unused_parens, + while_true +)] + +use serde::Deserialize; +use serde::Serialize; +use serde_json::Value; +use ulid::Ulid; + +mod messaging_config; +mod subject; + +pub use messaging_config::RebaserMessagingConfig; +pub use subject::SubjectGenerator; + +/// The message that the server receives to perform a rebase. +#[derive(Debug, Serialize, Deserialize, Copy, Clone)] +pub struct RequestRebaseMessage { + /// Corresponds to the change set whose pointer is to be updated. + pub to_rebase_change_set_id: Ulid, + /// Corresponds to the workspace snapshot that will be the "onto" workspace snapshot when + /// rebasing the "to rebase" workspace snapshot. + pub onto_workspace_snapshot_id: Ulid, + /// Derived from the ephemeral or persisted change set that's either the base change set, the + /// last change set before edits were made, or the change set that you are trying to rebase + /// onto base. + pub onto_vector_clock_id: Ulid, +} + +/// The message that the server sends back to the requester. +#[derive(Debug, Serialize, Deserialize)] +pub enum ReplyRebaseMessage { + /// Processing the request and performing updates were both successful. Additionally, no conflicts were found. + Success { + /// The serialized updates performed when rebasing. 
+ updates_performed: Value, + }, + /// Conflicts found when processing the request. + ConflictsFound { + /// A serialized list of the conflicts found during detection. + conflicts_found: Value, + /// A serialized list of the updates found during detection and skipped because at least + /// one conflict was found. + updates_found_and_skipped: Value, + }, + /// Error encountered when processing the request. + Error { + /// The error message. + message: String, + }, +} diff --git a/lib/rebaser-core/src/messaging_config.rs b/lib/rebaser-core/src/messaging_config.rs new file mode 100644 index 0000000000..26f365ec1a --- /dev/null +++ b/lib/rebaser-core/src/messaging_config.rs @@ -0,0 +1,29 @@ +//! This module contains [`RebaserMessagingConfig`], which is the config used for the messaging layer between "rebaser" +//! servers and clients. + +use serde::{Deserialize, Serialize}; + +/// A config used for the messaging layer between "rebaser" servers and clients. +#[derive(Clone, Debug, Serialize, Deserialize, Default)] +pub struct RebaserMessagingConfig { + subject_prefix: Option, +} + +impl RebaserMessagingConfig { + /// Creates a new [`messaging config`](RebaserMessagingConfig). + pub fn new(subject_prefix: Option) -> Self { + Self { subject_prefix } + } + + /// The subject prefix used for creating, using and deleting + /// [NATS Jetstream](https://docs.nats.io/nats-concepts/jetstream) streams. + pub fn subject_prefix(&self) -> Option<&str> { + self.subject_prefix.as_deref() + } + + /// Sets the subject prefix on the config. + pub fn set_subject_prefix(&mut self, subject_prefix: impl Into) -> &mut Self { + self.subject_prefix = Some(subject_prefix.into()); + self + } +} diff --git a/lib/rebaser-core/src/subject.rs b/lib/rebaser-core/src/subject.rs new file mode 100644 index 0000000000..a423eecf59 --- /dev/null +++ b/lib/rebaser-core/src/subject.rs @@ -0,0 +1,44 @@ +//! This module contains [`SubjectGenerator`] which is used to centralize subject naming for +//! 
"rebaser" client and server setup and communication. + +use ulid::Ulid; + +/// A generator that provides subject names in a centralized location. +#[allow(missing_debug_implementations)] +pub struct SubjectGenerator; + +impl SubjectGenerator { + /// Returns the root subject for all rebaser-related-messages. + pub fn root(subject_prefix: Option>) -> String { + Self::assemble_with_prefix("rebaser", subject_prefix) + } + + /// Returns the subject covering all rebaser-related messages. + pub fn all(subject_prefix: Option>) -> String { + Self::assemble_with_prefix("rebaser.>", subject_prefix) + } + + /// Returns the subject used for publishing a rebase request. + pub fn request( + workspace_id: Ulid, + change_set_id: Ulid, + subject_prefix: Option>, + ) -> String { + Self::assemble_with_prefix( + format!("rebaser.ws.{workspace_id}.cs.{change_set_id}"), + subject_prefix, + ) + } + + fn assemble_with_prefix( + base_subject_name: impl AsRef, + maybe_subject_prefix: Option>, + ) -> String { + let base_subject_name = base_subject_name.as_ref(); + if let Some(subject_prefix) = maybe_subject_prefix { + format!("{}-{base_subject_name}", subject_prefix.as_ref()) + } else { + base_subject_name.to_string() + } + } +} diff --git a/lib/rebaser-server/BUCK b/lib/rebaser-server/BUCK new file mode 100644 index 0000000000..db5d5b48c1 --- /dev/null +++ b/lib/rebaser-server/BUCK @@ -0,0 +1,32 @@ +load("@prelude-si//:macros.bzl", "rust_library") + +rust_library( + name = "rebaser-server", + deps = [ + "//lib/buck2-resources:buck2-resources", + "//lib/content-store:content-store", + "//lib/dal:dal", + "//lib/nats-subscriber:nats-subscriber", + "//lib/rebaser-core:rebaser-core", + "//lib/si-crypto:si-crypto", + "//lib/si-data-nats:si-data-nats", + "//lib/si-data-pg:si-data-pg", + "//lib/si-settings:si-settings", + "//lib/si-std:si-std", + "//lib/si-test-macros:si-test-macros", + "//lib/telemetry-rs:telemetry", + "//lib/veritech-client:veritech-client", + "//third-party/rust:derive_builder", 
+ "//third-party/rust:futures", + "//third-party/rust:remain", + "//third-party/rust:serde", + "//third-party/rust:serde_json", + "//third-party/rust:stream-cancel", + "//third-party/rust:thiserror", + "//third-party/rust:tokio", + "//third-party/rust:ulid", + ], + srcs = glob([ + "src/**/*.rs", + ]), +) diff --git a/lib/rebaser-server/Cargo.toml b/lib/rebaser-server/Cargo.toml new file mode 100644 index 0000000000..3cee8f3907 --- /dev/null +++ b/lib/rebaser-server/Cargo.toml @@ -0,0 +1,30 @@ +[package] +name = "rebaser-server" +version = "0.1.0" +edition = "2021" +publish = false + +[dependencies] +buck2-resources = { path = "../../lib/buck2-resources" } +content-store = { path = "../../lib/content-store" } +dal = { path = "../../lib/dal" } +nats-subscriber = { path = "../../lib/nats-subscriber" } +rebaser-core = { path = "../../lib/rebaser-core" } +si-crypto = { path = "../../lib/si-crypto" } +si-data-nats = { path = "../../lib/si-data-nats" } +si-data-pg = { path = "../../lib/si-data-pg" } +si-settings = { path = "../../lib/si-settings" } +si-std = { path = "../../lib/si-std" } +si-test-macros = { path = "../../lib/si-test-macros" } +telemetry = { path = "../../lib/telemetry-rs" } +veritech-client = { path = "../../lib/veritech-client" } + +derive_builder = { workspace = true } +futures = { workspace = true } +remain = { workspace = true } +serde = { workspace = true } +serde_json = { workspace = true } +stream-cancel = { workspace = true } +thiserror = { workspace = true } +tokio = { workspace = true } +ulid = { workspace = true } diff --git a/lib/rebaser-server/src/config.rs b/lib/rebaser-server/src/config.rs new file mode 100644 index 0000000000..6b7186967c --- /dev/null +++ b/lib/rebaser-server/src/config.rs @@ -0,0 +1,249 @@ +use std::{env, path::Path}; + +use buck2_resources::Buck2Resources; +use content_store::PgStoreTools; +use derive_builder::Builder; +use rebaser_core::RebaserMessagingConfig; +use serde::{Deserialize, Serialize}; +use 
si_crypto::{SymmetricCryptoServiceConfig, SymmetricCryptoServiceConfigFile}; +use si_data_nats::NatsConfig; +use si_data_pg::PgPoolConfig; +use si_std::{CanonicalFile, CanonicalFileError}; +use telemetry::prelude::*; +use thiserror::Error; + +use crate::StandardConfig; +use crate::StandardConfigFile; + +#[allow(missing_docs)] +#[remain::sorted] +#[derive(Debug, Error)] +pub enum ConfigError { + #[error("config builder")] + Builder(#[from] ConfigBuilderError), + #[error(transparent)] + CanonicalFile(#[from] CanonicalFileError), + #[error("error configuring for development")] + Development(#[source] Box), + #[error(transparent)] + Settings(#[from] si_settings::SettingsError), +} + +impl ConfigError { + fn development(err: impl std::error::Error + 'static + Sync + Send) -> Self { + Self::Development(Box::new(err)) + } +} + +type Result = std::result::Result; + +#[allow(missing_docs)] +#[derive(Debug, Builder)] +pub struct Config { + #[builder(default = "PgPoolConfig::default()")] + pg_pool: PgPoolConfig, + + #[builder(default = "NatsConfig::default()")] + nats: NatsConfig, + + cyclone_encryption_key_path: CanonicalFile, + + symmetric_crypto_service: SymmetricCryptoServiceConfig, + + #[builder(default)] + messaging_config: RebaserMessagingConfig, + + #[builder(default = "PgStoreTools::default_pool_config()")] + content_store_pg_pool: PgPoolConfig, +} + +impl StandardConfig for Config { + type Builder = ConfigBuilder; +} + +impl Config { + /// Gets a reference to the config's pg pool. + #[must_use] + pub fn pg_pool(&self) -> &PgPoolConfig { + &self.pg_pool + } + + /// Gets a reference to the config's nats. + #[must_use] + pub fn nats(&self) -> &NatsConfig { + &self.nats + } + + /// Gets a reference to the config's subject prefix. + pub fn subject_prefix(&self) -> Option<&str> { + self.nats.subject_prefix.as_deref() + } + + /// Gets a reference to the config's cyclone public key path. 
+ #[must_use] + pub fn cyclone_encryption_key_path(&self) -> &Path { + self.cyclone_encryption_key_path.as_path() + } + + /// Gets a reference to the symmetric crypto service. + pub fn symmetric_crypto_service(&self) -> &SymmetricCryptoServiceConfig { + &self.symmetric_crypto_service + } + + /// Gets a reference to the messaging config + pub fn messaging_config(&self) -> &RebaserMessagingConfig { + &self.messaging_config + } + + /// Gets a reference to the config's content store pg pool. + #[must_use] + pub fn content_store_pg_pool(&self) -> &PgPoolConfig { + &self.content_store_pg_pool + } +} + +/// The configuration file for creating a [`Server`]. +#[derive(Clone, Debug, Deserialize, Serialize)] +pub struct ConfigFile { + #[serde(default)] + pg: PgPoolConfig, + #[serde(default = "PgStoreTools::default_pool_config")] + content_store_pg: PgPoolConfig, + #[serde(default)] + nats: NatsConfig, + #[serde(default = "default_cyclone_encryption_key_path")] + cyclone_encryption_key_path: String, + #[serde(default = "default_symmetric_crypto_config")] + symmetric_crypto_service: SymmetricCryptoServiceConfigFile, + #[serde(default)] + messaging_config: RebaserMessagingConfig, +} + +impl Default for ConfigFile { + fn default() -> Self { + Self { + pg: Default::default(), + content_store_pg: PgStoreTools::default_pool_config(), + nats: Default::default(), + cyclone_encryption_key_path: default_cyclone_encryption_key_path(), + symmetric_crypto_service: default_symmetric_crypto_config(), + messaging_config: Default::default(), + } + } +} + +impl StandardConfigFile for ConfigFile { + type Error = ConfigError; +} + +impl TryFrom for Config { + type Error = ConfigError; + + fn try_from(mut value: ConfigFile) -> Result { + detect_and_configure_development(&mut value)?; + + let mut config = Config::builder(); + config.pg_pool(value.pg); + config.content_store_pg_pool(value.content_store_pg); + config.nats(value.nats); + 
config.cyclone_encryption_key_path(value.cyclone_encryption_key_path.try_into()?); + config.symmetric_crypto_service(value.symmetric_crypto_service.try_into()?); + config.build().map_err(Into::into) + } +} + +fn default_cyclone_encryption_key_path() -> String { + "/run/rebaser/cyclone_encryption.key".to_string() +} + +fn default_symmetric_crypto_config() -> SymmetricCryptoServiceConfigFile { + SymmetricCryptoServiceConfigFile { + active_key: Some("/run/rebaser/donkey.key".to_owned()), + active_key_base64: None, + extra_keys: vec![], + } +} + +/// This function is used to determine the development environment and update the [`ConfigFile`] +/// accordingly. +#[allow(clippy::disallowed_methods)] +pub fn detect_and_configure_development(config: &mut ConfigFile) -> Result<()> { + if env::var("BUCK_RUN_BUILD_ID").is_ok() || env::var("BUCK_BUILD_ID").is_ok() { + buck2_development(config) + } else if let Ok(dir) = env::var("CARGO_MANIFEST_DIR") { + cargo_development(dir, config) + } else { + Ok(()) + } +} + +fn buck2_development(config: &mut ConfigFile) -> Result<()> { + let resources = Buck2Resources::read().map_err(ConfigError::development)?; + + let cyclone_encryption_key_path = resources + .get_ends_with("dev.encryption.key") + .map_err(ConfigError::development)? + .to_string_lossy() + .to_string(); + let symmetric_crypto_service_key = resources + .get_ends_with("dev.donkey.key") + .map_err(ConfigError::development)? + .to_string_lossy() + .to_string(); + let postgres_cert = resources + .get_ends_with("dev.postgres.root.crt") + .map_err(ConfigError::development)? 
+ .to_string_lossy() + .to_string(); + + warn!( + cyclone_encryption_key_path = cyclone_encryption_key_path.as_str(), + symmetric_crypto_service_key = symmetric_crypto_service_key.as_str(), + postgres_cert = postgres_cert.as_str(), + "detected development run", + ); + + config.cyclone_encryption_key_path = cyclone_encryption_key_path; + config.symmetric_crypto_service = SymmetricCryptoServiceConfigFile { + active_key: Some(symmetric_crypto_service_key), + active_key_base64: None, + extra_keys: vec![], + }; + config.pg.certificate_path = Some(postgres_cert.clone().try_into()?); + config.content_store_pg.certificate_path = Some(postgres_cert.try_into()?); + + Ok(()) +} + +fn cargo_development(dir: String, config: &mut ConfigFile) -> Result<()> { + let cyclone_encryption_key_path = Path::new(&dir) + .join("../../lib/cyclone-server/src/dev.encryption.key") + .to_string_lossy() + .to_string(); + let symmetric_crypto_service_key = Path::new(&dir) + .join("../../lib/dal/dev.donkey.key") + .to_string_lossy() + .to_string(); + let postgres_cert = Path::new(&dir) + .join("../../config/keys/dev.postgres.root.crt") + .to_string_lossy() + .to_string(); + + warn!( + cyclone_encryption_key_path = cyclone_encryption_key_path.as_str(), + symmetric_crypto_service_key = symmetric_crypto_service_key.as_str(), + postgres_cert = postgres_cert.as_str(), + "detected development run", + ); + + config.cyclone_encryption_key_path = cyclone_encryption_key_path; + config.symmetric_crypto_service = SymmetricCryptoServiceConfigFile { + active_key: Some(symmetric_crypto_service_key), + active_key_base64: None, + extra_keys: vec![], + }; + config.pg.certificate_path = Some(postgres_cert.clone().try_into()?); + config.content_store_pg.certificate_path = Some(postgres_cert.try_into()?); + + Ok(()) +} diff --git a/lib/rebaser-server/src/lib.rs b/lib/rebaser-server/src/lib.rs new file mode 100644 index 0000000000..ce6fd274c8 --- /dev/null +++ b/lib/rebaser-server/src/lib.rs @@ -0,0 +1,40 @@ +//! 
This crate provides the rebaser [`Server`]. + +#![warn( + bad_style, + clippy::missing_panics_doc, + clippy::panic, + clippy::panic_in_result_fn, + clippy::unwrap_in_result, + clippy::unwrap_used, + dead_code, + improper_ctypes, + missing_debug_implementations, + missing_docs, + no_mangle_generic_items, + non_shorthand_field_patterns, + overflowing_literals, + path_statements, + patterns_in_fns_without_body, + rust_2018_idioms, + unconditional_recursion, + unreachable_pub, + unused, + unused_allocation, + unused_comparisons, + unused_parens, + while_true +)] + +pub use config::detect_and_configure_development; +pub use config::Config; +pub use config::ConfigBuilder; +pub use config::ConfigError; +pub use config::ConfigFile; +pub use rebaser_core::RebaserMessagingConfig; +pub use server::Server; +pub use si_settings::StandardConfig; +pub use si_settings::StandardConfigFile; + +mod config; +mod server; diff --git a/lib/rebaser-server/src/server.rs b/lib/rebaser-server/src/server.rs new file mode 100644 index 0000000000..fd39e96f90 --- /dev/null +++ b/lib/rebaser-server/src/server.rs @@ -0,0 +1,293 @@ +use dal::{InitializationError, JobQueueProcessor, NatsProcessor}; +use rebaser_core::RebaserMessagingConfig; +use si_crypto::SymmetricCryptoServiceConfig; +use si_crypto::{SymmetricCryptoError, SymmetricCryptoService}; +use si_data_nats::{NatsClient, NatsConfig, NatsError}; +use si_data_pg::{PgPool, PgPoolConfig, PgPoolError}; +use std::{io, path::Path, sync::Arc}; +use telemetry::prelude::*; +use thiserror::Error; +use tokio::{ + signal::unix, + sync::{ + mpsc::{self}, + oneshot, watch, + }, +}; +use veritech_client::{Client as VeritechClient, CycloneEncryptionKey, CycloneEncryptionKeyError}; + +use crate::server::core_loop::CoreLoopSetupError; +use crate::Config; + +mod core_loop; +mod rebase; + +#[allow(missing_docs)] +#[remain::sorted] +#[derive(Debug, Error)] +pub enum ServerError { + #[error("core loop setup error: {0}")] + CoreLoopSetup(#[from] 
CoreLoopSetupError), + #[error("error when loading encryption key: {0}")] + CycloneEncryptionKey(#[from] CycloneEncryptionKeyError), + #[error(transparent)] + Initialization(#[from] InitializationError), + #[error(transparent)] + Nats(#[from] NatsError), + #[error(transparent)] + PgPool(#[from] Box), + #[error("failed to setup signal handler")] + Signal(#[source] io::Error), + #[error("symmetric crypto error: {0}")] + SymmetricCrypto(#[from] SymmetricCryptoError), +} + +impl From for ServerError { + fn from(e: PgPoolError) -> Self { + Self::PgPool(Box::new(e)) + } +} + +type ServerResult = Result; + +/// The [`Server`] for managing rebaser tasks. +#[allow(missing_debug_implementations)] +pub struct Server { + encryption_key: Arc, + nats: NatsClient, + pg_pool: PgPool, + veritech: VeritechClient, + job_processor: Box, + symmetric_crypto_service: SymmetricCryptoService, + /// An internal shutdown watch receiver handle which can be provided to internal tasks which + /// want to be notified when a shutdown event is in progress. + shutdown_watch_rx: watch::Receiver<()>, + /// An external shutdown sender handle which can be handed out to external callers who wish to + /// trigger a server shutdown at will. + external_shutdown_tx: mpsc::Sender, + /// An internal graceful shutdown receiver handle which the server's main thread uses to stop + /// accepting work when a shutdown event is in progress. + graceful_shutdown_rx: oneshot::Receiver<()>, + /// The messaging configuration + messaging_config: RebaserMessagingConfig, + /// The pg pool for the content store + content_store_pg_pool: PgPool, +} + +impl Server { + /// Build a [`Server`] from a given [`Config`]. 
+ #[instrument(name = "rebaser.init.from_config", skip_all)] + pub async fn from_config(config: Config) -> ServerResult { + dal::init()?; + + let encryption_key = + Self::load_encryption_key(config.cyclone_encryption_key_path()).await?; + let nats = Self::connect_to_nats(config.nats()).await?; + let pg_pool = Self::create_pg_pool(config.pg_pool()).await?; + let content_store_pg_pool = Self::create_pg_pool(config.content_store_pg_pool()).await?; + let veritech = Self::create_veritech_client(nats.clone()); + let job_processor = Self::create_job_processor(nats.clone()); + let symmetric_crypto_service = + Self::create_symmetric_crypto_service(config.symmetric_crypto_service()).await?; + let messaging_config = config.messaging_config(); + + Self::from_services( + encryption_key, + nats, + pg_pool, + veritech, + job_processor, + symmetric_crypto_service, + messaging_config.to_owned(), + content_store_pg_pool, + ) + } + + /// Build a [`Server`] from information provided via companion services. + #[allow(clippy::too_many_arguments)] + #[instrument(name = "rebaser.init.from_services", skip_all)] + pub fn from_services( + encryption_key: Arc, + nats: NatsClient, + pg_pool: PgPool, + veritech: VeritechClient, + job_processor: Box, + symmetric_crypto_service: SymmetricCryptoService, + messaging_config: RebaserMessagingConfig, + content_store_pg_pool: PgPool, + ) -> ServerResult { + // An mpsc channel which can be used to externally shut down the server. + let (external_shutdown_tx, external_shutdown_rx) = mpsc::channel(4); + // A watch channel used to notify internal parts of the server that a shutdown event is in + // progress. The value passed along is irrelevant--we only care that the event was + // triggered and react accordingly. 
+ let (shutdown_watch_tx, shutdown_watch_rx) = watch::channel(()); + + dal::init()?; + + let graceful_shutdown_rx = + prepare_graceful_shutdown(external_shutdown_rx, shutdown_watch_tx)?; + + Ok(Server { + pg_pool, + nats, + veritech, + encryption_key, + job_processor, + symmetric_crypto_service, + shutdown_watch_rx, + external_shutdown_tx, + graceful_shutdown_rx, + messaging_config, + content_store_pg_pool, + }) + } + + /// The primary function for running the server. This should be called when deciding to run + /// the server as a task, in a standalone binary, etc. + pub async fn run(self) -> ServerResult<()> { + core_loop::setup_and_run_core_loop( + self.pg_pool, + self.nats, + self.veritech, + self.job_processor, + self.symmetric_crypto_service, + self.encryption_key, + self.shutdown_watch_rx, + self.messaging_config, + self.content_store_pg_pool, + ) + .await?; + + let _ = self.graceful_shutdown_rx.await; + info!("received and processed graceful shutdown, terminating server instance"); + + Ok(()) + } + + /// Gets a [`ShutdownHandle`](ServerShutdownHandle) that can externally or on demand trigger the server's shutdown + /// process. 
+ pub fn shutdown_handle(&self) -> ServerShutdownHandle { + ServerShutdownHandle { + shutdown_tx: self.external_shutdown_tx.clone(), + } + } + + #[instrument(name = "gobbler.init.load_encryption_key", skip_all)] + async fn load_encryption_key( + path: impl AsRef, + ) -> ServerResult> { + Ok(Arc::new(CycloneEncryptionKey::load(path).await?)) + } + + #[instrument(name = "rebaser.init.connect_to_nats", skip_all)] + async fn connect_to_nats(nats_config: &NatsConfig) -> ServerResult { + let client = NatsClient::new(nats_config).await?; + debug!("successfully connected nats client"); + Ok(client) + } + + #[instrument(name = "rebaser.init.create_pg_pool", skip_all)] + async fn create_pg_pool(pg_pool_config: &PgPoolConfig) -> ServerResult { + dbg!(&pg_pool_config); + let pool = PgPool::new(pg_pool_config).await?; + debug!("successfully started pg pool (note that not all connections may be healthy)"); + Ok(pool) + } + + #[instrument(name = "rebaser.init.create_veritech_client", skip_all)] + fn create_veritech_client(nats: NatsClient) -> VeritechClient { + VeritechClient::new(nats) + } + + #[instrument(name = "rebaser.init.create_job_processor", skip_all)] + fn create_job_processor(nats: NatsClient) -> Box { + Box::new(NatsProcessor::new(nats)) as Box + } + + #[instrument(name = "pinga.init.create_symmetric_crypto_service", skip_all)] + async fn create_symmetric_crypto_service( + config: &SymmetricCryptoServiceConfig, + ) -> ServerResult { + SymmetricCryptoService::from_config(config) + .await + .map_err(Into::into) + } +} + +#[allow(missing_docs, missing_debug_implementations)] +pub struct ServerShutdownHandle { + shutdown_tx: mpsc::Sender, +} + +impl ServerShutdownHandle { + /// Perform server shutdown with the handle. 
+ pub async fn shutdown(self) { + if let Err(err) = self.shutdown_tx.send(ShutdownSource::Handle).await { + warn!(error = ?err, "shutdown tx returned error, receiver is likely already closed"); + } + } +} + +#[remain::sorted] +#[derive(Debug, Eq, PartialEq)] +enum ShutdownSource { + Handle, +} + +impl Default for ShutdownSource { + fn default() -> Self { + Self::Handle + } +} + +fn prepare_graceful_shutdown( + mut external_shutdown_rx: mpsc::Receiver, + shutdown_watch_tx: watch::Sender<()>, +) -> ServerResult> { + // A oneshot channel signaling the start of a graceful shutdown. Receivers can use this to + // perform any clean/graceful shutdown work that needs to happen to preserve server integrity. + let (graceful_shutdown_tx, graceful_shutdown_rx) = oneshot::channel::<()>(); + // A stream of `SIGTERM` signals, emitted as the process receives them. + let mut sigterm_stream = + unix::signal(unix::SignalKind::terminate()).map_err(ServerError::Signal)?; + + tokio::spawn(async move { + fn send_graceful_shutdown( + graceful_shutdown_tx: oneshot::Sender<()>, + shutdown_watch_tx: watch::Sender<()>, + ) { + // Send shutdown to all long running subscriptions, so they can cleanly terminate + if shutdown_watch_tx.send(()).is_err() { + error!("all watch shutdown receivers have already been dropped"); + } + // Send graceful shutdown to main server thread which stops it from accepting requests. + // We'll do this step last so as to let all subscriptions have a chance to shutdown. + if graceful_shutdown_tx.send(()).is_err() { + error!("the server graceful shutdown receiver has already dropped"); + } + } + + info!("spawned graceful shutdown handler"); + + tokio::select! 
{ + _ = sigterm_stream.recv() => { + info!("received SIGTERM signal, performing graceful shutdown"); + send_graceful_shutdown(graceful_shutdown_tx, shutdown_watch_tx); + } + source = external_shutdown_rx.recv() => { + info!( + "received external shutdown, performing graceful shutdown; source={:?}", + source, + ); + send_graceful_shutdown(graceful_shutdown_tx, shutdown_watch_tx); + } + else => { + // All other arms are closed, nothing left to do but return + trace!("returning from graceful shutdown with all select arms closed"); + } + }; + }); + + Ok(graceful_shutdown_rx) +} diff --git a/lib/rebaser-server/src/server/core_loop.rs b/lib/rebaser-server/src/server/core_loop.rs new file mode 100644 index 0000000000..5ef1d6ebd6 --- /dev/null +++ b/lib/rebaser-server/src/server/core_loop.rs @@ -0,0 +1,202 @@ +use dal::{ + DalContext, DalContextBuilder, JobQueueProcessor, ServicesContext, Tenancy, TransactionsError, + Visibility, WorkspacePk, +}; +use futures::FutureExt; +use futures::StreamExt; +use rebaser_core::{ + RebaserMessagingConfig, ReplyRebaseMessage, RequestRebaseMessage, SubjectGenerator, +}; +use si_crypto::SymmetricCryptoService; +use si_data_nats::jetstream::{AckKind, JetstreamError, Stream, REPLY_SUBJECT_HEADER_NAME}; +use si_data_nats::subject::ToSubject; +use si_data_nats::NatsClient; +use si_data_pg::PgPool; +use std::sync::Arc; +use std::time::Instant; +use stream_cancel::StreamExt as CancelStreamExt; +use telemetry::prelude::*; +use thiserror::Error; +use tokio::sync::watch; + +use crate::server::rebase::perform_rebase; + +#[remain::sorted] +#[derive(Debug, Error)] +pub enum CoreLoopSetupError { + #[error("jetstream error: {0}")] + Jetstream(#[from] JetstreamError), + #[error("serde json error: {0}")] + SerdeJson(#[from] serde_json::Error), + #[error("transactions error: {0}")] + Transactions(#[from] Box), +} + +impl From for CoreLoopSetupError { + fn from(e: TransactionsError) -> Self { + Self::Transactions(Box::new(e)) + } +} + +type 
CoreLoopSetupResult = Result; + +#[allow(clippy::too_many_arguments)] +pub(crate) async fn setup_and_run_core_loop( + pg_pool: PgPool, + nats: NatsClient, + veritech: veritech_client::Client, + job_processor: Box, + symmetric_crypto_service: SymmetricCryptoService, + encryption_key: Arc, + shutdown_watch_rx: watch::Receiver<()>, + messaging_config: RebaserMessagingConfig, + content_store_pg_pool: PgPool, +) -> CoreLoopSetupResult<()> { + let services_context = ServicesContext::new( + pg_pool, + nats.clone(), + job_processor, + veritech.clone(), + encryption_key, + None, + None, + symmetric_crypto_service, + messaging_config.clone(), + content_store_pg_pool, + ); + + // Setup the subjects. + let subject_all = SubjectGenerator::all(messaging_config.subject_prefix()); + let subject_root = SubjectGenerator::root(messaging_config.subject_prefix()); + info!(%subject_all, %subject_root, "created services context and prepared subjects"); + + // Setup the stream and the consumer. + let jetstream_ctx = nats.clone().to_jetstream_ctx(); + info!(%subject_all, %subject_root, "finding or creating stream"); + let rebaser_jetstream_stream = jetstream_ctx + .get_or_create_work_queue_stream(&subject_root, vec![subject_all.clone()]) + .await?; + + info!(%subject_all, %subject_root, "finding or creating durable consumer"); + let consumer = jetstream_ctx + .get_or_create_durable_consumer(&rebaser_jetstream_stream, &subject_root) + .await?; + + info!(%subject_all, %subject_root, "getting stream from consumer"); + let stream = consumer.stream().await?; + + info!("getting dal context builder"); + let ctx_builder = DalContext::builder(services_context.clone(), false); + + info!("setup complete, entering core loop"); + core_loop_infallible(ctx_builder, stream, shutdown_watch_rx).await; + info!("exited core loop"); + + Ok(()) +} + +async fn core_loop_infallible( + ctx_builder: DalContextBuilder, + stream: Stream, + mut shutdown_watch_rx: watch::Receiver<()>, +) { + let mut stream = 
stream.take_until_if(Box::pin(shutdown_watch_rx.changed().map(|_| true))); + + while let Some(unprocessed_message) = stream.next().await { + let message = match unprocessed_message { + Ok(processed) => processed, + Err(err) => { + error!(error = ?err, "error when pull message off stream"); + continue; + } + }; + + if let Err(err) = message.ack_with(AckKind::Progress).await { + error!(error = ?err, "could not ack with progress, going to continue anyway"); + } + + // Deserialize the message payload so that we can process it. + let request_message: RequestRebaseMessage = + match serde_json::from_slice(message.message.payload.to_vec().as_slice()) { + Ok(deserialized) => deserialized, + Err(err) => { + error!(error = ?err, ?message, "failed to deserialize message payload"); + continue; + } + }; + + // Pull the reply subject off of the message. + let reply_subject = if let Some(headers) = &message.headers { + if let Some(value) = headers.get(REPLY_SUBJECT_HEADER_NAME.clone()) { + value.to_string() + } else { + // NOTE(nick): we may actually want to process the message anyway, but things would be super messed up + // at that point... because no one should be sending messages exterior to rebaser clients. + error!( + ?message, + "no reply subject found in headers, skipping messages because we cannot reply" + ); + continue; + } + } else { + // NOTE(nick): we may actually want to process the message anyway, but things would be super messed up + // at that point... because no one should be sending messages exterior to rebaser clients. 
+ error!( + ?message, + "no headers found, skipping message because we cannot reply" + ); + continue; + }; + + let ctx_builder = ctx_builder.clone(); + tokio::spawn(async move { + perform_rebase_and_reply_infallible(ctx_builder, request_message, reply_subject).await; + if let Err(err) = message.ack_with(AckKind::Ack).await { + error!(?message, ?err, "failing acking message"); + } + }); + } +} + +async fn perform_rebase_and_reply_infallible( + ctx_builder: DalContextBuilder, + message: RequestRebaseMessage, + reply_subject: impl ToSubject, +) { + let start = Instant::now(); + + let mut ctx = match ctx_builder.build_default().await { + Ok(ctx) => ctx, + Err(err) => { + error!(error = ?err, "unable to build dal context"); + return; + } + }; + ctx.update_visibility(Visibility::new_head(false)); + ctx.update_tenancy(Tenancy::new(WorkspacePk::NONE)); + + let reply_subject = reply_subject.to_subject(); + + let reply_message = perform_rebase(&mut ctx, message).await.unwrap_or_else(|err| { + error!(error = ?err, ?message, ?reply_subject, "performing rebase failed, attempting to reply"); + ReplyRebaseMessage::Error { + message: err.to_string(), + } + }); + + match serde_json::to_vec(&reply_message) { + Ok(serialized_payload) => { + if let Err(publish_err) = ctx + .nats_conn() + .publish(reply_subject.clone(), serialized_payload.into()) + .await + { + error!(error = ?publish_err, %reply_subject, "replying to requester failed"); + } + } + Err(serialization_err) => { + error!(error = ?serialization_err, %reply_subject, "failed to serialize reply message"); + } + } + info!("perform rebase and reply total: {:?}", start.elapsed()); +} diff --git a/lib/rebaser-server/src/server/rebase.rs b/lib/rebaser-server/src/server/rebase.rs new file mode 100644 index 0000000000..3c255f84e1 --- /dev/null +++ b/lib/rebaser-server/src/server/rebase.rs @@ -0,0 +1,112 @@ +use dal::change_set_pointer::{ChangeSetPointer, ChangeSetPointerError, ChangeSetPointerId}; +use 
dal::workspace_snapshot::vector_clock::VectorClockId; +use dal::workspace_snapshot::WorkspaceSnapshotError; +use dal::{DalContext, TransactionsError, WorkspaceSnapshot}; +use rebaser_core::{ReplyRebaseMessage, RequestRebaseMessage}; +use telemetry::prelude::*; +use thiserror::Error; +use tokio::time::Instant; + +#[remain::sorted] +#[derive(Debug, Error)] +pub(crate) enum RebaseError { + #[error("workspace snapshot error: {0}")] + ChangeSetPointer(#[from] ChangeSetPointerError), + #[error("missing change set pointer")] + MissingChangeSetPointer(ChangeSetPointerId), + #[error("missing workspace snapshot for change set ({0}) (the change set likely isn't pointing at a workspace snapshot)")] + MissingWorkspaceSnapshotForChangeSet(ChangeSetPointerId), + #[error("serde json error: {0}")] + SerdeJson(#[from] serde_json::Error), + #[error("transactions error: {0}")] + Transactions(#[from] TransactionsError), + #[error("workspace snapshot error: {0}")] + WorkspaceSnapshot(#[from] WorkspaceSnapshotError), +} + +type RebaseResult = Result; + +pub(crate) async fn perform_rebase( + ctx: &mut DalContext, + message: RequestRebaseMessage, +) -> RebaseResult { + let start = Instant::now(); + // Gather everything we need to detect conflicts and updates from the inbound message. + let mut to_rebase_change_set = + ChangeSetPointer::find(ctx, message.to_rebase_change_set_id.into()) + .await? 
+ .ok_or(RebaseError::MissingChangeSetPointer( + message.to_rebase_change_set_id.into(), + ))?; + let to_rebase_workspace_snapshot_id = to_rebase_change_set.workspace_snapshot_id.ok_or( + RebaseError::MissingWorkspaceSnapshotForChangeSet(to_rebase_change_set.id), + )?; + info!("before snapshot fetch and parse: {:?}", start.elapsed()); + let mut to_rebase_workspace_snapshot = + WorkspaceSnapshot::find(ctx, to_rebase_workspace_snapshot_id).await?; + let mut onto_workspace_snapshot: WorkspaceSnapshot = + WorkspaceSnapshot::find(ctx, message.onto_workspace_snapshot_id.into()).await?; + info!( + "to_rebase_id: {}, onto_id: {}", + to_rebase_workspace_snapshot_id, + onto_workspace_snapshot.id() + ); + info!("after snapshot fetch and parse: {:?}", start.elapsed()); + + // Perform the conflicts and updates detection. + let onto_vector_clock_id: VectorClockId = message.onto_vector_clock_id.into(); + let (conflicts, updates) = to_rebase_workspace_snapshot + .detect_conflicts_and_updates( + to_rebase_change_set.vector_clock_id(), + &mut onto_workspace_snapshot, + onto_vector_clock_id, + ) + .await?; + info!( + "count: conflicts ({}) and updates ({}), {:?}", + conflicts.len(), + updates.len(), + start.elapsed() + ); + + // If there are conflicts, immediately assemble a reply message that conflicts were found. + // Otherwise, we can perform updates and assemble a "success" reply message. + let message: ReplyRebaseMessage = if conflicts.is_empty() { + // TODO(nick): store the offset with the change set. + to_rebase_workspace_snapshot.perform_updates( + &to_rebase_change_set, + &mut onto_workspace_snapshot, + updates.as_slice(), + )?; + info!("updates complete: {:?}", start.elapsed()); + + if !updates.is_empty() { + // Once all updates have been performed, we can write out, mark everything as recently seen + // and update the pointer. 
+ to_rebase_workspace_snapshot + .write(ctx, to_rebase_change_set.vector_clock_id()) + .await?; + info!("snapshot written: {:?}", start.elapsed()); + to_rebase_change_set + .update_pointer(ctx, to_rebase_workspace_snapshot.id()) + .await?; + info!("pointer updated: {:?}", start.elapsed()); + } + + ReplyRebaseMessage::Success { + updates_performed: serde_json::to_value(updates)?, + } + } else { + ReplyRebaseMessage::ConflictsFound { + conflicts_found: serde_json::to_value(conflicts)?, + updates_found_and_skipped: serde_json::to_value(updates)?, + } + }; + + info!("rebase performed: {:?}", start.elapsed()); + + // Before replying to the requester, we must commit. + ctx.commit_no_rebase().await?; + + Ok(message) +} diff --git a/lib/sdf-server/BUCK b/lib/sdf-server/BUCK index 55123e00e4..ee6623d813 100644 --- a/lib/sdf-server/BUCK +++ b/lib/sdf-server/BUCK @@ -4,6 +4,7 @@ rust_library( name = "sdf-server", deps = [ "//lib/buck2-resources:buck2-resources", + "//lib/content-store:content-store", "//lib/dal:dal", "//lib/module-index-client:module-index-client", "//lib/nats-multiplexer:nats-multiplexer", diff --git a/lib/sdf-server/Cargo.toml b/lib/sdf-server/Cargo.toml index abff8fafdd..3894917eb2 100644 --- a/lib/sdf-server/Cargo.toml +++ b/lib/sdf-server/Cargo.toml @@ -27,6 +27,7 @@ async-trait = { workspace = true } axum = { workspace = true } base64 = { workspace = true } chrono = { workspace = true } +content-store = { path = "../../lib/content-store" } convert_case = { workspace = true } derive_builder = { workspace = true } futures = { workspace = true } diff --git a/lib/sdf-server/src/server/config.rs b/lib/sdf-server/src/server/config.rs index 3d1903be5e..b2a474ab0c 100644 --- a/lib/sdf-server/src/server/config.rs +++ b/lib/sdf-server/src/server/config.rs @@ -7,6 +7,7 @@ use std::{ }; use buck2_resources::Buck2Resources; +use content_store::PgStoreTools; use derive_builder::Builder; use serde::{Deserialize, Serialize}; use 
si_crypto::{SymmetricCryptoServiceConfig, SymmetricCryptoServiceConfigFile}; @@ -78,6 +79,9 @@ pub struct Config { #[builder(default = "JwtConfig::default()")] jwt_signing_public_key: JwtConfig, + #[builder(default = "PgStoreTools::default_pool_config()")] + content_store_pg_pool: PgPoolConfig, + signup_secret: SensitiveString, pkgs_path: CanonicalFile, } @@ -150,6 +154,12 @@ impl Config { pub fn module_index_url(&self) -> &str { &self.module_index_url } + + /// Gets a reference to the config's content store pg pool. + #[must_use] + pub fn content_store_pg_pool(&self) -> &PgPoolConfig { + &self.content_store_pg_pool + } } impl ConfigBuilder { @@ -166,6 +176,8 @@ impl ConfigBuilder { pub struct ConfigFile { #[serde(default)] pub pg: PgPoolConfig, + #[serde(default = "PgStoreTools::default_pool_config")] + pub content_store_pg: PgPoolConfig, #[serde(default)] pub nats: NatsConfig, #[serde(default)] @@ -190,6 +202,7 @@ impl Default for ConfigFile { fn default() -> Self { Self { pg: Default::default(), + content_store_pg: PgStoreTools::default_pool_config(), nats: Default::default(), migration_mode: Default::default(), jwt_signing_public_key: Default::default(), @@ -215,6 +228,7 @@ impl TryFrom for Config { let mut config = Config::builder(); config.pg_pool(value.pg); + config.content_store_pg_pool(value.content_store_pg); config.nats(value.nats); config.migration_mode(value.migration_mode); config.jwt_signing_public_key(value.jwt_signing_public_key); @@ -348,7 +362,8 @@ fn buck2_development(config: &mut ConfigFile) -> Result<()> { active_key_base64: None, extra_keys: vec![], }; - config.pg.certificate_path = Some(postgres_cert.try_into()?); + config.pg.certificate_path = Some(postgres_cert.clone().try_into()?); + config.content_store_pg.certificate_path = Some(postgres_cert.try_into()?); config.pkgs_path = pkgs_path; Ok(()) @@ -406,7 +421,8 @@ fn cargo_development(dir: String, config: &mut ConfigFile) -> Result<()> { active_key_base64: None, extra_keys: vec![], }; - 
config.pg.certificate_path = Some(postgres_cert.try_into()?); + config.pg.certificate_path = Some(postgres_cert.clone().try_into()?); + config.content_store_pg.certificate_path = Some(postgres_cert.try_into()?); config.pkgs_path = pkgs_path; Ok(()) diff --git a/lib/sdf-server/src/server/job_processor.rs b/lib/sdf-server/src/server/job_processor.rs index c404791a2a..9e32462df9 100644 --- a/lib/sdf-server/src/server/job_processor.rs +++ b/lib/sdf-server/src/server/job_processor.rs @@ -34,6 +34,7 @@ impl JobProcessorConnector for NatsProcessor { config: &Config, ) -> Result<(Self::Client, Box), ServerError> { let job_client = Server::connect_to_nats(config.nats()).await?; + dbg!(&job_client); let job_processor = Box::new(NatsProcessor::new(job_client.clone())) as Box; Ok((job_client, job_processor)) diff --git a/lib/sdf-server/src/server/routes.rs b/lib/sdf-server/src/server/routes.rs index 96469920a4..ceca980d96 100644 --- a/lib/sdf-server/src/server/routes.rs +++ b/lib/sdf-server/src/server/routes.rs @@ -31,28 +31,29 @@ pub fn routes(state: AppState) -> Router { "/api/component", crate::server::service::component::routes(), ) - .nest("/api/fix", crate::server::service::fix::routes()) + .nest("/api/diagram", crate::server::service::diagram::routes()) .nest("/api/func", crate::server::service::func::routes()) - .nest("/api/pkg", crate::server::service::pkg::routes()) - .nest("/api/provider", crate::server::service::provider::routes()) + .nest("/api/graphviz", crate::server::service::graphviz::routes()) .nest( "/api/qualification", crate::server::service::qualification::routes(), ) .nest("/api/schema", crate::server::service::schema::routes()) - .nest("/api/diagram", crate::server::service::diagram::routes()) .nest("/api/secret", crate::server::service::secret::routes()) .nest("/api/session", crate::server::service::session::routes()) - .nest("/api/status", crate::server::service::status::routes()) - .nest( - "/api/variant_def", - 
crate::server::service::variant_definition::routes(), - ) .nest("/api/ws", crate::server::service::ws::routes()) + // .nest("/api/fix", crate::server::service::fix::routes()) + // .nest("/api/pkg", crate::server::service::pkg::routes()) + // .nest("/api/provider", crate::server::service::provider::routes()) + // .nest("/api/status", crate::server::service::status::routes()) + // .nest( + // "/api/variant_def", + // crate::server::service::variant_definition::routes(), + // ) .layer(CompressionLayer::new()); // Load dev routes if we are in dev mode (decided by "opt-level" at the moment). - router = dev_routes(router); + // router = dev_routes(router); router.with_state(state) } @@ -61,11 +62,11 @@ async fn system_status_route() -> Json { Json(json!({ "ok": true })) } -#[cfg(debug_assertions)] -pub fn dev_routes(mut router: Router) -> Router { - router = router.nest("/api/dev", crate::server::service::dev::routes()); - router -} +// #[cfg(debug_assertions)] +// pub fn dev_routes(mut router: Router) -> Router { +// router = router.nest("/api/dev", crate::server::service::dev::routes()); +// router +// } #[cfg(not(debug_assertions))] pub fn dev_routes(router: Router) -> Router { diff --git a/lib/sdf-server/src/server/server.rs b/lib/sdf-server/src/server/server.rs index 37408e8ca0..23b7a31f3e 100644 --- a/lib/sdf-server/src/server/server.rs +++ b/lib/sdf-server/src/server/server.rs @@ -1,18 +1,9 @@ -use std::{ - io, - net::SocketAddr, - path::{Path, PathBuf}, - sync::Arc, - time::Duration, -}; - -use axum::{routing::IntoMakeService, Router}; +use axum::routing::IntoMakeService; +use axum::Router; +use dal::jwt_key::JwtConfig; +use dal::ServicesContext; use dal::{ - builtins, - jwt_key::JwtConfig, - pkg::{import_pkg_from_pkg, ImportOptions, PkgError}, - tasks::{ResourceScheduler, StatusReceiver, StatusReceiverError}, - BuiltinsError, DalContext, JwtPublicSigningKey, ServicesContext, Tenancy, TransactionsError, + builtins, BuiltinsError, DalContext, JwtPublicSigningKey, 
Tenancy, TransactionsError, Workspace, WorkspaceError, }; use hyper::server::{accept::Accept, conn::AddrIncoming}; @@ -20,34 +11,36 @@ use module_index_client::{types::BuiltinsDetailsResponse, IndexClient, ModuleDet use nats_multiplexer::Multiplexer; use nats_multiplexer_client::MultiplexerClient; use si_crypto::{ - CryptoConfig, CycloneKeyPairError, SymmetricCryptoError, SymmetricCryptoService, - SymmetricCryptoServiceConfig, + CryptoConfig, CycloneEncryptionKey, CycloneEncryptionKeyError, CycloneKeyPairError, + SymmetricCryptoError, SymmetricCryptoService, SymmetricCryptoServiceConfig, }; use si_data_nats::{NatsClient, NatsConfig, NatsError}; use si_data_pg::{PgError, PgPool, PgPoolConfig, PgPoolError}; use si_pkg::{SiPkg, SiPkgError}; use si_posthog::{PosthogClient, PosthogConfig}; use si_std::SensitiveString; +use std::sync::Arc; +use std::time::Duration; +use std::{io, net::SocketAddr, path::Path, path::PathBuf}; use telemetry::prelude::*; use telemetry_http::{HttpMakeSpan, HttpOnResponse}; use thiserror::Error; +use tokio::time::Instant; use tokio::{ io::{AsyncRead, AsyncWrite}, signal, sync::{broadcast, mpsc, oneshot}, task::{JoinError, JoinSet}, - time::{self, Instant}, + time, }; use tower_http::trace::TraceLayer; use ulid::Ulid; -use veritech_client::{Client as VeritechClient, CycloneEncryptionKey, CycloneEncryptionKeyError}; +use veritech_client::Client as VeritechClient; +use super::state::AppState; +use super::{routes, Config, IncomingStream, UdsIncomingStream, UdsIncomingStreamError}; use crate::server::config::CycloneKeyPair; -use super::{ - routes, state::AppState, Config, IncomingStream, UdsIncomingStream, UdsIncomingStreamError, -}; - #[remain::sorted] #[derive(Debug, Error)] pub enum ServerError { @@ -81,8 +74,6 @@ pub enum ServerError { Pg(#[from] PgError), #[error(transparent)] PgPool(#[from] Box), - #[error(transparent)] - Pkg(#[from] PkgError), #[error("failed to install package")] PkgInstall, #[error(transparent)] @@ -92,8 +83,6 @@ pub enum 
ServerError { #[error(transparent)] SiPkg(#[from] SiPkgError), #[error(transparent)] - StatusReceiver(#[from] StatusReceiverError), - #[error(transparent)] SymmetricCryptoService(#[from] SymmetricCryptoError), #[error("transactions error: {0}")] Transactions(#[from] TransactionsError), @@ -278,23 +267,23 @@ impl Server<(), ()> { Ok(()) } - /// Start the basic resource refresh scheduler - pub async fn start_resource_refresh_scheduler( - services_context: ServicesContext, - shutdown_broadcast_rx: broadcast::Receiver<()>, - ) { - ResourceScheduler::new(services_context).start(shutdown_broadcast_rx); - } - - pub async fn start_status_updater( - services_context: ServicesContext, - shutdown_broadcast_rx: broadcast::Receiver<()>, - ) -> Result<()> { - StatusReceiver::new(services_context) - .await? - .start(shutdown_broadcast_rx); - Ok(()) - } + // /// Start the basic resource refresh scheduler + // pub async fn start_resource_refresh_scheduler( + // services_context: ServicesContext, + // shutdown_broadcast_rx: broadcast::Receiver<()>, + // ) { + // ResourceScheduler::new(services_context).start(shutdown_broadcast_rx); + // } + + // pub async fn start_status_updater( + // services_context: ServicesContext, + // shutdown_broadcast_rx: broadcast::Receiver<()>, + // ) -> Result<()> { + // StatusReceiver::new(services_context) + // .await? 
+ // .start(shutdown_broadcast_rx); + // Ok(()) + // } #[instrument(name = "sdf.init.create_pg_pool", level = "info", skip_all)] pub async fn create_pg_pool(pg_pool_config: &PgPoolConfig) -> Result { @@ -364,22 +353,21 @@ pub async fn migrate_builtins_from_module_index(services_context: &ServicesConte dal_context.set_no_dependent_values(); let mut ctx = dal_context.build_default().await?; - let workspace = Workspace::builtin(&ctx).await?; + let workspace = Workspace::builtin(&mut ctx).await?; ctx.update_tenancy(Tenancy::new(*workspace.pk())); - ctx.blocking_commit().await?; + ctx.update_to_head(); + ctx.update_snapshot_to_visibility().await?; info!("migrating intrinsic functions"); builtins::func::migrate_intrinsics(&ctx).await?; - info!("migrating builtin functions"); - builtins::func::migrate(&ctx).await?; + // info!("migrating builtin functions"); + // builtins::func::migrate(&ctx).await?; let module_index_url = services_context .module_index_url() - .as_ref() .ok_or(ServerError::ModuleIndexNotSet)?; - let module_index_client = - IndexClient::unauthenticated_client(module_index_url.clone().as_str().try_into()?); + let module_index_client = IndexClient::unauthenticated_client(module_index_url.try_into()?); let module_list = module_index_client.list_builtins().await?; let install_builtins = install_builtins(ctx, module_list, module_index_client); tokio::pin!(install_builtins); @@ -408,7 +396,10 @@ async fn install_builtins( ) -> Result<()> { let dal = &ctx; let client = &module_index_client.clone(); - let modules = module_list.modules; + let modules: Vec = module_list.modules; + // .into_iter() + // .filter(|m| m.name == "si-docker-image-builtin") + // .collect(); let total = modules.len(); let mut join_set = JoinSet::new(); @@ -428,10 +419,10 @@ async fn install_builtins( let (pkg_name, res) = res?; match res { Ok(pkg) => { - if let Err(err) = import_pkg_from_pkg( + if let Err(err) = dal::pkg::import_pkg_from_pkg( &ctx, &pkg, - Some(ImportOptions { + 
Some(dal::pkg::ImportOptions { schemas: None, skip_import_funcs: None, no_record: false, @@ -442,12 +433,10 @@ async fn install_builtins( { println!("Pkg {pkg_name} Install failed, {err}"); } else { - ctx.commit().await?; - count += 1; println!( - "Pkg {pkg_name} Install finished successfully. {count} of {total} installed.", - ); + "Pkg {pkg_name} Install finished successfully. {count} of {total} installed.", + ); } } Err(err) => { @@ -455,9 +444,11 @@ async fn install_builtins( } } } - dal.commit().await?; + let mut ctx = ctx.clone(); + ctx.update_snapshot_to_visibility().await?; + Ok(()) } diff --git a/lib/sdf-server/src/server/service.rs b/lib/sdf-server/src/server/service.rs index e4089ced2e..69ddcc93d0 100644 --- a/lib/sdf-server/src/server/service.rs +++ b/lib/sdf-server/src/server/service.rs @@ -2,18 +2,20 @@ pub mod async_route; pub mod change_set; pub mod component; pub mod diagram; -pub mod fix; pub mod func; -pub mod pkg; -pub mod provider; -pub mod qualification; +pub mod graphviz; pub mod schema; -pub mod secret; pub mod session; -pub mod status; -pub mod variant_definition; pub mod ws; -/// A module containing dev routes for local development only. -#[cfg(debug_assertions)] -pub mod dev; +// pub mod fix; +// pub mod pkg; +// pub mod provider; +pub mod qualification; +pub mod secret; +// pub mod status; +// pub mod variant_definition; + +// /// A module containing dev routes for local development only. 
+// #[cfg(debug_assertions)] +// pub mod dev; diff --git a/lib/sdf-server/src/server/service/change_set.rs b/lib/sdf-server/src/server/service/change_set.rs index 278317824f..39c300032c 100644 --- a/lib/sdf-server/src/server/service/change_set.rs +++ b/lib/sdf-server/src/server/service/change_set.rs @@ -5,52 +5,58 @@ use axum::{ Json, Router, }; use dal::{ - change_status::ChangeStatusError, ActionError, ActionId, ChangeSetError as DalChangeSetError, - ComponentError as DalComponentError, FixError, StandardModelError, TransactionsError, - UserError, UserPk, WsEventError, + change_set_pointer::{ChangeSetPointerError, ChangeSetPointerId}, + ChangeSetError as DalChangeSetError, StandardModelError, TransactionsError, UserError, UserPk, + WorkspaceError, WorkspacePk, WsEventError, }; use module_index_client::IndexClientError; use telemetry::prelude::*; use thiserror::Error; -use crate::{server::state::AppState, service::pkg::PkgError}; +use crate::server::state::AppState; -pub mod abandon_change_set; -mod abandon_vote; -pub mod add_action; +// pub mod abandon_change_set; +// mod abandon_vote; +// pub mod add_action; pub mod apply_change_set; -mod begin_abandon_approval_process; -mod begin_approval_process; +// mod begin_abandon_approval_process; +// mod begin_approval_process; pub mod create_change_set; -pub mod get_change_set; -pub mod get_stats; +// pub mod get_change_set; +// pub mod get_stats; pub mod list_open_change_sets; -pub mod list_queued_actions; -mod merge_vote; -pub mod remove_action; -pub mod update_selected_change_set; +// pub mod list_queued_actions; +// mod merge_vote; +// pub mod remove_action; +// pub mod update_selected_change_set; #[remain::sorted] #[derive(Debug, Error)] pub enum ChangeSetError { - #[error(transparent)] - Action(#[from] ActionError), - #[error("action {0} not found")] - ActionNotFound(ActionId), + // #[error(transparent)] + // Action(#[from] ActionError), + // #[error("action {0} not found")] + // ActionNotFound(ActionId), 
#[error(transparent)] ChangeSet(#[from] DalChangeSetError), #[error("change set not found")] ChangeSetNotFound, - #[error(transparent)] - ChangeStatusError(#[from] ChangeStatusError), - #[error(transparent)] - Component(#[from] DalComponentError), + #[error("change set error: {0}")] + ChangeSetPointer(#[from] ChangeSetPointerError), + // #[error(transparent)] + // ChangeStatusError(#[from] ChangeStatusError), + // #[error(transparent)] + // Component(#[from] DalComponentError), #[error(transparent)] ContextError(#[from] TransactionsError), - #[error(transparent)] - DalPkg(#[from] dal::pkg::PkgError), - #[error(transparent)] - Fix(#[from] FixError), + #[error("could not find default change set: {0}")] + DefaultChangeSetNotFound(ChangeSetPointerId), + #[error("default change set {0} has no workspace snapshot pointer")] + DefaultChangeSetNoWorkspaceSnapshotPointer(ChangeSetPointerId), + // #[error(transparent)] + // DalPkg(#[from] dal::pkg::PkgError), + // #[error(transparent)] + // Fix(#[from] FixError), #[error("invalid header name {0}")] Hyper(#[from] hyper::http::Error), #[error(transparent)] @@ -61,16 +67,22 @@ pub enum ChangeSetError { InvalidUserSystemInit, #[error(transparent)] Nats(#[from] si_data_nats::NatsError), + #[error("no tenancy set in context")] + NoTenancySet, #[error(transparent)] Pg(#[from] si_data_pg::PgError), - #[error(transparent)] - PkgService(#[from] PkgError), + // #[error(transparent)] + // PkgService(#[from] PkgError), #[error(transparent)] StandardModel(#[from] StandardModelError), #[error(transparent)] UrlParse(#[from] url::ParseError), #[error(transparent)] User(#[from] UserError), + #[error("workspace error: {0}")] + Workspace(#[from] WorkspaceError), + #[error("workspace not found: {0}")] + WorkspaceNotFound(WorkspacePk), #[error(transparent)] WsEvent(#[from] WsEventError), } @@ -98,46 +110,46 @@ pub fn routes() -> Router { "/list_open_change_sets", get(list_open_change_sets::list_open_change_sets), ) - .route( - 
"/list_queued_actions", - get(list_queued_actions::list_queued_actions), - ) - .route("/remove_action", post(remove_action::remove_action)) - .route("/add_action", post(add_action::add_action)) + // .route( + // "/list_queued_actions", + // get(list_queued_actions::list_queued_actions), + // ) + // .route("/remove_action", post(remove_action::remove_action)) + // .route("/add_action", post(add_action::add_action)) .route( "/create_change_set", post(create_change_set::create_change_set), ) - .route("/get_change_set", get(get_change_set::get_change_set)) - .route("/get_stats", get(get_stats::get_stats)) + // .route("/get_change_set", get(get_change_set::get_change_set)) + // .route("/get_stats", get(get_stats::get_stats)) .route( "/apply_change_set", post(apply_change_set::apply_change_set), ) - .route( - "/abandon_change_set", - post(abandon_change_set::abandon_change_set), - ) - .route( - "/update_selected_change_set", - post(update_selected_change_set::update_selected_change_set), - ) - .route( - "/begin_approval_process", - post(begin_approval_process::begin_approval_process), - ) - .route( - "/cancel_approval_process", - post(begin_approval_process::cancel_approval_process), - ) - .route("/merge_vote", post(merge_vote::merge_vote)) - .route( - "/begin_abandon_approval_process", - post(begin_abandon_approval_process::begin_abandon_approval_process), - ) - .route( - "/cancel_abandon_approval_process", - post(begin_abandon_approval_process::cancel_abandon_approval_process), - ) - .route("/abandon_vote", post(abandon_vote::abandon_vote)) + // .route( + // "/abandon_change_set", + // post(abandon_change_set::abandon_change_set), + // ) + // .route( + // "/update_selected_change_set", + // post(update_selected_change_set::update_selected_change_set), + // ) + // .route( + // "/begin_approval_process", + // post(begin_approval_process::begin_approval_process), + // ) + // .route( + // "/cancel_approval_process", + // 
post(begin_approval_process::cancel_approval_process), + // ) + // .route("/merge_vote", post(merge_vote::merge_vote)) + // .route( + // "/begin_abandon_approval_process", + // post(begin_abandon_approval_process::begin_abandon_approval_process), + // ) + // .route( + // "/cancel_abandon_approval_process", + // post(begin_abandon_approval_process::cancel_abandon_approval_process), + // ) + // .route("/abandon_vote", post(abandon_vote::abandon_vote)) } diff --git a/lib/sdf-server/src/server/service/change_set/apply_change_set.rs b/lib/sdf-server/src/server/service/change_set/apply_change_set.rs index 520561dd35..4f6188a312 100644 --- a/lib/sdf-server/src/server/service/change_set/apply_change_set.rs +++ b/lib/sdf-server/src/server/service/change_set/apply_change_set.rs @@ -4,27 +4,25 @@ use crate::server::service::change_set::ChangeSetError; use crate::server::tracking::track; use axum::extract::OriginalUri; use axum::Json; -use dal::job::definition::{FixItem, FixesJob}; -use dal::{ - action::ActionBag, ActionId, ChangeSet, ChangeSetPk, Component, ComponentError, Fix, FixBatch, - FixId, HistoryActor, StandardModel, User, -}; +use dal::change_set_pointer::{ChangeSetPointer, ChangeSetPointerId}; +use dal::ChangeSetStatus; use serde::{Deserialize, Serialize}; -use std::collections::{HashMap, VecDeque}; //use telemetry::tracing::{info_span, Instrument, log::warn}; #[derive(Deserialize, Serialize, Debug)] #[serde(rename_all = "camelCase")] pub struct ApplyChangeSetRequest { - pub change_set_pk: ChangeSetPk, + pub change_set_pk: ChangeSetPointerId, } #[derive(Deserialize, Serialize, Debug)] #[serde(rename_all = "camelCase")] pub struct ApplyChangeSetResponse { - pub change_set: ChangeSet, + pub change_set: ChangeSetPointer, } +// TODO: This does not handle anything related to actions yet, after the switchover to workspace +// snapshot graphs. 
pub async fn apply_change_set( HandlerContext(builder): HandlerContext, AccessBuilder(access_builder): AccessBuilder, @@ -32,14 +30,15 @@ pub async fn apply_change_set( OriginalUri(original_uri): OriginalUri, Json(request): Json, ) -> ChangeSetResult> { - let mut ctx = builder.build_head(access_builder).await?; + let ctx = builder.build_head(access_builder).await?; - let mut change_set = ChangeSet::get_by_pk(&ctx, &request.change_set_pk) + let mut change_set = ChangeSetPointer::find(&ctx, request.change_set_pk) .await? .ok_or(ChangeSetError::ChangeSetNotFound)?; - let actions = change_set.actions(&ctx).await?; - let actors = change_set.actors(&ctx).await?; - change_set.apply(&mut ctx).await?; + change_set.apply_to_base_change_set(&ctx).await?; + change_set + .update_status(&ctx, ChangeSetStatus::Applied) + .await?; track( &posthog_client, @@ -53,91 +52,91 @@ pub async fn apply_change_set( ctx.blocking_commit().await?; - let user = match ctx.history_actor() { - HistoryActor::User(user_pk) => User::get_by_pk(&ctx, *user_pk) - .await? 
- .ok_or(ChangeSetError::InvalidUser(*user_pk))?, - - HistoryActor::SystemInit => return Err(ChangeSetError::InvalidUserSystemInit), - }; - - if !actions.is_empty() { - let actors_delimited_string = actors.join(","); - let batch = FixBatch::new(&ctx, user.email(), &actors_delimited_string).await?; - let mut fixes: HashMap = HashMap::new(); - let mut fixes_by_action: HashMap = HashMap::new(); - - let mut values: Vec = actions.values().cloned().collect(); - values.sort_by_key(|a| *a.action.id()); - - let mut values: VecDeque = values.into_iter().collect(); - - // Fixes have to be created in the order we want to display them in the fix history panel - // So we do extra work here to ensure the order is the execution order - 'outer: while let Some(bag) = values.pop_front() { - let mut parents = Vec::new(); - for parent_id in bag.parents.clone() { - if let Some(parent_id) = fixes_by_action.get(&parent_id) { - parents.push(*parent_id); - } else { - values.push_back(bag); - continue 'outer; - } - } - - let component = Component::get_by_id( - &ctx.clone_with_delete_visibility(), - bag.action.component_id(), - ) - .await? - .ok_or_else(|| ComponentError::NotFound(*bag.action.component_id()))?; - let fix = Fix::new( - &ctx, - *batch.id(), - *bag.action.component_id(), - component.name(&ctx).await?, - *bag.action.action_prototype_id(), - ) - .await?; - fixes_by_action.insert(*bag.action.id(), *fix.id()); - - fixes.insert( - *fix.id(), - FixItem { - id: *fix.id(), - component_id: *bag.action.component_id(), - action_prototype_id: *bag.action.action_prototype_id(), - parents, - }, - ); - } - - track( - &posthog_client, - &ctx, - &original_uri, - "apply_fix", - serde_json::json!({ - "fix_batch_id": batch.id(), - "number_of_fixes_in_batch": fixes.len(), - "fixes_applied": fixes, - }), - ); - - ctx.enqueue_job(FixesJob::new(&ctx, fixes, *batch.id())) - .await?; - } - - ctx.commit().await?; - - // If anything fails with uploading the workspace backup module, just log it. 
We shouldn't - // have the change set apply itself fail because of this. - /* - tokio::task::spawn( - super::upload_workspace_backup_module(ctx, raw_access_token) - .instrument(info_span!("Workspace backup module upload")), - ); - */ - + // TODO(nick): restore in new engine. + // let user = match ctx.history_actor() { + // HistoryActor::User(user_pk) => User::get_by_pk(&ctx, *user_pk) + // .await? + // .ok_or(ChangeSetError::InvalidUser(*user_pk))?, + + // HistoryActor::SystemInit => return Err(ChangeSetError::InvalidUserSystemInit), + // }; + + // if !actions.is_empty() { + // let actors_delimited_string = actors.join(","); + // let batch = FixBatch::new(&ctx, user.email(), &actors_delimited_string).await?; + // let mut fixes: HashMap = HashMap::new(); + // let mut fixes_by_action: HashMap = HashMap::new(); + + // let mut values: Vec = actions.values().cloned().collect(); + // values.sort_by_key(|a| *a.action.id()); + + // let mut values: VecDeque = values.into_iter().collect(); + + // // Fixes have to be created in the order we want to display them in the fix history panel + // // So we do extra work here to ensure the order is the execution order + // 'outer: while let Some(bag) = values.pop_front() { + // let mut parents = Vec::new(); + // for parent_id in bag.parents.clone() { + // if let Some(parent_id) = fixes_by_action.get(&parent_id) { + // parents.push(*parent_id); + // } else { + // values.push_back(bag); + // continue 'outer; + // } + // } + + // let component = Component::get_by_id( + // &ctx.clone_with_delete_visibility(), + // bag.action.component_id(), + // ) + // .await? 
+ // .ok_or_else(|| ComponentError::NotFound(*bag.action.component_id()))?; + // let fix = Fix::new( + // &ctx, + // *batch.id(), + // *bag.action.component_id(), + // component.name(&ctx).await?, + // *bag.action.action_prototype_id(), + // ) + // .await?; + // fixes_by_action.insert(*bag.action.id(), *fix.id()); + + // fixes.insert( + // *fix.id(), + // FixItem { + // id: *fix.id(), + // component_id: *bag.action.component_id(), + // action_prototype_id: *bag.action.action_prototype_id(), + // parents, + // }, + // ); + // } + + // track( + // &posthog_client, + // &ctx, + // &original_uri, + // "apply_fix", + // serde_json::json!({ + // "fix_batch_id": batch.id(), + // "number_of_fixes_in_batch": fixes.len(), + // "fixes_applied": fixes, + // }), + // ); + + // ctx.enqueue_job(FixesJob::new(&ctx, fixes, *batch.id())) + // .await?; + // } + + // ctx.commit().await?; + + // // If anything fails with uploading the workspace backup module, just log it. We shouldn't + // // have the change set apply itself fail because of this. 
+ // /* + // tokio::task::spawn( + // super::upload_workspace_backup_module(ctx, raw_access_token) + // .instrument(info_span!("Workspace backup module upload")), + // ); + // */ Ok(Json(ApplyChangeSetResponse { change_set })) } diff --git a/lib/sdf-server/src/server/service/change_set/create_change_set.rs b/lib/sdf-server/src/server/service/change_set/create_change_set.rs index 7f984091c9..5e3bd5a797 100644 --- a/lib/sdf-server/src/server/service/change_set/create_change_set.rs +++ b/lib/sdf-server/src/server/service/change_set/create_change_set.rs @@ -1,6 +1,6 @@ use axum::extract::OriginalUri; use axum::Json; -use dal::ChangeSet; +use dal::change_set_pointer::ChangeSetPointer; use serde::{Deserialize, Serialize}; use super::ChangeSetResult; @@ -16,7 +16,7 @@ pub struct CreateChangeSetRequest { #[derive(Deserialize, Serialize, Debug)] #[serde(rename_all = "camelCase")] pub struct CreateChangeSetResponse { - pub change_set: ChangeSet, + pub change_set: ChangeSetPointer, } pub async fn create_change_set( @@ -29,7 +29,8 @@ pub async fn create_change_set( let ctx = builder.build_head(access_builder).await?; let change_set_name = &request.change_set_name; - let change_set = ChangeSet::new(&ctx, change_set_name, None).await?; + + let change_set_pointer = ChangeSetPointer::fork_head(&ctx, change_set_name).await?; track( &posthog_client, @@ -41,7 +42,9 @@ pub async fn create_change_set( }), ); - ctx.commit().await?; + ctx.commit_no_rebase().await?; - Ok(Json(CreateChangeSetResponse { change_set })) + Ok(Json(CreateChangeSetResponse { + change_set: change_set_pointer, + })) } diff --git a/lib/sdf-server/src/server/service/change_set/list_open_change_sets.rs b/lib/sdf-server/src/server/service/change_set/list_open_change_sets.rs index 65baf93563..51d1c7c461 100644 --- a/lib/sdf-server/src/server/service/change_set/list_open_change_sets.rs +++ b/lib/sdf-server/src/server/service/change_set/list_open_change_sets.rs @@ -1,14 +1,34 @@ -use super::ChangeSetResult; -use 
crate::server::extract::{AccessBuilder, HandlerContext}; use axum::Json; use chrono::{DateTime, Utc}; -use dal::{ChangeSet, ChangeSetPk, ChangeSetStatus, UserPk}; +//use dal::action::ActionId; +use dal::change_set_pointer::{ChangeSetPointer, ChangeSetPointerId}; +use dal::ActionKind; +use dal::{ActionPrototypeId, ChangeSetStatus, ComponentId, UserPk}; use serde::{Deserialize, Serialize}; +use ulid::Ulid; + +use super::ChangeSetResult; +use crate::server::extract::{AccessBuilder, HandlerContext}; + +#[derive(Deserialize, Serialize, Debug, PartialEq, Eq)] +#[serde(rename_all = "camelCase")] +pub struct ActionView { + // FIXME(nick,zack,jacob): drop ActionId since it does not exist yet for the graph switchover. + pub id: Ulid, + pub action_prototype_id: ActionPrototypeId, + pub kind: ActionKind, + pub name: String, + pub component_id: ComponentId, + pub actor: Option, + pub parents: Vec<()>, +} #[derive(Deserialize, Serialize, Debug, PartialEq, Eq)] #[serde(rename_all = "camelCase")] pub struct ChangeSetView { - pub pk: ChangeSetPk, + // TODO: pk and id are now identical and one of them should be removed + pub id: ChangeSetPointerId, + pub pk: ChangeSetPointerId, pub name: String, pub status: ChangeSetStatus, pub merge_requested_at: Option>, @@ -25,17 +45,77 @@ pub async fn list_open_change_sets( ) -> ChangeSetResult> { let ctx = builder.build_head(access_builder).await?; - let list = ChangeSet::list_open(&ctx).await?; + let list = ChangeSetPointer::list_open(&ctx).await?; let mut view = Vec::with_capacity(list.len()); for cs in list { + // let ctx = + // ctx.clone_with_new_visibility(Visibility::new(cs.pk, ctx.visibility().deleted_at)); + // let actions = HashMap::new(); + // for ( + // _, + // ActionBag { + // action, + // parents, + // kind, + // }, + // ) in cs.actions(&ctx).await? 
+ // { + // let mut display_name = None; + // let prototype = action.prototype(&ctx).await?; + // let func_details = Func::get_by_id(&ctx, &prototype.func_id()).await?; + // if let Some(func) = func_details { + // if func.display_name().is_some() { + // display_name = func.display_name().map(|dname| dname.to_string()); + // } + // } + + // let mut actor_email: Option = None; + // { + // if let Some(created_at_user) = action.creation_user_id() { + // let history_actor = history_event::HistoryActor::User(*created_at_user); + // let actor = ActorView::from_history_actor(&ctx, history_actor).await?; + // match actor { + // ActorView::System { label } => actor_email = Some(label), + // ActorView::User { label, email, .. } => { + // if let Some(em) = email { + // actor_email = Some(em) + // } else { + // actor_email = Some(label) + // } + // } + // }; + // } + // } + + // actions.insert( + // *action.id(), + // ActionView { + // id: *action.id(), + // action_prototype_id: *prototype.id(), + // kind, + // name: display_name.unwrap_or_else(|| match kind { + // ActionKind::Create => "create".to_owned(), + // ActionKind::Delete => "delete".to_owned(), + // ActionKind::Other => "other".to_owned(), + // ActionKind::Refresh => "refresh".to_owned(), + // }), + // component_id: *action.component_id(), + // actor: actor_email, + // parents, + // }, + // ); + // } + view.push(ChangeSetView { - pk: cs.pk, + // TODO: remove change sets entirely! 
+ id: cs.id, + pk: cs.id, name: cs.name, status: cs.status, - merge_requested_at: cs.merge_requested_at, - merge_requested_by_user_id: cs.merge_requested_by_user_id, - abandon_requested_at: cs.abandon_requested_at, - abandon_requested_by_user_id: cs.abandon_requested_by_user_id, + merge_requested_at: None, // cs.merge_requested_at, + merge_requested_by_user_id: None, // cs.merge_requested_by_user_id, + abandon_requested_at: None, // cs.abandon_requested_at, + abandon_requested_by_user_id: None, // cs.abandon_requested_by_user_id, }); } diff --git a/lib/sdf-server/src/server/service/component.rs b/lib/sdf-server/src/server/service/component.rs index de2574edbe..64e2c89e04 100644 --- a/lib/sdf-server/src/server/service/component.rs +++ b/lib/sdf-server/src/server/service/component.rs @@ -4,136 +4,123 @@ use axum::{ routing::{get, post}, Json, Router, }; -use dal::{ - change_status::ChangeStatusError, - component::{migrate::ComponentMigrateError, ComponentViewError}, - ActionPrototypeError, PropError, -}; -use dal::{ - component::view::debug::ComponentDebugViewError, node::NodeError, - property_editor::PropertyEditorError, AttributeContextBuilderError, - AttributePrototypeArgumentError, AttributePrototypeError, AttributeValueError, ChangeSetError, - ComponentError as DalComponentError, ComponentId, DiagramError, ExternalProviderError, - FuncBindingError, FuncError, InternalProviderError, PropId, ReconciliationPrototypeError, - SchemaError as DalSchemaError, StandardModelError, TransactionsError, ValidationResolverError, - WsEventError, -}; +use dal::attribute::value::AttributeValueError; +use dal::component::ComponentId; +use dal::property_editor::PropertyEditorError; +use dal::validation::resolver::ValidationResolverError; +use dal::{ChangeSetError, TransactionsError}; +use dal::{ComponentError as DalComponentError, StandardModelError}; use thiserror::Error; -use crate::{server::state::AppState, service::schema::SchemaError}; +use crate::server::state::AppState; -pub 
mod alter_simulation; -pub mod debug; -pub mod delete_property_editor_value; -pub mod get_actions; -pub mod get_code; -pub mod get_components_metadata; -pub mod get_diff; pub mod get_property_editor_schema; pub mod get_property_editor_validations; pub mod get_property_editor_values; -pub mod get_resource; +pub mod update_property_editor_value; + +// pub mod alter_simulation; +// pub mod debug; +// pub mod delete_property_editor_value; +// pub mod get_code; +// pub mod get_components_metadata; +// pub mod get_diff; +// pub mod get_resource; pub mod insert_property_editor_value; -pub mod json; +// pub mod json; pub mod list_qualifications; -pub mod migrate_to_default_variant; -pub mod refresh; -pub mod resource_domain_diff; -pub mod restore_default_function; -pub mod set_type; -pub mod update_property_editor_value; +// pub mod list_resources; +// pub mod refresh; +// pub mod resource_domain_diff; +// pub mod set_type; #[remain::sorted] #[derive(Debug, Error)] pub enum ComponentError { - #[error("action prototype error")] - ActionPrototype(#[from] ActionPrototypeError), - #[error("attribute context builder error: {0}")] - AttributeContextBuilder(#[from] AttributeContextBuilderError), - #[error("attribute prototype error: {0}")] - AttributePrototype(#[from] AttributePrototypeError), - #[error("attribute prototype argument error: {0}")] - AttributePrototypeArgument(#[from] AttributePrototypeArgumentError), - #[error("attribute prototype not found")] - AttributePrototypeNotFound, + // #[error("attribute context builder error: {0}")] + // AttributeContextBuilder(#[from] AttributeContextBuilderError), + // #[error("attribute prototype error: {0}")] + // AttributePrototype(#[from] AttributePrototypeError), + // #[error("attribute prototype argument error: {0}")] + // AttributePrototypeArgument(#[from] AttributePrototypeArgumentError), + // #[error("attribute prototype not found")] + // AttributePrototypeNotFound, #[error("attribute value error: {0}")] AttributeValue(#[from] 
AttributeValueError), - #[error("attribute value not found")] - AttributeValueNotFound, + // #[error("attribute value not found")] + // AttributeValueNotFound, #[error("change set error: {0}")] ChangeSet(#[from] ChangeSetError), - #[error("change status error: {0}")] - ChangeStatus(#[from] ChangeStatusError), - #[error("component error: {0}")] - Component(#[from] DalComponentError), - #[error("component debug view error: {0}")] - ComponentDebug(String), - #[error("component debug view error: {0}")] - ComponentDebugView(#[from] ComponentDebugViewError), - #[error("component migration error: {0}")] - ComponentMigrate(#[from] ComponentMigrateError), - #[error("component name not found")] - ComponentNameNotFound, + // #[error("change status error: {0}")] + // ChangeStatus(#[from] ChangeStatusError), + // #[error("component debug view error: {0}")] + // ComponentDebug(String), + // #[error("component debug view error: {0}")] + // ComponentDebugView(#[from] ComponentDebugViewError), + // #[error("component name not found")] + // ComponentNameNotFound, #[error("component not found for id: {0}")] ComponentNotFound(ComponentId), - #[error("component view error: {0}")] - ComponentView(#[from] ComponentViewError), - #[error("dal schema error: {0}")] - DalSchema(#[from] DalSchemaError), - #[error("diagram error: {0}")] - Diagram(#[from] DiagramError), - #[error("external provider error: {0}")] - ExternalProvider(#[from] ExternalProviderError), - #[error("func error: {0}")] - Func(#[from] FuncError), - #[error("func binding error: {0}")] - FuncBinding(#[from] FuncBindingError), + // #[error("component view error: {0}")] + // ComponentView(#[from] ComponentViewError), + #[error("dal component error: {0}")] + DalComponent(#[from] DalComponentError), + // #[error("dal schema error: {0}")] + // DalSchema(#[from] DalSchemaError), + // #[error("diagram error: {0}")] + // Diagram(#[from] DiagramError), + // #[error("external provider error: {0}")] + // ExternalProvider(#[from] 
ExternalProviderError), + // #[error("func error: {0}")] + // Func(#[from] FuncError), + // #[error("func binding error: {0}")] + // FuncBinding(#[from] FuncBindingError), #[error("hyper error: {0}")] Http(#[from] axum::http::Error), - #[error("identity func not found")] - IdentityFuncNotFound, - #[error("internal provider error: {0}")] - InternalProvider(#[from] InternalProviderError), - #[error("invalid request")] - InvalidRequest, + // #[error("identity func not found")] + // IdentityFuncNotFound, + // #[error("internal provider error: {0}")] + // InternalProvider(#[from] InternalProviderError), + // #[error("invalid request")] + // InvalidRequest, #[error("invalid visibility")] InvalidVisibility, - #[error("property value key not found")] - KeyNotFound, - #[error(transparent)] - Nats(#[from] si_data_nats::NatsError), - #[error("node error: {0}")] - Node(#[from] NodeError), - #[error(transparent)] - Pg(#[from] si_data_pg::PgError), - #[error(transparent)] - Prop(#[from] PropError), + // #[error("property value key not found")] + // KeyNotFound, + // #[error(transparent)] + // Nats(#[from] si_data_nats::NatsError), + // #[error("node error: {0}")] + // Node(#[from] NodeError), + // #[error(transparent)] + // Pg(#[from] si_data_pg::PgError), + // #[error(transparent)] + // Prop(#[from] PropError), #[error("property editor error: {0}")] PropertyEditor(#[from] PropertyEditorError), - #[error("prop not found for id: {0}")] - PropNotFound(PropId), - #[error("reconciliation prototype: {0}")] - ReconciliationPrototype(#[from] ReconciliationPrototypeError), - #[error("can't delete attribute value for root prop")] - RootPropAttributeValue, - #[error("schema error: {0}")] - Schema(#[from] SchemaError), + // #[error("prop not found for id: {0}")] + // PropNotFound(PropId), + // #[error("reconciliation prototype: {0}")] + // ReconciliationPrototype(#[from] ReconciliationPrototypeError), + // #[error("can't delete attribute value for root prop")] + // RootPropAttributeValue, 
+ // #[error("schema error: {0}")] + // Schema(#[from] SchemaError), #[error("schema not found")] SchemaNotFound, - #[error("schema variant not found")] - SchemaVariantNotFound, + // #[error("schema variant not found")] + // SchemaVariantNotFound, #[error("serde json error: {0}")] SerdeJson(#[from] serde_json::Error), #[error(transparent)] StandardModel(#[from] StandardModelError), - #[error("system id is required: ident_nil_v1() was provided")] - SystemIdRequired, + // #[error("system id is required: ident_nil_v1() was provided")] + // SystemIdRequired, #[error(transparent)] Transactions(#[from] TransactionsError), - #[error(transparent)] + // #[error("ws event error: {0}")] + // WsEvent(#[from] WsEventError), + #[error("validation resolver error: {0}")] ValidationResolver(#[from] ValidationResolverError), - #[error("ws event error: {0}")] - WsEvent(#[from] WsEventError), } pub type ComponentResult = std::result::Result; @@ -156,18 +143,6 @@ impl IntoResponse for ComponentError { pub fn routes() -> Router { Router::new() - .route( - "/get_components_metadata", - get(get_components_metadata::get_components_metadata), - ) - .route( - "/list_qualifications", - get(list_qualifications::list_qualifications), - ) - .route("/get_code", get(get_code::get_code)) - .route("/get_resource", get(get_resource::get_resource)) - .route("/get_actions", get(get_actions::get_actions)) - .route("/get_diff", get(get_diff::get_diff)) .route( "/get_property_editor_schema", get(get_property_editor_schema::get_property_editor_schema), @@ -176,10 +151,21 @@ pub fn routes() -> Router { "/get_property_editor_values", get(get_property_editor_values::get_property_editor_values), ) + //.route( + // "/get_property_editor_validations", + // get(get_property_editor_validations::get_property_editor_validations), + // ) + // .route( + // "/get_components_metadata", + // get(get_components_metadata::get_components_metadata), + // ) .route( - "/get_property_editor_validations", - 
get(get_property_editor_validations::get_property_editor_validations), + "/list_qualifications", + get(list_qualifications::list_qualifications), ) + // .route("/list_resources", get(list_resources::list_resources)) + // .route("/get_code", get(get_code::get_code)) + // .route("/get_diff", get(get_diff::get_diff)) .route( "/update_property_editor_value", post(update_property_editor_value::update_property_editor_value), @@ -188,25 +174,17 @@ pub fn routes() -> Router { "/insert_property_editor_value", post(insert_property_editor_value::insert_property_editor_value), ) - .route( - "/delete_property_editor_value", - post(delete_property_editor_value::delete_property_editor_value), - ) - .route( - "/restore_default_function", - post(restore_default_function::restore_default_function), - ) - .route("/set_type", post(set_type::set_type)) - .route("/refresh", post(refresh::refresh)) - .route("/resource_domain_diff", get(resource_domain_diff::get_diff)) - .route( - "/alter_simulation", - post(alter_simulation::alter_simulation), - ) - .route("/debug", get(debug::debug_component)) - .route("/json", get(json::json)) - .route( - "/migrate_to_default_variant", - post(migrate_to_default_variant::migrate_to_default_variant), - ) + // .route( + // "/delete_property_editor_value", + // post(delete_property_editor_value::delete_property_editor_value), + // ) + // .route("/set_type", post(set_type::set_type)) + // .route("/refresh", post(refresh::refresh)) + // .route("/resource_domain_diff", get(resource_domain_diff::get_diff)) + // .route( + // "/alter_simulation", + // post(alter_simulation::alter_simulation), + // ) + // .route("/debug", get(debug::debug_component)) + // .route("/json", get(json::json)) } diff --git a/lib/sdf-server/src/server/service/component/get_property_editor_schema.rs b/lib/sdf-server/src/server/service/component/get_property_editor_schema.rs index b917e715d5..8b0ebd7729 100644 --- a/lib/sdf-server/src/server/service/component/get_property_editor_schema.rs 
+++ b/lib/sdf-server/src/server/service/component/get_property_editor_schema.rs @@ -1,10 +1,10 @@ use axum::extract::Query; use axum::Json; use dal::property_editor::schema::PropertyEditorSchema; -use dal::{Component, ComponentId, StandardModel, Visibility}; +use dal::{Component, ComponentId, Visibility}; use serde::{Deserialize, Serialize}; -use super::{ComponentError, ComponentResult}; +use super::ComponentResult; use crate::server::extract::{AccessBuilder, HandlerContext}; #[derive(Deserialize, Serialize, Debug)] @@ -24,24 +24,19 @@ pub async fn get_property_editor_schema( ) -> ComponentResult> { let ctx = builder.build(request_ctx.build(request.visibility)).await?; - let is_component_in_tenancy = Component::is_in_tenancy(&ctx, request.component_id).await?; - let is_component_in_visibility = Component::get_by_id(&ctx, &request.component_id) - .await? - .is_some(); - if is_component_in_tenancy && !is_component_in_visibility { - return Err(ComponentError::InvalidVisibility); - } - - let component = Component::get_by_id(&ctx, &request.component_id) - .await? - .ok_or(ComponentError::ComponentNotFound(request.component_id))?; - let schema_variant_id = *component - .schema_variant(&ctx) - .await? - .ok_or(ComponentError::SchemaNotFound)? - .id(); - let prop_edit_schema = - PropertyEditorSchema::for_schema_variant(&ctx, schema_variant_id).await?; + // TODO(nick): restore this functionality with the new graph, + // let is_component_in_tenancy = Component::is_in_tenancy(&ctx, request.component_id).await?; + // let is_component_in_visibility = Component::get_by_id(&ctx, &request.component_id) + // .await? 
+ // .is_some(); + // if is_component_in_tenancy && !is_component_in_visibility { + // return Err(ComponentError::InvalidVisibility); + // } + + let schema_variant = + Component::schema_variant_for_component_id(&ctx, request.component_id).await?; + + let prop_edit_schema = PropertyEditorSchema::assemble(&ctx, schema_variant.id()).await?; Ok(Json(prop_edit_schema)) } diff --git a/lib/sdf-server/src/server/service/component/get_property_editor_validations.rs b/lib/sdf-server/src/server/service/component/get_property_editor_validations.rs index 21856d77bd..771684be43 100644 --- a/lib/sdf-server/src/server/service/component/get_property_editor_validations.rs +++ b/lib/sdf-server/src/server/service/component/get_property_editor_validations.rs @@ -1,6 +1,7 @@ use axum::extract::Query; use axum::Json; -use dal::{ComponentId, PropId, StandardModel, ValidationOutput, ValidationResolver, Visibility}; +use dal::validation::resolver::{ValidationOutput, ValidationResolver}; +use dal::{ComponentId, PropId, StandardModel, Visibility}; use serde::{Deserialize, Serialize}; use std::collections::HashMap; diff --git a/lib/sdf-server/src/server/service/component/get_property_editor_values.rs b/lib/sdf-server/src/server/service/component/get_property_editor_values.rs index 3a16013edc..d9e2979e7b 100644 --- a/lib/sdf-server/src/server/service/component/get_property_editor_values.rs +++ b/lib/sdf-server/src/server/service/component/get_property_editor_values.rs @@ -1,10 +1,10 @@ use axum::extract::Query; use axum::Json; use dal::property_editor::values::PropertyEditorValues; -use dal::{Component, ComponentId, StandardModel, Visibility}; +use dal::{ComponentId, Visibility}; use serde::{Deserialize, Serialize}; -use super::{ComponentError, ComponentResult}; +use super::ComponentResult; use crate::server::extract::{AccessBuilder, HandlerContext}; #[derive(Deserialize, Serialize, Debug)] @@ -24,15 +24,20 @@ pub async fn get_property_editor_values( ) -> ComponentResult> { let ctx = 
builder.build(request_ctx.build(request.visibility)).await?; - let is_component_in_tenancy = Component::is_in_tenancy(&ctx, request.component_id).await?; - let is_component_in_visibility = Component::get_by_id(&ctx, &request.component_id) - .await? - .is_some(); - if is_component_in_tenancy && !is_component_in_visibility { - return Err(ComponentError::InvalidVisibility); - } + // TODO(nick): restore functionality. + // let is_component_in_tenancy = Component::is_in_tenancy(&ctx, request.component_id).await?; + // let is_component_in_visibility = Component::get_by_id(&ctx, &request.component_id) + // .await? + // .is_some(); + // if is_component_in_tenancy && !is_component_in_visibility { + // return Err(ComponentError::InvalidVisibility); + // } + // - let prop_edit_values = PropertyEditorValues::for_component(&ctx, request.component_id).await?; + let prop_edit_values = PropertyEditorValues::assemble(&ctx, request.component_id).await?; + + // TODO(nick): this is temporary since main uses a serialized payload from the summary table. 
+ let prop_edit_values = serde_json::to_value(prop_edit_values)?; Ok(Json(prop_edit_values)) } diff --git a/lib/sdf-server/src/server/service/component/insert_property_editor_value.rs b/lib/sdf-server/src/server/service/component/insert_property_editor_value.rs index 55d5e657d5..4b222d0ec7 100644 --- a/lib/sdf-server/src/server/service/component/insert_property_editor_value.rs +++ b/lib/sdf-server/src/server/service/component/insert_property_editor_value.rs @@ -1,8 +1,5 @@ use axum::{response::IntoResponse, Json}; -use dal::{ - AttributeContext, AttributeValue, AttributeValueId, ChangeSet, ComponentId, PropId, Visibility, - WsEvent, -}; +use dal::{AttributeValue, AttributeValueId, ComponentId, PropId, Visibility}; use serde::{Deserialize, Serialize}; use super::ComponentResult; @@ -25,42 +22,43 @@ pub async fn insert_property_editor_value( AccessBuilder(request_ctx): AccessBuilder, Json(request): Json, ) -> ComponentResult { - let mut ctx = builder.build(request_ctx.build(request.visibility)).await?; + let ctx = builder.build(request_ctx.build(request.visibility)).await?; - let mut force_changeset_pk = None; - if ctx.visibility().is_head() { - let change_set = ChangeSet::new(&ctx, ChangeSet::generate_name(), None).await?; - - let new_visibility = Visibility::new(change_set.pk, request.visibility.deleted_at); - - ctx.update_visibility(new_visibility); - - force_changeset_pk = Some(change_set.pk); - - WsEvent::change_set_created(&ctx, change_set.pk) - .await? 
- .publish_on_commit(&ctx) - .await?; - }; - - let attribute_context = AttributeContext::builder() - .set_prop_id(request.prop_id) - .set_component_id(request.component_id) - .to_context()?; - let _ = AttributeValue::insert_for_context( + // let mut force_changeset_pk = None; + // if ctx.visibility().is_head() { + // let change_set = ChangeSet::new(&ctx, ChangeSet::generate_name(), None).await?; + // + // let new_visibility = Visibility::new(change_set.pk, request.visibility.deleted_at); + // + // ctx.update_visibility(new_visibility); + // + // force_changeset_pk = Some(change_set.pk); + // + // WsEvent::change_set_created(&ctx, change_set.pk) + // .await? + // .publish_on_commit(&ctx) + // .await?; + // }; + // + // + let _ = AttributeValue::insert( &ctx, - attribute_context, request.parent_attribute_value_id, request.value, request.key, ) .await?; + // WsEvent::change_set_written(&ctx) + // .await? + // .publish_on_commit(&ctx) + // .await?; + ctx.commit().await?; - let mut response = axum::response::Response::builder(); - if let Some(force_changeset_pk) = force_changeset_pk { - response = response.header("force_changeset_pk", force_changeset_pk.to_string()); - } + let response = axum::response::Response::builder(); + // if let Some(force_changeset_pk) = force_changeset_pk { + // response = response.header("force_changeset_pk", force_changeset_pk.to_string()); + // } Ok(response.body(axum::body::Empty::new())?) 
} diff --git a/lib/sdf-server/src/server/service/component/list_qualifications.rs b/lib/sdf-server/src/server/service/component/list_qualifications.rs index 335b29d55e..3ffdf70a57 100644 --- a/lib/sdf-server/src/server/service/component/list_qualifications.rs +++ b/lib/sdf-server/src/server/service/component/list_qualifications.rs @@ -1,9 +1,9 @@ use axum::extract::Query; use axum::Json; -use dal::{qualification::QualificationView, Component, ComponentId, StandardModel, Visibility}; +use dal::{qualification::QualificationView, Component, ComponentId, Visibility}; use serde::{Deserialize, Serialize}; -use super::{ComponentError, ComponentResult}; +use super::ComponentResult; use crate::server::extract::{AccessBuilder, HandlerContext}; #[derive(Deserialize, Serialize, Debug)] @@ -23,13 +23,6 @@ pub async fn list_qualifications( ) -> ComponentResult> { let ctx = builder.build(request_ctx.build(request.visibility)).await?; - let is_component_in_tenancy = Component::is_in_tenancy(&ctx, request.component_id).await?; - let is_component_in_visibility = Component::get_by_id(&ctx, &request.component_id) - .await? 
- .is_some(); - if is_component_in_tenancy && !is_component_in_visibility { - return Err(ComponentError::InvalidVisibility); - } let qualifications = Component::list_qualifications(&ctx, request.component_id).await?; Ok(Json(qualifications)) diff --git a/lib/sdf-server/src/server/service/component/update_property_editor_value.rs b/lib/sdf-server/src/server/service/component/update_property_editor_value.rs index 5678f61c75..7d4eeff620 100644 --- a/lib/sdf-server/src/server/service/component/update_property_editor_value.rs +++ b/lib/sdf-server/src/server/service/component/update_property_editor_value.rs @@ -1,15 +1,10 @@ use axum::extract::OriginalUri; use axum::{response::IntoResponse, Json}; -use dal::{ - AttributeContext, AttributeValue, AttributeValueId, ChangeSet, Component, ComponentId, Prop, - PropId, StandardModel, Visibility, -}; +use dal::{AttributeValue, AttributeValueId, ChangeSet, ComponentId, PropId, Visibility}; use serde::{Deserialize, Serialize}; use super::ComponentResult; use crate::server::extract::{AccessBuilder, HandlerContext, PosthogClient}; -use crate::server::tracking::track; -use crate::service::component::ComponentError; #[derive(Deserialize, Serialize, Debug)] #[serde(rename_all = "camelCase")] @@ -27,66 +22,57 @@ pub struct UpdatePropertyEditorValueRequest { pub async fn update_property_editor_value( HandlerContext(builder): HandlerContext, AccessBuilder(request_ctx): AccessBuilder, - PosthogClient(posthog_client): PosthogClient, - OriginalUri(original_uri): OriginalUri, + PosthogClient(_posthog_client): PosthogClient, + OriginalUri(_original_uri): OriginalUri, Json(request): Json, ) -> ComponentResult { let mut ctx = builder.build(request_ctx.build(request.visibility)).await?; let force_changeset_pk = ChangeSet::force_new(&mut ctx).await?; - let attribute_context = AttributeContext::builder() - .set_prop_id(request.prop_id) - .set_component_id(request.component_id) - .to_context()?; - AttributeValue::update_for_context( - &ctx, - 
request.attribute_value_id, - request.parent_attribute_value_id, - attribute_context, - request.value, - request.key, - ) - .await?; + AttributeValue::update(&ctx, request.attribute_value_id, request.value).await?; // Track - { - let component = Component::get_by_id(&ctx, &request.component_id) - .await? - .ok_or(ComponentError::ComponentNotFound(request.component_id))?; - - let component_schema = component - .schema(&ctx) - .await? - .ok_or(ComponentError::SchemaNotFound)?; - - let prop = Prop::get_by_id(&ctx, &request.prop_id) - .await? - .ok_or(ComponentError::PropNotFound(request.prop_id))?; - - // In this context, there will always be a parent attribute value id - let parent_prop = if let Some(att_val_id) = request.parent_attribute_value_id { - Some(AttributeValue::find_prop_for_value(&ctx, att_val_id).await?) - } else { - None - }; - - track( - &posthog_client, - &ctx, - &original_uri, - "property_value_updated", - serde_json::json!({ - "component_id": component.id(), - "component_schema_name": component_schema.name(), - "prop_id": prop.id(), - "prop_name": prop.name(), - "parent_prop_id": parent_prop.as_ref().map(|prop| prop.id()), - "parent_prop_name": parent_prop.as_ref().map(|prop| prop.name()), - }), - ); - } - + // { + // let component = Component::get_by_id(&ctx, request.component_id).await?; + // + // let component_schema = component + // .schema(&ctx) + // .await? + // .ok_or(ComponentError::SchemaNotFound)?; + // + // let prop = Prop::get_by_id(&ctx, &request.prop_id) + // .await? + // .ok_or(ComponentError::PropNotFound(request.prop_id))?; + // + // // In this context, there will always be a parent attribute value id + // let parent_prop = if let Some(att_val_id) = request.parent_attribute_value_id { + // Some(AttributeValue::find_prop_for_value(&ctx, att_val_id).await?) 
+ // } else { + // None + // }; + // + // track( + // &posthog_client, + // &ctx, + // &original_uri, + // "property_value_updated", + // serde_json::json!({ + // "component_id": component.id(), + // "component_schema_name": component_schema.name(), + // "prop_id": prop.id(), + // "prop_name": prop.name(), + // "parent_prop_id": parent_prop.as_ref().map(|prop| prop.id()), + // "parent_prop_name": parent_prop.as_ref().map(|prop| prop.name()), + // }), + // ); + // } + // + // WsEvent::change_set_written(&ctx) + // .await? + // .publish_on_commit(&ctx) + // .await?; + // ctx.commit().await?; let mut response = axum::response::Response::builder(); diff --git a/lib/sdf-server/src/server/service/diagram.rs b/lib/sdf-server/src/server/service/diagram.rs index 770ea869cd..55d8f8f8ca 100644 --- a/lib/sdf-server/src/server/service/diagram.rs +++ b/lib/sdf-server/src/server/service/diagram.rs @@ -3,49 +3,35 @@ use axum::response::{IntoResponse, Response}; use axum::routing::{get, post}; use axum::Json; use axum::Router; -use dal::provider::external::ExternalProviderError as DalExternalProviderError; -use dal::socket::{SocketError, SocketId}; -use dal::{ - component::ComponentViewError, node::NodeId, schema::variant::SchemaVariantError, ActionError, - ActionPrototypeError, AttributeContextBuilderError, AttributeValueError, ChangeSetError, - ComponentError, ComponentType, DiagramError as DalDiagramError, EdgeError, - InternalProviderError, NodeError, NodeKind, NodeMenuError, SchemaError as DalSchemaError, - SchemaVariantId, StandardModelError, TransactionsError, -}; -use dal::{AttributeReadContext, WsEventError}; -use std::num::ParseFloatError; +use dal::component::ComponentError; +use dal::node_menu::NodeMenuError; +use dal::workspace_snapshot::WorkspaceSnapshotError; +use dal::WsEventError; +use dal::{ChangeSetError, SchemaVariantId, StandardModelError, TransactionsError}; use thiserror::Error; use crate::server::state::AppState; -use crate::service::schema::SchemaError; 
-mod connect_component_to_frame; +use self::get_node_add_menu::get_node_add_menu; + +mod connect_component_to_frame_new_engine; +pub mod create_component; pub mod create_connection; -pub mod create_node; -pub mod delete_component; -pub mod delete_connection; -mod detach_component_from_frame; pub mod get_diagram; pub mod get_node_add_menu; pub mod list_schema_variants; -pub mod paste_component; -mod restore_component; -pub mod restore_connection; -pub mod set_node_position; +pub mod set_component_position; + +// mod connect_component_to_frame; +// pub mod delete_component; +// pub mod delete_connection; +// pub mod paste_component; +// mod restore_component; +// pub mod restore_connection; #[remain::sorted] #[derive(Debug, Error)] pub enum DiagramError { - #[error("action error: {0}")] - ActionError(#[from] ActionError), - #[error("action prototype error: {0}")] - ActionPrototype(#[from] ActionPrototypeError), - #[error("attribute context builder: {0}")] - AttributeContextBuilder(#[from] AttributeContextBuilderError), - #[error("attribute value error: {0}")] - AttributeValue(#[from] AttributeValueError), - #[error("attribute value not found for context: {0:?}")] - AttributeValueNotFoundForContext(AttributeReadContext), #[error("changeset error: {0}")] ChangeSet(#[from] ChangeSetError), #[error("change set not found")] @@ -54,76 +40,52 @@ pub enum DiagramError { Component(#[from] ComponentError), #[error("component not found")] ComponentNotFound, - #[error("component view error: {0}")] - ComponentView(#[from] ComponentViewError), #[error(transparent)] ContextTransaction(#[from] TransactionsError), - #[error("dal schema error: {0}")] - DalSchema(#[from] DalSchemaError), #[error("dal diagram error: {0}")] - DiagramError(#[from] DalDiagramError), - #[error(transparent)] - Edge(#[from] EdgeError), + DalDiagram(#[from] dal::diagram::DiagramError), + #[error("dal frame error: {0}")] + DalFrame(#[from] dal::component::frame::FrameError), + #[error("dal schema error: {0}")] 
+ DalSchema(#[from] dal::SchemaError), + #[error("dal schema variant error: {0}")] + DalSchemaVariant(#[from] dal::schema::variant::SchemaVariantError), #[error("edge not found")] EdgeNotFound, - #[error("external provider error: {0}")] - ExternalProvider(#[from] DalExternalProviderError), - #[error("external provider not found for socket id: {0}")] - ExternalProviderNotFoundForSocket(SocketId), #[error("frame internal provider not found for schema variant id: {0}")] FrameInternalProviderNotFoundForSchemaVariant(SchemaVariantId), #[error("frame socket not found for schema variant id: {0}")] FrameSocketNotFound(SchemaVariantId), #[error("invalid header name {0}")] Hyper(#[from] hyper::http::Error), - #[error(transparent)] - InternalProvider(#[from] InternalProviderError), - #[error("internal provider not found for socket id: {0}")] - InternalProviderNotFoundForSocket(SocketId), - #[error("invalid component type ({0:?}) for frame")] - InvalidComponentTypeForFrame(ComponentType), - #[error("invalid parent node kind {0:?}")] - InvalidParentNode(NodeKind), #[error("invalid request")] InvalidRequest, #[error("invalid system")] InvalidSystem, #[error(transparent)] Nats(#[from] si_data_nats::NatsError), - #[error("node error: {0}")] - Node(#[from] NodeError), #[error("node menu error: {0}")] NodeMenu(#[from] NodeMenuError), - #[error("node not found: {0}")] - NodeNotFound(NodeId), #[error("not authorized")] NotAuthorized, - #[error("parent node not found {0}")] - ParentNodeNotFound(NodeId), - #[error("parse int: {0}")] - ParseFloat(#[from] ParseFloatError), #[error("paste failed")] PasteError, #[error(transparent)] Pg(#[from] si_data_pg::PgError), #[error(transparent)] PgPool(#[from] si_data_pg::PgPoolError), - #[error("schema error: {0}")] - Schema(#[from] SchemaError), #[error("schema not found")] SchemaNotFound, - #[error("schema variant error: {0}")] - SchemaVariant(#[from] SchemaVariantError), #[error("schema variant not found")] SchemaVariantNotFound, #[error("serde 
error: {0}")] Serde(#[from] serde_json::Error), - #[error("socket error: {0}")] - Socket(#[from] SocketError), #[error("socket not found")] SocketNotFound, #[error(transparent)] StandardModel(#[from] StandardModelError), + #[error(transparent)] + WorkspaceSnaphot(#[from] WorkspaceSnapshotError), #[error("ws event error: {0}")] WsEvent(#[from] WsEventError), } @@ -147,53 +109,45 @@ impl IntoResponse for DiagramError { pub fn routes() -> Router { Router::new() - .route("/get_diagram", get(get_diagram::get_diagram)) + // .route( + // "/delete_connection", + // post(delete_connection::delete_connection), + // ) + // .route( + // "/restore_connection", + // post(restore_connection::restore_connection), + // ) + // .route( + // "/delete_component", + // post(delete_component::delete_component), + // ) + // .route( + // "/delete_components", + // post(delete_component::delete_components), + // ) + // .route( + // "/restore_component", + // post(restore_component::restore_component), + // ) + // .route( + // "/restore_components", + // post(restore_component::restore_components), + // ) .route( - "/get_node_add_menu", - post(get_node_add_menu::get_node_add_menu), - ) - .route("/create_node", post(create_node::create_node)) - .route( - "/set_node_position", - post(set_node_position::set_node_position), + "/connect_component_to_frame", + post(connect_component_to_frame_new_engine::connect_component_to_frame), ) + .route("/get_node_add_menu", post(get_node_add_menu)) .route( "/create_connection", post(create_connection::create_connection), ) + .route("/create_node", post(create_component::create_component)) .route( - "/delete_connection", - post(delete_connection::delete_connection), - ) - .route( - "/restore_connection", - post(restore_connection::restore_connection), - ) - .route( - "/delete_component", - post(delete_component::delete_component), - ) - .route( - "/delete_components", - post(delete_component::delete_components), - ) - .route( - "/detach_component", - 
post(detach_component_from_frame::detach_component_from_frame), - ) - .route("/paste_components", post(paste_component::paste_components)) - .route( - "/restore_component", - post(restore_component::restore_component), - ) - .route( - "/restore_components", - post(restore_component::restore_components), - ) - .route( - "/connect_component_to_frame", - post(connect_component_to_frame::connect_component_to_frame), + "/set_node_position", + post(set_component_position::set_component_position), ) + .route("/get_diagram", get(get_diagram::get_diagram)) .route( "/list_schema_variants", get(list_schema_variants::list_schema_variants), diff --git a/lib/sdf-server/src/server/service/diagram/connect_component_to_frame_new_engine.rs b/lib/sdf-server/src/server/service/diagram/connect_component_to_frame_new_engine.rs new file mode 100644 index 0000000000..72c01ffafa --- /dev/null +++ b/lib/sdf-server/src/server/service/diagram/connect_component_to_frame_new_engine.rs @@ -0,0 +1,49 @@ +use axum::extract::OriginalUri; +use axum::{response::IntoResponse, Json}; +use serde::{Deserialize, Serialize}; + +use dal::component::frame::{Connection, Frame}; +use dal::diagram::NodeId; +use dal::Visibility; + +use crate::server::extract::{AccessBuilder, HandlerContext, PosthogClient}; + +use super::DiagramResult; + +#[derive(Deserialize, Serialize, Debug)] +#[serde(rename_all = "camelCase")] +pub struct CreateFrameConnectionRequest { + pub child_node_id: NodeId, + pub parent_node_id: NodeId, + #[serde(flatten)] + pub visibility: Visibility, +} + +#[derive(Deserialize, Serialize, Debug)] +#[serde(rename_all = "camelCase")] +pub struct CreateFrameConnectionResponse { + pub connection: Connection, +} + +/// Create a [`Connection`](dal::Connection) with a _to_ [`Socket`](dal::Socket) and +/// [`Node`](dal::Node) and a _from_ [`Socket`](dal::Socket) and [`Node`](dal::Node). +/// Creating a change set if on head. 
+pub async fn connect_component_to_frame( + HandlerContext(builder): HandlerContext, + AccessBuilder(request_ctx): AccessBuilder, + PosthogClient(_posthog_client): PosthogClient, + OriginalUri(_original_uri): OriginalUri, + Json(request): Json, +) -> DiagramResult { + let ctx = builder.build(request_ctx.build(request.visibility)).await?; + + // Connect children to parent through frame edge + Frame::attach_child_to_parent(&ctx, request.parent_node_id, request.child_node_id).await?; + + ctx.commit().await?; + + let response = axum::response::Response::builder(); + Ok(response + .header("content-type", "application/json") + .body("{}".to_owned())?) +} diff --git a/lib/sdf-server/src/server/service/diagram/create_component.rs b/lib/sdf-server/src/server/service/diagram/create_component.rs new file mode 100644 index 0000000000..fe46ef8172 --- /dev/null +++ b/lib/sdf-server/src/server/service/diagram/create_component.rs @@ -0,0 +1,211 @@ +use axum::extract::OriginalUri; +use axum::{response::IntoResponse, Json}; +use serde::{Deserialize, Serialize}; + +use dal::component::frame::Frame; +use dal::component::{DEFAULT_COMPONENT_HEIGHT, DEFAULT_COMPONENT_WIDTH}; +use dal::{generate_name, Component, ComponentId, SchemaId, SchemaVariant, Visibility, WsEvent}; + +use crate::server::extract::{AccessBuilder, HandlerContext, PosthogClient}; +use crate::service::diagram::DiagramResult; + +use super::DiagramError; + +#[derive(Deserialize, Serialize, Debug)] +#[serde(rename_all = "camelCase")] +pub struct CreateComponentRequest { + pub schema_id: SchemaId, + pub parent_id: Option, + pub x: String, + pub y: String, + #[serde(flatten)] + pub visibility: Visibility, +} + +#[derive(Deserialize, Serialize, Debug)] +#[serde(rename_all = "camelCase")] +pub struct CreateComponentResponse { + pub component_id: ComponentId, +} + +pub async fn create_component( + HandlerContext(builder): HandlerContext, + AccessBuilder(request_ctx): AccessBuilder, + PosthogClient(_posthog_client): 
PosthogClient, + OriginalUri(_original_uri): OriginalUri, + Json(request): Json, +) -> DiagramResult { + let ctx = builder.build(request_ctx.build(request.visibility)).await?; + + // TODO(nick): restore this with new engine semantics. + // let mut force_changeset_pk = None; + // if ctx.visibility().is_head() { + // let change_set = ChangeSet::new(&ctx, ChangeSet::generate_name(), None).await?; + // + // let new_visibility = Visibility::new(change_set.pk, request.visibility.deleted_at); + // + // ctx.update_visibility(new_visibility); + // + // force_changeset_pk = Some(change_set.pk); + // + // WsEvent::change_set_created(&ctx, change_set.pk) + // .await? + // .publish_on_commit(&ctx) + // .await?; + // }; + + let name = generate_name(); + + // TODO: restore the notion of a "default" schema variant + let variant = SchemaVariant::list_for_schema(&ctx, request.schema_id) + .await? + .into_iter() + .next() + .ok_or(DiagramError::SchemaVariantNotFound)?; + + let component = Component::new(&ctx, &name, variant.id(), None).await?; + + // TODO(nick): restore the action prototype usage here. + // for prototype in ActionPrototype::find_for_context_and_kind( + // &ctx, + // ActionKind::Create, + // ActionPrototypeContext::new_for_context_field(ActionPrototypeContextField::SchemaVariant( + // *schema_variant_id, + // )), + // ) + // .await? 
+ // { + // let action = Action::new(&ctx, *prototype.id(), *component.id()).await?; + // let prototype = action.prototype(&ctx).await?; + // let component = action.component(&ctx).await?; + // + // track( + // &posthog_client, + // &ctx, + // &original_uri, + // "create_action", + // serde_json::json!({ + // "how": "/diagram/create_component", + // "prototype_id": prototype.id(), + // "prototype_kind": prototype.kind(), + // "component_id": component.id(), + // "component_name": component.name(&ctx).await?, + // "change_set_pk": ctx.visibility().change_set_pk, + // }), + // ); + // } + + let component = component + .set_geometry( + &ctx, + request.x.clone(), + request.y.clone(), + Some(DEFAULT_COMPONENT_WIDTH), + Some(DEFAULT_COMPONENT_HEIGHT), + ) + .await?; + + if let Some(frame_id) = request.parent_id { + Frame::attach_child_to_parent(&ctx, frame_id, component.id()).await?; + } + + // TODO(nick): restore posthog logic and other potential missing frame logic. + // if let Some(frame_id) = request.parent_id { + // let component_socket = Socket::find_frame_socket_for_node( + // &ctx, + // *node.id(), + // SocketEdgeKind::ConfigurationOutput, + // ) + // .await?; + // let frame_socket = + // Socket::find_frame_socket_for_node(&ctx, frame_id, SocketEdgeKind::ConfigurationInput) + // .await?; + // + // let _connection = Connection::new( + // &ctx, + // *node.id(), + // *component_socket.id(), + // frame_id, + // *frame_socket.id(), + // EdgeKind::Symbolic, + // ) + // .await?; + // + // connect_component_sockets_to_frame(&ctx, frame_id, *node.id()).await?; + // + // let child_comp = Node::get_by_id(&ctx, node.id()) + // .await? + // .ok_or(DiagramError::NodeNotFound(*node.id()))? + // .component(&ctx) + // .await? + // .ok_or(DiagramError::ComponentNotFound)?; + // + // let child_schema = child_comp + // .schema(&ctx) + // .await? + // .ok_or(DiagramError::SchemaNotFound)?; + // + // let parent_comp = Node::get_by_id(&ctx, &frame_id) + // .await? 
+ // .ok_or(DiagramError::NodeNotFound(frame_id))? + // .component(&ctx) + // .await? + // .ok_or(DiagramError::ComponentNotFound)?; + // + // let parent_schema = parent_comp + // .schema(&ctx) + // .await? + // .ok_or(DiagramError::SchemaNotFound)?; + // + // track( + // &posthog_client, + // &ctx, + // &original_uri, + // "component_connected_to_frame", + // serde_json::json!({ + // "parent_component_id": parent_comp.id(), + // "parent_component_schema_name": parent_schema.name(), + // "parent_socket_id": frame_socket.id(), + // "parent_socket_name": frame_socket.name(), + // "child_component_id": child_comp.id(), + // "child_component_schema_name": child_schema.name(), + // "child_socket_id": component_socket.id(), + // "child_socket_name": component_socket.name(), + // }), + // ); + // } + + WsEvent::component_created(&ctx, component.id()) + .await? + .publish_on_commit(&ctx) + .await?; + + // TODO(nick): restore posthog tracking. + // track( + // &posthog_client, + // &ctx, + // &original_uri, + // "component_created", + // serde_json::json!({ + // "schema_id": schema.id(), + // "schema_name": schema.name(), + // "schema_variant_id": &schema_variant_id, + // "component_id": component.id(), + // "component_name": &name, + // }), + // ); + + ctx.commit().await?; + + let mut response = axum::response::Response::builder(); + // TODO(nick): restore change set creation when on head. 
+ // if let Some(force_changeset_pk) = force_changeset_pk { + // response = response.header("force_changeset_pk", force_changeset_pk.to_string()); + // } + response = response.header("content-type", "application/json"); + Ok( + response.body(serde_json::to_string(&CreateComponentResponse { + component_id: component.id(), + })?)?, + ) +} diff --git a/lib/sdf-server/src/server/service/diagram/create_connection.rs b/lib/sdf-server/src/server/service/diagram/create_connection.rs index 0ec5a8c501..a89cd723f3 100644 --- a/lib/sdf-server/src/server/service/diagram/create_connection.rs +++ b/lib/sdf-server/src/server/service/diagram/create_connection.rs @@ -1,23 +1,19 @@ use axum::extract::OriginalUri; use axum::{response::IntoResponse, Json}; -use dal::edge::EdgeKind; -use dal::{ - node::NodeId, socket::SocketId, AttributeReadContext, AttributeValue, ChangeSet, Connection, - InternalProvider, Node, Socket, StandardModel, Visibility, WsEvent, -}; +use dal::attribute::prototype::argument::AttributePrototypeArgumentId; +use dal::{Component, ComponentId, ExternalProviderId, InternalProviderId, User, Visibility}; use serde::{Deserialize, Serialize}; -use super::{DiagramError, DiagramResult}; +use super::DiagramResult; use crate::server::extract::{AccessBuilder, HandlerContext, PosthogClient}; -use crate::server::tracking::track; #[derive(Deserialize, Serialize, Debug)] #[serde(rename_all = "camelCase")] pub struct CreateConnectionRequest { - pub from_node_id: NodeId, - pub from_socket_id: SocketId, - pub to_node_id: NodeId, - pub to_socket_id: SocketId, + pub from_node_id: ComponentId, + pub from_socket_id: ExternalProviderId, + pub to_node_id: ComponentId, + pub to_socket_id: InternalProviderId, #[serde(flatten)] pub visibility: Visibility, } @@ -25,130 +21,104 @@ pub struct CreateConnectionRequest { #[derive(Deserialize, Serialize, Debug)] #[serde(rename_all = "camelCase")] pub struct CreateConnectionResponse { - pub connection: Connection, + pub id: 
AttributePrototypeArgumentId, + pub created_by: Option, + pub deleted_by: Option, } -/// Create a [`Connection`](dal::Connection) with a _to_ [`Socket`](dal::Socket) and -/// [`Node`](dal::Node) and a _from_ [`Socket`](dal::Socket) and [`Node`](dal::Node). -/// Creating change set if on head pub async fn create_connection( HandlerContext(builder): HandlerContext, AccessBuilder(request_ctx): AccessBuilder, - PosthogClient(posthog_client): PosthogClient, - OriginalUri(original_uri): OriginalUri, + PosthogClient(_posthog_client): PosthogClient, + OriginalUri(_original_uri): OriginalUri, Json(request): Json, ) -> DiagramResult { - let mut ctx = builder.build(request_ctx.build(request.visibility)).await?; - - let mut force_changeset_pk = None; - if ctx.visibility().is_head() { - let change_set = ChangeSet::new(&ctx, ChangeSet::generate_name(), None).await?; - - let new_visibility = Visibility::new(change_set.pk, request.visibility.deleted_at); - - ctx.update_visibility(new_visibility); - - force_changeset_pk = Some(change_set.pk); - - WsEvent::change_set_created(&ctx, change_set.pk) - .await? - .publish_on_commit(&ctx) - .await?; - }; - - let connection = Connection::new( + let ctx = builder.build(request_ctx.build(request.visibility)).await?; + + // TODO(nick): restore this with the new engine. + // let mut force_changeset_pk = None; + // if ctx.visibility().is_head() { + // let change_set = ChangeSet::new(&ctx, ChangeSet::generate_name(), None).await?; + // + // let new_visibility = Visibility::new(change_set.pk, request.visibility.deleted_at); + // + // ctx.update_visibility(new_visibility); + // + // force_changeset_pk = Some(change_set.pk); + // + // WsEvent::change_set_created(&ctx, change_set.pk) + // .await? 
+ // .publish_on_commit(&ctx) + // .await?; + // }; + + let attribute_prototype_argument_id = Component::connect( &ctx, request.from_node_id, request.from_socket_id, request.to_node_id, request.to_socket_id, - EdgeKind::Configuration, ) .await?; - let from_component = Node::get_by_id(&ctx, &request.from_node_id) - .await? - .ok_or(DiagramError::NodeNotFound(request.from_node_id))? - .component(&ctx) - .await? - .ok_or(DiagramError::ComponentNotFound)?; - - let from_component_schema = from_component - .schema(&ctx) - .await? - .ok_or(DiagramError::SchemaNotFound)?; - - let from_socket = Socket::get_by_id(&ctx, &request.from_socket_id) - .await? - .ok_or(DiagramError::SocketNotFound)?; - - let to_component = Node::get_by_id(&ctx, &request.to_node_id) - .await? - .ok_or(DiagramError::NodeNotFound(request.to_node_id))? - .component(&ctx) - .await? - .ok_or(DiagramError::ComponentNotFound)?; - - let to_component_schema = to_component - .schema(&ctx) - .await? - .ok_or(DiagramError::SchemaNotFound)?; - - let to_socket = Socket::get_by_id(&ctx, &request.to_socket_id) - .await? - .ok_or(DiagramError::SocketNotFound)?; - - let to_socket_internal_provider = - InternalProvider::find_explicit_for_socket(&ctx, request.to_socket_id) - .await? - .ok_or(DiagramError::InternalProviderNotFoundForSocket( - request.to_socket_id, - ))?; - - let to_attribute_value_context = AttributeReadContext { - internal_provider_id: Some(*to_socket_internal_provider.id()), - component_id: Some(*to_component.id()), - ..Default::default() - }; - let mut to_attribute_value = AttributeValue::find_for_context(&ctx, to_attribute_value_context) - .await? 
- .ok_or(DiagramError::AttributeValueNotFoundForContext( - to_attribute_value_context, - ))?; - - to_attribute_value - .update_from_prototype_function(&ctx) - .await?; - - ctx.enqueue_dependent_values_update(vec![*to_attribute_value.id()]) - .await?; - - track( - &posthog_client, - &ctx, - &original_uri, - "connection_created", - serde_json::json!({ - "from_node_id": request.from_node_id, - "from_node_schema_name": &from_component_schema.name(), - "from_socket_id": request.from_socket_id, - "from_socket_name": &from_socket.name(), - "to_node_id": request.to_node_id, - "to_node_schema_name": &to_component_schema.name(), - "to_socket_id": request.to_socket_id, - "to_socket_name": &to_socket.name(), - }), - ); + // TODO(nick): restore dependent values update. + // let to_attribute_value_context = AttributeReadContext { + // internal_provider_id: Some(*to_socket_internal_provider.id()), + // component_id: Some(*to_component.id()), + // ..Default::default() + // }; + // let mut to_attribute_value = AttributeValue::find_for_context(&ctx, to_attribute_value_context) + // .await? + // .ok_or(DiagramError::AttributeValueNotFoundForContext( + // to_attribute_value_context, + // ))?; + // + // to_attribute_value + // .update_from_prototype_function(&ctx) + // .await?; + // + // ctx.enqueue_job(DependentValuesUpdate::new( + // ctx.access_builder(), + // *ctx.visibility(), + // vec![*to_attribute_value.id()], + // )) + // .await?; + // + // WsEvent::change_set_written(&ctx) + // .await? 
+ // .publish_on_commit(&ctx) + // .await?; + // + // track( + // &posthog_client, + // &ctx, + // &original_uri, + // "connection_created", + // serde_json::json!({ + // "from_node_id": request.from_node_id, + // "from_node_schema_name": &from_component_schema.name(), + // "from_socket_id": request.from_socket_id, + // "from_socket_name": &from_socket.name(), + // "to_node_id": request.to_node_id, + // "to_node_schema_name": &to_component_schema.name(), + // "to_socket_id": request.to_socket_id, + // "to_socket_name": &to_socket.name(), + // }), + // ); ctx.commit().await?; - let mut response = axum::response::Response::builder(); - if let Some(force_changeset_pk) = force_changeset_pk { - response = response.header("force_changeset_pk", force_changeset_pk.to_string()); - } + let response = axum::response::Response::builder(); + // TODO(nick): restore this with the new engine. + // if let Some(force_changeset_pk) = force_changeset_pk { + // response = response.header("force_changeset_pk", force_changeset_pk.to_string()); + // } Ok(response .header("content-type", "application/json") .body(serde_json::to_string(&CreateConnectionResponse { - connection, + id: attribute_prototype_argument_id, + // TODO(nick): figure out what to do with these fields that were left over from the "Connection" struct. + created_by: None, + deleted_by: None, })?)?) 
} diff --git a/lib/sdf-server/src/server/service/diagram/create_node.rs b/lib/sdf-server/src/server/service/diagram/create_node.rs deleted file mode 100644 index 8f2419a918..0000000000 --- a/lib/sdf-server/src/server/service/diagram/create_node.rs +++ /dev/null @@ -1,136 +0,0 @@ -use axum::extract::OriginalUri; -use axum::{response::IntoResponse, Json}; -use serde::{Deserialize, Serialize}; - -use dal::node::NodeId; -use dal::{ - action_prototype::ActionPrototypeContextField, generate_name_from_schema_name, Action, - ActionKind, ActionPrototype, ActionPrototypeContext, ChangeSet, Component, ComponentId, Schema, - SchemaId, StandardModel, Visibility, WsEvent, -}; - -use crate::server::extract::{AccessBuilder, HandlerContext, PosthogClient}; -use crate::server::tracking::track; -use crate::service::diagram::connect_component_to_frame::connect_component_sockets_to_frame; -use crate::service::diagram::{DiagramError, DiagramResult}; - -#[derive(Deserialize, Serialize, Debug)] -#[serde(rename_all = "camelCase")] -pub struct CreateNodeRequest { - pub schema_id: SchemaId, - pub parent_id: Option, - pub x: String, - pub y: String, - #[serde(flatten)] - pub visibility: Visibility, -} - -#[derive(Deserialize, Serialize, Debug)] -#[serde(rename_all = "camelCase")] -pub struct CreateNodeResponse { - pub component_id: ComponentId, - pub node_id: NodeId, -} - -pub async fn create_node( - HandlerContext(builder): HandlerContext, - AccessBuilder(request_ctx): AccessBuilder, - PosthogClient(posthog_client): PosthogClient, - OriginalUri(original_uri): OriginalUri, - Json(request): Json, -) -> DiagramResult { - let mut ctx = builder.build(request_ctx.build(request.visibility)).await?; - - let force_changeset_pk = ChangeSet::force_new(&mut ctx).await?; - - let schema = Schema::get_by_id(&ctx, &request.schema_id) - .await? 
- .ok_or(DiagramError::SchemaNotFound)?; - let name = generate_name_from_schema_name(schema.name()); - - let schema_variant_id = schema - .default_schema_variant_id() - .ok_or(DiagramError::SchemaVariantNotFound)?; - - let (component, mut node) = Component::new(&ctx, &name, *schema_variant_id).await?; - - for prototype in ActionPrototype::find_for_context_and_kind( - &ctx, - ActionKind::Create, - ActionPrototypeContext::new_for_context_field(ActionPrototypeContextField::SchemaVariant( - *schema_variant_id, - )), - ) - .await? - { - let action = Action::new(&ctx, *prototype.id(), *component.id()).await?; - let prototype = action.prototype(&ctx).await?; - let component = action.component(&ctx).await?; - - track( - &posthog_client, - &ctx, - &original_uri, - "create_action", - serde_json::json!({ - "how": "/diagram/create_node", - "prototype_id": prototype.id(), - "prototype_kind": prototype.kind(), - "component_id": component.id(), - "component_name": component.name(&ctx).await?, - "change_set_pk": ctx.visibility().change_set_pk, - }), - ); - } - - node.set_geometry( - &ctx, - request.x.clone(), - request.y.clone(), - Some("500"), - Some("500"), - ) - .await?; - - if let Some(frame_id) = request.parent_id { - connect_component_sockets_to_frame( - &ctx, - frame_id, - *node.id(), - &original_uri, - &posthog_client, - ) - .await?; - } - - WsEvent::component_created(&ctx, *component.id()) - .await? 
- .publish_on_commit(&ctx) - .await?; - - track( - &posthog_client, - &ctx, - &original_uri, - "component_created", - serde_json::json!({ - "schema_id": schema.id(), - "schema_name": schema.name(), - "schema_variant_id": &schema_variant_id, - "component_id": component.id(), - "component_name": &name, - }), - ); - - ctx.commit().await?; - - let mut response = axum::response::Response::builder(); - if let Some(force_changeset_pk) = force_changeset_pk { - response = response.header("force_changeset_pk", force_changeset_pk.to_string()); - } - response = response.header("content-type", "application/json"); - Ok(response.body(serde_json::to_string(&CreateNodeResponse { - component_id: *component.id(), - node_id: *node.id(), - })?)?) -} diff --git a/lib/sdf-server/src/server/service/diagram/get_diagram.rs b/lib/sdf-server/src/server/service/diagram/get_diagram.rs index 309b7b6bd6..253f1937b8 100644 --- a/lib/sdf-server/src/server/service/diagram/get_diagram.rs +++ b/lib/sdf-server/src/server/service/diagram/get_diagram.rs @@ -1,5 +1,6 @@ use axum::{extract::Query, Json}; -use dal::{Diagram, Visibility}; +use dal::diagram::Diagram; +use dal::Visibility; use serde::{Deserialize, Serialize}; use super::DiagramResult; @@ -20,8 +21,6 @@ pub async fn get_diagram( Query(request): Query, ) -> DiagramResult> { let ctx = builder.build(request_ctx.build(request.visibility)).await?; - let response = Diagram::assemble(&ctx).await?; - Ok(Json(response)) } diff --git a/lib/sdf-server/src/server/service/diagram/list_schema_variants.rs b/lib/sdf-server/src/server/service/diagram/list_schema_variants.rs index d1b81aa40c..590966b5b8 100644 --- a/lib/sdf-server/src/server/service/diagram/list_schema_variants.rs +++ b/lib/sdf-server/src/server/service/diagram/list_schema_variants.rs @@ -1,12 +1,11 @@ use axum::extract::{Json, Query}; use dal::{ - socket::{SocketEdgeKind, SocketId}, - DiagramKind, ExternalProvider, ExternalProviderId, InternalProvider, InternalProviderId, - SchemaId, 
SchemaVariant, SchemaVariantId, StandardModel, Visibility, + ExternalProviderId, InternalProviderId, Schema, SchemaId, SchemaVariant, SchemaVariantId, + Visibility, }; use serde::{Deserialize, Serialize}; -use super::{DiagramError, DiagramResult}; +use super::DiagramResult; use crate::server::extract::{AccessBuilder, HandlerContext}; #[derive(Deserialize, Serialize, Debug)] @@ -16,38 +15,18 @@ pub struct ListSchemaVariantsRequest { pub visibility: Visibility, } -pub type ProviderMetadata = String; - -#[derive(Deserialize, Serialize, Debug)] -#[serde(rename_all = "camelCase")] -pub struct OutputProviderView { - id: ExternalProviderId, - ty: ProviderMetadata, -} - #[derive(Deserialize, Serialize, Debug)] #[serde(rename_all = "camelCase")] pub struct OutputSocketView { - id: SocketId, + id: ExternalProviderId, name: String, - diagram_kind: DiagramKind, - provider: OutputProviderView, -} - -#[derive(Deserialize, Serialize, Debug)] -#[serde(rename_all = "camelCase")] -pub struct InputProviderView { - id: InternalProviderId, - ty: ProviderMetadata, } #[derive(Deserialize, Serialize, Debug)] #[serde(rename_all = "camelCase")] pub struct InputSocketView { - id: SocketId, + id: InternalProviderId, name: String, - diagram_kind: DiagramKind, - provider: InputProviderView, } #[derive(Deserialize, Serialize, Debug)] @@ -59,6 +38,7 @@ pub struct SchemaVariantView { schema_name: String, schema_id: SchemaId, color: String, + category: String, input_sockets: Vec, output_sockets: Vec, } @@ -71,82 +51,62 @@ pub async fn list_schema_variants( ) -> DiagramResult> { let ctx = builder.build(request_ctx.build(request.visibility)).await?; - let variants = SchemaVariant::list(&ctx).await?; - let external_provider_by_socket = ExternalProvider::by_socket(&ctx).await?; - let internal_provider_by_socket = InternalProvider::by_socket(&ctx).await?; + let mut schema_variants_views: Vec = Vec::new(); + let schemas = Schema::list(&ctx).await?; - let mut variants_view = 
Vec::with_capacity(variants.len()); - for variant in variants { - if variant.ui_hidden() { + for schema in schemas { + if schema.ui_hidden { continue; } - let schema = variant - .schema(&ctx) - .await? - .ok_or(DiagramError::SchemaNotFound)?; - - if schema.ui_hidden() { - continue; - } - let mut input_sockets = Vec::new(); - let mut output_sockets = Vec::new(); + let schema_variants = SchemaVariant::list_for_schema(&ctx, schema.id()).await?; + for schema_variant in schema_variants { + if schema_variant.ui_hidden() { + continue; + } - let sockets = variant.sockets(&ctx).await?; + let mut input_sockets = Vec::new(); + let mut output_sockets = Vec::new(); + + let (external_providers, explicit_internal_providers) = + SchemaVariant::list_external_providers_and_explicit_internal_providers( + &ctx, + schema_variant.id(), + ) + .await?; + + for explicit_internal_provider in explicit_internal_providers { + input_sockets.push(InputSocketView { + id: explicit_internal_provider.id(), + name: explicit_internal_provider.name().to_owned(), + }) + } - for socket in sockets { - match socket.edge_kind() { - SocketEdgeKind::ConfigurationOutput => { - let provider = - external_provider_by_socket - .get(socket.id()) - .ok_or_else(|| { - DiagramError::ExternalProviderNotFoundForSocket(*socket.id()) - })?; - output_sockets.push(OutputSocketView { - id: *socket.id(), - name: socket.name().to_owned(), - diagram_kind: *socket.diagram_kind(), - provider: OutputProviderView { - id: *provider.id(), - ty: socket.name().to_owned(), - }, - }) - } - SocketEdgeKind::ConfigurationInput => { - let provider = - internal_provider_by_socket - .get(socket.id()) - .ok_or_else(|| { - DiagramError::InternalProviderNotFoundForSocket(*socket.id()) - })?; - input_sockets.push(InputSocketView { - id: *socket.id(), - name: socket.name().to_owned(), - diagram_kind: *socket.diagram_kind(), - provider: InputProviderView { - id: *provider.id(), - ty: socket.name().to_owned(), - }, - }) - } + for external_provider in 
external_providers { + output_sockets.push(OutputSocketView { + id: external_provider.id(), + name: external_provider.name().to_owned(), + }) } - } - variants_view.push(SchemaVariantView { - id: *variant.id(), - builtin: variant.is_builtin(&ctx).await?, - name: variant.name().to_owned(), - schema_id: *schema.id(), - schema_name: schema.name().to_owned(), - input_sockets, - color: variant - .color(&ctx) - .await? - .unwrap_or_else(|| "00b0bc".to_owned()), - output_sockets, - }); + schema_variants_views.push(SchemaVariantView { + id: schema_variant.id(), + // FIXME(nick): use the real value here + builtin: true, + // builtin: schema_variant.is_builtin(&ctx).await?, + name: schema_variant.name().to_owned(), + schema_id: schema.id(), + schema_name: schema.name.to_owned(), + color: schema_variant + .get_color(&ctx) + .await? + .unwrap_or("#0F0F0F".into()), + category: schema_variant.category().to_owned(), + input_sockets, + output_sockets, + }); + } } - Ok(Json(variants_view)) + Ok(Json(schema_variants_views)) } diff --git a/lib/sdf-server/src/server/service/diagram/set_component_position.rs b/lib/sdf-server/src/server/service/diagram/set_component_position.rs new file mode 100644 index 0000000000..aaf5adbce4 --- /dev/null +++ b/lib/sdf-server/src/server/service/diagram/set_component_position.rs @@ -0,0 +1,93 @@ +use axum::Json; +use dal::{Component, ComponentId, SchemaVariant, Visibility}; +use serde::{Deserialize, Serialize}; + +use super::DiagramResult; +use crate::server::extract::{AccessBuilder, HandlerContext}; + +#[derive(Deserialize, Serialize, Debug)] +#[serde(rename_all = "camelCase")] +pub struct SetComponentPositionRequest { + #[serde(flatten)] + pub visibility: Visibility, + pub node_id: ComponentId, + pub x: String, + pub y: String, + pub width: Option, + pub height: Option, +} + +#[derive(Deserialize, Serialize, Debug)] +#[serde(rename_all = "camelCase")] +pub struct SetComponentPositionResponse { + pub component: Component, +} + +pub async fn 
set_component_position( + HandlerContext(builder): HandlerContext, + AccessBuilder(request_ctx): AccessBuilder, + Json(request): Json, +) -> DiagramResult> { + // let visibility = Visibility::new_change_set(request.visibility.change_set_pk, true); + // let ctx = builder.build(request_ctx.build(visibility)).await?; + + // TODO(nick): I think the above visibility style is wrong for the new engine and we want what's below. + let ctx = builder.build(request_ctx.build(request.visibility)).await?; + + let component = Component::get_by_id(&ctx, request.node_id).await?; + let schema_variant_id = Component::schema_variant_id(&ctx, request.node_id).await?; + + let (width, height) = { + let (_, explicit_internal_providers) = + SchemaVariant::list_external_providers_and_explicit_internal_providers( + &ctx, + schema_variant_id, + ) + .await?; + + let mut size = (None, None); + + for explicit_internal_provider in explicit_internal_providers { + // NOTE(nick): the comment below may be out of date, depending on how we handle frames with the new engine. + + // If component is a frame, we set the size as either the one from the request or the previous one + // If we don't do it like this upsert_by_node_id will delete the size on None instead of keeping it as is + if explicit_internal_provider.name() == "Frame" { + size = ( + request + .width + .or_else(|| component.width().map(|v| v.to_string())), + request + .height + .or_else(|| component.height().map(|v| v.to_string())), + ); + break; + } + } + + size + }; + + // TODO(nick): handle the "deleted" case with the new engine. 
+ let component = component + .set_geometry(&ctx, request.x, request.y, width, height) + .await?; + // { + // if node.visibility().deleted_at.is_some() { + // node.set_geometry(&ctx, &request.x, &request.y, width, height) + // .await?; + // } else { + // let ctx_without_deleted = &ctx.clone_with_new_visibility(Visibility::new_change_set( + // ctx.visibility().change_set_pk, + // false, + // )); + // + // node.set_geometry(ctx_without_deleted, &request.x, &request.y, width, height) + // .await?; + // }; + // } + + ctx.commit().await?; + + Ok(Json(SetComponentPositionResponse { component })) +} diff --git a/lib/sdf-server/src/server/service/diagram/set_node_position.rs b/lib/sdf-server/src/server/service/diagram/set_node_position.rs deleted file mode 100644 index f850b677cf..0000000000 --- a/lib/sdf-server/src/server/service/diagram/set_node_position.rs +++ /dev/null @@ -1,91 +0,0 @@ -use super::DiagramResult; -use crate::server::extract::{AccessBuilder, HandlerContext}; -use crate::service::diagram::DiagramError; -use axum::Json; -use dal::node::NodeId; -use dal::socket::SocketEdgeKind; -use dal::{Node, StandardModel, Visibility}; -use serde::{Deserialize, Serialize}; - -#[derive(Deserialize, Serialize, Debug)] -#[serde(rename_all = "camelCase")] -pub struct SetNodePositionRequest { - #[serde(flatten)] - pub visibility: Visibility, - pub node_id: NodeId, - pub x: String, - pub y: String, - pub width: Option, - pub height: Option, -} - -#[derive(Deserialize, Serialize, Debug)] -#[serde(rename_all = "camelCase")] -pub struct SetNodePositionResponse { - pub node: Node, -} - -pub async fn set_node_position( - HandlerContext(builder): HandlerContext, - AccessBuilder(request_ctx): AccessBuilder, - Json(request): Json, -) -> DiagramResult> { - let visibility = Visibility::new_change_set(request.visibility.change_set_pk, true); - let ctx = builder.build(request_ctx.build(visibility)).await?; - - let mut node = Node::get_by_id(&ctx, &request.node_id) - .await? 
- .ok_or(DiagramError::NodeNotFound(request.node_id))?; - - let (width, height) = { - let component = dal::Component::find_for_node(&ctx, request.node_id) - .await? - .ok_or(DiagramError::ComponentNotFound)?; - - let sockets = component - .schema_variant(&ctx) - .await? - .ok_or(DiagramError::SchemaVariantNotFound)? - .sockets(&ctx) - .await?; - - let mut size = (None, None); - - for s in sockets { - // If component is a frame, we set the size as either the one from the request or the previous one - // If we don't do it like this upsert_by_node_id will delete the size on None instead of keeping it as is - if s.name() == "Frame" && *s.edge_kind() == SocketEdgeKind::ConfigurationInput { - size = ( - request - .width - .or_else(|| node.width().map(|v| v.to_string())), - request - .height - .or_else(|| node.height().map(|v| v.to_string())), - ); - break; - } - } - - size - }; - - { - if node.visibility().deleted_at.is_some() { - node.set_geometry(&ctx, &request.x, &request.y, width, height) - .await?; - } else { - let ctx_without_deleted = &ctx.clone_with_new_visibility(Visibility::new_change_set( - ctx.visibility().change_set_pk, - false, - )); - - node.set_geometry(ctx_without_deleted, &request.x, &request.y, width, height) - .await?; - }; - } - - ctx.commit().await?; - - Ok(Json(SetNodePositionResponse { node })) -} diff --git a/lib/sdf-server/src/server/service/func.rs b/lib/sdf-server/src/server/service/func.rs index b7bf7a06d3..34379fed80 100644 --- a/lib/sdf-server/src/server/service/func.rs +++ b/lib/sdf-server/src/server/service/func.rs @@ -1,254 +1,297 @@ -use std::collections::HashMap; - use axum::{ response::Response, routing::{get, post}, Json, Router, }; -use serde::{Deserialize, Serialize}; -use thiserror::Error; -use tokio::task::JoinError; - -use dal::authentication_prototype::{AuthenticationPrototype, AuthenticationPrototypeError}; -use dal::func::execution::FuncExecutionError; +use dal::authentication_prototype::AuthenticationPrototypeError; +use 
dal::func::argument::{FuncArgument, FuncArgumentError, FuncArgumentId, FuncArgumentKind}; +use dal::schema::variant::SchemaVariantError; use dal::{ - attribute::context::{AttributeContextBuilder, AttributeContextBuilderError}, - func::{ - argument::{FuncArgument, FuncArgumentError, FuncArgumentId, FuncArgumentKind}, - binding_return_value::FuncBindingReturnValueError, - }, - prop_tree::PropTreeError, - prototype_context::PrototypeContextError, - schema::variant::SchemaVariantError, - ActionKind, ActionPrototype, ActionPrototypeError, AttributeContext, AttributeContextError, - AttributePrototype, AttributePrototypeArgumentError, AttributePrototypeArgumentId, - AttributePrototypeError, AttributePrototypeId, AttributeValueError, ChangeSetError, - ComponentError, ComponentId, DalContext, ExternalProviderError, ExternalProviderId, Func, - FuncBackendKind, FuncBackendResponseType, FuncBindingError, FuncId, FuncVariant, - InternalProvider, InternalProviderError, InternalProviderId, LeafInputLocation, Prop, - PropError, PropId, PrototypeListForFuncError, SchemaVariant, SchemaVariantId, StandardModel, - StandardModelError, TenancyError, TransactionsError, WsEventError, + workspace_snapshot::WorkspaceSnapshotError, DalContext, Func, FuncBackendKind, + FuncBackendResponseType, FuncId, SchemaVariantId, TransactionsError, }; +use dal::{ChangeSetError, WsEventError}; +use serde::{Deserialize, Serialize}; +use thiserror::Error; use crate::server::{impl_default_error_into_response, state::AppState}; use crate::service::func::get_func::GetFuncResponse; pub mod create_func; -pub mod delete_func; -pub mod execute; pub mod get_func; pub mod list_funcs; -pub mod list_input_sources; -pub mod revert_func; -pub mod save_and_exec; pub mod save_func; +// pub mod delete_func; +// pub mod execute; +// pub mod list_input_sources; +// pub mod revert_func; +// pub mod save_and_exec; + #[remain::sorted] #[derive(Error, Debug)] pub enum FuncError { - #[error("action func {0} assigned to multiple 
kinds")] - ActionFuncMultipleKinds(FuncId), - #[error("action kind missing on prototypes for action func {0}")] - ActionKindMissing(FuncId), - #[error(transparent)] - ActionPrototype(#[from] ActionPrototypeError), - #[error("attribute context error: {0}")] - AttributeContext(#[from] AttributeContextError), - #[error("attribute context builder error: {0}")] - AttributeContextBuilder(#[from] AttributeContextBuilderError), - #[error("attribute prototype error: {0}")] - AttributePrototype(#[from] AttributePrototypeError), - #[error("That attribute is already set by the function named \"{0}\"")] - AttributePrototypeAlreadySetByFunc(String), - #[error("attribute prototype argument error: {0}")] - AttributePrototypeArgument(#[from] AttributePrototypeArgumentError), - #[error("attribute prototype missing")] - AttributePrototypeMissing, - #[error("attribute prototype {0} is missing argument {1}")] - AttributePrototypeMissingArgument(AttributePrototypeId, AttributePrototypeArgumentId), - #[error("attribute prototype argument {0} is internal provider id")] - AttributePrototypeMissingInternalProviderId(AttributePrototypeArgumentId), - #[error("attribute prototype {0} is missing its prop {1}")] - AttributePrototypeMissingProp(AttributePrototypeId, PropId), - #[error("attribute prototype {0} has no PropId or ExternalProviderId")] - AttributePrototypeMissingPropIdOrExternalProviderId(AttributePrototypeId), - #[error("attribute prototype {0} schema is missing")] - AttributePrototypeMissingSchema(AttributePrototypeId), - #[error("attribute prototype {0} schema_variant is missing")] - AttributePrototypeMissingSchemaVariant(AttributePrototypeId), - #[error("attribute value error: {0}")] - AttributeValue(#[from] AttributeValueError), - #[error("attribute value missing")] - AttributeValueMissing, + // #[error("action func {0} assigned to multiple kinds")] + // ActionFuncMultipleKinds(FuncId), + // #[error("action kind missing on prototypes for action func {0}")] + // 
ActionKindMissing(FuncId), + // #[error(transparent)] + // ActionPrototype(#[from] ActionPrototypeError), + // #[error("attribute context error: {0}")] + // AttributeContext(#[from] AttributeContextError), + // #[error("attribute context builder error: {0}")] + // AttributeContextBuilder(#[from] AttributeContextBuilderError), + // #[error("attribute prototype error: {0}")] + // AttributePrototype(#[from] AttributePrototypeError), + // #[error("That attribute is already set by the function named \"{0}\"")] + // AttributePrototypeAlreadySetByFunc(String), + // #[error("attribute prototype argument error: {0}")] + // AttributePrototypeArgument(#[from] AttributePrototypeArgumentError), + // #[error("attribute prototype missing")] + // AttributePrototypeMissing, + // #[error("attribute prototype {0} is missing argument {1}")] + // AttributePrototypeMissingArgument(AttributePrototypeId, AttributePrototypeArgumentId), + // #[error("attribute prototype argument {0} is internal provider id")] + // AttributePrototypeMissingInternalProviderId(AttributePrototypeArgumentId), + // #[error("attribute prototype {0} is missing its prop {1}")] + // AttributePrototypeMissingProp(AttributePrototypeId, PropId), + // #[error("attribute prototype {0} has no PropId or ExternalProviderId")] + // AttributePrototypeMissingPropIdOrExternalProviderId(AttributePrototypeId), + // #[error("attribute prototype {0} schema is missing")] + // AttributePrototypeMissingSchema(AttributePrototypeId), + // #[error("attribute prototype {0} schema_variant is missing")] + // AttributePrototypeMissingSchemaVariant(AttributePrototypeId), + // #[error("attribute value error: {0}")] + // AttributeValue(#[from] AttributeValueError), #[error("authentication prototype error: {0}")] - AuthenticationPrototypeError(#[from] AuthenticationPrototypeError), + AuthenticationPrototype(#[from] AuthenticationPrototypeError), + // #[error("attribute value missing")] + // AttributeValueMissing, #[error("change set error: {0}")] 
ChangeSet(#[from] ChangeSetError), - #[error("component error: {0}")] - Component(#[from] ComponentError), - #[error("component missing schema variant")] - ComponentMissingSchemaVariant(ComponentId), + // #[error("component error: {0}")] + // Component(#[from] ComponentError), + // #[error("component missing schema variant")] + // ComponentMissingSchemaVariant(ComponentId), #[error(transparent)] ContextTransaction(#[from] TransactionsError), - #[error("editing reconciliation functions is not implemented")] - EditingReconciliationFuncsNotImplemented, - #[error("editing validation functions is not implemented")] - EditingValidationFuncsNotImplemented, + // #[error("editing reconciliation functions is not implemented")] + // EditingReconciliationFuncsNotImplemented, + // #[error(transparent)] + // ExternalProvider(#[from] ExternalProviderError), #[error(transparent)] - ExternalProvider(#[from] ExternalProviderError), - #[error(transparent)] - Func(#[from] dal::FuncError), - #[error("func argument not found")] - FuncArgNotFound, + Func(#[from] dal::func::FuncError), + // #[error("func argument not found")] + // FuncArgNotFound, #[error("func argument error: {0}")] FuncArgument(#[from] FuncArgumentError), - #[error("func argument already exists for that name")] - FuncArgumentAlreadyExists, - #[error("func argument {0} missing attribute prototype argument for prototype {1}")] - FuncArgumentMissingPrototypeArgument(FuncArgumentId, AttributePrototypeId), - #[error("func binding error: {0}")] - FuncBinding(#[from] FuncBindingError), - #[error("func binding return value error: {0}")] - FuncBindingReturnValue(#[from] FuncBindingReturnValueError), - #[error("func binding return value not found")] - FuncBindingReturnValueMissing, - // XXX: we will be able to remove this error once we make output sockets typed - #[error("Cannot bind function to both an output socket and a prop")] - FuncDestinationPropAndOutputSocket, - #[error("cannot bind func to different prop kinds")] - 
FuncDestinationPropKindMismatch, - #[error("Function execution: {0}")] - FuncExecution(#[from] FuncExecutionError), - #[error("Function execution failed: {0}")] - FuncExecutionFailed(String), - #[error("Function execution failed: this function is not connected to any assets, and was not executed")] - FuncExecutionFailedNoPrototypes, - #[error("Function still has associations: {0}")] - FuncHasAssociations(FuncId), + // #[error("func argument already exists for that name")] + // FuncArgumentAlreadyExists, + // #[error("func argument {0} missing attribute prototype argument for prototype {1}")] + // FuncArgumentMissingPrototypeArgument(FuncArgumentId, AttributePrototypeId), + // #[error("func binding error: {0}")] + // FuncBinding(#[from] FuncBindingError), + // #[error("func binding return value error: {0}")] + // FuncBindingReturnValue(#[from] FuncBindingReturnValueError), + // #[error("func binding return value not found")] + // FuncBindingReturnValueMissing, + #[error("func {0} cannot be converted to frontend variant")] + FuncCannotBeTurnedIntoVariant(FuncId), + // // XXX: we will be able to remove this error once we make output sockets typed + // #[error("Cannot bind function to both an output socket and a prop")] + // FuncDestinationPropAndOutputSocket, + // #[error("cannot bind func to different prop kinds")] + // FuncDestinationPropKindMismatch, + // #[error("Function execution: {0}")] + // FuncExecution(#[from] FuncExecutionError), + // #[error("Function execution failed: {0}")] + // FuncExecutionFailed(String), + // #[error("Function execution failed: this function is not connected to any assets, and was not executed")] + // FuncExecutionFailedNoPrototypes, + // #[error("Function still has associations: {0}")] + // FuncHasAssociations(FuncId), #[error("Function named \"{0}\" already exists in this changeset")] FuncNameExists(String), #[error("The function name \"{0}\" is reserved")] FuncNameReserved(String), - #[error("Function not found")] - FuncNotFound, - 
#[error("func is not revertible")] - FuncNotRevertible, - #[error("Function not runnable")] - FuncNotRunnable, - #[error("Cannot create that type of function")] - FuncNotSupported, - #[error("Function options are incompatible with variant")] - FuncOptionsAndVariantMismatch, + // #[error("func is not revertible")] + // FuncNotRevertible, + // #[error("Cannot create that type of function")] + // FuncNotSupported, + // #[error("Function options are incompatible with variant")] + // FuncOptionsAndVariantMismatch, #[error("Hyper error: {0}")] Hyper(#[from] hyper::http::Error), - #[error("internal provider error: {0}")] - InternalProvider(#[from] InternalProviderError), - #[error("failed to join async task; bug!")] - Join(#[from] JoinError), - #[error("Missing required options for creating a function")] - MissingOptions, + // #[error("internal provider error: {0}")] + // InternalProvider(#[from] InternalProviderError), + // #[error("failed to join async task; bug!")] + // Join(#[from] JoinError), + // #[error("Missing required options for creating a function")] + // MissingOptions, #[error("Function is read-only")] NotWritable, - #[error(transparent)] - Pg(#[from] si_data_pg::PgError), - #[error(transparent)] - PgPool(#[from] Box), - #[error("prop error: {0}")] - Prop(#[from] PropError), + // #[error(transparent)] + // Pg(#[from] si_data_pg::PgError), + // #[error(transparent)] + // PgPool(#[from] Box), + // #[error("prop error: {0}")] + // Prop(#[from] PropError), #[error("prop for value not found")] PropNotFound, - #[error("prop tree error: {0}")] - PropTree(#[from] PropTreeError), - #[error("prototype context error: {0}")] - PrototypeContext(#[from] PrototypeContextError), - #[error("prototype list for func error: {0}")] - PrototypeListForFunc(#[from] PrototypeListForFuncError), + // #[error("prop tree error: {0}")] + // PropTree(#[from] PropTreeError), + // #[error("prototype context error: {0}")]self + // PrototypeContext(#[from] PrototypeContextError), + // 
#[error("prototype list for func error: {0}")] + // PrototypeListForFunc(#[from] PrototypeListForFuncError), #[error("schema variant error: {0}")] SchemaVariant(#[from] SchemaVariantError), - #[error("schema variant missing schema")] - SchemaVariantMissingSchema(SchemaVariantId), - #[error("Could not find schema variant for prop {0}")] - SchemaVariantNotFoundForProp(PropId), + // #[error("schema variant missing schema")] + // SchemaVariantMissingSchema(SchemaVariantId), + // #[error("Could not find schema variant for prop {0}")] + // SchemaVariantNotFoundForProp(PropId), #[error("json serialization error: {0}")] SerdeJson(#[from] serde_json::Error), - #[error(transparent)] - StandardModel(#[from] StandardModelError), - #[error("tenancy error: {0}")] - Tenancy(#[from] TenancyError), + // StandardModel(#[from] StandardModelError), + // #[error("tenancy error: {0}")] + // Tenancy(#[from] TenancyError), #[error("unexpected func variant ({0:?}) creating attribute func")] UnexpectedFuncVariantCreatingAttributeFunc(FuncVariant), - #[error("A validation already exists for that attribute")] - ValidationAlreadyExists, - #[error("validation prototype schema is missing")] - ValidationPrototypeMissingSchema, - #[error("validation prototype {0} schema_variant is missing")] - ValidationPrototypeMissingSchemaVariant(SchemaVariantId), + // #[error("A validation already exists for that attribute")] + // ValidationAlreadyExists, + // #[error("validation prototype error: {0}")] + // ValidationPrototype(#[from] ValidationPrototypeError), + // #[error("validation prototype schema is missing")] + // ValidationPrototypeMissingSchema, + // #[error("validation prototype {0} schema_variant is missing")] + // ValidationPrototypeMissingSchemaVariant(SchemaVariantId), + #[error(transparent)] + WorkspaceSnapshot(#[from] WorkspaceSnapshotError), #[error("could not publish websocket event: {0}")] WsEvent(#[from] WsEventError), } -impl From for FuncError { - fn from(value: si_data_pg::PgPoolError) 
-> Self { - Self::PgPool(Box::new(value)) - } -} +//impl From for FuncError { +// fn from(value: si_data_pg::PgPoolError) -> Self { +// Self::PgPool(Box::new(value)) +// } +//} pub type FuncResult = Result; impl_default_error_into_response!(FuncError); -#[derive(Deserialize, Serialize, Debug, Clone, PartialEq, Eq)] -#[serde(rename_all = "camelCase")] -pub struct AttributePrototypeArgumentView { - func_argument_id: FuncArgumentId, - func_argument_name: Option, - id: Option, - internal_provider_id: Option, +// Variants don't map 1:1 onto FuncBackendKind, since some JsAttribute functions +// are a special case (Qualification, CodeGeneration etc) +#[remain::sorted] +#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq, Copy)] +pub enum FuncVariant { + Action, + Attribute, + Authentication, + CodeGeneration, + Qualification, + Reconciliation, + Validation, } -#[derive(Deserialize, Serialize, Debug, Clone, PartialEq, Eq)] -#[serde(rename_all = "camelCase")] -pub struct AttributePrototypeView { - id: AttributePrototypeId, - component_id: Option, - prop_id: Option, - external_provider_id: Option, - prototype_arguments: Vec, +impl From for FuncBackendKind { + fn from(value: FuncVariant) -> Self { + match value { + FuncVariant::Reconciliation => FuncBackendKind::JsReconciliation, + FuncVariant::Action => FuncBackendKind::JsAction, + FuncVariant::Validation => FuncBackendKind::JsValidation, + FuncVariant::Attribute | FuncVariant::CodeGeneration | FuncVariant::Qualification => { + FuncBackendKind::JsAttribute + } + FuncVariant::Authentication => FuncBackendKind::JsAuthentication, + } + } } -impl AttributePrototypeView { - pub fn to_attribute_context(&self) -> FuncResult { - let mut builder = AttributeContextBuilder::new(); - if let Some(component_id) = self.component_id { - builder.set_component_id(component_id); - } - if let Some(prop_id) = self.prop_id { - builder.set_prop_id(prop_id); - } - if let Some(external_provider_id) = self.external_provider_id { - 
builder.set_external_provider_id(external_provider_id); +impl TryFrom<&Func> for FuncVariant { + type Error = FuncError; + + fn try_from(func: &Func) -> Result { + match (func.backend_kind, func.backend_response_type) { + (FuncBackendKind::JsAttribute, response_type) => match response_type { + FuncBackendResponseType::CodeGeneration => Ok(FuncVariant::CodeGeneration), + FuncBackendResponseType::Qualification => Ok(FuncVariant::Qualification), + _ => Ok(FuncVariant::Attribute), + }, + (FuncBackendKind::JsReconciliation, _) => Ok(FuncVariant::Reconciliation), + (FuncBackendKind::JsAction, _) => Ok(FuncVariant::Action), + (FuncBackendKind::JsValidation, _) => Ok(FuncVariant::Validation), + (FuncBackendKind::JsAuthentication, _) => Ok(FuncVariant::Authentication), + (FuncBackendKind::Array, _) + | (FuncBackendKind::Boolean, _) + | (FuncBackendKind::Diff, _) + | (FuncBackendKind::Identity, _) + | (FuncBackendKind::Integer, _) + | (FuncBackendKind::JsSchemaVariantDefinition, _) + | (FuncBackendKind::Map, _) + | (FuncBackendKind::Object, _) + | (FuncBackendKind::String, _) + | (FuncBackendKind::Unset, _) + | (FuncBackendKind::Validation, _) => { + Err(FuncError::FuncCannotBeTurnedIntoVariant(func.id)) + } } - - Ok(builder.to_context()?) 
} } +// #[derive(Deserialize, Serialize, Debug, Clone, PartialEq, Eq)] +// #[serde(rename_all = "camelCase")] +// pub struct AttributePrototypeArgumentView { +// func_argument_id: FuncArgumentId, +// func_argument_name: Option, +// id: Option, +// internal_provider_id: Option, +// } +// #[derive(Deserialize, Serialize, Debug, Clone, PartialEq, Eq)] #[serde(rename_all = "camelCase")] -pub struct ValidationPrototypeView { - schema_variant_id: SchemaVariantId, - prop_id: PropId, -} - +pub struct AttributePrototypeView {} +// id: AttributePrototypeId, +// component_id: Option, +// prop_id: Option, +// external_provider_id: Option, +// prototype_arguments: Vec, +// } + +// impl AttributePrototypeView { +// pub fn to_attribute_context(&self) -> FuncResult { +// let mut builder = AttributeContextBuilder::new(); +// if let Some(component_id) = self.component_id { +// builder.set_component_id(component_id); +// } +// if let Some(prop_id) = self.prop_id { +// builder.set_prop_id(prop_id); +// } +// if let Some(external_provider_id) = self.external_provider_id { +// builder.set_external_provider_id(external_provider_id); +// } + +// Ok(builder.to_context()?) 
+// } +// } + +// #[derive(Deserialize, Serialize, Debug, Clone, PartialEq, Eq)] +// #[serde(rename_all = "camelCase")] +// pub struct ValidationPrototypeView { +// schema_variant_id: SchemaVariantId, +// prop_id: PropId, +// } +// #[remain::sorted] #[derive(Deserialize, Serialize, Debug, Clone, PartialEq, Eq)] #[serde(tag = "type", rename_all = "camelCase")] pub enum FuncAssociations { - #[serde(rename_all = "camelCase")] - Action { - schema_variant_ids: Vec, - kind: Option, - }, + // #[serde(rename_all = "camelCase")] + // Action { + // schema_variant_ids: Vec, + // kind: Option, + // }, #[serde(rename_all = "camelCase")] Attribute { prototypes: Vec, @@ -258,26 +301,26 @@ pub enum FuncAssociations { Authentication { schema_variant_ids: Vec, }, - #[serde(rename_all = "camelCase")] - CodeGeneration { - schema_variant_ids: Vec, - component_ids: Vec, - inputs: Vec, - }, - #[serde(rename_all = "camelCase")] - Qualification { - schema_variant_ids: Vec, - component_ids: Vec, - inputs: Vec, - }, - #[serde(rename_all = "camelCase")] - SchemaVariantDefinitions { - schema_variant_ids: Vec, - }, - #[serde(rename_all = "camelCase")] - Validation { - prototypes: Vec, - }, + // #[serde(rename_all = "camelCase")] + // CodeGeneration { + // schema_variant_ids: Vec, + // component_ids: Vec, + // inputs: Vec, + // }, + // #[serde(rename_all = "camelCase")] + // Qualification { + // schema_variant_ids: Vec, + // component_ids: Vec, + // inputs: Vec, + // }, + // #[serde(rename_all = "camelCase")] + // SchemaVariantDefinitions { + // schema_variant_ids: Vec, + // }, + // #[serde(rename_all = "camelCase")] + // Validation { + // prototypes: Vec, + // }, } #[derive(Deserialize, Serialize, Debug, Clone, PartialEq, Eq)] @@ -288,227 +331,236 @@ pub struct FuncArgumentView { pub kind: FuncArgumentKind, pub element_kind: Option, } - -async fn is_func_revertible(ctx: &DalContext, func: &Func) -> FuncResult { - // refetch to get updated visibility - let is_in_change_set = match 
Func::get_by_id(ctx, func.id()).await? { - Some(func) => func.visibility().in_change_set(), - None => return Ok(false), - }; - // Clone a new ctx vith head visibility - let ctx = ctx.clone_with_head(); - let head_func = Func::get_by_id(&ctx, func.id()).await?; - - Ok(head_func.is_some() && is_in_change_set) -} - -async fn prototype_view_for_attribute_prototype( - ctx: &DalContext, - func_id: FuncId, - proto: &AttributePrototype, -) -> FuncResult { - let prop_id = if proto.context.prop_id().is_some() { - Some(proto.context.prop_id()) - } else { - None - }; - - let external_provider_id = if proto.context.external_provider_id().is_some() { - Some(proto.context.external_provider_id()) - } else { - None - }; - - if prop_id.is_none() && external_provider_id.is_none() { - return Err(FuncError::AttributePrototypeMissingPropIdOrExternalProviderId(*proto.id())); - } - - let component_id = if proto.context.component_id().is_some() { - Some(proto.context.component_id()) - } else { - None - }; - - let prototype_arguments = - FuncArgument::list_for_func_with_prototype_arguments(ctx, func_id, *proto.id()) - .await? - .iter() - .map( - |(func_arg, maybe_proto_arg)| AttributePrototypeArgumentView { - func_argument_id: *func_arg.id(), - func_argument_name: Some(func_arg.name().to_owned()), - id: maybe_proto_arg.as_ref().map(|proto_arg| *proto_arg.id()), - internal_provider_id: maybe_proto_arg - .as_ref() - .map(|proto_arg| proto_arg.internal_provider_id()), - }, - ) - .collect(); - - Ok(AttributePrototypeView { - id: *proto.id(), - prop_id, - component_id, - external_provider_id, - prototype_arguments, - }) -} - -async fn action_prototypes_into_schema_variants_and_components( - ctx: &DalContext, - func_id: FuncId, -) -> FuncResult<(Option, Vec)> { - let mut variant_ids = vec![]; - let mut action_kind: Option = None; - - for proto in ActionPrototype::find_for_func(ctx, func_id).await? 
{ - if let Some(action_kind) = &action_kind { - if action_kind != proto.kind() { - return Err(FuncError::ActionFuncMultipleKinds(func_id)); - } - } else { - action_kind = Some(*proto.kind()); - } - - if proto.schema_variant_id().is_some() { - variant_ids.push(proto.schema_variant_id()); - } - } - - if !variant_ids.is_empty() && action_kind.is_none() { - return Err(FuncError::ActionKindMissing(func_id)); - } - - Ok((action_kind, variant_ids)) -} - -async fn attribute_prototypes_into_schema_variants_and_components( - ctx: &DalContext, - func_id: FuncId, -) -> FuncResult<(Vec, Vec)> { - let schema_variants_components = - AttributePrototype::find_for_func_as_variant_and_component(ctx, func_id).await?; - - let mut schema_variant_ids = vec![]; - let mut component_ids = vec![]; - - for (schema_variant_id, component_id) in schema_variants_components { - if component_id == ComponentId::NONE { - schema_variant_ids.push(schema_variant_id); - } else { - component_ids.push(component_id); - } - } - - Ok((schema_variant_ids, component_ids)) -} - -pub async fn get_leaf_function_inputs( - ctx: &DalContext, - func_id: FuncId, -) -> FuncResult> { - Ok(FuncArgument::list_for_func(ctx, func_id) - .await? - .iter() - .filter_map(|arg| LeafInputLocation::maybe_from_arg_name(arg.name())) - .collect()) -} - +// +// async fn is_func_revertible(ctx: &DalContext, func: &Func) -> FuncResult { +// // refetch to get updated visibility +// let is_in_change_set = match Func::get_by_id(ctx, func.id()).await? 
{ +// Some(func) => func.visibility().in_change_set(), +// None => return Ok(false), +// }; +// // Clone a new ctx vith head visibility +// let ctx = ctx.clone_with_head(); +// let head_func = Func::get_by_id(&ctx, func.id()).await?; + +// Ok(head_func.is_some() && is_in_change_set) +// } + +// async fn prototype_view_for_attribute_prototype( +// ctx: &DalContext, +// func_id: FuncId, +// proto: &AttributePrototype, +// ) -> FuncResult { +// let prop_id = if proto.context.prop_id().is_some() { +// Some(proto.context.prop_id()) +// } else { +// None +// }; + +// let external_provider_id = if proto.context.external_provider_id().is_some() { +// Some(proto.context.external_provider_id()) +// } else { +// None +// }; + +// if prop_id.is_none() && external_provider_id.is_none() { +// return Err(FuncError::AttributePrototypeMissingPropIdOrExternalProviderId(*proto.id())); +// } + +// let component_id = if proto.context.component_id().is_some() { +// Some(proto.context.component_id()) +// } else { +// None +// }; + +// let prototype_arguments = +// FuncArgument::list_for_func_with_prototype_arguments(ctx, func_id, *proto.id()) +// .await? +// .iter() +// .map( +// |(func_arg, maybe_proto_arg)| AttributePrototypeArgumentView { +// func_argument_id: *func_arg.id(), +// func_argument_name: Some(func_arg.name().to_owned()), +// id: maybe_proto_arg.as_ref().map(|proto_arg| *proto_arg.id()), +// internal_provider_id: maybe_proto_arg +// .as_ref() +// .map(|proto_arg| proto_arg.internal_provider_id()), +// }, +// ) +// .collect(); + +// Ok(AttributePrototypeView { +// id: *proto.id(), +// prop_id, +// component_id, +// external_provider_id, +// prototype_arguments, +// }) +// } + +// async fn action_prototypes_into_schema_variants_and_components( +// ctx: &DalContext, +// func_id: FuncId, +// ) -> FuncResult<(Option, Vec)> { +// let mut variant_ids = vec![]; +// let mut action_kind: Option = None; + +// for proto in ActionPrototype::find_for_func(ctx, func_id).await? 
{ +// if let Some(action_kind) = &action_kind { +// if action_kind != proto.kind() { +// return Err(FuncError::ActionFuncMultipleKinds(func_id)); +// } +// } else { +// action_kind = Some(*proto.kind()); +// } + +// if proto.schema_variant_id().is_some() { +// variant_ids.push(proto.schema_variant_id()); +// } +// } + +// if !variant_ids.is_empty() && action_kind.is_none() { +// return Err(FuncError::ActionKindMissing(func_id)); +// } + +// Ok((action_kind, variant_ids)) +// } + +// async fn attribute_prototypes_into_schema_variants_and_components( +// ctx: &DalContext, +// func_id: FuncId, +// ) -> FuncResult<(Vec, Vec)> { +// let schema_variants_components = +// AttributePrototype::find_for_func_as_variant_and_component(ctx, func_id).await?; + +// let mut schema_variant_ids = vec![]; +// let mut component_ids = vec![]; + +// for (schema_variant_id, component_id) in schema_variants_components { +// if component_id == ComponentId::NONE { +// schema_variant_ids.push(schema_variant_id); +// } else { +// component_ids.push(component_id); +// } +// } + +// Ok((schema_variant_ids, component_ids)) +// } + +// pub async fn get_leaf_function_inputs( +// ctx: &DalContext, +// func_id: FuncId, +// ) -> FuncResult> { +// Ok(FuncArgument::list_for_func(ctx, func_id) +// .await? 
+// .iter() +// .filter_map(|arg| LeafInputLocation::maybe_from_arg_name(arg.name())) +// .collect()) +// } +// pub async fn get_func_view(ctx: &DalContext, func: &Func) -> FuncResult { - let arguments = FuncArgument::list_for_func(ctx, *func.id()).await?; + let arguments = FuncArgument::list_for_func(ctx, func.id).await?; - let (associations, input_type) = match func.backend_kind() { + let (associations, input_type) = match &func.backend_kind { FuncBackendKind::JsAttribute => { - let (associations, input_type) = match func.backend_response_type() { + let (associations, input_type) = match &func.backend_response_type { FuncBackendResponseType::CodeGeneration | FuncBackendResponseType::Qualification => { - let (schema_variant_ids, component_ids) = - attribute_prototypes_into_schema_variants_and_components(ctx, *func.id()) - .await?; - - let inputs = get_leaf_function_inputs(ctx, *func.id()).await?; - let input_type = - compile_leaf_function_input_types(ctx, &schema_variant_ids, &inputs) - .await?; - - ( - Some(match func.backend_response_type() { - FuncBackendResponseType::CodeGeneration => { - FuncAssociations::CodeGeneration { - schema_variant_ids, - component_ids, - inputs, - } - } - - FuncBackendResponseType::Qualification => { - FuncAssociations::Qualification { - schema_variant_ids, - component_ids, - inputs: get_leaf_function_inputs(ctx, *func.id()).await?, - } - } - _ => unreachable!("the match above ensures this is unreachable"), - }), - input_type, - ) + (None, "".into()) + // let (schema_variant_ids, component_ids) = + // attribute_prototypes_into_schema_variants_and_components(ctx, *func.id()) + // .await?; + // + // let inputs = get_leaf_function_inputs(ctx, *func.id()).await?; + // let input_type = + // compile_leaf_function_input_types(ctx, &schema_variant_ids, &inputs) + // .await?; + // + // ( + // Some(match func.backend_response_type() { + // FuncBackendResponseType::CodeGeneration => { + // FuncAssociations::CodeGeneration { + // 
schema_variant_ids, + // component_ids, + // inputs, + // } + // } + // + // FuncBackendResponseType::Qualification => { + // FuncAssociations::Qualification { + // schema_variant_ids, + // component_ids, + // inputs: get_leaf_function_inputs(ctx, *func.id()).await?, + // } + // } + // _ => unreachable!("the match above ensures this is unreachable"), + // }), + // input_type, + // ) } _ => { - let protos = AttributePrototype::find_for_func(ctx, func.id()).await?; + // let protos = AttributePrototype::find_for_func(ctx, func.id()).await?; - let mut prototypes = Vec::with_capacity(protos.len()); - for proto in &protos { - prototypes.push( - prototype_view_for_attribute_prototype(ctx, *func.id(), proto).await?, - ); - } + // let mut prototypes = Vec::with_capacity(protos.len()); + // for proto in &protos { + // prototypes.push( + // prototype_view_for_attribute_prototype(ctx, *func.id(), proto).await?, + // ); + // } - let ts_types = compile_attribute_function_types(ctx, &prototypes).await?; + // let ts_types = compile_attribute_function_types(ctx, &prototypes).await?; ( Some(FuncAssociations::Attribute { - prototypes, + prototypes: vec![], arguments: arguments .iter() .map(|arg| FuncArgumentView { - id: *arg.id(), - name: arg.name().to_owned(), - kind: arg.kind().to_owned(), - element_kind: arg.element_kind().cloned(), + id: arg.id, + name: arg.name.to_owned(), + kind: arg.kind, + element_kind: arg.element_kind.to_owned(), }) .collect(), }), - ts_types, + "type Input = any".into(), ) } }; (associations, input_type) } - FuncBackendKind::JsAction => { - let (kind, schema_variant_ids) = - action_prototypes_into_schema_variants_and_components(ctx, *func.id()).await?; - - let ts_types = compile_action_types(ctx, &schema_variant_ids).await?; - - let associations = Some(FuncAssociations::Action { - schema_variant_ids, - kind, - }); - - (associations, ts_types) - } - FuncBackendKind::JsReconciliation => { - return Err(FuncError::EditingReconciliationFuncsNotImplemented); - } 
- FuncBackendKind::JsValidation => { - return Err(FuncError::EditingValidationFuncsNotImplemented); - } + // FuncBackendKind::JsAction => { + // let (kind, schema_variant_ids) = + // action_prototypes_into_schema_variants_and_components(ctx, *func.id()).await?; + // + // let ts_types = compile_action_types(ctx, &schema_variant_ids).await?; + // + // let associations = Some(FuncAssociations::Action { + // schema_variant_ids, + // kind, + // }); + // + // (associations, ts_types) + // } + // FuncBackendKind::JsReconciliation => { + // return Err(FuncError::EditingReconciliationFuncsNotImplemented); + // } + // FuncBackendKind::JsValidation => { + // let protos = ValidationPrototype::list_for_func(ctx, *func.id()).await?; + // let input_type = compile_validation_types(ctx, &protos).await?; + // + // let associations = Some(FuncAssociations::Validation { + // prototypes: protos + // .iter() + // .map(|proto| ValidationPrototypeView { + // schema_variant_id: proto.context().schema_variant_id(), + // prop_id: proto.context().prop_id(), + // }) + // .collect(), + // }); + // (associations, input_type) + // } FuncBackendKind::JsAuthentication => { - let schema_variant_ids = AuthenticationPrototype::find_for_func(ctx, *func.id()) - .await? 
- .iter() - .map(|p| p.schema_variant_id()) - .collect(); + let schema_variant_ids = Func::list_schema_variants_for_auth_func(ctx, func.id).await?; ( Some(FuncAssociations::Authentication { schema_variant_ids }), @@ -527,289 +579,373 @@ pub async fn get_func_view(ctx: &DalContext, func: &Func) -> FuncResult (None, String::new()), }; - - let is_revertible = is_func_revertible(ctx, func).await?; - let types = [ - compile_return_types(*func.backend_response_type(), *func.backend_kind()), - &input_type, - langjs_types(), - ] - .join("\n"); + // + // let is_revertible = is_func_revertible(ctx, func).await?; + // let types = [ + // compile_return_types(*func.backend_response_type(), *func.backend_kind()), + // &input_type, + // langjs_types(), + // ] + // .join("\n"); Ok(GetFuncResponse { - id: func.id().to_owned(), + id: func.id.to_owned(), variant: func.try_into()?, - display_name: func.display_name().map(Into::into), - name: func.name().to_owned(), - description: func.description().map(|d| d.to_owned()), + display_name: func.display_name.as_ref().map(Into::into), + name: func.name.to_owned(), + description: func.description.as_ref().map(|d| d.to_owned()), code: func.code_plaintext()?, - is_builtin: func.builtin(), - is_revertible, + is_builtin: func.builtin, + is_revertible: false, associations, - types, + types: input_type, }) } -pub fn compile_return_types(ty: FuncBackendResponseType, kind: FuncBackendKind) -> &'static str { - if matches!(kind, FuncBackendKind::JsAttribute) - && !matches!( - ty, - FuncBackendResponseType::CodeGeneration | FuncBackendResponseType::Qualification - ) - { - return ""; // attribute functions have their output compiled dynamically - } - - match ty { - FuncBackendResponseType::Boolean => "type Output = boolean | null;", - FuncBackendResponseType::String => "type Output = string | null;", - FuncBackendResponseType::Integer => "type Output = number | null;", - FuncBackendResponseType::Qualification => { - "type Output { - result: 'success' | 
'warning' | 'failure'; - message?: string | null; -}" - } - FuncBackendResponseType::CodeGeneration => { - "type Output { - format: string; - code: string; -}" - } - FuncBackendResponseType::Validation => { - "type Output { - valid: boolean; - message: string; -}" - } - FuncBackendResponseType::Reconciliation => { - "type Output { - updates: { [key: string]: unknown }; - actions: string[]; - message: string | null; -}" - } - FuncBackendResponseType::Action => { - "type Output { - status: 'ok' | 'warning' | 'error'; - payload?: { [key: string]: unknown } | null; - message?: string | null; -}" - } - FuncBackendResponseType::Json => "type Output = any;", - // Note: there is no ts function returning those - FuncBackendResponseType::Identity => "interface Output extends Input {}", - FuncBackendResponseType::Array => "type Output = any[];", - FuncBackendResponseType::Map => "type Output = Record;", - FuncBackendResponseType::Object => "type Output = any;", - FuncBackendResponseType::Unset => "type Output = undefined | null;", - FuncBackendResponseType::Void => "type Output = void;", - FuncBackendResponseType::SchemaVariantDefinition => concat!( - include_str!("./ts_types/asset_types_with_secrets.d.ts"), - "\n", - include_str!("./ts_types/joi.d.ts"), - "\n", - "type Output = any;" - ), - } -} - -async fn get_per_variant_types_for_prop_path( - ctx: &DalContext, - variant_ids: &[SchemaVariantId], - path: &[&str], -) -> FuncResult { - let mut per_variant_types = vec![]; - - for variant_id in variant_ids { - let prop = SchemaVariant::find_prop_in_tree(ctx, *variant_id, path).await?; - let ts_type = prop.ts_type(ctx).await?; - - if !per_variant_types.contains(&ts_type) { - per_variant_types.push(ts_type); - } - } - - Ok(per_variant_types.join(" | ")) -} - -async fn compile_leaf_function_input_types( - ctx: &DalContext, - schema_variant_ids: &[SchemaVariantId], - inputs: &[LeafInputLocation], -) -> FuncResult { - let mut ts_type = "type Input = {\n".to_string(); - - for 
input_location in inputs { - let input_property = format!( - "{}?: {} | null;\n", - input_location.arg_name(), - get_per_variant_types_for_prop_path( - ctx, - schema_variant_ids, - &input_location.prop_path(), - ) - .await? - ); - ts_type.push_str(&input_property); - } - ts_type.push_str("};"); - - Ok(ts_type) -} - -async fn compile_attribute_function_types( - ctx: &DalContext, - prototype_views: &[AttributePrototypeView], -) -> FuncResult { - let mut input_ts_types = "type Input = {\n".to_string(); - - let mut output_ts_types = vec![]; - let mut argument_types = HashMap::new(); - for prototype_view in prototype_views { - for arg in &prototype_view.prototype_arguments { - if let Some(ip_id) = arg.internal_provider_id { - let ip = InternalProvider::get_by_id(ctx, &ip_id) - .await? - .ok_or(InternalProviderError::NotFound(ip_id))?; - - let ts_type = if ip.prop_id().is_none() { - "object".to_string() - } else { - Prop::get_by_id(ctx, ip.prop_id()) - .await? - .ok_or(PropError::NotFound( - *ip.prop_id(), - ctx.visibility().to_owned(), - ))? - .ts_type(ctx) - .await? - }; - - if !argument_types.contains_key(&arg.func_argument_name) { - argument_types.insert(arg.func_argument_name.clone(), vec![ts_type]); - } else if let Some(ts_types_for_arg) = - argument_types.get_mut(&arg.func_argument_name) - { - if !ts_types_for_arg.contains(&ts_type) { - ts_types_for_arg.push(ts_type) - } - } - } - - let output_type = if let Some(output_prop_id) = prototype_view.prop_id { - Prop::get_by_id(ctx, &output_prop_id) - .await? - .ok_or(PropError::NotFound( - output_prop_id, - ctx.visibility().to_owned(), - ))? - .ts_type(ctx) - .await? 
- } else { - "any".to_string() - }; - - if !output_ts_types.contains(&output_type) { - output_ts_types.push(output_type); - } - } - } - for (arg_name, ts_types) in argument_types.iter() { - input_ts_types.push_str( - format!( - "{}?: {} | null;\n", - arg_name.as_ref().unwrap_or(&"".to_string()).to_owned(), - ts_types.join(" | ") - ) - .as_str(), - ); - } - input_ts_types.push_str("};"); - - let output_ts = format!("type Output = {};", output_ts_types.join(" | ")); - - Ok(format!("{}\n{}", input_ts_types, output_ts)) -} - -// Note: ComponentKind::Credential is unused and the implementation is broken, so let's ignore it for now -async fn compile_action_types( - ctx: &DalContext, - variant_ids: &[SchemaVariantId], -) -> FuncResult { - let mut ts_types = vec![]; - for variant_id in variant_ids { - let prop = SchemaVariant::find_prop_in_tree(ctx, *variant_id, &["root"]).await?; - ts_types.push(prop.ts_type(ctx).await?); - } - - Ok(format!( - "type Input {{ - kind: 'standard'; - properties: {}; -}}", - ts_types.join(" | "), - )) -} - -// TODO: stop duplicating definition -// TODO: use execa types instead of any -// TODO: add os, fs and path types (possibly fetch but I think it comes with DOM) -fn langjs_types() -> &'static str { - "declare namespace YAML { - function stringify(obj: unknown): string; -} - - declare namespace zlib { - function gzip(inputstr: string, callback: any); - } - - declare namespace requestStorage { - function getEnv(key: string): string; - function getItem(key: string): any; - function getEnvKeys(): string[]; - function getKeys(): string[]; - } - - declare namespace siExec { - - interface WatchArgs { - cmd: string, - args?: readonly string[], - execaOptions?: Options, - retryMs?: number, - maxRetryCount?: number, - callback: (child: execa.ExecaReturnValue) => Promise, - } - - interface WatchResult { - result: SiExecResult, - failed?: 'deadlineExceeded' | 'commandFailed', - } - - type SiExecResult = ExecaReturnValue; - - async function 
waitUntilEnd(execaFile: string, execaArgs?: string[], execaOptions?: any): Promise; - async function watch(options: WatchArgs, deadlineCount?: number): Promise; -}" -} - +// pub fn compile_return_types(ty: FuncBackendResponseType, kind: FuncBackendKind) -> &'static str { +// if matches!(kind, FuncBackendKind::JsAttribute) +// && !matches!( +// ty, +// FuncBackendResponseType::CodeGeneration | FuncBackendResponseType::Qualification +// ) +// { +// return ""; // attribute functions have their output compiled dynamically +// } + +// match ty { +// FuncBackendResponseType::Boolean => "type Output = boolean | null;", +// FuncBackendResponseType::String => "type Output = string | null;", +// FuncBackendResponseType::Integer => "type Output = number | null;", +// FuncBackendResponseType::Qualification => { +// "type Output { +// result: 'success' | 'warning' | 'failure'; +// message?: string | null; +// }" +// } +// FuncBackendResponseType::CodeGeneration => { +// "type Output { +// format: string; +// code: string; +// }" +// } +// FuncBackendResponseType::Validation => { +// "type Output { +// valid: boolean; +// message: string; +// }" +// } +// FuncBackendResponseType::Reconciliation => { +// "type Output { +// updates: { [key: string]: unknown }; +// actions: string[]; +// message: string | null; +// }" +// } +// FuncBackendResponseType::Action => { +// "type Output { +// status: 'ok' | 'warning' | 'error'; +// payload?: { [key: string]: unknown } | null; +// message?: string | null; +// }" +// } +// FuncBackendResponseType::Json => "type Output = any;", +// // Note: there is no ts function returning those +// FuncBackendResponseType::Identity => "interface Output extends Input {}", +// FuncBackendResponseType::Array => "type Output = any[];", +// FuncBackendResponseType::Map => "type Output = Record;", +// FuncBackendResponseType::Object => "type Output = any;", +// FuncBackendResponseType::Unset => "type Output = undefined | null;", +// 
FuncBackendResponseType::Void => "type Output = void;", +// FuncBackendResponseType::SchemaVariantDefinition => concat!( +// include_str!("./ts_types/asset_builder.d.ts"), +// "\n", +// "type Output = any;" +// ), +// } +// } + +// pub fn compile_return_types_2(ty: FuncBackendResponseType, kind: FuncBackendKind) -> &'static str { +// if matches!(kind, FuncBackendKind::JsAttribute) +// && !matches!( +// ty, +// FuncBackendResponseType::CodeGeneration | FuncBackendResponseType::Qualification +// ) +// { +// return ""; // attribute functions have their output compiled dynamically +// } + +// match ty { +// FuncBackendResponseType::Boolean => "type Output = boolean | null;", +// FuncBackendResponseType::String => "type Output = string | null;", +// FuncBackendResponseType::Integer => "type Output = number | null;", +// FuncBackendResponseType::Qualification => { +// "type Output { +// result: 'success' | 'warning' | 'failure'; +// message?: string | null; +// }" +// } +// FuncBackendResponseType::CodeGeneration => { +// "type Output { +// format: string; +// code: string; +// }" +// } +// FuncBackendResponseType::Validation => { +// "type Output { +// valid: boolean; +// message: string; +// }" +// } +// FuncBackendResponseType::Reconciliation => { +// "type Output { +// updates: { [key: string]: unknown }; +// actions: string[]; +// message: string | null; +// }" +// } +// FuncBackendResponseType::Action => { +// "type Output { +// status: 'ok' | 'warning' | 'error'; +// payload?: { [key: string]: unknown } | null; +// message?: string | null; +// }" +// } +// FuncBackendResponseType::Json => "type Output = any;", +// // Note: there is no ts function returning those +// FuncBackendResponseType::Identity => "interface Output extends Input {}", +// FuncBackendResponseType::Array => "type Output = any[];", +// FuncBackendResponseType::Map => "type Output = Record;", +// FuncBackendResponseType::Object => "type Output = any;", +// FuncBackendResponseType::Unset => "type 
Output = undefined | null;", +// FuncBackendResponseType::Void => "type Output = void;", +// FuncBackendResponseType::SchemaVariantDefinition => concat!( +// include_str!("./ts_types/asset_types_with_secrets.d.ts"), +// "\n", +// "type Output = any;" +// ), +// } +// } + +// async fn compile_validation_types( +// ctx: &DalContext, +// prototypes: &[ValidationPrototype], +// ) -> FuncResult { +// let mut input_fields = Vec::new(); +// for prototype in prototypes { +// let prop = Prop::get_by_id(ctx, &prototype.context().prop_id()) +// .await? +// .ok_or(PropError::NotFound( +// prototype.context().prop_id(), +// *ctx.visibility(), +// ))?; +// let ts_type = prop.ts_type(ctx).await?; +// input_fields.push(ts_type); +// } +// if input_fields.is_empty() { +// Ok("type Input = never;".to_owned()) +// } else { +// let variants = input_fields.join(" | "); +// let types = format!("type Input = {variants};"); +// Ok(types) +// } +// } + +// async fn get_per_variant_types_for_prop_path( +// ctx: &DalContext, +// variant_ids: &[SchemaVariantId], +// path: &[&str], +// ) -> FuncResult { +// let mut per_variant_types = vec![]; + +// for variant_id in variant_ids { +// let prop = SchemaVariant::find_prop_in_tree(ctx, *variant_id, path).await?; +// let ts_type = prop.ts_type(ctx).await?; + +// if !per_variant_types.contains(&ts_type) { +// per_variant_types.push(ts_type); +// } +// } + +// Ok(per_variant_types.join(" | ")) +// } + +// async fn compile_leaf_function_input_types( +// ctx: &DalContext, +// schema_variant_ids: &[SchemaVariantId], +// inputs: &[LeafInputLocation], +// ) -> FuncResult { +// let mut ts_type = "type Input = {\n".to_string(); + +// for input_location in inputs { +// let input_property = format!( +// "{}?: {} | null;\n", +// input_location.arg_name(), +// get_per_variant_types_for_prop_path( +// ctx, +// schema_variant_ids, +// &input_location.prop_path(), +// ) +// .await? 
+// ); +// ts_type.push_str(&input_property); +// } +// ts_type.push_str("};"); + +// Ok(ts_type) +// } + +// async fn compile_attribute_function_types( +// ctx: &DalContext, +// prototype_views: &[AttributePrototypeView], +// ) -> FuncResult { +// let mut input_ts_types = "type Input = {\n".to_string(); + +// let mut output_ts_types = vec![]; +// let mut argument_types = HashMap::new(); +// for prototype_view in prototype_views { +// for arg in &prototype_view.prototype_arguments { +// if let Some(ip_id) = arg.internal_provider_id { +// let ip = InternalProvider::get_by_id(ctx, &ip_id) +// .await? +// .ok_or(InternalProviderError::NotFound(ip_id))?; + +// let ts_type = if ip.prop_id().is_none() { +// "object".to_string() +// } else { +// Prop::get_by_id(ctx, ip.prop_id()) +// .await? +// .ok_or(PropError::NotFound( +// *ip.prop_id(), +// ctx.visibility().to_owned(), +// ))? +// .ts_type(ctx) +// .await? +// }; + +// if !argument_types.contains_key(&arg.func_argument_name) { +// argument_types.insert(arg.func_argument_name.clone(), vec![ts_type]); +// } else if let Some(ts_types_for_arg) = +// argument_types.get_mut(&arg.func_argument_name) +// { +// if !ts_types_for_arg.contains(&ts_type) { +// ts_types_for_arg.push(ts_type) +// } +// } +// } + +// let output_type = if let Some(output_prop_id) = prototype_view.prop_id { +// Prop::get_by_id(ctx, &output_prop_id) +// .await? +// .ok_or(PropError::NotFound( +// output_prop_id, +// ctx.visibility().to_owned(), +// ))? +// .ts_type(ctx) +// .await? 
+// } else { +// "any".to_string() +// }; + +// if !output_ts_types.contains(&output_type) { +// output_ts_types.push(output_type); +// } +// } +// } +// for (arg_name, ts_types) in argument_types.iter() { +// input_ts_types.push_str( +// format!( +// "{}?: {} | null;\n", +// arg_name.as_ref().unwrap_or(&"".to_string()).to_owned(), +// ts_types.join(" | ") +// ) +// .as_str(), +// ); +// } +// input_ts_types.push_str("};"); + +// let output_ts = format!("type Output = {};", output_ts_types.join(" | ")); + +// Ok(format!("{}\n{}", input_ts_types, output_ts)) +// } + +// // Note: ComponentKind::Credential is unused and the implementation is broken, so let's ignore it for now +// async fn compile_action_types( +// ctx: &DalContext, +// variant_ids: &[SchemaVariantId], +// ) -> FuncResult { +// let mut ts_types = vec![]; +// for variant_id in variant_ids { +// let prop = SchemaVariant::find_prop_in_tree(ctx, *variant_id, &["root"]).await?; +// ts_types.push(prop.ts_type(ctx).await?); +// } + +// Ok(format!( +// "type Input {{ +// kind: 'standard'; +// properties: {}; +// }}", +// ts_types.join(" | "), +// )) +// } + +// // TODO: stop duplicating definition +// // TODO: use execa types instead of any +// // TODO: add os, fs and path types (possibly fetch but I think it comes with DOM) +// fn langjs_types() -> &'static str { +// "declare namespace YAML { +// function stringify(obj: unknown): string; +// } +// +// declare namespace zlib { +// function gzip(inputstr: string, callback: any); +// } +// declare namespace requestStorage { +// function getEnv(key: string): string; +// function getItem(key: string): any; +// function getEnvKeys(): string[]; +// function getKeys(): string[]; +// } +// +// declare namespace siExec { +// +// interface WatchArgs { +// cmd: string, +// args?: readonly string[], +// execaOptions?: Options, +// retryMs?: number, +// maxRetryCount?: number, +// callback: (child: execa.ExecaReturnValue) => Promise, +// } +// +// interface WatchResult { 
+// result: SiExecResult, +// failed?: 'deadlineExceeded' | 'commandFailed', +// } +// +// type SiExecResult = ExecaReturnValue; +// +// async function waitUntilEnd(execaFile: string, execaArgs?: string[], execaOptions?: any): Promise; +// async function watch(options: WatchArgs, deadlineCount?: number): Promise; +// }" +// } +// +// pub fn routes() -> Router { Router::new() .route("/list_funcs", get(list_funcs::list_funcs)) .route("/get_func", get(get_func::get_func)) - .route( - "/get_func_last_execution", - get(get_func::get_latest_func_execution), - ) + // .route( + // "/get_func_last_execution", + // get(get_func::get_latest_func_execution), + // ) .route("/create_func", post(create_func::create_func)) .route("/save_func", post(save_func::save_func)) - .route("/delete_func", post(delete_func::delete_func)) - .route("/save_and_exec", post(save_and_exec::save_and_exec)) - .route("/execute", post(execute::execute)) - .route("/revert_func", post(revert_func::revert_func)) - .route( - "/list_input_sources", - get(list_input_sources::list_input_sources), - ) + // .route("/delete_func", post(delete_func::delete_func)) + // .route("/save_and_exec", post(save_and_exec::save_and_exec)) + // .route("/execute", post(execute::execute)) + // .route("/revert_func", post(revert_func::revert_func)) + // .route( + // "/list_input_sources", + // get(list_input_sources::list_input_sources), + // ) } diff --git a/lib/sdf-server/src/server/service/func/create_func.rs b/lib/sdf-server/src/server/service/func/create_func.rs index aadf0f01d0..dcaca9c60f 100644 --- a/lib/sdf-server/src/server/service/func/create_func.rs +++ b/lib/sdf-server/src/server/service/func/create_func.rs @@ -1,18 +1,18 @@ -use super::{FuncResult, FuncVariant}; -use crate::server::extract::{AccessBuilder, HandlerContext, PosthogClient}; -use crate::server::tracking::track; -use crate::service::func::FuncError; use axum::extract::OriginalUri; use axum::{response::IntoResponse, Json}; -use 
dal::authentication_prototype::{AuthenticationPrototype, AuthenticationPrototypeContext}; +use base64::engine::general_purpose; +use base64::Engine; use dal::{ - generate_name, ActionKind, ActionPrototype, ActionPrototypeContext, AttributeContextBuilder, - AttributePrototype, ChangeSet, DalContext, ExternalProviderId, Func, FuncBackendResponseType, - FuncId, LeafInputLocation, LeafKind, PropId, SchemaVariant, SchemaVariantId, StandardModel, - Visibility, WsEvent, + generate_name, ActionKind, ChangeSet, DalContext, ExternalProviderId, Func, + FuncBackendResponseType, FuncId, PropId, SchemaVariant, SchemaVariantId, Visibility, }; use serde::{Deserialize, Serialize}; +use super::{FuncResult, FuncVariant}; +use crate::server::extract::{AccessBuilder, HandlerContext, PosthogClient}; +use crate::server::tracking::track; +use crate::service::func::FuncError; + #[remain::sorted] #[derive(Deserialize, Serialize, Debug, Clone, PartialEq, Eq)] #[serde(tag = "type", rename_all = "camelCase")] @@ -93,10 +93,22 @@ async fn create_func_stub( return Err(FuncError::FuncNameExists(name)); } - let mut func = Func::new(ctx, name, variant.into(), response_type).await?; + let code_base64 = general_purpose::STANDARD_NO_PAD.encode(code); - func.set_code_plaintext(ctx, Some(code)).await?; - func.set_handler(ctx, Some(handler)).await?; + let func = Func::new( + ctx, + name, + None::, + None::, + None::, + false, + false, + variant.into(), + response_type, + Some(handler), + Some(code_base64), + ) + .await?; Ok(func) } @@ -104,7 +116,7 @@ async fn create_func_stub( async fn create_action_func( ctx: &DalContext, name: Option, - options: Option, + _options: Option, ) -> FuncResult { let func = create_func_stub( ctx, @@ -116,58 +128,58 @@ async fn create_action_func( ) .await?; - if let Some(CreateFuncOptions::ActionOptions { - schema_variant_id, - action_kind, - }) = options - { - ActionPrototype::new( - ctx, - *func.id(), - action_kind, - ActionPrototypeContext { schema_variant_id }, - ) - 
.await?; - } + // if let Some(CreateFuncOptions::ActionOptions { + // schema_variant_id, + // action_kind, + // }) = options + // { + // ActionPrototype::new( + // ctx, + // *func.id(), + // action_kind, + // ActionPrototypeContext { schema_variant_id }, + // ) + // .await?; + // } Ok(func) } -async fn create_leaf_prototype( - ctx: &DalContext, - func: &Func, - schema_variant_id: SchemaVariantId, - variant: FuncVariant, -) -> FuncResult<()> { - let leaf_kind = match variant { - FuncVariant::CodeGeneration => LeafKind::CodeGeneration, - FuncVariant::Qualification => LeafKind::Qualification, - _ => return Err(FuncError::FuncOptionsAndVariantMismatch), - }; - - let input_locations = match leaf_kind { - LeafKind::CodeGeneration => vec![LeafInputLocation::Domain], - LeafKind::Qualification => vec![LeafInputLocation::Domain, LeafInputLocation::Code], - }; - - SchemaVariant::upsert_leaf_function( - ctx, - schema_variant_id, - None, - leaf_kind, - &input_locations, - func, - ) - .await?; - - Ok(()) -} +//async fn create_leaf_prototype( +// ctx: &DalContext, +// func: &Func, +// schema_variant_id: SchemaVariantId, +// variant: FuncVariant, +//) -> FuncResult<()> { +// let leaf_kind = match variant { +// FuncVariant::CodeGeneration => LeafKind::CodeGeneration, +// FuncVariant::Qualification => LeafKind::Qualification, +// _ => return Err(FuncError::FuncOptionsAndVariantMismatch), +// }; +// +// let input_locations = match leaf_kind { +// LeafKind::CodeGeneration => vec![LeafInputLocation::Domain], +// LeafKind::Qualification => vec![LeafInputLocation::Domain, LeafInputLocation::Code], +// }; +// +// SchemaVariant::upsert_leaf_function( +// ctx, +// schema_variant_id, +// None, +// leaf_kind, +// &input_locations, +// func, +// ) +// .await?; +// +// Ok(()) +//} async fn create_attribute_func( ctx: &DalContext, name: Option, variant: FuncVariant, - options: Option, + _options: Option, ) -> FuncResult { let (code, handler, response_type) = match variant { 
FuncVariant::Attribute => ( @@ -194,62 +206,62 @@ async fn create_attribute_func( let func = create_func_stub(ctx, name, variant, response_type, code, handler).await?; - if let Some(options) = options { - match (variant, options) { - ( - FuncVariant::Attribute, - CreateFuncOptions::AttributeOptions { - output_location, .. - }, - ) => { - // XXX: we need to search *up* the attribute tree to ensure that - // the parent of this prop is not also set by a function. But we - // should also hide props on the frontend if they are the - // children of a value that is set by a function. - let mut context_builder = AttributeContextBuilder::new(); - match output_location { - AttributeOutputLocation::OutputSocket { - external_provider_id, - } => { - context_builder.set_external_provider_id(external_provider_id); - } - AttributeOutputLocation::Prop { prop_id } => { - context_builder.set_prop_id(prop_id); - } - } - - let context = context_builder.to_context()?; - let mut prototype = - AttributePrototype::find_for_context_and_key(ctx, context, &None) - .await? - .pop() - .ok_or(FuncError::AttributePrototypeMissing)?; - - if let Some(func) = Func::get_by_id(ctx, &prototype.func_id()).await? { - if !func.is_intrinsic() { - return Err(FuncError::AttributePrototypeAlreadySetByFunc( - func.name().into(), - )); - } - } - - prototype.set_func_id(ctx, *func.id()).await?; - } - ( - FuncVariant::CodeGeneration, - CreateFuncOptions::CodeGenerationOptions { schema_variant_id }, - ) => { - create_leaf_prototype(ctx, &func, schema_variant_id, variant).await?; - } - ( - FuncVariant::Qualification, - CreateFuncOptions::QualificationOptions { schema_variant_id }, - ) => { - create_leaf_prototype(ctx, &func, schema_variant_id, variant).await?; - } - (_, _) => return Err(FuncError::FuncOptionsAndVariantMismatch), - } - } + // if let Some(options) = options { + // match (variant, options) { + // ( + // FuncVariant::Attribute, + // CreateFuncOptions::AttributeOptions { + // output_location, .. 
+ // }, + // ) => { + // // XXX: we need to search *up* the attribute tree to ensure that + // // the parent of this prop is not also set by a function. But we + // // should also hide props on the frontend if they are the + // // children of a value that is set by a function. + // let mut context_builder = AttributeContextBuilder::new(); + // match output_location { + // AttributeOutputLocation::OutputSocket { + // external_provider_id, + // } => { + // context_builder.set_external_provider_id(external_provider_id); + // } + // AttributeOutputLocation::Prop { prop_id } => { + // context_builder.set_prop_id(prop_id); + // } + // } + + // let context = context_builder.to_context()?; + // let mut prototype = + // AttributePrototype::find_for_context_and_key(ctx, context, &None) + // .await? + // .pop() + // .ok_or(FuncError::AttributePrototypeMissing)?; + + // if let Some(func) = Func::get_by_id(ctx, &prototype.func_id()).await? { + // if !func.is_intrinsic() { + // return Err(FuncError::AttributePrototypeAlreadySetByFunc( + // func.name().into(), + // )); + // } + // } + + // prototype.set_func_id(ctx, *func.id()).await?; + // } + // ( + // FuncVariant::CodeGeneration, + // CreateFuncOptions::CodeGenerationOptions { schema_variant_id }, + // ) => { + // create_leaf_prototype(ctx, &func, schema_variant_id, variant).await?; + // } + // ( + // FuncVariant::Qualification, + // CreateFuncOptions::QualificationOptions { schema_variant_id }, + // ) => { + // create_leaf_prototype(ctx, &func, schema_variant_id, variant).await?; + // } + // (_, _) => return Err(FuncError::FuncOptionsAndVariantMismatch), + // } + // } Ok(func) } @@ -270,12 +282,7 @@ async fn create_authentication_func( .await?; if let Some(CreateFuncOptions::AuthenticationOptions { schema_variant_id }) = options { - AuthenticationPrototype::new( - ctx, - *func.id(), - AuthenticationPrototypeContext { schema_variant_id }, - ) - .await?; + SchemaVariant::new_authentication_prototype(ctx, func.id, 
schema_variant_id).await?; } Ok(func) @@ -339,17 +346,13 @@ pub async fn create_func( &original_uri, "created_func", serde_json::json!({ - "func_id": func.id().to_owned(), - "func_handler": func.handler().map(|h| h.to_owned()), - "func_name": func.name().to_owned(), + "func_id": func.id, + "func_handler": func.handler.as_ref().map(|h| h.to_owned()), + "func_name": func.name.to_owned(), "func_variant": func_variant, }), ); - WsEvent::func_created(&ctx, *func.id()) - .await? - .publish_on_commit(&ctx) - .await?; ctx.commit().await?; let mut response = axum::response::Response::builder(); @@ -358,10 +361,10 @@ pub async fn create_func( response = response.header("force_changeset_pk", force_changeset_pk.to_string()); } Ok(response.body(serde_json::to_string(&CreateFuncResponse { - id: func.id().to_owned(), - handler: func.handler().map(|h| h.to_owned()), + id: func.id, + handler: func.handler.as_ref().map(|h| h.to_owned()), variant: func_variant, - name: func.name().to_owned(), + name: func.name.to_owned(), code: func.code_plaintext()?, })?)?) 
} diff --git a/lib/sdf-server/src/server/service/func/get_func.rs b/lib/sdf-server/src/server/service/func/get_func.rs index 941858fe8e..816652aa98 100644 --- a/lib/sdf-server/src/server/service/func/get_func.rs +++ b/lib/sdf-server/src/server/service/func/get_func.rs @@ -1,10 +1,10 @@ -use super::{FuncAssociations, FuncError, FuncResult, FuncVariant}; -use crate::server::extract::{AccessBuilder, HandlerContext}; use axum::{extract::Query, Json}; -use dal::func::execution::{FuncExecution, FuncExecutionState}; -use dal::{Func, FuncId, StandardModel, Visibility}; use serde::{Deserialize, Serialize}; -use veritech_client::{FunctionResultFailure, OutputStream}; + +use dal::{Func, FuncId, Visibility}; + +use super::{FuncAssociations, FuncResult, FuncVariant}; +use crate::server::extract::{AccessBuilder, HandlerContext}; #[derive(Deserialize, Serialize, Debug)] #[serde(rename_all = "camelCase")] @@ -14,15 +14,15 @@ pub struct GetLatestFuncExecutionRequest { pub visibility: Visibility, } -#[derive(Deserialize, Serialize, Debug)] -#[serde(rename_all = "camelCase")] -pub struct GetLatestFuncExecutionResponse { - pub id: FuncId, - pub state: FuncExecutionState, - pub value: Option, - pub output_stream: Option>, - pub function_failure: Option, -} +// #[derive(Deserialize, Serialize, Debug)] +// #[serde(rename_all = "camelCase")] +// pub struct GetLatestFuncExecutionResponse { +// pub id: FuncId, +// pub state: FuncExecutionState, +// pub value: Option, +// pub output_stream: Option>, +// pub function_failure: Option, +// } #[derive(Deserialize, Serialize, Debug)] #[serde(rename_all = "camelCase")] @@ -54,28 +54,28 @@ pub async fn get_func( ) -> FuncResult> { let ctx = builder.build(request_ctx.build(request.visibility)).await?; - let func = Func::get_by_id(&ctx, &request.id) - .await? 
- .ok_or(FuncError::FuncNotFound)?; + dbg!("get_func"); + + let func = Func::get_by_id(&ctx, request.id).await?; Ok(Json(super::get_func_view(&ctx, &func).await?)) } -pub async fn get_latest_func_execution( - HandlerContext(builder): HandlerContext, - AccessBuilder(request_ctx): AccessBuilder, - Query(request): Query, -) -> FuncResult> { - let ctx = builder.build(request_ctx.build(request.visibility)).await?; +// pub async fn get_latest_func_execution( +// HandlerContext(builder): HandlerContext, +// AccessBuilder(request_ctx): AccessBuilder, +// Query(request): Query, +// ) -> FuncResult> { +// let ctx = builder.build(request_ctx.build(request.visibility)).await?; - let func_execution_result = - FuncExecution::get_latest_execution_by_func_id(&ctx, &request.id).await?; +// let func_execution_result = +// FuncExecution::get_latest_execution_by_func_id(&ctx, &request.id).await?; - Ok(Json(GetLatestFuncExecutionResponse { - id: *func_execution_result.func_id(), - state: func_execution_result.state(), - value: func_execution_result.value().cloned(), - output_stream: func_execution_result.output_stream().cloned(), - function_failure: func_execution_result.function_failure().clone(), - })) -} +// Ok(Json(GetLatestFuncExecutionResponse { +// id: *func_execution_result.func_id(), +// state: func_execution_result.state(), +// value: func_execution_result.value().cloned(), +// output_stream: func_execution_result.output_stream().cloned(), +// function_failure: func_execution_result.function_failure().clone(), +// })) +// } diff --git a/lib/sdf-server/src/server/service/func/list_funcs.rs b/lib/sdf-server/src/server/service/func/list_funcs.rs index aa45d75409..7207669fd0 100644 --- a/lib/sdf-server/src/server/service/func/list_funcs.rs +++ b/lib/sdf-server/src/server/service/func/list_funcs.rs @@ -1,8 +1,10 @@ -use super::{FuncError, FuncResult, FuncVariant}; -use crate::server::extract::{AccessBuilder, HandlerContext}; use axum::{extract::Query, Json}; -use dal::{Func, 
FuncBackendKind, FuncId, StandardModel, Visibility}; +use dal::{Func, FuncBackendKind, FuncId, Visibility}; use serde::{Deserialize, Serialize}; +use telemetry::prelude::*; + +use super::{FuncResult, FuncVariant}; +use crate::server::extract::{AccessBuilder, HandlerContext}; #[derive(Deserialize, Serialize, Debug)] #[serde(rename_all = "camelCase")] @@ -33,32 +35,43 @@ pub async fn list_funcs( AccessBuilder(request_ctx): AccessBuilder, Query(request): Query, ) -> FuncResult> { + let start = tokio::time::Instant::now(); + let ctx = builder.build(request_ctx.build(request.visibility)).await?; - let try_func_views: Vec> = Func::find_by_attr_in( - &ctx, - "backend_kind", - &[ - &FuncBackendKind::JsAction.as_ref().to_string(), - &FuncBackendKind::JsAuthentication.as_ref().to_string(), - &FuncBackendKind::JsAttribute.as_ref().to_string(), - &FuncBackendKind::JsValidation.as_ref().to_string(), - ], - ) - .await? - .iter() - .filter(|f| !f.hidden()) - .map(|func| { - Ok(ListedFuncView { - id: func.id().to_owned(), - handler: func.handler().map(|handler| handler.to_owned()), - variant: func.try_into()?, - name: func.name().into(), - display_name: func.display_name().map(Into::into), - is_builtin: func.builtin(), + info!("after context build: {:?}", start.elapsed()); + + let funcs = Func::list(&ctx).await?; + + info!("after content store fetch: {:?}", start.elapsed()); + + let customizable_backend_kinds = [ + FuncBackendKind::JsAction, + FuncBackendKind::JsAttribute, + FuncBackendKind::JsValidation, + FuncBackendKind::JsAuthentication, + ]; + + let try_func_views: Vec> = funcs + .iter() + .filter(|f| { + if f.hidden { + false + } else { + customizable_backend_kinds.contains(&f.backend_kind) + } + }) + .map(|func| { + Ok(ListedFuncView { + id: func.id, + handler: func.handler.to_owned().map(|handler| handler.to_owned()), + variant: func.try_into()?, + name: func.name.to_owned(), + display_name: func.display_name.to_owned().map(Into::into), + is_builtin: func.builtin, + }) }) 
- }) - .collect(); + .collect(); let mut funcs = vec![]; for func_view in try_func_views { @@ -68,5 +81,9 @@ pub async fn list_funcs( } } + funcs.sort_by(|a, b| a.name.cmp(&b.name)); + + info!("after list funcs: {:?}", start.elapsed()); + Ok(Json(ListFuncsResponse { funcs })) } diff --git a/lib/sdf-server/src/server/service/func/save_func.rs b/lib/sdf-server/src/server/service/func/save_func.rs index 658281ae8a..315feb4376 100644 --- a/lib/sdf-server/src/server/service/func/save_func.rs +++ b/lib/sdf-server/src/server/service/func/save_func.rs @@ -1,28 +1,18 @@ -use std::collections::HashSet; - use axum::extract::OriginalUri; use axum::{response::IntoResponse, Json}; -use serde::{Deserialize, Serialize}; - -use dal::authentication_prototype::{AuthenticationPrototype, AuthenticationPrototypeContext}; +use base64::{engine::general_purpose, Engine}; +use dal::FuncBackendResponseType; use dal::{ - attribute::context::AttributeContextBuilder, - func::argument::FuncArgument, - schema::variant::leaves::{LeafInputLocation, LeafKind}, - ActionKind, ActionPrototype, ActionPrototypeContext, AttributeContext, AttributePrototype, - AttributePrototypeArgument, AttributePrototypeId, AttributeValue, ChangeSet, Component, - ComponentId, DalContext, Func, FuncBackendKind, FuncBinding, FuncId, InternalProviderId, Prop, - SchemaVariantId, StandardModel, Visibility, WsEvent, + func::argument::FuncArgument, ChangeSet, DalContext, Func, FuncBackendKind, FuncId, Visibility, }; -use dal::{FuncBackendResponseType, PropKind, SchemaVariant}; +use serde::{Deserialize, Serialize}; +use std::collections::HashSet; +use telemetry::prelude::*; +use super::{FuncArgumentView, FuncAssociations, FuncResult}; use crate::server::extract::{AccessBuilder, HandlerContext, PosthogClient}; use crate::server::tracking::track; - -use super::{ - AttributePrototypeArgumentView, AttributePrototypeView, FuncArgumentView, FuncAssociations, - FuncError, FuncResult, -}; +use crate::service::func::FuncError; 
#[derive(Deserialize, Serialize, Debug)] #[serde(rename_all = "camelCase")] @@ -46,86 +36,86 @@ pub struct SaveFuncResponse { pub types: String, } -async fn save_attr_func_proto_arguments( - ctx: &DalContext, - proto: &AttributePrototype, - arguments: Vec, - create_all: bool, -) -> FuncResult<()> { - let mut id_set = HashSet::new(); - - if create_all { - for mut proto_arg in - AttributePrototypeArgument::list_for_attribute_prototype(ctx, *proto.id()).await? - { - proto_arg.delete_by_id(ctx).await?; - } - } - - for arg in &arguments { - if let Some(arg_id) = arg.id { - let proto_arg = if arg_id.is_none() || create_all { - match arg.internal_provider_id { - Some(internal_provider_id) => Some( - AttributePrototypeArgument::new_for_intra_component( - ctx, - *proto.id(), - arg.func_argument_id, - internal_provider_id, - ) - .await?, - ), - None => None, // This should probably be an error - } - } else { - Some( - AttributePrototypeArgument::get_by_id(ctx, &arg_id) - .await? - .ok_or_else(|| { - FuncError::AttributePrototypeMissingArgument(*proto.id(), arg_id) - })?, - ) - }; - - if let Some(mut proto_arg) = proto_arg { - if proto_arg.attribute_prototype_id() != *proto.id() { - proto_arg - .set_attribute_prototype_id(ctx, *proto.id()) - .await?; - } - - if let Some(internal_provider_id) = arg.internal_provider_id { - if internal_provider_id != proto_arg.internal_provider_id() { - proto_arg - .set_internal_provider_id_safe(ctx, internal_provider_id) - .await?; - } - } - - let proto_arg_id = *proto_arg.id(); - id_set.insert(proto_arg_id); - } - } else if let Some(internal_provider_id) = arg.internal_provider_id { - AttributePrototypeArgument::new_for_intra_component( - ctx, - *proto.id(), - arg.func_argument_id, - internal_provider_id, - ) - .await?; - } // else condition should be error here? (saving an arg that has no internal provider id) - } - - for mut proto_arg in - AttributePrototypeArgument::list_for_attribute_prototype(ctx, *proto.id()).await? 
- { - if !id_set.contains(proto_arg.id()) { - proto_arg.delete_by_id(ctx).await?; - } - } - - Ok(()) -} +//async fn save_attr_func_proto_arguments( +// ctx: &DalContext, +// proto: &AttributePrototype, +// arguments: Vec, +// create_all: bool, +//) -> FuncResult<()> { +// let mut id_set = HashSet::new(); +// +// if create_all { +// for mut proto_arg in +// AttributePrototypeArgument::list_for_attribute_prototype(ctx, *proto.id()).await? +// { +// proto_arg.delete_by_id(ctx).await?; +// } +// } +// +// for arg in &arguments { +// if let Some(arg_id) = arg.id { +// let proto_arg = if arg_id.is_none() || create_all { +// match arg.internal_provider_id { +// Some(internal_provider_id) => Some( +// AttributePrototypeArgument::new_for_intra_component( +// ctx, +// *proto.id(), +// arg.func_argument_id, +// internal_provider_id, +// ) +// .await?, +// ), +// None => None, // This should probably be an error +// } +// } else { +// Some( +// AttributePrototypeArgument::get_by_id(ctx, &arg_id) +// .await? +// .ok_or_else(|| { +// FuncError::AttributePrototypeMissingArgument(*proto.id(), arg_id) +// })?, +// ) +// }; +// +// if let Some(mut proto_arg) = proto_arg { +// if proto_arg.attribute_prototype_id() != *proto.id() { +// proto_arg +// .set_attribute_prototype_id(ctx, *proto.id()) +// .await?; +// } +// +// if let Some(internal_provider_id) = arg.internal_provider_id { +// if internal_provider_id != proto_arg.internal_provider_id() { +// proto_arg +// .set_internal_provider_id_safe(ctx, internal_provider_id) +// .await?; +// } +// } +// +// let proto_arg_id = *proto_arg.id(); +// id_set.insert(proto_arg_id); +// } +// } else if let Some(internal_provider_id) = arg.internal_provider_id { +// AttributePrototypeArgument::new_for_intra_component( +// ctx, +// *proto.id(), +// arg.func_argument_id, +// internal_provider_id, +// ) +// .await?; +// } // else condition should be error here? 
(saving an arg that has no internal provider id) +// } +// +// for mut proto_arg in +// AttributePrototypeArgument::list_for_attribute_prototype(ctx, *proto.id()).await? +// { +// if !id_set.contains(proto_arg.id()) { +// proto_arg.delete_by_id(ctx).await?; +// } +// } +// +// Ok(()) +//} /// Determines what we should do with the [`AttributePrototype`](dal::AttributePrototype) and /// [`AttributeValues`](dal::AttributeValue) that are currently associated with a function but @@ -135,302 +125,303 @@ async fn save_attr_func_proto_arguments( /// value using a builtin value function, like `si:setString`, etc. /// /// `RemovedPrototypeOp::Delete` deletes the prototype and its values. +#[allow(dead_code)] #[remain::sorted] enum RemovedPrototypeOp { Delete, Reset, } -async fn save_attr_func_prototypes( - ctx: &DalContext, - func: &Func, - prototypes: Vec, - removed_protoype_op: RemovedPrototypeOp, - key: Option, -) -> FuncResult { - let mut id_set = HashSet::new(); - let mut prop_kind: Option = None; - let mut computed_backend_response_type = *func.backend_response_type(); - - for proto_view in prototypes { - let context = proto_view.to_attribute_context()?; - - let (mut existing_value_proto, need_to_create) = - match AttributePrototype::find_for_context_and_key(ctx, context, &key) - .await? - .pop() - { - Some(existing_proto) => (existing_proto, false), - None => { - let mut context_builder = AttributeContextBuilder::new(); - - if let Some(prop_id) = proto_view.prop_id { - context_builder.set_prop_id(prop_id); - } - - if let Some(external_provider_id) = proto_view.external_provider_id { - context_builder.set_external_provider_id(external_provider_id); - } - - let default_value_context = context_builder.to_context()?; - - ( - AttributePrototype::find_for_context_and_key( - ctx, - default_value_context, - &key, - ) - .await? 
- .pop() - .ok_or(FuncError::AttributePrototypeMissing)?, - true, - ) - } - }; - - let proto = if !need_to_create { - existing_value_proto.set_func_id(ctx, *func.id()).await?; - existing_value_proto - } else { - let existing_value = existing_value_proto - .attribute_values(ctx) - .await? - .pop() - .ok_or(FuncError::AttributeValueMissing)?; - - let maybe_parent_attribute_value = existing_value.parent_attribute_value(ctx).await?; - - let (mut func_binding, fbrv) = FuncBinding::create_with_existing_value( - ctx, - serde_json::json!({}), - existing_value.get_value(ctx).await?, - *func.id(), - ) - .await?; - - // Clear out the function sha so we know to execute this on the first run in - // AttributeValue::update_from_prototype_function - func_binding.set_code_sha256(ctx, "0").await?; - - AttributePrototype::new( - ctx, - *func.id(), - *func_binding.id(), - *fbrv.id(), - context, - key.clone(), - maybe_parent_attribute_value.map(|mpav| *mpav.id()), - ) - .await? - }; - - id_set.insert(*proto.id()); - - if proto.context.prop_id().is_some() { - let prop = Prop::get_by_id(ctx, &proto.context.prop_id()) - .await? - .ok_or(FuncError::PropNotFound)?; - if let Some(prop_kind) = prop_kind { - if prop_kind != *prop.kind() { - return Err(FuncError::FuncDestinationPropKindMismatch); - } - } else { - prop_kind = Some(*prop.kind()); - } - - if matches!( - computed_backend_response_type, - FuncBackendResponseType::Json - ) { - return Err(FuncError::FuncDestinationPropAndOutputSocket); - } - - computed_backend_response_type = (*prop.kind()).into(); - } else if proto.context.external_provider_id().is_some() { - // External and internal providers do not have types yet -- so we set functions that - // set them to Json, However, some builtins have expressed their type concretely - // already, so we should continue to use that type to prevent mutation of the function - // itself. 
A new function will have an Unset response type, however (until it is bound) - if prop_kind.is_some() { - return Err(FuncError::FuncDestinationPropAndOutputSocket); - } - - if matches!( - computed_backend_response_type, - FuncBackendResponseType::Unset, - ) { - computed_backend_response_type = FuncBackendResponseType::Json; - } - } - - save_attr_func_proto_arguments(ctx, &proto, proto_view.prototype_arguments, need_to_create) - .await?; - } - - // TODO: should use a custom query to fetch for *not in* id_set only - for proto in AttributePrototype::find_for_func(ctx, func.id()).await? { - if !id_set.contains(proto.id()) { - match removed_protoype_op { - RemovedPrototypeOp::Reset => { - reset_prototype_and_value_to_intrinsic_function(ctx, &proto, proto.context) - .await? - } - RemovedPrototypeOp::Delete => { - AttributePrototype::remove(ctx, proto.id(), false).await? - } - } - } - } - - // Unset response type if all bindings removed - if id_set.is_empty() { - computed_backend_response_type = FuncBackendResponseType::Unset; - } - - Ok(computed_backend_response_type) -} - -async fn attribute_view_for_leaf_func( - ctx: &DalContext, - func: &Func, - schema_variant_id: SchemaVariantId, - component_id: Option, - inputs: &[LeafInputLocation], - leaf_kind: LeafKind, -) -> FuncResult { - let existing_proto = SchemaVariant::upsert_leaf_function( - ctx, - schema_variant_id, - component_id, - leaf_kind, - inputs, - func, - ) - .await?; - - let mut prototype_view = AttributePrototypeView { - id: AttributePrototypeId::NONE, - component_id, - prop_id: if existing_proto.context.prop_id().is_some() { - Some(existing_proto.context.prop_id()) - } else { - None - }, - external_provider_id: if existing_proto.context.external_provider_id().is_some() { - Some(existing_proto.context.external_provider_id()) - } else { - None - }, - prototype_arguments: vec![], - }; - - let arguments = - FuncArgument::list_for_func_with_prototype_arguments(ctx, *func.id(), *existing_proto.id()) - .await?; - - 
let mut argument_views = vec![]; - - for (func_argument, maybe_proto_arg) in arguments { - let proto_arg = maybe_proto_arg.ok_or_else(|| { - FuncError::FuncArgumentMissingPrototypeArgument( - *func_argument.id(), - *existing_proto.id(), - ) - })?; - - if proto_arg.internal_provider_id() == InternalProviderId::NONE { - return Err(FuncError::AttributePrototypeMissingInternalProviderId( - *proto_arg.id(), - )); - } - - argument_views.push(AttributePrototypeArgumentView { - func_argument_id: *func_argument.id(), - func_argument_name: Some(func_argument.name().to_owned()), - id: Some(*proto_arg.id()), - internal_provider_id: Some(proto_arg.internal_provider_id()), - }); - } - - prototype_view.id = *existing_proto.id(); - prototype_view.prototype_arguments = argument_views; - - Ok(prototype_view) -} - -async fn save_leaf_prototypes( - ctx: &DalContext, - func: &Func, - schema_variant_ids: Vec, - component_ids: Vec, - inputs: &[LeafInputLocation], - leaf_kind: LeafKind, -) -> FuncResult<()> { - let mut attribute_views = vec![]; - - for schema_variant_id in schema_variant_ids { - attribute_views.push( - attribute_view_for_leaf_func(ctx, func, schema_variant_id, None, inputs, leaf_kind) - .await?, - ); - } - - for component_id in component_ids { - let schema_variant_id = Component::schema_variant_id(ctx, component_id).await?; - - attribute_views.push( - attribute_view_for_leaf_func( - ctx, - func, - schema_variant_id, - Some(component_id), - inputs, - leaf_kind, - ) - .await?, - ); - } - - let key = Some(func.name().to_string()); - - save_attr_func_prototypes(ctx, func, attribute_views, RemovedPrototypeOp::Delete, key).await?; - - Ok(()) -} - -async fn reset_prototype_and_value_to_intrinsic_function( - ctx: &DalContext, - proto: &AttributePrototype, - context: AttributeContext, -) -> FuncResult<()> { - let existing_value = proto - .attribute_values(ctx) - .await? 
- .pop() - .ok_or(FuncError::AttributeValueMissing)?; - - let maybe_parent_attribute_value = existing_value.parent_attribute_value(ctx).await?; - let value_value = existing_value.get_value(ctx).await?; - - for mut proto_arg in - AttributePrototypeArgument::list_for_attribute_prototype(ctx, *proto.id()).await? - { - proto_arg.delete_by_id(ctx).await?; - } - - // This should reset the prototype to a builtin value function - AttributeValue::update_for_context( - ctx, - *existing_value.id(), - maybe_parent_attribute_value.map(|pav| *pav.id()), - context, - value_value, - proto.key().map(|key| key.to_string()), - ) - .await?; - - Ok(()) -} +//async fn save_attr_func_prototypes( +// ctx: &DalContext, +// func: &Func, +// prototypes: Vec, +// removed_protoype_op: RemovedPrototypeOp, +// key: Option, +//) -> FuncResult { +// let mut id_set = HashSet::new(); +// let mut prop_kind: Option = None; +// let mut computed_backend_response_type = *func.backend_response_type(); +// +// for proto_view in prototypes { +// let context = proto_view.to_attribute_context()?; +// +// let (mut existing_value_proto, need_to_create) = +// match AttributePrototype::find_for_context_and_key(ctx, context, &key) +// .await? +// .pop() +// { +// Some(existing_proto) => (existing_proto, false), +// None => { +// let mut context_builder = AttributeContextBuilder::new(); +// +// if let Some(prop_id) = proto_view.prop_id { +// context_builder.set_prop_id(prop_id); +// } +// +// if let Some(external_provider_id) = proto_view.external_provider_id { +// context_builder.set_external_provider_id(external_provider_id); +// } +// +// let default_value_context = context_builder.to_context()?; +// +// ( +// AttributePrototype::find_for_context_and_key( +// ctx, +// default_value_context, +// &key, +// ) +// .await? 
+// .pop() +// .ok_or(FuncError::AttributePrototypeMissing)?, +// true, +// ) +// } +// }; +// +// let proto = if !need_to_create { +// existing_value_proto.set_func_id(ctx, *func.id()).await?; +// existing_value_proto +// } else { +// let existing_value = existing_value_proto +// .attribute_values(ctx) +// .await? +// .pop() +// .ok_or(FuncError::AttributeValueMissing)?; +// +// let maybe_parent_attribute_value = existing_value.parent_attribute_value(ctx).await?; +// +// let (mut func_binding, fbrv) = FuncBinding::create_with_existing_value( +// ctx, +// serde_json::json!({}), +// existing_value.get_value(ctx).await?, +// *func.id(), +// ) +// .await?; +// +// // Clear out the function sha so we know to execute this on the first run in +// // AttributeValue::update_from_prototype_function +// func_binding.set_code_sha256(ctx, "0").await?; +// +// AttributePrototype::new( +// ctx, +// *func.id(), +// *func_binding.id(), +// *fbrv.id(), +// context, +// key.clone(), +// maybe_parent_attribute_value.map(|mpav| *mpav.id()), +// ) +// .await? +// }; +// +// id_set.insert(*proto.id()); +// +// if proto.context.prop_id().is_some() { +// let prop = Prop::get_by_id(ctx, &proto.context.prop_id()) +// .await? 
+// .ok_or(FuncError::PropNotFound)?; +// if let Some(prop_kind) = prop_kind { +// if prop_kind != *prop.kind() { +// return Err(FuncError::FuncDestinationPropKindMismatch); +// } +// } else { +// prop_kind = Some(*prop.kind()); +// } +// +// if matches!( +// computed_backend_response_type, +// FuncBackendResponseType::Json +// ) { +// return Err(FuncError::FuncDestinationPropAndOutputSocket); +// } +// +// computed_backend_response_type = (*prop.kind()).into(); +// } else if proto.context.external_provider_id().is_some() { +// // External and internal providers do not have types yet -- so we set functions that +// // set them to Json, However, some builtins have expressed their type concretely +// // already, so we should continue to use that type to prevent mutation of the function +// // itself. A new function will have an Unset response type, however (until it is bound) +// if prop_kind.is_some() { +// return Err(FuncError::FuncDestinationPropAndOutputSocket); +// } +// +// if matches!( +// computed_backend_response_type, +// FuncBackendResponseType::Unset, +// ) { +// computed_backend_response_type = FuncBackendResponseType::Json; +// } +// } +// +// save_attr_func_proto_arguments(ctx, &proto, proto_view.prototype_arguments, need_to_create) +// .await?; +// } +// +// // TODO: should use a custom query to fetch for *not in* id_set only +// for proto in AttributePrototype::find_for_func(ctx, func.id()).await? { +// if !id_set.contains(proto.id()) { +// match removed_protoype_op { +// RemovedPrototypeOp::Reset => { +// reset_prototype_and_value_to_intrinsic_function(ctx, &proto, proto.context) +// .await? +// } +// RemovedPrototypeOp::Delete => { +// AttributePrototype::remove(ctx, proto.id(), false).await? 
+// } +// } +// } +// } +// +// // Unset response type if all bindings removed +// if id_set.is_empty() { +// computed_backend_response_type = FuncBackendResponseType::Unset; +// } +// +// Ok(computed_backend_response_type) +//} +// +//async fn attribute_view_for_leaf_func( +// ctx: &DalContext, +// func: &Func, +// schema_variant_id: SchemaVariantId, +// component_id: Option, +// inputs: &[LeafInputLocation], +// leaf_kind: LeafKind, +//) -> FuncResult { +// let existing_proto = SchemaVariant::upsert_leaf_function( +// ctx, +// schema_variant_id, +// component_id, +// leaf_kind, +// inputs, +// func, +// ) +// .await?; +// +// let mut prototype_view = AttributePrototypeView { +// id: AttributePrototypeId::NONE, +// component_id, +// prop_id: if existing_proto.context.prop_id().is_some() { +// Some(existing_proto.context.prop_id()) +// } else { +// None +// }, +// external_provider_id: if existing_proto.context.external_provider_id().is_some() { +// Some(existing_proto.context.external_provider_id()) +// } else { +// None +// }, +// prototype_arguments: vec![], +// }; +// +// let arguments = +// FuncArgument::list_for_func_with_prototype_arguments(ctx, *func.id(), *existing_proto.id()) +// .await?; +// +// let mut argument_views = vec![]; +// +// for (func_argument, maybe_proto_arg) in arguments { +// let proto_arg = maybe_proto_arg.ok_or_else(|| { +// FuncError::FuncArgumentMissingPrototypeArgument( +// *func_argument.id(), +// *existing_proto.id(), +// ) +// })?; +// +// if proto_arg.internal_provider_id() == InternalProviderId::NONE { +// return Err(FuncError::AttributePrototypeMissingInternalProviderId( +// *proto_arg.id(), +// )); +// } +// +// argument_views.push(AttributePrototypeArgumentView { +// func_argument_id: *func_argument.id(), +// func_argument_name: Some(func_argument.name().to_owned()), +// id: Some(*proto_arg.id()), +// internal_provider_id: Some(proto_arg.internal_provider_id()), +// }); +// } +// +// prototype_view.id = *existing_proto.id(); 
+// prototype_view.prototype_arguments = argument_views; +// +// Ok(prototype_view) +//} +// +//async fn save_leaf_prototypes( +// ctx: &DalContext, +// func: &Func, +// schema_variant_ids: Vec, +// component_ids: Vec, +// inputs: &[LeafInputLocation], +// leaf_kind: LeafKind, +//) -> FuncResult<()> { +// let mut attribute_views = vec![]; +// +// for schema_variant_id in schema_variant_ids { +// attribute_views.push( +// attribute_view_for_leaf_func(ctx, func, schema_variant_id, None, inputs, leaf_kind) +// .await?, +// ); +// } +// +// for component_id in component_ids { +// let schema_variant_id = Component::schema_variant_id(ctx, component_id).await?; +// +// attribute_views.push( +// attribute_view_for_leaf_func( +// ctx, +// func, +// schema_variant_id, +// Some(component_id), +// inputs, +// leaf_kind, +// ) +// .await?, +// ); +// } +// +// let key = Some(func.name().to_string()); +// +// save_attr_func_prototypes(ctx, func, attribute_views, RemovedPrototypeOp::Delete, key).await?; +// +// Ok(()) +//} +// +//async fn reset_prototype_and_value_to_intrinsic_function( +// ctx: &DalContext, +// proto: &AttributePrototype, +// context: AttributeContext, +//) -> FuncResult<()> { +// let existing_value = proto +// .attribute_values(ctx) +// .await? +// .pop() +// .ok_or(FuncError::AttributeValueMissing)?; +// +// let maybe_parent_attribute_value = existing_value.parent_attribute_value(ctx).await?; +// let value_value = existing_value.get_value(ctx).await?; +// +// for mut proto_arg in +// AttributePrototypeArgument::list_for_attribute_prototype(ctx, *proto.id()).await? 
+// { +// proto_arg.delete_by_id(ctx).await?; +// } +// +// // This should reset the prototype to a builtin value function +// AttributeValue::update_for_context( +// ctx, +// *existing_value.id(), +// maybe_parent_attribute_value.map(|pav| *pav.id()), +// context, +// value_value, +// proto.key().map(|key| key.to_string()), +// ) +// .await?; +// +// Ok(()) +//} async fn save_attr_func_arguments( ctx: &DalContext, @@ -441,214 +432,283 @@ async fn save_attr_func_arguments( for arg in &arguments { let arg_id = if arg.id.is_some() { id_set.insert(arg.id); - let mut existing = FuncArgument::get_by_id(ctx, &arg.id) - .await? - .ok_or(FuncError::FuncArgNotFound)?; - existing.set_name(ctx, &arg.name).await?; - existing.set_kind(ctx, arg.kind).await?; - existing.set_element_kind(ctx, arg.element_kind).await?; - *existing.id() + FuncArgument::modify_by_id(ctx, arg.id, |existing_arg| { + existing_arg.name = arg.name.to_owned(); + existing_arg.kind = arg.kind; + existing_arg.element_kind = arg.element_kind; + + Ok(()) + }) + .await?; + + arg.id } else { let new_arg = - FuncArgument::new(ctx, &arg.name, arg.kind, arg.element_kind, *func.id()).await?; - *new_arg.id() + FuncArgument::new(ctx, &arg.name, arg.kind, arg.element_kind, func.id).await?; + new_arg.id }; id_set.insert(arg_id); } - for func_arg in FuncArgument::list_for_func(ctx, *func.id()).await? { - if !id_set.contains(func_arg.id()) { - FuncArgument::remove(ctx, func_arg.id()).await?; + for func_arg in FuncArgument::list_for_func(ctx, func.id).await? 
{ + if !id_set.contains(&func_arg.id) { + info!("should remove func arg: {:?}", func_arg.id); + FuncArgument::remove(ctx, func_arg.id).await?; } } Ok(()) } -async fn save_action_func_prototypes( - ctx: &DalContext, - func: &Func, - kind: Option, - schema_variant_ids: Vec, -) -> FuncResult<()> { - let mut id_set = HashSet::new(); - - let kind = match kind { - Some(kind) => kind, - None => { - if !schema_variant_ids.is_empty() { - return Err(FuncError::ActionKindMissing(*func.id())); - } - - ActionKind::Other - } - }; - - for schema_variant_id in schema_variant_ids { - let context = ActionPrototypeContext { schema_variant_id }; - - let proto = match ActionPrototype::find_for_context_and_func(ctx, context, *func.id()) - .await? - .pop() - { - Some(mut existing_proto) => { - existing_proto.set_func_id(ctx, *func.id()).await?; - existing_proto.set_kind_checked(ctx, kind).await?; - existing_proto - } - None => ActionPrototype::new(ctx, *func.id(), kind, context).await?, - }; - - id_set.insert(*proto.id()); - } - - for mut proto in ActionPrototype::find_for_func(ctx, *func.id()).await? { - if !id_set.contains(proto.id()) { - proto.delete_by_id(ctx).await?; - } - } - - Ok(()) -} +//async fn save_action_func_prototypes( +// ctx: &DalContext, +// func: &Func, +// kind: Option, +// schema_variant_ids: Vec, +//) -> FuncResult<()> { +// let mut id_set = HashSet::new(); +// +// let kind = match kind { +// Some(kind) => kind, +// None => { +// if !schema_variant_ids.is_empty() { +// return Err(FuncError::ActionKindMissing(*func.id())); +// } +// +// ActionKind::Other +// } +// }; +// +// for schema_variant_id in schema_variant_ids { +// let context = ActionPrototypeContext { schema_variant_id }; +// +// let proto = match ActionPrototype::find_for_context_and_func(ctx, context, *func.id()) +// .await? 
+// .pop() +// { +// Some(mut existing_proto) => { +// existing_proto.set_func_id(ctx, *func.id()).await?; +// existing_proto.set_kind_checked(ctx, kind).await?; +// existing_proto +// } +// None => ActionPrototype::new(ctx, *func.id(), kind, context).await?, +// }; +// +// id_set.insert(*proto.id()); +// } +// +// for mut proto in ActionPrototype::find_for_func(ctx, *func.id()).await? { +// if !id_set.contains(proto.id()) { +// proto.delete_by_id(ctx).await?; +// } +// } +// +// Ok(()) +//} +// +//async fn save_validation_func_prototypes( +// ctx: &DalContext, +// func: &Func, +// prototypes: Vec, +//) -> FuncResult<()> { +// let mut id_set = HashSet::new(); +// +// for proto_view in prototypes { +// let mut context = ValidationPrototypeContext::builder(); +// let schema_id = *SchemaVariant::get_by_id(ctx, &proto_view.schema_variant_id) +// .await? +// .ok_or(FuncError::ValidationPrototypeMissingSchemaVariant( +// proto_view.schema_variant_id, +// ))? +// .schema(ctx) +// .await? +// .ok_or(FuncError::ValidationPrototypeMissingSchema)? +// .id(); +// +// let context = context +// .set_prop_id(proto_view.prop_id) +// .set_schema_variant_id(proto_view.schema_variant_id) +// .set_schema_id(schema_id) +// .to_context(ctx) +// .await?; +// +// let proto = match ValidationPrototype::find_for_context(ctx, context.clone()) +// .await? +// .pop() +// { +// Some(mut existing_proto) => { +// existing_proto.set_func_id(ctx, *func.id()).await?; +// existing_proto +// } +// None => { +// ValidationPrototype::new(ctx, *func.id(), serde_json::json!(null), context).await? +// } +// }; +// +// id_set.insert(*proto.id()); +// } +// +// for proto in ValidationPrototype::list_for_func(ctx, *func.id()).await? { +// if !id_set.contains(proto.id()) { +// if let Some(mut proto) = ValidationPrototype::get_by_id(ctx, proto.id()).await? 
{ +// proto.delete_by_id(ctx).await?; +// } +// } +// } +// +// Ok(()) +//} pub async fn do_save_func( ctx: &DalContext, request: SaveFuncRequest, ) -> FuncResult<(SaveFuncResponse, Func)> { - let mut func = Func::get_by_id(ctx, &request.id) - .await? - .ok_or(FuncError::FuncNotFound)?; + let func = Func::get_by_id(ctx, request.id).await?; // Don't modify builtins, or for other tenancies - if !ctx.check_tenancy(&func).await? { - return Err(FuncError::NotWritable); - } - - func.set_display_name(ctx, request.display_name).await?; - func.set_name(ctx, request.name).await?; - func.set_description(ctx, request.description).await?; - func.set_code_plaintext(ctx, request.code.as_deref()) - .await?; + // if !ctx.check_tenancy(&func).await? { + // return Err(FuncError::NotWritable); + // } + // + + Func::modify_by_id(ctx, func.id, |func| { + func.display_name = request.display_name.to_owned(); + func.name = request.name.to_owned(); + func.description = request.description.to_owned(); + func.code_base64 = request + .code + .as_ref() + .map(|code| general_purpose::STANDARD_NO_PAD.encode(code)); + + Ok(()) + }) + .await?; - match func.backend_kind() { - FuncBackendKind::JsAction => { - if let Some(FuncAssociations::Action { - schema_variant_ids, - kind, - }) = request.associations - { - save_action_func_prototypes(ctx, &func, kind, schema_variant_ids).await?; - } - } - FuncBackendKind::JsAttribute => match func.backend_response_type() { + #[allow(clippy::single_match)] + match func.backend_kind { + // FuncBackendKind::JsAction => { + // if let Some(FuncAssociations::Action { + // schema_variant_ids, + // kind, + // }) = request.associations + // { + // save_action_func_prototypes(ctx, &func, kind, schema_variant_ids).await?; + // } + // } + // FuncBackendKind::JsValidation => { + // if let Some(FuncAssociations::Validation { prototypes }) = request.associations { + // save_validation_func_prototypes(ctx, &func, prototypes).await?; + // } + // } + FuncBackendKind::JsAttribute 
=> match func.backend_response_type { FuncBackendResponseType::CodeGeneration => { - if let Some(FuncAssociations::CodeGeneration { - schema_variant_ids, - component_ids, - inputs, - }) = request.associations - { - save_leaf_prototypes( - ctx, - &func, - schema_variant_ids, - component_ids, - &inputs, - LeafKind::CodeGeneration, - ) - .await?; - } + // if let Some(FuncAssociations::CodeGeneration { + // schema_variant_ids, + // component_ids, + // inputs, + // }) = request.associations + // { + // save_leaf_prototypes( + // ctx, + // &func, + // schema_variant_ids, + // component_ids, + // &inputs, + // LeafKind::CodeGeneration, + // ) + // .await?; + // } } FuncBackendResponseType::Qualification => { - if let Some(FuncAssociations::Qualification { - schema_variant_ids, - component_ids, - inputs, - }) = request.associations - { - save_leaf_prototypes( - ctx, - &func, - schema_variant_ids, - component_ids, - &inputs, - LeafKind::Qualification, - ) - .await?; - } + // if let Some(FuncAssociations::Qualification { + // schema_variant_ids, + // component_ids, + // inputs, + // }) = request.associations + // { + // save_leaf_prototypes( + // ctx, + // &func, + // schema_variant_ids, + // component_ids, + // &inputs, + // LeafKind::Qualification, + // ) + // .await?; + // } } _ => { if let Some(FuncAssociations::Attribute { - prototypes, + prototypes: _, arguments, }) = request.associations { - let backend_response_type = save_attr_func_prototypes( - ctx, - &func, - prototypes, - RemovedPrototypeOp::Reset, - None, - ) - .await?; + // let backend_response_type = save_attr_func_prototypes( + // ctx, + // &func, + // prototypes, + // RemovedPrototypeOp::Reset, + // None, + // ) + // .await?; save_attr_func_arguments(ctx, &func, arguments).await?; - func.set_backend_response_type(ctx, backend_response_type) - .await?; + // func.set_backend_response_type(ctx, backend_response_type) + // .await?; } } }, - FuncBackendKind::JsAuthentication => { - if let 
Some(FuncAssociations::Authentication { schema_variant_ids }) = - request.associations - { - let mut id_set = HashSet::new(); - - for schema_variant_id in schema_variant_ids { - let proto_context = AuthenticationPrototypeContext { schema_variant_id }; - - let proto = match AuthenticationPrototype::find_for_context_and_func( - ctx, - &proto_context, - *func.id(), - ) - .await? - .pop() - { - None => { - AuthenticationPrototype::new(ctx, *func.id(), proto_context).await? - } - Some(existing_proto) => existing_proto, - }; - - id_set.insert(*proto.id()); - } - - for mut proto in AuthenticationPrototype::find_for_func(ctx, *func.id()).await? { - if !id_set.contains(proto.id()) { - proto.delete_by_id(ctx).await?; - } - } - } - } - FuncBackendKind::Array - | FuncBackendKind::Boolean - | FuncBackendKind::Diff - | FuncBackendKind::Identity - | FuncBackendKind::Integer - | FuncBackendKind::JsReconciliation - | FuncBackendKind::JsSchemaVariantDefinition - | FuncBackendKind::Map - | FuncBackendKind::Object - | FuncBackendKind::String - | FuncBackendKind::Unset - | FuncBackendKind::Validation - | FuncBackendKind::JsValidation => return Err(FuncError::NotWritable), + // FuncBackendKind::JsAuthentication => { + // if let Some(FuncAssociations::Authentication { schema_variant_ids }) = + // request.associations + // { + // let mut id_set = HashSet::new(); + // + // for schema_variant_id in schema_variant_ids { + // let proto_context = AuthenticationPrototypeContext { schema_variant_id }; + // + // let proto = match AuthenticationPrototype::find_for_context_and_func( + // ctx, + // &proto_context, + // *func.id(), + // ) + // .await? + // .pop() + // { + // None => { + // AuthenticationPrototype::new(ctx, *func.id(), proto_context).await? + // } + // Some(existing_proto) => existing_proto, + // }; + // + // id_set.insert(*proto.id()); + // } + // + // for mut proto in AuthenticationPrototype::find_for_func(ctx, *func.id()).await? 
{ + // if !id_set.contains(proto.id()) { + // proto.delete_by_id(ctx).await?; + // } + // } + // } + // } + // FuncBackendKind::Array + // | FuncBackendKind::Boolean + // | FuncBackendKind::Diff + // | FuncBackendKind::Identity + // | FuncBackendKind::Integer + // | FuncBackendKind::JsReconciliation + // | FuncBackendKind::JsSchemaVariantDefinition + // | FuncBackendKind::Map + // | FuncBackendKind::Object + // | FuncBackendKind::String + // | FuncBackendKind::Unset + // | FuncBackendKind::Validation => return Err(FuncError::NotWritable), + _ => return Err(FuncError::NotWritable), } - let is_revertible = super::is_func_revertible(ctx, &func).await?; + let is_revertible = false; // super::is_func_revertible(ctx, &func).await?; let view = super::get_func_view(ctx, &func).await?; let associations = view.associations; let types = view.types; @@ -676,56 +736,49 @@ pub async fn save_func<'a>( let force_changeset_pk = ChangeSet::force_new(&mut ctx).await?; let request_id = request.id; - let request_associations = request.associations.clone(); + let _request_associations = request.associations.clone(); let (save_response, _) = do_save_func(&ctx, request).await?; - // Track - { - let func = Func::get_by_id(&ctx, &request_id) - .await? - .ok_or(FuncError::FuncNotFound)?; - - let (component_ids, schema_variant_ids) = match request_associations { - Some(FuncAssociations::Qualification { - component_ids, - schema_variant_ids, - .. - }) - | Some(FuncAssociations::CodeGeneration { - component_ids, - schema_variant_ids, - .. - }) => (component_ids, schema_variant_ids), - Some(FuncAssociations::Authentication { - schema_variant_ids, .. - }) => (vec![], schema_variant_ids), - - None - | Some(FuncAssociations::Action { .. }) - | Some(FuncAssociations::Attribute { .. }) - | Some(FuncAssociations::SchemaVariantDefinitions { .. }) - | Some(FuncAssociations::Validation { .. 
}) => (vec![], vec![]), - }; - - track( - &posthog_client, - &ctx, - &original_uri, - "save_func", - serde_json::json!({ - "func_id": func.id(), - "func_name": func.name(), - "func_variant": *func.backend_response_type(), - "func_is_builtin": func.builtin(), - "func_associated_with_schema_variant_ids": schema_variant_ids, - "func_associated_with_component_ids": component_ids, - }), - ); - WsEvent::func_saved(&ctx, *func.id()) - .await? - .publish_on_commit(&ctx) - .await?; - } + let func = Func::get_by_id(&ctx, request_id).await?; + + // //let (comp_associations, schema_associations) = + // let (component_ids, schema_variant_ids) = match request_associations { + // Some(FuncAssociations::Qualification { + // component_ids, + // schema_variant_ids, + // .. + // }) + // | Some(FuncAssociations::CodeGeneration { + // component_ids, + // schema_variant_ids, + // .. + // }) => (component_ids, schema_variant_ids), + // Some(FuncAssociations::Authentication { + // schema_variant_ids, .. + // }) => (vec![], schema_variant_ids), + + // None + // | Some(FuncAssociations::Action { .. }) + // | Some(FuncAssociations::Attribute { .. }) + // | Some(FuncAssociations::SchemaVariantDefinitions { .. }) + // | Some(FuncAssociations::Validation { .. 
}) => (vec![], vec![]), + // _ => (vec![], vec![]), + // }; + + track( + &posthog_client, + &ctx, + &original_uri, + "save_func", + serde_json::json!({ + "func_id": func.id, + "func_name": func.name.as_str(), + "func_variant": func.backend_response_type, + "func_is_builtin": func.builtin, + // "func_associated_with_schema_variant_ids": vec![], // schema_variant_ids, + // "func_associated_with_component_ids": vec![], // component_ids, + }), + ); ctx.commit().await?; diff --git a/lib/sdf-server/src/server/service/graphviz.rs b/lib/sdf-server/src/server/service/graphviz.rs new file mode 100644 index 0000000000..9b6bc01da6 --- /dev/null +++ b/lib/sdf-server/src/server/service/graphviz.rs @@ -0,0 +1,307 @@ +use std::collections::{HashMap, HashSet, VecDeque}; + +use axum::{extract::Query, response::Response, routing::get, Json, Router}; +use dal::{ + schema::variant::SchemaVariantError, + workspace_snapshot::{ + content_address::ContentAddressDiscriminants, + edge_weight::EdgeWeightKindDiscriminants, + node_weight::{NodeWeight, NodeWeightDiscriminants}, + WorkspaceSnapshotError, + }, + SchemaVariant, SchemaVariantId, TransactionsError, Visibility, +}; +use serde::{Deserialize, Serialize}; +use thiserror::Error; +use ulid::Ulid; + +use crate::server::{ + extract::{AccessBuilder, HandlerContext}, + impl_default_error_into_response, + state::AppState, +}; + +#[remain::sorted] +#[derive(Error, Debug)] +pub enum GraphVizError { + #[error(transparent)] + ContextTransaction(#[from] TransactionsError), + #[error("graph did not have a root node, although this is an unreachable state")] + NoRootNode, + #[error(transparent)] + SchemaVariant(#[from] SchemaVariantError), + #[error("could not acquire lock: {0}")] + TryLock(#[from] tokio::sync::TryLockError), + #[error("workspace snapshot error")] + WorkspaceSnapshot(#[from] WorkspaceSnapshotError), +} + +type GraphVizResult = Result; + +#[derive(Deserialize, Serialize, Debug)] +#[serde(rename_all = "camelCase")] +pub struct 
GraphVizRequest { + #[serde(flatten)] + pub visibility: Visibility, +} + +#[derive(Deserialize, Serialize, Debug)] +#[serde(rename_all = "camelCase")] +pub struct SchemaVariantVizRequest { + #[serde(flatten)] + pub visibility: Visibility, + pub schema_variant_id: SchemaVariantId, +} + +#[derive(Deserialize, Serialize, Debug)] +#[serde(rename_all = "camelCase")] +pub struct GraphVizNode { + id: Ulid, + content_kind: Option, + node_kind: NodeWeightDiscriminants, + name: Option, +} + +#[derive(Deserialize, Serialize, Debug)] +#[serde(rename_all = "camelCase")] +pub struct GraphVizEdge { + from: Ulid, + to: Ulid, +} + +#[derive(Deserialize, Serialize, Debug)] +#[serde(rename_all = "camelCase")] +pub struct GraphVizResponse { + pub nodes: Vec, + pub edges: Vec, + pub root_node_id: Ulid, +} + +pub async fn schema_variant( + HandlerContext(builder): HandlerContext, + AccessBuilder(request_ctx): AccessBuilder, + Query(request): Query, +) -> GraphVizResult> { + let ctx = builder.build(request_ctx.build(request.visibility)).await?; + + let mut func_nodes = vec![]; + let mut nodes = vec![]; + let mut edges = vec![]; + let mut added_nodes = HashSet::new(); + let mut added_edges = HashSet::new(); + let mut root_node_id: Option = None; + + let sv = SchemaVariant::get_by_id(&ctx, request.schema_variant_id).await?; + + let sv_node = { + let workspace_snapshot = ctx.workspace_snapshot()?.read().await; + let node_idx = workspace_snapshot.get_node_index_by_id(request.schema_variant_id)?; + let sv_node_weight = workspace_snapshot.get_node_weight(node_idx)?; + + added_nodes.insert(sv_node_weight.id()); + GraphVizNode { + id: sv_node_weight.id(), + content_kind: sv_node_weight.content_address_discriminants(), + node_kind: sv_node_weight.into(), + name: Some(sv.name().to_owned()), + } + }; + + nodes.push(sv_node); + + // descend + let mut work_queue: VecDeque = VecDeque::from([request.schema_variant_id.into()]); + while let Some(id) = work_queue.pop_front() { + let workspace_snapshot = 
ctx.workspace_snapshot()?.read().await; + for target in workspace_snapshot.all_outgoing_targets(id)? { + work_queue.push_back(target.id()); + if !added_edges.contains(&(id, target.id())) { + added_edges.insert((id, target.id())); + edges.push(GraphVizEdge { + from: id, + to: target.id(), + }); + } + let name = match &target { + NodeWeight::Category(inner) => Some(inner.kind().to_string()), + NodeWeight::Func(inner) => { + func_nodes.push(inner.id()); + Some(inner.name().to_owned()) + } + NodeWeight::Prop(inner) => Some(inner.name().to_owned()), + _ => None, + }; + + if !added_nodes.contains(&target.id()) { + added_nodes.insert(target.id()); + nodes.push(GraphVizNode { + id: target.id(), + content_kind: target.content_address_discriminants(), + node_kind: target.into(), + name, + }) + } + } + } + + // ascend + let mut work_queue: VecDeque = VecDeque::from([request.schema_variant_id.into()]); + while let Some(id) = work_queue.pop_front() { + let workspace_snapshot = ctx.workspace_snapshot()?.read().await; + let sources = workspace_snapshot.all_incoming_sources(id)?; + if sources.is_empty() { + root_node_id = Some(id); + continue; + } + + for source in sources { + work_queue.push_back(source.id()); + if !added_edges.contains(&(source.id(), id)) { + added_edges.insert((source.id(), id)); + edges.push(GraphVizEdge { + from: source.id(), + to: id, + }); + } + + let name = match &source { + NodeWeight::Category(inner) => Some(inner.kind().to_string()), + NodeWeight::Func(inner) => Some(inner.name().to_owned()), + NodeWeight::Prop(inner) => Some(inner.name().to_owned()), + _ => None, + }; + + if !added_nodes.contains(&source.id()) { + added_nodes.insert(source.id()); + nodes.push(GraphVizNode { + id: source.id(), + content_kind: source.content_address_discriminants(), + node_kind: source.into(), + name, + }) + } + } + } + + // connect func_nodes to root + for func_id in func_nodes { + let workspace_snapshot = ctx.workspace_snapshot()?.read().await; + for user_node_idx in 
workspace_snapshot + .incoming_sources_for_edge_weight_kind(func_id, EdgeWeightKindDiscriminants::Use)? + { + let user_node = workspace_snapshot + .get_node_weight(user_node_idx)? + .to_owned(); + + if let NodeWeight::Category(cat_inner) = &user_node { + let name = Some(cat_inner.kind().to_string()); + if !added_edges.contains(&(func_id, cat_inner.id())) { + added_edges.insert((func_id, cat_inner.id())); + edges.push(GraphVizEdge { + from: cat_inner.id(), + to: func_id, + }); + } + if !added_nodes.contains(&cat_inner.id()) { + added_nodes.insert(cat_inner.id()); + nodes.push(GraphVizNode { + id: cat_inner.id(), + content_kind: user_node.content_address_discriminants(), + node_kind: user_node.to_owned().into(), + name, + }) + } + for cat_user_node_idx in workspace_snapshot.incoming_sources_for_edge_weight_kind( + user_node.id(), + EdgeWeightKindDiscriminants::Use, + )? { + let node_weight = workspace_snapshot.get_node_weight(cat_user_node_idx)?; + match node_weight + .get_content_node_weight_of_kind(ContentAddressDiscriminants::Root) + { + Ok(root_content) => { + if !added_edges.contains(&(cat_inner.id(), root_content.id())) { + added_edges.insert((cat_inner.id(), root_content.id())); + edges.push(GraphVizEdge { + from: root_content.id(), + to: cat_inner.id(), + }); + } + } + _ => continue, + } + } + } + } + } + + let root_node_id = root_node_id.ok_or(GraphVizError::NoRootNode)?; + + Ok(Json(GraphVizResponse { + nodes, + edges, + root_node_id, + })) +} + +pub async fn nodes_edges( + HandlerContext(builder): HandlerContext, + AccessBuilder(request_ctx): AccessBuilder, + Query(request): Query, +) -> GraphVizResult> { + let ctx = builder.build(request_ctx.build(request.visibility)).await?; + + let workspace_snapshot = ctx.workspace_snapshot()?.read().await; + + let mut node_idx_to_id = HashMap::new(); + + let root_node_idx = workspace_snapshot.root()?; + + let nodes = workspace_snapshot + .nodes()? 
+ .map(|(weight, idx)| { + node_idx_to_id.insert(idx, weight.id()); + let name = match weight { + NodeWeight::Category(inner) => Some(inner.kind().to_string()), + NodeWeight::Func(inner) => Some(inner.name().to_owned()), + NodeWeight::Prop(inner) => Some(inner.name().to_owned()), + _ => None, + }; + GraphVizNode { + id: weight.id(), + content_kind: weight.content_address_discriminants(), + node_kind: weight.into(), + name, + } + }) + .collect(); + + let edges = workspace_snapshot + .edges()? + .filter_map( + |(_, from, to)| match (node_idx_to_id.get(&from), node_idx_to_id.get(&to)) { + (None, _) | (_, None) => None, + (Some(&from), Some(&to)) => Some(GraphVizEdge { from, to }), + }, + ) + .collect(); + + let response = GraphVizResponse { + nodes, + edges, + root_node_id: node_idx_to_id + .get(&root_node_idx) + .copied() + .ok_or(GraphVizError::NoRootNode)?, + }; + + Ok(Json(response)) +} + +impl_default_error_into_response!(GraphVizError); + +pub fn routes() -> Router { + Router::new() + .route("/schema_variant", get(schema_variant)) + .route("/nodes_edges", get(nodes_edges)) +} diff --git a/lib/sdf-server/src/server/service/qualification.rs b/lib/sdf-server/src/server/service/qualification.rs index 73b8345f10..7c32634b78 100644 --- a/lib/sdf-server/src/server/service/qualification.rs +++ b/lib/sdf-server/src/server/service/qualification.rs @@ -11,8 +11,8 @@ use thiserror::Error; use dal::{qualification::QualificationSummaryError, WsEventError}; use dal::{ - AttributeValueError, ComponentError, ComponentId, FuncError, FuncId, SchemaError, SchemaId, - StandardModelError, TenancyError, TransactionsError, + ComponentError, ComponentId, FuncId, SchemaError, SchemaId, StandardModelError, TenancyError, + TransactionsError, }; use crate::server::state::AppState; @@ -29,16 +29,12 @@ pub mod get_summary; #[remain::sorted] #[derive(Debug, Error)] pub enum QualificationError { - #[error("attribute value error: {0}")] - AttributeValue(#[from] AttributeValueError), 
#[error("base64 decode error: {0}")] Base64Decode(#[from] base64::DecodeError), #[error("component error: {0}")] Component(#[from] ComponentError), #[error("component not found: {0}")] ComponentNotFound(ComponentId), - #[error("func error: {0}")] - Func(#[from] FuncError), #[error("func code not found: {0}")] FuncCodeNotFound(FuncId), #[error("func not found")] diff --git a/lib/sdf-server/src/server/service/schema/create_schema.rs b/lib/sdf-server/src/server/service/schema/create_schema.rs index e24af1083d..9e951aa0e9 100644 --- a/lib/sdf-server/src/server/service/schema/create_schema.rs +++ b/lib/sdf-server/src/server/service/schema/create_schema.rs @@ -1,9 +1,11 @@ -use super::SchemaResult; -use crate::server::extract::{AccessBuilder, HandlerContext}; use axum::Json; -use dal::{component::ComponentKind, Schema, Visibility}; +use dal::ComponentKind; +use dal::{Schema, Visibility}; use serde::{Deserialize, Serialize}; +use super::SchemaResult; +use crate::server::extract::{AccessBuilder, HandlerContext}; + #[derive(Deserialize, Serialize, Debug)] #[serde(rename_all = "camelCase")] pub struct CreateSchemaRequest { @@ -26,7 +28,7 @@ pub async fn create_schema( ) -> SchemaResult> { let ctx = builder.build(request_ctx.build(request.visibility)).await?; - let schema = Schema::new(&ctx, &request.name, &ComponentKind::Standard).await?; + let schema = Schema::new(&ctx, &request.name, ComponentKind::Standard).await?; let response = CreateSchemaResponse { schema }; ctx.commit().await?; diff --git a/lib/sdf-server/src/server/service/schema/get_schema.rs b/lib/sdf-server/src/server/service/schema/get_schema.rs index 22bd145b64..c9d36e84d4 100644 --- a/lib/sdf-server/src/server/service/schema/get_schema.rs +++ b/lib/sdf-server/src/server/service/schema/get_schema.rs @@ -1,8 +1,8 @@ use axum::{extract::Query, Json}; -use dal::{Schema, SchemaId, StandardModel, Visibility}; +use dal::{Schema, SchemaId, Visibility}; use serde::{Deserialize, Serialize}; -use super::{SchemaError, 
SchemaResult}; +use super::SchemaResult; use crate::server::extract::{AccessBuilder, HandlerContext}; #[derive(Deserialize, Serialize, Debug)] @@ -22,9 +22,7 @@ pub async fn get_schema( ) -> SchemaResult> { let ctx = builder.build(request_ctx.build(request.visibility)).await?; - let response = Schema::get_by_id(&ctx, &request.schema_id) - .await? - .ok_or(SchemaError::SchemaNotFound)?; + let response = Schema::get_by_id(&ctx, request.schema_id).await?; Ok(Json(response)) } diff --git a/lib/sdf-server/src/server/service/schema/list_schemas.rs b/lib/sdf-server/src/server/service/schema/list_schemas.rs index 5039560cb0..9268a20c5e 100644 --- a/lib/sdf-server/src/server/service/schema/list_schemas.rs +++ b/lib/sdf-server/src/server/service/schema/list_schemas.rs @@ -1,6 +1,6 @@ use axum::extract::Query; use axum::Json; -use dal::{Schema, StandardModel, Visibility}; +use dal::{Schema, Visibility}; use serde::{Deserialize, Serialize}; use super::SchemaResult; diff --git a/lib/sdf-server/src/server/service/secret.rs b/lib/sdf-server/src/server/service/secret.rs index c8bb398ea7..ab2a43cbc0 100644 --- a/lib/sdf-server/src/server/service/secret.rs +++ b/lib/sdf-server/src/server/service/secret.rs @@ -1,12 +1,13 @@ +use axum::routing::{get, patch}; use axum::{ http::StatusCode, response::{IntoResponse, Response}, - routing::{get, patch, post}, + routing::post, Json, Router, }; use dal::{ - ChangeSetError, DiagramError, KeyPairError, SecretId, StandardModelError, TransactionsError, - UserError, WorkspacePk, WsEventError, + ChangeSetError, KeyPairError, SecretId, StandardModelError, TransactionsError, UserError, + WorkspacePk, WsEventError, }; use thiserror::Error; @@ -24,8 +25,8 @@ pub enum SecretError { ChangeSet(#[from] ChangeSetError), #[error(transparent)] ContextTransactions(#[from] TransactionsError), - #[error(transparent)] - Diagram(#[from] DiagramError), + // #[error(transparent)] + // Diagram(#[from] DiagramError), #[error("Hyper error: {0}")] Hyper(#[from] 
hyper::http::Error), #[error(transparent)] diff --git a/lib/sdf-server/src/server/service/secret/create_secret.rs b/lib/sdf-server/src/server/service/secret/create_secret.rs index 4dcbdadfac..73e14bcde3 100644 --- a/lib/sdf-server/src/server/service/secret/create_secret.rs +++ b/lib/sdf-server/src/server/service/secret/create_secret.rs @@ -1,11 +1,9 @@ use axum::response::IntoResponse; use axum::Json; -use dal::secret::SecretView; -use dal::StandardModel; use dal::{ - key_pair::KeyPairPk, ChangeSet, EncryptedSecret, SecretAlgorithm, SecretVersion, Visibility, - WsEvent, + key_pair::KeyPairPk, EncryptedSecret, SecretAlgorithm, SecretVersion, Visibility, WsEvent, }; +use dal::{ChangeSet, SecretView}; use serde::{Deserialize, Serialize}; use crate::server::extract::{AccessBuilder, HandlerContext}; @@ -49,7 +47,7 @@ pub async fn create_secret( ) .await?; - WsEvent::secret_created(&ctx, *secret.id()) + WsEvent::secret_created(&ctx, secret.id()) .await? .publish_on_commit(&ctx) .await?; diff --git a/lib/sdf-server/src/server/service/secret/list_secrets.rs b/lib/sdf-server/src/server/service/secret/list_secrets.rs index fc99c06f14..2554142e22 100644 --- a/lib/sdf-server/src/server/service/secret/list_secrets.rs +++ b/lib/sdf-server/src/server/service/secret/list_secrets.rs @@ -5,7 +5,7 @@ use axum::Json; use serde::{Deserialize, Serialize}; use dal::secret::{SecretDefinitionView, SecretView}; -use dal::{Secret, StandardModel, Visibility}; +use dal::{Secret, Visibility}; use crate::server::extract::{AccessBuilder, HandlerContext}; use crate::service::secret::SecretError; @@ -54,7 +54,7 @@ pub async fn list_secrets( for secret in Secret::list(&ctx).await? { hash_map .get_mut(secret.definition()) - .ok_or(SecretError::SecretWithInvalidDefinition(*secret.id()))? + .ok_or(SecretError::SecretWithInvalidDefinition(secret.id()))? 
.secrets .push(SecretView::from_secret(&ctx, secret).await?); } diff --git a/lib/sdf-server/src/server/service/secret/update_secret.rs b/lib/sdf-server/src/server/service/secret/update_secret.rs index 48a12f57e4..9e9cf5b363 100644 --- a/lib/sdf-server/src/server/service/secret/update_secret.rs +++ b/lib/sdf-server/src/server/service/secret/update_secret.rs @@ -2,8 +2,8 @@ use axum::response::IntoResponse; use axum::Json; use dal::secret::SecretView; use dal::{ - key_pair::KeyPairPk, ChangeSet, EncryptedSecret, SecretAlgorithm, SecretVersion, Visibility, - WsEvent, + key_pair::KeyPairPk, ChangeSet, EncryptedSecret, Secret, SecretAlgorithm, SecretVersion, + Visibility, WsEvent, }; use dal::{HistoryActor, SecretError, SecretId, StandardModel}; use serde::{Deserialize, Serialize}; @@ -67,6 +67,10 @@ pub async fn update_secret( secret.set_algorithm(&ctx, new_data.algorithm).await?; } + // TODO(nick): unify this with the encrypted secrets stuff. For now, let's update the referential secret + // as a side effect. + Secret::update(&ctx, &secret).await?; + WsEvent::secret_updated(&ctx, *secret.id()) .await? 
.publish_on_commit(&ctx) diff --git a/lib/sdf-server/src/server/service/signup.rs b/lib/sdf-server/src/server/service/signup.rs deleted file mode 100644 index ff9804f7e1..0000000000 --- a/lib/sdf-server/src/server/service/signup.rs +++ /dev/null @@ -1,64 +0,0 @@ -use axum::{ - http::StatusCode, - response::{IntoResponse, Response}, - routing::post, - Json, Router, -}; -use thiserror::Error; - -use dal::{ - ComponentError, NodeError, SchemaError, StandardModelError, TransactionsError, WorkspaceError, -}; - -pub mod create_account; - -#[allow(clippy::large_enum_variant)] -#[remain::sorted] -#[derive(Debug, Error)] -pub enum SignupError { - #[error("component error: {0}")] - Component(#[from] ComponentError), - #[error(transparent)] - ContextTransaction(#[from] TransactionsError), - #[error("invalid signup secret")] - InvalidSignupSecret, - #[error(transparent)] - Nats(#[from] si_data_nats::NatsError), - #[error("Node error: {0}")] - Node(#[from] NodeError), - #[error(transparent)] - Pg(#[from] si_data_pg::PgError), - #[error("Schema error: {0}")] - Schema(#[from] SchemaError), - #[error("StandardModel error: {0}")] - StandardModel(#[from] StandardModelError), - #[error(transparent)] - Workspace(#[from] WorkspaceError), -} - -pub type SignupResult = std::result::Result; - -impl IntoResponse for SignupError { - fn into_response(self) -> Response { - let (status, error_message) = match self { - SignupError::InvalidSignupSecret => { - (StatusCode::BAD_REQUEST, "signup failed".to_string()) - } - err => (StatusCode::INTERNAL_SERVER_ERROR, err.to_string()), - }; - - let body = Json(serde_json::json!({ - "error": { - "message": error_message, - "code": 42, - "statusCode": status.as_u16(), - }, - })); - - (status, body).into_response() - } -} - -pub fn routes() -> Router { - Router::new().route("/create_account", post(create_account::create_account)) -} diff --git a/lib/sdf-server/src/server/service/signup/create_account.rs 
b/lib/sdf-server/src/server/service/signup/create_account.rs deleted file mode 100644 index b11d85ad30..0000000000 --- a/lib/sdf-server/src/server/service/signup/create_account.rs +++ /dev/null @@ -1,53 +0,0 @@ -use axum::Json; -use serde::{Deserialize, Serialize}; - -use dal::Workspace; -use telemetry::prelude::*; - -use crate::{ - server::extract::{HandlerContext, SignupSecret}, - service::signup::SignupError, -}; - -use super::SignupResult; - -#[derive(Debug, Serialize, Deserialize)] -#[serde(rename_all = "camelCase")] -pub struct CreateAccountRequest { - pub workspace_name: String, - pub user_name: String, - pub user_email: String, - pub user_password: String, - pub signup_secret: String, -} - -#[derive(Debug, Serialize, Deserialize)] -#[serde(rename_all = "camelCase")] -pub struct CreateAccountResponse { - pub success: bool, -} - -pub async fn create_account( - HandlerContext(builder): HandlerContext, - SignupSecret(signup_secret): SignupSecret, - Json(request): Json, -) -> SignupResult> { - if signup_secret.as_str() != request.signup_secret.as_str() { - warn!("invalid signup secret provided when signing up new workspace"); - return Err(SignupError::InvalidSignupSecret); - } - - let mut ctx = builder.build_default().await?; - - let _nw = Workspace::signup( - &mut ctx, - &request.workspace_name, - &request.user_name, - &request.user_email, - ) - .await?; - - ctx.commit().await?; - - Ok(Json(CreateAccountResponse { success: true })) -} diff --git a/lib/sdf-server/tests/api.rs b/lib/sdf-server/tests/api.rs index 43a8731c94..e599a25218 100644 --- a/lib/sdf-server/tests/api.rs +++ b/lib/sdf-server/tests/api.rs @@ -1,5 +1,6 @@ #![recursion_limit = "256"] +#[allow(dead_code)] const TEST_PG_DBNAME: &str = "si_test_sdf_server"; -mod service_tests; +// mod service_tests; diff --git a/lib/si-cbor/BUCK b/lib/si-cbor/BUCK new file mode 100644 index 0000000000..f6574e8e2e --- /dev/null +++ b/lib/si-cbor/BUCK @@ -0,0 +1,12 @@ +load("@prelude-si//:macros.bzl", "rust_library") 
+ +rust_library( + name = "si-cbor", + deps = [ + "//third-party/rust:ciborium", + "//third-party/rust:remain", + "//third-party/rust:serde", + "//third-party/rust:thiserror", + ], + srcs = glob(["src/**/*.rs"]), +) diff --git a/lib/si-cbor/Cargo.toml b/lib/si-cbor/Cargo.toml new file mode 100644 index 0000000000..f98842bd81 --- /dev/null +++ b/lib/si-cbor/Cargo.toml @@ -0,0 +1,11 @@ +[package] +name = "si-cbor" +version = "0.1.0" +edition = "2021" +publish = false + +[dependencies] +ciborium = { workspace = true } +remain = { workspace = true } +serde = { workspace = true } +thiserror = { workspace = true } \ No newline at end of file diff --git a/lib/si-cbor/src/lib.rs b/lib/si-cbor/src/lib.rs new file mode 100644 index 0000000000..84657cd14e --- /dev/null +++ b/lib/si-cbor/src/lib.rs @@ -0,0 +1,74 @@ +//! This library provides the ability to encode (serialize) and decode (deserialize) +//! [CBOR](https://en.wikipedia.org/wiki/CBOR) objects. + +#![warn( + missing_debug_implementations, + missing_docs, + unreachable_pub, + bad_style, + dead_code, + improper_ctypes, + non_shorthand_field_patterns, + no_mangle_generic_items, + overflowing_literals, + path_statements, + patterns_in_fns_without_body, + unconditional_recursion, + unused, + unused_allocation, + unused_comparisons, + unused_parens, + while_true, + clippy::missing_panics_doc +)] + +use serde::de::DeserializeOwned; +use serde::Serialize; +use std::io::BufReader; +use thiserror::Error; + +#[allow(missing_docs)] +#[remain::sorted] +#[derive(Error, Debug)] +pub enum CborError { + #[error("ciborium deserialization error: {0}")] + CiboriumDeserialization(#[from] ciborium::de::Error), + #[error("ciborium serialization error: {0}")] + CiboriumSerialization(#[from] ciborium::ser::Error), +} + +type CborResult = Result; + +/// Serialize the given value to CBOR. 
+pub fn encode(value: &T) -> CborResult> +where + T: Serialize + ?Sized, +{ + let mut encoded = Vec::new(); + ciborium::into_writer(value, &mut encoded)?; + Ok(encoded) +} + +/// Deserialize from CBOR to a provided type. +pub fn decode(value: &[u8]) -> CborResult +where + T: DeserializeOwned, +{ + let reader = BufReader::new(value); + Ok(ciborium::from_reader(reader)?) +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn string() { + let original = "mybrainhurts"; + + let bytes = encode(original).expect("could not encode"); + let round_trip: String = decode(&bytes).expect("could not decode"); + + assert_eq!(original, round_trip.as_str()); + } +} diff --git a/lib/si-data-nats/src/jetstream.rs b/lib/si-data-nats/src/jetstream.rs new file mode 100644 index 0000000000..cb36be2c68 --- /dev/null +++ b/lib/si-data-nats/src/jetstream.rs @@ -0,0 +1,64 @@ +//! This module contains tools for working with [NATS Jetstream](https://docs.nats.io/nats-concepts/jetstream). + +#![warn( + bad_style, + clippy::missing_panics_doc, + clippy::panic, + clippy::panic_in_result_fn, + clippy::unwrap_in_result, + clippy::unwrap_used, + dead_code, + improper_ctypes, + missing_debug_implementations, + missing_docs, + no_mangle_generic_items, + non_shorthand_field_patterns, + overflowing_literals, + path_statements, + patterns_in_fns_without_body, + unconditional_recursion, + unreachable_pub, + unused, + unused_allocation, + unused_comparisons, + unused_parens, + while_true +)] + +use async_nats::jetstream::consumer::StreamErrorKind; +use async_nats::jetstream::context::{CreateStreamErrorKind, GetStreamErrorKind, PublishErrorKind}; +use async_nats::jetstream::stream::ConsumerErrorKind; +use telemetry::prelude::*; +use thiserror::Error; + +mod consumer; +mod context; + +pub use async_nats::jetstream::consumer::pull::Stream; +pub use consumer::Consumer; +pub use context::Context; +pub use context::REPLY_SUBJECT_HEADER_NAME; + +/// Re-export of [`async_nats::jetstream::AckKind`]. 
+pub type AckKind = async_nats::jetstream::AckKind; + +#[allow(missing_docs)] +#[remain::sorted] +#[derive(Debug, Error)] +pub enum JetstreamError { + #[error("consumer error: {0}")] + Consumer(#[from] async_nats::error::Error), + #[error("create stream error: {0}")] + CreateStream(#[from] async_nats::error::Error), + #[error("get stream error: {0}")] + GetStream(#[from] async_nats::error::Error), + #[error("invalid subject name for stream: {0}")] + InvalidSubjectName(String), + #[error("publish error: {0}")] + Publish(#[from] async_nats::error::Error), + #[error("stream error: {0}")] + Stream(#[from] async_nats::error::Error), +} + +#[allow(missing_docs)] +pub type JetstreamResult = Result; diff --git a/lib/si-data-nats/src/jetstream/consumer.rs b/lib/si-data-nats/src/jetstream/consumer.rs new file mode 100644 index 0000000000..903b2ca455 --- /dev/null +++ b/lib/si-data-nats/src/jetstream/consumer.rs @@ -0,0 +1,30 @@ +use async_nats::jetstream; +use async_nats::jetstream::consumer::pull::{Config, Stream}; + +use crate::jetstream::JetstreamResult; + +/// A wrapper around [`jetstream::consumer::Consumer`]. +#[derive(Debug)] +pub struct Consumer { + inner: jetstream::consumer::Consumer, +} + +impl Consumer { + /// Creates a new [`Consumer`]. + pub fn new(raw_consumer: jetstream::consumer::Consumer) -> Self { + Self { + inner: raw_consumer, + } + } + + /// Creates a [`Stream`] from self. + pub async fn stream(&self) -> JetstreamResult { + Ok(self.inner.messages().await?) 
+ } +} + +impl From> for Consumer { + fn from(value: jetstream::consumer::Consumer) -> Self { + Self::new(value) + } +} diff --git a/lib/si-data-nats/src/jetstream/context.rs b/lib/si-data-nats/src/jetstream/context.rs new file mode 100644 index 0000000000..31a5260f3f --- /dev/null +++ b/lib/si-data-nats/src/jetstream/context.rs @@ -0,0 +1,134 @@ +use async_nats::jetstream::stream::{Config, RetentionPolicy, Stream}; +use async_nats::{jetstream, HeaderMap}; +use bytes::Bytes; + +use crate::jetstream::{Consumer, JetstreamError, JetstreamResult}; +use crate::Client; +use crate::HeaderName; + +/// Used in a [`HeaderMap`] for consumers of stream to reply back to requesters (via a reply mailbox and outside of the +/// stream). +pub static REPLY_SUBJECT_HEADER_NAME: HeaderName = HeaderName::from_static("X-Reply-Subject"); + +const DEFAULT_MAX_MESSAGES: i64 = 10_000; + +/// A wrapper around [`jetstream::Context`]. +#[derive(Debug)] +pub struct Context { + inner: jetstream::Context, +} + +impl From for Context { + fn from(value: Client) -> Self { + Self::new(value) + } +} + +impl Context { + /// Creates a new [`Context`]. + pub fn new(client: Client) -> Self { + Self { + inner: jetstream::new(client.inner), + } + } + + /// Finds or creates a stream with a [`WorkQueue`](RetentionPolicy::WorkQueue) retention policy. + pub async fn get_or_create_work_queue_stream( + &self, + name: impl Into, + subjects: Vec, + ) -> JetstreamResult { + // Validate the name before getting or creating the stream. We perform pre-validation to prevent a potential + // hang. + let name = name.into(); + validate_stream_subject_name(&name)?; + + let stream = self + .inner + .get_or_create_stream(Config { + name, + subjects, + max_messages: DEFAULT_MAX_MESSAGES, + retention: RetentionPolicy::WorkQueue, + ..Default::default() + }) + .await?; + Ok(stream) + } + + /// Publishes to a stream with a reply mailbox in the headers. 
Immediately awaits the future that the server has + /// acknowledged the message. + pub async fn publish_with_reply_mailbox_and_immediately_ack( + &self, + client: &Client, + subject: String, + bytes: Bytes, + ) -> JetstreamResult { + let mut headers = HeaderMap::new(); + let reply_subject = client.new_inbox(); + headers.insert(REPLY_SUBJECT_HEADER_NAME.clone(), reply_subject.as_str()); + + let ack_future = self + .inner + .publish_with_headers(subject, headers, bytes) + .await?; + + ack_future.await?; + + Ok(reply_subject) + } + + /// Finds or creates a [`Consumer`] with a durable, pull configuration. Because the consumer is durable, the server + /// will remember the last events "acked" in the event of a failure. + pub async fn get_or_create_durable_consumer( + &self, + stream: &Stream, + name: impl Into, + ) -> JetstreamResult { + let name = name.into(); + let raw_consumer = stream + .get_or_create_consumer( + name.as_str(), + jetstream::consumer::pull::Config { + durable_name: Some(name.clone()), + ..Default::default() + }, + ) + .await?; + Ok(Consumer::new(raw_consumer)) + } +} + +// This is an opinionated version of the rules from upstream. For example, alphanumeric characters are "recommended", +// but we use "-" for subject prefixes for testing since we cannot use "." (it is prohibited and you will fail stream +// creation). +// +// Link to upstream docs: https://docs.nats.io/running-a-nats-service/nats_admin/jetstream_admin/naming +fn validate_stream_subject_name(subject: impl AsRef) -> JetstreamResult<()> { + let subject = subject.as_ref(); + for char in subject.chars() { + let valid = match char { + ' ' | '.' 
| '>' | '*' | '/' | '\\' => false, + char if char != '-' && !char.is_alphanumeric() => false, + _ => true, + }; + if !valid { + return Err(JetstreamError::InvalidSubjectName(subject.to_string())); + } + } + Ok(()) +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_validate_stream_subject_name() { + assert!(validate_stream_subject_name("poop.canoe").is_err()); + assert!(validate_stream_subject_name("poop").is_ok()); + assert!(validate_stream_subject_name("poop-canoe").is_ok()); + assert!(validate_stream_subject_name("poop canoe").is_err()); + assert!(validate_stream_subject_name("poop/canoe").is_err()); + } +} diff --git a/lib/si-data-nats/src/lib.rs b/lib/si-data-nats/src/lib.rs index f2442bc438..e75aa92671 100644 --- a/lib/si-data-nats/src/lib.rs +++ b/lib/si-data-nats/src/lib.rs @@ -16,11 +16,15 @@ use telemetry::prelude::*; use thiserror::Error; use tokio::sync::Mutex; +use crate::jetstream::Context; + mod connect_options; mod message; -pub mod service; mod subscriber; +pub mod jetstream; +pub mod service; + pub use async_nats::{ connection::State, header, header::HeaderMap, rustls, status, subject, Auth, AuthError, HeaderName, HeaderValue, ServerAddr, ServerInfo, Subject, @@ -1000,6 +1004,33 @@ impl Client { pub fn metadata(&self) -> &ConnectionMetadata { self.metadata.as_ref() } + + /// Consumes self to create a [Jetstream](https://docs.nats.io/nats-concepts/jetstream) context. 
+ #[instrument( + name = "client::to_jetstream_ctx", + skip_all, + level = "debug", + fields( + messaging.client_id = Empty, + messaging.nats.server.id = Empty, + messaging.nats.server.name = Empty, + messaging.nats.server.version = Empty, + messaging.system = Empty, + messaging.url = Empty, + network.peer.address = Empty, + network.protocol.name = Empty, + network.protocol.version = Empty, + network.transport = Empty, + otel.kind = SpanKind::Client.as_str(), + otel.status_code = Empty, + otel.status_message = Empty, + server.address = Empty, + server.port = Empty, + ) + )] + pub fn to_jetstream_ctx(self) -> Context { + Context::new(self) + } } #[derive(Clone, Debug)] diff --git a/lib/si-data-pg/src/lib.rs b/lib/si-data-pg/src/lib.rs index eece056a46..22a5b486cd 100644 --- a/lib/si-data-pg/src/lib.rs +++ b/lib/si-data-pg/src/lib.rs @@ -47,6 +47,7 @@ use tokio_postgres::{ }; pub use tokio_postgres::error::SqlState; +pub use tokio_postgres::types as postgres_types; const MIGRATION_LOCK_NUMBER: i64 = 42; const MAX_POOL_SIZE_MINIMUM: usize = 32; @@ -64,6 +65,8 @@ pub enum PgError { "transaction not exclusively referenced when rollback attempted; arc_strong_count={0}" )] TxnRollbackNotExclusive(usize), + #[error("unexpected row returned: {0:?}")] + UnexpectedRow(PgRow), } #[remain::sorted] @@ -2411,6 +2414,38 @@ impl PgSharedTransaction { } } + /// Executes a statement that returns zero rows. + /// + /// Returns an error if the query returns more than zero rows. + /// + /// A statement may contain parameters, specified by `$n`, where `n` is the index of the + /// parameter of the list provided, 1-indexed. + /// + /// The `statement` argument can either be a `Statement`, or a raw query string. If the same + /// statement will be repeatedly executed (perhaps with different query parameters), consider + /// preparing the statement up front with the `prepare` method. + /// + /// # Panics + /// + /// - If the number of parameters provided does not match the number expected. 
+ /// - If the internal transaction has already been consumed which is an internal correctness + /// bug + pub async fn query_none( + &self, + statement: &str, + params: &[&(dyn ToSql + Sync)], + ) -> Result<(), PgError> { + match self.inner.lock().await.borrow_txn().as_ref() { + Some(txn) => match txn.query_opt(statement, params).await? { + None => Ok(()), + Some(row) => Err(PgError::UnexpectedRow(row)), + }, + None => { + unreachable!("txn is only consumed with commit/rollback--this is an internal bug") + } + } + } + /// The maximally flexible version of [`query`]. /// /// A statement may contain parameters, specified by `$n`, where `n` is the index of the diff --git a/lib/si-pkg/src/lib.rs b/lib/si-pkg/src/lib.rs index 03570e8ea6..f255b83880 100644 --- a/lib/si-pkg/src/lib.rs +++ b/lib/si-pkg/src/lib.rs @@ -66,7 +66,7 @@ mod tests { assert_eq!( "head", - change_sets.get(0).expect("get first change set").name() + change_sets.first().expect("get first change set").name() ); } @@ -93,7 +93,7 @@ mod tests { let funcs = read_pkg.funcs().expect("failed to get funcs"); assert_eq!(2, funcs.len()); - let truthy_func = funcs.get(0).expect("failed to get first func"); + let truthy_func = funcs.first().expect("failed to get first func"); assert_eq!("si:truthy", truthy_func.name()); let args = truthy_func.arguments().expect("failed to get arguments"); assert_eq!(6, args.len()); diff --git a/lib/si-pkg/src/pkg/variant.rs b/lib/si-pkg/src/pkg/variant.rs index 11449a4464..3779f16543 100644 --- a/lib/si-pkg/src/pkg/variant.rs +++ b/lib/si-pkg/src/pkg/variant.rs @@ -357,7 +357,7 @@ impl<'a> SiPkgSchemaVariant<'a> { "Array or map has more than one direct child", )); } - let type_prop = children.get(0).ok_or(SiPkgError::prop_tree_invalid( + let type_prop = children.first().ok_or(SiPkgError::prop_tree_invalid( "Array or map prop missing type prop", ))?; prop.type_prop(type_prop.clone()); diff --git a/lib/si-pkg/src/spec/prop.rs b/lib/si-pkg/src/spec/prop.rs index 
6fa732a737..f5d377fc82 100644 --- a/lib/si-pkg/src/spec/prop.rs +++ b/lib/si-pkg/src/spec/prop.rs @@ -420,7 +420,7 @@ impl PropSpec { match current_prop_spec.kind() { PropSpecKind::Map | PropSpecKind::Array => { if let Some(children) = child_map.get(current_path) { - if let Some(type_child) = children.get(0) { + if let Some(type_child) = children.first() { current_prop_spec_builder.type_prop(type_child.to_owned()); } } diff --git a/lib/si-test-macros/src/dal_test.rs b/lib/si-test-macros/src/dal_test.rs index 5fff086d72..4de57dbfd8 100644 --- a/lib/si-test-macros/src/dal_test.rs +++ b/lib/si-test-macros/src/dal_test.rs @@ -29,6 +29,11 @@ pub(crate) fn expand(item: ItemFn, args: Args) -> TokenStream { fn fn_setup<'a>(params: impl Iterator) -> DalTestFnSetup { let mut expander = DalTestFnSetupExpander::new(); + expander.setup_start_veritech_server(); + expander.setup_start_pinga_server(); + expander.setup_start_council_server(); + expander.setup_start_rebaser_server(); + for param in params { match param { FnArg::Typed(pat_type) => match &*pat_type.ty { @@ -79,6 +84,11 @@ fn fn_setup<'a>(params: impl Iterator) -> DalTestFnSetup { let var = var.as_ref(); expander.push_arg(parse_quote! {#var}); } + "RebaserShutdownHandle" => { + let var = expander.setup_rebaser_shutdown_handle(); + let var = var.as_ref(); + expander.push_arg(parse_quote! 
{#var}); + } "ServicesContext" => { let var = expander.setup_services_context(); let var = var.as_ref(); @@ -166,14 +176,15 @@ fn fn_setup<'a>(params: impl Iterator) -> DalTestFnSetup { } } - if expander.has_args() { - // TODO(fnichol): we can use a macro attribute to opt-out and not run a veritech server in - // the future, but for now (as before), every test starts with its own veritech server with - // a randomized subject prefix - expander.setup_start_veritech_server(); - expander.setup_start_pinga_server(); - expander.setup_start_council_server(); - } + // if expander.has_args() { + // // TODO(fnichol): we can use a macro attribute to opt-out and not run a veritech server in + // // the future, but for now (as before), every test starts with its own veritech server with + // // a randomized subject prefix + // expander.setup_start_veritech_server(); + // expander.setup_start_pinga_server(); + // expander.setup_start_council_server(); + // expander.setup_start_rebaser_server(); + // } expander.finish() } @@ -200,6 +211,9 @@ struct DalTestFnSetupExpander { pinga_server: Option>, pinga_shutdown_handle: Option>, start_pinga_server: Option<()>, + rebaser_server: Option>, + rebaser_shutdown_handle: Option>, + start_rebaser_server: Option<()>, veritech_server: Option>, veritech_shutdown_handle: Option>, start_veritech_server: Option<()>, @@ -226,6 +240,9 @@ impl DalTestFnSetupExpander { pinga_server: None, pinga_shutdown_handle: None, start_pinga_server: None, + rebaser_server: None, + rebaser_shutdown_handle: None, + start_rebaser_server: None, veritech_server: None, veritech_shutdown_handle: None, start_veritech_server: None, @@ -241,6 +258,7 @@ impl DalTestFnSetupExpander { } } + #[allow(dead_code)] fn has_args(&self) -> bool { !self.args.is_empty() } @@ -318,6 +336,30 @@ impl FnSetupExpander for DalTestFnSetupExpander { self.start_pinga_server = value; } + fn rebaser_server(&self) -> Option<&Rc> { + self.rebaser_server.as_ref() + } + + fn set_rebaser_server(&mut 
self, value: Option>) { + self.rebaser_server = value; + } + + fn rebaser_shutdown_handle(&self) -> Option<&Rc> { + self.rebaser_shutdown_handle.as_ref() + } + + fn set_rebaser_shutdown_handle(&mut self, value: Option>) { + self.rebaser_shutdown_handle = value; + } + + fn start_rebaser_server(&self) -> Option<()> { + self.start_rebaser_server + } + + fn set_start_rebaser_server(&mut self, value: Option<()>) { + self.start_rebaser_server = value; + } + fn veritech_server(&self) -> Option<&Rc> { self.veritech_server.as_ref() } diff --git a/lib/si-test-macros/src/expand.rs b/lib/si-test-macros/src/expand.rs index 13245912af..714743e5a0 100644 --- a/lib/si-test-macros/src/expand.rs +++ b/lib/si-test-macros/src/expand.rs @@ -224,6 +224,15 @@ pub(crate) trait FnSetupExpander { fn start_pinga_server(&self) -> Option<()>; fn set_start_pinga_server(&mut self, value: Option<()>); + fn rebaser_server(&self) -> Option<&Rc>; + fn set_rebaser_server(&mut self, value: Option>); + + fn rebaser_shutdown_handle(&self) -> Option<&Rc>; + fn set_rebaser_shutdown_handle(&mut self, value: Option>); + + fn start_rebaser_server(&self) -> Option<()>; + fn set_start_rebaser_server(&mut self, value: Option<()>); + fn veritech_server(&self) -> Option<&Rc>; fn set_veritech_server(&mut self, value: Option>); @@ -267,7 +276,7 @@ pub(crate) trait FnSetupExpander { let var = Ident::new("test_context", Span::call_site()); self.code_extend(quote! 
{ - let test_context = ::dal_test::TestContext::global(crate::TEST_PG_DBNAME).await?; + let test_context = ::dal_test::TestContext::global(crate::TEST_PG_DBNAME, crate::TEST_CONTENT_STORE_PG_DBNAME).await?; }); self.set_test_context(Some(Rc::new(var))); @@ -377,6 +386,54 @@ pub(crate) trait FnSetupExpander { self.set_start_pinga_server(Some(())); } + fn setup_rebaser_server(&mut self) -> Rc { + if let Some(ident) = self.rebaser_server() { + return ident.clone(); + } + + let services_context = self.setup_services_context(); + let services_context = services_context.as_ref(); + + let var = Ident::new("rebaser_server", Span::call_site()); + self.code_extend(quote! { + let #var = ::dal_test::rebaser_server(&#services_context)?; + }); + self.set_rebaser_server(Some(Rc::new(var))); + + self.rebaser_server().unwrap().clone() + } + + fn setup_rebaser_shutdown_handle(&mut self) -> Rc { + if let Some(ident) = self.rebaser_shutdown_handle() { + return ident.clone(); + } + + let rebaser_server = self.setup_rebaser_server(); + let rebaser_server = rebaser_server.as_ref(); + + let var = Ident::new("rebaser_shutdown_handle", Span::call_site()); + self.code_extend(quote! { + let #var = #rebaser_server.shutdown_handle(); + }); + self.set_rebaser_shutdown_handle(Some(Rc::new(var))); + + self.rebaser_shutdown_handle().unwrap().clone() + } + + fn setup_start_rebaser_server(&mut self) { + if self.start_rebaser_server().is_some() { + return; + } + + let rebaser_server = self.setup_rebaser_server(); + let rebaser_server = rebaser_server.as_ref(); + + self.code_extend(quote! 
{ + ::tokio::spawn(#rebaser_server.run()); + }); + self.set_start_rebaser_server(Some(())); + } + fn setup_veritech_server(&mut self) -> Rc { if let Some(ident) = self.veritech_server() { return ident.clone(); @@ -527,7 +584,7 @@ pub(crate) trait FnSetupExpander { .await .wrap_err("failed to build default dal ctx for dal_context_default")?; ctx.update_tenancy(::dal::Tenancy::new(*#nw.workspace.pk())); - ::dal_test::helpers::create_change_set_and_update_ctx(&mut ctx).await; + ::dal_test::helpers::create_change_set_and_update_ctx(&mut ctx, #nw.workspace.default_change_set_id()).await; ctx.blocking_commit() .await .wrap_err("failed to commit create_change_set_and_update_ctx")?; @@ -558,7 +615,7 @@ pub(crate) trait FnSetupExpander { .await .wrap_err("failed to build default dal ctx for dal_context_default_mut")?; ctx.update_tenancy(::dal::Tenancy::new(*#nw.workspace.pk())); - ::dal_test::helpers::create_change_set_and_update_ctx(&mut ctx).await; + ::dal_test::helpers::create_change_set_and_update_ctx(&mut ctx, #nw.workspace.default_change_set_id()).await; ctx.blocking_commit() .await .wrap_err("failed to commit create_change_set_and_update_ctx_mut")?; diff --git a/lib/si-test-macros/src/lib.rs b/lib/si-test-macros/src/lib.rs index 94cd504e68..79f2535d25 100644 --- a/lib/si-test-macros/src/lib.rs +++ b/lib/si-test-macros/src/lib.rs @@ -149,6 +149,8 @@ fn path_as_string(path: &Path) -> String { /// for a workspace for a visibility which is not in a change set /// * `pinga_handle: PingaShutdownHandle`: the shutdown handle for the Pinga server running /// alongside each test +/// * `rebaser_handle: RebaserShutdownHandle`: the shutdown handle for the rebaser server running +/// alongside each test /// * `services_ctx: ServicesContext`: a services context object, used to create DAL contexts /// * `veritech_handle: VeritechShutdownHandle`: the shutdown handle for the Veritech server /// running alongside each test @@ -325,6 +327,8 @@ pub fn dal_test(attr: TokenStream, input: 
TokenStream) -> TokenStream { /// for a workspace for a visibility which is not in a change set /// * `pinga_handle: PingaShutdownHandle`: the shutdown handle for the Pinga server running /// alongside each test +/// * `rebaser_handle: RebaserShutdownHandle`: the shutdown handle for the rebaser server running +/// alongside each test /// * `services_ctx: ServicesContext`: a services context object, used to create DAL contexts /// * `veritech_handle: VeritechShutdownHandle`: the shutdown handle for the Veritech server /// running alongside each test diff --git a/lib/si-test-macros/src/sdf_test.rs b/lib/si-test-macros/src/sdf_test.rs index 584b8ff11f..16c02f4137 100644 --- a/lib/si-test-macros/src/sdf_test.rs +++ b/lib/si-test-macros/src/sdf_test.rs @@ -95,6 +95,11 @@ fn fn_setup<'a>(params: impl Iterator) -> SdfTestFnSetup { let var = var.as_ref(); expander.push_arg(parse_quote! {#var}); } + "RebaserShutdownHandle" => { + let var = expander.setup_rebaser_shutdown_handle(); + let var = var.as_ref(); + expander.push_arg(parse_quote! 
{#var}); + } "ServicesContext" => { let var = expander.setup_services_context(); let var = var.as_ref(); @@ -189,6 +194,7 @@ fn fn_setup<'a>(params: impl Iterator) -> SdfTestFnSetup { expander.setup_start_veritech_server(); expander.setup_start_pinga_server(); expander.setup_start_council_server(); + expander.setup_start_rebaser_server(); } expander.finish() @@ -216,6 +222,9 @@ struct SdfTestFnSetupExpander { pinga_server: Option>, pinga_shutdown_handle: Option>, start_pinga_server: Option<()>, + rebaser_server: Option>, + rebaser_shutdown_handle: Option>, + start_rebaser_server: Option<()>, veritech_server: Option>, veritech_shutdown_handle: Option>, start_veritech_server: Option<()>, @@ -250,6 +259,9 @@ impl SdfTestFnSetupExpander { pinga_server: None, pinga_shutdown_handle: None, start_pinga_server: None, + rebaser_server: None, + rebaser_shutdown_handle: None, + start_rebaser_server: None, veritech_server: None, veritech_shutdown_handle: None, start_veritech_server: None, @@ -509,6 +521,30 @@ impl FnSetupExpander for SdfTestFnSetupExpander { self.start_pinga_server = value; } + fn rebaser_server(&self) -> Option<&Rc> { + self.rebaser_server.as_ref() + } + + fn set_rebaser_server(&mut self, value: Option>) { + self.rebaser_server = value; + } + + fn rebaser_shutdown_handle(&self) -> Option<&Rc> { + self.rebaser_shutdown_handle.as_ref() + } + + fn set_rebaser_shutdown_handle(&mut self, value: Option>) { + self.rebaser_shutdown_handle = value; + } + + fn start_rebaser_server(&self) -> Option<()> { + self.start_rebaser_server + } + + fn set_start_rebaser_server(&mut self, value: Option<()>) { + self.start_rebaser_server = value; + } + fn veritech_server(&self) -> Option<&Rc> { self.veritech_server.as_ref() } diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 37b3c39cd5..7a932959a6 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -188,6 +188,15 @@ importers: fontfaceobserver: specifier: ^2.3.0 version: 2.3.0 + graphology: + specifier: ^0.25.4 + version: 
0.25.4(graphology-types@0.24.7) + graphology-layout-forceatlas2: + specifier: ^0.10.1 + version: 0.10.1(graphology-types@0.24.7) + graphology-layout-noverlap: + specifier: ^0.4.2 + version: 0.4.2(graphology-types@0.24.7) is-promise: specifier: ^4.0.0 version: 4.0.0 @@ -239,6 +248,9 @@ importers: reconnecting-websocket: specifier: ^4.4.0 version: 4.4.0 + sigma: + specifier: 3.0.0-beta.5 + version: 3.0.0-beta.5(graphology-types@0.24.7) tinycolor2: specifier: ^1.4.2 version: 1.4.2 @@ -333,6 +345,9 @@ importers: faker: specifier: ^6.6.6 version: 6.6.6 + graphology-types: + specifier: ^0.24.7 + version: 0.24.7 unplugin-icons: specifier: ^0.17.1 version: 0.17.1 @@ -5051,6 +5066,10 @@ packages: vue: 3.4.15(typescript@4.9.5) dev: false + /@yomguithereal/helpers@1.1.1: + resolution: {integrity: sha512-UYvAq/XCA7xoh1juWDYsq3W0WywOB+pz8cgVnE1b45ZfdMhBvHDrgmSFG3jXeZSr2tMTYLGHFHON+ekG05Jebg==} + dev: false + /abab@2.0.6: resolution: {integrity: sha512-j2afSsaIENvHZN2B8GOpF566vZ5WVk5opAiMTvWgaQT8DkbOqsTfvNAvHoRGU2zzP8cPoqys+xHTRDWW8L+/BA==} @@ -8943,7 +8962,6 @@ packages: /events@3.3.0: resolution: {integrity: sha512-mQw+2fkQbALzQ7V0MY0IqdnXNOeTtP4r0lN9z7AAawCXgqea7bDii20AYrIBrFd/Hx0M2Ocz6S111CaFkUcb0Q==} engines: {node: '>=0.8.x'} - dev: true /execa@4.1.0: resolution: {integrity: sha512-j5W0//W7f8UxAn8hXVnwG8tLwdiUy4FJLcSupCg6maBYZDpyBvTApK7KyuI4bKj8KOh1r2YH+6ucuYtJv1bTZA==} @@ -10292,6 +10310,45 @@ packages: resolution: {integrity: sha512-bzh50DW9kTPM00T8y4o8vQg89Di9oLJVLW/KaOGIXJWP/iqCN6WKYkbNOF04vFLJhwcpYUh9ydh/+5vpOqV4YQ==} dev: true + /graphology-layout-forceatlas2@0.10.1(graphology-types@0.24.7): + resolution: {integrity: sha512-ogzBeF1FvWzjkikrIFwxhlZXvD2+wlY54lqhsrWprcdPjopM2J9HoMweUmIgwaTvY4bUYVimpSsOdvDv1gPRFQ==} + peerDependencies: + graphology-types: '>=0.19.0' + dependencies: + graphology-types: 0.24.7 + graphology-utils: 2.5.2(graphology-types@0.24.7) + dev: false + + /graphology-layout-noverlap@0.4.2(graphology-types@0.24.7): + resolution: {integrity: 
sha512-13WwZSx96zim6l1dfZONcqLh3oqyRcjIBsqz2c2iJ3ohgs3605IDWjldH41Gnhh462xGB1j6VGmuGhZ2FKISXA==} + peerDependencies: + graphology-types: '>=0.19.0' + dependencies: + graphology-types: 0.24.7 + graphology-utils: 2.5.2(graphology-types@0.24.7) + dev: false + + /graphology-types@0.24.7: + resolution: {integrity: sha512-tdcqOOpwArNjEr0gNQKCXwaNCWnQJrog14nJNQPeemcLnXQUUGrsCWpWkVKt46zLjcS6/KGoayeJfHHyPDlvwA==} + + /graphology-utils@2.5.2(graphology-types@0.24.7): + resolution: {integrity: sha512-ckHg8MXrXJkOARk56ZaSCM1g1Wihe2d6iTmz1enGOz4W/l831MBCKSayeFQfowgF8wd+PQ4rlch/56Vs/VZLDQ==} + peerDependencies: + graphology-types: '>=0.23.0' + dependencies: + graphology-types: 0.24.7 + dev: false + + /graphology@0.25.4(graphology-types@0.24.7): + resolution: {integrity: sha512-33g0Ol9nkWdD6ulw687viS8YJQBxqG5LWII6FI6nul0pq6iM2t5EKquOTFDbyTblRB3O9I+7KX4xI8u5ffekAQ==} + peerDependencies: + graphology-types: '>=0.24.0' + dependencies: + events: 3.3.0 + graphology-types: 0.24.7 + obliterator: 2.0.4 + dev: false + /graphql@16.5.0: resolution: {integrity: sha512-qbHgh8Ix+j/qY+a/ZcJnFQ+j8ezakqPiHwPiZhV/3PgGlgf96QMBB5/f2rkiC9sgLoy/xvT6TSiaf2nTHJh5iA==} engines: {node: ^12.22.0 || ^14.16.0 || ^16.0.0 || >=17.0.0} @@ -14260,6 +14317,10 @@ packages: es-abstract: 1.20.4 dev: true + /obliterator@2.0.4: + resolution: {integrity: sha512-lgHwxlxV1qIg1Eap7LgIeoBWIMFibOjbrYPIPJZcI1mmGAI2m3lNYpK12Y+GBdPQ0U1hRwSord7GIaawz962qQ==} + dev: false + /omit.js@2.0.2: resolution: {integrity: sha512-hJmu9D+bNB40YpL9jYebQl4lsTW6yEHRTroJzNLqQJYHm7c+NQnJGfZmIWh8S3q3KoaxV1aLhV6B3+0N0/kyJg==} dev: true @@ -16405,6 +16466,16 @@ packages: resolution: {integrity: sha512-ybx0WO1/8bSBLEWXZvEd7gMW3Sn3JFlW3TvX1nREbDLRNQNaeNN8WK0meBwPdAaOI7TtRRRJn/Es1zhrrCHu7g==} dev: true + /sigma@3.0.0-beta.5(graphology-types@0.24.7): + resolution: {integrity: sha512-lrIdp5mtwEN/axDfaKkGO7Sq9dSWxJsD3BPkQzFMEBe6T8cE1+Dcv0RWx8g9vz62xCSwcW4X2iZtkkj0xDOPJA==} + dependencies: + '@yomguithereal/helpers': 1.1.1 + events: 3.3.0 + 
graphology-utils: 2.5.2(graphology-types@0.24.7) + transitivePeerDependencies: + - graphology-types + dev: false + /signal-exit@3.0.7: resolution: {integrity: sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ==} diff --git a/third-party/rust/BUCK b/third-party/rust/BUCK index 8261c7a1f2..40291e3091 100644 --- a/third-party/rust/BUCK +++ b/third-party/rust/BUCK @@ -74,18 +74,18 @@ cargo.rust_library( ) http_archive( - name = "ahash-0.7.7.crate", - sha256 = "5a824f2aa7e75a0c98c5a504fceb80649e9c35265d44525b5f94de4771a395cd", - strip_prefix = "ahash-0.7.7", - urls = ["https://crates.io/api/v1/crates/ahash/0.7.7/download"], + name = "ahash-0.7.8.crate", + sha256 = "891477e0c6a8957309ee5c45a6368af3ae14bb510732d2684ffa19af310920f9", + strip_prefix = "ahash-0.7.8", + urls = ["https://crates.io/api/v1/crates/ahash/0.7.8/download"], visibility = [], ) cargo.rust_library( - name = "ahash-0.7.7", - srcs = [":ahash-0.7.7.crate"], + name = "ahash-0.7.8", + srcs = [":ahash-0.7.8.crate"], crate = "ahash", - crate_root = "ahash-0.7.7.crate/src/lib.rs", + crate_root = "ahash-0.7.8.crate/src/lib.rs", edition = "2018", platform = { "linux-arm64": dict( @@ -130,18 +130,18 @@ cargo.rust_library( ) http_archive( - name = "ahash-0.8.7.crate", - sha256 = "77c3a9648d43b9cd48db467b3f87fdd6e146bcc88ab0180006cef2179fe11d01", - strip_prefix = "ahash-0.8.7", - urls = ["https://crates.io/api/v1/crates/ahash/0.8.7/download"], + name = "ahash-0.8.8.crate", + sha256 = "42cd52102d3df161c77a887b608d7a4897d7cc112886a9537b738a887a03aaff", + strip_prefix = "ahash-0.8.8", + urls = ["https://crates.io/api/v1/crates/ahash/0.8.8/download"], visibility = [], ) cargo.rust_library( - name = "ahash-0.8.7", - srcs = [":ahash-0.8.7.crate"], + name = "ahash-0.8.8", + srcs = [":ahash-0.8.8.crate"], crate = "ahash", - crate_root = "ahash-0.8.7.crate/src/lib.rs", + crate_root = "ahash-0.8.8.crate/src/lib.rs", edition = "2018", features = [ "default", @@ -551,7 +551,7 @@ 
cargo.rust_library( ":serde_json-1.0.113", ":serde_nanos-0.1.3", ":serde_repr-0.1.18", - ":thiserror-1.0.56", + ":thiserror-1.0.57", ":time-0.3.34", ":tokio-1.36.0", ":tokio-retry-0.3.0", @@ -586,7 +586,7 @@ cargo.rust_library( deps = [ ":proc-macro2-1.0.78", ":quote-1.0.35", - ":syn-2.0.48", + ":syn-2.0.49", ], ) @@ -631,7 +631,7 @@ cargo.rust_library( deps = [ ":proc-macro2-1.0.78", ":quote-1.0.35", - ":syn-2.0.48", + ":syn-2.0.49", ], ) @@ -660,7 +660,7 @@ cargo.rust_library( deps = [ ":proc-macro2-1.0.78", ":quote-1.0.35", - ":syn-2.0.48", + ":syn-2.0.49", ], ) @@ -814,7 +814,7 @@ cargo.rust_library( ":quick-xml-0.30.0", ":rust-ini-0.19.0", ":serde-1.0.196", - ":thiserror-1.0.56", + ":thiserror-1.0.57", ":time-0.3.34", ":url-2.5.0", ], @@ -827,7 +827,7 @@ cargo.rust_library( crate_root = "rust-s3-61c54947c717d042/aws-region/src/lib.rs", edition = "2018", visibility = [], - deps = [":thiserror-1.0.56"], + deps = [":thiserror-1.0.57"], ) alias( @@ -945,7 +945,7 @@ cargo.rust_library( ":heck-0.4.1", ":proc-macro2-1.0.78", ":quote-1.0.35", - ":syn-2.0.48", + ":syn-2.0.49", ], ) @@ -1500,7 +1500,7 @@ cargo.rust_library( ":serde_json-1.0.113", ":serde_repr-0.1.18", ":serde_urlencoded-0.7.1", - ":thiserror-1.0.56", + ":thiserror-1.0.57", ":tokio-1.36.0", ":tokio-util-0.7.10", ":url-2.5.0", @@ -1726,23 +1726,23 @@ cargo.rust_library( alias( name = "chrono", - actual = ":chrono-0.4.33", + actual = ":chrono-0.4.34", visibility = ["PUBLIC"], ) http_archive( - name = "chrono-0.4.33.crate", - sha256 = "9f13690e35a5e4ace198e7beea2895d29f3a9cc55015fcebe6336bd2010af9eb", - strip_prefix = "chrono-0.4.33", - urls = ["https://crates.io/api/v1/crates/chrono/0.4.33/download"], + name = "chrono-0.4.34.crate", + sha256 = "5bc015644b92d5890fab7489e49d21f879d5c990186827d42ec511919404f38b", + strip_prefix = "chrono-0.4.34", + urls = ["https://crates.io/api/v1/crates/chrono/0.4.34/download"], visibility = [], ) cargo.rust_library( - name = "chrono-0.4.33", - srcs = 
[":chrono-0.4.33.crate"], + name = "chrono-0.4.34", + srcs = [":chrono-0.4.34.crate"], crate = "chrono", - crate_root = "chrono-0.4.33.crate/src/lib.rs", + crate_root = "chrono-0.4.34.crate/src/lib.rs", edition = "2021", features = [ "alloc", @@ -1863,23 +1863,23 @@ cargo.rust_library( alias( name = "clap", - actual = ":clap-4.5.0", + actual = ":clap-4.5.1", visibility = ["PUBLIC"], ) http_archive( - name = "clap-4.5.0.crate", - sha256 = "80c21025abd42669a92efc996ef13cfb2c5c627858421ea58d5c3b331a6c134f", - strip_prefix = "clap-4.5.0", - urls = ["https://crates.io/api/v1/crates/clap/4.5.0/download"], + name = "clap-4.5.1.crate", + sha256 = "c918d541ef2913577a0f9566e9ce27cb35b6df072075769e0b26cb5a554520da", + strip_prefix = "clap-4.5.1", + urls = ["https://crates.io/api/v1/crates/clap/4.5.1/download"], visibility = [], ) cargo.rust_library( - name = "clap-4.5.0", - srcs = [":clap-4.5.0.crate"], + name = "clap-4.5.1", + srcs = [":clap-4.5.1.crate"], crate = "clap", - crate_root = "clap-4.5.0.crate/src/lib.rs", + crate_root = "clap-4.5.1.crate/src/lib.rs", edition = "2021", features = [ "color", @@ -1895,24 +1895,24 @@ cargo.rust_library( ], visibility = [], deps = [ - ":clap_builder-4.5.0", + ":clap_builder-4.5.1", ":clap_derive-4.5.0", ], ) http_archive( - name = "clap_builder-4.5.0.crate", - sha256 = "458bf1f341769dfcf849846f65dffdf9146daa56bcd2a47cb4e1de9915567c99", - strip_prefix = "clap_builder-4.5.0", - urls = ["https://crates.io/api/v1/crates/clap_builder/4.5.0/download"], + name = "clap_builder-4.5.1.crate", + sha256 = "9f3e7391dad68afb0c2ede1bf619f579a3dc9c2ec67f089baa397123a2f3d1eb", + strip_prefix = "clap_builder-4.5.1", + urls = ["https://crates.io/api/v1/crates/clap_builder/4.5.1/download"], visibility = [], ) cargo.rust_library( - name = "clap_builder-4.5.0", - srcs = [":clap_builder-4.5.0.crate"], + name = "clap_builder-4.5.1", + srcs = [":clap_builder-4.5.1.crate"], crate = "clap_builder", - crate_root = "clap_builder-4.5.0.crate/src/lib.rs", + 
crate_root = "clap_builder-4.5.1.crate/src/lib.rs", edition = "2021", features = [ "color", @@ -1955,7 +1955,7 @@ cargo.rust_library( ":heck-0.4.1", ":proc-macro2-1.0.78", ":quote-1.0.35", - ":syn-2.0.48", + ":syn-2.0.49", ], ) @@ -2013,6 +2013,23 @@ cargo.rust_library( visibility = [], ) +http_archive( + name = "cobs-0.2.3.crate", + sha256 = "67ba02a97a2bd10f4b59b25c7973101c79642302776489e030cd13cdab09ed15", + strip_prefix = "cobs-0.2.3", + urls = ["https://crates.io/api/v1/crates/cobs/0.2.3/download"], + visibility = [], +) + +cargo.rust_library( + name = "cobs-0.2.3", + srcs = [":cobs-0.2.3.crate"], + crate = "cobs", + crate_root = "cobs-0.2.3.crate/src/lib.rs", + edition = "2018", + visibility = [], +) + alias( name = "color-eyre", actual = ":color-eyre-0.6.2", @@ -2368,7 +2385,7 @@ cargo.rust_library( }, visibility = [], deps = [ - ":chrono-0.4.33", + ":chrono-0.4.34", ":flate2-1.0.28", ":futures-util-0.3.30", ":http-0.2.11", @@ -2380,7 +2397,7 @@ cargo.rust_library( ":serde-1.0.196", ":serde_json-1.0.113", ":tar-0.4.40", - ":thiserror-1.0.56", + ":thiserror-1.0.57", ":tokio-1.36.0", ":url-2.5.0", ], @@ -2420,7 +2437,7 @@ cargo.rust_library( }, visibility = [], deps = [ - ":chrono-0.4.33", + ":chrono-0.4.34", ":flate2-1.0.28", ":futures-util-0.3.30", ":http-0.2.11", @@ -2432,7 +2449,7 @@ cargo.rust_library( ":serde-1.0.196", ":serde_json-1.0.113", ":tar-0.4.40", - ":thiserror-1.0.56", + ":thiserror-1.0.57", ":tokio-1.36.0", ":url-2.5.0", ], @@ -2586,18 +2603,18 @@ cargo.rust_library( ) http_archive( - name = "crc32fast-1.3.2.crate", - sha256 = "b540bd8bc810d3885c6ea91e2018302f68baba2129ab3e88f32389ee9370880d", - strip_prefix = "crc32fast-1.3.2", - urls = ["https://crates.io/api/v1/crates/crc32fast/1.3.2/download"], + name = "crc32fast-1.4.0.crate", + sha256 = "b3855a8a784b474f333699ef2bbca9db2c4a1f6d9088a90a2d25b1eb53111eaa", + strip_prefix = "crc32fast-1.4.0", + urls = ["https://crates.io/api/v1/crates/crc32fast/1.4.0/download"], visibility = [], ) 
cargo.rust_library( - name = "crc32fast-1.3.2", - srcs = [":crc32fast-1.3.2.crate"], + name = "crc32fast-1.4.0", + srcs = [":crc32fast-1.4.0.crate"], crate = "crc32fast", - crate_root = "crc32fast-1.3.2.crate/src/lib.rs", + crate_root = "crc32fast-1.4.0.crate/src/lib.rs", edition = "2015", features = [ "default", @@ -2674,6 +2691,17 @@ cargo.rust_library( crate = "crossbeam_epoch", crate_root = "crossbeam-epoch-0.9.18.crate/src/lib.rs", edition = "2021", + env = { + "CARGO_MANIFEST_DIR": "crossbeam-epoch-0.9.18.crate", + "CARGO_PKG_AUTHORS": "", + "CARGO_PKG_DESCRIPTION": "Epoch-based garbage collection", + "CARGO_PKG_NAME": "crossbeam-epoch", + "CARGO_PKG_REPOSITORY": "https://github.com/crossbeam-rs/crossbeam", + "CARGO_PKG_VERSION": "0.9.18", + "CARGO_PKG_VERSION_MAJOR": "0", + "CARGO_PKG_VERSION_MINOR": "9", + "CARGO_PKG_VERSION_PATCH": "18", + }, features = [ "alloc", "std", @@ -3088,7 +3116,7 @@ cargo.rust_library( deps = [ ":proc-macro2-1.0.78", ":quote-1.0.35", - ":syn-2.0.48", + ":syn-2.0.49", ], ) @@ -3118,18 +3146,18 @@ cargo.rust_library( ) http_archive( - name = "darling-0.20.5.crate", - sha256 = "fc5d6b04b3fd0ba9926f945895de7d806260a2d7431ba82e7edaecb043c4c6b8", - strip_prefix = "darling-0.20.5", - urls = ["https://crates.io/api/v1/crates/darling/0.20.5/download"], + name = "darling-0.20.6.crate", + sha256 = "c376d08ea6aa96aafe61237c7200d1241cb177b7d3a542d791f2d118e9cbb955", + strip_prefix = "darling-0.20.6", + urls = ["https://crates.io/api/v1/crates/darling/0.20.6/download"], visibility = [], ) cargo.rust_library( - name = "darling-0.20.5", - srcs = [":darling-0.20.5.crate"], + name = "darling-0.20.6", + srcs = [":darling-0.20.6.crate"], crate = "darling", - crate_root = "darling-0.20.5.crate/src/lib.rs", + crate_root = "darling-0.20.6.crate/src/lib.rs", edition = "2018", features = [ "default", @@ -3137,8 +3165,8 @@ cargo.rust_library( ], visibility = [], deps = [ - ":darling_core-0.20.5", - ":darling_macro-0.20.5", + ":darling_core-0.20.6", + 
":darling_macro-0.20.6", ], ) @@ -3172,18 +3200,18 @@ cargo.rust_library( ) http_archive( - name = "darling_core-0.20.5.crate", - sha256 = "04e48a959bcd5c761246f5d090ebc2fbf7b9cd527a492b07a67510c108f1e7e3", - strip_prefix = "darling_core-0.20.5", - urls = ["https://crates.io/api/v1/crates/darling_core/0.20.5/download"], + name = "darling_core-0.20.6.crate", + sha256 = "33043dcd19068b8192064c704b3f83eb464f91f1ff527b44a4e2b08d9cdb8855", + strip_prefix = "darling_core-0.20.6", + urls = ["https://crates.io/api/v1/crates/darling_core/0.20.6/download"], visibility = [], ) cargo.rust_library( - name = "darling_core-0.20.5", - srcs = [":darling_core-0.20.5.crate"], + name = "darling_core-0.20.6", + srcs = [":darling_core-0.20.6.crate"], crate = "darling_core", - crate_root = "darling_core-0.20.5.crate/src/lib.rs", + crate_root = "darling_core-0.20.6.crate/src/lib.rs", edition = "2018", features = [ "strsim", @@ -3196,7 +3224,7 @@ cargo.rust_library( ":proc-macro2-1.0.78", ":quote-1.0.35", ":strsim-0.10.0", - ":syn-2.0.48", + ":syn-2.0.49", ], ) @@ -3224,25 +3252,25 @@ cargo.rust_library( ) http_archive( - name = "darling_macro-0.20.5.crate", - sha256 = "1d1545d67a2149e1d93b7e5c7752dce5a7426eb5d1357ddcfd89336b94444f77", - strip_prefix = "darling_macro-0.20.5", - urls = ["https://crates.io/api/v1/crates/darling_macro/0.20.5/download"], + name = "darling_macro-0.20.6.crate", + sha256 = "c5a91391accf613803c2a9bf9abccdbaa07c54b4244a5b64883f9c3c137c86be", + strip_prefix = "darling_macro-0.20.6", + urls = ["https://crates.io/api/v1/crates/darling_macro/0.20.6/download"], visibility = [], ) cargo.rust_library( - name = "darling_macro-0.20.5", - srcs = [":darling_macro-0.20.5.crate"], + name = "darling_macro-0.20.6", + srcs = [":darling_macro-0.20.6.crate"], crate = "darling_macro", - crate_root = "darling_macro-0.20.5.crate/src/lib.rs", + crate_root = "darling_macro-0.20.6.crate/src/lib.rs", edition = "2018", proc_macro = True, visibility = [], deps = [ - ":darling_core-0.20.5", + 
":darling_core-0.20.6", ":quote-1.0.35", - ":syn-2.0.48", + ":syn-2.0.49", ], ) @@ -3751,7 +3779,7 @@ cargo.rust_library( ":base64-0.13.1", ":byteorder-1.5.0", ":bytes-1.5.0", - ":chrono-0.4.33", + ":chrono-0.4.34", ":containers-api-0.9.0", ":docker-api-stubs-0.6.0", ":futures-util-0.3.30", @@ -3762,7 +3790,7 @@ cargo.rust_library( ":serde-1.0.196", ":serde_json-1.0.113", ":tar-0.4.40", - ":thiserror-1.0.56", + ":thiserror-1.0.57", ":url-2.5.0", ], ) @@ -3775,7 +3803,7 @@ cargo.rust_library( edition = "2021", visibility = [], deps = [ - ":chrono-0.4.33", + ":chrono-0.4.34", ":serde-1.0.196", ":serde_json-1.0.113", ":serde_with-2.3.3", @@ -4001,18 +4029,18 @@ cargo.rust_library( ) http_archive( - name = "either-1.9.0.crate", - sha256 = "a26ae43d7bcc3b814de94796a5e736d4029efb0ee900c12e2d54c993ad1a1e07", - strip_prefix = "either-1.9.0", - urls = ["https://crates.io/api/v1/crates/either/1.9.0/download"], + name = "either-1.10.0.crate", + sha256 = "11157ac094ffbdde99aa67b23417ebdd801842852b500e395a45a9c0aac03e4a", + strip_prefix = "either-1.10.0", + urls = ["https://crates.io/api/v1/crates/either/1.10.0/download"], visibility = [], ) cargo.rust_library( - name = "either-1.9.0", - srcs = [":either-1.9.0.crate"], + name = "either-1.10.0", + srcs = [":either-1.10.0.crate"], crate = "either", - crate_root = "either-1.9.0.crate/src/lib.rs", + crate_root = "either-1.10.0.crate/src/lib.rs", edition = "2018", features = [ "default", @@ -4068,6 +4096,24 @@ cargo.rust_library( ], ) +http_archive( + name = "embedded-io-0.4.0.crate", + sha256 = "ef1a6892d9eef45c8fa6b9e0086428a2cca8491aca8f787c534a3d6d0bcb3ced", + strip_prefix = "embedded-io-0.4.0", + urls = ["https://crates.io/api/v1/crates/embedded-io/0.4.0/download"], + visibility = [], +) + +cargo.rust_library( + name = "embedded-io-0.4.0", + srcs = [":embedded-io-0.4.0.crate"], + crate = "embedded_io", + crate_root = "embedded-io-0.4.0.crate/src/lib.rs", + edition = "2021", + features = ["alloc"], + visibility = [], +) + 
http_archive( name = "encode_unicode-0.3.6.crate", sha256 = "a357d28ed41a50f9c765dbfe56cbc04a64e53e5fc58ba79fbc34c10ef3df831f", @@ -4132,7 +4178,7 @@ cargo.rust_library( ":num-traits-0.2.18", ":proc-macro2-1.0.78", ":quote-1.0.35", - ":syn-2.0.48", + ":syn-2.0.49", ], ) @@ -4450,7 +4496,7 @@ cargo.rust_library( ], visibility = [], deps = [ - ":crc32fast-1.3.2", + ":crc32fast-1.4.0", ":miniz_oxide-0.7.2", ], ) @@ -4744,7 +4790,7 @@ cargo.rust_library( deps = [ ":proc-macro2-1.0.78", ":quote-1.0.35", - ":syn-2.0.48", + ":syn-2.0.49", ], ) @@ -5095,7 +5141,7 @@ cargo.rust_library( ":futures-sink-0.3.30", ":futures-util-0.3.30", ":http-0.2.11", - ":indexmap-2.2.2", + ":indexmap-2.2.3", ":slab-0.4.9", ":tokio-1.36.0", ":tokio-util-0.7.10", @@ -5121,6 +5167,24 @@ cargo.rust_library( deps = [":cfg-if-1.0.0"], ) +http_archive( + name = "hash32-0.2.1.crate", + sha256 = "b0c35f58762feb77d74ebe43bdbc3210f09be9fe6742234d573bacc26ed92b67", + strip_prefix = "hash32-0.2.1", + urls = ["https://crates.io/api/v1/crates/hash32/0.2.1/download"], + visibility = [], +) + +cargo.rust_library( + name = "hash32-0.2.1", + srcs = [":hash32-0.2.1.crate"], + crate = "hash32", + crate_root = "hash32-0.2.1.crate/src/lib.rs", + edition = "2015", + visibility = [], + deps = [":byteorder-1.5.0"], +) + http_archive( name = "hashbrown-0.12.3.crate", sha256 = "8a9ee70c43aaf417c914396645a0fa852624801b24ebb7ae78fe8272889ac888", @@ -5142,7 +5206,7 @@ cargo.rust_library( "raw", ], visibility = [], - deps = [":ahash-0.7.7"], + deps = [":ahash-0.7.8"], ) http_archive( @@ -5185,7 +5249,7 @@ cargo.rust_library( ], visibility = [], deps = [ - ":ahash-0.8.7", + ":ahash-0.8.8", ":allocator-api2-0.2.16", ], ) @@ -5208,6 +5272,47 @@ cargo.rust_library( deps = [":hashbrown-0.14.3"], ) +http_archive( + name = "heapless-0.7.17.crate", + sha256 = "cdc6457c0eb62c71aac4bc17216026d8410337c4126773b9c5daba343f17964f", + strip_prefix = "heapless-0.7.17", + urls = ["https://crates.io/api/v1/crates/heapless/0.7.17/download"], 
+ visibility = [], +) + +cargo.rust_library( + name = "heapless-0.7.17", + srcs = [":heapless-0.7.17.crate"], + crate = "heapless", + crate_root = "heapless-0.7.17.crate/src/lib.rs", + edition = "2018", + features = [ + "atomic-polyfill", + "cas", + "serde", + ], + platform = { + "linux-x86_64": dict( + deps = [":spin-0.9.8"], + ), + "macos-x86_64": dict( + deps = [":spin-0.9.8"], + ), + "windows-gnu": dict( + deps = [":spin-0.9.8"], + ), + "windows-msvc": dict( + deps = [":spin-0.9.8"], + ), + }, + visibility = [], + deps = [ + ":hash32-0.2.1", + ":serde-1.0.196", + ":stable_deref_trait-1.2.0", + ], +) + http_archive( name = "heck-0.4.1.crate", sha256 = "95505c38b4572b2d910cecb0281560f54b440a19336cbbcb27bf6ce6adc6f5a8", @@ -5835,23 +5940,23 @@ cargo.rust_library( alias( name = "indexmap", - actual = ":indexmap-2.2.2", + actual = ":indexmap-2.2.3", visibility = ["PUBLIC"], ) http_archive( - name = "indexmap-2.2.2.crate", - sha256 = "824b2ae422412366ba479e8111fd301f7b5faece8149317bb81925979a53f520", - strip_prefix = "indexmap-2.2.2", - urls = ["https://crates.io/api/v1/crates/indexmap/2.2.2/download"], + name = "indexmap-2.2.3.crate", + sha256 = "233cf39063f058ea2caae4091bf4a3ef70a653afbc026f5c4a4135d114e3c177", + strip_prefix = "indexmap-2.2.3", + urls = ["https://crates.io/api/v1/crates/indexmap/2.2.3/download"], visibility = [], ) cargo.rust_library( - name = "indexmap-2.2.2", - srcs = [":indexmap-2.2.2.crate"], + name = "indexmap-2.2.3", + srcs = [":indexmap-2.2.3.crate"], crate = "indexmap", - crate_root = "indexmap-2.2.2.crate/src/lib.rs", + crate_root = "indexmap-2.2.3.crate/src/lib.rs", edition = "2021", features = [ "default", @@ -5869,23 +5974,23 @@ cargo.rust_library( alias( name = "indicatif", - actual = ":indicatif-0.17.7", + actual = ":indicatif-0.17.8", visibility = ["PUBLIC"], ) http_archive( - name = "indicatif-0.17.7.crate", - sha256 = "fb28741c9db9a713d93deb3bb9515c20788cef5815265bee4980e87bde7e0f25", - strip_prefix = "indicatif-0.17.7", - urls = 
["https://crates.io/api/v1/crates/indicatif/0.17.7/download"], + name = "indicatif-0.17.8.crate", + sha256 = "763a5a8f45087d6bcea4222e7b72c291a054edf80e4ef6efd2a4979878c7bea3", + strip_prefix = "indicatif-0.17.8", + urls = ["https://crates.io/api/v1/crates/indicatif/0.17.8/download"], visibility = [], ) cargo.rust_library( - name = "indicatif-0.17.7", - srcs = [":indicatif-0.17.7.crate"], + name = "indicatif-0.17.8", + srcs = [":indicatif-0.17.8.crate"], crate = "indicatif", - crate_root = "indicatif-0.17.7.crate/src/lib.rs", + crate_root = "indicatif-0.17.8.crate/src/lib.rs", edition = "2021", features = [ "default", @@ -5943,7 +6048,7 @@ cargo.rust_library( deps = [ ":proc-macro2-1.0.78", ":quote-1.0.35", - ":syn-2.0.48", + ":syn-2.0.49", ], ) @@ -5979,7 +6084,7 @@ cargo.rust_library( ":dyn-clone-1.0.16", ":lazy_static-1.4.0", ":newline-converter-0.2.2", - ":thiserror-1.0.56", + ":thiserror-1.0.57", ":unicode-segmentation-1.11.0", ":unicode-width-0.1.11", ], @@ -6061,7 +6166,7 @@ cargo.rust_library( edition = "2018", features = ["use_alloc"], visibility = [], - deps = [":either-1.9.0"], + deps = [":either-1.10.0"], ) alias( @@ -6090,7 +6195,7 @@ cargo.rust_library( "use_std", ], visibility = [], - deps = [":either-1.9.0"], + deps = [":either-1.10.0"], ) http_archive( @@ -6171,7 +6276,7 @@ cargo.rust_library( ":rand-0.8.5", ":serde-1.0.196", ":serde_json-1.0.113", - ":thiserror-1.0.56", + ":thiserror-1.0.57", ":zeroize-1.7.0", ], ) @@ -6944,7 +7049,7 @@ cargo.rust_library( deps = [ ":proc-macro2-1.0.78", ":quote-1.0.35", - ":syn-2.0.48", + ":syn-2.0.49", ], ) @@ -8008,10 +8113,10 @@ cargo.rust_library( deps = [ ":futures-core-0.3.30", ":futures-sink-0.3.30", - ":indexmap-2.2.2", + ":indexmap-2.2.3", ":once_cell-1.19.0", ":pin-project-lite-0.2.13", - ":thiserror-1.0.56", + ":thiserror-1.0.57", ":urlencoding-2.1.3", ], ) @@ -8066,7 +8171,7 @@ cargo.rust_library( ":opentelemetry-semantic-conventions-0.13.0", ":opentelemetry_sdk-0.21.2", ":prost-0.11.9", - 
":thiserror-1.0.56", + ":thiserror-1.0.57", ":tokio-1.36.0", ":tonic-0.9.2", ], @@ -8183,7 +8288,7 @@ cargo.rust_library( ":ordered-float-4.2.0", ":percent-encoding-2.3.1", ":rand-0.8.5", - ":thiserror-1.0.56", + ":thiserror-1.0.57", ":tokio-1.36.0", ":tokio-stream-0.1.14", ], @@ -8368,7 +8473,7 @@ cargo.rust_library( ":proc-macro-error-1.0.4", ":proc-macro2-1.0.78", ":quote-1.0.35", - ":syn-2.0.48", + ":syn-2.0.49", ], ) @@ -8395,7 +8500,7 @@ cargo.rust_library( ":proc-macro2-1.0.78", ":proc-macro2-diagnostics-0.10.1", ":quote-1.0.35", - ":syn-2.0.48", + ":syn-2.0.49", ], ) @@ -8735,7 +8840,7 @@ cargo.rust_library( visibility = [], deps = [ ":fixedbitset-0.4.2", - ":indexmap-2.2.2", + ":indexmap-2.2.3", ":serde-1.0.196", ":serde_derive-1.0.196", ], @@ -8860,7 +8965,7 @@ cargo.rust_library( deps = [ ":proc-macro2-1.0.78", ":quote-1.0.35", - ":syn-2.0.48", + ":syn-2.0.49", ], ) @@ -8960,18 +9065,18 @@ cargo.rust_library( ) http_archive( - name = "pkg-config-0.3.29.crate", - sha256 = "2900ede94e305130c13ddd391e0ab7cbaeb783945ae07a279c268cb05109c6cb", - strip_prefix = "pkg-config-0.3.29", - urls = ["https://crates.io/api/v1/crates/pkg-config/0.3.29/download"], + name = "pkg-config-0.3.30.crate", + sha256 = "d231b230927b5e4ad203db57bbcbee2802f6bce620b1e4a9024a07d94e2907ec", + strip_prefix = "pkg-config-0.3.30", + urls = ["https://crates.io/api/v1/crates/pkg-config/0.3.30/download"], visibility = [], ) cargo.rust_library( - name = "pkg-config-0.3.29", - srcs = [":pkg-config-0.3.29.crate"], + name = "pkg-config-0.3.30", + srcs = [":pkg-config-0.3.30.crate"], crate = "pkg_config", - crate_root = "pkg-config-0.3.29.crate/src/lib.rs", + crate_root = "pkg-config-0.3.30.crate/src/lib.rs", edition = "2015", visibility = [], ) @@ -9026,7 +9131,7 @@ cargo.rust_library( ":base64-0.13.1", ":byteorder-1.5.0", ":bytes-1.5.0", - ":chrono-0.4.33", + ":chrono-0.4.34", ":containers-api-0.8.0", ":flate2-1.0.28", ":futures-util-0.3.30", @@ -9037,7 +9142,7 @@ cargo.rust_library( 
":serde-1.0.196", ":serde_json-1.0.113", ":tar-0.4.40", - ":thiserror-1.0.56", + ":thiserror-1.0.57", ":tokio-1.36.0", ":url-2.5.0", ], @@ -9059,7 +9164,7 @@ cargo.rust_library( edition = "2021", visibility = [], deps = [ - ":chrono-0.4.33", + ":chrono-0.4.34", ":serde-1.0.196", ":serde_json-1.0.113", ], @@ -9133,6 +9238,43 @@ buildscript_run( version = "1.6.0", ) +alias( + name = "postcard", + actual = ":postcard-1.0.8", + visibility = ["PUBLIC"], +) + +http_archive( + name = "postcard-1.0.8.crate", + sha256 = "a55c51ee6c0db07e68448e336cf8ea4131a620edefebf9893e759b2d793420f8", + strip_prefix = "postcard-1.0.8", + urls = ["https://crates.io/api/v1/crates/postcard/1.0.8/download"], + visibility = [], +) + +cargo.rust_library( + name = "postcard-1.0.8", + srcs = [":postcard-1.0.8.crate"], + crate = "postcard", + crate_root = "postcard-1.0.8.crate/src/lib.rs", + edition = "2018", + features = [ + "alloc", + "default", + "embedded-io", + "heapless", + "heapless-cas", + "use-std", + ], + visibility = [], + deps = [ + ":cobs-0.2.3", + ":embedded-io-0.4.0", + ":heapless-0.7.17", + ":serde-1.0.196", + ], +) + http_archive( name = "postgres-derive-0.4.5.crate", sha256 = "83145eba741b050ef981a9a1838c843fa7665e154383325aa8b440ae703180a2", @@ -9153,7 +9295,7 @@ cargo.rust_library( ":heck-0.4.1", ":proc-macro2-1.0.78", ":quote-1.0.35", - ":syn-2.0.48", + ":syn-2.0.49", ], ) @@ -9217,7 +9359,7 @@ cargo.rust_library( "with-serde_json-1", ], named_deps = { - "chrono_04": ":chrono-0.4.33", + "chrono_04": ":chrono-0.4.34", "serde_1": ":serde-1.0.196", "serde_json_1": ":serde_json-1.0.113", }, @@ -9492,7 +9634,7 @@ cargo.rust_library( deps = [ ":proc-macro2-1.0.78", ":quote-1.0.35", - ":syn-2.0.48", + ":syn-2.0.49", ":yansi-1.0.0-rc.1", ], ) @@ -9863,7 +10005,7 @@ cargo.rust_library( ":regex-1.10.3", ":serde-1.0.196", ":siphasher-1.0.0", - ":thiserror-1.0.56", + ":thiserror-1.0.57", ":time-0.3.34", ":tokio-1.36.0", ":tokio-postgres-0.7.10", @@ -9894,7 +10036,7 @@ cargo.rust_library( 
":quote-1.0.35", ":refinery-core-0.8.12", ":regex-1.10.3", - ":syn-2.0.48", + ":syn-2.0.49", ], ) @@ -10099,7 +10241,7 @@ cargo.rust_library( deps = [ ":proc-macro2-1.0.78", ":quote-1.0.35", - ":syn-2.0.48", + ":syn-2.0.49", ], ) @@ -10427,6 +10569,17 @@ cargo.rust_library( crate = "ring", crate_root = "ring-0.17.5.crate/src/lib.rs", edition = "2021", + env = { + "CARGO_MANIFEST_DIR": "ring-0.17.5.crate", + "CARGO_PKG_AUTHORS": "Brian Smith ", + "CARGO_PKG_DESCRIPTION": "Safe, fast, small crypto using Rust.", + "CARGO_PKG_NAME": "ring", + "CARGO_PKG_REPOSITORY": "https://github.com/briansmith/ring", + "CARGO_PKG_VERSION": "0.17.5", + "CARGO_PKG_VERSION_MAJOR": "0", + "CARGO_PKG_VERSION_MINOR": "17", + "CARGO_PKG_VERSION_PATCH": "5", + }, features = [ "alloc", "default", @@ -11133,7 +11286,7 @@ cargo.rust_library( ":serde_derive-1.0.196", ":serde_json-1.0.113", ":sha2-0.10.8", - ":thiserror-1.0.56", + ":thiserror-1.0.57", ":time-0.3.34", ":tokio-1.36.0", ":tokio-rustls-0.24.1", @@ -11143,21 +11296,21 @@ cargo.rust_library( ) http_archive( - name = "rust_decimal-1.34.2.crate", - sha256 = "755392e1a2f77afd95580d3f0d0e94ac83eeeb7167552c9b5bca549e61a94d83", - strip_prefix = "rust_decimal-1.34.2", - urls = ["https://crates.io/api/v1/crates/rust_decimal/1.34.2/download"], + name = "rust_decimal-1.34.3.crate", + sha256 = "b39449a79f45e8da28c57c341891b69a183044b29518bb8f86dbac9df60bb7df", + strip_prefix = "rust_decimal-1.34.3", + urls = ["https://crates.io/api/v1/crates/rust_decimal/1.34.3/download"], visibility = [], ) cargo.rust_library( - name = "rust_decimal-1.34.2", - srcs = [":rust_decimal-1.34.2.crate"], + name = "rust_decimal-1.34.3", + srcs = [":rust_decimal-1.34.3.crate"], crate = "rust_decimal", - crate_root = "rust_decimal-1.34.2.crate/src/lib.rs", + crate_root = "rust_decimal-1.34.3.crate/src/lib.rs", edition = "2021", env = { - "OUT_DIR": "$(location :rust_decimal-1.34.2-build-script-run[out_dir])", + "OUT_DIR": "$(location 
:rust_decimal-1.34.3-build-script-run[out_dir])", }, features = [ "default", @@ -11174,10 +11327,10 @@ cargo.rust_library( ) cargo.rust_binary( - name = "rust_decimal-1.34.2-build-script-build", - srcs = [":rust_decimal-1.34.2.crate"], + name = "rust_decimal-1.34.3-build-script-build", + srcs = [":rust_decimal-1.34.3.crate"], crate = "build_script_build", - crate_root = "rust_decimal-1.34.2.crate/build.rs", + crate_root = "rust_decimal-1.34.3.crate/build.rs", edition = "2021", features = [ "default", @@ -11189,16 +11342,16 @@ cargo.rust_binary( ) buildscript_run( - name = "rust_decimal-1.34.2-build-script-run", + name = "rust_decimal-1.34.3-build-script-run", package_name = "rust_decimal", - buildscript_rule = ":rust_decimal-1.34.2-build-script-build", + buildscript_rule = ":rust_decimal-1.34.3-build-script-build", features = [ "default", "maths", "serde", "std", ], - version = "1.34.2", + version = "1.34.3", ) http_archive( @@ -11397,7 +11550,7 @@ cargo.rust_library( "tls12", ], named_deps = { - "pki_types": ":rustls-pki-types-1.2.0", + "pki_types": ":rustls-pki-types-1.3.0", }, visibility = [], deps = [ @@ -11467,48 +11620,48 @@ cargo.rust_library( alias( name = "rustls-pemfile", - actual = ":rustls-pemfile-2.0.0", + actual = ":rustls-pemfile-2.1.0", visibility = ["PUBLIC"], ) http_archive( - name = "rustls-pemfile-2.0.0.crate", - sha256 = "35e4980fa29e4c4b212ffb3db068a564cbf560e51d3944b7c88bd8bf5bec64f4", - strip_prefix = "rustls-pemfile-2.0.0", - urls = ["https://crates.io/api/v1/crates/rustls-pemfile/2.0.0/download"], + name = "rustls-pemfile-2.1.0.crate", + sha256 = "3c333bb734fcdedcea57de1602543590f545f127dc8b533324318fd492c5c70b", + strip_prefix = "rustls-pemfile-2.1.0", + urls = ["https://crates.io/api/v1/crates/rustls-pemfile/2.1.0/download"], visibility = [], ) cargo.rust_library( - name = "rustls-pemfile-2.0.0", - srcs = [":rustls-pemfile-2.0.0.crate"], + name = "rustls-pemfile-2.1.0", + srcs = [":rustls-pemfile-2.1.0.crate"], crate = "rustls_pemfile", 
- crate_root = "rustls-pemfile-2.0.0.crate/src/lib.rs", + crate_root = "rustls-pemfile-2.1.0.crate/src/lib.rs", edition = "2018", features = [ "default", "std", ], named_deps = { - "pki_types": ":rustls-pki-types-1.2.0", + "pki_types": ":rustls-pki-types-1.3.0", }, visibility = [], deps = [":base64-0.21.7"], ) http_archive( - name = "rustls-pki-types-1.2.0.crate", - sha256 = "0a716eb65e3158e90e17cd93d855216e27bde02745ab842f2cab4a39dba1bacf", - strip_prefix = "rustls-pki-types-1.2.0", - urls = ["https://crates.io/api/v1/crates/rustls-pki-types/1.2.0/download"], + name = "rustls-pki-types-1.3.0.crate", + sha256 = "048a63e5b3ac996d78d402940b5fa47973d2d080c6c6fffa1d0f19c4445310b7", + strip_prefix = "rustls-pki-types-1.3.0", + urls = ["https://crates.io/api/v1/crates/rustls-pki-types/1.3.0/download"], visibility = [], ) cargo.rust_library( - name = "rustls-pki-types-1.2.0", - srcs = [":rustls-pki-types-1.2.0.crate"], + name = "rustls-pki-types-1.3.0", + srcs = [":rustls-pki-types-1.3.0.crate"], crate = "rustls_pki_types", - crate_root = "rustls-pki-types-1.2.0.crate/src/lib.rs", + crate_root = "rustls-pki-types-1.3.0.crate/src/lib.rs", edition = "2021", features = [ "alloc", @@ -11564,7 +11717,7 @@ cargo.rust_library( "std", ], named_deps = { - "pki_types": ":rustls-pki-types-1.2.0", + "pki_types": ":rustls-pki-types-1.3.0", }, visibility = [], deps = [ @@ -11729,7 +11882,7 @@ cargo.rust_library( ":proc-macro-error-1.0.4", ":proc-macro2-1.0.78", ":quote-1.0.35", - ":syn-2.0.48", + ":syn-2.0.49", ], ) @@ -11782,11 +11935,11 @@ cargo.rust_library( ":async-stream-0.3.5", ":async-trait-0.1.77", ":bigdecimal-0.3.1", - ":chrono-0.4.33", + ":chrono-0.4.34", ":futures-0.3.30", ":log-0.4.20", ":ouroboros-0.17.2", - ":rust_decimal-1.34.2", + ":rust_decimal-1.34.3", ":sea-orm-macros-0.12.14", ":sea-query-0.30.7", ":sea-query-binder-0.5.0", @@ -11794,7 +11947,7 @@ cargo.rust_library( ":serde_json-1.0.113", ":sqlx-0.7.3", ":strum-0.25.0", - ":thiserror-1.0.56", + 
":thiserror-1.0.57", ":time-0.3.34", ":tracing-0.1.40", ":url-2.5.0", @@ -11831,7 +11984,7 @@ cargo.rust_library( ":heck-0.4.1", ":proc-macro2-1.0.78", ":quote-1.0.35", - ":syn-2.0.48", + ":syn-2.0.49", ":unicode-ident-1.0.12", ], ) @@ -11875,11 +12028,11 @@ cargo.rust_library( visibility = [], deps = [ ":bigdecimal-0.3.1", - ":chrono-0.4.33", + ":chrono-0.4.34", ":derivative-2.2.0", ":inherent-1.0.11", ":ordered-float-3.9.2", - ":rust_decimal-1.34.2", + ":rust_decimal-1.34.3", ":serde_json-1.0.113", ":time-0.3.34", ":uuid-1.7.0", @@ -11921,8 +12074,8 @@ cargo.rust_library( visibility = [], deps = [ ":bigdecimal-0.3.1", - ":chrono-0.4.33", - ":rust_decimal-1.34.2", + ":chrono-0.4.34", + ":rust_decimal-1.34.3", ":sea-query-0.30.7", ":serde_json-1.0.113", ":sqlx-0.7.3", @@ -12069,13 +12222,60 @@ cargo.rust_library( crate = "semver", crate_root = "semver-1.0.21.crate/src/lib.rs", edition = "2018", + env = { + "CARGO_MANIFEST_DIR": "semver-1.0.21.crate", + "CARGO_PKG_AUTHORS": "David Tolnay ", + "CARGO_PKG_DESCRIPTION": "Parser and evaluator for Cargo's flavor of Semantic Versioning", + "CARGO_PKG_NAME": "semver", + "CARGO_PKG_REPOSITORY": "https://github.com/dtolnay/semver", + "CARGO_PKG_VERSION": "1.0.21", + "CARGO_PKG_VERSION_MAJOR": "1", + "CARGO_PKG_VERSION_MINOR": "0", + "CARGO_PKG_VERSION_PATCH": "21", + }, features = [ "default", "std", ], + rustc_flags = ["@$(location :semver-1.0.21-build-script-run[rustc_flags])"], visibility = [], ) +cargo.rust_binary( + name = "semver-1.0.21-build-script-build", + srcs = [":semver-1.0.21.crate"], + crate = "build_script_build", + crate_root = "semver-1.0.21.crate/build.rs", + edition = "2018", + env = { + "CARGO_MANIFEST_DIR": "semver-1.0.21.crate", + "CARGO_PKG_AUTHORS": "David Tolnay ", + "CARGO_PKG_DESCRIPTION": "Parser and evaluator for Cargo's flavor of Semantic Versioning", + "CARGO_PKG_NAME": "semver", + "CARGO_PKG_REPOSITORY": "https://github.com/dtolnay/semver", + "CARGO_PKG_VERSION": "1.0.21", + 
"CARGO_PKG_VERSION_MAJOR": "1", + "CARGO_PKG_VERSION_MINOR": "0", + "CARGO_PKG_VERSION_PATCH": "21", + }, + features = [ + "default", + "std", + ], + visibility = [], +) + +buildscript_run( + name = "semver-1.0.21-build-script-run", + package_name = "semver", + buildscript_rule = ":semver-1.0.21-build-script-build", + features = [ + "default", + "std", + ], + version = "1.0.21", +) + alias( name = "serde", actual = ":serde-1.0.196", @@ -12134,7 +12334,7 @@ cargo.rust_library( ], visibility = [], deps = [ - ":chrono-0.4.33", + ":chrono-0.4.34", ":serde-1.0.196", ":serde_json-1.0.113", ], @@ -12160,7 +12360,7 @@ cargo.rust_library( deps = [ ":proc-macro2-1.0.78", ":quote-1.0.35", - ":syn-2.0.48", + ":syn-2.0.49", ], ) @@ -12194,7 +12394,7 @@ cargo.rust_library( ], visibility = [], deps = [ - ":indexmap-2.2.2", + ":indexmap-2.2.3", ":itoa-1.0.10", ":ryu-1.0.16", ":serde-1.0.196", @@ -12259,7 +12459,7 @@ cargo.rust_library( deps = [ ":proc-macro2-1.0.78", ":quote-1.0.35", - ":syn-2.0.48", + ":syn-2.0.49", ], ) @@ -12353,7 +12553,7 @@ cargo.rust_library( "std", ], named_deps = { - "chrono_0_4": ":chrono-0.4.33", + "chrono_0_4": ":chrono-0.4.34", "indexmap_1": ":indexmap-1.9.3", "time_0_3": ":time-0.3.34", }, @@ -12394,9 +12594,9 @@ cargo.rust_library( "std", ], named_deps = { - "chrono_0_4": ":chrono-0.4.33", + "chrono_0_4": ":chrono-0.4.34", "indexmap_1": ":indexmap-1.9.3", - "indexmap_2": ":indexmap-2.2.2", + "indexmap_2": ":indexmap-2.2.3", "time_0_3": ":time-0.3.34", }, visibility = [], @@ -12427,10 +12627,10 @@ cargo.rust_library( proc_macro = True, visibility = [], deps = [ - ":darling-0.20.5", + ":darling-0.20.6", ":proc-macro2-1.0.78", ":quote-1.0.35", - ":syn-2.0.48", + ":syn-2.0.49", ], ) @@ -12451,10 +12651,10 @@ cargo.rust_library( proc_macro = True, visibility = [], deps = [ - ":darling-0.20.5", + ":darling-0.20.6", ":proc-macro2-1.0.78", ":quote-1.0.35", - ":syn-2.0.48", + ":syn-2.0.49", ], ) @@ -12480,7 +12680,7 @@ cargo.rust_library( edition = "2021", 
visibility = [], deps = [ - ":indexmap-2.2.2", + ":indexmap-2.2.3", ":itoa-1.0.10", ":ryu-1.0.16", ":serde-1.0.196", @@ -13086,16 +13286,16 @@ cargo.rust_library( ], visibility = [], deps = [ - ":ahash-0.8.7", + ":ahash-0.8.8", ":atoi-2.0.0", ":bigdecimal-0.3.1", ":byteorder-1.5.0", ":bytes-1.5.0", - ":chrono-0.4.33", + ":chrono-0.4.34", ":crc-3.0.1", ":crossbeam-queue-0.3.11", ":dotenvy-0.15.7", - ":either-1.9.0", + ":either-1.10.0", ":event-listener-2.5.3", ":futures-channel-0.3.30", ":futures-core-0.3.30", @@ -13104,13 +13304,13 @@ cargo.rust_library( ":futures-util-0.3.30", ":hashlink-0.8.4", ":hex-0.4.3", - ":indexmap-2.2.2", + ":indexmap-2.2.3", ":log-0.4.20", ":memchr-2.7.1", ":once_cell-1.19.0", ":paste-1.0.14", ":percent-encoding-2.3.1", - ":rust_decimal-1.34.2", + ":rust_decimal-1.34.3", ":rustls-0.21.10", ":rustls-pemfile-1.0.4", ":serde-1.0.196", @@ -13118,7 +13318,7 @@ cargo.rust_library( ":sha2-0.10.8", ":smallvec-1.13.1", ":sqlformat-0.2.3", - ":thiserror-1.0.56", + ":thiserror-1.0.57", ":time-0.3.34", ":tokio-1.36.0", ":tokio-stream-0.1.14", @@ -13168,7 +13368,7 @@ cargo.rust_library( ":bigdecimal-0.3.1", ":bitflags-2.4.2", ":byteorder-1.5.0", - ":chrono-0.4.33", + ":chrono-0.4.34", ":crc-3.0.1", ":dotenvy-0.15.7", ":futures-channel-0.3.30", @@ -13186,7 +13386,7 @@ cargo.rust_library( ":num-bigint-0.4.4", ":once_cell-1.19.0", ":rand-0.8.5", - ":rust_decimal-1.34.2", + ":rust_decimal-1.34.3", ":serde-1.0.196", ":serde_json-1.0.113", ":sha1-0.10.6", @@ -13194,7 +13394,7 @@ cargo.rust_library( ":smallvec-1.13.1", ":sqlx-core-0.7.3", ":stringprep-0.1.4", - ":thiserror-1.0.56", + ":thiserror-1.0.57", ":time-0.3.34", ":tracing-0.1.40", ":uuid-1.7.0", @@ -13376,7 +13576,7 @@ cargo.rust_library( ":proc-macro2-1.0.78", ":quote-1.0.35", ":rustversion-1.0.14", - ":syn-2.0.48", + ":syn-2.0.49", ], ) @@ -13468,23 +13668,23 @@ cargo.rust_library( alias( name = "syn", - actual = ":syn-2.0.48", + actual = ":syn-2.0.49", visibility = ["PUBLIC"], ) http_archive( - 
name = "syn-2.0.48.crate", - sha256 = "0f3531638e407dfc0814761abb7c00a5b54992b849452a0646b7f65c9f770f3f", - strip_prefix = "syn-2.0.48", - urls = ["https://crates.io/api/v1/crates/syn/2.0.48/download"], + name = "syn-2.0.49.crate", + sha256 = "915aea9e586f80826ee59f8453c1101f9d1c4b3964cd2460185ee8e299ada496", + strip_prefix = "syn-2.0.49", + urls = ["https://crates.io/api/v1/crates/syn/2.0.49/download"], visibility = [], ) cargo.rust_library( - name = "syn-2.0.48", - srcs = [":syn-2.0.48.crate"], + name = "syn-2.0.49", + srcs = [":syn-2.0.49.crate"], crate = "syn", - crate_root = "syn-2.0.48.crate/src/lib.rs", + crate_root = "syn-2.0.49.crate/src/lib.rs", edition = "2021", features = [ "clone-impls", @@ -13561,6 +13761,18 @@ cargo.rust_library( crate = "system_configuration_sys", crate_root = "system-configuration-sys-0.5.0.crate/src/lib.rs", edition = "2021", + env = { + "CARGO_MANIFEST_DIR": "system-configuration-sys-0.5.0.crate", + "CARGO_PKG_AUTHORS": "Mullvad VPN", + "CARGO_PKG_DESCRIPTION": "Low level bindings to SystemConfiguration framework for macOS", + "CARGO_PKG_NAME": "system-configuration-sys", + "CARGO_PKG_REPOSITORY": "https://github.com/mullvad/system-configuration-rs", + "CARGO_PKG_VERSION": "0.5.0", + "CARGO_PKG_VERSION_MAJOR": "0", + "CARGO_PKG_VERSION_MINOR": "5", + "CARGO_PKG_VERSION_PATCH": "0", + }, + rustc_flags = ["@$(location :system-configuration-sys-0.5.0-build-script-run[rustc_flags])"], visibility = [], deps = [ ":core-foundation-sys-0.8.6", @@ -13568,6 +13780,33 @@ cargo.rust_library( ], ) +cargo.rust_binary( + name = "system-configuration-sys-0.5.0-build-script-build", + srcs = [":system-configuration-sys-0.5.0.crate"], + crate = "build_script_build", + crate_root = "system-configuration-sys-0.5.0.crate/build.rs", + edition = "2021", + env = { + "CARGO_MANIFEST_DIR": "system-configuration-sys-0.5.0.crate", + "CARGO_PKG_AUTHORS": "Mullvad VPN", + "CARGO_PKG_DESCRIPTION": "Low level bindings to SystemConfiguration framework for macOS", 
+ "CARGO_PKG_NAME": "system-configuration-sys", + "CARGO_PKG_REPOSITORY": "https://github.com/mullvad/system-configuration-rs", + "CARGO_PKG_VERSION": "0.5.0", + "CARGO_PKG_VERSION_MAJOR": "0", + "CARGO_PKG_VERSION_MINOR": "5", + "CARGO_PKG_VERSION_PATCH": "0", + }, + visibility = [], +) + +buildscript_run( + name = "system-configuration-sys-0.5.0-build-script-run", + package_name = "system-configuration-sys", + buildscript_rule = ":system-configuration-sys-0.5.0-build-script-build", + version = "0.5.0", +) + alias( name = "tar", actual = ":tar-0.4.40", @@ -13775,7 +14014,7 @@ cargo.rust_library( deps = [ ":proc-macro2-1.0.78", ":quote-1.0.35", - ":syn-2.0.48", + ":syn-2.0.49", ], ) @@ -13795,9 +14034,9 @@ cargo.rust_binary( ":blake3-1.5.0", ":bollard-0.15.0", ":bytes-1.5.0", - ":chrono-0.4.33", + ":chrono-0.4.34", ":ciborium-0.2.2", - ":clap-4.5.0", + ":clap-4.5.1", ":color-eyre-0.6.2", ":colored-2.1.0", ":comfy-table-7.1.0", @@ -13821,8 +14060,8 @@ cargo.rust_binary( ":hyper-0.14.28", ":hyperlocal-0.8.0", ":iftree-1.0.4", - ":indexmap-2.2.2", - ":indicatif-0.17.7", + ":indexmap-2.2.3", + ":indicatif-0.17.8", ":indoc-2.0.4", ":inquire-0.6.2", ":itertools-0.12.1", @@ -13844,6 +14083,7 @@ cargo.rust_binary( ":petgraph-0.6.4", ":pin-project-lite-0.2.13", ":podman-api-0.10.0", + ":postcard-1.0.8", ":postgres-types-0.2.6", ":pretty_assertions_sorted-1.2.3", ":proc-macro2-1.0.78", @@ -13856,7 +14096,7 @@ cargo.rust_binary( ":ring-0.17.5", ":rust-s3-0.34.0-rc4", ":rustls-0.22.2", - ":rustls-pemfile-2.0.0", + ":rustls-pemfile-2.1.0", ":sea-orm-0.12.14", ":self-replace-1.3.7", ":serde-1.0.196", @@ -13868,11 +14108,11 @@ cargo.rust_binary( ":sodiumoxide-0.2.7", ":stream-cancel-0.8.2", ":strum-0.25.0", - ":syn-2.0.48", + ":syn-2.0.49", ":tar-0.4.40", ":tempfile-3.10.0", ":test-log-0.2.14", - ":thiserror-1.0.56", + ":thiserror-1.0.57", ":tokio-1.36.0", ":tokio-postgres-0.7.10", ":tokio-postgres-rustls-0.11.1", @@ -13901,48 +14141,48 @@ cargo.rust_binary( alias( name = 
"thiserror", - actual = ":thiserror-1.0.56", + actual = ":thiserror-1.0.57", visibility = ["PUBLIC"], ) http_archive( - name = "thiserror-1.0.56.crate", - sha256 = "d54378c645627613241d077a3a79db965db602882668f9136ac42af9ecb730ad", - strip_prefix = "thiserror-1.0.56", - urls = ["https://crates.io/api/v1/crates/thiserror/1.0.56/download"], + name = "thiserror-1.0.57.crate", + sha256 = "1e45bcbe8ed29775f228095caf2cd67af7a4ccf756ebff23a306bf3e8b47b24b", + strip_prefix = "thiserror-1.0.57", + urls = ["https://crates.io/api/v1/crates/thiserror/1.0.57/download"], visibility = [], ) cargo.rust_library( - name = "thiserror-1.0.56", - srcs = [":thiserror-1.0.56.crate"], + name = "thiserror-1.0.57", + srcs = [":thiserror-1.0.57.crate"], crate = "thiserror", - crate_root = "thiserror-1.0.56.crate/src/lib.rs", + crate_root = "thiserror-1.0.57.crate/src/lib.rs", edition = "2021", visibility = [], - deps = [":thiserror-impl-1.0.56"], + deps = [":thiserror-impl-1.0.57"], ) http_archive( - name = "thiserror-impl-1.0.56.crate", - sha256 = "fa0faa943b50f3db30a20aa7e265dbc66076993efed8463e8de414e5d06d3471", - strip_prefix = "thiserror-impl-1.0.56", - urls = ["https://crates.io/api/v1/crates/thiserror-impl/1.0.56/download"], + name = "thiserror-impl-1.0.57.crate", + sha256 = "a953cb265bef375dae3de6663da4d3804eee9682ea80d8e2542529b73c531c81", + strip_prefix = "thiserror-impl-1.0.57", + urls = ["https://crates.io/api/v1/crates/thiserror-impl/1.0.57/download"], visibility = [], ) cargo.rust_library( - name = "thiserror-impl-1.0.56", - srcs = [":thiserror-impl-1.0.56.crate"], + name = "thiserror-impl-1.0.57", + srcs = [":thiserror-impl-1.0.57.crate"], crate = "thiserror_impl", - crate_root = "thiserror-impl-1.0.56.crate/src/lib.rs", + crate_root = "thiserror-impl-1.0.57.crate/src/lib.rs", edition = "2021", proc_macro = True, visibility = [], deps = [ ":proc-macro2-1.0.78", ":quote-1.0.35", - ":syn-2.0.48", + ":syn-2.0.49", ], ) @@ -14253,7 +14493,7 @@ cargo.rust_library( deps = [ 
":proc-macro2-1.0.78", ":quote-1.0.35", - ":syn-2.0.48", + ":syn-2.0.49", ], ) @@ -14411,7 +14651,7 @@ cargo.rust_library( crate_root = "tokio-rustls-0.25.0.crate/src/lib.rs", edition = "2021", named_deps = { - "pki_types": ":rustls-pki-types-1.2.0", + "pki_types": ":rustls-pki-types-1.3.0", }, visibility = [], deps = [ @@ -14707,7 +14947,7 @@ cargo.rust_library( ":serde-1.0.196", ":serde_spanned-0.6.5", ":toml_datetime-0.6.5", - ":toml_edit-0.22.4", + ":toml_edit-0.22.6", ], ) @@ -14750,27 +14990,27 @@ cargo.rust_library( ], visibility = [], deps = [ - ":indexmap-2.2.2", + ":indexmap-2.2.3", ":serde-1.0.196", ":serde_spanned-0.6.5", ":toml_datetime-0.6.5", - ":winnow-0.5.39", + ":winnow-0.5.40", ], ) http_archive( - name = "toml_edit-0.22.4.crate", - sha256 = "0c9ffdf896f8daaabf9b66ba8e77ea1ed5ed0f72821b398aba62352e95062951", - strip_prefix = "toml_edit-0.22.4", - urls = ["https://crates.io/api/v1/crates/toml_edit/0.22.4/download"], + name = "toml_edit-0.22.6.crate", + sha256 = "2c1b5fd4128cc8d3e0cb74d4ed9a9cc7c7284becd4df68f5f940e1ad123606f6", + strip_prefix = "toml_edit-0.22.6", + urls = ["https://crates.io/api/v1/crates/toml_edit/0.22.6/download"], visibility = [], ) cargo.rust_library( - name = "toml_edit-0.22.4", - srcs = [":toml_edit-0.22.4.crate"], + name = "toml_edit-0.22.6", + srcs = [":toml_edit-0.22.6.crate"], crate = "toml_edit", - crate_root = "toml_edit-0.22.4.crate/src/lib.rs", + crate_root = "toml_edit-0.22.6.crate/src/lib.rs", edition = "2021", features = [ "display", @@ -14779,11 +15019,11 @@ cargo.rust_library( ], visibility = [], deps = [ - ":indexmap-2.2.2", + ":indexmap-2.2.3", ":serde-1.0.196", ":serde_spanned-0.6.5", ":toml_datetime-0.6.5", - ":winnow-0.5.39", + ":winnow-0.6.1", ], ) @@ -15045,7 +15285,7 @@ cargo.rust_library( deps = [ ":proc-macro2-1.0.78", ":quote-1.0.35", - ":syn-2.0.48", + ":syn-2.0.49", ], ) @@ -15304,7 +15544,7 @@ cargo.rust_library( ":log-0.4.20", ":rand-0.8.5", ":sha1-0.10.6", - ":thiserror-1.0.56", + 
":thiserror-1.0.57", ":url-2.5.0", ":utf-8-0.7.6", ], @@ -16303,18 +16543,41 @@ cargo.rust_library( ) http_archive( - name = "winnow-0.5.39.crate", - sha256 = "5389a154b01683d28c77f8f68f49dea75f0a4da32557a58f68ee51ebba472d29", - strip_prefix = "winnow-0.5.39", - urls = ["https://crates.io/api/v1/crates/winnow/0.5.39/download"], + name = "winnow-0.5.40.crate", + sha256 = "f593a95398737aeed53e489c785df13f3618e41dbcd6718c6addbf1395aa6876", + strip_prefix = "winnow-0.5.40", + urls = ["https://crates.io/api/v1/crates/winnow/0.5.40/download"], + visibility = [], +) + +cargo.rust_library( + name = "winnow-0.5.40", + srcs = [":winnow-0.5.40.crate"], + crate = "winnow", + crate_root = "winnow-0.5.40.crate/src/lib.rs", + edition = "2021", + features = [ + "alloc", + "default", + "std", + ], + visibility = [], + deps = [":memchr-2.7.1"], +) + +http_archive( + name = "winnow-0.6.1.crate", + sha256 = "d90f4e0f530c4c69f62b80d839e9ef3855edc9cba471a160c4d692deed62b401", + strip_prefix = "winnow-0.6.1", + urls = ["https://crates.io/api/v1/crates/winnow/0.6.1/download"], visibility = [], ) cargo.rust_library( - name = "winnow-0.5.39", - srcs = [":winnow-0.5.39.crate"], + name = "winnow-0.6.1", + srcs = [":winnow-0.6.1.crate"], crate = "winnow", - crate_root = "winnow-0.5.39.crate/src/lib.rs", + crate_root = "winnow-0.6.1.crate/src/lib.rs", edition = "2021", features = [ "alloc", @@ -16364,14 +16627,14 @@ cargo.rust_library( deps = [ ":bcder-0.7.4", ":bytes-1.5.0", - ":chrono-0.4.33", + ":chrono-0.4.34", ":der-0.7.8", ":hex-0.4.3", ":pem-3.0.3", ":ring-0.17.5", ":signature-2.2.0", ":spki-0.7.3", - ":thiserror-1.0.56", + ":thiserror-1.0.57", ":zeroize-1.7.0", ], ) @@ -16430,7 +16693,7 @@ cargo.rust_library( visibility = [], deps = [ ":futures-util-0.3.30", - ":thiserror-1.0.56", + ":thiserror-1.0.57", ":tokio-1.36.0", ":yrs-0.17.4", ], @@ -16503,7 +16766,7 @@ cargo.rust_library( ":serde_json-1.0.113", ":smallstr-0.3.0", ":smallvec-1.13.1", - ":thiserror-1.0.56", + ":thiserror-1.0.57", 
], ) @@ -16579,6 +16842,6 @@ cargo.rust_library( deps = [ ":proc-macro2-1.0.78", ":quote-1.0.35", - ":syn-2.0.48", + ":syn-2.0.49", ], ) diff --git a/third-party/rust/Cargo.lock b/third-party/rust/Cargo.lock index fadb8ff340..828090c178 100644 --- a/third-party/rust/Cargo.lock +++ b/third-party/rust/Cargo.lock @@ -19,9 +19,9 @@ checksum = "f26201604c87b1e01bd3d98f8d5d9a8fcbb815e8cedb41ffccbeb4bf593a35fe" [[package]] name = "ahash" -version = "0.7.7" +version = "0.7.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5a824f2aa7e75a0c98c5a504fceb80649e9c35265d44525b5f94de4771a395cd" +checksum = "891477e0c6a8957309ee5c45a6368af3ae14bb510732d2684ffa19af310920f9" dependencies = [ "getrandom 0.2.12", "once_cell", @@ -30,9 +30,9 @@ dependencies = [ [[package]] name = "ahash" -version = "0.8.7" +version = "0.8.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "77c3a9648d43b9cd48db467b3f87fdd6e146bcc88ab0180006cef2179fe11d01" +checksum = "42cd52102d3df161c77a887b608d7a4897d7cc112886a9537b738a887a03aaff" dependencies = [ "cfg-if", "getrandom 0.2.12", @@ -214,7 +214,7 @@ checksum = "5fd55a5ba1179988837d24ab4c7cc8ed6efdeff578ede0416b4225a5fca35bd0" dependencies = [ "proc-macro2", "quote", - "syn 2.0.48", + "syn 2.0.49", ] [[package]] @@ -236,7 +236,7 @@ checksum = "16e62a023e7c117e27523144c5d2459f4397fcc3cab0085af8e2224f643a0193" dependencies = [ "proc-macro2", "quote", - "syn 2.0.48", + "syn 2.0.49", ] [[package]] @@ -247,7 +247,7 @@ checksum = "c980ee35e870bd1a4d2c8294d4c04d0499e67bca1e4b5cefcc693c2fa00caea9" dependencies = [ "proc-macro2", "quote", - "syn 2.0.48", + "syn 2.0.49", ] [[package]] @@ -272,6 +272,15 @@ dependencies = [ "num-traits", ] +[[package]] +name = "atomic-polyfill" +version = "1.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8cf2bce30dfe09ef0bfaef228b9d414faaf7e563035494d7fe092dba54b300f4" +dependencies = [ + "critical-section", +] + [[package]] name = 
"atomic-write-file" version = "0.1.2" @@ -396,7 +405,7 @@ dependencies = [ "heck", "proc-macro2", "quote", - "syn 2.0.48", + "syn 2.0.49", ] [[package]] @@ -585,7 +594,7 @@ dependencies = [ "proc-macro-crate", "proc-macro2", "quote", - "syn 2.0.48", + "syn 2.0.49", "syn_derive", ] @@ -622,9 +631,9 @@ dependencies = [ [[package]] name = "bumpalo" -version = "3.14.0" +version = "3.15.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7f30e7476521f6f8af1a1c4c0b8cc94f0bee37d91763d0ca2665f299b6cd8aec" +checksum = "d32a994c2b3ca201d9b263612a374263f05e7adde37c4707f693dcd375076d1f" [[package]] name = "bytecheck" @@ -692,9 +701,9 @@ checksum = "fd16c4719339c4530435d38e511904438d07cce7950afa3718a84ac36c10e89e" [[package]] name = "chrono" -version = "0.4.33" +version = "0.4.34" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9f13690e35a5e4ace198e7beea2895d29f3a9cc55015fcebe6336bd2010af9eb" +checksum = "5bc015644b92d5890fab7489e49d21f879d5c990186827d42ec511919404f38b" dependencies = [ "android-tzdata", "iana-time-zone", @@ -734,9 +743,9 @@ dependencies = [ [[package]] name = "clap" -version = "4.5.0" +version = "4.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "80c21025abd42669a92efc996ef13cfb2c5c627858421ea58d5c3b331a6c134f" +checksum = "c918d541ef2913577a0f9566e9ce27cb35b6df072075769e0b26cb5a554520da" dependencies = [ "clap_builder", "clap_derive", @@ -744,9 +753,9 @@ dependencies = [ [[package]] name = "clap_builder" -version = "4.5.0" +version = "4.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "458bf1f341769dfcf849846f65dffdf9146daa56bcd2a47cb4e1de9915567c99" +checksum = "9f3e7391dad68afb0c2ede1bf619f579a3dc9c2ec67f089baa397123a2f3d1eb" dependencies = [ "anstream", "anstyle", @@ -764,7 +773,7 @@ dependencies = [ "heck", "proc-macro2", "quote", - "syn 2.0.48", + "syn 2.0.49", ] [[package]] @@ -784,6 +793,12 @@ dependencies = [ "wasm-bindgen", ] 
+[[package]] +name = "cobs" +version = "0.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "67ba02a97a2bd10f4b59b25c7973101c79642302776489e030cd13cdab09ed15" + [[package]] name = "color-eyre" version = "0.6.2" @@ -1004,13 +1019,19 @@ checksum = "19d374276b40fb8bbdee95aef7c7fa6b5316ec764510eb64b8dd0e2ed0d7e7f5" [[package]] name = "crc32fast" -version = "1.3.2" +version = "1.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b540bd8bc810d3885c6ea91e2018302f68baba2129ab3e88f32389ee9370880d" +checksum = "b3855a8a784b474f333699ef2bbca9db2c4a1f6d9088a90a2d25b1eb53111eaa" dependencies = [ "cfg-if", ] +[[package]] +name = "critical-section" +version = "1.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7059fff8937831a9ae6f0fe4d658ffabf58f2ca96aa9dec1c889f936f705f216" + [[package]] name = "crossbeam-channel" version = "0.5.11" @@ -1150,7 +1171,7 @@ checksum = "f46882e17999c6cc590af592290432be3bce0428cb0d5f8b6715e4dc7b383eb3" dependencies = [ "proc-macro2", "quote", - "syn 2.0.48", + "syn 2.0.49", ] [[package]] @@ -1165,12 +1186,12 @@ dependencies = [ [[package]] name = "darling" -version = "0.20.5" +version = "0.20.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fc5d6b04b3fd0ba9926f945895de7d806260a2d7431ba82e7edaecb043c4c6b8" +checksum = "c376d08ea6aa96aafe61237c7200d1241cb177b7d3a542d791f2d118e9cbb955" dependencies = [ - "darling_core 0.20.5", - "darling_macro 0.20.5", + "darling_core 0.20.6", + "darling_macro 0.20.6", ] [[package]] @@ -1189,16 +1210,16 @@ dependencies = [ [[package]] name = "darling_core" -version = "0.20.5" +version = "0.20.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "04e48a959bcd5c761246f5d090ebc2fbf7b9cd527a492b07a67510c108f1e7e3" +checksum = "33043dcd19068b8192064c704b3f83eb464f91f1ff527b44a4e2b08d9cdb8855" dependencies = [ "fnv", "ident_case", "proc-macro2", "quote", "strsim 
0.10.0", - "syn 2.0.48", + "syn 2.0.49", ] [[package]] @@ -1214,13 +1235,13 @@ dependencies = [ [[package]] name = "darling_macro" -version = "0.20.5" +version = "0.20.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1d1545d67a2149e1d93b7e5c7752dce5a7426eb5d1357ddcfd89336b94444f77" +checksum = "c5a91391accf613803c2a9bf9abccdbaa07c54b4244a5b64883f9c3c137c86be" dependencies = [ - "darling_core 0.20.5", + "darling_core 0.20.6", "quote", - "syn 2.0.48", + "syn 2.0.49", ] [[package]] @@ -1514,9 +1535,9 @@ dependencies = [ [[package]] name = "either" -version = "1.9.0" +version = "1.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a26ae43d7bcc3b814de94796a5e736d4029efb0ee900c12e2d54c993ad1a1e07" +checksum = "11157ac094ffbdde99aa67b23417ebdd801842852b500e395a45a9c0aac03e4a" dependencies = [ "serde", ] @@ -1542,6 +1563,12 @@ dependencies = [ "zeroize", ] +[[package]] +name = "embedded-io" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ef1a6892d9eef45c8fa6b9e0086428a2cca8491aca8f787c534a3d6d0bcb3ced" + [[package]] name = "encode_unicode" version = "0.3.6" @@ -1567,7 +1594,7 @@ dependencies = [ "num-traits", "proc-macro2", "quote", - "syn 2.0.48", + "syn 2.0.49", ] [[package]] @@ -1796,7 +1823,7 @@ checksum = "87750cf4b7a4c0625b1529e4c543c2182106e4dedc60a2a6455e00d212c489ac" dependencies = [ "proc-macro2", "quote", - "syn 2.0.48", + "syn 2.0.49", ] [[package]] @@ -1926,7 +1953,7 @@ dependencies = [ "futures-sink", "futures-util", "http", - "indexmap 2.2.2", + "indexmap 2.2.3", "slab", "tokio", "tokio-util", @@ -1943,13 +1970,22 @@ dependencies = [ "crunchy", ] +[[package]] +name = "hash32" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b0c35f58762feb77d74ebe43bdbc3210f09be9fe6742234d573bacc26ed92b67" +dependencies = [ + "byteorder", +] + [[package]] name = "hashbrown" version = "0.12.3" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "8a9ee70c43aaf417c914396645a0fa852624801b24ebb7ae78fe8272889ac888" dependencies = [ - "ahash 0.7.7", + "ahash 0.7.8", ] [[package]] @@ -1964,7 +2000,7 @@ version = "0.14.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "290f1a1d9242c78d09ce40a5e87e7554ee637af1351968159f4952f028f75604" dependencies = [ - "ahash 0.8.7", + "ahash 0.8.8", "allocator-api2", ] @@ -1977,6 +2013,20 @@ dependencies = [ "hashbrown 0.14.3", ] +[[package]] +name = "heapless" +version = "0.7.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cdc6457c0eb62c71aac4bc17216026d8410337c4126773b9c5daba343f17964f" +dependencies = [ + "atomic-polyfill", + "hash32", + "rustc_version", + "serde", + "spin 0.9.8", + "stable_deref_trait", +] + [[package]] name = "heck" version = "0.4.1" @@ -1988,9 +2038,9 @@ dependencies = [ [[package]] name = "hermit-abi" -version = "0.3.5" +version = "0.3.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d0c62115964e08cb8039170eb33c1d0e2388a256930279edca206fff675f82c3" +checksum = "bd5256b483761cd23699d0da46cc6fd2ee3be420bbe6d020ae4a091e70b7e9fd" [[package]] name = "hex" @@ -2249,9 +2299,9 @@ dependencies = [ [[package]] name = "indexmap" -version = "2.2.2" +version = "2.2.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "824b2ae422412366ba479e8111fd301f7b5faece8149317bb81925979a53f520" +checksum = "233cf39063f058ea2caae4091bf4a3ef70a653afbc026f5c4a4135d114e3c177" dependencies = [ "equivalent", "hashbrown 0.14.3", @@ -2260,9 +2310,9 @@ dependencies = [ [[package]] name = "indicatif" -version = "0.17.7" +version = "0.17.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fb28741c9db9a713d93deb3bb9515c20788cef5815265bee4980e87bde7e0f25" +checksum = "763a5a8f45087d6bcea4222e7b72c291a054edf80e4ef6efd2a4979878c7bea3" dependencies = [ "console", "instant", @@ 
-2285,7 +2335,7 @@ checksum = "0122b7114117e64a63ac49f752a5ca4624d534c7b1c7de796ac196381cd2d947" dependencies = [ "proc-macro2", "quote", - "syn 2.0.48", + "syn 2.0.49", ] [[package]] @@ -2511,7 +2561,7 @@ checksum = "afc95a651c82daf7004c824405aa1019723644950d488571bd718e3ed84646ed" dependencies = [ "proc-macro2", "quote", - "syn 2.0.48", + "syn 2.0.49", ] [[package]] @@ -2838,7 +2888,7 @@ checksum = "1e32339a5dc40459130b3bd269e9892439f55b33e772d2a9d402a789baaf4e8a" dependencies = [ "futures-core", "futures-sink", - "indexmap 2.2.2", + "indexmap 2.2.3", "js-sys", "once_cell", "pin-project-lite", @@ -2983,7 +3033,7 @@ dependencies = [ "proc-macro-error", "proc-macro2", "quote", - "syn 2.0.48", + "syn 2.0.49", ] [[package]] @@ -2997,7 +3047,7 @@ dependencies = [ "proc-macro2", "proc-macro2-diagnostics", "quote", - "syn 2.0.48", + "syn 2.0.49", ] [[package]] @@ -3109,7 +3159,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e1d3afd2628e69da2be385eb6f2fd57c8ac7977ceeff6dc166ff1657b0e386a9" dependencies = [ "fixedbitset", - "indexmap 2.2.2", + "indexmap 2.2.3", "serde", "serde_derive", ] @@ -3169,7 +3219,7 @@ checksum = "266c042b60c9c76b8d53061e52b2e0d1116abc57cefc8c5cd671619a56ac3690" dependencies = [ "proc-macro2", "quote", - "syn 2.0.48", + "syn 2.0.49", ] [[package]] @@ -3207,9 +3257,9 @@ dependencies = [ [[package]] name = "pkg-config" -version = "0.3.29" +version = "0.3.30" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2900ede94e305130c13ddd391e0ab7cbaeb783945ae07a279c268cb05109c6cb" +checksum = "d231b230927b5e4ad203db57bbcbee2802f6bce620b1e4a9024a07d94e2907ec" [[package]] name = "platforms" @@ -3259,6 +3309,18 @@ version = "1.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7170ef9988bc169ba16dd36a7fa041e5c4cbeb6a35b76d4c03daded371eae7c0" +[[package]] +name = "postcard" +version = "1.0.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"a55c51ee6c0db07e68448e336cf8ea4131a620edefebf9893e759b2d793420f8" +dependencies = [ + "cobs", + "embedded-io", + "heapless", + "serde", +] + [[package]] name = "postgres-derive" version = "0.4.5" @@ -3268,7 +3330,7 @@ dependencies = [ "heck", "proc-macro2", "quote", - "syn 2.0.48", + "syn 2.0.49", ] [[package]] @@ -3395,7 +3457,7 @@ checksum = "af066a9c399a26e020ada66a034357a868728e72cd426f3adcd35f80d88d88c8" dependencies = [ "proc-macro2", "quote", - "syn 2.0.48", + "syn 2.0.49", "version_check", "yansi 1.0.0-rc.1", ] @@ -3600,7 +3662,7 @@ dependencies = [ "quote", "refinery-core", "regex", - "syn 2.0.48", + "syn 2.0.49", ] [[package]] @@ -3655,7 +3717,7 @@ checksum = "1ad5e011230cad274d0532460c5ab69828ea47ae75681b42a841663efffaf794" dependencies = [ "proc-macro2", "quote", - "syn 2.0.48", + "syn 2.0.49", ] [[package]] @@ -3831,9 +3893,9 @@ dependencies = [ [[package]] name = "rust_decimal" -version = "1.34.2" +version = "1.34.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "755392e1a2f77afd95580d3f0d0e94ac83eeeb7167552c9b5bca549e61a94d83" +checksum = "b39449a79f45e8da28c57c341891b69a183044b29518bb8f86dbac9df60bb7df" dependencies = [ "arrayvec", "borsh", @@ -3922,9 +3984,9 @@ dependencies = [ [[package]] name = "rustls-pemfile" -version = "2.0.0" +version = "2.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "35e4980fa29e4c4b212ffb3db068a564cbf560e51d3944b7c88bd8bf5bec64f4" +checksum = "3c333bb734fcdedcea57de1602543590f545f127dc8b533324318fd492c5c70b" dependencies = [ "base64 0.21.7", "rustls-pki-types", @@ -3932,9 +3994,9 @@ dependencies = [ [[package]] name = "rustls-pki-types" -version = "1.2.0" +version = "1.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0a716eb65e3158e90e17cd93d855216e27bde02745ab842f2cab4a39dba1bacf" +checksum = "048a63e5b3ac996d78d402940b5fa47973d2d080c6c6fffa1d0f19c4445310b7" [[package]] name = "rustls-webpki" @@ -4013,7 +4075,7 @@ 
dependencies = [ "proc-macro-error", "proc-macro2", "quote", - "syn 2.0.48", + "syn 2.0.49", ] [[package]] @@ -4054,7 +4116,7 @@ dependencies = [ "proc-macro2", "quote", "sea-bae", - "syn 2.0.48", + "syn 2.0.49", "unicode-ident", ] @@ -4179,7 +4241,7 @@ checksum = "33c85360c95e7d137454dc81d9a4ed2b8efd8fbe19cee57357b32b9771fccb67" dependencies = [ "proc-macro2", "quote", - "syn 2.0.48", + "syn 2.0.49", ] [[package]] @@ -4188,7 +4250,7 @@ version = "1.0.113" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "69801b70b1c3dac963ecb03a364ba0ceda9cf60c71cfe475e99864759c8b8a79" dependencies = [ - "indexmap 2.2.2", + "indexmap 2.2.3", "itoa", "ryu", "serde", @@ -4221,7 +4283,7 @@ checksum = "0b2e6b945e9d3df726b65d6ee24060aff8e3533d431f677a9695db04eff9dfdb" dependencies = [ "proc-macro2", "quote", - "syn 2.0.48", + "syn 2.0.49", ] [[package]] @@ -4281,7 +4343,7 @@ dependencies = [ "chrono", "hex", "indexmap 1.9.3", - "indexmap 2.2.2", + "indexmap 2.2.3", "serde", "serde_derive", "serde_json", @@ -4295,10 +4357,10 @@ version = "2.3.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "881b6f881b17d13214e5d494c939ebab463d01264ce1811e9d4ac3a882e7695f" dependencies = [ - "darling 0.20.5", + "darling 0.20.6", "proc-macro2", "quote", - "syn 2.0.48", + "syn 2.0.49", ] [[package]] @@ -4307,10 +4369,10 @@ version = "3.6.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "865f9743393e638991566a8b7a479043c2c8da94a33e0a31f18214c9cae0a64d" dependencies = [ - "darling 0.20.5", + "darling 0.20.6", "proc-macro2", "quote", - "syn 2.0.48", + "syn 2.0.49", ] [[package]] @@ -4319,7 +4381,7 @@ version = "0.9.31" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "adf8a49373e98a4c5f0ceb5d05aa7c648d75f63774981ed95b7c7443bbd50c6e" dependencies = [ - "indexmap 2.2.2", + "indexmap 2.2.3", "itoa", "ryu", "serde", @@ -4534,7 +4596,7 @@ version = "0.7.3" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "d84b0a3c3739e220d94b3239fd69fb1f74bc36e16643423bd99de3b43c21bfbd" dependencies = [ - "ahash 0.8.7", + "ahash 0.8.8", "atoi", "bigdecimal", "byteorder", @@ -4552,7 +4614,7 @@ dependencies = [ "futures-util", "hashlink", "hex", - "indexmap 2.2.2", + "indexmap 2.2.3", "log", "memchr", "once_cell", @@ -4799,7 +4861,7 @@ dependencies = [ "proc-macro2", "quote", "rustversion", - "syn 2.0.48", + "syn 2.0.49", ] [[package]] @@ -4834,9 +4896,9 @@ dependencies = [ [[package]] name = "syn" -version = "2.0.48" +version = "2.0.49" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0f3531638e407dfc0814761abb7c00a5b54992b849452a0646b7f65c9f770f3f" +checksum = "915aea9e586f80826ee59f8453c1101f9d1c4b3964cd2460185ee8e299ada496" dependencies = [ "proc-macro2", "quote", @@ -4852,7 +4914,7 @@ dependencies = [ "proc-macro-error", "proc-macro2", "quote", - "syn 2.0.48", + "syn 2.0.49", ] [[package]] @@ -4948,7 +5010,7 @@ checksum = "7ba277e77219e9eea169e8508942db1bf5d8a41ff2db9b20aab5a5aadc9fa25d" dependencies = [ "proc-macro2", "quote", - "syn 2.0.48", + "syn 2.0.49", ] [[package]] @@ -4989,7 +5051,7 @@ dependencies = [ "hyper", "hyperlocal", "iftree", - "indexmap 2.2.2", + "indexmap 2.2.3", "indicatif", "indoc", "inquire", @@ -5012,6 +5074,7 @@ dependencies = [ "petgraph", "pin-project-lite", "podman-api", + "postcard", "postgres-types", "pretty_assertions_sorted", "proc-macro2", @@ -5024,7 +5087,7 @@ dependencies = [ "ring", "rust-s3", "rustls 0.22.2", - "rustls-pemfile 2.0.0", + "rustls-pemfile 2.1.0", "sea-orm", "self-replace", "serde", @@ -5036,7 +5099,7 @@ dependencies = [ "sodiumoxide", "stream-cancel", "strum", - "syn 2.0.48", + "syn 2.0.49", "tar", "tempfile", "test-log", @@ -5068,22 +5131,22 @@ dependencies = [ [[package]] name = "thiserror" -version = "1.0.56" +version = "1.0.57" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"d54378c645627613241d077a3a79db965db602882668f9136ac42af9ecb730ad" +checksum = "1e45bcbe8ed29775f228095caf2cd67af7a4ccf756ebff23a306bf3e8b47b24b" dependencies = [ "thiserror-impl", ] [[package]] name = "thiserror-impl" -version = "1.0.56" +version = "1.0.57" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fa0faa943b50f3db30a20aa7e265dbc66076993efed8463e8de414e5d06d3471" +checksum = "a953cb265bef375dae3de6663da4d3804eee9682ea80d8e2542529b73c531c81" dependencies = [ "proc-macro2", "quote", - "syn 2.0.48", + "syn 2.0.49", ] [[package]] @@ -5188,7 +5251,7 @@ checksum = "5b8a1e28f2deaa14e508979454cb3a223b10b938b45af148bc0986de36f1923b" dependencies = [ "proc-macro2", "quote", - "syn 2.0.48", + "syn 2.0.49", ] [[package]] @@ -5373,7 +5436,7 @@ dependencies = [ "serde", "serde_spanned", "toml_datetime", - "toml_edit 0.22.4", + "toml_edit 0.22.6", ] [[package]] @@ -5391,11 +5454,11 @@ version = "0.19.15" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1b5bb770da30e5cbfde35a2d7b9b8a2c4b8ef89548a7a6aeab5c9a576e3e7421" dependencies = [ - "indexmap 2.2.2", + "indexmap 2.2.3", "serde", "serde_spanned", "toml_datetime", - "winnow", + "winnow 0.5.40", ] [[package]] @@ -5404,22 +5467,22 @@ version = "0.21.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6a8534fd7f78b5405e860340ad6575217ce99f38d4d5c8f2442cb5ecb50090e1" dependencies = [ - "indexmap 2.2.2", + "indexmap 2.2.3", "toml_datetime", - "winnow", + "winnow 0.5.40", ] [[package]] name = "toml_edit" -version = "0.22.4" +version = "0.22.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0c9ffdf896f8daaabf9b66ba8e77ea1ed5ed0f72821b398aba62352e95062951" +checksum = "2c1b5fd4128cc8d3e0cb74d4ed9a9cc7c7284becd4df68f5f940e1ad123606f6" dependencies = [ - "indexmap 2.2.2", + "indexmap 2.2.3", "serde", "serde_spanned", "toml_datetime", - "winnow", + "winnow 0.6.1", ] [[package]] @@ -5524,7 +5587,7 @@ checksum = 
"34704c8d6ebcbc939824180af020566b01a7c01f80641264eba0999f6c2b6be7" dependencies = [ "proc-macro2", "quote", - "syn 2.0.48", + "syn 2.0.49", ] [[package]] @@ -5869,7 +5932,7 @@ dependencies = [ "once_cell", "proc-macro2", "quote", - "syn 2.0.48", + "syn 2.0.49", "wasm-bindgen-shared", ] @@ -5903,7 +5966,7 @@ checksum = "642f325be6301eb8107a83d12a8ac6c1e1c54345a7ef1a9261962dfefda09e66" dependencies = [ "proc-macro2", "quote", - "syn 2.0.48", + "syn 2.0.49", "wasm-bindgen-backend", "wasm-bindgen-shared", ] @@ -6134,9 +6197,18 @@ checksum = "dff9641d1cd4be8d1a070daf9e3773c5f67e78b4d9d42263020c057706765c04" [[package]] name = "winnow" -version = "0.5.39" +version = "0.5.40" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f593a95398737aeed53e489c785df13f3618e41dbcd6718c6addbf1395aa6876" +dependencies = [ + "memchr", +] + +[[package]] +name = "winnow" +version = "0.6.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5389a154b01683d28c77f8f68f49dea75f0a4da32557a58f68ee51ebba472d29" +checksum = "d90f4e0f530c4c69f62b80d839e9ef3855edc9cba471a160c4d692deed62b401" dependencies = [ "memchr", ] @@ -6246,7 +6318,7 @@ checksum = "9ce1b18ccd8e73a9321186f97e46f9f04b778851177567b1975109d26a08d2a6" dependencies = [ "proc-macro2", "quote", - "syn 2.0.48", + "syn 2.0.49", ] [[package]] @@ -6266,5 +6338,5 @@ checksum = "ce36e65b0d2999d2aafac989fb249189a141aee1f53c612c1f37d72631959f69" dependencies = [ "proc-macro2", "quote", - "syn 2.0.48", + "syn 2.0.49", ] diff --git a/third-party/rust/Cargo.toml b/third-party/rust/Cargo.toml index 1125f8d0d3..45614abfbd 100644 --- a/third-party/rust/Cargo.toml +++ b/third-party/rust/Cargo.toml @@ -93,6 +93,7 @@ pathdiff = "0.2.1" petgraph = { version = "0.6.3", features = ["serde-1"] } pin-project-lite = "0.2.9" podman-api = "0.10" +postcard = { version = "1.0.8", features = ["use-std"] } postgres-types = { version = "0.2.5", features = ["derive"] } pretty_assertions_sorted = "1.2.1" 
proc-macro2 = "1.0.56" diff --git a/third-party/rust/fixups/borsh/fixups.toml b/third-party/rust/fixups/borsh/fixups.toml new file mode 100644 index 0000000000..46dfe3a3ac --- /dev/null +++ b/third-party/rust/fixups/borsh/fixups.toml @@ -0,0 +1,4 @@ +cargo_env = true + +[[buildscript]] +[buildscript.rustc_flags] diff --git a/third-party/rust/fixups/crossbeam-epoch/fixups.toml b/third-party/rust/fixups/crossbeam-epoch/fixups.toml new file mode 100644 index 0000000000..46dfe3a3ac --- /dev/null +++ b/third-party/rust/fixups/crossbeam-epoch/fixups.toml @@ -0,0 +1,4 @@ +cargo_env = true + +[[buildscript]] +[buildscript.rustc_flags] diff --git a/third-party/rust/fixups/heapless/fixups.toml b/third-party/rust/fixups/heapless/fixups.toml new file mode 100644 index 0000000000..db40d72cb2 --- /dev/null +++ b/third-party/rust/fixups/heapless/fixups.toml @@ -0,0 +1 @@ +buildscript = [] diff --git a/third-party/rust/fixups/ring/fixups.toml b/third-party/rust/fixups/ring/fixups.toml index d2c83589cf..e21a3db527 100644 --- a/third-party/rust/fixups/ring/fixups.toml +++ b/third-party/rust/fixups/ring/fixups.toml @@ -1,3 +1,12 @@ +# NOTE(nick,jacob): adding cargo env. Original vendored file continues below. +cargo_env = true + +# vendored-date: 2023-05-03 +# project: https://github.com/facebook/buck2 +# commit-hash: f24bf491d4ee266e1ccb444919bce598ba88ad8a +# commit-date: 2023-02-21T07:04:43-0800 +# source: https://github.com/facebook/buck2/blob/f24bf491d4ee266e1ccb444919bce598ba88ad8a/shim/third-party/rust/fixups/rings/fixups.toml + # Copied from fbsource fixup. 
buildscript = [] diff --git a/third-party/rust/fixups/semver/fixups.toml b/third-party/rust/fixups/semver/fixups.toml index db40d72cb2..46dfe3a3ac 100644 --- a/third-party/rust/fixups/semver/fixups.toml +++ b/third-party/rust/fixups/semver/fixups.toml @@ -1 +1,4 @@ -buildscript = [] +cargo_env = true + +[[buildscript]] +[buildscript.rustc_flags] diff --git a/third-party/rust/fixups/system-configuration-sys/fixups.toml b/third-party/rust/fixups/system-configuration-sys/fixups.toml index db40d72cb2..2d20323aba 100644 --- a/third-party/rust/fixups/system-configuration-sys/fixups.toml +++ b/third-party/rust/fixups/system-configuration-sys/fixups.toml @@ -1 +1,4 @@ -buildscript = [] +cargo_env = true + +[[buildscript]] +[buildscript.rustc_flags] \ No newline at end of file