diff --git a/.github/workflows/derive-typescript.yaml b/.github/workflows/derive-typescript.yaml new file mode 100644 index 0000000000..9a6a684da4 --- /dev/null +++ b/.github/workflows/derive-typescript.yaml @@ -0,0 +1,52 @@ +name: derive-typescript + +on: + push: + branches: [master] + paths: [crates/derive-typescript/**] + pull_request: + branches: [master] + paths: [crates/derive-typescript/**] + +jobs: + build-and-test: + runs-on: ubuntu-20.04 + + steps: + - uses: actions/checkout@v2 + with: + fetch-depth: 0 + submodules: false + + - name: Install Rust + uses: actions-rs/toolchain@v1 + with: + toolchain: 1.72.0 + default: true + target: x86_64-unknown-linux-musl + + - run: sudo apt install -y musl-tools + + - name: Login to GitHub container registry + if: ${{ github.ref == 'refs/heads/master' }} + run: | + echo "${{ secrets.GITHUB_TOKEN }}" | \ + docker login --username ${{ github.actor }} --password-stdin ghcr.io + + - uses: Swatinem/rust-cache@v2 + with: + workspaces: | + crates/derive-typescript + + - name: build `derive-typescript` + run: cd crates/derive-typescript && cargo build --target x86_64-unknown-linux-musl --release + + - name: test `derive-typescript` + run: cd crates/derive-typescript && cargo test --target x86_64-unknown-linux-musl --release + + - name: package docker image + run: docker build -t ghcr.io/estuary/derive-typescript:dev crates/derive-typescript/ + + - name: push docker image + if: ${{ github.ref == 'refs/heads/master' }} + run: docker push ghcr.io/estuary/derive-typescript:dev diff --git a/Cargo.lock b/Cargo.lock index 53823e65d0..56dafe0efd 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -32,6 +32,7 @@ dependencies = [ "async-process", "async-trait", "base64 0.13.1", + "build", "bytes", "chrono", "clap 3.2.24", @@ -45,7 +46,7 @@ dependencies = [ "proto-flow", "regex", "reqwest", - "rusqlite", + "runtime", "schemars", "serde", "serde_json", @@ -60,6 +61,7 @@ dependencies = [ "tracing-subscriber", "url", "uuid 1.3.1", + "validation", "validator", ] @@ -469,27 +471,14 @@ dependencies = [ "anyhow", "assemble", "bytes", - "cgo", - "connector-init", - "derive-typescript", - "doc", "futures", - "insta", - "json", - "lazy_static", - "models", "ops", - "pbjson-types", - "prost", "proto-flow", + "reqwest", "runtime", "rusqlite", - "serde", - "serde_json", - "serde_yaml", "sources", "tables", - "thiserror", "tokio", "tracing", "url", @@ -1161,30 +1150,6 @@ dependencies = [ "tracing", ] -[[package]] -name = "derive-typescript" -version = "0.0.0" -dependencies = [ - "anyhow", - "doc", - "insta", - "itertools 0.10.5", - "json", - "lazy_static", - "locate-bin", - "models", - "proto-flow", - "regex", - "serde", - "serde_json", - "serde_yaml", - "sources", - "tables", - "tempfile", - "tracing", - "url", -] - [[package]] name = "diff" version = "0.1.13" @@ -1505,13 +1470,13 @@ dependencies = [ "assert_cmd", "async-process", "base64 0.13.1", + "build", "bytelines", "bytes", "clap 3.2.24", "comfy-table", "connector-init", "crossterm", - "derive-typescript", "dirs", "doc", "extractors", @@ -3292,6 +3257,7 @@ dependencies = [ "regex", "serde", "serde_json", + "zeroize", ] [[package]] @@ -3759,6 +3725,7 @@ name = "runtime" version = "0.0.0" dependencies = [ "anyhow", + "async-process", "bytes", "clap 3.2.24", "connector-init", @@ -3769,6 +3736,8 @@ dependencies = [ "insta", "json-patch", "librocksdb-sys", + "locate-bin", + "models", "ops", "pbjson-types", "prost", @@ -3785,6 +3754,7 @@ dependencies = [ "tracing", "tracing-subscriber", "tuple", + "zeroize", ] [[package]] @@ -5785,6 
+5755,12 @@ dependencies = [ "synstructure", ] +[[package]] +name = "zeroize" +version = "1.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2a0956f1ba7c7909bfb66c2e9e4124ab6f6482560f6628b5aaeba39207c9aad9" + [[package]] name = "zip" version = "0.5.13" diff --git a/Cargo.toml b/Cargo.toml index 808ab2853d..946b71a954 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,6 +1,7 @@ [workspace] resolver = "2" members = ["crates/*"] +exclude = ["crates/derive-typescript"] [workspace.package] version = "0.0.0" @@ -159,6 +160,7 @@ tracing-subscriber = { version = "0.3", features = [ "fmt", ] } shared_child = "1.0.0" +zeroize = "1.6" unicode-bom = "1.1" unicode-normalization = "0.1" diff --git a/Makefile b/Makefile index b34c348451..55c4374f61 100644 --- a/Makefile +++ b/Makefile @@ -300,6 +300,7 @@ install-tools: ${PKGDIR}/bin/deno ${PKGDIR}/bin/etcd ${PKGDIR}/bin/sops .PHONY: rust-gnu-test rust-gnu-test: + PATH=${PKGDIR}/bin:$$PATH ;\ cargo test --release --locked --workspace --exclude parser --exclude network-tunnel --exclude schemalate --exclude connector-init .PHONY: rust-musl-test diff --git a/crates/agent/Cargo.toml b/crates/agent/Cargo.toml index 5139f8c4de..133b0120f7 100644 --- a/crates/agent/Cargo.toml +++ b/crates/agent/Cargo.toml @@ -13,10 +13,13 @@ license.workspace = true [dependencies] agent-sql = { path = "../agent-sql" } async-process = { path = "../async-process" } +build = { path = "../build" } doc = { path = "../doc" } models = { path = "../models" } proto-flow = { path = "../proto-flow" } +runtime = { path = "../runtime" } tables = { path = "../tables", features = ["persist"] } +validation = { path = "../validation" } anyhow = { workspace = true } async-trait = { workspace = true } @@ -29,7 +32,6 @@ itertools = { workspace = true } lazy_static = { workspace = true } regex = { workspace = true } reqwest = { workspace = true } -rusqlite = { workspace = true } schemars = { workspace = true } serde = { workspace = true } serde_json = { workspace = true } diff --git a/crates/agent/src/logs.rs b/crates/agent/src/logs.rs index 24cf03eb69..3afd62303a 100644 --- a/crates/agent/src/logs.rs +++ b/crates/agent/src/logs.rs @@ -16,6 +16,43 @@ pub struct Line { // Tx is the channel sender of log Lines. pub type Tx = tokio::sync::mpsc::Sender; +impl Line { + pub(crate) fn new(token: Uuid, stream: String, line: String) -> Self { + Line { + token, + stream, + line, + } + } +} + +/// ops_handler is a ops::Log handler that dispatches to `tx` +/// using the given `stream` and `token`. +pub fn ops_handler( + tx: Tx, + stream: String, + token: Uuid, +) -> impl Fn(&proto_flow::ops::Log) + Send + Sync + Clone + 'static { + let log_handler = move |log: &proto_flow::ops::Log| { + // TODO(johnny): format into a user-facing representation for our UI. + let line = serde_json::to_string(log).unwrap(); + + let Err(tokio::sync::mpsc::error::TrySendError::Full(line)) = tx.try_send(Line { + token: token.clone(), + stream: stream.clone(), + line, + }) else { return }; + + // Perform an expensive "move" of all other tasks scheduled on the + // current async executor thread, so that we can block until there's capacity. + let tx_clone = tx.clone(); + _ = tokio::task::block_in_place(move || { + tokio::runtime::Handle::current().block_on(tx_clone.send(line)) + }); + }; + log_handler +} + // capture_job_logs consumes newline-delimited lines from the AsyncRead and // streams each as a Line to the channel Sender. 
#[tracing::instrument(err, skip(tx, reader))] @@ -35,13 +72,9 @@ where let line = String::from_utf8(line) .unwrap_or_else(|err| String::from_utf8_lossy(err.as_bytes()).into_owned()); - tx.send(Line { - token, - stream: stream.clone(), - line, - }) - .await - .unwrap(); + tx.send(Line::new(token, stream.clone(), line)) + .await + .unwrap(); } Ok(()) } diff --git a/crates/agent/src/publications.rs b/crates/agent/src/publications.rs index 1ade80c098..c6e9e2345a 100644 --- a/crates/agent/src/publications.rs +++ b/crates/agent/src/publications.rs @@ -301,7 +301,6 @@ impl PublishHandler { &self.builds_root, &draft_catalog, &self.connector_network, - &self.bindir, row.logs_token, &self.logs_tx, row.pub_id, diff --git a/crates/agent/src/publications/builds.rs b/crates/agent/src/publications/builds.rs index 29adac553a..20a3faa7e5 100644 --- a/crates/agent/src/publications/builds.rs +++ b/crates/agent/src/publications/builds.rs @@ -3,13 +3,14 @@ use agent_sql::publications::{ExpandedRow, SpecRow}; use agent_sql::CatalogType; use anyhow::Context; use itertools::Itertools; +use proto_flow::{materialize, ops::log::Level as LogLevel}; use serde::{Deserialize, Serialize}; use sqlx::types::Uuid; use std::collections::BTreeMap; use std::io::Write; use std::path; -use tables::SqlTableObj; +#[derive(Default)] pub struct BuildOutput { pub errors: tables::Errors, pub built_captures: tables::BuiltCaptures, @@ -74,11 +75,19 @@ impl BuildOutput { let Some(collection_name) = mat.spec.bindings[i].collection.as_ref().map(|c| c.name.as_str()) else { continue; }; - let naughty_fields: Vec = binding.constraints.iter().filter(|(_, constraint)| { - constraint.r#type == proto_flow::materialize::response::validated::constraint::Type::Unsatisfiable as i32 - }).map(|(field, constraint)| { - RejectedField { field: field.clone(), reason: constraint.reason.clone() } - }).collect(); + let naughty_fields: Vec = binding + .constraints + .iter() + .filter(|(_, constraint)| { + constraint.r#type + == materialize::response::validated::constraint::Type::Unsatisfiable + as i32 + }) + .map(|(field, constraint)| RejectedField { + field: field.clone(), + reason: constraint.reason.clone(), + }) + .collect(); if !naughty_fields.is_empty() { let affected_consumers = naughty_collections .entry(collection_name.to_owned()) @@ -108,7 +117,6 @@ pub async fn build_catalog( builds_root: &url::Url, catalog: &models::Catalog, connector_network: &str, - bindir: &str, logs_token: Uuid, logs_tx: &logs::Tx, pub_id: Id, @@ -128,39 +136,47 @@ pub async fn build_catalog( .context("writing catalog file")?; let build_id = format!("{pub_id}"); + let control_plane = validation::NoOpControlPlane {}; let db_path = builds_dir.join(&build_id); + let log_handler = logs::ops_handler(logs_tx.clone(), "build".to_string(), logs_token); + let project_root = url::Url::parse("file:///").unwrap(); + let source = url::Url::parse("file:///flow.json").unwrap(); + + let managed_build = build::managed_build( + build_id.clone(), + connector_network.to_string(), + Box::new(control_plane), + builds_dir.clone(), // Root for file:// resolution. 
+ log_handler.clone(), + project_root, + source, + ); - let build_job = jobs::run( - "build", - logs_tx, - logs_token, - async_process::Command::new(format!("{bindir}/flowctl-go")) - .arg("api") - .arg("build") - .arg("--build-id") - .arg(&build_id) - .arg("--build-db") - .arg(&db_path) - .arg("--fs-root") - .arg(&builds_dir) - .arg("--network") - .arg(connector_network) - .arg("--source") - .arg("file:///flow.json") - .arg("--source-type") - .arg("catalog") - .arg("--log.level=warn") - .arg("--log.format=color") - .current_dir(tmpdir), - ) - .await - .with_context(|| format!("building catalog in {builds_dir:?}"))?; + // Build a tokio::Runtime that dispatches all tracing events to `log_handler`. + let tokio_context = runtime::TokioContext::new( + LogLevel::Warn, + log_handler, + format!("agent-build-{build_id}"), + 1, + ); + let build_result = tokio_context + .spawn(managed_build) + .await + .context("unable to join catalog build handle due to panic")?; // Persist the build before we do anything else. + build::persist( + proto_flow::flow::build_api::Config { + build_id, + ..Default::default() + }, + &db_path, + &build_result, + )?; let dest_url = builds_root.join(&pub_id.to_string())?; - // The gsutil job needs to access the GOOGLE_APPLICATION_CREDENTIALS environment variable, so - // we cannot use `jobs::run` here. + // The gsutil job needs to access the GOOGLE_APPLICATION_CREDENTIALS environment variable, + // so we cannot use `jobs::run` here. let persist_job = jobs::run_without_removing_env( "persist", &logs_tx, @@ -178,43 +194,27 @@ pub async fn build_catalog( anyhow::bail!("persist of {db_path:?} exited with an error"); } - // Inspect the database for build errors. - let db = rusqlite::Connection::open_with_flags( - &db_path, - rusqlite::OpenFlags::SQLITE_OPEN_READ_ONLY, - )?; - - let mut errors = tables::Errors::new(); - errors.load_all(&db).context("loading build errors")?; - - if !build_job.success() && errors.is_empty() { - anyhow::bail!("build_job exited with failure but errors is empty"); - } - - let mut built_captures = tables::BuiltCaptures::new(); - built_captures - .load_all(&db) - .context("loading built captures")?; - - let mut built_collections = tables::BuiltCollections::new(); - built_collections - .load_all(&db) - .context("loading built collections")?; - - let mut built_materializations = tables::BuiltMaterializations::new(); - built_materializations - .load_all(&db) - .context("loading built materailizations")?; - - let mut built_tests = tables::BuiltTests::new(); - built_tests.load_all(&db).context("loading built tests")?; - - Ok(BuildOutput { - errors, - built_captures, - built_collections, - built_materializations, - built_tests, + Ok(match build_result { + Ok(( + _sources, + tables::Validations { + built_captures, + built_collections, + built_materializations, + built_tests, + errors: _, + }, + )) => BuildOutput { + built_captures, + built_collections, + built_materializations, + built_tests, + ..Default::default() + }, + Err(errors) => BuildOutput { + errors, + ..Default::default() + }, }) } diff --git a/crates/agent/src/publications/specs.rs b/crates/agent/src/publications/specs.rs index e9aa09fbd3..88d3b7badd 100644 --- a/crates/agent/src/publications/specs.rs +++ b/crates/agent/src/publications/specs.rs @@ -739,7 +739,7 @@ mod test { async fn execute_publications(txn: &mut Transaction<'_, Postgres>) -> Vec { let bs_url: Url = "http://example.com".parse().unwrap(); - let (logs_tx, mut logs_rx) = tokio::sync::mpsc::channel(8192); + let (logs_tx, mut logs_rx) = 
tokio::sync::mpsc::channel(1); // TODO(enlarge once the block_in_place definitely works) // Just in case anything gets through logs_rx.close(); diff --git a/crates/assemble/src/lib.rs b/crates/assemble/src/lib.rs index cb8f6bb280..49ab812c4a 100644 --- a/crates/assemble/src/lib.rs +++ b/crates/assemble/src/lib.rs @@ -81,7 +81,7 @@ pub fn inference_uuid_v1_date_time() -> flow::Inference { // partition_template returns a template JournalSpec for creating // or updating data partitions of the collection. pub fn partition_template( - build_config: &flow::build_api::Config, + build_id: &str, collection: &models::Collection, journals: &models::JournalTemplate, stores: &[models::Store], @@ -140,7 +140,7 @@ pub fn partition_template( }, broker::Label { name: labels::BUILD.to_string(), - value: build_config.build_id.clone(), + value: build_id.to_string(), }, broker::Label { name: labels::COLLECTION.to_string(), @@ -172,7 +172,7 @@ pub fn partition_template( // recovery_log_template returns a template JournalSpec for creating // or updating recovery logs of task shards. pub fn recovery_log_template( - build_config: &flow::build_api::Config, + build_id: &str, task_name: &str, task_type: &str, stores: &[models::Store], @@ -223,7 +223,7 @@ pub fn recovery_log_template( }, broker::Label { name: labels::BUILD.to_string(), - value: build_config.build_id.clone(), + value: build_id.to_string(), }, broker::Label { name: labels::TASK_NAME.to_string(), @@ -272,7 +272,7 @@ pub fn shard_id_base(task_name: &str, task_type: &str) -> String { // shard_template returns a template ShardSpec for creating or updating // shards of the task. pub fn shard_template( - build_config: &flow::build_api::Config, + build_id: &str, task_name: &str, task_type: &str, shard: &models::ShardTemplate, @@ -327,7 +327,7 @@ pub fn shard_template( }, broker::Label { name: labels::BUILD.to_string(), - value: build_config.build_id.clone(), + value: build_id.to_string(), }, broker::Label { name: labels::LOG_LEVEL.to_string(), @@ -408,7 +408,7 @@ fn shard_hostname_label(task_name: &str) -> String { } pub fn collection_spec( - build_config: &flow::build_api::Config, + build_id: &str, collection: &tables::Collection, projections: Vec, stores: &[models::Store], @@ -467,7 +467,7 @@ pub fn collection_spec( "ack": true, } }) .to_string(), - partition_template: Some(partition_template(build_config, name, journals, stores)), + partition_template: Some(partition_template(build_id, name, journals, stores)), derivation: None, } } diff --git a/crates/async-process/src/lib.rs b/crates/async-process/src/lib.rs index eff9224f99..919bb18e52 100644 --- a/crates/async-process/src/lib.rs +++ b/crates/async-process/src/lib.rs @@ -1,5 +1,5 @@ pub use std::process::{Command, Output, Stdio}; -use tokio::io::AsyncReadExt; +use tokio::io::{AsyncReadExt, AsyncWriteExt}; use shared_child::SharedChild; #[cfg(unix)] @@ -34,6 +34,7 @@ impl From for Child { } impl Child { + /// Wait on the Child asynchronously, using a blocking background thread. pub fn wait( &self, ) -> impl std::future::Future> { @@ -41,6 +42,11 @@ impl Child { let handle = tokio::runtime::Handle::current().spawn_blocking(move || cloned_inner.wait()); async move { handle.await.expect("wait does not panic") } } + + /// Check whether the Child has exited without blocking, returning Ok(None) if the child is still running. 
+ pub fn try_wait(&self) -> std::io::Result> { + self.inner.try_wait() + } } impl Drop for Child { @@ -89,17 +95,31 @@ impl Drop for Child { /// Spawn the command and wait for it to exit, buffering its stdout and stderr. /// Upon its exit return an Output having its stdout, stderr, and ExitStatus. pub async fn output(cmd: &mut Command) -> std::io::Result { - cmd.stdin(Stdio::null()); + input_output(cmd, &[]).await +} + +/// Span the command and wait for it to exit, passing it the given input and buffering its stdout and stderr. +/// Upon its exit return an Output having its stdout, stderr, and ExitStatus. +pub async fn input_output(cmd: &mut Command, input: &[u8]) -> std::io::Result { + cmd.stdin(Stdio::piped()); cmd.stderr(Stdio::piped()); cmd.stdout(Stdio::piped()); let mut child: Child = cmd.spawn()?.into(); - let (mut stdout, mut stderr) = (Vec::new(), Vec::new()); - let (mut stdout_pipe, mut stderr_pipe) = - (child.stdout.take().unwrap(), child.stderr.take().unwrap()); + // Pre-allocate enough stdout to hold all of `input` without a reallocation. + // This is a security measure, to avoid extra allocations / heap copies if + // the output contains sensitive data, as is the case with `sops` decryptions. + let (mut stdout, mut stderr) = (Vec::with_capacity(input.len()), Vec::new()); + let (mut stdin_pipe, mut stdout_pipe, mut stderr_pipe) = ( + child.stdin.take().unwrap(), + child.stdout.take().unwrap(), + child.stderr.take().unwrap(), + ); - let (_, _, wait) = tokio::join!( + let (_, _, _, wait) = tokio::join!( + // Wrapping future is required to drop `stdin_pipe` once `input` is written or fails. + async move { stdin_pipe.write_all(input).await }, stdout_pipe.read_to_end(&mut stdout), stderr_pipe.read_to_end(&mut stderr), child.wait(), @@ -124,7 +144,7 @@ where #[cfg(test)] mod test { - use super::{output, Child, Command}; + use super::{input_output, output, Child, Command}; #[tokio::test] async fn test_wait() { @@ -164,4 +184,27 @@ mod test { ) "###); } + + #[tokio::test] + async fn test_input_output() { + let result = input_output( + Command::new("cat").arg("/dev/stdin"), + "Hello, world!".as_bytes(), + ) + .await; + + insta::assert_debug_snapshot!(result, @r###" + Ok( + Output { + status: ExitStatus( + unix_wait_status( + 0, + ), + ), + stdout: "Hello, world!", + stderr: "", + }, + ) + "###); + } } diff --git a/crates/bindings/flow_bindings.h b/crates/bindings/flow_bindings.h index 43dc4c89d9..ba56822d7f 100644 --- a/crates/bindings/flow_bindings.h +++ b/crates/bindings/flow_bindings.h @@ -112,16 +112,6 @@ typedef struct TaskService { uintptr_t err_cap; } TaskService; -struct Channel *build_create(int32_t log_level, int32_t log_dest_fd); - -void build_invoke1(struct Channel *ch, struct In1 i); - -void build_invoke4(struct Channel *ch, struct In4 i); - -void build_invoke16(struct Channel *ch, struct In16 i); - -void build_drop(struct Channel *ch); - struct Channel *combine_create(int32_t log_level, int32_t log_dest_fd); void combine_invoke1(struct Channel *ch, struct In1 i); diff --git a/crates/bindings/src/build.rs b/crates/bindings/src/build.rs deleted file mode 100644 index 00608c3a09..0000000000 --- a/crates/bindings/src/build.rs +++ /dev/null @@ -1,23 +0,0 @@ -use crate::service::{self, Channel}; -use build::API; - -#[no_mangle] -pub extern "C" fn build_create(log_level: i32, log_dest_fd: i32) -> *mut Channel { - service::create::(log_level, log_dest_fd) -} -#[no_mangle] -pub extern "C" fn build_invoke1(ch: *mut Channel, i: service::In1) { - service::invoke::(ch, i) -} 
-#[no_mangle] -pub extern "C" fn build_invoke4(ch: *mut Channel, i: service::In4) { - service::invoke::(ch, i) -} -#[no_mangle] -pub extern "C" fn build_invoke16(ch: *mut Channel, i: service::In16) { - service::invoke::(ch, i) -} -#[no_mangle] -pub extern "C" fn build_drop(ch: *mut Channel) { - service::drop::(ch) -} diff --git a/crates/bindings/src/derive.rs b/crates/bindings/src/derive.rs deleted file mode 100644 index 95f548a48d..0000000000 --- a/crates/bindings/src/derive.rs +++ /dev/null @@ -1,23 +0,0 @@ -use crate::service::{self, Channel}; -use derive::derive_api::API; - -#[no_mangle] -pub extern "C" fn derive_create(log_level: i32, log_dest_fd: i32) -> *mut Channel { - service::create::(log_level, log_dest_fd) -} -#[no_mangle] -pub extern "C" fn derive_invoke1(ch: *mut Channel, i: service::In1) { - service::invoke::(ch, i) -} -#[no_mangle] -pub extern "C" fn derive_invoke4(ch: *mut Channel, i: service::In4) { - service::invoke::(ch, i) -} -#[no_mangle] -pub extern "C" fn derive_invoke16(ch: *mut Channel, i: service::In16) { - service::invoke::(ch, i) -} -#[no_mangle] -pub extern "C" fn derive_drop(ch: *mut Channel) { - service::drop::(ch) -} diff --git a/crates/bindings/src/lib.rs b/crates/bindings/src/lib.rs index edb96027b7..436e45dbf1 100644 --- a/crates/bindings/src/lib.rs +++ b/crates/bindings/src/lib.rs @@ -1,6 +1,4 @@ -mod build; mod combine; -// mod derive; mod extract; mod metrics; mod service; diff --git a/crates/build/Cargo.toml b/crates/build/Cargo.toml index 2e13a000b6..af7e9b4796 100644 --- a/crates/build/Cargo.toml +++ b/crates/build/Cargo.toml @@ -10,12 +10,6 @@ license.workspace = true [dependencies] assemble = { path = "../assemble" } -cgo = { path = "../cgo" } -connector-init = { path = "../connector-init" } -derive-typescript = { path = "../derive-typescript" } -doc = { path = "../doc" } -json = { path = "../json" } -models = { path = "../models" } ops = { path = "../ops" } proto-flow = { path = "../proto-flow" } runtime = { path = "../runtime" } @@ -26,18 +20,8 @@ validation = { path = "../validation" } anyhow = { workspace = true } bytes = { workspace = true } futures = { workspace = true } -pbjson-types = { workspace = true } -prost = { workspace = true } +reqwest = { workspace = true } rusqlite = { workspace = true } -serde = { workspace = true } -serde_json = { workspace = true } -thiserror = { workspace = true } tokio = { workspace = true } tracing = { workspace = true } url = { workspace = true } - -[dev-dependencies] - -insta = { workspace = true } -lazy_static = { workspace = true } -serde_yaml = { workspace = true } diff --git a/crates/build/src/api.rs b/crates/build/src/api.rs deleted file mode 100644 index e5d2227322..0000000000 --- a/crates/build/src/api.rs +++ /dev/null @@ -1,274 +0,0 @@ -use anyhow::Context; -use futures::future::LocalBoxFuture; -use futures::{channel::oneshot, FutureExt}; -use prost::Message; -use proto_flow::{ - flow, - flow::build_api::{self, Code}, -}; -use std::rc::Rc; -use std::task::Poll; -use url::Url; - -#[derive(thiserror::Error, Debug)] -pub enum Error { - #[error("protocol error (invalid state or invocation)")] - InvalidState, - #[error("Protobuf decoding error")] - ProtoDecode(#[from] prost::DecodeError), - #[error(transparent)] - UTF8Error(#[from] std::str::Utf8Error), - #[error(transparent)] - Anyhow(#[from] anyhow::Error), -} - -// Fetcher implements sources::Fetcher, and delegates to Go via Trampoline. 
-struct Fetcher(Rc); - -impl sources::Fetcher for Fetcher { - fn fetch<'a>( - &self, - resource: &'a Url, - content_type: flow::ContentType, - ) -> LocalBoxFuture<'a, Result> { - let request = build_api::Fetch { - resource_url: resource.to_string(), - content_type: content_type as i32, - }; - let (tx, rx) = oneshot::channel(); - - self.0.start_task( - build_api::Code::TrampolineFetch as u32, - move |arena: &mut Vec| request.encode_raw(arena), - move |result: Result<&[u8], anyhow::Error>| { - let result = result.map(|data| bytes::Bytes::copy_from_slice(data)); - tx.send(result).unwrap(); - }, - ); - rx.map(|r| r.unwrap()).boxed_local() - } -} - -// Connectors implements validation::Connectors, and delegates to Go via Trampoline. -struct Connectors(Rc); - -impl validation::Connectors for Connectors { - fn validate_capture<'a>( - &'a self, - request: proto_flow::capture::request::Validate, - ) -> LocalBoxFuture<'a, anyhow::Result> { - let (tx, rx) = oneshot::channel(); - - self.0.start_task( - build_api::Code::TrampolineValidateCapture as u32, - move |arena: &mut Vec| request.encode_raw(arena), - move |result: Result<&[u8], anyhow::Error>| { - let result = result.and_then(|data| { - proto_flow::capture::response::Validated::decode(data).map_err(Into::into) - }); - tx.send(result).unwrap(); - }, - ); - rx.map(|r| r.unwrap()).boxed_local() - } - - fn validate_derivation<'a>( - &'a self, - request: proto_flow::derive::request::Validate, - ) -> LocalBoxFuture<'a, anyhow::Result> { - use proto_flow::derive; - - async move { - // This is a bit gross, but we synchronously drive the derivation middleware - // to determine its validation outcome. We must do it this way because we - // cannot return a non-ready future from this code path, unless it's using - // trampoline polling (which we're not doing here). - // TODO(johnny): Have *all* connector invocations happen from Rust via tokio, - // and remove trampoline polling back to the Go runtime. 
- - let response = tracing::dispatcher::get_default(move |dispatch| { - let task_runtime = runtime::TaskRuntime::new("build".to_string(), dispatch.clone()); - let middleware = runtime::derive::Middleware::new( - ops::new_tracing_dispatch_handler(dispatch.clone()), - None, - ); - - let request = derive::Request { - validate: Some(request.clone()), - ..Default::default() - }; - task_runtime.block_on(async move { middleware.serve_unary(request).await }) - }) - .map_err(|status| anyhow::Error::msg(status.message().to_string()))?; - - let validated = response - .validated - .context("derive Response is not Validated")?; - - Ok(validated) - } - .boxed_local() - } - - fn validate_materialization<'a>( - &'a self, - request: proto_flow::materialize::request::Validate, - ) -> LocalBoxFuture<'a, anyhow::Result> { - let (tx, rx) = oneshot::channel(); - - self.0.start_task( - build_api::Code::TrampolineValidateMaterialization as u32, - move |arena: &mut Vec| request.encode_raw(arena), - move |result: Result<&[u8], anyhow::Error>| { - let result = result.and_then(|data| { - proto_flow::materialize::response::Validated::decode(data).map_err(Into::into) - }); - tx.send(result).unwrap(); - }, - ); - rx.map(|r| r.unwrap()).boxed_local() - } - - fn inspect_image<'a>( - &'a self, - image: String, - ) -> LocalBoxFuture<'a, Result, anyhow::Error>> { - let (tx, rx) = oneshot::channel(); - self.0.start_task( - build_api::Code::TrampolineDockerInspect as u32, - move |arena: &mut Vec| arena.extend_from_slice(image.as_bytes()), - move |result: Result<&[u8], anyhow::Error>| { - let final_result = result.map(|output| output.to_vec()); - tx.send(final_result).unwrap(); - }, - ); - rx.map(|r| r.unwrap()).boxed_local() - } -} - -// BuildFuture is a polled future which builds a catalog. -struct BuildFuture { - boxed: LocalBoxFuture<'static, Result>, - trampoline: Rc, -} - -impl BuildFuture { - fn new(config: build_api::Config) -> Result { - let trampoline = Rc::new(cgo::Trampoline::new()); - let fetcher = Fetcher(trampoline.clone()); - let drivers = Connectors(trampoline.clone()); - let future = crate::configured_build(config, fetcher, drivers); - - Ok(BuildFuture { - boxed: future.boxed_local(), - trampoline, - }) - } - - // Dispatch all queued work to the Go side of the CGO bridge. - fn dispatch_work(&mut self, arena: &mut Vec, out: &mut Vec) { - self.trampoline - .dispatch_tasks(build_api::Code::Trampoline as u32, arena, out); - } -} - -/// API implements the CGO bridge service for the build API. -pub struct API { - state: State, -} - -// State is the private inner state machine of the API. -enum State { - Init, - // We're ready to be immediately polled. - PollReady { future: BuildFuture }, - // We've polled to Pending and have dispatched work, but it must - // resolve before we may continue. - PollIdle { future: BuildFuture }, - // Build is completed. - Done, -} - -impl cgo::Service for API { - type Error = Error; - - fn create() -> Self { - Self { state: State::Init } - } - - fn invoke( - &mut self, - code: u32, - data: &[u8], - arena: &mut Vec, - out: &mut Vec, - ) -> Result<(), Self::Error> { - let code = match Code::from_i32(code as i32) { - Some(c) => c, - None => return Err(Error::InvalidState), - }; - tracing::trace!(?code, "invoke"); - - match (code, std::mem::replace(&mut self.state, State::Init)) { - // Begin build. - (Code::Begin, State::Init) => { - let config = build_api::Config::decode(data)?; - - self.state = State::PollReady { - future: BuildFuture::new(config)?, - }; - Ok(()) - } - // Poll future. 
- (Code::Poll, State::PollReady { mut future }) => { - let waker = futures::task::noop_waker(); - let mut ctx = std::task::Context::from_waker(&waker); - - match future.boxed.poll_unpin(&mut ctx) { - Poll::Ready(result) => { - let tables = result?; - - // We must have drained all outstanding fetches. - assert!(future.trampoline.is_empty()); - - if tables.errors.is_empty() { - cgo::send_code(Code::Done as u32, out); - } else { - cgo::send_code(Code::DoneWithErrors as u32, out); - } - - self.state = State::Done; - Ok(()) - } - Poll::Pending => { - future.dispatch_work(arena, out); - - self.state = State::PollIdle { future }; - Ok(()) - } - } - } - // Trampoline task has resolved. - (Code::Trampoline, State::PollIdle { future }) - | (Code::Trampoline, State::PollReady { future }) => { - future.trampoline.resolve_task(data); - - self.state = State::PollReady { future }; - Ok(()) - } - // Return source catalog JSON schema. - (Code::CatalogSchema, State::Init) => { - let schema = models::Catalog::root_json_schema(); - - let begin = arena.len(); - let w: &mut Vec = &mut *arena; - serde_json::to_writer_pretty(w, &schema).expect("encoding cannot fail"); - cgo::send_bytes(Code::CatalogSchema as u32, begin, arena, out); - - self.state = State::Done; - Ok(()) - } - _ => Err(Error::InvalidState), - } - } -} diff --git a/crates/build/src/lib.rs b/crates/build/src/lib.rs index bcb55c62fe..0881984139 100644 --- a/crates/build/src/lib.rs +++ b/crates/build/src/lib.rs @@ -1,110 +1,154 @@ use anyhow::Context; -use proto_flow::flow; -use url::Url; - -mod api; -pub use api::API; - -// TODO(johnny): consolidate with local_specs.rs of crate `flowctl`. -/// Resolves a source argument to a canonical URL. If `source` is already a url, then it's simply -/// parsed and returned. If source is a filesystem path, then it is canonical-ized and returned as a -/// `file:///` URL. Will return an error if the filesystem path does not exist. -pub fn source_to_url(source: &str) -> Result { - match Url::parse(source) { - Ok(url) => Ok(url), +use futures::{future::BoxFuture, FutureExt}; +use proto_flow::{capture, derive, flow, materialize}; +use std::{ + collections::BTreeMap, + path::{Path, PathBuf}, +}; + +/// Map a "--source" argument to a corresponding URL, optionally creating an empty +/// file if one doesn't exist, which is required when producing a canonical file:/// +/// URL for a local file. +pub fn arg_source_to_url(source: &str, create_if_not_exists: bool) -> anyhow::Result { + // Special case that maps stdin into a URL constant. + if source == "-" { + return Ok(url::Url::parse(STDIN_URL).unwrap()); + } else if let Ok(url) = url::Url::parse(source) { + return Ok(url); + } + + tracing::debug!( + source = %source, + "source is not a URL; assuming it's a filesystem path", + ); + + let source = match std::fs::canonicalize(source) { + Ok(p) => p, + Err(err) if matches!(err.kind(), std::io::ErrorKind::NotFound) && create_if_not_exists => { + std::fs::write(source, "{}") + .with_context(|| format!("failed to create new file {source}"))?; + std::fs::canonicalize(source).expect("can canonicalize() a file we just wrote") + } Err(err) => { - tracing::debug!( - "{:?} is not a URL; assuming it's a filesystem path (parse error: {})", - source, - err - ); - let source = std::fs::canonicalize(source) - .context(format!("finding {:?} in the local filesystem", source))?; - // Safe unwrap since we've canonicalized the path. 
- Ok(url::Url::from_file_path(&source).unwrap()) + return Err(err).context(format!("could not find {source} in the local filesystem")); } - } + }; + + // Safe unwrap since we've canonical-ized the path. + Ok(url::Url::from_file_path(&source).unwrap()) } -pub async fn configured_build( - config: flow::build_api::Config, - fetcher: F, - drivers: D, -) -> Result -where - F: sources::Fetcher, - D: validation::Connectors, -{ - let root_url = source_to_url(config.source.as_str())?; +/// Map a `source` into a suitable project root directory. +/// +/// If `source` is a local file:// URL, its parent directories are examined +/// for a contained `flow.yaml`, `flow.yml`, or `flow.json` file, and the URL +/// of the root-most directory having such a file is returned. +/// +/// Or, if `source` is not a local file://, then the current working directory is returned. +pub fn project_root(source: &url::Url) -> url::Url { + let current_dir = + std::env::current_dir().expect("failed to determine current working directory"); + let source_path = source.to_file_path(); + + let dir = if let Ok(source_path) = &source_path { + let mut dir = source_path + .parent() + .expect("source path is an absolute filesystem path"); - let root_spec = match flow::ContentType::from_i32(config.source_type) { - Some(flow::ContentType::Catalog) => flow::ContentType::Catalog, - Some(flow::ContentType::JsonSchema) => flow::ContentType::JsonSchema, - _ => anyhow::bail!("unexpected content type (must be Catalog or JsonSchema)"), + while let Some(parent) = dir.parent() { + if ["flow.yaml", "flow.yml", "flow.json"] + .iter() + .any(|name| parent.join(name).exists()) + { + dir = parent; + } else { + break; + } + } + dir + } else { + // `source` isn't local. Use the current working directory. + ¤t_dir }; - let mut all_tables = - load_and_validate(root_url.clone(), root_spec, fetcher, drivers, &config).await; - all_tables.meta.insert_row(config.clone()); + url::Url::from_file_path(dir).expect("cannot map project directory into a URL") +} - // Output database path is implied from the configured directory and ID. - if !config.build_db.is_empty() { - let db = rusqlite::Connection::open(&config.build_db) - .context("failed to open catalog database")?; +/// Load a source into tables, separately returning source tables and any errors. +pub async fn load(source: &url::Url, file_root: &Path) -> tables::Sources { + let loader = sources::Loader::new( + tables::Sources::default(), + Fetcher { + file_root: file_root.to_owned(), + }, + ); - tables::persist_tables(&db, &all_tables.as_tables()) - .context("failed to persist catalog tables")?; - tracing::info!(build_db=?config.build_db, "wrote build database"); - } + loader + .load_resource( + sources::Scope::new(&source), + &source, + flow::ContentType::Catalog, + ) + .await; - Ok(all_tables) + loader.into_tables() } -async fn load_and_validate( - root: Url, - root_type: flow::ContentType, - fetcher: F, - connectors: D, - build_config: &flow::build_api::Config, -) -> tables::All +/// Build sources by mapping to their inline form, validating all specifications, +/// fetching referenced specs from the control plane, and producing built specifications. 
+pub async fn validate( + build_id: &str, + connector_network: &str, + control_plane: &dyn validation::ControlPlane, + generate_ops_collections: bool, + log_handler: L, + noop_captures: bool, + noop_derivations: bool, + noop_materializations: bool, + project_root: &url::Url, + mut sources: tables::Sources, +) -> (tables::Sources, tables::Validations) where - F: sources::Fetcher, - D: validation::Connectors, + L: Fn(&ops::Log) + Send + Sync + Clone + 'static, { - let loader = sources::Loader::new(tables::Sources::default(), fetcher); - loader - .load_resource(sources::Scope::new(&root), &root, root_type.into()) - .await; + // TODO(johnny): We *really* need to kill this, and have ops collections + // be injected exclusively from the control-plane. + if generate_ops_collections { + assemble::generate_ops_collections(&mut sources); + } + ::sources::inline_sources(&mut sources); - let mut tables = loader.into_tables(); - assemble::generate_ops_collections(&mut tables); - sources::inline_sources(&mut tables); + let runtime = runtime::Runtime::new( + connector_network.to_string(), + log_handler, + None, + format!("build/{}", build_id), + ); + + let connectors = Connectors { + noop_captures, + noop_derivations, + noop_materializations, + runtime, + }; let tables::Sources { captures, collections, - mut errors, + errors: _, fetches, imports, materializations, - resources, + resources: _, storage_mappings, tests, - } = tables; - - let tables::Validations { - built_captures, - built_collections, - built_materializations, - built_tests, - errors: validation_errors, - } = validation::validate( - build_config, + } = &sources; + + let validations = validation::validate( + build_id, + project_root, &connectors, - // TODO(johnny): Plumb through collection resolution. - // At the moment we get away with not having this because the control-plane agent - // includes all connected collections in the build. - &validation::NoOpControlPlane {}, + control_plane, &captures, &collections, &fetches, @@ -115,22 +159,233 @@ where ) .await; - errors.extend(validation_errors.into_iter()); + (sources, validations) +} - tables::All { - built_captures, - built_collections, - built_materializations, - built_tests, - captures, - collections, - errors, - fetches, - imports, - materializations, - meta: tables::Meta::new(), - resources, - storage_mappings, - tests, +pub async fn managed_build( + build_id: String, + connector_network: String, + control_plane: Box, + file_root: PathBuf, + log_handler: L, + project_root: url::Url, + source: url::Url, +) -> Result<(tables::Sources, tables::Validations), tables::Errors> +where + L: Fn(&ops::Log) + Send + Sync + Clone + 'static, +{ + let (sources, validations) = validate( + &build_id, + &connector_network, + &*control_plane, + true, // Generate ops collections. + log_handler, + false, // Validate captures. + false, // Validate derivations. + false, // Validate materializations. 
+ &project_root, + load(&source, &file_root).await.into_result()?, + ) + .await; + + Ok((sources, validations.into_result()?)) +} + +pub fn persist( + build_config: proto_flow::flow::build_api::Config, + db_path: &Path, + result: &Result<(tables::Sources, tables::Validations), tables::Errors>, +) -> anyhow::Result<()> { + let db = rusqlite::Connection::open(db_path).context("failed to open catalog database")?; + + match result { + Ok((sources, validations)) => { + tables::persist_tables(&db, &sources.as_tables()) + .context("failed to persist catalog sources")?; + tables::persist_tables(&db, &validations.as_tables()) + .context("failed to persist catalog validations")?; + } + Err(errors) => { + tables::persist_tables(&db, &[errors]).context("failed to persist catalog errors")?; + } + } + + // Legacy support: encode and persist a deprecated protobuf build Config. + // At the moment, these are still covered by Go snapshot tests. + let mut meta = tables::Meta::new(); + meta.insert_row(build_config); + tables::persist_tables(&db, &[&meta]).context("failed to persist catalog meta")?; + + tracing::info!(?db_path, "wrote build database"); + Ok(()) +} + +/// Gather all file URLs and contents generated by validations. +/// Malformed URLs are ignored, as they're already surfaced as validation errors. +pub fn generate_files( + project_root: &url::Url, + validations: &tables::Validations, +) -> anyhow::Result<()> { + let mut files = BTreeMap::new(); + + for row in validations.built_collections.iter() { + let Some(validated) = &row.validated else { continue }; + + for (url, content) in &validated.generated_files { + if let Ok(url) = url::Url::parse(&url) { + files.insert(url, content.as_bytes()); + } + } + } + let files = files + .into_iter() + .map(|(resource, content)| (resource, content.to_vec())) + .collect(); + + write_files(project_root, files) +} + +/// Write out files which are located underneath the `project_root`. +pub fn write_files(project_root: &url::Url, files: Vec<(url::Url, Vec)>) -> anyhow::Result<()> { + for (resource, content) in files { + let Ok(path) = resource.to_file_path() else { + tracing::info!(%resource, "not writing the resource because it's remote and not local"); + continue; + }; + if !resource.as_str().starts_with(project_root.as_str()) { + tracing::info!(%resource, %project_root, + "not writing local resource because it's not under the project root"); + continue; + } + if let Some(parent) = path.parent() { + std::fs::create_dir_all(path.parent().unwrap()).with_context(|| { + format!("failed to create directory {}", parent.to_string_lossy()) + })?; + } + std::fs::write(&path, content).with_context(|| format!("failed to write {resource}"))?; + + tracing::info!(path=%path.to_str().unwrap_or(resource.as_str()), "wrote file"); + } + Ok(()) +} + +struct Fetcher { + file_root: PathBuf, +} + +impl sources::Fetcher for Fetcher { + fn fetch<'a>( + &'a self, + resource: &'a url::Url, + content_type: flow::ContentType, + ) -> BoxFuture<'a, anyhow::Result> { + tracing::debug!(%resource, ?content_type, file_root=?self.file_root, "fetching resource"); + fetch_async(resource.clone(), self.file_root.clone()).boxed() + } +} + +async fn fetch_async(resource: url::Url, mut file_path: PathBuf) -> anyhow::Result { + match resource.scheme() { + "http" | "https" => { + let resp = reqwest::get(resource.as_str()).await?; + let status = resp.status(); + + if status.is_success() { + Ok(resp.bytes().await?) 
+ } else { + let body = resp.text().await?; + anyhow::bail!("{status}: {body}"); + } + } + "file" => { + let rel_path = resource + .to_file_path() + .map_err(|err| anyhow::anyhow!("failed to convert file uri to path: {:?}", err))?; + + // `rel_path` is absolute, so we must extend `file_path` rather than joining. + // Skip the first component, which is a RootDir token. + file_path.extend(rel_path.components().skip(1)); + + let bytes = std::fs::read(&file_path) + .with_context(|| format!("failed to read {file_path:?}"))?; + Ok(bytes.into()) + } + "stdin" => { + use tokio::io::AsyncReadExt; + + let mut bytes = Vec::new(); + tokio::io::stdin() + .read_to_end(&mut bytes) + .await + .context("reading stdin")?; + + Ok(bytes.into()) + } + _ => Err(anyhow::anyhow!( + "cannot fetch unsupported URI scheme: '{resource}'" + )), + } +} + +pub struct Connectors +where + L: Fn(&ops::Log) + Send + Sync + Clone + 'static, +{ + noop_captures: bool, + noop_derivations: bool, + noop_materializations: bool, + runtime: runtime::Runtime, +} + +impl validation::Connectors for Connectors +where + L: Fn(&ops::Log) + Send + Sync + Clone + 'static, +{ + fn validate_capture<'a>( + &'a self, + request: capture::Request, + ) -> BoxFuture<'a, anyhow::Result> { + async move { + if self.noop_captures { + validation::NoOpConnectors.validate_capture(request).await + } else { + Ok(self.runtime.clone().unary_capture(request).await?) + } + } + .boxed() + } + + fn validate_derivation<'a>( + &'a self, + request: derive::Request, + ) -> BoxFuture<'a, anyhow::Result> { + async move { + if self.noop_derivations { + validation::NoOpConnectors + .validate_derivation(request) + .await + } else { + Ok(self.runtime.clone().unary_derive(request).await?) + } + } + .boxed() + } + + fn validate_materialization<'a>( + &'a self, + request: materialize::Request, + ) -> BoxFuture<'a, anyhow::Result> { + async move { + if self.noop_materializations { + validation::NoOpConnectors + .validate_materialization(request) + .await + } else { + Ok(self.runtime.clone().unary_materialize(request).await?) + } + } + .boxed() } } + +pub const STDIN_URL: &str = "stdin://root/flow.yaml"; diff --git a/crates/derive-sqlite/src/connector.rs b/crates/derive-sqlite/src/connector.rs index 9662c65e9c..e389734189 100644 --- a/crates/derive-sqlite/src/connector.rs +++ b/crates/derive-sqlite/src/connector.rs @@ -196,9 +196,9 @@ fn parse_open( config_json, transforms, .. - } = derivation.unwrap(); + } = derivation.as_ref().unwrap(); - let config: Config = serde_json::from_str(&config_json) + let config: Config = serde_json::from_str(config_json) .with_context(|| format!("failed to parse SQLite configuration: {config_json}"))?; let transforms: Vec = transforms @@ -212,7 +212,7 @@ fn parse_open( .. 
} = transform; - let source = source.unwrap(); + let source = source.as_ref().unwrap(); let params = source .projections .iter() @@ -224,9 +224,9 @@ fn parse_open( })?; Ok(Transform { - name, + name: name.clone(), block, - source: source.name, + source: source.name.clone(), params, }) }) diff --git a/crates/derive-sqlite/src/validate.rs b/crates/derive-sqlite/src/validate.rs index 86a4f667cf..91757de668 100644 --- a/crates/derive-sqlite/src/validate.rs +++ b/crates/derive-sqlite/src/validate.rs @@ -16,8 +16,7 @@ pub fn parse_validate( shuffle_key_types: _, project_root: _, import_map: _, - network_ports: _, - } = validate; + } = &validate; let config: Config = serde_json::from_str(&config_json) .with_context(|| format!("failed to parse SQLite configuration: {config_json}"))?; @@ -32,7 +31,7 @@ pub fn parse_validate( shuffle_lambda_config_json: _, } = transform; - let source = source.unwrap(); + let source = source.as_ref().unwrap(); let params = source .projections .iter() @@ -44,9 +43,9 @@ pub fn parse_validate( })?; Ok(Transform { - name, + name: name.clone(), block, - source: source.name, + source: source.name.clone(), params, }) }) diff --git a/crates/derive-typescript/Cargo.lock b/crates/derive-typescript/Cargo.lock new file mode 100644 index 0000000000..b598b8a282 --- /dev/null +++ b/crates/derive-typescript/Cargo.lock @@ -0,0 +1,1844 @@ +# This file is automatically @generated by Cargo. +# It is not intended for manual editing. +version = 3 + +[[package]] +name = "addr" +version = "0.15.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a93b8a41dbe230ad5087cc721f8d41611de654542180586b315d9f4cf6b72bef" +dependencies = [ + "psl-types", +] + +[[package]] +name = "ahash" +version = "0.7.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fcb51a0695d8f838b1ee009b3fbf66bda078cd64590202a864a8f3e8c4315c47" +dependencies = [ + "getrandom", + "once_cell", + "version_check", +] + +[[package]] +name = "aho-corasick" +version = "1.0.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0c378d78423fdad8089616f827526ee33c19f2fddbd5de1629152c9593ba4783" +dependencies = [ + "memchr", +] + +[[package]] +name = "anyhow" +version = "1.0.75" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a4668cab20f66d8d020e1fbc0ebe47217433c1b6c8f2040faf858554e394ace6" + +[[package]] +name = "autocfg" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d468802bab17cbc0cc575e9b053f41e72aa36bfa6b7f55e3529ffa43161b97fa" + +[[package]] +name = "base64" +version = "0.13.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9e1b586273c5702936fe7b7d6896644d8be71e6314cfe09d3167c95f712589e8" + +[[package]] +name = "bigdecimal" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a6773ddc0eafc0e509fb60e48dff7f450f8e674a0686ae8605e8d9901bd5eefa" +dependencies = [ + "num-bigint", + "num-integer", + "num-traits", +] + +[[package]] +name = "bit-set" +version = "0.5.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0700ddab506f33b20a03b13996eccd309a48e5ff77d0d95926aa0210fb4e95f1" +dependencies = [ + "bit-vec", +] + +[[package]] +name = "bit-vec" +version = "0.6.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "349f9b6a179ed607305526ca489b34ad0a41aed5f7980fa90eb03160b69598fb" + +[[package]] +name = "bitflags" +version = "1.3.2" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a" + +[[package]] +name = "bitflags" +version = "2.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b4682ae6287fcf752ecaabbfcc7b6f9b72aa33933dc23a554d853aea8eea8635" + +[[package]] +name = "bitvec" +version = "0.19.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "55f93d0ef3363c364d5976646a38f04cf67cfe1d4c8d160cdea02cab2c116b33" +dependencies = [ + "funty 1.1.0", + "radium 0.5.3", + "tap", + "wyz 0.2.0", +] + +[[package]] +name = "bitvec" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1bc2832c24239b0141d5674bb9174f9d68a8b5b3f2753311927c172ca46f7e9c" +dependencies = [ + "funty 2.0.0", + "radium 0.7.0", + "tap", + "wyz 0.5.1", +] + +[[package]] +name = "bumpalo" +version = "3.13.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a3e2c3daef883ecc1b5d58c15adae93470a91d425f3532ba1695849656af3fc1" + +[[package]] +name = "bytecheck" +version = "0.6.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8b6372023ac861f6e6dc89c8344a8f398fb42aaba2b5dbc649ca0c0e9dbcb627" +dependencies = [ + "bytecheck_derive", + "ptr_meta", + "simdutf8", +] + +[[package]] +name = "bytecheck_derive" +version = "0.6.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a7ec4c6f261935ad534c0c22dbef2201b45918860eb1c574b972bd213a76af61" +dependencies = [ + "proc-macro2", + "quote", + "syn 1.0.109", +] + +[[package]] +name = "byteorder" +version = "1.4.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "14c189c53d098945499cdfa7ecc63567cf3886b3332b312a5b4585d8d3a6a610" + +[[package]] +name = "bytes" +version = "1.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "89b2fd2a0dcf38d7971e2194b6b6eebab45ae01067456a7fd93d5547a61b70be" + +[[package]] +name = "caseless" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "808dab3318747be122cb31d36de18d4d1c81277a76f8332a02b81a3d73463d7f" +dependencies = [ + "regex", + "unicode-normalization", +] + +[[package]] +name = "cc" +version = "1.0.83" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f1174fb0b6ec23863f8b971027804a42614e347eafb0a95bf0b12cdae21fc4d0" +dependencies = [ + "libc", +] + +[[package]] +name = "cfg-if" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" + +[[package]] +name = "chrono" +version = "0.4.27" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f56b4c72906975ca04becb8a30e102dfecddd0c06181e3e95ddc444be28881f8" +dependencies = [ + "num-traits", +] + +[[package]] +name = "console" +version = "0.15.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c926e00cc70edefdc64d3a5ff31cc65bb97a3460097762bd23afb4d8145fccf8" +dependencies = [ + "encode_unicode", + "lazy_static", + "libc", + "windows-sys 0.45.0", +] + +[[package]] +name = "deranged" +version = "0.3.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f2696e8a945f658fd14dc3b87242e6b80cd0f36ff04ea560fa39082368847946" +dependencies = [ + "serde", +] + +[[package]] +name = "derive-typescript" +version = "0.1.0" +dependencies = [ + "anyhow", + "doc", 
+ "insta", + "itertools 0.11.0", + "json", + "lazy_static", + "models", + "proto-flow", + "regex", + "serde", + "serde_json", + "serde_yaml 0.9.25", + "sources", + "tables", + "tempfile", + "tracing", + "tracing-subscriber", + "url", +] + +[[package]] +name = "doc" +version = "0.0.0" +source = "git+https://github.com/estuary/flow?branch=johnny/rust-connectors#b0f96544b94a1103217c4b3c6e2071518717cf2c" +dependencies = [ + "base64", + "bumpalo", + "bytes", + "fancy-regex", + "futures", + "fxhash", + "itertools 0.10.5", + "json", + "lz4", + "rkyv", + "schemars", + "serde", + "serde_json", + "tempfile", + "thiserror", + "time", + "tracing", + "tuple", + "url", + "uuid", +] + +[[package]] +name = "dyn-clone" +version = "1.0.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bbfc4744c1b8f2a09adc0e55242f60b1af195d88596bd8700be74418c056c555" + +[[package]] +name = "either" +version = "1.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a26ae43d7bcc3b814de94796a5e736d4029efb0ee900c12e2d54c993ad1a1e07" + +[[package]] +name = "encode_unicode" +version = "0.3.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a357d28ed41a50f9c765dbfe56cbc04a64e53e5fc58ba79fbc34c10ef3df831f" + +[[package]] +name = "equivalent" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5443807d6dff69373d433ab9ef5378ad8df50ca6298caf15de6e52e24aaf54d5" + +[[package]] +name = "errno" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "136526188508e25c6fef639d7927dfb3e0e3084488bf202267829cf7fc23dbdd" +dependencies = [ + "errno-dragonfly", + "libc", + "windows-sys 0.48.0", +] + +[[package]] +name = "errno-dragonfly" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "aa68f1b12764fab894d2755d2518754e71b4fd80ecfb822714a1206c2aab39bf" +dependencies = [ + "cc", + "libc", +] + +[[package]] +name = "fancy-regex" +version = "0.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0678ab2d46fa5195aaf59ad034c083d351377d4af57f3e073c074d0da3e3c766" +dependencies = [ + "bit-set", + "regex", +] + +[[package]] +name = "fastrand" +version = "2.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6999dc1837253364c2ebb0704ba97994bd874e8f195d665c50b7548f6ea92764" + +[[package]] +name = "fixedbitset" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0ce7134b9999ecaf8bcd65542e436736ef32ddca1b3e06094cb6ec5755203b80" + +[[package]] +name = "form_urlencoded" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a62bc1cf6f830c2ec14a513a9fb124d0a213a629668a4186f329db21fe045652" +dependencies = [ + "percent-encoding", +] + +[[package]] +name = "funty" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fed34cd105917e91daa4da6b3728c47b068749d6a62c59811f06ed2ac71d9da7" + +[[package]] +name = "funty" +version = "2.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e6d5a32815ae3f33302d95fdcb2ce17862f8c65363dcfd29360480ba1001fc9c" + +[[package]] +name = "futures" +version = "0.3.28" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "23342abe12aba583913b2e62f22225ff9c950774065e4bfb61a19cd9770fec40" +dependencies = [ + "futures-channel", + "futures-core", + "futures-executor", + 
"futures-io", + "futures-sink", + "futures-task", + "futures-util", +] + +[[package]] +name = "futures-channel" +version = "0.3.28" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "955518d47e09b25bbebc7a18df10b81f0c766eaf4c4f1cccef2fca5f2a4fb5f2" +dependencies = [ + "futures-core", + "futures-sink", +] + +[[package]] +name = "futures-core" +version = "0.3.28" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4bca583b7e26f571124fe5b7561d49cb2868d79116cfa0eefce955557c6fee8c" + +[[package]] +name = "futures-executor" +version = "0.3.28" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ccecee823288125bd88b4d7f565c9e58e41858e47ab72e8ea2d64e93624386e0" +dependencies = [ + "futures-core", + "futures-task", + "futures-util", +] + +[[package]] +name = "futures-io" +version = "0.3.28" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4fff74096e71ed47f8e023204cfd0aa1289cd54ae5430a9523be060cdb849964" + +[[package]] +name = "futures-macro" +version = "0.3.28" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "89ca545a94061b6365f2c7355b4b32bd20df3ff95f02da9329b34ccc3bd6ee72" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.29", +] + +[[package]] +name = "futures-sink" +version = "0.3.28" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f43be4fe21a13b9781a69afa4985b0f6ee0e1afab2c6f454a8cf30e2b2237b6e" + +[[package]] +name = "futures-task" +version = "0.3.28" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "76d3d132be6c0e6aa1534069c705a74a5997a356c0dc2f86a47765e5617c5b65" + +[[package]] +name = "futures-util" +version = "0.3.28" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "26b01e40b772d54cf6c6d721c1d1abd0647a0106a12ecaa1c186273392a69533" +dependencies = [ + "futures-channel", + "futures-core", + "futures-io", + "futures-macro", + "futures-sink", + "futures-task", + "memchr", + "pin-project-lite", + "pin-utils", + "slab", +] + +[[package]] +name = "fxhash" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c31b6d751ae2c7f11320402d34e41349dd1016f8d5d45e48c4312bc8625af50c" +dependencies = [ + "byteorder", +] + +[[package]] +name = "getrandom" +version = "0.2.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "be4136b2a15dd319360be1c07d9933517ccf0be8f16bf62a3bee4f0d618df427" +dependencies = [ + "cfg-if", + "libc", + "wasi", +] + +[[package]] +name = "hashbrown" +version = "0.12.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8a9ee70c43aaf417c914396645a0fa852624801b24ebb7ae78fe8272889ac888" +dependencies = [ + "ahash", +] + +[[package]] +name = "hashbrown" +version = "0.14.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2c6201b9ff9fd90a5a3bac2e56a830d0caa509576f0e503818ee82c181b3437a" + +[[package]] +name = "heck" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "95505c38b4572b2d910cecb0281560f54b440a19336cbbcb27bf6ce6adc6f5a8" + +[[package]] +name = "humantime" +version = "2.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9a3a5bfb195931eeb336b2a7b4d761daec841b97f947d34394601737a7bba5e4" + +[[package]] +name = "humantime-serde" +version = "1.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"57a3db5ea5923d99402c94e9feb261dc5ee9b4efa158b0315f788cf549cc200c" +dependencies = [ + "humantime", + "serde", +] + +[[package]] +name = "idna" +version = "0.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "418a0a6fab821475f634efe3ccc45c013f742efe03d853e8d3355d5cb850ecf8" +dependencies = [ + "matches", + "unicode-bidi", + "unicode-normalization", +] + +[[package]] +name = "idna" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7d20d6b07bfbc108882d88ed8e37d39636dcc260e15e30c45e6ba089610b917c" +dependencies = [ + "unicode-bidi", + "unicode-normalization", +] + +[[package]] +name = "if_chain" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cb56e1aa765b4b4f3aadfab769793b7087bb03a4ea4920644a6d238e2df5b9ed" + +[[package]] +name = "indexmap" +version = "1.9.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bd070e393353796e801d209ad339e89596eb4c8d430d18ede6a1cced8fafbd99" +dependencies = [ + "autocfg", + "hashbrown 0.12.3", +] + +[[package]] +name = "indexmap" +version = "2.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d5477fe2230a79769d8dc68e0eabf5437907c0457a5614a9e8dddb67f65eb65d" +dependencies = [ + "equivalent", + "hashbrown 0.14.0", +] + +[[package]] +name = "insta" +version = "1.31.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a0770b0a3d4c70567f0d58331f3088b0e4c4f56c9b8d764efe654b4a5d46de3a" +dependencies = [ + "console", + "lazy_static", + "linked-hash-map", + "similar", + "yaml-rust", +] + +[[package]] +name = "integer-sqrt" +version = "0.1.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "276ec31bcb4a9ee45f58bec6f9ec700ae4cf4f4f8f2fa7e06cb406bd5ffdd770" +dependencies = [ + "num-traits", +] + +[[package]] +name = "iri-string" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6d0586ad318a04c73acdbad33f67969519b5452c80770c4c72059a686da48a7e" +dependencies = [ + "memchr", + "serde", +] + +[[package]] +name = "itertools" +version = "0.10.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b0fd2260e829bddf4cb6ea802289de2f86d6a7a690192fbe91b3f46e0f2c8473" +dependencies = [ + "either", +] + +[[package]] +name = "itertools" +version = "0.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b1c173a5686ce8bfa551b3563d0c2170bf24ca44da99c7ca4bfdab5418c3fe57" +dependencies = [ + "either", +] + +[[package]] +name = "itoa" +version = "1.0.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "af150ab688ff2122fcef229be89cb50dd66af9e01a4ff320cc137eecc9bacc38" + +[[package]] +name = "json" +version = "0.0.0" +source = "git+https://github.com/estuary/flow?branch=johnny/rust-connectors#b0f96544b94a1103217c4b3c6e2071518717cf2c" +dependencies = [ + "addr", + "bigdecimal", + "bitvec 0.19.6", + "fancy-regex", + "fxhash", + "iri-string", + "itertools 0.10.5", + "lazy_static", + "percent-encoding", + "serde", + "serde_json", + "thiserror", + "time", + "tracing", + "url", + "uuid", +] + +[[package]] +name = "labels" +version = "0.0.0" +source = "git+https://github.com/estuary/flow?branch=johnny/rust-connectors#b0f96544b94a1103217c4b3c6e2071518717cf2c" + +[[package]] +name = "lazy_static" +version = "1.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646" + +[[package]] +name = "libc" +version = "0.2.147" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b4668fb0ea861c1df094127ac5f1da3409a82116a4ba74fca2e58ef927159bb3" + +[[package]] +name = "linked-hash-map" +version = "0.5.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0717cef1bc8b636c6e1c1bbdefc09e6322da8a9321966e8928ef80d20f7f770f" + +[[package]] +name = "linux-raw-sys" +version = "0.4.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "57bcfdad1b858c2db7c38303a6d2ad4dfaf5eb53dfeb0910128b2c26d6158503" + +[[package]] +name = "log" +version = "0.4.20" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b5e6163cb8c49088c2c36f57875e58ccd8c87c7427f7fbd50ea6710b2f3f2e8f" + +[[package]] +name = "lz4" +version = "1.24.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7e9e2dd86df36ce760a60f6ff6ad526f7ba1f14ba0356f8254fb6905e6494df1" +dependencies = [ + "libc", + "lz4-sys", +] + +[[package]] +name = "lz4-sys" +version = "1.9.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "57d27b317e207b10f69f5e75494119e391a96f48861ae870d1da6edac98ca900" +dependencies = [ + "cc", + "libc", +] + +[[package]] +name = "matchers" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8263075bb86c5a1b1427b5ae862e8889656f126e9f77c484496e8b47cf5c5558" +dependencies = [ + "regex-automata 0.1.10", +] + +[[package]] +name = "matches" +version = "0.1.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2532096657941c2fea9c289d370a250971c689d4f143798ff67113ec042024a5" + +[[package]] +name = "memchr" +version = "2.6.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5486aed0026218e61b8a01d5fbd5a0a134649abb71a0e53b7bc088529dced86e" + +[[package]] +name = "models" +version = "0.0.0" +source = "git+https://github.com/estuary/flow?branch=johnny/rust-connectors#b0f96544b94a1103217c4b3c6e2071518717cf2c" +dependencies = [ + "caseless", + "humantime-serde", + "lazy_static", + "regex", + "schemars", + "serde", + "serde_json", + "superslice", + "time", + "unicode-normalization", + "url", + "validator", +] + +[[package]] +name = "multimap" +version = "0.8.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e5ce46fe64a9d73be07dcbe690a38ce1b293be448fd8ce1e6c1b8062c9f72c6a" + +[[package]] +name = "nu-ansi-term" +version = "0.46.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "77a8165726e8236064dbb45459242600304b42a5ea24ee2948e18e023bf7ba84" +dependencies = [ + "overload", + "winapi", +] + +[[package]] +name = "num-bigint" +version = "0.4.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "608e7659b5c3d7cba262d894801b9ec9d00de989e8a82bd4bef91d08da45cdc0" +dependencies = [ + "autocfg", + "num-integer", + "num-traits", +] + +[[package]] +name = "num-integer" +version = "0.1.45" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "225d3389fb3509a24c93f5c29eb6bde2586b98d9f016636dff58d7c6f7569cd9" +dependencies = [ + "autocfg", + "num-traits", +] + +[[package]] +name = "num-traits" +version = "0.2.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f30b0abd723be7e2ffca1272140fac1a2f084c77ec3e123c192b66af1ee9e6c2" +dependencies = [ + "autocfg", +] + 
+[[package]] +name = "once_cell" +version = "1.18.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dd8b5dd2ae5ed71462c540258bedcb51965123ad7e7ccf4b9a8cafaa4a63576d" + +[[package]] +name = "overload" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b15813163c1d831bf4a13c3610c05c0d03b39feb07f7e09fa234dac9b15aaf39" + +[[package]] +name = "pathfinding" +version = "3.0.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cb45190a18e771c500291c549959777a3be38d30113a860930bc1f2119f0cc13" +dependencies = [ + "fixedbitset", + "indexmap 1.9.3", + "integer-sqrt", + "itertools 0.10.5", + "num-traits", + "rustc-hash", + "thiserror", +] + +[[package]] +name = "pbjson" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "048f9ac93c1eab514f9470c4bc8d97ca2a0a236b84f45cc19d69a59fc11467f6" +dependencies = [ + "base64", + "serde", +] + +[[package]] +name = "pbjson-build" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bdbb7b706f2afc610f3853550cdbbf6372fd324824a087806bd4480ea4996e24" +dependencies = [ + "heck", + "itertools 0.10.5", + "prost", + "prost-types", +] + +[[package]] +name = "pbjson-types" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4a88c8d87f99a4ac14325e7a4c24af190fca261956e3b82dd7ed67e77e6c7043" +dependencies = [ + "bytes", + "chrono", + "pbjson", + "pbjson-build", + "prost", + "prost-build", + "serde", +] + +[[package]] +name = "percent-encoding" +version = "2.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9b2a4787296e9989611394c33f193f676704af1686e70b8f8033ab5ba9a35a94" + +[[package]] +name = "petgraph" +version = "0.6.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e1d3afd2628e69da2be385eb6f2fd57c8ac7977ceeff6dc166ff1657b0e386a9" +dependencies = [ + "fixedbitset", + "indexmap 2.0.0", +] + +[[package]] +name = "pin-project-lite" +version = "0.2.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8afb450f006bf6385ca15ef45d71d2288452bc3683ce2e2cacc0d18e4be60b58" + +[[package]] +name = "pin-utils" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8b870d8c151b6f2fb93e84a13146138f05d02ed11c7e7c54f8826aaaf7c9f184" + +[[package]] +name = "prettyplease" +version = "0.1.25" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6c8646e95016a7a6c4adea95bafa8a16baab64b583356217f2c85db4a39d9a86" +dependencies = [ + "proc-macro2", + "syn 1.0.109", +] + +[[package]] +name = "proc-macro-error" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "da25490ff9892aab3fcf7c36f08cfb902dd3e71ca0f9f9517bea02a73a5ce38c" +dependencies = [ + "proc-macro-error-attr", + "proc-macro2", + "quote", + "syn 1.0.109", + "version_check", +] + +[[package]] +name = "proc-macro-error-attr" +version = "1.0.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a1be40180e52ecc98ad80b184934baf3d0d29f979574e439af5a55274b35f869" +dependencies = [ + "proc-macro2", + "quote", + "version_check", +] + +[[package]] +name = "proc-macro2" +version = "1.0.66" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "18fb31db3f9bddb2ea821cde30a9f70117e3f119938b5ee630b7403aa6e2ead9" +dependencies = [ + "unicode-ident", +] + 
+[[package]] +name = "prost" +version = "0.11.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0b82eaa1d779e9a4bc1c3217db8ffbeabaae1dca241bf70183242128d48681cd" +dependencies = [ + "bytes", + "prost-derive", +] + +[[package]] +name = "prost-build" +version = "0.11.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "119533552c9a7ffacc21e099c24a0ac8bb19c2a2a3f363de84cd9b844feab270" +dependencies = [ + "bytes", + "heck", + "itertools 0.10.5", + "lazy_static", + "log", + "multimap", + "petgraph", + "prettyplease", + "prost", + "prost-types", + "regex", + "syn 1.0.109", + "tempfile", + "which", +] + +[[package]] +name = "prost-derive" +version = "0.11.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e5d2d8d10f3c6ded6da8b05b5fb3b8a5082514344d56c9f871412d29b4e075b4" +dependencies = [ + "anyhow", + "itertools 0.10.5", + "proc-macro2", + "quote", + "syn 1.0.109", +] + +[[package]] +name = "prost-types" +version = "0.11.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "213622a1460818959ac1181aaeb2dc9c7f63df720db7d788b3e24eacd1983e13" +dependencies = [ + "prost", +] + +[[package]] +name = "proto-flow" +version = "0.0.0" +source = "git+https://github.com/estuary/flow?branch=johnny/rust-connectors#b0f96544b94a1103217c4b3c6e2071518717cf2c" +dependencies = [ + "bytes", + "pbjson", + "pbjson-types", + "prost", + "proto-gazette", + "serde", + "serde_json", + "zeroize", +] + +[[package]] +name = "proto-gazette" +version = "0.0.0" +source = "git+https://github.com/estuary/flow?branch=johnny/rust-connectors#b0f96544b94a1103217c4b3c6e2071518717cf2c" +dependencies = [ + "bytes", + "pbjson", + "pbjson-types", + "prost", + "serde", +] + +[[package]] +name = "psl-types" +version = "2.0.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "33cb294fe86a74cbcf50d4445b37da762029549ebeea341421c7c70370f86cac" + +[[package]] +name = "ptr_meta" +version = "0.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0738ccf7ea06b608c10564b31debd4f5bc5e197fc8bfe088f68ae5ce81e7a4f1" +dependencies = [ + "ptr_meta_derive", +] + +[[package]] +name = "ptr_meta_derive" +version = "0.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "16b845dbfca988fa33db069c0e230574d15a3088f147a87b64c7589eb662c9ac" +dependencies = [ + "proc-macro2", + "quote", + "syn 1.0.109", +] + +[[package]] +name = "quote" +version = "1.0.33" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5267fca4496028628a95160fc423a33e8b2e6af8a5302579e322e4b520293cae" +dependencies = [ + "proc-macro2", +] + +[[package]] +name = "radium" +version = "0.5.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "941ba9d78d8e2f7ce474c015eea4d9c6d25b6a3327f9832ee29a4de27f91bbb8" + +[[package]] +name = "radium" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dc33ff2d4973d518d823d61aa239014831e521c75da58e3df4840d3f47749d09" + +[[package]] +name = "redox_syscall" +version = "0.3.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "567664f262709473930a4bf9e51bf2ebf3348f2e748ccc50dea20646858f8f29" +dependencies = [ + "bitflags 1.3.2", +] + +[[package]] +name = "regex" +version = "1.9.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "12de2eff854e5fa4b1295edd650e227e9d8fb0c9e90b12e7f36d6a6811791a29" 
+dependencies = [ + "aho-corasick", + "memchr", + "regex-automata 0.3.7", + "regex-syntax 0.7.5", +] + +[[package]] +name = "regex-automata" +version = "0.1.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6c230d73fb8d8c1b9c0b3135c5142a8acee3a0558fb8db5cf1cb65f8d7862132" +dependencies = [ + "regex-syntax 0.6.29", +] + +[[package]] +name = "regex-automata" +version = "0.3.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "49530408a136e16e5b486e883fbb6ba058e8e4e8ae6621a77b048b314336e629" +dependencies = [ + "aho-corasick", + "memchr", + "regex-syntax 0.7.5", +] + +[[package]] +name = "regex-syntax" +version = "0.6.29" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f162c6dd7b008981e4d40210aca20b4bd0f9b60ca9271061b07f78537722f2e1" + +[[package]] +name = "regex-syntax" +version = "0.7.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dbb5fb1acd8a1a18b3dd5be62d25485eb770e05afb408a9627d14d451bae12da" + +[[package]] +name = "rend" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "581008d2099240d37fb08d77ad713bcaec2c4d89d50b5b21a8bb1996bbab68ab" +dependencies = [ + "bytecheck", +] + +[[package]] +name = "rkyv" +version = "0.7.42" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0200c8230b013893c0b2d6213d6ec64ed2b9be2e0e016682b7224ff82cff5c58" +dependencies = [ + "bitvec 1.0.1", + "bytecheck", + "hashbrown 0.12.3", + "ptr_meta", + "rend", + "rkyv_derive", + "seahash", + "tinyvec", + "uuid", +] + +[[package]] +name = "rkyv_derive" +version = "0.7.42" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b2e06b915b5c230a17d7a736d1e2e63ee753c256a8614ef3f5147b13a4f5541d" +dependencies = [ + "proc-macro2", + "quote", + "syn 1.0.109", +] + +[[package]] +name = "rustc-hash" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "08d43f7aa6b08d49f382cde6a7982047c3426db949b1424bc4b7ec9ae12c6ce2" + +[[package]] +name = "rustix" +version = "0.38.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ed6248e1caa625eb708e266e06159f135e8c26f2bb7ceb72dc4b2766d0340964" +dependencies = [ + "bitflags 2.4.0", + "errno", + "libc", + "linux-raw-sys", + "windows-sys 0.48.0", +] + +[[package]] +name = "ryu" +version = "1.0.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1ad4cc8da4ef723ed60bced201181d83791ad433213d8c24efffda1eec85d741" + +[[package]] +name = "schemars" +version = "0.8.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "763f8cd0d4c71ed8389c90cb8100cba87e763bd01a8e614d4f0af97bcd50a161" +dependencies = [ + "dyn-clone", + "schemars_derive", + "serde", + "serde_json", +] + +[[package]] +name = "schemars_derive" +version = "0.8.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ec0f696e21e10fa546b7ffb1c9672c6de8fbc7a81acf59524386d8639bf12737" +dependencies = [ + "proc-macro2", + "quote", + "serde_derive_internals", + "syn 1.0.109", +] + +[[package]] +name = "seahash" +version = "4.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1c107b6f4780854c8b126e228ea8869f4d7b71260f962fefb57b996b8959ba6b" + +[[package]] +name = "serde" +version = "1.0.188" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"cf9e0fcba69a370eed61bcf2b728575f726b50b55cba78064753d708ddc7549e" +dependencies = [ + "serde_derive", +] + +[[package]] +name = "serde-transcode" +version = "1.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "590c0e25c2a5bb6e85bf5c1bce768ceb86b316e7a01bdf07d2cb4ec2271990e2" +dependencies = [ + "serde", +] + +[[package]] +name = "serde_derive" +version = "1.0.188" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4eca7ac642d82aa35b60049a6eccb4be6be75e599bd2e9adb5f875a737654af2" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.29", +] + +[[package]] +name = "serde_derive_internals" +version = "0.26.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "85bf8229e7920a9f636479437026331ce11aa132b4dde37d121944a44d6e5f3c" +dependencies = [ + "proc-macro2", + "quote", + "syn 1.0.109", +] + +[[package]] +name = "serde_json" +version = "1.0.105" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "693151e1ac27563d6dbcec9dee9fbd5da8539b20fa14ad3752b2e6d363ace360" +dependencies = [ + "itoa", + "ryu", + "serde", +] + +[[package]] +name = "serde_yaml" +version = "0.8.26" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "578a7433b776b56a35785ed5ce9a7e777ac0598aac5a6dd1b4b18a307c7fc71b" +dependencies = [ + "indexmap 1.9.3", + "ryu", + "serde", + "yaml-rust", +] + +[[package]] +name = "serde_yaml" +version = "0.9.25" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1a49e178e4452f45cb61d0cd8cebc1b0fafd3e41929e996cef79aa3aca91f574" +dependencies = [ + "indexmap 2.0.0", + "itoa", + "ryu", + "serde", + "unsafe-libyaml", +] + +[[package]] +name = "sharded-slab" +version = "0.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "900fba806f70c630b0a382d0d825e17a0f19fcd059a2ade1ff237bcddf446b31" +dependencies = [ + "lazy_static", +] + +[[package]] +name = "simdutf8" +version = "0.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f27f6278552951f1f2b8cf9da965d10969b2efdea95a6ec47987ab46edfe263a" + +[[package]] +name = "similar" +version = "2.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "420acb44afdae038210c99e69aae24109f32f15500aa708e81d46c9f29d55fcf" + +[[package]] +name = "slab" +version = "0.4.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8f92a496fb766b417c996b9c5e57daf2f7ad3b0bebe1ccfca4856390e3d3bb67" +dependencies = [ + "autocfg", +] + +[[package]] +name = "smallvec" +version = "1.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "62bb4feee49fdd9f707ef802e22365a35de4b7b299de4763d44bfea899442ff9" + +[[package]] +name = "sources" +version = "0.0.0" +source = "git+https://github.com/estuary/flow?branch=johnny/rust-connectors#b0f96544b94a1103217c4b3c6e2071518717cf2c" +dependencies = [ + "anyhow", + "bytes", + "doc", + "futures", + "json", + "models", + "proto-flow", + "schemars", + "serde", + "serde-transcode", + "serde_json", + "serde_yaml 0.8.26", + "tables", + "thiserror", + "tracing", + "url", + "yaml-merge-keys", +] + +[[package]] +name = "superslice" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ab16ced94dbd8a46c82fd81e3ed9a8727dac2977ea869d217bcc4ea1f122e81f" + +[[package]] +name = "syn" +version = "1.0.109" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"72b64191b275b66ffe2469e8af2c1cfe3bafa67b529ead792a6d0160888b4237" +dependencies = [ + "proc-macro2", + "quote", + "unicode-ident", +] + +[[package]] +name = "syn" +version = "2.0.29" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c324c494eba9d92503e6f1ef2e6df781e78f6a7705a0202d9801b198807d518a" +dependencies = [ + "proc-macro2", + "quote", + "unicode-ident", +] + +[[package]] +name = "tables" +version = "0.0.0" +source = "git+https://github.com/estuary/flow?branch=johnny/rust-connectors#b0f96544b94a1103217c4b3c6e2071518717cf2c" +dependencies = [ + "anyhow", + "bytes", + "doc", + "json", + "labels", + "models", + "pathfinding", + "prost", + "proto-flow", + "serde", + "serde_json", + "superslice", + "url", +] + +[[package]] +name = "tap" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "55937e1799185b12863d447f42597ed69d9928686b8d88a1df17376a097d8369" + +[[package]] +name = "tempfile" +version = "3.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cb94d2f3cc536af71caac6b6fcebf65860b347e7ce0cc9ebe8f70d3e521054ef" +dependencies = [ + "cfg-if", + "fastrand", + "redox_syscall", + "rustix", + "windows-sys 0.48.0", +] + +[[package]] +name = "thiserror" +version = "1.0.47" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "97a802ec30afc17eee47b2855fc72e0c4cd62be9b4efe6591edde0ec5bd68d8f" +dependencies = [ + "thiserror-impl", +] + +[[package]] +name = "thiserror-impl" +version = "1.0.47" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6bb623b56e39ab7dcd4b1b98bb6c8f8d907ed255b18de254088016b27a8ee19b" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.29", +] + +[[package]] +name = "thread_local" +version = "1.1.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3fdd6f064ccff2d6567adcb3873ca630700f00b5ad3f060c25b5dcfd9a4ce152" +dependencies = [ + "cfg-if", + "once_cell", +] + +[[package]] +name = "time" +version = "0.3.28" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "17f6bb557fd245c28e6411aa56b6403c689ad95061f50e4be16c274e70a17e48" +dependencies = [ + "deranged", + "itoa", + "serde", + "time-core", + "time-macros", +] + +[[package]] +name = "time-core" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7300fbefb4dadc1af235a9cef3737cea692a9d97e1b9cbcd4ebdae6f8868e6fb" + +[[package]] +name = "time-macros" +version = "0.2.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1a942f44339478ef67935ab2bbaec2fb0322496cf3cbe84b261e06ac3814c572" +dependencies = [ + "time-core", +] + +[[package]] +name = "tinyvec" +version = "1.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "87cc5ceb3875bb20c2890005a4e226a4651264a5c75edb2421b52861a0a0cb50" +dependencies = [ + "tinyvec_macros", +] + +[[package]] +name = "tinyvec_macros" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20" + +[[package]] +name = "tracing" +version = "0.1.37" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8ce8c33a8d48bd45d624a6e523445fd21ec13d3653cd51f681abf67418f54eb8" +dependencies = [ + "cfg-if", + "pin-project-lite", + "tracing-attributes", + "tracing-core", +] + +[[package]] +name = "tracing-attributes" +version = "0.1.26" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "5f4f31f56159e98206da9efd823404b79b6ef3143b4a7ab76e67b1751b25a4ab" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.29", +] + +[[package]] +name = "tracing-core" +version = "0.1.31" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0955b8137a1df6f1a2e9a37d8a6656291ff0297c1a97c24e0d8425fe2312f79a" +dependencies = [ + "once_cell", + "valuable", +] + +[[package]] +name = "tracing-log" +version = "0.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "78ddad33d2d10b1ed7eb9d1f518a5674713876e97e5bb9b7345a7984fbb4f922" +dependencies = [ + "lazy_static", + "log", + "tracing-core", +] + +[[package]] +name = "tracing-serde" +version = "0.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bc6b213177105856957181934e4920de57730fc69bf42c37ee5bb664d406d9e1" +dependencies = [ + "serde", + "tracing-core", +] + +[[package]] +name = "tracing-subscriber" +version = "0.3.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "30a651bc37f915e81f087d86e62a18eec5f79550c7faff886f7090b4ea757c77" +dependencies = [ + "matchers", + "nu-ansi-term", + "once_cell", + "regex", + "serde", + "serde_json", + "sharded-slab", + "smallvec", + "thread_local", + "time", + "tracing", + "tracing-core", + "tracing-log", + "tracing-serde", +] + +[[package]] +name = "tuple" +version = "0.0.0" +source = "git+https://github.com/estuary/flow?branch=johnny/rust-connectors#b0f96544b94a1103217c4b3c6e2071518717cf2c" +dependencies = [ + "memchr", + "serde_json", +] + +[[package]] +name = "unicode-bidi" +version = "0.3.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "92888ba5573ff080736b3648696b70cafad7d250551175acbaa4e0385b3e1460" + +[[package]] +name = "unicode-ident" +version = "1.0.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "301abaae475aa91687eb82514b328ab47a211a533026cb25fc3e519b86adfc3c" + +[[package]] +name = "unicode-normalization" +version = "0.1.22" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5c5713f0fc4b5db668a2ac63cdb7bb4469d8c9fed047b1d0292cc7b0ce2ba921" +dependencies = [ + "tinyvec", +] + +[[package]] +name = "unsafe-libyaml" +version = "0.2.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f28467d3e1d3c6586d8f25fa243f544f5800fec42d97032474e17222c2b75cfa" + +[[package]] +name = "url" +version = "2.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "143b538f18257fac9cad154828a57c6bf5157e1aa604d4816b5995bf6de87ae5" +dependencies = [ + "form_urlencoded", + "idna 0.4.0", + "percent-encoding", + "serde", +] + +[[package]] +name = "uuid" +version = "1.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "79daa5ed5740825c40b389c5e50312b9c86df53fccd33f281df655642b43869d" +dependencies = [ + "getrandom", + "serde", +] + +[[package]] +name = "validator" +version = "0.15.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f07b0a1390e01c0fc35ebb26b28ced33c9a3808f7f9fbe94d3cc01e233bfeed5" +dependencies = [ + "idna 0.2.3", + "lazy_static", + "regex", + "serde", + "serde_derive", + "serde_json", + "url", + "validator_derive", +] + +[[package]] +name = "validator_derive" +version = "0.15.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"ea7ed5e8cf2b6bdd64a6c4ce851da25388a89327b17b88424ceced6bd5017923" +dependencies = [ + "if_chain", + "lazy_static", + "proc-macro-error", + "proc-macro2", + "quote", + "regex", + "syn 1.0.109", + "validator_types", +] + +[[package]] +name = "validator_types" +version = "0.15.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d2ddf34293296847abfc1493b15c6e2f5d3cd19f57ad7d22673bf4c6278da329" +dependencies = [ + "proc-macro2", + "syn 1.0.109", +] + +[[package]] +name = "valuable" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "830b7e5d4d90034032940e4ace0d9a9a057e7a45cd94e6c007832e39edb82f6d" + +[[package]] +name = "version_check" +version = "0.9.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "49874b5167b65d7193b8aba1567f5c7d93d001cafc34600cee003eda787e483f" + +[[package]] +name = "wasi" +version = "0.11.0+wasi-snapshot-preview1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423" + +[[package]] +name = "which" +version = "4.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2441c784c52b289a054b7201fc93253e288f094e2f4be9058343127c4226a269" +dependencies = [ + "either", + "libc", + "once_cell", +] + +[[package]] +name = "winapi" +version = "0.3.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5c839a674fcd7a98952e593242ea400abe93992746761e38641405d28b00f419" +dependencies = [ + "winapi-i686-pc-windows-gnu", + "winapi-x86_64-pc-windows-gnu", +] + +[[package]] +name = "winapi-i686-pc-windows-gnu" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6" + +[[package]] +name = "winapi-x86_64-pc-windows-gnu" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" + +[[package]] +name = "windows-sys" +version = "0.45.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "75283be5efb2831d37ea142365f009c02ec203cd29a3ebecbc093d52315b66d0" +dependencies = [ + "windows-targets 0.42.2", +] + +[[package]] +name = "windows-sys" +version = "0.48.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "677d2418bec65e3338edb076e806bc1ec15693c5d0104683f2efe857f61056a9" +dependencies = [ + "windows-targets 0.48.5", +] + +[[package]] +name = "windows-targets" +version = "0.42.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8e5180c00cd44c9b1c88adb3693291f1cd93605ded80c250a75d472756b4d071" +dependencies = [ + "windows_aarch64_gnullvm 0.42.2", + "windows_aarch64_msvc 0.42.2", + "windows_i686_gnu 0.42.2", + "windows_i686_msvc 0.42.2", + "windows_x86_64_gnu 0.42.2", + "windows_x86_64_gnullvm 0.42.2", + "windows_x86_64_msvc 0.42.2", +] + +[[package]] +name = "windows-targets" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9a2fa6e2155d7247be68c096456083145c183cbbbc2764150dda45a87197940c" +dependencies = [ + "windows_aarch64_gnullvm 0.48.5", + "windows_aarch64_msvc 0.48.5", + "windows_i686_gnu 0.48.5", + "windows_i686_msvc 0.48.5", + "windows_x86_64_gnu 0.48.5", + "windows_x86_64_gnullvm 0.48.5", + "windows_x86_64_msvc 0.48.5", +] + +[[package]] +name = "windows_aarch64_gnullvm" +version = "0.42.2" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "597a5118570b68bc08d8d59125332c54f1ba9d9adeedeef5b99b02ba2b0698f8" + +[[package]] +name = "windows_aarch64_gnullvm" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2b38e32f0abccf9987a4e3079dfb67dcd799fb61361e53e2882c3cbaf0d905d8" + +[[package]] +name = "windows_aarch64_msvc" +version = "0.42.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e08e8864a60f06ef0d0ff4ba04124db8b0fb3be5776a5cd47641e942e58c4d43" + +[[package]] +name = "windows_aarch64_msvc" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dc35310971f3b2dbbf3f0690a219f40e2d9afcf64f9ab7cc1be722937c26b4bc" + +[[package]] +name = "windows_i686_gnu" +version = "0.42.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c61d927d8da41da96a81f029489353e68739737d3beca43145c8afec9a31a84f" + +[[package]] +name = "windows_i686_gnu" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a75915e7def60c94dcef72200b9a8e58e5091744960da64ec734a6c6e9b3743e" + +[[package]] +name = "windows_i686_msvc" +version = "0.42.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "44d840b6ec649f480a41c8d80f9c65108b92d89345dd94027bfe06ac444d1060" + +[[package]] +name = "windows_i686_msvc" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8f55c233f70c4b27f66c523580f78f1004e8b5a8b659e05a4eb49d4166cca406" + +[[package]] +name = "windows_x86_64_gnu" +version = "0.42.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8de912b8b8feb55c064867cf047dda097f92d51efad5b491dfb98f6bbb70cb36" + +[[package]] +name = "windows_x86_64_gnu" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "53d40abd2583d23e4718fddf1ebec84dbff8381c07cae67ff7768bbf19c6718e" + +[[package]] +name = "windows_x86_64_gnullvm" +version = "0.42.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "26d41b46a36d453748aedef1486d5c7a85db22e56aff34643984ea85514e94a3" + +[[package]] +name = "windows_x86_64_gnullvm" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0b7b52767868a23d5bab768e390dc5f5c55825b6d30b86c844ff2dc7414044cc" + +[[package]] +name = "windows_x86_64_msvc" +version = "0.42.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9aec5da331524158c6d1a4ac0ab1541149c0b9505fde06423b02f5ef0106b9f0" + +[[package]] +name = "windows_x86_64_msvc" +version = "0.48.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ed94fce61571a4006852b7389a063ab983c02eb1bb37b47f8272ce92d06d9538" + +[[package]] +name = "wyz" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "85e60b0d1b5f99db2556934e21937020776a5d31520bf169e851ac44e6420214" + +[[package]] +name = "wyz" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "05f360fc0b24296329c78fda852a1e9ae82de9cf7b27dae4b7f62f118f77b9ed" +dependencies = [ + "tap", +] + +[[package]] +name = "yaml-merge-keys" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "af47d205071caaef70ebce5e04e1d88eba944833f8a6626dacdda700f86c285a" +dependencies = [ + "lazy_static", + "serde_yaml 0.8.26", + 
"thiserror", + "yaml-rust", +] + +[[package]] +name = "yaml-rust" +version = "0.4.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "56c1936c4cc7a1c9ab21a1ebb602eb942ba868cbd44a99cb7cdc5892335e1c85" +dependencies = [ + "linked-hash-map", +] + +[[package]] +name = "zeroize" +version = "1.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2a0956f1ba7c7909bfb66c2e9e4124ab6f6482560f6628b5aaeba39207c9aad9" diff --git a/crates/derive-typescript/Cargo.toml b/crates/derive-typescript/Cargo.toml index 333a0bc8a2..8e7dbd1e2a 100644 --- a/crates/derive-typescript/Cargo.toml +++ b/crates/derive-typescript/Cargo.toml @@ -1,34 +1,37 @@ [package] name = "derive-typescript" -version.workspace = true -rust-version.workspace = true -edition.workspace = true -authors.workspace = true -homepage.workspace = true -repository.workspace = true -license.workspace = true +version = "0.1.0" +edition = "2021" + +[[bin]] +name = "derive-typescript" +path = "src/main.rs" [dependencies] -doc = { path = "../doc" } -json = { path = "../json" } -proto-flow = { path = "../proto-flow" } -locate-bin = { path = "../locate-bin" } +doc = { git = "https://github.com/estuary/flow", branch = "johnny/rust-connectors" } +json = { git = "https://github.com/estuary/flow", branch = "johnny/rust-connectors" } +proto-flow = { git = "https://github.com/estuary/flow", branch = "johnny/rust-connectors" } -anyhow = { workspace = true } -itertools = { workspace = true } -lazy_static = { workspace = true } -regex = { workspace = true } -serde = { workspace = true } -serde_json = { workspace = true } -tempfile = { workspace = true } -tracing = { workspace = true } -url = { workspace = true } +anyhow = "1" +itertools = "0.11" +lazy_static = "1" +regex = "1" +serde = { version = "1.0", features = ["derive"] } +serde_json = { version = "1.0", features = ["raw_value"] } +tempfile = "3" +tracing = "0.1" +tracing-subscriber = { version = "0.3", features = [ + "time", + "json", + "env-filter", + "fmt", +] } +url = "2" [dev-dependencies] -models = { path = "../models" } -sources = { path = "../sources" } -tables = { path = "../tables" } +models = { git = "https://github.com/estuary/flow", branch = "johnny/rust-connectors" } +sources = { git = "https://github.com/estuary/flow", branch = "johnny/rust-connectors" } +tables = { git = "https://github.com/estuary/flow", branch = "johnny/rust-connectors" } -insta = { workspace = true } -lazy_static = { workspace = true } -serde_yaml = { workspace = true } +insta = "1" +serde_yaml = "0.9" diff --git a/crates/derive-typescript/Dockerfile b/crates/derive-typescript/Dockerfile new file mode 100644 index 0000000000..8b52e3f664 --- /dev/null +++ b/crates/derive-typescript/Dockerfile @@ -0,0 +1,7 @@ +FROM denoland/deno:distroless-1.36.3 + +COPY target/x86_64-unknown-linux-musl/release/derive-typescript / + +ENTRYPOINT ["/derive-typescript"] +LABEL FLOW_RUNTIME_CODEC=json +LABEL FLOW_RUNTIME_PROTOCOL=derive \ No newline at end of file diff --git a/crates/derive-typescript/src/lib.rs b/crates/derive-typescript/src/lib.rs index f319768061..f45886d867 100644 --- a/crates/derive-typescript/src/lib.rs +++ b/crates/derive-typescript/src/lib.rs @@ -1,5 +1,4 @@ use anyhow::Context; -use locate_bin::locate; use proto_flow::{derive, flow}; use serde_json::json; use std::io::{BufRead, Write}; @@ -26,8 +25,9 @@ pub fn run() -> anyhow::Result<()> { spec: Some(derive::response::Spec { protocol: 3032023, config_schema_json: "{}".to_string(), - lambda_config_schema_json: 
"{}".to_string(), + resource_config_schema_json: "{}".to_string(), documentation_url: "https://docs.estuary.dev".to_string(), + oauth2: None, }), ..Default::default() }) @@ -89,7 +89,7 @@ pub fn run() -> anyhow::Result<()> { std::fs::write(temp_dir.join(MODULE_NAME), config.module)?; std::fs::write(temp_dir.join(MAIN_NAME), codegen::main_ts(&transforms))?; - let mut child = std::process::Command::new(locate("deno")?) + let mut child = std::process::Command::new("deno") .stdin(Stdio::piped()) .current_dir(temp_dir) .args(["run", "--allow-net=api.openai.com", MAIN_NAME]) @@ -133,10 +133,9 @@ fn validate(validate: derive::request::Validate) -> anyhow::Result(&config_json) .with_context(|| format!("invalid derivation configuration: {config_json}"))?; @@ -215,7 +214,7 @@ fn validate(validate: derive::request::Validate) -> anyhow::Result anyhow::Result<()> { + tracing_subscriber::fmt::fmt() + .json() + .with_env_filter(tracing_subscriber::EnvFilter::from_env("LOG_LEVEL")) + .init(); + + derive_typescript::run() +} diff --git a/crates/flowctl/Cargo.toml b/crates/flowctl/Cargo.toml index 142ac37d9a..cc13d71b99 100644 --- a/crates/flowctl/Cargo.toml +++ b/crates/flowctl/Cargo.toml @@ -13,8 +13,8 @@ license.workspace = true [dependencies] assemble = { path = "../assemble" } async-process = { path = "../async-process" } +build = { path = "../build" } connector-init = { path = "../connector-init" } -derive-typescript = { path = "../derive-typescript" } doc = { path = "../doc" } extractors = { path = "../extractors" } journal-client = { path = "../journal-client" } diff --git a/crates/flowctl/src/catalog/pull_specs.rs b/crates/flowctl/src/catalog/pull_specs.rs index fdfd13d8bb..decb0145f6 100644 --- a/crates/flowctl/src/catalog/pull_specs.rs +++ b/crates/flowctl/src/catalog/pull_specs.rs @@ -45,8 +45,8 @@ pub async fn do_pull_specs(ctx: &mut CliContext, args: &PullSpecs) -> anyhow::Re .await?; tracing::debug!(count = live_specs.len(), "successfully fetched live specs"); - let target = local_specs::arg_source_to_url(&args.target, true)?; - let mut sources = local_specs::surface_errors(local_specs::load(&target).await)?; + let target = build::arg_source_to_url(&args.target, true)?; + let mut sources = local_specs::surface_errors(local_specs::load(&target).await.into_result())?; let count = local_specs::extend_from_catalog( &mut sources, @@ -56,15 +56,7 @@ pub async fn do_pull_specs(ctx: &mut CliContext, args: &PullSpecs) -> anyhow::Re let sources = local_specs::indirect_and_write_resources(sources)?; println!("Wrote {count} specifications under {target}."); + let () = local_specs::generate_files(client, sources).await?; - // Build to generate associated files. - let (_, errors) = local_specs::build(client, sources).await; - - if !errors.is_empty() { - tracing::warn!( - "The written Flow specifications have {} errors. 
Run `test` to review", - errors.len() - ); - } Ok(()) } diff --git a/crates/flowctl/src/draft/develop.rs b/crates/flowctl/src/draft/develop.rs index fa0ab99c86..6f23457d40 100644 --- a/crates/flowctl/src/draft/develop.rs +++ b/crates/flowctl/src/draft/develop.rs @@ -35,8 +35,8 @@ pub async fn do_develop( ) .await?; - let target = local_specs::arg_source_to_url(&target, true)?; - let mut sources = local_specs::surface_errors(local_specs::load(&target).await)?; + let target = build::arg_source_to_url(&target, true)?; + let mut sources = local_specs::surface_errors(local_specs::load(&target).await.into_result())?; let count = local_specs::extend_from_catalog( &mut sources, @@ -46,16 +46,8 @@ pub async fn do_develop( let sources = local_specs::indirect_and_write_resources(sources)?; println!("Wrote {count} specifications under {target}."); + let () = local_specs::generate_files(client, sources).await?; - // Build to generate associated files. - let (_, errors) = local_specs::build(client, sources).await; - - if !errors.is_empty() { - tracing::warn!( - "The written Flow specifications have {} errors. Run `test` to review", - errors.len() - ); - } Ok(()) } diff --git a/crates/flowctl/src/generate/mod.rs b/crates/flowctl/src/generate/mod.rs index 6d637a4d1d..72ca35f348 100644 --- a/crates/flowctl/src/generate/mod.rs +++ b/crates/flowctl/src/generate/mod.rs @@ -10,27 +10,10 @@ pub struct Generate { impl Generate { pub async fn run(&self, ctx: &mut crate::CliContext) -> anyhow::Result<()> { - let source = local_specs::arg_source_to_url(&self.source, false)?; - - let (sources, errors) = local_specs::load(&source).await; - - for tables::Error { scope, error } in errors.iter() { - match error.downcast_ref() { - Some(sources::LoadError::Fetch { .. }) => { - // Omit load errors about missing resources. That's the point! - } - _ => tracing::error!(%scope, ?error), - } - } - - // Build to generate associated files. + let source = build::arg_source_to_url(&self.source, false)?; + let sources = local_specs::load(&source).await; let client = ctx.controlplane_client().await?; - let (_, errors) = local_specs::build(client, sources).await; - - for tables::Error { scope, error } in errors.iter() { - tracing::error!(%scope, ?error); - } - + let () = local_specs::generate_files(client, sources).await?; Ok(()) } } diff --git a/crates/flowctl/src/local_specs.rs b/crates/flowctl/src/local_specs.rs index 0baa9346ed..9bcceea1cf 100644 --- a/crates/flowctl/src/local_specs.rs +++ b/crates/flowctl/src/local_specs.rs @@ -1,116 +1,102 @@ use anyhow::Context; -use futures::FutureExt; -use proto_flow::{derive, flow}; +use futures::{future::BoxFuture, FutureExt}; use std::collections::BTreeMap; +/// Load and validate sources and derivation connectors (only). +/// Capture and materialization connectors are not validated. 
pub(crate) async fn load_and_validate( client: crate::controlplane::Client, source: &str, ) -> anyhow::Result<(tables::Sources, tables::Validations)> { - let source = arg_source_to_url(source, false)?; - let sources = surface_errors(load(&source).await)?; - let (sources, validations) = surface_errors(build(client, sources).await)?; - Ok((sources, validations)) + let source = build::arg_source_to_url(source, false)?; + let sources = surface_errors(load(&source).await.into_result())?; + let (sources, validations) = validate(client, true, false, true, sources).await; + Ok((sources, surface_errors(validations.into_result())?)) } -// Map a "--source" argument to a corresponding URL, optionally creating an empty -// file if one doesn't exist, which is required when producing a canonical file:/// -// URL for a local file. -pub(crate) fn arg_source_to_url( +/// Load and validate sources and all connectors. +pub(crate) async fn load_and_validate_full( + client: crate::controlplane::Client, source: &str, - create_if_not_exists: bool, -) -> anyhow::Result { - // Special case that maps stdin into a URL constant. - if source == "-" { - return Ok(url::Url::parse(STDIN_URL).unwrap()); - } - match url::Url::parse(source) { - Ok(url) => Ok(url), - Err(err) => { - tracing::debug!( - source = %source, - ?err, - "source is not a URL; assuming it's a filesystem path", - ); - - let source = match std::fs::canonicalize(source) { - Ok(p) => p, - Err(err) - if matches!(err.kind(), std::io::ErrorKind::NotFound) - && create_if_not_exists => - { - std::fs::write(source, "{}") - .with_context(|| format!("failed to create new file {source}"))?; - std::fs::canonicalize(source).expect("can canonicalize() a file we just wrote") - } - Err(err) => { - return Err(err) - .context(format!("could not find {source} in the local filesystem")); - } - }; +) -> anyhow::Result<(tables::Sources, tables::Validations)> { + let source = build::arg_source_to_url(source, false)?; + let sources = surface_errors(load(&source).await.into_result())?; + let (sources, validations) = validate(client, false, false, false, sources).await; + Ok((sources, surface_errors(validations.into_result())?)) +} + +/// Generate connector files by validating sources with derivation connectors. +pub(crate) async fn generate_files( + client: crate::controlplane::Client, + sources: tables::Sources, +) -> anyhow::Result<()> { + let source = &sources.fetches[0].resource.clone(); + let project_root = build::project_root(source); + + let (mut sources, validations) = validate(client, true, false, true, sources).await; + + build::generate_files(&project_root, &validations)?; + + sources.errors = sources + .errors + .into_iter() + .filter_map(|tables::Error { scope, error }| { + match error.downcast_ref() { + // Skip load errors about missing resources. That's the point! + Some(sources::LoadError::Fetch { .. }) => None, + _ => Some(tables::Error { scope, error }), + } + }) + .collect(); - // Safe unwrap since we've canonical-ized the path. - Ok(url::Url::from_file_path(&source).unwrap()) + if let Err(errors) = sources + .into_result() + .and_then(|_| validations.into_result()) + { + for tables::Error { scope, error } in errors.iter() { + tracing::error!(%scope, ?error); } + tracing::error!( + "I may not have generated all files because the Flow specifications have errors.", + ); } + + Ok(()) } -// Load all sources into tables. -// Errors are returned but not inspected. -// Loaded specifications are unmodified from their fetch representations. 
-pub(crate) async fn load(source: &url::Url) -> (tables::Sources, tables::Errors) { - let loader = sources::Loader::new(tables::Sources::default(), Fetcher {}); - loader - .load_resource( - sources::Scope::new(&source), - &source, - flow::ContentType::Catalog, - ) - .await; - let mut sources = loader.into_tables(); - let errors = std::mem::take(&mut sources.errors); - - (sources, errors) +pub(crate) async fn load(source: &url::Url) -> tables::Sources { + // We never use a file root jail when loading on a user's machine. + build::load(source, std::path::Path::new("/")).await } -// Build sources by: -// * Mapping them to their inline form. -// * Performing validations which produce built specifications. -// * Gathering and writing out all generated files. -// Errors are returned but are not inspected. -pub(crate) async fn build( +async fn validate( client: crate::controlplane::Client, - mut sources: tables::Sources, -) -> ((tables::Sources, tables::Validations), tables::Errors) { - ::sources::inline_sources(&mut sources); - - let source = &sources.fetches[0].resource; - let project_root = project_root(source); - - let mut validations = validation::validate( - &flow::build_api::Config { - build_db: String::new(), - build_id: "local-build".to_string(), - connector_network: "default".to_string(), - project_root: project_root.to_string(), - source: source.to_string(), - source_type: flow::ContentType::Catalog as i32, - }, - &LocalConnectors(validation::NoOpDrivers {}), + noop_captures: bool, + noop_derivations: bool, + noop_materializations: bool, + sources: tables::Sources, +) -> (tables::Sources, tables::Validations) { + let source = &sources.fetches[0].resource.clone(); + let project_root = build::project_root(source); + + let (sources, mut validate) = build::validate( + "local-build", + "", // Use default connector network. &Resolver { client }, - &sources.captures, - &sources.collections, - &sources.fetches, - &sources.imports, - &sources.materializations, - &sources.storage_mappings, - &sources.tests, + false, // Don't generate ops collections. + ops::tracing_log_handler, + noop_captures, + noop_derivations, + noop_materializations, + &project_root, + sources, ) .await; // Local specs are not expected to satisfy all referential integrity checks. // Filter out errors which are not really "errors" for the Flow CLI. - let mut errors = std::mem::take(&mut validations.errors) + validate.errors = validate + .errors .into_iter() .filter(|err| match err.error.downcast_ref() { // Ok if *no* storage mappings are defined. @@ -122,48 +108,18 @@ pub(crate) async fn build( }) .collect::(); - // Gather all files generated by validations. - let mut generated_files = BTreeMap::new(); - for row in validations.built_collections.iter() { - let Some(validated) = &row.validated else { continue }; - for (url, content) in &validated.generated_files { - match url::Url::parse(&url) { - Ok(url) => { - generated_files.insert(url, content.as_bytes()); - } - Err(err) => errors.insert_row( - &row.scope, - anyhow::anyhow!(err) - .context("derive connector returns invalid generated file URL"), - ), - } - } - } - - // Write out all generated files. 
- if let Err(error) = write_files( - &sources, - generated_files - .into_iter() - .map(|(resource, content)| (resource, content.to_vec())) - .collect(), - ) { - tracing::error!(?error, "failed to write generated files"); - } - - ((sources, validations), errors) + (sources, validate) } -pub(crate) fn surface_errors(result: (T, tables::Errors)) -> anyhow::Result { - let (t, errors) = result; - - for tables::Error { scope, error } in errors.iter() { - tracing::error!(%scope, ?error); - } - if !errors.is_empty() { - Err(anyhow::anyhow!("failed due to encountered errors")) - } else { - Ok(t) +pub(crate) fn surface_errors(result: Result) -> anyhow::Result { + match result { + Err(errors) => { + for tables::Error { scope, error } in errors.iter() { + tracing::error!(%scope, ?error); + } + Err(anyhow::anyhow!("failed due to encountered errors")) + } + Ok(ok) => return Ok(ok), } } @@ -177,10 +133,12 @@ pub(crate) fn indirect_and_write_resources( } pub(crate) fn write_resources(mut sources: tables::Sources) -> anyhow::Result { + let source = &sources.fetches[0].resource.clone(); + let project_root = build::project_root(source); ::sources::rebuild_catalog_resources(&mut sources); - write_files( - &sources, + build::write_files( + &project_root, sources .resources .iter() @@ -195,31 +153,6 @@ pub(crate) fn write_resources(mut sources: tables::Sources) -> anyhow::Result)>) -> anyhow::Result<()> { - let project_root = project_root(&sources.fetches[0].resource); - - for (resource, content) in files { - let Ok(path) = resource.to_file_path() else { - tracing::info!(%resource, "not writing the resource because it's remote and not local"); - continue; - }; - if !resource.as_str().starts_with(project_root.as_str()) { - tracing::info!(%resource, %project_root, - "not writing local resource because it's not under the project root"); - continue; - } - if let Some(parent) = path.parent() { - std::fs::create_dir_all(path.parent().unwrap()).with_context(|| { - format!("failed to create directory {}", parent.to_string_lossy()) - })?; - } - std::fs::write(&path, content).with_context(|| format!("failed to write {resource}"))?; - - tracing::info!(path=%path.to_str().unwrap_or(resource.as_str()), "wrote file"); - } - Ok(()) -} - pub(crate) fn into_catalog(sources: tables::Sources) -> models::Catalog { let tables::Sources { captures, @@ -294,152 +227,8 @@ pub(crate) fn pick_policy( } } -struct LocalConnectors(validation::NoOpDrivers); - -impl validation::Connectors for LocalConnectors { - fn validate_capture<'a>( - &'a self, - request: proto_flow::capture::request::Validate, - ) -> futures::future::LocalBoxFuture< - 'a, - Result, - > { - self.0.validate_capture(request) - } - - fn validate_derivation<'a>( - &'a self, - request: proto_flow::derive::request::Validate, - ) -> futures::future::LocalBoxFuture< - 'a, - Result, - > { - let middleware = runtime::derive::Middleware::new(ops::tracing_log_handler, None); - - async move { - let request = derive::Request { - validate: Some(request.clone()), - ..Default::default() - }; - let response = middleware - .serve_unary(request) - .await - .map_err(|status| anyhow::Error::msg(status.message().to_string()))?; - - let validated = response - .validated - .context("derive Response is not Validated")?; - - Ok(validated) - } - .boxed_local() - } - - fn validate_materialization<'a>( - &'a self, - request: proto_flow::materialize::request::Validate, - ) -> futures::future::LocalBoxFuture< - 'a, - Result, - > { - self.0.validate_materialization(request) - } - - fn inspect_image<'a>( 
- &'a self, - image: String, - ) -> futures::future::LocalBoxFuture<'a, Result, anyhow::Error>> { - self.0.inspect_image(image) - } -} - -pub(crate) fn project_root(source: &url::Url) -> url::Url { - let current_dir = - std::env::current_dir().expect("failed to determine current working directory"); - let source_path = source.to_file_path(); - - let dir = if let Ok(source_path) = &source_path { - let mut dir = source_path - .parent() - .expect("source path is an absolute filesystem path"); - - while let Some(parent) = dir.parent() { - if ["flow.yaml", "flow.yml", "flow.json"] - .iter() - .any(|name| parent.join(name).exists()) - { - dir = parent; - } else { - break; - } - } - dir - } else { - // `source` isn't local. Use the current working directory. - ¤t_dir - }; - - url::Url::from_file_path(dir).expect("cannot map project directory into a URL") -} - -/// Fetcher fetches resource URLs from the local filesystem or over the network. -struct Fetcher; - -impl sources::Fetcher for Fetcher { - fn fetch<'a>( - &'a self, - // Resource to fetch. - resource: &'a url::Url, - // Expected content type of the resource. - content_type: flow::ContentType, - ) -> sources::FetchFuture<'a> { - tracing::debug!(%resource, ?content_type, "fetching resource"); - let url = resource.clone(); - Box::pin(fetch_async(url)) - } -} - -async fn fetch_async(resource: url::Url) -> Result { - match resource.scheme() { - "http" | "https" => { - let resp = reqwest::get(resource.as_str()).await?; - let status = resp.status(); - - if status.is_success() { - Ok(resp.bytes().await?) - } else { - let body = resp.text().await?; - anyhow::bail!("{status}: {body}"); - } - } - "file" => { - let path = resource - .to_file_path() - .map_err(|err| anyhow::anyhow!("failed to convert file uri to path: {:?}", err))?; - - let bytes = - std::fs::read(path).with_context(|| format!("failed to read {resource}"))?; - Ok(bytes.into()) - } - "stdin" => { - use tokio::io::AsyncReadExt; - - let mut bytes = Vec::new(); - tokio::io::stdin() - .read_to_end(&mut bytes) - .await - .context("reading stdin")?; - - Ok(bytes.into()) - } - _ => Err(anyhow::anyhow!( - "cannot fetch unsupported URI scheme: '{resource}'" - )), - } -} - -struct Resolver { - client: crate::controlplane::Client, +pub(crate) struct Resolver { + pub client: crate::controlplane::Client, } impl validation::ControlPlane for Resolver { @@ -448,10 +237,9 @@ impl validation::ControlPlane for Resolver { collections: Vec, // These parameters are currently required, but can be removed once we're // actually resolving fuzzy pre-built CollectionSpecs from the control plane. - temp_build_config: &'b proto_flow::flow::build_api::Config, + temp_build_id: &'b str, temp_storage_mappings: &'b [tables::StorageMapping], - ) -> futures::future::LocalBoxFuture<'a, anyhow::Result>> - { + ) -> BoxFuture<'a, anyhow::Result>> { async move { // TODO(johnny): Introduce a new RPC for doing fuzzy-search given the list of // collection names, and use that instead to surface mis-spelt name suggestions. @@ -496,21 +284,19 @@ impl validation::ControlPlane for Resolver { rows.into_iter() .map(|row| { use crate::catalog::SpecRow; - let def = row + let spec = row .parse_spec::() .context("parsing specification")?; - Ok(Self::temp_build_collection_helper( + Ok(self.temp_build_collection_helper( row.catalog_name, - def, - temp_build_config, + spec, + temp_build_id, temp_storage_mappings, )?) 
}) .collect::>() } - .boxed_local() + .boxed() } } - -const STDIN_URL: &str = "stdin://root/flow.yaml"; diff --git a/crates/flowctl/src/preview/mod.rs b/crates/flowctl/src/preview/mod.rs index f49e74ffb0..24eb88bae9 100644 --- a/crates/flowctl/src/preview/mod.rs +++ b/crates/flowctl/src/preview/mod.rs @@ -43,7 +43,7 @@ impl Preview { sqlite_uri: sqlite_path, interval: flush_interval, } = self; - let source = local_specs::arg_source_to_url(source, false)?; + let source = build::arg_source_to_url(source, false)?; if self.infer_schema && source.scheme() != "file" { anyhow::bail!("schema inference can only be used with a local file --source"); @@ -156,10 +156,15 @@ impl Preview { })) .await?; - let mut responses_rx = runtime::derive::Middleware::new(ops::tracing_log_handler, None) - .serve(request_rx) - .await - .map_err(|status| anyhow::anyhow!("{}", status.message()))?; + let mut responses_rx = runtime::Runtime::new( + String::new(), + ops::tracing_log_handler, + None, + "preview".to_string(), + ) + .serve_derive(request_rx) + .await + .map_err(|status| anyhow::anyhow!("{}", status.message()))?; let _opened = responses_rx .next() @@ -210,8 +215,9 @@ impl Preview { // Update with an inferred schema and write out the updated Flow spec. if let Some(schema) = schema { // Reload `sources`, this time without inlining them. - let mut sources = local_specs::surface_errors(local_specs::load(&source).await) - .expect("sources must load a second time"); + let mut sources = + local_specs::surface_errors(local_specs::load(&source).await.into_result()) + .expect("sources must load a second time"); // Find the derivation we just previewed. let index = sources diff --git a/crates/flowctl/src/raw/capture.rs b/crates/flowctl/src/raw/capture.rs index 5f2da73758..8d7cbadd19 100644 --- a/crates/flowctl/src/raw/capture.rs +++ b/crates/flowctl/src/raw/capture.rs @@ -44,7 +44,7 @@ pub async fn do_capture( }: &Capture, ) -> anyhow::Result<()> { let client = ctx.controlplane_client().await?; - let (_sources, mut validations) = local_specs::load_and_validate(client, &source).await?; + let (_sources, mut validations) = local_specs::load_and_validate_full(client, &source).await?; let capture = validations .built_captures diff --git a/crates/flowctl/src/raw/discover.rs b/crates/flowctl/src/raw/discover.rs index 70630f0e41..f4d7fa3e9a 100644 --- a/crates/flowctl/src/raw/discover.rs +++ b/crates/flowctl/src/raw/discover.rs @@ -54,8 +54,8 @@ pub async fn do_discover( let catalog_file = format!("{connector_name}.flow.yaml"); - let target = local_specs::arg_source_to_url(&catalog_file, true)?; - let mut sources = local_specs::surface_errors(local_specs::load(&target).await)?; + let target = build::arg_source_to_url(&catalog_file, true)?; + let mut sources = local_specs::surface_errors(local_specs::load(&target).await.into_result())?; let capture_name = format!("{prefix}/{connector_name}"); diff --git a/crates/flowctl/src/raw/mod.rs b/crates/flowctl/src/raw/mod.rs index fbffe07b48..df6bdcb065 100644 --- a/crates/flowctl/src/raw/mod.rs +++ b/crates/flowctl/src/raw/mod.rs @@ -1,7 +1,10 @@ use crate::local_specs; use anyhow::Context; use doc::combine; -use std::io::{self, Write}; +use std::{ + io::{self, Write}, + path::PathBuf, +}; mod capture; mod discover; @@ -37,12 +40,12 @@ pub enum Command { Rpc(Rpc), /// Issue a custom table update request to the API. Update(Update), + /// Perform a configured build of catalog sources. + Build(Build), /// Bundle catalog sources into a flattened and inlined catalog. 
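The `preview` and `discover` hunks above switch callers to `load(...).await.into_result()` feeding the reworked `surface_errors`, which now takes a `Result` rather than a `(T, tables::Errors)` pair. A minimal sketch of the calling pattern, assuming the `Result<tables::Sources, tables::Errors>` shape these hunks imply (the `load_sources` helper name is hypothetical):

```rust
use crate::local_specs;

// Minimal sketch of the new error-surfacing pattern; the exact return type
// of `load` and its `into_result()` -> Result<tables::Sources, tables::Errors>
// conversion are assumptions inferred from the call sites in this diff.
async fn load_sources(source: &url::Url) -> anyhow::Result<tables::Sources> {
    // surface_errors logs each scoped tables::Error and collapses the
    // outcome into a single anyhow::Result.
    let sources =
        local_specs::surface_errors(local_specs::load(source).await.into_result())?;
    Ok(sources)
}
```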
Bundle(Bundle), /// Combine over an input stream of documents and write the output. Combine(Combine), - /// Deno derivation connector. - DenoDerive(DenoDerive), /// Generate a materialization fixture. MaterializeFixture(materialize_fixture::MaterializeFixture), /// Discover a connector and write catalog files @@ -92,6 +95,21 @@ pub struct Rpc { body: String, } +#[derive(Debug, Clone, clap::Args)] +#[clap(rename_all = "kebab-case")] +pub struct Build { + #[clap(long)] + db_path: PathBuf, + #[clap(long)] + build_id: String, + #[clap(long, default_value = "")] + connector_network: String, + #[clap(long, default_value = "/")] + file_root: String, + #[clap(long)] + source: String, +} + #[derive(Debug, clap::Args)] #[clap(rename_all = "kebab-case")] pub struct Bundle { @@ -111,19 +129,15 @@ pub struct Combine { collection: String, } -#[derive(Debug, clap::Args)] -#[clap(rename_all = "kebab-case")] -pub struct DenoDerive {} - impl Advanced { pub async fn run(&self, ctx: &mut crate::CliContext) -> anyhow::Result<()> { match &self.cmd { Command::Get(get) => do_get(ctx, get).await, Command::Update(update) => do_update(ctx, update).await, Command::Rpc(rpc) => do_rpc(ctx, rpc).await, + Command::Build(build) => do_build(ctx, build).await, Command::Bundle(bundle) => do_bundle(ctx, bundle).await, Command::Combine(combine) => do_combine(ctx, combine).await, - Command::DenoDerive(_deno) => derive_typescript::run(), Command::MaterializeFixture(fixture) => { materialize_fixture::do_materialize_fixture(ctx, fixture).await } @@ -171,6 +185,48 @@ async fn do_rpc( Ok(()) } +async fn do_build(ctx: &mut crate::CliContext, build: &Build) -> anyhow::Result<()> { + let client = ctx.controlplane_client().await?; + + let Build { + db_path, + build_id, + connector_network, + file_root, + source, + } = build.clone(); + + let source_url = build::arg_source_to_url(&source, false)?; + let project_root = build::project_root(&source_url); + + let build_result = build::managed_build( + build_id.clone(), + connector_network.clone(), + Box::new(crate::local_specs::Resolver { client }), + file_root.clone().into(), + ops::tracing_log_handler, + project_root.clone(), + source_url, + ) + .await; + + // The only purpose of this configuration is to be compatible with + // existing Go snapshots. We can remove it when we're ready to + // remove / update the corresponding Go tests. 
+ let build_config = proto_flow::flow::build_api::Config { + build_db: db_path.to_string_lossy().to_string(), + build_id, + connector_network, + source, + source_type: proto_flow::flow::ContentType::Catalog as i32, + ..Default::default() + }; + + build::persist(build_config, &db_path, &build_result)?; + + Ok(()) +} + async fn do_bundle(ctx: &mut crate::CliContext, Bundle { source }: &Bundle) -> anyhow::Result<()> { let (sources, _) = local_specs::load_and_validate(ctx.controlplane_client().await?, source).await?; diff --git a/crates/proto-flow/Cargo.toml b/crates/proto-flow/Cargo.toml index 3048561a8c..12c84c0585 100644 --- a/crates/proto-flow/Cargo.toml +++ b/crates/proto-flow/Cargo.toml @@ -17,6 +17,7 @@ pbjson-types = { workspace = true } prost = { workspace = true } serde = { workspace = true } serde_json = { workspace = true } +zeroize = { workspace = true } [build-dependencies] proto-build = { path = "../proto-build", optional = true } diff --git a/crates/proto-flow/src/capture.rs b/crates/proto-flow/src/capture.rs index f2020fc2d3..31a181669c 100644 --- a/crates/proto-flow/src/capture.rs +++ b/crates/proto-flow/src/capture.rs @@ -73,9 +73,6 @@ pub mod request { pub config_json: ::prost::alloc::string::String, #[prost(message, repeated, tag = "4")] pub bindings: ::prost::alloc::vec::Vec, - /// Network ports of this proposed capture. - #[prost(message, repeated, tag = "5")] - pub network_ports: ::prost::alloc::vec::Vec, } /// Nested message and enum types in `Validate`. pub mod validate { diff --git a/crates/proto-flow/src/capture.serde.rs b/crates/proto-flow/src/capture.serde.rs index 25da497989..b5197500ba 100644 --- a/crates/proto-flow/src/capture.serde.rs +++ b/crates/proto-flow/src/capture.serde.rs @@ -797,9 +797,6 @@ impl serde::Serialize for request::Validate { if !self.bindings.is_empty() { len += 1; } - if !self.network_ports.is_empty() { - len += 1; - } let mut struct_ser = serializer.serialize_struct("capture.Request.Validate", len)?; if !self.name.is_empty() { struct_ser.serialize_field("name", &self.name)?; @@ -815,9 +812,6 @@ impl serde::Serialize for request::Validate { if !self.bindings.is_empty() { struct_ser.serialize_field("bindings", &self.bindings)?; } - if !self.network_ports.is_empty() { - struct_ser.serialize_field("networkPorts", &self.network_ports)?; - } struct_ser.end() } } @@ -834,8 +828,6 @@ impl<'de> serde::Deserialize<'de> for request::Validate { "config_json", "config", "bindings", - "network_ports", - "networkPorts", ]; #[allow(clippy::enum_variant_names)] @@ -844,7 +836,6 @@ impl<'de> serde::Deserialize<'de> for request::Validate { ConnectorType, ConfigJson, Bindings, - NetworkPorts, } impl<'de> serde::Deserialize<'de> for GeneratedField { fn deserialize(deserializer: D) -> std::result::Result @@ -870,7 +861,6 @@ impl<'de> serde::Deserialize<'de> for request::Validate { "connectorType" | "connector_type" => Ok(GeneratedField::ConnectorType), "config" | "config_json" => Ok(GeneratedField::ConfigJson), "bindings" => Ok(GeneratedField::Bindings), - "networkPorts" | "network_ports" => Ok(GeneratedField::NetworkPorts), _ => Err(serde::de::Error::unknown_field(value, FIELDS)), } } @@ -894,7 +884,6 @@ impl<'de> serde::Deserialize<'de> for request::Validate { let mut connector_type__ = None; let mut config_json__ : Option> = None; let mut bindings__ = None; - let mut network_ports__ = None; while let Some(k) = map.next_key()? 
{ match k { GeneratedField::Name => { @@ -921,12 +910,6 @@ impl<'de> serde::Deserialize<'de> for request::Validate { } bindings__ = Some(map.next_value()?); } - GeneratedField::NetworkPorts => { - if network_ports__.is_some() { - return Err(serde::de::Error::duplicate_field("networkPorts")); - } - network_ports__ = Some(map.next_value()?); - } } } Ok(request::Validate { @@ -934,7 +917,6 @@ impl<'de> serde::Deserialize<'de> for request::Validate { connector_type: connector_type__.unwrap_or_default(), config_json: config_json__.map(|r| Box::::from(r).into()).unwrap_or_default(), bindings: bindings__.unwrap_or_default(), - network_ports: network_ports__.unwrap_or_default(), }) } } diff --git a/crates/proto-flow/src/derive.rs b/crates/proto-flow/src/derive.rs index 444bdb32f4..b999b71cd6 100644 --- a/crates/proto-flow/src/derive.rs +++ b/crates/proto-flow/src/derive.rs @@ -84,9 +84,6 @@ pub mod request { ::prost::alloc::string::String, ::prost::alloc::string::String, >, - /// Network ports of this proposed derivation. - #[prost(message, repeated, tag = "8")] - pub network_ports: ::prost::alloc::vec::Vec, } /// Nested message and enum types in `Validate`. pub mod validate { @@ -221,12 +218,20 @@ pub mod response { /// JSON schema of the connector's configuration. #[prost(string, tag = "2")] pub config_schema_json: ::prost::alloc::string::String, - /// JSON schema of the connecor's lambda configuration. + /// JSON schema of the connector's lambda configuration. + /// The "resource" terminology is used with Response.Spec + /// only for symmetry with the capture and materialization protocols. + /// NOTE(johnny): We can encapsulate separate lambda config vs + /// shuffle lambda config schemas by encapsulating them as separate + /// definitions within the resource config schema. #[prost(string, tag = "3")] - pub lambda_config_schema_json: ::prost::alloc::string::String, + pub resource_config_schema_json: ::prost::alloc::string::String, /// URL for connector's documention. #[prost(string, tag = "4")] pub documentation_url: ::prost::alloc::string::String, + /// Optional OAuth2 configuration. + #[prost(message, optional, tag = "5")] + pub oauth2: ::core::option::Option, } /// Validated responds to Request.Validate. 
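In the `derive::response::Spec` change above, `lambda_config_schema_json` is renamed to `resource_config_schema_json` (serialized as `resourceConfigSchema`) and an optional `oauth2` block is added. A hedged construction sketch; the schema strings, protocol value, and the bare `None` for `oauth2` are placeholders:

```rust
use proto_flow::derive;

// Construction sketch only; schema strings and the protocol value are placeholders.
fn example_spec() -> derive::response::Spec {
    derive::response::Spec {
        protocol: 3032023,
        config_schema_json: r#"{"type":"object"}"#.to_string(),
        // Renamed from lambda_config_schema_json; serializes as "resourceConfigSchema".
        resource_config_schema_json: r#"{"type":"object"}"#.to_string(),
        documentation_url: "https://example/docs".to_string(),
        // Newly added optional OAuth2 configuration; None when the connector has none.
        oauth2: None,
    }
}
```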
#[allow(clippy::derive_partial_eq_without_eq)] diff --git a/crates/proto-flow/src/derive.serde.rs b/crates/proto-flow/src/derive.serde.rs index 58ad959bdd..2551cdc6a4 100644 --- a/crates/proto-flow/src/derive.serde.rs +++ b/crates/proto-flow/src/derive.serde.rs @@ -1002,9 +1002,6 @@ impl serde::Serialize for request::Validate { if !self.import_map.is_empty() { len += 1; } - if !self.network_ports.is_empty() { - len += 1; - } let mut struct_ser = serializer.serialize_struct("derive.Request.Validate", len)?; if self.connector_type != 0 { let v = super::flow::collection_spec::derivation::ConnectorType::from_i32(self.connector_type) @@ -1033,9 +1030,6 @@ impl serde::Serialize for request::Validate { if !self.import_map.is_empty() { struct_ser.serialize_field("importMap", &self.import_map)?; } - if !self.network_ports.is_empty() { - struct_ser.serialize_field("networkPorts", &self.network_ports)?; - } struct_ser.end() } } @@ -1058,8 +1052,6 @@ impl<'de> serde::Deserialize<'de> for request::Validate { "projectRoot", "import_map", "importMap", - "network_ports", - "networkPorts", ]; #[allow(clippy::enum_variant_names)] @@ -1071,7 +1063,6 @@ impl<'de> serde::Deserialize<'de> for request::Validate { ShuffleKeyTypes, ProjectRoot, ImportMap, - NetworkPorts, } impl<'de> serde::Deserialize<'de> for GeneratedField { fn deserialize(deserializer: D) -> std::result::Result @@ -1100,7 +1091,6 @@ impl<'de> serde::Deserialize<'de> for request::Validate { "shuffleKeyTypes" | "shuffle_key_types" => Ok(GeneratedField::ShuffleKeyTypes), "projectRoot" | "project_root" => Ok(GeneratedField::ProjectRoot), "importMap" | "import_map" => Ok(GeneratedField::ImportMap), - "networkPorts" | "network_ports" => Ok(GeneratedField::NetworkPorts), _ => Err(serde::de::Error::unknown_field(value, FIELDS)), } } @@ -1127,7 +1117,6 @@ impl<'de> serde::Deserialize<'de> for request::Validate { let mut shuffle_key_types__ = None; let mut project_root__ = None; let mut import_map__ = None; - let mut network_ports__ = None; while let Some(k) = map.next_key()? { match k { GeneratedField::ConnectorType => { @@ -1174,12 +1163,6 @@ impl<'de> serde::Deserialize<'de> for request::Validate { map.next_value::>()? 
); } - GeneratedField::NetworkPorts => { - if network_ports__.is_some() { - return Err(serde::de::Error::duplicate_field("networkPorts")); - } - network_ports__ = Some(map.next_value()?); - } } } Ok(request::Validate { @@ -1190,7 +1173,6 @@ impl<'de> serde::Deserialize<'de> for request::Validate { shuffle_key_types: shuffle_key_types__.unwrap_or_default(), project_root: project_root__.unwrap_or_default(), import_map: import_map__.unwrap_or_default(), - network_ports: network_ports__.unwrap_or_default(), }) } } @@ -1783,12 +1765,15 @@ impl serde::Serialize for response::Spec { if !self.config_schema_json.is_empty() { len += 1; } - if !self.lambda_config_schema_json.is_empty() { + if !self.resource_config_schema_json.is_empty() { len += 1; } if !self.documentation_url.is_empty() { len += 1; } + if self.oauth2.is_some() { + len += 1; + } let mut struct_ser = serializer.serialize_struct("derive.Response.Spec", len)?; if self.protocol != 0 { struct_ser.serialize_field("protocol", &self.protocol)?; @@ -1796,12 +1781,15 @@ impl serde::Serialize for response::Spec { if !self.config_schema_json.is_empty() { struct_ser.serialize_field("configSchema", crate::as_raw_json(&self.config_schema_json)?)?; } - if !self.lambda_config_schema_json.is_empty() { - struct_ser.serialize_field("lambdaConfigSchema", crate::as_raw_json(&self.lambda_config_schema_json)?)?; + if !self.resource_config_schema_json.is_empty() { + struct_ser.serialize_field("resourceConfigSchema", crate::as_raw_json(&self.resource_config_schema_json)?)?; } if !self.documentation_url.is_empty() { struct_ser.serialize_field("documentationUrl", &self.documentation_url)?; } + if let Some(v) = self.oauth2.as_ref() { + struct_ser.serialize_field("oauth2", v)?; + } struct_ser.end() } } @@ -1815,18 +1803,20 @@ impl<'de> serde::Deserialize<'de> for response::Spec { "protocol", "config_schema_json", "configSchema", - "lambda_config_schema_json", - "lambdaConfigSchema", + "resource_config_schema_json", + "resourceConfigSchema", "documentation_url", "documentationUrl", + "oauth2", ]; #[allow(clippy::enum_variant_names)] enum GeneratedField { Protocol, ConfigSchemaJson, - LambdaConfigSchemaJson, + ResourceConfigSchemaJson, DocumentationUrl, + Oauth2, } impl<'de> serde::Deserialize<'de> for GeneratedField { fn deserialize(deserializer: D) -> std::result::Result @@ -1850,8 +1840,9 @@ impl<'de> serde::Deserialize<'de> for response::Spec { match value { "protocol" => Ok(GeneratedField::Protocol), "configSchema" | "config_schema_json" => Ok(GeneratedField::ConfigSchemaJson), - "lambdaConfigSchema" | "lambda_config_schema_json" => Ok(GeneratedField::LambdaConfigSchemaJson), + "resourceConfigSchema" | "resource_config_schema_json" => Ok(GeneratedField::ResourceConfigSchemaJson), "documentationUrl" | "documentation_url" => Ok(GeneratedField::DocumentationUrl), + "oauth2" => Ok(GeneratedField::Oauth2), _ => Err(serde::de::Error::unknown_field(value, FIELDS)), } } @@ -1873,8 +1864,9 @@ impl<'de> serde::Deserialize<'de> for response::Spec { { let mut protocol__ = None; let mut config_schema_json__ : Option> = None; - let mut lambda_config_schema_json__ : Option> = None; + let mut resource_config_schema_json__ : Option> = None; let mut documentation_url__ = None; + let mut oauth2__ = None; while let Some(k) = map.next_key()? 
{ match k { GeneratedField::Protocol => { @@ -1891,11 +1883,11 @@ impl<'de> serde::Deserialize<'de> for response::Spec { } config_schema_json__ = Some(map.next_value()?); } - GeneratedField::LambdaConfigSchemaJson => { - if lambda_config_schema_json__.is_some() { - return Err(serde::de::Error::duplicate_field("lambdaConfigSchema")); + GeneratedField::ResourceConfigSchemaJson => { + if resource_config_schema_json__.is_some() { + return Err(serde::de::Error::duplicate_field("resourceConfigSchema")); } - lambda_config_schema_json__ = Some(map.next_value()?); + resource_config_schema_json__ = Some(map.next_value()?); } GeneratedField::DocumentationUrl => { if documentation_url__.is_some() { @@ -1903,13 +1895,20 @@ impl<'de> serde::Deserialize<'de> for response::Spec { } documentation_url__ = Some(map.next_value()?); } + GeneratedField::Oauth2 => { + if oauth2__.is_some() { + return Err(serde::de::Error::duplicate_field("oauth2")); + } + oauth2__ = map.next_value()?; + } } } Ok(response::Spec { protocol: protocol__.unwrap_or_default(), config_schema_json: config_schema_json__.map(|r| Box::::from(r).into()).unwrap_or_default(), - lambda_config_schema_json: lambda_config_schema_json__.map(|r| Box::::from(r).into()).unwrap_or_default(), + resource_config_schema_json: resource_config_schema_json__.map(|r| Box::::from(r).into()).unwrap_or_default(), documentation_url: documentation_url__.unwrap_or_default(), + oauth2: oauth2__, }) } } diff --git a/crates/proto-flow/src/flow.rs b/crates/proto-flow/src/flow.rs index 024cab57d0..c805dbcc5b 100644 --- a/crates/proto-flow/src/flow.rs +++ b/crates/proto-flow/src/flow.rs @@ -516,7 +516,7 @@ pub struct MaterializationSpec { pub recovery_log_template: ::core::option::Option< ::proto_gazette::broker::JournalSpec, >, - /// Network ports of this capture. + /// Network ports of this materialization. #[prost(message, repeated, tag = "7")] pub network_ports: ::prost::alloc::vec::Vec, } @@ -1002,8 +1002,9 @@ pub mod combine_api { } } } -/// BuildAPI is a meta-message which name spaces messages of the Build API -/// bridge. +/// BuildAPI is deprecated and will be removed. +/// We're currently keeping Config around only to +/// avoid churning various Go snapshot tests. #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct BuildApi {} @@ -1035,90 +1036,6 @@ pub mod build_api { #[prost(string, tag = "6")] pub project_root: ::prost::alloc::string::String, } - #[allow(clippy::derive_partial_eq_without_eq)] - #[derive(Clone, PartialEq, ::prost::Message)] - pub struct Fetch { - #[prost(string, tag = "1")] - pub resource_url: ::prost::alloc::string::String, - #[prost(enumeration = "super::ContentType", tag = "2")] - pub content_type: i32, - } - /// Code labels message codes passed over the CGO bridge. - #[derive( - Clone, - Copy, - Debug, - PartialEq, - Eq, - Hash, - PartialOrd, - Ord, - ::prost::Enumeration - )] - #[repr(i32)] - pub enum Code { - /// Begin a build with a Config (Go -> Rust). - Begin = 0, - /// Poll the build after completing one or more trampoline tasks (Go -> - /// Rust). - Poll = 1, - /// Trampoline task start or completion (Rust <-> Go). - Trampoline = 2, - /// Trampoline sub-type: Start fetch of a resource. - TrampolineFetch = 3, - /// Trampoline sub-type: Start validation of a capture. - TrampolineValidateCapture = 4, - /// Trampoline sub-type: Start validation of a materialization. - TrampolineValidateMaterialization = 5, - /// Build completed successfully (Rust -> Go). 
- Done = 6, - /// Build completed with errors (Rust -> Go). - DoneWithErrors = 7, - /// Trampoline sub-type: start docker ispect of an image - TrampolineDockerInspect = 8, - /// Generate catalog specification JSON schema (Go <-> Rust) - CatalogSchema = 100, - } - impl Code { - /// String value of the enum field names used in the ProtoBuf definition. - /// - /// The values are not transformed in any way and thus are considered stable - /// (if the ProtoBuf definition does not change) and safe for programmatic use. - pub fn as_str_name(&self) -> &'static str { - match self { - Code::Begin => "BEGIN", - Code::Poll => "POLL", - Code::Trampoline => "TRAMPOLINE", - Code::TrampolineFetch => "TRAMPOLINE_FETCH", - Code::TrampolineValidateCapture => "TRAMPOLINE_VALIDATE_CAPTURE", - Code::TrampolineValidateMaterialization => { - "TRAMPOLINE_VALIDATE_MATERIALIZATION" - } - Code::Done => "DONE", - Code::DoneWithErrors => "DONE_WITH_ERRORS", - Code::TrampolineDockerInspect => "TRAMPOLINE_DOCKER_INSPECT", - Code::CatalogSchema => "CATALOG_SCHEMA", - } - } - /// Creates an enum from field names used in the ProtoBuf definition. - pub fn from_str_name(value: &str) -> ::core::option::Option { - match value { - "BEGIN" => Some(Self::Begin), - "POLL" => Some(Self::Poll), - "TRAMPOLINE" => Some(Self::Trampoline), - "TRAMPOLINE_FETCH" => Some(Self::TrampolineFetch), - "TRAMPOLINE_VALIDATE_CAPTURE" => Some(Self::TrampolineValidateCapture), - "TRAMPOLINE_VALIDATE_MATERIALIZATION" => { - Some(Self::TrampolineValidateMaterialization) - } - "DONE" => Some(Self::Done), - "DONE_WITH_ERRORS" => Some(Self::DoneWithErrors), - "TRAMPOLINE_DOCKER_INSPECT" => Some(Self::TrampolineDockerInspect), - "CATALOG_SCHEMA" => Some(Self::CatalogSchema), - _ => None, - } - } - } } /// ResetStateRequest is the request of the Testing.ResetState RPC. 
#[allow(clippy::derive_partial_eq_without_eq)] diff --git a/crates/proto-flow/src/flow.serde.rs b/crates/proto-flow/src/flow.serde.rs index c8ada4ae69..ed8ff600d3 100644 --- a/crates/proto-flow/src/flow.serde.rs +++ b/crates/proto-flow/src/flow.serde.rs @@ -234,103 +234,6 @@ impl<'de> serde::Deserialize<'de> for BuildApi { deserializer.deserialize_struct("flow.BuildAPI", FIELDS, GeneratedVisitor) } } -impl serde::Serialize for build_api::Code { - #[allow(deprecated)] - fn serialize(&self, serializer: S) -> std::result::Result - where - S: serde::Serializer, - { - let variant = match self { - Self::Begin => "BEGIN", - Self::Poll => "POLL", - Self::Trampoline => "TRAMPOLINE", - Self::TrampolineFetch => "TRAMPOLINE_FETCH", - Self::TrampolineValidateCapture => "TRAMPOLINE_VALIDATE_CAPTURE", - Self::TrampolineValidateMaterialization => "TRAMPOLINE_VALIDATE_MATERIALIZATION", - Self::Done => "DONE", - Self::DoneWithErrors => "DONE_WITH_ERRORS", - Self::TrampolineDockerInspect => "TRAMPOLINE_DOCKER_INSPECT", - Self::CatalogSchema => "CATALOG_SCHEMA", - }; - serializer.serialize_str(variant) - } -} -impl<'de> serde::Deserialize<'de> for build_api::Code { - #[allow(deprecated)] - fn deserialize(deserializer: D) -> std::result::Result - where - D: serde::Deserializer<'de>, - { - const FIELDS: &[&str] = &[ - "BEGIN", - "POLL", - "TRAMPOLINE", - "TRAMPOLINE_FETCH", - "TRAMPOLINE_VALIDATE_CAPTURE", - "TRAMPOLINE_VALIDATE_MATERIALIZATION", - "DONE", - "DONE_WITH_ERRORS", - "TRAMPOLINE_DOCKER_INSPECT", - "CATALOG_SCHEMA", - ]; - - struct GeneratedVisitor; - - impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { - type Value = build_api::Code; - - fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - write!(formatter, "expected one of: {:?}", &FIELDS) - } - - fn visit_i64(self, v: i64) -> std::result::Result - where - E: serde::de::Error, - { - use std::convert::TryFrom; - i32::try_from(v) - .ok() - .and_then(build_api::Code::from_i32) - .ok_or_else(|| { - serde::de::Error::invalid_value(serde::de::Unexpected::Signed(v), &self) - }) - } - - fn visit_u64(self, v: u64) -> std::result::Result - where - E: serde::de::Error, - { - use std::convert::TryFrom; - i32::try_from(v) - .ok() - .and_then(build_api::Code::from_i32) - .ok_or_else(|| { - serde::de::Error::invalid_value(serde::de::Unexpected::Unsigned(v), &self) - }) - } - - fn visit_str(self, value: &str) -> std::result::Result - where - E: serde::de::Error, - { - match value { - "BEGIN" => Ok(build_api::Code::Begin), - "POLL" => Ok(build_api::Code::Poll), - "TRAMPOLINE" => Ok(build_api::Code::Trampoline), - "TRAMPOLINE_FETCH" => Ok(build_api::Code::TrampolineFetch), - "TRAMPOLINE_VALIDATE_CAPTURE" => Ok(build_api::Code::TrampolineValidateCapture), - "TRAMPOLINE_VALIDATE_MATERIALIZATION" => Ok(build_api::Code::TrampolineValidateMaterialization), - "DONE" => Ok(build_api::Code::Done), - "DONE_WITH_ERRORS" => Ok(build_api::Code::DoneWithErrors), - "TRAMPOLINE_DOCKER_INSPECT" => Ok(build_api::Code::TrampolineDockerInspect), - "CATALOG_SCHEMA" => Ok(build_api::Code::CatalogSchema), - _ => Err(serde::de::Error::unknown_variant(value, FIELDS)), - } - } - } - deserializer.deserialize_any(GeneratedVisitor) - } -} impl serde::Serialize for build_api::Config { #[allow(deprecated)] fn serialize(&self, serializer: S) -> std::result::Result @@ -514,118 +417,6 @@ impl<'de> serde::Deserialize<'de> for build_api::Config { deserializer.deserialize_struct("flow.BuildAPI.Config", FIELDS, GeneratedVisitor) } } -impl serde::Serialize for 
build_api::Fetch { - #[allow(deprecated)] - fn serialize(&self, serializer: S) -> std::result::Result - where - S: serde::Serializer, - { - use serde::ser::SerializeStruct; - let mut len = 0; - if !self.resource_url.is_empty() { - len += 1; - } - if self.content_type != 0 { - len += 1; - } - let mut struct_ser = serializer.serialize_struct("flow.BuildAPI.Fetch", len)?; - if !self.resource_url.is_empty() { - struct_ser.serialize_field("resourceUrl", &self.resource_url)?; - } - if self.content_type != 0 { - let v = ContentType::from_i32(self.content_type) - .ok_or_else(|| serde::ser::Error::custom(format!("Invalid variant {}", self.content_type)))?; - struct_ser.serialize_field("contentType", &v)?; - } - struct_ser.end() - } -} -impl<'de> serde::Deserialize<'de> for build_api::Fetch { - #[allow(deprecated)] - fn deserialize(deserializer: D) -> std::result::Result - where - D: serde::Deserializer<'de>, - { - const FIELDS: &[&str] = &[ - "resource_url", - "resourceUrl", - "content_type", - "contentType", - ]; - - #[allow(clippy::enum_variant_names)] - enum GeneratedField { - ResourceUrl, - ContentType, - } - impl<'de> serde::Deserialize<'de> for GeneratedField { - fn deserialize(deserializer: D) -> std::result::Result - where - D: serde::Deserializer<'de>, - { - struct GeneratedVisitor; - - impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { - type Value = GeneratedField; - - fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - write!(formatter, "expected one of: {:?}", &FIELDS) - } - - #[allow(unused_variables)] - fn visit_str(self, value: &str) -> std::result::Result - where - E: serde::de::Error, - { - match value { - "resourceUrl" | "resource_url" => Ok(GeneratedField::ResourceUrl), - "contentType" | "content_type" => Ok(GeneratedField::ContentType), - _ => Err(serde::de::Error::unknown_field(value, FIELDS)), - } - } - } - deserializer.deserialize_identifier(GeneratedVisitor) - } - } - struct GeneratedVisitor; - impl<'de> serde::de::Visitor<'de> for GeneratedVisitor { - type Value = build_api::Fetch; - - fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - formatter.write_str("struct flow.BuildAPI.Fetch") - } - - fn visit_map(self, mut map: V) -> std::result::Result - where - V: serde::de::MapAccess<'de>, - { - let mut resource_url__ = None; - let mut content_type__ = None; - while let Some(k) = map.next_key()? { - match k { - GeneratedField::ResourceUrl => { - if resource_url__.is_some() { - return Err(serde::de::Error::duplicate_field("resourceUrl")); - } - resource_url__ = Some(map.next_value()?); - } - GeneratedField::ContentType => { - if content_type__.is_some() { - return Err(serde::de::Error::duplicate_field("contentType")); - } - content_type__ = Some(map.next_value::()? as i32); - } - } - } - Ok(build_api::Fetch { - resource_url: resource_url__.unwrap_or_default(), - content_type: content_type__.unwrap_or_default(), - }) - } - } - deserializer.deserialize_struct("flow.BuildAPI.Fetch", FIELDS, GeneratedVisitor) - } -} impl serde::Serialize for CaptureSpec { #[allow(deprecated)] fn serialize(&self, serializer: S) -> std::result::Result diff --git a/crates/proto-flow/src/internal.rs b/crates/proto-flow/src/internal.rs new file mode 100644 index 0000000000..f1fd9600e6 --- /dev/null +++ b/crates/proto-flow/src/internal.rs @@ -0,0 +1,85 @@ +use crate::{capture, derive, materialize, runtime, Any}; +use prost::Message; + +macro_rules! 
impl_internal { + ($msg_type:ty , $ext_type:ty , $type_url:literal) => { + impl $msg_type { + /// Get the internal field, decoded into its corresponding extension type. + pub fn get_internal(&self) -> Option> { + let Some(Any{type_url, value}) = &self.internal else { return None }; + + if type_url != $type_url { + return Some(Err(format!( + "internal field has wrong type_url {}, expected {}", + type_url, $type_url + ))); + } + match prost::Message::decode(value.clone()) { + Ok(m) => Some(Ok(m)), + Err(err) => Some(Err(format!( + "internal field {} cannot decode: {err:?}", + $type_url + ))), + } + } + + /// Set and inspect the internal field via a callback. + /// Modifications made by the callback are re-encoded into the + /// internal Any message, the post-modification value is returned. + pub fn set_internal( + &mut self, + buf: &mut bytes::BytesMut, + cb: F, + ) -> Result<$ext_type, String> + where + F: FnOnce(&mut $ext_type), + { + let mut internal = match self.get_internal() { + Some(result) => result?, + None => <$ext_type>::default(), + }; + cb(&mut internal); + + buf.reserve(internal.encoded_len()); + internal.encode(buf).unwrap(); + + self.internal = Some(::pbjson_types::Any { + type_url: $type_url.to_string(), + value: buf.split().freeze(), + }); + Ok(internal) + } + } + }; +} + +impl_internal!( + capture::Request, + runtime::CaptureRequestExt, + "flow://runtime.CaptureRequestExt" +); +impl_internal!( + capture::Response, + runtime::CaptureResponseExt, + "flow://runtime.CaptureResponseExt" +); +impl_internal!( + derive::Request, + runtime::DeriveRequestExt, + "flow://runtime.DeriveRequestExt" +); +impl_internal!( + derive::Response, + runtime::DeriveResponseExt, + "flow://runtime.DeriveResponseExt" +); +impl_internal!( + materialize::Request, + runtime::MaterializeRequestExt, + "flow://runtime.MaterializeRequestExt" +); +impl_internal!( + materialize::Response, + runtime::MaterializeResponseExt, + "flow://runtime.MaterializeResponseExt" +); diff --git a/crates/proto-flow/src/lib.rs b/crates/proto-flow/src/lib.rs index b027892f78..ac1ea9d226 100644 --- a/crates/proto-flow/src/lib.rs +++ b/crates/proto-flow/src/lib.rs @@ -4,9 +4,11 @@ use std::collections::BTreeMap; pub mod capture; pub mod derive; pub mod flow; +mod internal; pub mod materialize; pub mod ops; pub mod runtime; +mod zeroize; // Adapt a &str of JSON to a &RawValue for serialization. fn as_raw_json(v: &str) -> Result<&RawValue, E> { diff --git a/crates/proto-flow/src/materialize.rs b/crates/proto-flow/src/materialize.rs index a96454e41e..a5adf58b5f 100644 --- a/crates/proto-flow/src/materialize.rs +++ b/crates/proto-flow/src/materialize.rs @@ -65,9 +65,6 @@ pub mod request { pub config_json: ::prost::alloc::string::String, #[prost(message, repeated, tag = "4")] pub bindings: ::prost::alloc::vec::Vec, - /// Network ports of this proposed materialization. - #[prost(message, repeated, tag = "5")] - pub network_ports: ::prost::alloc::vec::Vec, } /// Nested message and enum types in `Validate`. 
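The `impl_internal!` macro above gives each protocol message `get_internal` / `set_internal` accessors over its `Any`-typed `internal` field. A usage sketch against the signatures shown there; the `round_trip_internal` helper and the choice to leave `labels` unset are illustrative assumptions:

```rust
use proto_flow::{derive, runtime};

// Round-trips runtime-internal metadata on a derive::Request. The error type
// is String, matching the macro above; leaving `labels` unset is illustrative.
fn round_trip_internal(request: &mut derive::Request) -> Result<(), String> {
    let mut buf = bytes::BytesMut::new();

    // set_internal decodes any existing extension (or starts from Default),
    // applies the callback, and re-encodes it into `request.internal`.
    request.set_internal(&mut buf, |ext: &mut runtime::DeriveRequestExt| {
        ext.labels = None; // e.g. populate shard labels here
    })?;

    // get_internal yields None when no internal field is set, or the decoded
    // extension (or a decode error) otherwise.
    if let Some(decoded) = request.get_internal() {
        let _ext: runtime::DeriveRequestExt = decoded?;
    }
    Ok(())
}
```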
pub mod validate { diff --git a/crates/proto-flow/src/materialize.serde.rs b/crates/proto-flow/src/materialize.serde.rs index a64fc30f89..112685d9c3 100644 --- a/crates/proto-flow/src/materialize.serde.rs +++ b/crates/proto-flow/src/materialize.serde.rs @@ -1504,9 +1504,6 @@ impl serde::Serialize for request::Validate { if !self.bindings.is_empty() { len += 1; } - if !self.network_ports.is_empty() { - len += 1; - } let mut struct_ser = serializer.serialize_struct("materialize.Request.Validate", len)?; if !self.name.is_empty() { struct_ser.serialize_field("name", &self.name)?; @@ -1522,9 +1519,6 @@ impl serde::Serialize for request::Validate { if !self.bindings.is_empty() { struct_ser.serialize_field("bindings", &self.bindings)?; } - if !self.network_ports.is_empty() { - struct_ser.serialize_field("networkPorts", &self.network_ports)?; - } struct_ser.end() } } @@ -1541,8 +1535,6 @@ impl<'de> serde::Deserialize<'de> for request::Validate { "config_json", "config", "bindings", - "network_ports", - "networkPorts", ]; #[allow(clippy::enum_variant_names)] @@ -1551,7 +1543,6 @@ impl<'de> serde::Deserialize<'de> for request::Validate { ConnectorType, ConfigJson, Bindings, - NetworkPorts, } impl<'de> serde::Deserialize<'de> for GeneratedField { fn deserialize(deserializer: D) -> std::result::Result @@ -1577,7 +1568,6 @@ impl<'de> serde::Deserialize<'de> for request::Validate { "connectorType" | "connector_type" => Ok(GeneratedField::ConnectorType), "config" | "config_json" => Ok(GeneratedField::ConfigJson), "bindings" => Ok(GeneratedField::Bindings), - "networkPorts" | "network_ports" => Ok(GeneratedField::NetworkPorts), _ => Err(serde::de::Error::unknown_field(value, FIELDS)), } } @@ -1601,7 +1591,6 @@ impl<'de> serde::Deserialize<'de> for request::Validate { let mut connector_type__ = None; let mut config_json__ : Option> = None; let mut bindings__ = None; - let mut network_ports__ = None; while let Some(k) = map.next_key()? { match k { GeneratedField::Name => { @@ -1628,12 +1617,6 @@ impl<'de> serde::Deserialize<'de> for request::Validate { } bindings__ = Some(map.next_value()?); } - GeneratedField::NetworkPorts => { - if network_ports__.is_some() { - return Err(serde::de::Error::duplicate_field("networkPorts")); - } - network_ports__ = Some(map.next_value()?); - } } } Ok(request::Validate { @@ -1641,7 +1624,6 @@ impl<'de> serde::Deserialize<'de> for request::Validate { connector_type: connector_type__.unwrap_or_default(), config_json: config_json__.map(|r| Box::::from(r).into()).unwrap_or_default(), bindings: bindings__.unwrap_or_default(), - network_ports: network_ports__.unwrap_or_default(), }) } } diff --git a/crates/proto-flow/src/runtime.rs b/crates/proto-flow/src/runtime.rs index 499338c76f..bc829a5646 100644 --- a/crates/proto-flow/src/runtime.rs +++ b/crates/proto-flow/src/runtime.rs @@ -7,6 +7,8 @@ pub struct TaskServiceConfig { pub task_name: ::prost::alloc::string::String, #[prost(string, tag = "3")] pub uds_path: ::prost::alloc::string::String, + #[prost(string, tag = "4")] + pub container_network: ::prost::alloc::string::String, } /// ShuffleRequest is the request message of a Shuffle RPC. /// It's a description of a document shuffle, @@ -122,10 +124,33 @@ pub struct RocksDbDescriptor { #[prost(string, tag = "2")] pub rocksdb_path: ::prost::alloc::string::String, } +/// Container is a description of a running connector container. 
+#[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct Container { + #[prost(string, tag = "1")] + pub ip_addr: ::prost::alloc::string::String, + #[prost(message, repeated, tag = "2")] + pub network_ports: ::prost::alloc::vec::Vec, +} +#[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct CaptureRequestExt { + #[prost(message, optional, tag = "1")] + pub labels: ::core::option::Option, +} +#[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct CaptureResponseExt { + #[prost(message, optional, tag = "1")] + pub container: ::core::option::Option, +} #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct DeriveRequestExt { #[prost(message, optional, tag = "1")] + pub labels: ::core::option::Option, + #[prost(message, optional, tag = "2")] pub open: ::core::option::Option, } /// Nested message and enum types in `DeriveRequestExt`. @@ -133,14 +158,11 @@ pub mod derive_request_ext { #[allow(clippy::derive_partial_eq_without_eq)] #[derive(Clone, PartialEq, ::prost::Message)] pub struct Open { - /// Configured log level. - #[prost(enumeration = "super::super::ops::log::Level", tag = "1")] - pub log_level: i32, /// URL with a registered SQLite VFS which should be opened. - #[prost(string, tag = "2")] + #[prost(string, tag = "1")] pub sqlite_vfs_uri: ::prost::alloc::string::String, /// RocksDB descriptor which should be opened. - #[prost(message, optional, tag = "3")] + #[prost(message, optional, tag = "2")] pub rocksdb_descriptor: ::core::option::Option, } } @@ -148,10 +170,12 @@ pub mod derive_request_ext { #[derive(Clone, PartialEq, ::prost::Message)] pub struct DeriveResponseExt { #[prost(message, optional, tag = "1")] - pub opened: ::core::option::Option, + pub container: ::core::option::Option, #[prost(message, optional, tag = "2")] - pub published: ::core::option::Option, + pub opened: ::core::option::Option, #[prost(message, optional, tag = "3")] + pub published: ::core::option::Option, + #[prost(message, optional, tag = "4")] pub flushed: ::core::option::Option, } /// Nested message and enum types in `DeriveResponseExt`. 
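Together with the removal of `network_ports` from the capture, derive, and materialize `Request.Validate` messages earlier in this diff, connector networking is now surfaced at runtime through `Container` on the new `*ResponseExt` messages. A minimal sketch using the generated structs above; the address, the port values, and the assumption that `network_ports` is a `Vec<flow::NetworkPort>` are illustrative:

```rust
use proto_flow::{flow, runtime};

// Illustrative values only; field names follow the generated structs above.
fn example_capture_ext() -> runtime::CaptureResponseExt {
    runtime::CaptureResponseExt {
        container: Some(runtime::Container {
            ip_addr: "172.17.0.2".to_string(),
            network_ports: vec![flow::NetworkPort {
                number: 8080,
                protocol: "https".to_string(),
                public: true,
            }],
        }),
    }
}
```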
@@ -184,3 +208,15 @@ pub mod derive_response_ext { pub stats: ::core::option::Option, } } +#[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct MaterializeRequestExt { + #[prost(message, optional, tag = "1")] + pub labels: ::core::option::Option, +} +#[allow(clippy::derive_partial_eq_without_eq)] +#[derive(Clone, PartialEq, ::prost::Message)] +pub struct MaterializeResponseExt { + #[prost(message, optional, tag = "1")] + pub container: ::core::option::Option, +} diff --git a/crates/proto-flow/src/zeroize.rs b/crates/proto-flow/src/zeroize.rs new file mode 100644 index 0000000000..70387b8aff --- /dev/null +++ b/crates/proto-flow/src/zeroize.rs @@ -0,0 +1,55 @@ +use super::{capture, derive, flow, materialize}; +use zeroize::Zeroize; + +impl Drop for capture::request::Spec { + fn drop(&mut self) { + self.config_json.zeroize(); + } +} +impl Drop for capture::request::Discover { + fn drop(&mut self) { + self.config_json.zeroize(); + } +} +impl Drop for capture::request::Validate { + fn drop(&mut self) { + self.config_json.zeroize(); + } +} +impl Drop for flow::CaptureSpec { + fn drop(&mut self) { + self.config_json.zeroize(); + } +} + +impl Drop for derive::request::Spec { + fn drop(&mut self) { + self.config_json.zeroize(); + } +} +impl Drop for derive::request::Validate { + fn drop(&mut self) { + self.config_json.zeroize(); + } +} +impl Drop for flow::collection_spec::Derivation { + fn drop(&mut self) { + self.config_json.zeroize(); + } +} + +impl Drop for materialize::request::Spec { + fn drop(&mut self) { + self.config_json.zeroize(); + } +} +impl Drop for materialize::request::Validate { + fn drop(&mut self) { + self.config_json.zeroize(); + } +} +impl Drop for flow::MaterializationSpec { + fn drop(&mut self) { + self.config_json.zeroize(); + } +} diff --git a/crates/proto-flow/tests/regression.rs b/crates/proto-flow/tests/regression.rs index 35f4de1f08..2396a25c8d 100644 --- a/crates/proto-flow/tests/regression.rs +++ b/crates/proto-flow/tests/regression.rs @@ -373,7 +373,6 @@ fn ex_capture_request() -> capture::Request { collection: Some(ex_collection_spec()), resource_config_json: json!({"resource":"config"}).to_string(), }], - network_ports: ex_network_ports(), }), apply: Some(capture::request::Apply { capture: Some(ex_capture_spec()), @@ -457,7 +456,6 @@ fn ex_derive_request() -> derive::Request { "file:///path/to/import".to_string(), )] .into(), - network_ports: ex_network_ports(), }), open: Some(derive::request::Open { collection: Some(ex_collection_spec()), @@ -493,8 +491,9 @@ fn ex_derive_response() -> derive::Response { spec: Some(derive::response::Spec { protocol: 3032023, config_schema_json: json!({"config": "schema"}).to_string(), - lambda_config_schema_json: json!({"lambda": "schema"}).to_string(), + resource_config_schema_json: json!({"lambda": "schema"}).to_string(), documentation_url: "https://example/docs".to_string(), + oauth2: Some(ex_oauth2()), }), validated: Some(derive::response::Validated { transforms: vec![ @@ -534,7 +533,6 @@ fn ex_materialize_request() -> materialize::Request { resource_config_json: json!({"resource":"config"}).to_string(), field_config_json_map: ex_field_config(), }], - network_ports: ex_network_ports(), }), apply: Some(materialize::request::Apply { materialization: Some(ex_materialization_spec()), diff --git a/crates/proto-flow/tests/snapshots/regression__capture_request_json.snap b/crates/proto-flow/tests/snapshots/regression__capture_request_json.snap index 47a3e96526..3ad4aa2d2e 100644 --- 
a/crates/proto-flow/tests/snapshots/regression__capture_request_json.snap +++ b/crates/proto-flow/tests/snapshots/regression__capture_request_json.snap @@ -85,16 +85,6 @@ expression: json_test(msg) } } } - ], - "networkPorts": [ - { - "number": 8080, - "protocol": "https", - "public": true - }, - { - "number": 9000 - } ] }, "apply": { diff --git a/crates/proto-flow/tests/snapshots/regression__capture_request_proto.snap b/crates/proto-flow/tests/snapshots/regression__capture_request_proto.snap index 9c357d998f..fdfb53aa08 100644 --- a/crates/proto-flow/tests/snapshots/regression__capture_request_proto.snap +++ b/crates/proto-flow/tests/snapshots/regression__capture_request_proto.snap @@ -5,7 +5,7 @@ expression: proto_test(msg) |0a150807 12117b22 73706563 223a2263| ......{"spec":"c 00000000 |6f6e6669 67227d12 19080712 157b2264| onfig"}......{"d 00000010 |6973636f 76657222 3a22636f 6e666967| iscover":"config 00000020 -|227d1ad9 030a1076 616c6964 6174652f| "}.....validate/ 00000030 +|227d1ac6 030a1076 616c6964 6174652f| "}.....validate/ 00000030 |63617074 75726510 071a157b 2276616c| capture....{"val 00000040 |69646174 65223a22 636f6e66 6967227d| idate":"config"} 00000050 |2298030a 157b2272 65736f75 72636522| "....{"resource" 00000060 @@ -33,99 +33,98 @@ expression: proto_test(msg) |083e1080 e59a773a 18506174 687b7b50| .>....w:.Path{{P 000001c0 |6f737466 69782e54 656d706c 6174657d| ostfix.Template} 000001d0 |7d300438 cfb0f501 5a117b22 72656164| }0.8....Z.{"read 000001e0 -|223a2273 6368656d 61227d2a 0c08903f| ":"schema"}*...? 000001f0 -|12056874 74707318 012a0308 a84622b7| ..https..*...F". 00000200 -|050aa705 0a0e6163 6d65436f 2f636170| ......acmeCo/cap 00000210 -|74757265 10071a19 7b226361 70747572| ture....{"captur 00000220 -|65223a7b 22636f6e 66696722 3a34327d| e":{"config":42} 00000230 -|7d22a403 0a157b22 7265736f 75726365| }"....{"resource 00000240 -|223a2263 6f6e6669 67227d12 04736f6d| ":"config"}..som 00000250 -|65120470 6174681a fe020a11 61636d65| e..path.....acme 00000260 -|436f2f63 6f6c6c65 6374696f 6e1a082f| Co/collection../ 00000270 -|6b65792f 6f6e651a 082f6b65 792f7477| key/one../key/tw 00000280 -|6f220b2f 5f6d6574 612f7575 69642a04| o"./_meta/uuid*. 00000290 -|74797065 2a067265 67696f6e 325d0a09| type*.region2].. 000002a0 -|2f6a736f 6e2f7074 72120761 2d666965| /json/ptr..a-fie 000002b0 -|6c642001 32450a07 696e7465 6765720a| ld .2E..integer. 000002c0 -|06737472 696e671a 131a0374 79702204| .string....typ". 000002d0 -|64617465 30b9603a 03656e63 22057469| date0.`:.enc".ti 000002e0 -|746c652a 04646573 63320e7b 22646566| tle*.desc2.{"def 000002f0 -|223a2261 756c7422 7d40013a 0e7b2261| ":"ault"}@.:.{"a 00000300 -|636b223a 22747275 65227d42 127b2277| ck":"true"}B.{"w 00000310 -|72697465 223a2273 6368656d 61227d4a| rite":"schema"}J 00000320 -|a3010a12 70617274 6974696f 6e2f7465| ....partition/te 00000330 -|6d706c61 74651003 1a400a1e 0a0f6573| mplate...@....es 00000340 -|74756172 792e6465 762f666f 6f120b6c| tuary.dev/foo..l 00000350 -|6162656c 2d76616c 75650a1e 0a0f6573| abel-value....es 00000360 -|74756172 792e6465 762f6261 72120b6f| tuary.dev/bar..o 00000370 -|74686572 2d76616c 75652242 08e9ec06| ther-value"B.... 00000380 -|10031a12 73333a2f 2f627563 6b65742f| ....s3://bucket/ 00000390 -|70726566 69782203 08ac0232 07083e10| prefix"....2..>. 000003a0 -|80e59a77 3a185061 74687b7b 506f7374| ...w:.Path{{Post 000003b0 -|6669782e 54656d70 6c617465 7d7d3004| fix.Template}}0. 
000003c0 -|38cfb0f5 015a117b 22726561 64223a22| 8....Z.{"read":" 000003d0 -|73636865 6d61227d 28ac0232 80010a0e| schema"}(..2.... 000003e0 -|73686172 642f7465 6d706c61 74651a0f| shard/template.. 000003f0 -|7265636f 76657279 2f707265 66697822| recovery/prefix" 00000400 -|0b68696e 742f7072 65666978 28033202| .hint/prefix(.2. 00000410 -|083c4801 52400a1e 0a0f6573 74756172| .....w:. 00000660 -|50617468 7b7b506f 73746669 782e5465| Path{{Postfix.Te 00000670 -|6d706c61 74657d7d 300438cf b0f5015a| mplate}}0.8....Z 00000680 -|117b2272 65616422 3a227363 68656d61| .{"read":"schema 00000690 -|227d28ac 02328001 0a0e7368 6172642f| "}(..2....shard/ 000006a0 -|74656d70 6c617465 1a0f7265 636f7665| template..recove 000006b0 -|72792f70 72656669 78220b68 696e742f| ry/prefix".hint/ 000006c0 -|70726566 69782803 3202083c 48015240| prefix(.2...... 00000390 +|773a1850 6174687b 7b506f73 74666978| w:.Path{{Postfix 000003a0 +|2e54656d 706c6174 657d7d30 0438cfb0| .Template}}0.8.. 000003b0 +|f5015a11 7b227265 6164223a 22736368| ..Z.{"read":"sch 000003c0 +|656d6122 7d28ac02 3280010a 0e736861| ema"}(..2....sha 000003d0 +|72642f74 656d706c 6174651a 0f726563| rd/template..rec 000003e0 +|6f766572 792f7072 65666978 220b6869| overy/prefix".hi 000003f0 +|6e742f70 72656669 78280332 02083c48| nt/prefix(.2......w:.Pat 00000650 +|687b7b50 6f737466 69782e54 656d706c| h{{Postfix.Templ 00000660 +|6174657d 7d300438 cfb0f501 5a117b22| ate}}0.8....Z.{" 00000670 +|72656164 223a2273 6368656d 61227d28| read":"schema"}( 00000680 +|ac023280 010a0e73 68617264 2f74656d| ..2....shard/tem 00000690 +|706c6174 651a0f72 65636f76 6572792f| plate..recovery/ 000006a0 +|70726566 6978220b 68696e74 2f707265| prefix".hint/pre 000006b0 +|66697828 03320208 3c480152 400a1e0a| fix(.2...... 00000510 -|773a1850 6174687b 7b506f73 74666978| w:.Path{{Postfix 00000520 -|2e54656d 706c6174 657d7d30 0438cfb0| .Template}}0.8.. 00000530 -|f5015a11 7b227265 6164223a 22736368| ..Z.{"read":"sch 00000540 -|656d6122 7d120b31 313a3232 3a33333a| ema"}..11:22:33: 00000550 -|34341a14 15332211 001d7766 554425bb| 44...3"...wfUD%. 00000560 -|aa99882d ffeeddcc 2a1a7b22 636f6e6e| ...-....*.{"conn 00000570 -|6563746f 72223a7b 22737461 7465223a| ector":{"state": 00000580 -|34327d7d 223e0802 121209d2 04000000| 42}}">.......... 00000590 -|00000011 2e160000 00000000 1a160a09| ................ 000005a0 -|5b747275 652c3332 5d120456 4b1e0918| [true,32]..VK... 000005b0 -|85c39f15 220e7b22 72656164 223a2264| ....".{"read":"d 000005c0 -|6f63227d 2a003266 0a640a4a 0a15612f| oc"}*.2f.d.J..a/ 000005d0 -|72656164 2f6a6f75 726e616c 3b737566| read/journal;suf 000005e0 -|66697812 3108b960 12150a05 03090805| fix.1..`........ 000005f0 -|07120c09 e3210000 00000000 10d70812| .....!.......... 00000600 -|150a0507 0c662b1d 120c0935 01000000| .....f+....5.... 00000610 -|00000010 ae111216 0a0e616e 2f61636b| ..........an/ack 00000620 -|2f6a6f75 726e616c 12040304 02053a00| /journal......:. 00000630 -|a2061f0a 15666c6f 773a2f2f 696e7465| .....flow://inte 00000640 -|726e616c 2e746869 6e671206 12024869| rnal.thing....Hi 00000650 -|1801| .. 00000660 - 00000662 +|70617468 2f746f2f 696d706f 72741ac0| path/to/import.. 000003b0 +|030afe02 0a116163 6d65436f 2f636f6c| ......acmeCo/col 000003c0 +|6c656374 696f6e1a 082f6b65 792f6f6e| lection../key/on 000003d0 +|651a082f 6b65792f 74776f22 0b2f5f6d| e../key/two"./_m 000003e0 +|6574612f 75756964 2a047479 70652a06| eta/uuid*.type*. 
000003f0 +|72656769 6f6e325d 0a092f6a 736f6e2f| region2]../json/ 00000400 +|70747212 07612d66 69656c64 20013245| ptr..a-field .2E 00000410 +|0a07696e 74656765 720a0673 7472696e| ..integer..strin 00000420 +|671a131a 03747970 22046461 746530b9| g....typ".date0. 00000430 +|603a0365 6e632205 7469746c 652a0464| `:.enc".title*.d 00000440 +|65736332 0e7b2264 6566223a 2261756c| esc2.{"def":"aul 00000450 +|74227d40 013a0e7b 2261636b 223a2274| t"}@.:.{"ack":"t 00000460 +|72756522 7d42127b 22777269 7465223a| rue"}B.{"write": 00000470 +|22736368 656d6122 7d4aa301 0a127061| "schema"}J....pa 00000480 +|72746974 696f6e2f 74656d70 6c617465| rtition/template 00000490 +|10031a40 0a1e0a0f 65737475 6172792e| ...@....estuary. 000004a0 +|6465762f 666f6f12 0b6c6162 656c2d76| dev/foo..label-v 000004b0 +|616c7565 0a1e0a0f 65737475 6172792e| alue....estuary. 000004c0 +|6465762f 62617212 0b6f7468 65722d76| dev/bar..other-v 000004d0 +|616c7565 224208e9 ec061003 1a127333| alue"B........s3 000004e0 +|3a2f2f62 75636b65 742f7072 65666978| ://bucket/prefix 000004f0 +|220308ac 02320708 3e1080e5 9a773a18| "....2..>....w:. 00000500 +|50617468 7b7b506f 73746669 782e5465| Path{{Postfix.Te 00000510 +|6d706c61 74657d7d 300438cf b0f5015a| mplate}}0.8....Z 00000520 +|117b2272 65616422 3a227363 68656d61| .{"read":"schema 00000530 +|227d120b 31313a32 323a3333 3a34341a| "}..11:22:33:44. 00000540 +|14153322 11001d77 66554425 bbaa9988| ..3"...wfUD%.... 00000550 +|2dffeedd cc2a1a7b 22636f6e 6e656374| -....*.{"connect 00000560 +|6f72223a 7b227374 61746522 3a34327d| or":{"state":42} 00000570 +|7d223e08 02121209 d2040000 00000000| }">............. 00000580 +|112e1600 00000000 001a160a 095b7472| .............[tr 00000590 +|75652c33 325d1204 564b1e09 1885c39f| ue,32]..VK...... 000005a0 +|15220e7b 22726561 64223a22 646f6322| .".{"read":"doc" 000005b0 +|7d2a0032 660a640a 4a0a1561 2f726561| }*.2f.d.J..a/rea 000005c0 +|642f6a6f 75726e61 6c3b7375 66666978| d/journal;suffix 000005d0 +|123108b9 6012150a 05030908 0507120c| .1..`........... 000005e0 +|09e32100 00000000 0010d708 12150a05| ..!............. 000005f0 +|070c662b 1d120c09 35010000 00000000| ..f+....5....... 00000600 +|10ae1112 160a0e61 6e2f6163 6b2f6a6f| .......an/ack/jo 00000610 +|75726e61 6c120403 0402053a 00a2061f| urnal......:.... 00000620 +|0a15666c 6f773a2f 2f696e74 65726e61| ..flow://interna 00000630 +|6c2e7468 696e6712 06120248 691801| l.thing....Hi.. 
00000640 + 0000064f diff --git a/crates/proto-flow/tests/snapshots/regression__derive_response_json.snap b/crates/proto-flow/tests/snapshots/regression__derive_response_json.snap index 9f160b3e51..9fca6b2eca 100644 --- a/crates/proto-flow/tests/snapshots/regression__derive_response_json.snap +++ b/crates/proto-flow/tests/snapshots/regression__derive_response_json.snap @@ -6,8 +6,32 @@ expression: json_test(msg) "spec": { "protocol": 3032023, "configSchema": {"config":"schema"}, - "lambdaConfigSchema": {"lambda":"schema"}, - "documentationUrl": "https://example/docs" + "resourceConfigSchema": {"lambda":"schema"}, + "documentationUrl": "https://example/docs", + "oauth2": { + "provider": "oauth-provider", + "authUrlTemplate": "https://auth-url", + "accessTokenUrlTemplate": "https://access-token", + "accessTokenMethod": "POST", + "accessTokenBody": "foo", + "accessTokenHeaders": { + "hdr-one": {"hello":"hdr"}, + "hdr-two": 42.5 + }, + "accessTokenResponseMap": { + "foo": true, + "key": "value" + }, + "refreshTokenUrlTemplate": "https://refresh-token", + "refreshTokenMethod": "POST", + "refreshTokenBody": "refresh!", + "refreshTokenHeaders": { + "hdr-three": {"refresh":"hdr"} + }, + "refreshTokenResponseMap": { + "access": "here" + } + } }, "validated": { "transforms": [ diff --git a/crates/proto-flow/tests/snapshots/regression__derive_response_proto.snap b/crates/proto-flow/tests/snapshots/regression__derive_response_proto.snap index 1ccd7f70f6..78633ac87b 100644 --- a/crates/proto-flow/tests/snapshots/regression__derive_response_proto.snap +++ b/crates/proto-flow/tests/snapshots/regression__derive_response_proto.snap @@ -2,18 +2,32 @@ source: crates/proto-flow/tests/regression.rs expression: proto_test(msg) --- -|0a4508d7 87b90112 137b2263 6f6e6669| .E.......{"confi 00000000 -|67223a22 73636865 6d61227d 1a137b22| g":"schema"}..{" 00000010 -|6c616d62 6461223a 22736368 656d6122| lambda":"schema" 00000020 -|7d221468 74747073 3a2f2f65 78616d70| }".https://examp 00000030 -|6c652f64 6f637312 310a0208 010a0012| le/docs.1....... 00000040 -|290a1e66 696c653a 2f2f2f70 726f6a65| )..file:///proje 00000050 -|63742f72 6f6f742f 64656e6f 2e6a736f| ct/root/deno.jso 00000060 -|6e120763 6f6e7465 6e741a00 22150a13| n..content.."... 00000070 -|7b227075 626c6973 68656422 3a22646f| {"published":"do 00000080 -|63227d2a 0032180a 160a127b 22737461| c"}*.2.....{"sta 00000090 -|7465223a 22757064 61746522 7d1001a2| te":"update"}... 000000a0 -|061f0a15 666c6f77 3a2f2f69 6e746572| ....flow://inter 000000b0 -|6e616c2e 7468696e 67120612 02486918| nal.thing....Hi. 000000c0 -|01| . 000000d0 - 000000d1 +|0aae0208 d787b901 12137b22 636f6e66| ..........{"conf 00000000 +|6967223a 22736368 656d6122 7d1a137b| ig":"schema"}..{ 00000010 +|226c616d 62646122 3a227363 68656d61| "lambda":"schema 00000020 +|227d2214 68747470 733a2f2f 6578616d| "}".https://exam 00000030 +|706c652f 646f6373 2ae6010a 0e6f6175| ple/docs*....oau 00000040 +|74682d70 726f7669 64657212 10687474| th-provider..htt 00000050 +|70733a2f 2f617574 682d7572 6c1a1468| ps://auth-url..h 00000060 +|74747073 3a2f2f61 63636573 732d746f| ttps://access-to 00000070 +|6b656e22 03666f6f 2a1a0a07 6864722d| ken".foo*...hdr- 00000080 +|6f6e6512 0f7b2268 656c6c6f 223a2268| one..{"hello":"h 00000090 +|6472227d 2a0f0a07 6864722d 74776f12| dr"}*...hdr-two. 
000000a0 +|0434322e 35320b0a 03666f6f 12047472| .42.52...foo..tr 000000b0 +|7565320e 0a036b65 79120722 76616c75| ue2...key.."valu 000000c0 +|65223a15 68747470 733a2f2f 72656672| e":.https://refr 000000d0 +|6573682d 746f6b65 6e420872 65667265| esh-tokenB.refre 000000e0 +|7368214a 1e0a0968 64722d74 68726565| sh!J...hdr-three 000000f0 +|12117b22 72656672 65736822 3a226864| ..{"refresh":"hd 00000100 +|72227d52 100a0661 63636573 73120622| r"}R...access.." 00000110 +|68657265 225a0450 4f535462 04504f53| here"Z.POSTb.POS 00000120 +|5412310a 0208010a 0012290a 1e66696c| T.1.......)..fil 00000130 +|653a2f2f 2f70726f 6a656374 2f726f6f| e:///project/roo 00000140 +|742f6465 6e6f2e6a 736f6e12 07636f6e| t/deno.json..con 00000150 +|74656e74 1a002215 0a137b22 7075626c| tent.."...{"publ 00000160 +|69736865 64223a22 646f6322 7d2a0032| ished":"doc"}*.2 00000170 +|180a160a 127b2273 74617465 223a2275| .....{"state":"u 00000180 +|70646174 65227d10 01a2061f 0a15666c| pdate"}.......fl 00000190 +|6f773a2f 2f696e74 65726e61 6c2e7468| ow://internal.th 000001a0 +|696e6712 06120248 691801| ing....Hi.. 000001b0 + 000001bb diff --git a/crates/proto-flow/tests/snapshots/regression__materialize_request_json.snap b/crates/proto-flow/tests/snapshots/regression__materialize_request_json.snap index ff4f8da174..2d08f2a361 100644 --- a/crates/proto-flow/tests/snapshots/regression__materialize_request_json.snap +++ b/crates/proto-flow/tests/snapshots/regression__materialize_request_json.snap @@ -85,16 +85,6 @@ expression: json_test(msg) "other/field": 42.5 } } - ], - "networkPorts": [ - { - "number": 8080, - "protocol": "https", - "public": true - }, - { - "number": 9000 - } ] }, "apply": { diff --git a/crates/proto-flow/tests/snapshots/regression__materialize_request_proto.snap b/crates/proto-flow/tests/snapshots/regression__materialize_request_proto.snap index 37741ab57b..a569785a66 100644 --- a/crates/proto-flow/tests/snapshots/regression__materialize_request_proto.snap +++ b/crates/proto-flow/tests/snapshots/regression__materialize_request_proto.snap @@ -3,7 +3,7 @@ source: crates/proto-flow/tests/regression.rs expression: proto_test(msg) --- |0a150808 12117b22 73706563 223a2263| ......{"spec":"c 00000000 -|6f6e6669 67227d12 95040a18 76616c69| onfig"}.....vali 00000010 +|6f6e6669 67227d12 82040a18 76616c69| onfig"}.....vali 00000010 |64617465 2f6d6174 65726961 6c697a61| date/materializa 00000020 |74696f6e 10081a15 7b227661 6c696461| tion....{"valida 00000030 |7465223a 22636f6e 66696722 7d22cc03| te":"config"}".. 00000040 @@ -35,140 +35,139 @@ expression: proto_test(msg) |73636865 6d61227d 1a1d0a07 615f6669| schema"}....a_fi 000001e0 |656c6412 127b2266 69656c64 223a2263| eld..{"field":"c 000001f0 |6f6e6669 67227d1a 130a0b6f 74686572| onfig"}....other 00000200 -|2f666965 6c641204 34322e35 2a0c0890| /field..42.5*... 00000210 -|3f120568 74747073 18012a03 08a8461a| ?..https..*...F. 00000220 -|9d070a8d 070a1661 636d6543 6f2f6d61| .......acmeCo/ma 00000230 -|74657269 616c697a 6174696f 6e10081a| terialization... 00000240 -|1d7b226d 61746572 69616c69 7a65223a| .{"materialize": 00000250 -|7b22636f 6e666967 223a3432 7d7d2281| {"config":42}}". 00000260 -|050a157b 22726573 6f757263 65223a22| ...{"resource":" 00000270 -|636f6e66 6967227d 1204736f 6d651204| config"}..some.. 00000280 -|70617468 1afe020a 1161636d 65436f2f| path.....acmeCo/ 00000290 -|636f6c6c 65637469 6f6e1a08 2f6b6579| collection../key 000002a0 -|2f6f6e65 1a082f6b 65792f74 776f220b| /one../key/two". 
000002b0 -|2f5f6d65 74612f75 7569642a 04747970| /_meta/uuid*.typ 000002c0 -|652a0672 6567696f 6e325d0a 092f6a73| e*.region2]../js 000002d0 -|6f6e2f70 74721207 612d6669 656c6420| on/ptr..a-field 000002e0 -|0132450a 07696e74 65676572 0a067374| .2E..integer..st 000002f0 -|72696e67 1a131a03 74797022 04646174| ring....typ".dat 00000300 -|6530b960 3a03656e 63220574 69746c65| e0.`:.enc".title 00000310 -|2a046465 7363320e 7b226465 66223a22| *.desc2.{"def":" 00000320 -|61756c74 227d4001 3a0e7b22 61636b22| ault"}@.:.{"ack" 00000330 -|3a227472 7565227d 42127b22 77726974| :"true"}B.{"writ 00000340 -|65223a22 73636865 6d61227d 4aa3010a| e":"schema"}J... 00000350 -|12706172 74697469 6f6e2f74 656d706c| .partition/templ 00000360 -|61746510 031a400a 1e0a0f65 73747561| ate...@....estua 00000370 -|72792e64 65762f66 6f6f120b 6c616265| ry.dev/foo..labe 00000380 -|6c2d7661 6c75650a 1e0a0f65 73747561| l-value....estua 00000390 -|72792e64 65762f62 6172120b 6f746865| ry.dev/bar..othe 000003a0 -|722d7661 6c756522 4208e9ec 0610031a| r-value"B....... 000003b0 -|1273333a 2f2f6275 636b6574 2f707265| .s3://bucket/pre 000003c0 -|66697822 0308ac02 3207083e 1080e59a| fix"....2..>.... 000003d0 -|773a1850 6174687b 7b506f73 74666978| w:.Path{{Postfix 000003e0 -|2e54656d 706c6174 657d7d30 0438cfb0| .Template}}0.8.. 000003f0 -|f5015a11 7b227265 6164223a 22736368| ..Z.{"read":"sch 00000400 -|656d6122 7d22550a 076b6579 2f6f6e65| ema"}"U..key/one 00000410 -|12077661 6c2f7477 6f1a0d66 6c6f775f| ..val/two..flow_ 00000420 -|646f6375 6d656e74 221d0a07 615f6669| document"...a_fi 00000430 -|656c6412 127b2266 69656c64 223a2263| eld..{"field":"c 00000440 -|6f6e6669 67227d22 130a0b6f 74686572| onfig"}"...other 00000450 -|2f666965 6c641204 34322e35 3a420a40| /field..42.5:B.@ 00000460 -|0a1e0a0f 65737475 6172792e 6465762f| ....estuary.dev/ 00000470 -|666f6f12 0b6c6162 656c2d76 616c7565| foo..label-value 00000480 -|0a1e0a0f 65737475 6172792e 6465762f| ....estuary.dev/ 00000490 -|62617212 0b6f7468 65722d76 616c7565| bar..other-value 000004a0 -|422e6d61 74657269 616c697a 652f6163| B.materialize/ac 000004b0 -|6d65436f 2f6d6174 65726961 6c697a61| meCo/materializa 000004c0 -|74696f6e 2f736f6d 65253230 70617468| tion/some%20path 000004d0 -|48035206 08cbc8d6 a6065a06 0880888b| H.R.......Z..... 000004e0 -|a1062a80 010a0e73 68617264 2f74656d| ..*....shard/tem 000004f0 -|706c6174 651a0f72 65636f76 6572792f| plate..recovery/ 00000500 -|70726566 6978220b 68696e74 2f707265| prefix".hint/pre 00000510 -|66697828 03320208 3c480152 400a1e0a| fix(.2...... 00000770 -|773a1850 6174687b 7b506f73 74666978| w:.Path{{Postfix 00000780 -|2e54656d 706c6174 657d7d30 0438cfb0| .Template}}0.8.. 
00000790 -|f5015a11 7b227265 6164223a 22736368| ..Z.{"read":"sch 000007a0 -|656d6122 7d22550a 076b6579 2f6f6e65| ema"}"U..key/one 000007b0 -|12077661 6c2f7477 6f1a0d66 6c6f775f| ..val/two..flow_ 000007c0 -|646f6375 6d656e74 221d0a07 615f6669| document"...a_fi 000007d0 -|656c6412 127b2266 69656c64 223a2263| eld..{"field":"c 000007e0 -|6f6e6669 67227d22 130a0b6f 74686572| onfig"}"...other 000007f0 -|2f666965 6c641204 34322e35 3a420a40| /field..42.5:B.@ 00000800 -|0a1e0a0f 65737475 6172792e 6465762f| ....estuary.dev/ 00000810 -|666f6f12 0b6c6162 656c2d76 616c7565| foo..label-value 00000820 -|0a1e0a0f 65737475 6172792e 6465762f| ....estuary.dev/ 00000830 -|62617212 0b6f7468 65722d76 616c7565| bar..other-value 00000840 -|422e6d61 74657269 616c697a 652f6163| B.materialize/ac 00000850 -|6d65436f 2f6d6174 65726961 6c697a61| meCo/materializa 00000860 -|74696f6e 2f736f6d 65253230 70617468| tion/some%20path 00000870 -|48035206 08cbc8d6 a6065a06 0880888b| H.R.......Z..... 00000880 -|a1062a80 010a0e73 68617264 2f74656d| ..*....shard/tem 00000890 -|706c6174 651a0f72 65636f76 6572792f| plate..recovery/ 000008a0 -|70726566 6978220b 68696e74 2f707265| prefix".hint/pre 000008b0 -|66697828 03320208 3c480152 400a1e0a| fix(.2......w:. 000003c0 +|50617468 7b7b506f 73746669 782e5465| Path{{Postfix.Te 000003d0 +|6d706c61 74657d7d 300438cf b0f5015a| mplate}}0.8....Z 000003e0 +|117b2272 65616422 3a227363 68656d61| .{"read":"schema 000003f0 +|227d2255 0a076b65 792f6f6e 65120776| "}"U..key/one..v 00000400 +|616c2f74 776f1a0d 666c6f77 5f646f63| al/two..flow_doc 00000410 +|756d656e 74221d0a 07615f66 69656c64| ument"...a_field 00000420 +|12127b22 6669656c 64223a22 636f6e66| ..{"field":"conf 00000430 +|6967227d 22130a0b 6f746865 722f6669| ig"}"...other/fi 00000440 +|656c6412 0434322e 353a420a 400a1e0a| eld..42.5:B.@... 00000450 +|0f657374 75617279 2e646576 2f666f6f| .estuary.dev/foo 00000460 +|120b6c61 62656c2d 76616c75 650a1e0a| ..label-value... 00000470 +|0f657374 75617279 2e646576 2f626172| .estuary.dev/bar 00000480 +|120b6f74 6865722d 76616c75 65422e6d| ..other-valueB.m 00000490 +|61746572 69616c69 7a652f61 636d6543| aterialize/acmeC 000004a0 +|6f2f6d61 74657269 616c697a 6174696f| o/materializatio 000004b0 +|6e2f736f 6d652532 30706174 68480352| n/some%20pathH.R 000004c0 +|0608cbc8 d6a6065a 06088088 8ba1062a| .......Z.......* 000004d0 +|80010a0e 73686172 642f7465 6d706c61| ....shard/templa 000004e0 +|74651a0f 7265636f 76657279 2f707265| te..recovery/pre 000004f0 +|66697822 0b68696e 742f7072 65666978| fix".hint/prefix 00000500 +|28033202 083c4801 52400a1e 0a0f6573| (.2......w:. 00000760 +|50617468 7b7b506f 73746669 782e5465| Path{{Postfix.Te 00000770 +|6d706c61 74657d7d 300438cf b0f5015a| mplate}}0.8....Z 00000780 +|117b2272 65616422 3a227363 68656d61| .{"read":"schema 00000790 +|227d2255 0a076b65 792f6f6e 65120776| "}"U..key/one..v 000007a0 +|616c2f74 776f1a0d 666c6f77 5f646f63| al/two..flow_doc 000007b0 +|756d656e 74221d0a 07615f66 69656c64| ument"...a_field 000007c0 +|12127b22 6669656c 64223a22 636f6e66| ..{"field":"conf 000007d0 +|6967227d 22130a0b 6f746865 722f6669| ig"}"...other/fi 000007e0 +|656c6412 0434322e 353a420a 400a1e0a| eld..42.5:B.@... 000007f0 +|0f657374 75617279 2e646576 2f666f6f| .estuary.dev/foo 00000800 +|120b6c61 62656c2d 76616c75 650a1e0a| ..label-value... 
00000810 +|0f657374 75617279 2e646576 2f626172| .estuary.dev/bar 00000820 +|120b6f74 6865722d 76616c75 65422e6d| ..other-valueB.m 00000830 +|61746572 69616c69 7a652f61 636d6543| aterialize/acmeC 00000840 +|6f2f6d61 74657269 616c697a 6174696f| o/materializatio 00000850 +|6e2f736f 6d652532 30706174 68480352| n/some%20pathH.R 00000860 +|0608cbc8 d6a6065a 06088088 8ba1062a| .......Z.......* 00000870 +|80010a0e 73686172 642f7465 6d706c61| ....shard/templa 00000880 +|74651a0f 7265636f 76657279 2f707265| te..recovery/pre 00000890 +|66697822 0b68696e 742f7072 65666978| fix".hint/prefix 000008a0 +|28033202 083c4801 52400a1e 0a0f6573| (.2..( + image: String, + log_handler: L, + network: String, + request_rx: R, + task_name: &str, +) -> tonic::Result>> +where + L: Fn(&ops::Log) + Send + Sync + 'static, + R: Stream> + Send + Unpin + 'static, +{ + let (container, channel, guard) = container::start( + &image, + log_handler, + &network, + task_name, + ops::TaskType::Capture, + ) + .await + .map_err(crate::anyhow_to_status)?; + + // Adapt requests by identifying instances that carry endpoint configuration. + // Verify they remain compatible with our started container, and then unseal their config. + // Or if they're not compatible, then map to Status::aborted(). + let request_rx = request_rx.and_then(move |mut request| { + let must_unseal = if matches!( + request, + Request { spec: Some(_), .. } + | Request { + discover: Some(_), + .. + } + | Request { + validate: Some(_), + .. + } + | Request { apply: Some(_), .. } + | Request { open: Some(_), .. } + ) { + Some(image.clone()) // Outer closure owns `image`. + } else { + None + }; + + async move { + if let Some(expect_image) = must_unseal { + let (endpoint, config_json) = + extract_endpoint(&mut request).map_err(crate::anyhow_to_status)?; + + let sealed_config = match endpoint { + models::CaptureEndpoint::Connector(models::ConnectorConfig { + image: this_image, + config, + }) if expect_image == this_image => config, + + _ => return Err(tonic::Status::aborted("connector image has changed")), + }; + + *config_json = unseal::decrypt_sops(&sealed_config) + .await + .map_err(crate::anyhow_to_status)? + .to_string(); + } + + Ok(request) + } + }); + + let (request_rx, error_rx) = eof_on_error(request_rx); + + // Start a capture RPC. + let container_response = proto_grpc::capture::connector_client::ConnectorClient::new(channel) + .capture(request_rx) + .await?; + let response_rx = container_response.into_inner(); + + // Adapt responses by enriching the first Response with the image Container. + let mut container = Some(container); + let response_rx = response_rx.and_then(move |mut response| { + _ = &guard; // Move so it's retained while responses are still being read. 
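+ // Because this is a `move` closure, referencing `guard` captures it by value, so the
+ // container and its temporary files are retained until the response stream is dropped.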
+ + if container.is_some() { + response + .set_internal(&mut bytes::BytesMut::new(), |internal| { + internal.container = container.take(); + }) + .unwrap(); + } + futures::future::ready(Ok(response)) + }); + + Ok(inject_error(response_rx, error_rx)) +} diff --git a/crates/runtime/src/capture/mod.rs b/crates/runtime/src/capture/mod.rs new file mode 100644 index 0000000000..100391085a --- /dev/null +++ b/crates/runtime/src/capture/mod.rs @@ -0,0 +1,169 @@ +use super::Runtime; +use anyhow::Context; +use futures::{Stream, StreamExt, TryStreamExt}; +use proto_flow::capture::{Request, Response}; +use proto_flow::flow::capture_spec::ConnectorType; +use proto_flow::ops; +use proto_flow::runtime::CaptureRequestExt; +use std::pin::Pin; +use std::sync::Arc; + +// Notes on how we can structure capture middleware: + +// Request loop: +// - Spec / Discover / Validate / Apply: Unseal. Forward request. +// - Open: Rebuild State. Unseal. Retain explicit-ack. Forward request. +// - Acknowledge: Notify response loop. Forward iff explicit-ack. + +// Response loop: +// - Spec / Discovered / Validated / Applied: Forward response. +// - Opened: Acquire State. Re-init combiners. Forward response. +// - Captured: Validate & add to combiner. +// - Checkpoint: Reduce checkpoint. +// If "full": block until Acknowledge notification is ready. +// If Acknowledge notification is ready: +// Drain combiner into forwarded Captured. +// Forward Checkpoint enriched with stats. + +mod image; + +pub type BoxStream = futures::stream::BoxStream<'static, tonic::Result>; + +#[tonic::async_trait] +impl proto_grpc::capture::connector_server::Connector for Runtime +where + L: Fn(&ops::Log) + Send + Sync + Clone + 'static, +{ + type CaptureStream = BoxStream; + + async fn capture( + &self, + request: tonic::Request>, + ) -> tonic::Result> { + let conn_info = request + .extensions() + .get::(); + tracing::debug!(?request, ?conn_info, "started capture request"); + + let response_rx = self.clone().serve_capture(request.into_inner()).await?; + + Ok(tonic::Response::new(response_rx)) + } +} + +impl Runtime +where + L: Fn(&ops::Log) + Send + Sync + Clone + 'static, +{ + pub async fn serve_capture(self, request_rx: In) -> tonic::Result + where + In: Stream> + Send + Unpin + 'static, + { + let mut request_rx = request_rx.peekable(); + + let mut peek_request = match Pin::new(&mut request_rx).peek().await { + Some(Ok(peek)) => peek.clone(), + Some(Err(status)) => return Err(status.clone()), + None => return Ok(futures::stream::empty().boxed()), + }; + let (endpoint, _) = extract_endpoint(&mut peek_request).map_err(crate::anyhow_to_status)?; + + // NOTE(johnny): To debug requests / responses at any layer of this interceptor stack, try: + // let request_rx = request_rx.inspect_ok(|request| { + // eprintln!("REQUEST: {}", serde_json::to_string(request).unwrap()); + // }); + // + // let response_rx = response_rx.inspect_ok(|response| { + // eprintln!("RESPONSE: {}", serde_json::to_string(response).unwrap()); + // }); + + // Request interceptor which adjusts the dynamic log level based on internal shard labels. + let request_rx = adjust_log_level(request_rx, self.set_log_level); + + let response_rx = match endpoint { + models::CaptureEndpoint::Connector(models::ConnectorConfig { image, .. }) => { + image::image_connector( + image, + self.log_handler, + self.container_network, + request_rx, + &self.task_name, + ) + .await? 
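+ // Box the stream so it matches the BoxStream type returned by serve_capture.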
+ .boxed() + } + }; + + Ok(response_rx) + } +} + +pub fn adjust_log_level( + request_rx: R, + set_log_level: Option>, +) -> impl Stream> +where + R: Stream> + Send + 'static, +{ + request_rx.inspect_ok(move |request| { + let Some(Ok(CaptureRequestExt{labels: Some(ops::ShardLabeling { log_level, .. })})) = request.get_internal() else { return }; + + if let (Some(log_level), Some(set_log_level)) = + (ops::log::Level::from_i32(log_level), &set_log_level) + { + (set_log_level)(log_level); + } + }) +} + +// Returns the CaptureEndpoint of this Request, and a mutable reference to its inner config_json. +fn extract_endpoint<'r>( + request: &'r mut Request, +) -> anyhow::Result<(models::CaptureEndpoint, &'r mut String)> { + let (connector_type, config_json) = match request { + Request { + spec: Some(spec), .. + } => (spec.connector_type, &mut spec.config_json), + Request { + discover: Some(discover), + .. + } => (discover.connector_type, &mut discover.config_json), + Request { + validate: Some(validate), + .. + } => (validate.connector_type, &mut validate.config_json), + Request { + apply: Some(apply), .. + } => { + let inner = apply + .capture + .as_mut() + .context("`apply` missing required `capture`")?; + + (inner.connector_type, &mut inner.config_json) + } + Request { + open: Some(open), .. + } => { + let inner = open + .capture + .as_mut() + .context("`open` missing required `capture`")?; + + (inner.connector_type, &mut inner.config_json) + } + + _ => anyhow::bail!("request {request:?} does not contain an endpoint"), + }; + + if connector_type == ConnectorType::Image as i32 { + Ok(( + models::CaptureEndpoint::Connector( + serde_json::from_str(config_json).context("parsing connector config")?, + ), + config_json, + )) + } else { + anyhow::bail!("invalid connector type: {connector_type}"); + } +} diff --git a/crates/runtime/src/container.rs b/crates/runtime/src/container.rs new file mode 100644 index 0000000000..7b7314bc44 --- /dev/null +++ b/crates/runtime/src/container.rs @@ -0,0 +1,411 @@ +use anyhow::Context; +use futures::channel::oneshot; +use proto_flow::{flow, ops, runtime}; +use tokio::io::AsyncBufReadExt; + +// Port on which flow-connector-init listens for requests. +// This is its default, made explicit here. +// This number was chosen because it seemed unlikely that a connector would try to use it. +// The main thing is that we want to avoid any common port numbers to avoid conflicts with +// connectors. +const CONNECTOR_INIT_PORT: u16 = 49092; + +/// Start an image connector container, returning its description and a dialed tonic Channel. +/// The container is attached to the given `network`, and its logs are dispatched to `log_handler`. +/// `task_name` and `task_type` are used only to label the container. +pub async fn start( + image: &str, + log_handler: L, + network: &str, + task_name: &str, + task_type: ops::TaskType, +) -> anyhow::Result<(runtime::Container, tonic::transport::Channel, Guard)> +where + L: Fn(&ops::Log) + Send + Sync + 'static, +{ + // We can't start a container without flow-connector-init. + let connector_init = locate_bin::locate("flow-connector-init") + .context("failed to locate flow-connector-init")?; + + // Generate a unique name for this container instance. Pull and inspect its image. 
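+ // The inspect output yields the image's ExposedPorts and labels (parsed into NetworkPorts below),
+ // and is also bind-mounted into the container for flow-connector-init to read.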
+ let name = unique_container_name(); + let inspect_content = inspect_image(image.to_string()).await?; + let network_ports = parse_network_ports(&inspect_content)?; + + // Many operational contexts only allow for docker volume mounts + // from certain locations: + // * Docker for Mac restricts file shares to /User, /tmp, and a couple others. + // * Estuary's current K8s deployments use a separate docker daemon container + // within the pod, having a common /tmp tempdir volume. + // + // So, we use temporaries to ensure that files are readable within the container. + let tmp_connector_init = + tempfile::NamedTempFile::new().context("creating temp for flow-connector-init")?; + let mut tmp_docker_inspect = + tempfile::NamedTempFile::new().context("creating temp for docker inspect output")?; + + // Change mode of `docker_inspect` to be readable by all users. + // This is required because the effective container user may have a different UID. + #[cfg(unix)] + { + use std::os::unix::prelude::PermissionsExt; + let mut perms = tmp_docker_inspect.as_file_mut().metadata()?.permissions(); + perms.set_mode(0o644); + tmp_docker_inspect.as_file_mut().set_permissions(perms)?; + } + + // Write `inspect_content` output to its temporary file. + // Copy `flow-connector-init` to its temporary file. + ((), _) = futures::try_join!( + tokio::fs::write(tmp_docker_inspect.path(), &inspect_content), + tokio::fs::copy(connector_init, tmp_connector_init.path()) + ) + .context("writing container temporary file")?; + + // Close our open files but retain a deletion guard. + let tmp_connector_init = tmp_connector_init.into_temp_path(); + let tmp_docker_inspect = tmp_docker_inspect.into_temp_path(); + + // This is default `docker run` behavior if --network is not provided. + let network = if network == "" { "bridge" } else { network }; + + let mut process: async_process::Child = async_process::Command::new("docker") + .args([ + "run".to_string(), + // Remove the docker container upon its exit. + "--rm".to_string(), + // Addressable name of this connector. + format!("--name={name}"), + // Network to which the container should attach. + format!("--network={}", network), + // The entrypoint into a connector is always flow-connector-init, + // which will delegate to the actual entrypoint of the connector. + "--entrypoint=/flow-connector-init".to_string(), + // Mount the flow-connector-init binary and `docker inspect` output. + format!( + "--mount=type=bind,source={},target=/flow-connector-init", + tmp_connector_init.to_string_lossy() + ), + format!( + "--mount=type=bind,source={},target=/image-inspect.json", + tmp_docker_inspect.to_string_lossy(), + ), + // Thread-through the logging configuration of the connector. + "--env=LOG_FORMAT=json".to_string(), + // Cgroup memory / CPU resource limits. + // TODO(johnny): we intend to tighten these down further, over time. + "--memory=1g".to_string(), + "--cpus=2".to_string(), + // Attach labels that let us group connector resource usage under a few dimensions. + format!("--label=image={}", image), + format!("--label=task-name={}", task_name), + format!("--label=task-type={}", task_type.as_str_name()), + // Image to run. + image.to_string(), + // The following are arguments of flow-connector-init, not docker. 
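+ // flow-connector-init reads the mounted inspect output and serves the connector's gRPC
+ // protocol on CONNECTOR_INIT_PORT, which is dialed below once the container is ready.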
+ "--image-inspect-json-path=/image-inspect.json".to_string(), + format!("--port={CONNECTOR_INIT_PORT}"), + ]) + .stdin(async_process::Stdio::null()) + .stdout(async_process::Stdio::null()) + .stderr(async_process::Stdio::piped()) + .spawn() + .context("failed to docker run the connector")? + .into(); + + // We've started the container and will need to inspect for its IP address. + // Docker has unfortunate race handling and will happily return an empty IPAddress for + // a created or even a running container while it's still performing background setup. + // The only reliable way to determine if the container is "ready" is to wait for + // our inner flow-connector-init process to produce its startup log. + let (ready_tx, ready_rx) = oneshot::channel::<()>(); + + // Service process stderr by decoding ops::Logs and sending to our handler. + let stderr = process.stderr.take().unwrap(); + tokio::spawn(async move { + let mut stderr = tokio::io::BufReader::new(stderr); + let mut line = String::new(); + let mut _ready_tx = Some(ready_tx); + + loop { + line.clear(); + + match stderr.read_line(&mut line).await { + Err(error) => { + tracing::error!(%error, "failed to read from connector stderr"); + break; + } + Ok(0) => break, // Clean EOF. + Ok(_) => (), + } + _ready_tx = None; // Signal that we're ready. + + match serde_json::from_str(&line) { + Ok(log) => log_handler(&log), + Err(error) => { + tracing::error!(?error, %line, "failed to parse ops::Log from container"); + } + } + } + }); + + // Wait for container to become ready, or close its stderr (likely due to a crash), + // or for thirty seconds to elapse (timeout). + tokio::select! { + _ = tokio::time::sleep(std::time::Duration::from_secs(30)) => { + anyhow::bail!("timeout waiting for the container to become ready"); + } + _ = ready_rx => (), + } + if let Some(exit_status) = process.try_wait().expect("wait should never fail") { + anyhow::bail!("container crashed unexpectedly on startup: {exit_status:?}"); + } + + // Ask docker for the IP address it assigned to the container. + let ip_addr = inspect_container_ip(&name) + .await + .context("resolving docker container IP")?; + + // Dial the gRPC endpoint hosted by `flow-connector-init` within the container context. + let channel = + tonic::transport::Endpoint::new(format!("http://{ip_addr}:{CONNECTOR_INIT_PORT}")) + .expect("formatting endpoint address") + .connect_timeout(std::time::Duration::from_secs(5)) + .connect() + .await + .context("failed to connect to connector-init inside of container")?; + + tracing::info!(%image, %name, %task_name, ?task_type, "started connector"); + + Ok(( + runtime::Container { + ip_addr: format!("{ip_addr}"), + network_ports: network_ports.clone(), + }, + channel, + Guard { + _tmp_connector_init: tmp_connector_init, + _tmp_docker_inspect: tmp_docker_inspect, + _process: process, + }, + )) +} + +/// Guard contains a running image container instance, +/// which will be stopped and cleaned up when the Guard is dropped. 
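+/// It owns the temporary bind-mounted files and the `docker run` child process;
+/// the `--rm` flag ensures Docker removes the container once that process exits.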
+pub struct Guard { + _tmp_connector_init: tempfile::TempPath, + _tmp_docker_inspect: tempfile::TempPath, + _process: async_process::Child, +} + +fn unique_container_name() -> String { + let n = std::time::SystemTime::now() + .duration_since(std::time::UNIX_EPOCH) + .unwrap() + .as_nanos(); + + format!("fc-{:x}", n as u32) +} + +async fn docker_cmd(args: &[S]) -> anyhow::Result> +where + S: AsRef + std::fmt::Debug, +{ + let output = async_process::output(async_process::Command::new("docker").args(args)) + .await + .with_context(|| format!("failed to run docker command {args:?}"))?; + + if !output.status.success() { + anyhow::bail!( + "docker command {args:?} failed: {}", + String::from_utf8_lossy(&output.stderr), + ); + } + Ok(output.stdout) +} + +async fn inspect_image(image: String) -> anyhow::Result> { + if !image.ends_with(":local") { + _ = docker_cmd(&["pull", &image, "--quiet"]).await?; + } + docker_cmd(&["inspect", &image]).await +} + +async fn inspect_container_ip(name: &str) -> anyhow::Result { + let output = docker_cmd(&[ + "inspect", + "--format", + "{{range.NetworkSettings.Networks}}{{.IPAddress}}{{end}}", + name, + ]) + .await + .context("failed to run docker inspect")?; + + let ip_addr = String::from_utf8_lossy(&output); + let ip_addr: std::net::IpAddr = ip_addr.trim_end().parse().with_context(|| { + format!( + "failed to parse IP address from docker inspect output {:?}", + ip_addr.trim_end() + ) + })?; + + Ok(ip_addr) +} + +fn parse_network_ports(content: &[u8]) -> anyhow::Result> { + use std::collections::BTreeMap; + + #[derive(serde::Deserialize)] + #[serde(rename_all = "PascalCase")] + struct InspectConfig { + /// According to the [OCI spec](https://github.com/opencontainers/image-spec/blob/d60099175f88c47cd379c4738d158884749ed235/config.md?plain=1#L125) + /// `ExposedPorts` is a map where the keys are in the format `1234/tcp`, `456/udp`, or `789` (implicit default of tcp), and the values are + /// empty objects. The choice of `serde_json::Value` here is meant to convey that the actual values are irrelevant. + #[serde(default)] + exposed_ports: BTreeMap, + #[serde(default)] + labels: BTreeMap, + } + + #[derive(serde::Deserialize)] + #[serde(rename_all = "PascalCase")] + struct InspectJson { + config: InspectConfig, + } + + let deserialized: Vec = serde_json::from_slice(&content).with_context(|| { + format!( + "failed to parse `docker inspect` output: {}", + String::from_utf8_lossy(&content) + ) + })?; + + if deserialized.len() != 1 { + anyhow::bail!("expected 1 image, got {}", deserialized.len()); + } + + let mut ports = Vec::new(); + for (exposed_port, _) in deserialized[0].config.exposed_ports.iter() { + // We're unable to support UDP at this time. + if exposed_port.ends_with("/udp") { + continue; + } + // Technically, the ports are allowed to appear without the '/tcp' suffix, though + // I haven't actually observed that in practice. 
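+ // Strip an optional '/tcp' suffix and parse what remains as the port number.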
+ let exposed_port = exposed_port.strip_suffix("/tcp").unwrap_or(exposed_port); + let number = exposed_port.parse::().with_context(|| { + format!("invalid key in inspected Config.ExposedPorts '{exposed_port}'") + })?; + + let protocol_label = format!("dev.estuary.port-proto.{number}"); + let protocol = deserialized[0].config.labels.get(&protocol_label).cloned(); + + let public_label = format!("dev.estuary.port-public.{number}"); + let public = deserialized[0] + .config + .labels + .get(&public_label) + .map(String::as_str) + .unwrap_or("false"); + let public = public.parse::() + .with_context(|| format!("invalid '{public_label}' label value: '{public}', must be either 'true' or 'false'"))?; + + ports.push(flow::NetworkPort { + number: number as u32, + protocol: protocol.unwrap_or_default(), + public, + }); + } + + Ok(ports) +} + +#[cfg(test)] +mod test { + use super::{parse_network_ports, start}; + use futures::stream::StreamExt; + use proto_flow::flow; + use serde_json::json; + + #[tokio::test] + async fn test_http_ingest_spec() { + if let Err(_) = locate_bin::locate("flow-connector-init") { + // Skip if `flow-connector-init` isn't available (yet). We're probably on CI. + // This test is useful as a sanity check for local development + // and we have plenty of other coverage during CI. + return; + } + + let (container, channel, _guard) = start( + "ghcr.io/estuary/source-http-ingest:dev", + ops::tracing_log_handler, + "", + "a-task-name", + proto_flow::ops::TaskType::Capture, + ) + .await + .unwrap(); + + let mut rx = proto_grpc::capture::connector_client::ConnectorClient::new(channel) + .capture(futures::stream::once(async move { + serde_json::from_value(json!({ + "spec": {"connectorType": "IMAGE", "config": {}} + })) + .unwrap() + })) + .await + .unwrap() + .into_inner(); + + let resp = rx + .next() + .await + .expect("should get a spec response") + .unwrap(); + + assert!(resp.spec.is_some()); + + assert_eq!( + container.network_ports, + [flow::NetworkPort { + number: 8080, + protocol: String::new(), + public: true + }] + ); + } + + #[test] + fn test_parsing_network_ports() { + let fixture = json!([ + { + "Id": "foo", + "Config":{ + "ExposedPorts": {"567/tcp":{}, "123/udp": {}, "789":{} }, + "Labels":{"dev.estuary.port-public.567":"true","dev.estuary.port-proto.789":"h2"} + } + } + ]); + let ports = parse_network_ports(fixture.to_string().as_bytes()).unwrap(); + + assert_eq!( + ports, + [ + flow::NetworkPort { + number: 567, + protocol: String::new(), + public: true + }, + flow::NetworkPort { + number: 789, + protocol: "h2".to_string(), + public: false + }, + ] + ); + + let fixture = json!([{"Invalid": "Inspection"}]); + parse_network_ports(fixture.to_string().as_bytes()).unwrap_err(); + } +} diff --git a/crates/runtime/src/derive/combine.rs b/crates/runtime/src/derive/combine.rs index 6ae5531e62..0b6b31acf6 100644 --- a/crates/runtime/src/derive/combine.rs +++ b/crates/runtime/src/derive/combine.rs @@ -1,4 +1,4 @@ -use super::anyhow_to_status; +use crate::anyhow_to_status; use anyhow::Context; use futures::{channel::mpsc, Future, SinkExt, Stream, StreamExt, TryStreamExt}; use prost::Message; diff --git a/crates/runtime/src/derive/connectors.rs b/crates/runtime/src/derive/connectors.rs deleted file mode 100644 index cdfd9c4a8b..0000000000 --- a/crates/runtime/src/derive/connectors.rs +++ /dev/null @@ -1,46 +0,0 @@ -use futures::Stream; -use proto_flow::derive::{Request, Response}; -use proto_flow::ops; -use std::path::Path; - -pub fn typescript_connector( - _peek_request: &Request, - 
log_handler: H, - request_rx: R, -) -> tonic::Result>> -where - H: Fn(&ops::Log) + Send + Sync + 'static, - R: futures::stream::Stream> + Send + 'static, -{ - // Look for `flowctl` alongside the current running program (or perhaps it *is* the - // current running program). - let this_program = std::env::args().next().unwrap(); - let mut flowctl = Path::new(&this_program).parent().unwrap().join("flowctl"); - - // Fall back to the $PATH. - if !flowctl.exists() { - flowctl = "flowctl".into(); - } else { - // If the executable does exist, then we need to pass it as an absolute path, - // because the `Command` does not handle relative paths. - flowctl = flowctl.canonicalize().unwrap(); - } - let cmd = connector_init::rpc::new_command(&[flowctl.to_str().unwrap(), "raw", "deno-derive"]); - - let response_rx = connector_init::rpc::bidi::( - cmd, - connector_init::Codec::Json, - request_rx, - log_handler, - )?; - - Ok(response_rx) -} - -/* -pub const DERIVE_DENO_PATH: &[&str] = &[ - "bash", - "-c", - "tee deno.input | flowctl raw deno-derive | tee deno.output", -]; -*/ diff --git a/crates/runtime/src/derive/image.rs b/crates/runtime/src/derive/image.rs new file mode 100644 index 0000000000..3dabdfd69e --- /dev/null +++ b/crates/runtime/src/derive/image.rs @@ -0,0 +1,94 @@ +use super::extract_endpoint; +use crate::{container, eof_on_error, inject_error, unseal}; +use futures::{Stream, TryStreamExt}; +use proto_flow::derive::{Request, Response}; +use proto_flow::ops; + +pub async fn image_connector( + image: String, + log_handler: L, + network: String, + request_rx: R, + task_name: &str, +) -> tonic::Result>> +where + L: Fn(&ops::Log) + Send + Sync + 'static, + R: Stream> + Send + Unpin + 'static, +{ + let (container, channel, guard) = container::start( + &image, + log_handler, + &network, + task_name, + ops::TaskType::Capture, + ) + .await + .map_err(crate::anyhow_to_status)?; + + // Adapt requests by identifying instances that carry endpoint configuration. + // Verify they remain compatible with our started container, and then unseal their config. + // Or if they're not compatible, then map to Status::aborted(). + let request_rx = request_rx.and_then(move |mut request| { + let must_unseal = if matches!( + request, + Request { spec: Some(_), .. } + | Request { + validate: Some(_), + .. + } + | Request { open: Some(_), .. } + ) { + Some(image.clone()) // Outer closure owns `image`. + } else { + None + }; + + async move { + if let Some(expect_image) = must_unseal { + let (endpoint, config_json) = + extract_endpoint(&mut request).map_err(crate::anyhow_to_status)?; + + let sealed_config = match endpoint { + models::DeriveUsing::Connector(models::ConnectorConfig { + image: this_image, + config, + }) if expect_image == this_image => config, + + _ => return Err(tonic::Status::aborted("connector image has changed")), + }; + + *config_json = unseal::decrypt_sops(&sealed_config) + .await + .map_err(crate::anyhow_to_status)? + .to_string(); + } + + Ok(request) + } + }); + + let (request_rx, error_rx) = eof_on_error(request_rx); + + // Start a derive RPC. + let container_response = proto_grpc::derive::connector_client::ConnectorClient::new(channel) + .derive(request_rx) + .await?; + let response_rx = container_response.into_inner(); + + // Adapt responses by enriching the first Response with the image Container. + let mut container = Some(container); + let response_rx = response_rx.and_then(move |mut response| { + _ = &guard; // Move so it's retained while responses are still being read. 
+ + if container.is_some() { + response + .set_internal(&mut bytes::BytesMut::new(), |internal| { + internal.container = container.take(); + }) + .unwrap(); + } + futures::future::ready(Ok(response)) + }); + + Ok(inject_error(response_rx, error_rx)) +} diff --git a/crates/runtime/src/derive/log_level.rs b/crates/runtime/src/derive/log_level.rs deleted file mode 100644 index b3df075d53..0000000000 --- a/crates/runtime/src/derive/log_level.rs +++ /dev/null @@ -1,32 +0,0 @@ -use futures::{Stream, TryStreamExt}; -use prost::Message; -use proto_flow::derive::Request; -use proto_flow::{ - ops, - runtime::{derive_request_ext, DeriveRequestExt}, -}; -use std::sync::Arc; - -pub fn adapt_requests( - request_rx: R, - set_log_level: Option>, -) -> impl Stream> -where - R: futures::stream::Stream> + Send + 'static, -{ - request_rx.inspect_ok(move |request| { - let Some(_open) = &request.open else { return }; - - let Ok(DeriveRequestExt { open: Some(derive_request_ext::Open{log_level, ..}) , .. }) = Message::decode( - request - .internal - .as_ref() - .map(|i| i.value.clone()) - .unwrap_or_default(), - ) else { return }; - - if let (Some(log_level), Some(set_log_level)) = (ops::log::Level::from_i32(log_level), &set_log_level) { - (set_log_level)(log_level); - } - }) -} diff --git a/crates/runtime/src/derive/middleware.rs b/crates/runtime/src/derive/middleware.rs deleted file mode 100644 index 0e8e38e48f..0000000000 --- a/crates/runtime/src/derive/middleware.rs +++ /dev/null @@ -1,171 +0,0 @@ -use super::anyhow_to_status; -use anyhow::Context; -use futures::{StreamExt, TryStreamExt}; -use proto_flow::derive::{request, Request, Response}; -use proto_flow::flow::collection_spec::{self, derivation::ConnectorType}; -use proto_flow::ops; -use std::pin::Pin; -use std::sync::Arc; - -#[derive(Clone)] -pub struct Middleware -where - H: Fn(&ops::Log) + Send + Sync + Clone + 'static, -{ - log_handler: H, - set_log_level: Option>, -} - -pub type BoxStream = std::pin::Pin> + Send>>; - -#[tonic::async_trait] -impl proto_grpc::derive::connector_server::Connector for Middleware -where - H: Fn(&ops::Log) + Send + Sync + Clone + 'static, -{ - type DeriveStream = BoxStream; - - async fn derive( - &self, - request: tonic::Request>, - ) -> tonic::Result> { - let conn_info = request - .extensions() - .get::(); - tracing::debug!(?request, ?conn_info, "started derive request"); - - let response_rx = self.clone().serve(request.into_inner()).await?; - - Ok(tonic::Response::new(response_rx)) - } -} - -impl Middleware -where - H: Fn(&ops::Log) + Send + Sync + Clone + 'static, -{ - pub fn new( - log_handler: H, - set_log_level: Option>, - ) -> Self { - Self { - log_handler, - set_log_level, - } - } - - pub async fn serve(self, request_rx: In) -> tonic::Result - where - In: futures::Stream> + Send + Unpin + 'static, - { - let mut request_rx = request_rx.peekable(); - - let peek = match Pin::new(&mut request_rx).peek().await { - Some(Ok(peek)) => peek.clone(), - Some(Err(status)) => return Err(status.clone()), - None => return Ok(futures::stream::empty().boxed()), - }; - - // NOTE(johnny): To debug requests / responses at any layer of this interceptor stack, try: - // let request_rx = request_rx.inspect_ok(|request| { - // eprintln!("REQUEST: {}", serde_json::to_string(request).unwrap()); - // }); - // - // let response_rx = response_rx.inspect_ok(|response| { - // eprintln!("RESPONSE: {}", serde_json::to_string(response).unwrap()); - // }); - - // Request interceptor which adjusts the dynamic log level with each Open. 
- let request_rx = super::log_level::adapt_requests(request_rx, self.set_log_level); - - // Request interceptor which filters Request.Read of Ack documents. - let request_rx = request_rx.try_filter(|request| { - let keep = if let Some(request::Read { - uuid: Some(uuid), .. - }) = &request.read - { - proto_gazette::message_flags::ACK_TXN & uuid.node == 0 // Not an ACK. - } else { - true - }; - futures::future::ready(keep) - }); - - // Request interceptor for combining over documents. - let (request_rx, combine_back) = - super::combine::adapt_requests(&peek, request_rx).map_err(anyhow_to_status)?; - - let response_rx = match temp_connector_type(&peek).map_err(anyhow_to_status)? { - ConnectorType::InvalidConnectorType => { - return Err(tonic::Status::invalid_argument("invalid connector type")); - } - ConnectorType::Sqlite => { - // Invoke the underlying SQLite connector. - let response_rx = ::derive_sqlite::connector(&peek, request_rx)?; - - // Response interceptor for combining over documents. - let response_rx = combine_back.adapt_responses(response_rx); - - response_rx.boxed() - } - ConnectorType::Image => { - return Err(tonic::Status::aborted("not implemented")); - } - ConnectorType::Typescript => { - // Request interceptor for stateful RocksDB storage. - let (request_rx, rocks_back) = - super::rocksdb::adapt_requests(&peek, request_rx).map_err(anyhow_to_status)?; - - // Invoke the underlying TypeScript connector. - let response_rx = - super::connectors::typescript_connector(&peek, self.log_handler, request_rx)?; - - // Response interceptor for stateful RocksDB storage. - let response_rx = rocks_back.adapt_responses(response_rx); - // Response interceptor for combining over documents. - let response_rx = combine_back.adapt_responses(response_rx); - - response_rx.boxed() - } - }; - - Ok(response_rx) - } - - pub async fn serve_unary(self, request: Request) -> tonic::Result { - let request_rx = futures::stream::once(async move { Ok(request) }).boxed(); - let mut responses: Vec = self.serve(request_rx).await?.try_collect().await?; - - if responses.len() != 1 { - return Err(tonic::Status::unknown( - "unary request didn't return a response", - )); - } - Ok(responses.pop().unwrap()) - } -} - -// NOTE(johnny): This is a temporary joint to extract the ConnectorType for -// purposes of dispatching to an appropriate connector delegate invocation. -// This will definitely change when we shift `sops` unsealing from Go -> Rust, -// as we'll probably want an InvokeConfig trait or something similar that -// allows us to do the appropriate config unwrapping. -fn temp_connector_type(request: &Request) -> anyhow::Result { - let ct = match (&request.spec, &request.validate, &request.open) { - (Some(r), None, None) => r.connector_type, - (None, Some(r), None) => r.connector_type, - (None, None, Some(r)) => { - let collection_spec::Derivation { connector_type, .. } = r - .collection - .as_ref() - .context("missing collection")? 
- .derivation - .as_ref() - .context("missing derivation")?; - - *connector_type - } - _ => anyhow::bail!("unexpected request (not Spec, Validate, or Open)"), - }; - Ok(ConnectorType::from_i32(ct).unwrap_or_default()) -} diff --git a/crates/runtime/src/derive/mod.rs b/crates/runtime/src/derive/mod.rs index 6988ea7eea..cc8afb6749 100644 --- a/crates/runtime/src/derive/mod.rs +++ b/crates/runtime/src/derive/mod.rs @@ -1,11 +1,197 @@ -mod middleware; -pub use middleware::Middleware; +use crate::Runtime; +use anyhow::Context; +use futures::{Stream, StreamExt, TryStreamExt}; +use proto_flow::derive::{request, Request, Response}; +use proto_flow::flow::collection_spec::derivation::ConnectorType; +use proto_flow::ops; +use proto_flow::runtime::DeriveRequestExt; +use std::pin::Pin; +use std::sync::Arc; pub mod combine; -pub mod connectors; -pub mod log_level; +pub mod image; pub mod rocksdb; -fn anyhow_to_status(err: anyhow::Error) -> tonic::Status { - tonic::Status::internal(format!("{err:?}")) +pub type BoxStream = futures::stream::BoxStream<'static, tonic::Result>; + +#[tonic::async_trait] +impl proto_grpc::derive::connector_server::Connector for Runtime +where + H: Fn(&ops::Log) + Send + Sync + Clone + 'static, +{ + type DeriveStream = BoxStream; + + async fn derive( + &self, + request: tonic::Request>, + ) -> tonic::Result> { + let conn_info = request + .extensions() + .get::(); + tracing::debug!(?request, ?conn_info, "started derive request"); + + let response_rx = self.clone().serve_derive(request.into_inner()).await?; + + Ok(tonic::Response::new(response_rx)) + } +} + +impl Runtime +where + H: Fn(&ops::Log) + Send + Sync + Clone + 'static, +{ + pub async fn serve_derive(self, request_rx: In) -> tonic::Result + where + In: futures::Stream> + Send + Unpin + 'static, + { + let mut request_rx = request_rx.peekable(); + + let mut peek_request = match Pin::new(&mut request_rx).peek().await { + Some(Ok(peek)) => peek.clone(), + Some(Err(status)) => return Err(status.clone()), + None => return Ok(futures::stream::empty().boxed()), + }; + let (endpoint, _) = extract_endpoint(&mut peek_request).map_err(crate::anyhow_to_status)?; + + // NOTE(johnny): To debug requests / responses at any layer of this interceptor stack, try: + // let request_rx = request_rx.inspect_ok(|request| { + // eprintln!("REQUEST: {}", serde_json::to_string(request).unwrap()); + // }); + // + // let response_rx = response_rx.inspect_ok(|response| { + // eprintln!("RESPONSE: {}", serde_json::to_string(response).unwrap()); + // }); + + // Request interceptor which adjusts the dynamic log level with each Open. + let request_rx = adjust_log_level(request_rx, self.set_log_level); + + // Request interceptor which filters Request.Read of Ack documents. + let request_rx = request_rx.try_filter(|request| { + let keep = if let Some(request::Read { + uuid: Some(uuid), .. + }) = &request.read + { + proto_gazette::message_flags::ACK_TXN & uuid.node == 0 // Not an ACK. + } else { + true + }; + futures::future::ready(keep) + }); + + // Request interceptor for combining over documents. + let (request_rx, combine_back) = + combine::adapt_requests(&peek_request, request_rx).map_err(crate::anyhow_to_status)?; + + let response_rx = match endpoint { + models::DeriveUsing::Sqlite(_) => { + // Invoke the underlying SQLite connector. + let response_rx = ::derive_sqlite::connector(&peek_request, request_rx)?; + + // Response interceptor for combining over documents. 
+ let response_rx = combine_back.adapt_responses(response_rx); + + response_rx.boxed() + } + models::DeriveUsing::Connector(models::ConnectorConfig { image, .. }) => { + // Request interceptor for stateful RocksDB storage. + let (request_rx, rocks_back) = rocksdb::adapt_requests(&peek_request, request_rx) + .map_err(crate::anyhow_to_status)?; + + // Invoke the underlying image connector. + let response_rx = image::image_connector( + image, + self.log_handler, + self.container_network, + request_rx, + &self.task_name, + ) + .await?; + + // Response interceptor for stateful RocksDB storage. + let response_rx = rocks_back.adapt_responses(response_rx); + // Response interceptor for combining over documents. + let response_rx = combine_back.adapt_responses(response_rx); + + response_rx.boxed() + } + models::DeriveUsing::Typescript(_) => unreachable!(), + }; + + Ok(response_rx) + } +} + +pub fn adjust_log_level( + request_rx: R, + set_log_level: Option>, +) -> impl Stream> +where + R: Stream> + Send + 'static, +{ + request_rx.inspect_ok(move |request| { + let Some(Ok(DeriveRequestExt{labels: Some(ops::ShardLabeling { log_level, .. }), ..})) = request.get_internal() else { return }; + + if let (Some(log_level), Some(set_log_level)) = + (ops::log::Level::from_i32(log_level), &set_log_level) + { + (set_log_level)(log_level); + } + }) +} + +// Returns the DeriveUsing of this Request, and a mutable reference to its inner config_json. +fn extract_endpoint<'r>( + request: &'r mut Request, +) -> anyhow::Result<(models::DeriveUsing, &'r mut String)> { + let (connector_type, config_json) = match request { + Request { + spec: Some(spec), .. + } => (spec.connector_type, &mut spec.config_json), + Request { + validate: Some(validate), + .. + } => (validate.connector_type, &mut validate.config_json), + Request { + open: Some(open), .. + } => { + let inner = open + .collection + .as_mut() + .context("`open` missing required `collection`")? 
+ .derivation + .as_mut() + .context("`collection` missing required `derivation`")?; + + (inner.connector_type, &mut inner.config_json) + } + + _ => anyhow::bail!("request {request:?} does not contain an endpoint"), + }; + + if connector_type == ConnectorType::Image as i32 { + Ok(( + models::DeriveUsing::Connector( + serde_json::from_str(config_json).context("parsing connector config")?, + ), + config_json, + )) + } else if connector_type == ConnectorType::Sqlite as i32 { + Ok(( + models::DeriveUsing::Sqlite( + serde_json::from_str(config_json).context("parsing connector config")?, + ), + config_json, + )) + } else if connector_type == ConnectorType::Typescript as i32 { + Ok(( + models::DeriveUsing::Connector(models::ConnectorConfig { + image: "ghcr.io/estuary/derive-typescript:dev".to_string(), + config: models::RawValue::from_str(config_json) + .context("parsing connector config")?, + }), + config_json, + )) + } else { + anyhow::bail!("invalid connector type: {connector_type}"); + } } diff --git a/crates/runtime/src/derive/rocksdb.rs b/crates/runtime/src/derive/rocksdb.rs index 009af4a75e..56be1df09d 100644 --- a/crates/runtime/src/derive/rocksdb.rs +++ b/crates/runtime/src/derive/rocksdb.rs @@ -1,4 +1,4 @@ -use super::anyhow_to_status; +use crate::anyhow_to_status; use anyhow::Context; use futures::{channel::mpsc, Stream, StreamExt}; use prost::Message; diff --git a/crates/runtime/src/lib.rs b/crates/runtime/src/lib.rs index 7e92f8c12c..9c68c68029 100644 --- a/crates/runtime/src/lib.rs +++ b/crates/runtime/src/lib.rs @@ -1,11 +1,169 @@ -mod task_runtime; -pub use task_runtime::TaskRuntime; +use futures::{channel::oneshot, Stream, StreamExt}; +use proto_flow::ops; +use std::sync::Arc; +mod capture; +mod container; +mod derive; +mod materialize; mod task_service; -pub use task_service::TaskService; +mod tokio_context; +mod unary; +mod unseal; -pub mod derive; +pub use task_service::TaskService; +pub use tokio_context::TokioContext; // This constant is shared between Rust and Go code. // See go/protocols/flow/document_extensions.go. pub const UUID_PLACEHOLDER: &str = "DocUUIDPlaceholder-329Bb50aa48EAa9ef"; + +fn anyhow_to_status(err: anyhow::Error) -> tonic::Status { + tonic::Status::internal(format!("{err:?}")) +} + +/// Runtime implements the various services that constitute the Flow Runtime. +#[derive(Clone)] +pub struct Runtime +where + L: Fn(&ops::Log) + Send + Sync + Clone + 'static, +{ + container_network: String, + log_handler: L, + set_log_level: Option>, + task_name: String, +} + +impl Runtime +where + L: Fn(&ops::Log) + Send + Sync + Clone + 'static, +{ + /// Build a new Runtime. + /// * `container_network`: the Docker container network used for connector containers. + /// * `log_handler`: handler to which connector logs are dispatched. + /// * `set_log_level`: callback for adjusting the log level implied by runtime requests. + /// * `task_name`: name which is used to label any started connector containers. + pub fn new( + container_network: String, + log_handler: L, + set_log_level: Option>, + task_name: String, + ) -> Self { + Self { + container_network, + log_handler, + set_log_level, + task_name, + } + } + + /// Build a tonic Server which includes all of the Runtime's services. + pub fn build_tonic_server(self) -> tonic::transport::server::Router { + tonic::transport::Server::builder() + .add_service( + proto_grpc::capture::connector_server::ConnectorServer::new(self.clone()) + .max_decoding_message_size(usize::MAX) // Up from 4MB. Accept whatever the Go runtime sends. 
+ .max_encoding_message_size(usize::MAX), // The default, made explicit. + ) + .add_service( + proto_grpc::derive::connector_server::ConnectorServer::new(self.clone()) + .max_decoding_message_size(usize::MAX) // Up from 4MB. Accept whatever the Go runtime sends. + .max_encoding_message_size(usize::MAX), // The default, made explicit. + ) + .add_service( + proto_grpc::materialize::connector_server::ConnectorServer::new(self) + .max_decoding_message_size(usize::MAX) // Up from 4MB. Accept whatever the Go runtime sends. + .max_encoding_message_size(usize::MAX), // The default, made explicit. + ) + } +} + +/// Adapt a Stream> into Stream by mapping the first Error into stream EOF. +/// The Error instance is passed through the returned oneshot Receiver. +fn eof_on_error(stream: S) -> (impl Stream, oneshot::Receiver) +where + S: futures::stream::Stream> + Send + 'static, + Error: std::fmt::Debug, +{ + let (error_tx, error_rx) = oneshot::channel(); + + let stream = stream.scan(error_tx, |error_tx, item| { + futures::future::ready(match item { + Ok(ok) => Some(ok), + Err(error) => { + // Replace because send() consumes `error_tx`. + if let Err(error) = std::mem::replace(error_tx, oneshot::channel().0).send(error) { + tracing::warn!( + ?error, + "request error but the response stream has already closed" + ) + } + None // End of stream. + } + }) + }); + + (stream, error_rx) +} + +/// Adapt a Stream> by monitoring a provided oneshot Receiver and, +/// should it ever resolve, injecting its resolved Error into the adapted Stream. +fn inject_error( + stream: S, + error_rx: oneshot::Receiver, +) -> impl Stream> +where + S: futures::stream::Stream> + Send + 'static + Unpin, +{ + let error_rx = futures::stream::unfold(Some(error_rx), |error_rx| async move { + let Some(error_rx) = error_rx else { return None }; + + match error_rx.await { + Ok(error) => Some((Err(error), None)), + Err(_cancelled) => None, + } + }); + futures::stream::select(stream, error_rx) +} + +#[cfg(test)] +mod test { + use super::*; + + #[tokio::test] + async fn test_error_pass_through() { + // Case 1: A stream produces some values and then fails. + let (stream, err_rx) = eof_on_error(futures::stream::iter(vec![ + Ok(1), + Ok(2), + Ok(3), + Err(99), + Ok(100), + ])); + // We see all values prior to failure. + assert_eq!(stream.collect::>().await, vec![1, 2, 3]); + // We see the error after injecting into an empty stream. + assert_eq!( + inject_error(futures::stream::empty::>(), err_rx) + .collect::>() + .await, + vec![Err(99)] + ); + + // Case 2: A stream produces values and EOF's without failure. + let (stream, err_rx) = eof_on_error(futures::stream::iter(vec![ + Result::<_, i32>::Ok(1), + Ok(2), + Ok(3), + ])); + // We see all values. + assert_eq!(stream.collect::>().await, vec![1, 2, 3]); + // We see a clean EOF of our injected stream. + assert_eq!( + inject_error(futures::stream::iter(vec![Ok(4), Ok(5)]), err_rx) + .collect::>() + .await, + vec![Ok(4), Ok(5)] + ); + } +} diff --git a/crates/runtime/src/materialize.rs b/crates/runtime/src/materialize.rs deleted file mode 100644 index 6104a8a679..0000000000 --- a/crates/runtime/src/materialize.rs +++ /dev/null @@ -1,24 +0,0 @@ -// Notes on how we can structure materialize middleware: -// -// Request loop: -// - Spec / Validate / Apply: Unseal. Forward. -// - Open: Rebuild State. Unseal. Forward. -// - Load: Acquire shared combiners & combine-right. Forward request iff key is new & not cached. -// - Flush: Forward. -// Block awaiting Flushed notification from response loop. 
-// Acquire state combiners and drain combiners into forwarded Store requests. -// Send Flushed stats to response loop. -// - StartCommit: Forward. -// - Acknowledge: Forward. -// -// (Note that Store is never received from Go runtime). -// -// Response loop: -// - Spec / Validated / Applied / Opened: Forward. -// - Loaded: Acquire shared combiners & reduce-left. -// - Flushed: -// Send Flushed notification to request loop. -// Block awaiting Flushed stats from request loop. -// Forward Flushed to runtime enhanced with stats. -// - StartedCommit: Forward. -// - Acknowledged: Forward. diff --git a/crates/runtime/src/materialize/image.rs b/crates/runtime/src/materialize/image.rs new file mode 100644 index 0000000000..a6bb82934f --- /dev/null +++ b/crates/runtime/src/materialize/image.rs @@ -0,0 +1,96 @@ +use super::extract_endpoint; +use crate::{container, eof_on_error, inject_error, unseal}; +use futures::{Stream, TryStreamExt}; +use proto_flow::materialize::{Request, Response}; +use proto_flow::ops; + +pub async fn image_connector( + image: String, + log_handler: L, + network: String, + request_rx: R, + task_name: &str, +) -> tonic::Result>> +where + L: Fn(&ops::Log) + Send + Sync + 'static, + R: Stream> + Send + Unpin + 'static, +{ + let (container, channel, guard) = container::start( + &image, + log_handler, + &network, + task_name, + ops::TaskType::Materialization, + ) + .await + .map_err(crate::anyhow_to_status)?; + + // Adapt requests by identifying instances that carry endpoint configuration. + // Verify they remain compatible with our started container, and then unseal their config. + // Or if they're not compatible, then map to Status::aborted(). + let request_rx = request_rx.and_then(move |mut request| { + let must_unseal = if matches!( + request, + Request { spec: Some(_), .. } + | Request { + validate: Some(_), + .. + } + | Request { apply: Some(_), .. } + | Request { open: Some(_), .. } + ) { + Some(image.clone()) // Outer closure owns `image`. + } else { + None + }; + + async move { + if let Some(expect_image) = must_unseal { + let (endpoint, config_json) = + extract_endpoint(&mut request).map_err(crate::anyhow_to_status)?; + + let sealed_config = match endpoint { + models::MaterializationEndpoint::Connector(models::ConnectorConfig { + image: this_image, + config, + }) if expect_image == this_image => config, + + _ => return Err(tonic::Status::aborted("connector image has changed")), + }; + + *config_json = unseal::decrypt_sops(&sealed_config) + .await + .map_err(crate::anyhow_to_status)? + .to_string(); + } + + Ok(request) + } + }); + + let (request_rx, error_rx) = eof_on_error(request_rx); + + // Start a materialize RPC. + let container_response = + proto_grpc::materialize::connector_client::ConnectorClient::new(channel) + .materialize(request_rx) + .await?; + let response_rx = container_response.into_inner(); + + // Adapt responses by enriching the first Response with the image Container. + let mut container = Some(container); + let response_rx = response_rx.and_then(move |mut response| { + _ = &guard; // Move so it's retained while responses are still being read. 
+ + if container.is_some() { + response + .set_internal(&mut bytes::BytesMut::new(), |internal| { + internal.container = container.take(); + }) + .unwrap(); + } + futures::future::ready(Ok(response)) + }); + + Ok(inject_error(response_rx, error_rx)) +} diff --git a/crates/runtime/src/materialize/mod.rs b/crates/runtime/src/materialize/mod.rs new file mode 100644 index 0000000000..ed4225025d --- /dev/null +++ b/crates/runtime/src/materialize/mod.rs @@ -0,0 +1,179 @@ +use super::Runtime; +use anyhow::Context; +use futures::{Stream, StreamExt, TryStreamExt}; +use proto_flow::flow::materialization_spec::ConnectorType; +use proto_flow::materialize::{Request, Response}; +use proto_flow::ops; +use proto_flow::runtime::MaterializeRequestExt; +use std::pin::Pin; +use std::sync::Arc; + +// Notes on how we can structure materialize middleware: +// +// Request loop: +// - Spec / Validate / Apply: Unseal. Forward. +// - Open: Rebuild State. Unseal. Forward. +// - Load: Acquire shared combiners & combine-right. Forward request iff key is new & not cached. +// - Flush: Forward. +// Block awaiting Flushed notification from response loop. +// Acquire state combiners and drain combiners into forwarded Store requests. +// Send Flushed stats to response loop. +// - StartCommit: Forward. +// - Acknowledge: Forward. +// +// (Note that Store is never received from Go runtime). +// +// Response loop: +// - Spec / Validated / Applied / Opened: Forward. +// - Loaded: Acquire shared combiners & reduce-left. +// - Flushed: +// Send Flushed notification to request loop. +// Block awaiting Flushed stats from request loop. +// Forward Flushed to runtime enhanced with stats. +// - StartedCommit: Forward. +// - Acknowledged: Forward. + +mod image; + +pub type BoxStream = futures::stream::BoxStream<'static, tonic::Result>; + +#[tonic::async_trait] +impl proto_grpc::materialize::connector_server::Connector for Runtime +where + L: Fn(&ops::Log) + Send + Sync + Clone + 'static, +{ + type MaterializeStream = BoxStream; + + async fn materialize( + &self, + request: tonic::Request>, + ) -> tonic::Result> { + let conn_info = request + .extensions() + .get::(); + tracing::debug!(?request, ?conn_info, "started materialize request"); + + let response_rx = self.clone().serve_materialize(request.into_inner()).await?; + + Ok(tonic::Response::new(response_rx)) + } +} + +impl Runtime +where + L: Fn(&ops::Log) + Send + Sync + Clone + 'static, +{ + pub async fn serve_materialize(self, request_rx: In) -> tonic::Result + where + In: Stream> + Send + Unpin + 'static, + { + let mut request_rx = request_rx.peekable(); + + let mut peek_request = match Pin::new(&mut request_rx).peek().await { + Some(Ok(peek)) => peek.clone(), + Some(Err(status)) => return Err(status.clone()), + None => return Ok(futures::stream::empty().boxed()), + }; + let (endpoint, _) = extract_endpoint(&mut peek_request).map_err(crate::anyhow_to_status)?; + + // NOTE(johnny): To debug requests / responses at any layer of this interceptor stack, try: + // let request_rx = request_rx.inspect_ok(|request| { + // eprintln!("REQUEST: {}", serde_json::to_string(request).unwrap()); + // }); + // + // let response_rx = response_rx.inspect_ok(|response| { + // eprintln!("RESPONSE: {}", serde_json::to_string(response).unwrap()); + // }); + + // Request interceptor which adjusts the dynamic log level based on internal shard labels. 
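+ // The level is read from the ShardLabeling carried in the request's internal extension.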
+ let request_rx = adjust_log_level(request_rx, self.set_log_level); + + let response_rx = match endpoint { + models::MaterializationEndpoint::Connector(models::ConnectorConfig { + image, .. + }) => image::image_connector( + image, + self.log_handler, + self.container_network, + request_rx, + &self.task_name, + ) + .await? + .boxed(), + + models::MaterializationEndpoint::Sqlite(_) => { + return Err(tonic::Status::invalid_argument( + "SQLite materializations are not supported and will be removed", + )) + } + }; + + Ok(response_rx) + } +} + +pub fn adjust_log_level( + request_rx: R, + set_log_level: Option>, +) -> impl Stream> +where + R: Stream> + Send + 'static, +{ + request_rx.inspect_ok(move |request| { + let Some(Ok(MaterializeRequestExt{labels: Some(ops::ShardLabeling { log_level, .. })})) = request.get_internal() else { return }; + + if let (Some(log_level), Some(set_log_level)) = + (ops::log::Level::from_i32(log_level), &set_log_level) + { + (set_log_level)(log_level); + } + }) +} + +// Returns the MaterializationEndpoint of this Request, and a mutable reference to its inner config_json. +fn extract_endpoint<'r>( + request: &'r mut Request, +) -> anyhow::Result<(models::MaterializationEndpoint, &'r mut String)> { + let (connector_type, config_json) = match request { + Request { + spec: Some(spec), .. + } => (spec.connector_type, &mut spec.config_json), + Request { + validate: Some(validate), + .. + } => (validate.connector_type, &mut validate.config_json), + Request { + apply: Some(apply), .. + } => { + let inner = apply + .materialization + .as_mut() + .context("`apply` missing required `materialization`")?; + + (inner.connector_type, &mut inner.config_json) + } + Request { + open: Some(open), .. + } => { + let inner = open + .materialization + .as_mut() + .context("`open` missing required `materialization`")?; + + (inner.connector_type, &mut inner.config_json) + } + + _ => anyhow::bail!("request {request:?} does not contain an endpoint"), + }; + + if connector_type == ConnectorType::Image as i32 { + Ok(( + models::MaterializationEndpoint::Connector( + serde_json::from_str(config_json).context("parsing connector config")?, + ), + config_json, + )) + } else { + anyhow::bail!("invalid connector type: {connector_type}"); + } +} diff --git a/crates/runtime/src/task_runtime.rs b/crates/runtime/src/task_runtime.rs deleted file mode 100644 index c3dedc142a..0000000000 --- a/crates/runtime/src/task_runtime.rs +++ /dev/null @@ -1,63 +0,0 @@ -use std::ops::{Deref, DerefMut}; - -// TaskRuntime is a tokio Runtime that names its threads under a given thread name, -// and forwards its tracing events and spans to a provided log file with a filter. -pub struct TaskRuntime { - runtime: Option, -} - -impl TaskRuntime { - pub fn new(thread_name: String, log_dispatch: tracing::Dispatch) -> Self { - // Map the input thread name into unique thread names suffixed with their millisecond start time. 
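As an aside to the hunks, here is a minimal sketch of exercising the serve_materialize path above end to end through the unary adapter added in crates/runtime/src/unary.rs further below. The connector image, the wrapper function, and the Runtime construction are illustrative assumptions, not part of this patch:

use proto_flow::flow::materialization_spec::ConnectorType;
use proto_flow::materialize;

// Issue a one-shot Spec request against a runtime::Runtime<L> built as in task_service.rs.
async fn fetch_connector_spec<L>(
    runtime: runtime::Runtime<L>,
) -> tonic::Result<materialize::Response>
where
    L: Fn(&proto_flow::ops::Log) + Send + Sync + Clone + 'static,
{
    let request = materialize::Request {
        spec: Some(materialize::request::Spec {
            connector_type: ConnectorType::Image as i32,
            // Hypothetical image; any models::ConnectorConfig-shaped JSON is accepted here.
            config_json: serde_json::json!({
                "image": "ghcr.io/estuary/materialize-postgres:dev",
                "config": {},
            })
            .to_string(),
            ..Default::default()
        }),
        ..Default::default()
    };
    // unary_materialize wraps the request in a single-element stream and expects
    // exactly one response back from serve_materialize.
    runtime.unary_materialize(request).await
}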
- let thread_name_fn = move || { - let millis = std::time::SystemTime::now() - .duration_since(std::time::UNIX_EPOCH) - .unwrap() - .as_millis(); - format!("{thread_name}:{}", millis) - }; - - let runtime = tokio::runtime::Builder::new_multi_thread() - .worker_threads(1) - .enable_all() - .thread_name_fn(thread_name_fn) - .on_thread_start(move || { - let guard = tracing::dispatcher::set_default(&log_dispatch); - Self::DISPATCH_GUARD.with(|cell| cell.set(Some(guard))); - }) - .on_thread_stop(|| { - Self::DISPATCH_GUARD.with(|cell| cell.take()); - }) - .build() - .unwrap(); - - Self { - runtime: Some(runtime), - } - } - - thread_local!(static DISPATCH_GUARD: std::cell::Cell> = std::cell::Cell::new(None)); -} - -impl Deref for TaskRuntime { - type Target = tokio::runtime::Runtime; - fn deref(&self) -> &Self::Target { - self.runtime.as_ref().unwrap() - } -} -impl DerefMut for TaskRuntime { - fn deref_mut(&mut self) -> &mut Self::Target { - self.runtime.as_mut().unwrap() - } -} - -impl Drop for TaskRuntime { - fn drop(&mut self) { - // Explicitly call Runtime::shutdown_background as an alternative to calling Runtime::Drop. - // This shuts down the runtime without waiting for blocking background tasks to complete, - // which is good because they likely never will. Consider a blocking call to read from stdin, - // where the sender is itself waiting for us to exit or write to our stdout. - // (Note that tokio::io maps AsyncRead of file descriptors to blocking tasks under the hood). - self.runtime.take().unwrap().shutdown_background(); - } -} diff --git a/crates/runtime/src/task_service.rs b/crates/runtime/src/task_service.rs index f4b64bdfcc..241ee68682 100644 --- a/crates/runtime/src/task_service.rs +++ b/crates/runtime/src/task_service.rs @@ -1,15 +1,12 @@ -use super::{derive, TaskRuntime}; +use super::{Runtime, TokioContext}; use anyhow::Context; use futures::channel::oneshot; use futures::FutureExt; -use proto_flow::{ops, runtime::TaskServiceConfig}; -use std::sync::atomic::{AtomicI32, Ordering}; -use std::sync::Arc; -use tracing_subscriber::prelude::*; +use proto_flow::runtime::TaskServiceConfig; pub struct TaskService { cancel_tx: oneshot::Sender<()>, - runtime: TaskRuntime, + tokio_context: TokioContext, server: tokio::task::JoinHandle>, } @@ -19,42 +16,13 @@ impl TaskService { log_file_fd: _, task_name, uds_path, + container_network, } = config; if !std::path::Path::new(&uds_path).is_absolute() { anyhow::bail!("uds_path must be an absolute filesystem path"); } - // Dynamically configurable ops::log::Level, as a shared atomic. - let log_level = std::sync::Arc::new(AtomicI32::new(ops::log::Level::Info as i32)); - - // Dynamic tracing log filter which uses our dynamic Level. - let log_level_clone = log_level.clone(); - let log_filter = tracing_subscriber::filter::DynFilterFn::new(move |metadata, _cx| { - let cur_level = match metadata.level().as_str() { - "TRACE" => ops::log::Level::Trace as i32, - "DEBUG" => ops::log::Level::Debug as i32, - "INFO" => ops::log::Level::Info as i32, - "WARN" => ops::log::Level::Warn as i32, - "ERROR" => ops::log::Level::Error as i32, - _ => ops::log::Level::UndefinedLevel as i32, - }; - - if let Some(path) = metadata.module_path() { - // Hyper / HTTP/2 debug logs are just too noisy and not very useful. - if path.starts_with("h2::") && cur_level >= ops::log::Level::Debug as i32 { - return false; - } - } - - cur_level <= log_level_clone.load(Ordering::Relaxed) - }); - - // Function closure which allows for changing the dynamic log level. 
- let set_log_level = Arc::new(move |level: ops::log::Level| { - log_level.store(level as i32, Ordering::Relaxed) - }); - // We'll gather logs from tokio-tracing events of our TaskRuntime, // as well as logs which are forwarded from connector container delegates, // and sequence & dispatch them into this task-level `log_handler`. @@ -62,23 +30,22 @@ impl TaskService { let log_handler = ::ops::new_encoded_json_write_handler(std::sync::Arc::new( std::sync::Mutex::new(log_file), )); - // Configure a tracing::Dispatch, which is a type-erased form of a tracing::Subscriber, - // that gathers tracing events & spans and logs them to `log_handler`. - let log_dispatch: tracing::Dispatch = tracing_subscriber::registry() - .with( - ::ops::tracing::Layer::new(log_handler.clone(), std::time::SystemTime::now) - .with_filter(log_filter), - ) - .into(); - let runtime = TaskRuntime::new(task_name, log_dispatch); + let tokio_context = TokioContext::new( + ops::LogLevel::Info, + log_handler.clone(), + task_name.clone(), + 1, + ); // Instantiate selected task service definitions. - let derive_service = Some(derive::Middleware::new( - log_handler.clone(), - Some(set_log_level.clone()), - )); + let runtime = Runtime::new( + container_network, + log_handler, + Some(tokio_context.set_log_level_fn()), + task_name, + ); - let uds = runtime + let uds = tokio_context .block_on(async move { tokio::net::UnixListener::bind(uds_path) }) .context("failed to bind task service unix domain socket")?; let (cancel_tx, cancel_rx) = oneshot::channel::<()>(); @@ -95,20 +62,17 @@ impl TaskService { // completed, and will then immediately tear down client transports. // This means we MUST mask SIGPIPE, because it's quite common for us or our // peer to attempt to send messages over a transport that the other side has torn down. - let server = tonic::transport::Server::builder() - .add_optional_service(derive_service.map(|s| { - proto_grpc::derive::connector_server::ConnectorServer::new(s) - .max_decoding_message_size(usize::MAX) // Up from 4MB. Accept whatever the Go runtime sends. - .max_encoding_message_size(usize::MAX) // The default, made explicit. - })) - .serve_with_incoming_shutdown(uds_stream, async move { - _ = cancel_rx.await; - }); - let server = runtime.spawn(server); + let server = + runtime + .build_tonic_server() + .serve_with_incoming_shutdown(uds_stream, async move { + _ = cancel_rx.await; + }); + let server = tokio_context.spawn(server); Ok(Self { cancel_tx, - runtime, + tokio_context, server, }) } @@ -116,13 +80,13 @@ impl TaskService { pub fn graceful_stop(self) { let Self { cancel_tx, - runtime, + tokio_context, server, } = self; _ = cancel_tx.send(()); - let log = match runtime.block_on(server) { + let log = match tokio_context.block_on(server) { Err(panic) => async move { tracing::error!(?panic, "task gRPC service exited with panic"); } @@ -137,8 +101,8 @@ impl TaskService { .boxed(), }; // Spawn to log from a runtime thread, then block the current thread awaiting it. - let () = runtime.block_on(runtime.spawn(log)).unwrap(); + let () = tokio_context.block_on(tokio_context.spawn(log)).unwrap(); - // TaskRuntime implements Drop for shutdown. + // TokioContext implements Drop for shutdown. 
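+        // Dropping it calls Runtime::shutdown_background, which tears the runtime
+        // down without waiting on blocked background tasks (see tokio_context.rs).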
} } diff --git a/crates/runtime/src/tokio_context.rs b/crates/runtime/src/tokio_context.rs new file mode 100644 index 0000000000..b0a136619e --- /dev/null +++ b/crates/runtime/src/tokio_context.rs @@ -0,0 +1,125 @@ +use proto_flow::ops; +use std::ops::{Deref, DerefMut}; +use std::sync::atomic::{AtomicI32, Ordering}; +use std::sync::Arc; +use tracing_subscriber::prelude::*; + +/// TokioContext manages a tokio::Runtime that names its threads under a given thread name, +/// and forwards its tracing events to a provided log handler. +pub struct TokioContext { + runtime: Option, + set_log_level_fn: Arc, +} + +impl TokioContext { + /// Build a new TokioContext and associated tokio::Runtime, + /// having the `thread_name_prefix` and `worker_threads`. + /// Threads of the context are initialized with a tracing Subscriber + /// configured with `initial_log_level`. + pub fn new( + initial_log_level: ops::log::Level, + log_handler: L, + thread_name_prefix: String, + worker_threads: usize, + ) -> Self + where + L: Fn(&ops::Log) + Send + Sync + 'static, + { + // Map the input thread name into unique thread names suffixed with their millisecond start time. + let thread_name_fn = move || { + let millis = std::time::SystemTime::now() + .duration_since(std::time::UNIX_EPOCH) + .unwrap() + .as_millis(); + format!("{thread_name_prefix}-{}", millis) + }; + + // Dynamically configurable ops::log::Level, as a shared atomic. + let log_level = std::sync::Arc::new(AtomicI32::new(initial_log_level as i32)); + + // Function closure which allows for changing the dynamic log level. + let log_level_clone = log_level.clone(); + let set_log_level = Arc::new(move |level: ops::log::Level| { + log_level_clone.store(level as i32, Ordering::Relaxed) + }); + + // Build a tracing_subscriber::Filter which uses our dynamic log level. + let log_filter = tracing_subscriber::filter::DynFilterFn::new(move |metadata, _cx| { + let cur_level = match metadata.level().as_str() { + "TRACE" => ops::log::Level::Trace as i32, + "DEBUG" => ops::log::Level::Debug as i32, + "INFO" => ops::log::Level::Info as i32, + "WARN" => ops::log::Level::Warn as i32, + "ERROR" => ops::log::Level::Error as i32, + _ => ops::log::Level::UndefinedLevel as i32, + }; + + if let Some(path) = metadata.module_path() { + // Hyper / HTTP/2 debug logs are just too noisy and not very useful. + if path.starts_with("h2::") && cur_level >= ops::log::Level::Debug as i32 { + return false; + } + } + + cur_level <= log_level.load(Ordering::Relaxed) + }); + + // Configure a tracing::Dispatch, which is a type-erased form of a tracing::Subscriber, + // that gathers tracing events & spans and logs them to `log_handler`. + let log_dispatch: tracing::Dispatch = tracing_subscriber::registry() + .with( + ::ops::tracing::Layer::new(log_handler, std::time::SystemTime::now) + .with_filter(log_filter), + ) + .into(); + + let runtime = tokio::runtime::Builder::new_multi_thread() + .worker_threads(worker_threads) + .enable_all() + .thread_name_fn(thread_name_fn) + .on_thread_start(move || { + let guard = tracing::dispatcher::set_default(&log_dispatch); + Self::DISPATCH_GUARD.with(|cell| cell.set(Some(guard))); + }) + .on_thread_stop(|| { + Self::DISPATCH_GUARD.with(|cell| cell.take()); + }) + .build() + .unwrap(); + + Self { + runtime: Some(runtime), + set_log_level_fn: set_log_level, + } + } + + /// Return a function closure which dynamically updates the configured log level for tracing events. 
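+    /// The closure is an `Arc` over a shared atomic, so it is cheap to clone and
+    /// safe to invoke from any thread.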
+ pub fn set_log_level_fn(&self) -> Arc { + self.set_log_level_fn.clone() + } + + thread_local!(static DISPATCH_GUARD: std::cell::Cell> = std::cell::Cell::new(None)); +} + +impl Deref for TokioContext { + type Target = tokio::runtime::Runtime; + fn deref(&self) -> &Self::Target { + self.runtime.as_ref().unwrap() + } +} +impl DerefMut for TokioContext { + fn deref_mut(&mut self) -> &mut Self::Target { + self.runtime.as_mut().unwrap() + } +} + +impl Drop for TokioContext { + fn drop(&mut self) { + // Explicitly call Runtime::shutdown_background as an alternative to calling Runtime::Drop. + // This shuts down the runtime without waiting for blocking background tasks to complete, + // which is good because they sometimes never will. Consider a blocking call to read from stdin, + // where the sender is itself waiting for us to exit or write to our stdout. + // (Note that tokio::io maps AsyncRead of file descriptors to blocking tasks under the hood). + self.runtime.take().unwrap().shutdown_background(); + } +} diff --git a/crates/runtime/src/unary.rs b/crates/runtime/src/unary.rs new file mode 100644 index 0000000000..eff6601981 --- /dev/null +++ b/crates/runtime/src/unary.rs @@ -0,0 +1,53 @@ +use super::Runtime; +use futures::{FutureExt, StreamExt, TryStreamExt}; +use proto_flow::{capture, derive, materialize}; + +impl Runtime +where + L: Fn(&ops::Log) + Send + Sync + Clone + 'static, +{ + pub async fn unary_capture( + self, + request: capture::Request, + ) -> tonic::Result { + let request_rx = futures::stream::once(async move { Ok(request) }).boxed(); + let response = self.serve_capture(request_rx).boxed(); + let mut responses: Vec = response.await?.try_collect().await?; + + if responses.len() != 1 { + return Err(tonic::Status::unknown( + "unary request didn't return a response", + )); + } + Ok(responses.pop().unwrap()) + } + + pub async fn unary_derive(self, request: derive::Request) -> tonic::Result { + let request_rx = futures::stream::once(async move { Ok(request) }).boxed(); + let response = self.serve_derive(request_rx).boxed(); + let mut responses: Vec = response.await?.try_collect().await?; + + if responses.len() != 1 { + return Err(tonic::Status::unknown( + "unary request didn't return a response", + )); + } + Ok(responses.pop().unwrap()) + } + + pub async fn unary_materialize( + self, + request: materialize::Request, + ) -> tonic::Result { + let request_rx = futures::stream::once(async move { Ok(request) }).boxed(); + let response = self.serve_materialize(request_rx).boxed(); + let mut responses: Vec = response.await?.try_collect().await?; + + if responses.len() != 1 { + return Err(tonic::Status::unknown( + "unary request didn't return a response", + )); + } + Ok(responses.pop().unwrap()) + } +} diff --git a/crates/runtime/src/unseal/mod.rs b/crates/runtime/src/unseal/mod.rs new file mode 100644 index 0000000000..3461f634a8 --- /dev/null +++ b/crates/runtime/src/unseal/mod.rs @@ -0,0 +1,186 @@ +use anyhow::Context; +use serde_json::value::RawValue; +use zeroize::Zeroizing; + +/// Decrypt a `sops`-protected document using `sops` and application default credentials. 
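+/// Documents without a `sops` stanza are returned unchanged. When the stanza
+/// names an `encrypted_suffix`, that suffix is stripped from object keys after
+/// decryption, e.g. `{"credentials_sops": "ENC[...]"}` becomes `{"credentials": ...}`.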
+pub async fn decrypt_sops(config: &RawValue) -> anyhow::Result> { + let sops = locate_bin::locate("sops").context("failed to locate sops")?; + + #[derive(serde::Deserialize)] + struct Document { + #[serde(default)] + sops: Option, + } + #[derive(serde::Deserialize)] + struct Sops { + #[serde(default)] + encrypted_suffix: Option, + } + + let doc: Document = + serde_json::from_str(config.get()).context("decoding `sops` stanza of endpoint config")?; + + // If this isn't a `sops` document, then return a copy of it unmodified. + let Some(Sops{encrypted_suffix}) = doc.sops else { + return Ok(config.to_owned()) + }; + + // Note that input_output() pre-allocates an output buffer as large as its input buffer, + // and our decrypted result will never be larger than its input. + let async_process::Output { + stderr, + stdout, + status, + } = async_process::input_output( + async_process::Command::new(sops).args([ + "--decrypt", + "--input-type", + "json", + "--output-type", + "json", + "/dev/stdin", + ]), + config.get().as_bytes(), + ) + .await + .context("failed to run sops")?; + + let stdout = Zeroizing::from(stdout); + + if !status.success() { + anyhow::bail!( + "decrypting sops document failed: {}", + String::from_utf8_lossy(&stderr), + ); + } + + // If there is no encrypted suffix, then we're all done. + let Some(encrypted_suffix) = encrypted_suffix else { + return Ok(serde_json::from_slice(&stdout).context("parsing `sops` output")?) + }; + + // We must re-write the document to remove the encrypted suffix. + // Use `jq` to do the re-writing. This avoids allocating and parsing + // values in our own heap, and is also succinct. + // See: https://jqplay.org/s/sQunN3Qc4s + let async_process::Output { + stderr, + stdout, + status, + } = async_process::input_output( + async_process::Command::new("jq").args([ + // --compact-output disables jq's pretty-printer, which will otherwise introduce + // unnecessary newlines/tabs in the output, which will cause the output to be + // longer than the input. + "--compact-output".to_string(), + // --join-output puts jq into raw output mode, and additionally stops it from writing newlines + // at the end of its output, which can otherwise cause the output to be longer + // than the input. + "--join-output".to_string(), + format!("walk(if type == \"object\" then with_entries(. + {{key: .key | rtrimstr(\"{encrypted_suffix}\")}}) else . end)"), + ]), + &stdout, + ) + .await + .context("failed to run jq")?; + + let stdout = Zeroizing::from(stdout); + + if !status.success() { + anyhow::bail!( + "stripping encrypted suffix {encrypted_suffix} from document failed: {}", + String::from_utf8_lossy(&stderr), + ); + } + + Ok(serde_json::from_slice(&stdout).context("parsing stripped `jq` output")?) 
+} + +#[cfg(test)] +mod test { + use super::decrypt_sops; + use futures::StreamExt; + use serde_json::value::RawValue; + + #[tokio::test] + async fn test_fixtures() { + let configs: Vec> = vec![ + serde_json::from_slice(include_bytes!("testdata/empty-input.json")).unwrap(), + serde_json::from_slice(include_bytes!("testdata/hyphen-suffix.json")).unwrap(), + serde_json::from_slice(include_bytes!("testdata/no-suffix.json")).unwrap(), + serde_json::from_slice(include_bytes!("testdata/not-encrypted.json")).unwrap(), + serde_json::from_slice(include_bytes!("testdata/under-suffix.json")).unwrap(), + ]; + + let outputs = futures::stream::iter(configs.into_iter()) + .map(|config| async move { + serde_json::from_str(decrypt_sops(&config).await.unwrap().get()).unwrap() + }) + .buffered(5) + .collect::>() + .await; + + insta::assert_json_snapshot!(outputs, @r###" + [ + {}, + { + "notsops": "bar", + "s2": "final secret", + "stuff": { + "array": [ + 42, + { + "frob": "bob", + "inner-sops": "nested secret" + } + ], + "nullish": null, + "other": true, + "s1": "secret!" + } + }, + { + "false": null, + "foo": { + "bar": 42, + "some_sops": [ + 3, + "three" + ] + }, + "tru": true + }, + { + "false": null, + "foo": { + "bar": 42, + "some_sops": [ + 3, + "three" + ] + }, + "tru": true + }, + { + "baz": { + "array": [ + 42, + { + "true": false + }, + { + "frob": "bob", + "inner_sops": 15 + } + ], + "nullish": null, + "other": true, + "s1": 42 + }, + "foo": "bar", + "s2": "final secret!" + } + ] + "###); + } +} diff --git a/crates/runtime/src/unseal/testdata/empty-input.json b/crates/runtime/src/unseal/testdata/empty-input.json new file mode 100644 index 0000000000..90d7ddecd8 --- /dev/null +++ b/crates/runtime/src/unseal/testdata/empty-input.json @@ -0,0 +1,20 @@ +{ + "sops": { + "kms": null, + "gcp_kms": [ + { + "resource_id": "projects/helpful-kingdom-273219/locations/us-central1/keyRings/dev/cryptoKeys/CI-estuary-flow", + "created_at": "2022-12-20T18:14:31Z", + "enc": "CiQAW8BC2D8sXCQR40IQakhxkx9r1G5XKvWsOFJRjgKDI+e2tacSSABwmcO9WrY5A39M8T6zXKZPX2c2kHmnL0HfpivJ7Z1zvgaLtBbzvO3arEHGXskaSk6vqb/IinmnozQr6ICYZRymgYa/Sy4cEw==" + } + ], + "azure_kv": null, + "hc_vault": null, + "age": null, + "lastmodified": "2022-12-20T18:14:31Z", + "mac": "ENC[AES256_GCM,data:KslcD3jvHUPKmCL7pf6uQ4PiGB1XZ833I/Yffc5D5e/wcNbjPYEUcRW/hUoHI+mYoysvTPSlwMoaprv2oT2/vnI8leDlVBivf0y2/DaDN+q8WEhbST9Mhjt1eBvHjwyG4zRoUFpjOv/5gOxT2PHOdZ2XEplMaMPONkNQZ45LeXs=,iv:3hIf2QPoWNiTiMWz9qG8L922PqFQ3L3FotzJ5OaM9xk=,tag:BPF0R1rrM+2nbzSJaCMYSA==,type:str]", + "pgp": null, + "encrypted_suffix": "_sops", + "version": "3.7.3" + } +} diff --git a/crates/runtime/src/unseal/testdata/hyphen-suffix.json b/crates/runtime/src/unseal/testdata/hyphen-suffix.json new file mode 100644 index 0000000000..f6360d716c --- /dev/null +++ b/crates/runtime/src/unseal/testdata/hyphen-suffix.json @@ -0,0 +1,34 @@ +{ + "notsops": "bar", + "stuff": { + "s1-sops": "ENC[AES256_GCM,data:+zlLc/EtsQ==,iv:UYapxOwjD5/6u3KiehxvIEf/Wr8vbQ6KHCjkYmqtXiA=,tag:6KYPdl9rwMYj2g7atddIjQ==,type:str]", + "other": true, + "nullish": null, + "array": [ + 42, + { + "inner-sops-sops": "ENC[AES256_GCM,data:LZDdHAoxJg0EDwJV9A==,iv:3I5mPX3HcHrC+ni8nd00Bq9w6RyAwlbqNXXreHxk84c=,tag:TU1yh2fab89FLfsn++FUcQ==,type:str]", + "frob": "bob" + } + ] + }, + "s2-sops": "ENC[AES256_GCM,data:0gfvgHsWhRuxvxzC,iv:Xzz3cSP4oDYKfVnplz5QL2X/c8B7gv5D2nF8fD+JlxI=,tag:eiKZCNcvfnbbBUeylLZhQg==,type:str]", + "sops": { + "kms": null, + "gcp_kms": [ + { + "resource_id": 
"projects/helpful-kingdom-273219/locations/us-central1/keyRings/dev/cryptoKeys/CI-estuary-flow", + "created_at": "2021-11-28T19:17:34Z", + "enc": "CiQAW8BC2Lx55101CNYhok8Zk7hJuzPniI7CQB2vg8A4wYOV658SSQCnySJKmieqylqbSLD29qAf19zktmpD8XRwREVeNjUuOHkQAfG3LAei3OmgHwD0LA4/K228JWisf0GnarEfAQfxwiRMhmDfQpY=" + } + ], + "azure_kv": null, + "hc_vault": null, + "age": null, + "lastmodified": "2021-11-28T19:19:48Z", + "mac": "ENC[AES256_GCM,data:q5ptIuwDpUSF9G69QqidBhDTY90xhuKYHtrFii32C4u4JSrw9AeulmwWCqw5ireCMTVrxibmcJGb7DIpdJV0BUf6nys4oh1xK0A1veE0BGd9bezamC89r4z7x8XoTTPOOIggpc854m3MVr+YjqJZfpVaTQ1ihgM9La5o3JZ+2Ss=,iv:g6TfjVIBYNy0gaX5kNAWEdAgMlfPFuLR5B2MAZFunkI=,tag:tw0CYcYYxQ4OE4UmO5EzDQ==,type:str]", + "pgp": null, + "encrypted_suffix": "-sops", + "version": "3.7.1" + } +} \ No newline at end of file diff --git a/crates/runtime/src/unseal/testdata/no-suffix.json b/crates/runtime/src/unseal/testdata/no-suffix.json new file mode 100644 index 0000000000..3de544e19f --- /dev/null +++ b/crates/runtime/src/unseal/testdata/no-suffix.json @@ -0,0 +1,29 @@ +{ + "foo": { + "bar": "ENC[AES256_GCM,data:JMc=,iv:C2f8jc7lL1BtVGWp6NSnKp8+WFg8dd90vKPcoYqBFc4=,tag:bH9kEzFsL4D6Ua/rgDTKog==,type:float]", + "some_sops": [ + "ENC[AES256_GCM,data:Pg==,iv:8DGbYxbIhuDkQSQ9ZKBn/xRk1DnxfzMSRYiSVhe93PU=,tag:tyEvtNl6iq29h4vlWDf8/g==,type:float]", + "ENC[AES256_GCM,data:vDaeb+g=,iv:EFxSHUjYdlj/imKk5Rk4UohbA80CHNBwxDC+pBhhSDw=,tag:kRYCbEkLTz1IemMHyKLyAQ==,type:str]" + ] + }, + "tru": "ENC[AES256_GCM,data:1UL9ow==,iv:rda+o6KfLbWuEIb5FfPN2OgwiXwohtor66fSS5t+OMA=,tag:EtW4i7/9d/ZNcJUA87Jw3A==,type:bool]", + "false": null, + "sops": { + "kms": null, + "gcp_kms": [ + { + "resource_id": "projects/helpful-kingdom-273219/locations/us-central1/keyRings/dev/cryptoKeys/CI-estuary-flow", + "created_at": "2021-11-28T19:07:38Z", + "enc": "CiQAW8BC2Encb0B8k/GsRZ7uEZw/6NDfS86A0kKBTCr7Hka1AlsSSQCnySJK1y4Dmurkgw8Jc2DbHc7aeyVTMw0f8kaNsFNrQMab46iohqZbN8zVzpH4ArQY7lLeKGYQPctFB7IjOB/HExVkmi0KvJk=" + } + ], + "azure_kv": null, + "hc_vault": null, + "age": null, + "lastmodified": "2021-11-28T19:07:39Z", + "mac": "ENC[AES256_GCM,data:P513NLVRtvjpgfpYUAyliI6/LfeWuD3iOW0R7tMuzPOPtUZ8qiorAG+0FbPfOsLV4Wf2R95CKQwb8+oVVTgJqCh4A8l64S3GFvS3rmUC4uLm3kjLjrYsAx+UiiFp+DrxjnPr1s7ufAiZ5mW+mlAwwtCI7VN0ieRjqUuCVSIQJiA=,iv:Z2QT31Ru2JnsEUBJrxAA8HHNfUt3lHj72Lbu6FFo4Oo=,tag:PGhz0U3gjCs0SMT1x7x7dw==,type:str]", + "pgp": null, + "unencrypted_suffix": "_unencrypted", + "version": "3.7.1" + } +} \ No newline at end of file diff --git a/crates/runtime/src/unseal/testdata/not-encrypted.json b/crates/runtime/src/unseal/testdata/not-encrypted.json new file mode 100644 index 0000000000..ed9970c15f --- /dev/null +++ b/crates/runtime/src/unseal/testdata/not-encrypted.json @@ -0,0 +1,8 @@ +{ + "foo": { + "bar": 42, + "some_sops": [3, "three"] + }, + "tru": true, + "false": null +} \ No newline at end of file diff --git a/crates/runtime/src/unseal/testdata/under-suffix.json b/crates/runtime/src/unseal/testdata/under-suffix.json new file mode 100644 index 0000000000..5270eae10f --- /dev/null +++ b/crates/runtime/src/unseal/testdata/under-suffix.json @@ -0,0 +1,37 @@ +{ + "foo": "bar", + "baz": { + "s1_sops": "ENC[AES256_GCM,data:iK8=,iv:6eUwxTe2WrzThM/IrxXdtRKylVUl4FRAVW034jMj/Fc=,tag:MwOuXmHI2T3doLkuOS9cwQ==,type:float]", + "other": true, + "nullish": null, + "array": [ + 42, + { + "true": false + }, + { + "inner_sops_sops": "ENC[AES256_GCM,data:lwA=,iv:O1HthVOektVpxMQg7uYwHeqvwQldlx+CHTG56pIr7Uw=,tag:8GXjnRSSIBnpl+ZC0s6IZQ==,type:float]", + "frob": "bob" + } + ] + }, + "s2_sops": 
"ENC[AES256_GCM,data:1ZQD0AlT235h/OxZGw==,iv:eCYqtbQduexJK/Yrm6+fIJ/pEXxPDKLdAPvFNKNtbns=,tag:BDzNg4Egi+J4sFAlwo7nFQ==,type:str]", + "sops": { + "kms": null, + "gcp_kms": [ + { + "resource_id": "projects/helpful-kingdom-273219/locations/us-central1/keyRings/dev/cryptoKeys/CI-estuary-flow", + "created_at": "2021-11-28T19:15:05Z", + "enc": "CiQAW8BC2O+vyi60ZvLG6qCI/YVpp8LW34A2iXzmWLDVTliY4UESSQCnySJKRYBit2iZgyWUeVvObhBPO9Hp0Yw0jVY+s6Q57o3gGvs2VyvMcSsRsCRKvXTAZU239Q/cc2GQ2bQBULXrdjwa3zj8oMY=" + } + ], + "azure_kv": null, + "hc_vault": null, + "age": null, + "lastmodified": "2021-11-28T19:20:40Z", + "mac": "ENC[AES256_GCM,data:WQHGlU8pUjVwCWJnqYiF2ZW1TpDFHRWeq0LClIywiEytRkAc5UEFNINO+icF+dyD5R6wuGq9l10IqCk1aJurwmgO3tfKhCdDUPnXxLubwN0dmhana06ltukykWHV7E65LUECgud7/YckbNnicDl4xbZ6R36b0NrFXM2AY44jwyc=,iv:lIsOQvTsH3CFKV+ECQGrRShFlUE6qwGQK1zhcPbwlS4=,tag:FiqIHKEp5qpz3qD8vbEaxA==,type:str]", + "pgp": null, + "encrypted_suffix": "_sops", + "version": "3.7.1" + } +} \ No newline at end of file diff --git a/crates/schemalate/src/firebolt/firebolt_schema_builder.rs b/crates/schemalate/src/firebolt/firebolt_schema_builder.rs index 60ad7d147f..1cd58b590b 100644 --- a/crates/schemalate/src/firebolt/firebolt_schema_builder.rs +++ b/crates/schemalate/src/firebolt/firebolt_schema_builder.rs @@ -196,42 +196,47 @@ mod tests { #[test] fn test_build_firebolt_queries_bundle() { - assert_eq!( - build_firebolt_queries_bundle(MaterializationSpec { - config_json: json!({ - "aws_key_id": "aws_key", - "aws_secret_key": "aws_secret", - "s3_bucket": "my-bucket", - "s3_prefix": "/test" - }).to_string(), - bindings: vec![Binding { - resource_config_json: json!({ - "table": "test_table", - "table_type": "fact" - }).to_string(), - field_selection: Some(FieldSelection { - keys: vec!["test".to_string()], - ..Default::default() - }), - collection: Some(CollectionSpec { - write_schema_json: json!({ - "properties": { - "test": {"type": "string"}, - }, - "required": ["test"], - "type": "object" - }).to_string(), - projections: vec![Projection { - field: "test".to_string(), - ptr: "/test".to_string(), - ..Default::default() - }], - ..Default::default() - }), + let mut spec = MaterializationSpec::default(); + + spec.config_json = json!({ + "aws_key_id": "aws_key", + "aws_secret_key": "aws_secret", + "s3_bucket": "my-bucket", + "s3_prefix": "/test" + }) + .to_string(); + + spec.bindings = vec![Binding { + resource_config_json: json!({ + "table": "test_table", + "table_type": "fact" + }) + .to_string(), + field_selection: Some(FieldSelection { + keys: vec!["test".to_string()], + ..Default::default() + }), + collection: Some(CollectionSpec { + write_schema_json: json!({ + "properties": { + "test": {"type": "string"}, + }, + "required": ["test"], + "type": "object" + }) + .to_string(), + projections: vec![Projection { + field: "test".to_string(), + ptr: "/test".to_string(), ..Default::default() }], ..Default::default() - }) + }), + ..Default::default() + }]; + + assert_eq!( + build_firebolt_queries_bundle(spec) .unwrap(), FireboltQueriesBundle { bindings: vec![BindingBundle { diff --git a/crates/sources/src/lib.rs b/crates/sources/src/lib.rs index 1aea8b18dd..405b3f3686 100644 --- a/crates/sources/src/lib.rs +++ b/crates/sources/src/lib.rs @@ -10,7 +10,7 @@ pub use indirect::{indirect_large_files, rebuild_catalog_resources}; pub use inline::inline_sources; pub use bundle_schema::bundle_schema; -pub use loader::{FetchFuture, Fetcher, LoadError, Loader}; +pub use loader::{Fetcher, LoadError, Loader}; pub use scope::Scope; #[derive(Copy, 
Clone, Debug)] diff --git a/crates/sources/src/loader.rs b/crates/sources/src/loader.rs index 73ea43bb4c..b5bac24a30 100644 --- a/crates/sources/src/loader.rs +++ b/crates/sources/src/loader.rs @@ -1,10 +1,10 @@ use super::Scope; use doc::Schema as CompiledSchema; -use futures::future::{FutureExt, LocalBoxFuture}; +use futures::future::{BoxFuture, FutureExt}; use json::schema::{self, build::build_schema}; use models::RawValue; use proto_flow::flow; -use std::cell::RefCell; +use std::sync::{Mutex, MutexGuard}; use url::Url; #[derive(thiserror::Error, Debug)] @@ -36,21 +36,15 @@ pub enum LoadError { ResourceNotUTF8, } -// FetchResult is the result type of a fetch operation, -// and returns the resolved content of the resource. -pub type FetchResult = Result; -// FetchFuture is a Future of FetchResult. -pub type FetchFuture<'a> = LocalBoxFuture<'a, FetchResult>; - /// Fetcher resolves a resource URL to its byte content. -pub trait Fetcher { +pub trait Fetcher: Send + Sync { fn fetch<'a>( &'a self, // Resource to fetch. resource: &'a Url, // Expected content type of the resource. content_type: flow::ContentType, - ) -> FetchFuture<'a>; + ) -> BoxFuture<'a, anyhow::Result>; } /// Loader provides a stack-based driver for traversing catalog source @@ -58,7 +52,10 @@ pub trait Fetcher { /// tracking of location context. pub struct Loader { // Tables loaded by the build process. - tables: RefCell, + // `tables` is never held across await points or accessed across threads, and the + // tables_mut() accessor asserts that no other lock is held and does not block. + // Wrapping in a Mutex makes it easy to pass around futures holding a Loader. + tables: Mutex, // Fetcher for retrieving discovered, unvisited resources. fetcher: F, } @@ -67,13 +64,13 @@ impl Loader { /// Build and return a new Loader. pub fn new(tables: tables::Sources, fetcher: F) -> Loader { Loader { - tables: RefCell::new(tables), + tables: Mutex::new(tables), fetcher, } } pub fn into_tables(self) -> tables::Sources { - self.tables.into_inner() + std::mem::take(&mut *self.tables_mut()) } /// Load (or re-load) a resource of the given ContentType. @@ -84,7 +81,7 @@ impl Loader { content_type: flow::ContentType, ) { if resource.fragment().is_some() { - self.tables.borrow_mut().errors.insert_row( + self.tables_mut().errors.insert_row( &scope.flatten(), anyhow::anyhow!(LoadError::Fetch { uri: resource.clone(), @@ -96,8 +93,7 @@ impl Loader { } // Mark as visited, so that recursively-loaded imports don't re-visit. 
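To make the revised `Fetcher` contract above concrete, here is a minimal illustrative implementation that is not part of this patch. It assumes fetched content is `bytes::Bytes`, matching how loaded content is passed to `load_resource_content` below:

use futures::future::{BoxFuture, FutureExt};
use proto_flow::flow;
use std::collections::BTreeMap;
use url::Url;

// Hypothetical fetcher that resolves resources from a fixed in-memory map.
struct MapFetcher {
    resources: BTreeMap<Url, bytes::Bytes>,
}

impl sources::Fetcher for MapFetcher {
    fn fetch<'a>(
        &'a self,
        resource: &'a Url,
        _content_type: flow::ContentType,
    ) -> BoxFuture<'a, anyhow::Result<bytes::Bytes>> {
        let result = self
            .resources
            .get(resource)
            .cloned()
            .ok_or_else(|| anyhow::anyhow!("no fixture for {resource}"));
        // Because the trait now requires Send + Sync and a Send future, a Loader
        // holding this fetcher can be driven from a multi-threaded runtime.
        async move { result }.boxed()
    }
}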
- self.tables - .borrow_mut() + self.tables_mut() .fetches .insert_row(scope.resource_depth() as u32, resource); @@ -110,7 +106,7 @@ impl Loader { .await } Err(err) => { - self.tables.borrow_mut().errors.insert_row( + self.tables_mut().errors.insert_row( &scope.flatten(), anyhow::anyhow!(LoadError::Fetch { uri: resource.clone(), @@ -131,7 +127,7 @@ impl Loader { resource: &'a Url, content: bytes::Bytes, content_type: flow::ContentType, - ) -> LocalBoxFuture<'a, ()> { + ) -> BoxFuture<'a, ()> { async move { let scope = scope.push_resource(&resource); @@ -174,7 +170,7 @@ impl Loader { } else if let Ok(content) = std::str::from_utf8(&content) { RawValue::from_string(serde_json::to_string(&content).unwrap()).unwrap() } else { - self.tables.borrow_mut().errors.insert_row( + self.tables_mut().errors.insert_row( &scope.flatten(), anyhow::anyhow!(LoadError::ResourceNotUTF8), ); @@ -194,7 +190,7 @@ impl Loader { _ => {} }; - self.tables.borrow_mut().resources.insert_row( + self.tables_mut().resources.insert_row( resource.clone(), content_type, content, @@ -203,7 +199,7 @@ impl Loader { None } .map(|_: Option<()>| ()) - .boxed_local() + .boxed() } async fn load_schema_document<'s>( @@ -229,7 +225,7 @@ impl Loader { scope: Scope<'s>, index: &'s doc::SchemaIndex<'s>, schema: &'s CompiledSchema, - ) -> LocalBoxFuture<'s, ()> { + ) -> BoxFuture<'s, ()> { let mut tasks = Vec::with_capacity(schema.kw.len()); // Walk keywords, looking for named schemas and references we must resolve. @@ -274,7 +270,7 @@ impl Loader { futures::future::join_all(tasks.into_iter()) .map(|_: Vec<()>| ()) - .boxed_local() + .boxed() } /// Load a schema reference, which may be an inline schema. @@ -331,8 +327,7 @@ impl Loader { // Recursively process the import if it's not already visited. 
if !self - .tables - .borrow_mut() + .tables_mut() .fetches .iter() .any(|f| f.resource == import) @@ -340,8 +335,7 @@ impl Loader { self.load_resource(scope, &import, content_type).await; } - self.tables - .borrow_mut() + self.tables_mut() .imports .insert_row(scope.flatten(), import); } @@ -374,7 +368,7 @@ impl Loader { ) .await; } - .boxed_local(), + .boxed(), ); } @@ -385,7 +379,7 @@ impl Loader { self.load_capture(scope.push_prop("captures").push_prop(&name), &name, capture) .await; } - .boxed_local(), + .boxed(), ); } @@ -400,7 +394,7 @@ impl Loader { ) .await; } - .boxed_local(), + .boxed(), ); } @@ -415,7 +409,7 @@ impl Loader { ) .await; } - .boxed_local(), + .boxed(), ); } @@ -426,7 +420,7 @@ impl Loader { self.load_test(scope.push_prop("tests").push_prop(&name), &name, test) .await; } - .boxed_local(), + .boxed(), ); } @@ -434,7 +428,7 @@ impl Loader { for (prefix, storage) in storage_mappings.into_iter() { let models::StorageDef { stores } = storage; - self.tables.borrow_mut().storage_mappings.insert_row( + self.tables_mut().storage_mappings.insert_row( scope .push_prop("storageMappings") .push_prop(prefix.as_str()) @@ -458,32 +452,31 @@ impl Loader { if let Some(schema) = &spec.schema { tasks.push( self.load_schema_reference(scope.push_prop("schema"), schema) - .boxed_local(), + .boxed(), ); } if let Some(schema) = &spec.write_schema { tasks.push( self.load_schema_reference(scope.push_prop("writeSchema"), schema) - .boxed_local(), + .boxed(), ); } if let Some(schema) = &spec.read_schema { tasks.push( self.load_schema_reference(scope.push_prop("readSchema"), schema) - .boxed_local(), + .boxed(), ); } if let Some(derive) = &spec.derive { tasks.push( self.load_derivation(scope.push_prop("derive"), derive) - .boxed_local(), + .boxed(), ); } let _: Vec<()> = futures::future::join_all(tasks.into_iter()).await; - self.tables - .borrow_mut() + self.tables_mut() .collections .insert_row(scope.flatten(), collection_name, spec) } @@ -504,7 +497,7 @@ impl Loader { ) .await } - .boxed_local(), + .boxed(), ); } models::DeriveUsing::Sqlite(models::DeriveUsingSqlite { migrations }) => { @@ -521,7 +514,7 @@ impl Loader { ) .await } - .boxed_local(), + .boxed(), ); } } @@ -537,7 +530,7 @@ impl Loader { ) .await } - .boxed_local(), + .boxed(), ); } }; @@ -554,7 +547,7 @@ impl Loader { ) .await } - .boxed_local(), + .boxed(), ); if let models::Shuffle::Lambda(lambda) = &transform.shuffle { @@ -570,7 +563,7 @@ impl Loader { ) .await } - .boxed_local(), + .boxed(), ); } } @@ -599,7 +592,7 @@ impl Loader { ) .await } - .boxed_local(), + .boxed(), ); } }; @@ -616,14 +609,13 @@ impl Loader { ) .await } - .boxed_local(), + .boxed(), ); } let _: Vec<()> = futures::future::join_all(tasks.into_iter()).await; - self.tables - .borrow_mut() + self.tables_mut() .captures .insert_row(scope.flatten(), capture_name, spec); } @@ -651,7 +643,7 @@ impl Loader { ) .await } - .boxed_local(), + .boxed(), ); } models::MaterializationEndpoint::Sqlite(_sqlite) => {} @@ -670,18 +662,16 @@ impl Loader { ) .await } - .boxed_local(), + .boxed(), ); } } let _: Vec<()> = futures::future::join_all(tasks.into_iter()).await; - self.tables.borrow_mut().materializations.insert_row( - scope.flatten(), - materialization_name, - spec, - ); + self.tables_mut() + .materializations + .insert_row(scope.flatten(), materialization_name, spec); } async fn load_test<'s>( @@ -706,8 +696,7 @@ impl Loader { let _: Vec<()> = futures::future::join_all(tasks.into_iter()).await; - self.tables - .borrow_mut() + self.tables_mut() .tests 
.insert_row(scope.flatten(), test_name.clone(), spec); } @@ -738,12 +727,17 @@ impl Loader { match r { Ok(t) => Some(t), Err(err) => { - self.tables - .borrow_mut() + self.tables_mut() .errors .insert_row(scope.flatten(), anyhow::anyhow!(err.into())); None } } } + + fn tables_mut<'a>(&'a self) -> MutexGuard<'a, tables::Sources> { + self.tables + .try_lock() + .expect("tables should never be accessed concurrently or locked across await points") + } } diff --git a/crates/sources/src/scenarios/mod.rs b/crates/sources/src/scenarios/mod.rs index e8b64c733f..d471c400e1 100644 --- a/crates/sources/src/scenarios/mod.rs +++ b/crates/sources/src/scenarios/mod.rs @@ -1,9 +1,9 @@ use crate::{Fetcher, Loader, Scope}; use futures::channel::oneshot; -use futures::future::{FutureExt, LocalBoxFuture}; +use futures::future::{BoxFuture, FutureExt}; use proto_flow::flow; -use std::cell::RefCell; use std::collections::BTreeMap; +use std::sync::Mutex; use std::task::Poll; use url::Url; @@ -55,7 +55,7 @@ mod test { } // MockFetcher queues and returns oneshot futures for started fetches. struct MockFetcher<'f> { - fetches: &'f RefCell>>>, + fetches: &'f Mutex>>>, } impl<'f> Fetcher for MockFetcher<'f> { @@ -63,13 +63,18 @@ impl<'f> Fetcher for MockFetcher<'f> { &self, resource: &'a Url, _content_type: flow::ContentType, - ) -> LocalBoxFuture<'a, Result> { + ) -> BoxFuture<'a, anyhow::Result> { let (tx, rx) = oneshot::channel(); - if let Some(_) = self.fetches.borrow_mut().insert(resource.to_string(), tx) { + if let Some(_) = self + .fetches + .try_lock() + .unwrap() + .insert(resource.to_string(), tx) + { panic!("resource {} has already been fetched", resource); } - rx.map(|r| r.unwrap()).boxed_local() + rx.map(|r| r.unwrap()).boxed() } } @@ -82,7 +87,7 @@ pub fn evaluate_fixtures(catalog: tables::Sources, fixture: &serde_json::Value) // Fetches holds started fetches since the last future poll. // Use an ordered map so that we signal one-shots in a stable order, // making snapshots reliable. - let fetches = RefCell::new(BTreeMap::new()); + let fetches = Mutex::new(BTreeMap::new()); let loader = Loader::new(catalog, MockFetcher { fetches: &fetches }); let root = Url::parse("test://example/catalog.yaml").unwrap(); @@ -128,13 +133,13 @@ pub fn evaluate_fixtures(catalog: tables::Sources, fixture: &serde_json::Value) std::mem::drop(fut); return loader.into_tables(); } - Poll::Pending if fetches.borrow().is_empty() => { + Poll::Pending if fetches.try_lock().unwrap().is_empty() => { // Note the future can return Pending *only because* it's blocked // waiting for one or more |fetch| fixtures above to resolve. panic!("future is pending, but started no fetches") } Poll::Pending => { - for (url, tx) in fetches.borrow_mut().split_off("") { + for (url, tx) in fetches.try_lock().unwrap().split_off("") { match fixtures.get(&url) { Some(serde_json::Value::String(value)) => tx.send(Ok(value.clone().into())), Some(value) => tx.send(Ok(serde_json::to_vec(&value).unwrap().into())), diff --git a/crates/tables/src/lib.rs b/crates/tables/src/lib.rs index ef89b12733..fa390787a1 100644 --- a/crates/tables/src/lib.rs +++ b/crates/tables/src/lib.rs @@ -147,61 +147,37 @@ pub struct Validations { pub errors: Errors, } -/// All combines Sources and Validations: -/// * errors of the respective tables are combined. -/// * Validations::implicit_projections is folded into Sources::projections. 
-#[derive(Default, Debug)] -pub struct All { - pub built_captures: BuiltCaptures, - pub built_collections: BuiltCollections, - pub built_materializations: BuiltMaterializations, - pub built_tests: BuiltTests, - pub captures: Captures, - pub collections: Collections, - pub errors: Errors, - pub fetches: Fetches, - pub imports: Imports, - pub materializations: Materializations, - pub meta: Meta, - pub resources: Resources, - pub storage_mappings: StorageMappings, - pub tests: Tests, -} - #[cfg(feature = "persist")] -impl All { +impl Sources { + pub fn into_result(mut self) -> Result { + match std::mem::take(&mut self.errors) { + errors if errors.is_empty() => Ok(self), + errors => Err(errors), + } + } + // Access all tables as an array of dynamic TableObj instances. pub fn as_tables(&self) -> Vec<&dyn SqlTableObj> { // This de-structure ensures we can't fail to update as tables change. let Self { - built_captures, - built_collections, - built_materializations, - built_tests, captures, collections, errors, fetches, imports, materializations, - meta, resources, storage_mappings, tests, } = self; vec![ - built_captures, - built_collections, - built_materializations, - built_tests, captures, collections, errors, fetches, imports, materializations, - meta, resources, storage_mappings, tests, @@ -211,34 +187,24 @@ impl All { // Access all tables as an array of mutable dynamic SqlTableObj instances. pub fn as_tables_mut(&mut self) -> Vec<&mut dyn SqlTableObj> { let Self { - built_captures, - built_collections, - built_materializations, - built_tests, captures, collections, errors, fetches, imports, materializations, - meta, resources, storage_mappings, tests, } = self; vec![ - built_captures, - built_collections, - built_materializations, - built_tests, captures, collections, errors, fetches, imports, materializations, - meta, resources, storage_mappings, tests, @@ -246,6 +212,55 @@ impl All { } } +#[cfg(feature = "persist")] +impl Validations { + pub fn into_result(mut self) -> Result { + match std::mem::take(&mut self.errors) { + errors if errors.is_empty() => Ok(self), + errors => Err(errors), + } + } + + // Access all tables as an array of dynamic TableObj instances. + pub fn as_tables(&self) -> Vec<&dyn SqlTableObj> { + // This de-structure ensures we can't fail to update as tables change. + let Self { + built_captures, + built_collections, + built_materializations, + built_tests, + errors, + } = self; + + vec![ + built_captures, + built_collections, + built_materializations, + built_tests, + errors, + ] + } + + // Access all tables as an array of mutable dynamic SqlTableObj instances. + pub fn as_tables_mut(&mut self) -> Vec<&mut dyn SqlTableObj> { + let Self { + built_captures, + built_collections, + built_materializations, + built_tests, + errors, + } = self; + + vec![ + built_captures, + built_collections, + built_materializations, + built_tests, + errors, + ] + } +} + // macros::TableColumn implementations for table columns. 
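A brief illustration of the new `into_result` helpers above (available under the `persist` feature); the wrapper function is hypothetical:

// Require an error-free build before persisting or validating further.
fn require_ok(
    sources: tables::Sources,
    validations: tables::Validations,
) -> Result<(tables::Sources, tables::Validations), tables::Errors> {
    let sources = sources.into_result()?;
    let validations = validations.into_result()?;
    Ok((sources, validations))
}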
primitive_sql_types!( diff --git a/crates/validation/src/capture.rs b/crates/validation/src/capture.rs index 9c3518b900..3b571e0dcf 100644 --- a/crates/validation/src/capture.rs +++ b/crates/validation/src/capture.rs @@ -1,13 +1,12 @@ -use super::{image, indexed, reference, storage_mapping, Connectors, Error, NoOpDrivers, Scope}; +use super::{indexed, reference, storage_mapping, Connectors, Error, NoOpConnectors, Scope}; use itertools::Itertools; use proto_flow::{capture, flow}; -pub async fn walk_all_captures( - build_config: &flow::build_api::Config, +pub async fn walk_all_captures( + build_id: &str, built_collections: &[tables::BuiltCollection], captures: &[tables::Capture], - connectors: &C, - images: &[image::Image], + connectors: &dyn Connectors, storage_mappings: &[tables::StorageMapping], errors: &mut tables::Errors, ) -> tables::BuiltCaptures { @@ -15,8 +14,7 @@ pub async fn walk_all_captures( for capture in captures { let mut capture_errors = tables::Errors::new(); - let validation = - walk_capture_request(built_collections, capture, images, &mut capture_errors); + let validation = walk_capture_request(built_collections, capture, &mut capture_errors); // Skip validation if errors were encountered while building the request. if !capture_errors.is_empty() { @@ -30,13 +28,16 @@ pub async fn walk_all_captures( let validations = validations .into_iter() .map(|(capture, request)| async move { - // If shards are disabled, then don't ask the connector to validate. Users may - // disable captures in response to the source system being unreachable, and we - // wouldn't want a validation error for a disabled task to terminate the build. + let wrapped = capture::Request { + validate: Some(request.clone()), + ..Default::default() + }; + // If shards are disabled, then don't ask the connector to validate. + // A broken but disabled endpoint should not cause a build to fail. let response = if capture.spec.shards.disable { - NoOpDrivers {}.validate_capture(request.clone()) + NoOpConnectors.validate_capture(wrapped) } else { - connectors.validate_capture(request.clone()) + connectors.validate_capture(wrapped) }; (capture, request, response.await) }); @@ -44,12 +45,12 @@ pub async fn walk_all_captures( let validations: Vec<( &tables::Capture, capture::request::Validate, - anyhow::Result, + anyhow::Result, )> = futures::future::join_all(validations).await; let mut built_captures = tables::BuiltCaptures::new(); - for (capture, request, response) in validations { + for (capture, mut request, response) in validations { let tables::Capture { scope, capture: _, @@ -60,12 +61,12 @@ pub async fn walk_all_captures( let scope = Scope::new(scope); // Unwrap `response` and bail out if it failed. - let validated = match response { + let (validated, network_ports) = match extract_validated(response) { Err(err) => { - Error::Connector { detail: err }.push(scope, errors); + err.push(scope, errors); continue; } - Ok(response) => response, + Ok(ok) => ok, }; let capture::request::Validate { @@ -73,8 +74,7 @@ pub async fn walk_all_captures( config_json, bindings: binding_requests, name, - network_ports, - } = request; + } = &mut request; let capture::response::Validated { bindings: binding_responses, @@ -90,7 +90,7 @@ pub async fn walk_all_captures( // Join requests and responses to produce tuples // of (binding index, built binding). 
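        // Requests are consumed via std::mem::take so that bindings, config, and
        // name can be moved into the built spec without cloning.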
- let built_bindings: Vec<_> = binding_requests + let built_bindings: Vec<_> = std::mem::take(binding_requests) .into_iter() .zip(binding_responses.into_iter()) .enumerate() @@ -145,18 +145,18 @@ pub async fn walk_all_captures( let spec = flow::CaptureSpec { name: name.clone(), - connector_type, - config_json, + connector_type: *connector_type, + config_json: std::mem::take(config_json), bindings: built_bindings, interval_seconds: interval.as_secs() as u32, recovery_log_template: Some(assemble::recovery_log_template( - build_config, + build_id, &name, labels::TASK_TYPE_CAPTURE, recovery_stores, )), shard_template: Some(assemble::shard_template( - build_config, + build_id, &name, labels::TASK_TYPE_CAPTURE, &shards, @@ -165,7 +165,7 @@ pub async fn walk_all_captures( )), network_ports, }; - built_captures.insert_row(scope.flatten(), name, validated, spec); + built_captures.insert_row(scope.flatten(), std::mem::take(name), validated, spec); } built_captures @@ -174,19 +174,14 @@ pub async fn walk_all_captures( fn walk_capture_request<'a>( built_collections: &'a [tables::BuiltCollection], capture: &'a tables::Capture, - images: &[image::Image], errors: &mut tables::Errors, ) -> Option<(&'a tables::Capture, capture::request::Validate)> { let tables::Capture { scope, capture: name, - spec: - models::CaptureDef { - endpoint, - bindings, - shards, - .. - }, + spec: models::CaptureDef { + endpoint, bindings, .. + }, } = capture; let scope = Scope::new(scope); @@ -199,20 +194,10 @@ fn walk_capture_request<'a>( errors, ); - let (connector_type, config_json, network_ports) = match endpoint { + let (connector_type, config_json) = match endpoint { models::CaptureEndpoint::Connector(config) => ( flow::capture_spec::ConnectorType::Image as i32, serde_json::to_string(config).unwrap(), - image::walk_image_network_ports( - scope - .push_prop("endpoint") - .push_prop("connector") - .push_prop("image"), - shards.disable, - &config.image, - images, - errors, - ), ), }; @@ -243,7 +228,6 @@ fn walk_capture_request<'a>( connector_type, config_json, bindings, - network_ports, }; Some((capture, request)) @@ -279,3 +263,31 @@ fn walk_capture_binding<'a>( Some(request) } + +fn extract_validated( + response: anyhow::Result, +) -> Result<(capture::response::Validated, Vec), Error> { + let response = match response { + Ok(response) => response, + Err(err) => return Err(Error::Connector { detail: err }), + }; + + let internal = match response.get_internal() { + Some(Ok(internal)) => internal, + None => Default::default(), + Some(Err(err)) => { + return Err(Error::Connector { + detail: anyhow::anyhow!("parsing internal: {err}"), + }); + } + }; + + let Some(validated) = response.validated else { + return Err(Error::Connector { + detail: anyhow::anyhow!("expected Validated but got {}", serde_json::to_string(&response).unwrap()), + }); + }; + let network_ports = internal.container.unwrap_or_default().network_ports; + + Ok((validated, network_ports)) +} diff --git a/crates/validation/src/collection.rs b/crates/validation/src/collection.rs index 989b811d58..e3437a7621 100644 --- a/crates/validation/src/collection.rs +++ b/crates/validation/src/collection.rs @@ -4,7 +4,7 @@ use proto_flow::flow; use std::collections::BTreeMap; pub fn walk_all_collections( - build_config: &proto_flow::flow::build_api::Config, + build_id: &str, collections: &[tables::Collection], storage_mappings: &[tables::StorageMapping], errors: &mut tables::Errors, @@ -12,7 +12,7 @@ pub fn walk_all_collections( let mut built_collections = 
tables::BuiltCollections::new(); for collection in collections { - if let Some(spec) = walk_collection(build_config, collection, storage_mappings, errors) { + if let Some(spec) = walk_collection(build_id, collection, storage_mappings, errors) { built_collections.insert_row(&collection.scope, &collection.collection, None, spec); } } @@ -22,7 +22,7 @@ pub fn walk_all_collections( // TODO(johnny): this is temporarily public, as we switch over to built // specs being explicitly represented by the control plane. pub fn walk_collection( - build_config: &proto_flow::flow::build_api::Config, + build_id: &str, collection: &tables::Collection, storage_mappings: &[tables::StorageMapping], errors: &mut tables::Errors, @@ -115,7 +115,7 @@ pub fn walk_collection( ); Some(assemble::collection_spec( - build_config, + build_id, collection, projections, partition_stores, diff --git a/crates/validation/src/derivation.rs b/crates/validation/src/derivation.rs index dc1ef192e5..aa0700088b 100644 --- a/crates/validation/src/derivation.rs +++ b/crates/validation/src/derivation.rs @@ -1,6 +1,4 @@ -use super::{ - collection, image, indexed, reference, schema, storage_mapping, Connectors, Error, Scope, -}; +use super::{collection, indexed, reference, schema, storage_mapping, Connectors, Error, Scope}; use proto_flow::{ derive, flow::{ @@ -10,13 +8,13 @@ use proto_flow::{ }; use superslice::Ext; -pub async fn walk_all_derivations( - build_config: &flow::build_api::Config, +pub async fn walk_all_derivations( + build_id: &str, built_collections: &[tables::BuiltCollection], collections: &[tables::Collection], - connectors: &C, - images: &[image::Image], + connectors: &dyn Connectors, imports: &[tables::Import], + project_root: &url::Url, storage_mappings: &[tables::StorageMapping], errors: &mut tables::Errors, ) -> Vec<( @@ -41,12 +39,11 @@ pub async fn walk_all_derivations( let Ok(built_index) = built_collections.binary_search_by_key(&collection, |b| &b.collection) else { continue }; let validation = walk_derive_request( - build_config, built_collections, built_index, derive, - images, imports, + project_root, &mut derive_errors, ); @@ -62,7 +59,13 @@ pub async fn walk_all_derivations( validations .into_iter() .map(|(built_index, derivation, request)| async move { - let response = connectors.validate_derivation(request.clone()); + let wrapped = derive::Request { + validate: Some(request.clone()), + ..Default::default() + }; + // For the moment, we continue to validate a disabled derivation. + // There's an argument that we shouldn't, but it's currently inconclusive. + let response = connectors.validate_derivation(wrapped); (built_index, derivation, request, response.await) }); @@ -70,12 +73,12 @@ pub async fn walk_all_derivations( usize, &models::Derivation, derive::request::Validate, - anyhow::Result, + anyhow::Result, )> = futures::future::join_all(validations).await; let mut specs = Vec::new(); - for (built_index, derive, request, response) in validations { + for (built_index, derive, mut request, response) in validations { let tables::BuiltCollection { scope, collection: this_collection, @@ -92,12 +95,12 @@ pub async fn walk_all_derivations( } = derive; // Unwrap `response` and bail out if it failed. 
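        // extract_validated (defined at the end of this file) also surfaces the
        // network ports advertised by the connector's internal Container.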
- let validated = match response { + let (validated, network_ports) = match extract_validated(response) { Err(err) => { - Error::Connector { detail: err }.push(scope, errors); + err.push(scope, errors); continue; } - Ok(response) => response, + Ok(ok) => ok, }; let derive::request::Validate { @@ -108,14 +111,23 @@ pub async fn walk_all_derivations( shuffle_key_types, project_root: _, import_map: _, - network_ports, - } = request; + } = &mut request; let derive::response::Validated { + generated_files, transforms: transform_responses, - generated_files: _, } = &validated; + for (maybe_url, _) in generated_files { + if let Err(err) = url::Url::parse(&maybe_url) { + Error::InvalidGeneratedFileUrl { + url: maybe_url.clone(), + detail: err, + } + .push(scope, errors) + } + } + if transform_requests.len() != transform_responses.len() { Error::WrongConnectorBindings { expect: transform_requests.len(), @@ -124,7 +136,7 @@ pub async fn walk_all_derivations( .push(scope, errors); } - let built_transforms: Vec<_> = transform_requests + let built_transforms: Vec<_> = std::mem::take(transform_requests) .into_iter() .zip(transform_responses.into_iter()) .enumerate() @@ -218,18 +230,18 @@ pub async fn walk_all_derivations( ); let spec = flow::collection_spec::Derivation { - connector_type, - config_json, + connector_type: *connector_type, + config_json: std::mem::take(config_json), transforms: built_transforms, - shuffle_key_types, + shuffle_key_types: std::mem::take(shuffle_key_types), recovery_log_template: Some(assemble::recovery_log_template( - build_config, + build_id, name, labels::TASK_TYPE_DERIVATION, recovery_stores, )), shard_template: Some(assemble::shard_template( - build_config, + build_id, name, labels::TASK_TYPE_DERIVATION, &shards, @@ -245,12 +257,11 @@ pub async fn walk_all_derivations( } fn walk_derive_request<'a>( - build_config: &flow::build_api::Config, built_collections: &[tables::BuiltCollection], built_index: usize, derivation: &'a models::Derivation, - images: &[image::Image], imports: &[tables::Import], + project_root: &url::Url, errors: &mut tables::Errors, ) -> Option<(usize, &'a models::Derivation, derive::request::Validate)> { let tables::BuiltCollection { @@ -295,33 +306,21 @@ fn walk_derive_request<'a>( using, transforms, shuffle_key_types: given_shuffle_types, - shards, + shards: _, } = derivation; - let (connector_type, config_json, network_ports) = match using { + let (connector_type, config_json) = match using { models::DeriveUsing::Connector(config) => ( ConnectorType::Image as i32, serde_json::to_string(config).unwrap(), - image::walk_image_network_ports( - scope - .push_prop("using") - .push_prop("connector") - .push_prop("image"), - shards.disable, - &config.image, - images, - errors, - ), ), models::DeriveUsing::Sqlite(config) => ( ConnectorType::Sqlite as i32, serde_json::to_string(config).unwrap(), - Vec::new(), ), models::DeriveUsing::Typescript(config) => ( ConnectorType::Typescript as i32, serde_json::to_string(config).unwrap(), - Vec::new(), ), }; @@ -422,9 +421,8 @@ fn walk_derive_request<'a>( collection: Some(spec.clone()), transforms: transform_requests, shuffle_key_types, - project_root: build_config.project_root.clone(), + project_root: project_root.to_string(), import_map, - network_ports, }; Some((built_index, derivation, request)) @@ -537,3 +535,31 @@ fn walk_derive_transform( Some((request, shuffle_types)) } + +fn extract_validated( + response: anyhow::Result, +) -> Result<(derive::response::Validated, Vec), Error> { + let response = match response 
{ + Ok(response) => response, + Err(err) => return Err(Error::Connector { detail: err }), + }; + + let internal = match response.get_internal() { + Some(Ok(internal)) => internal, + None => Default::default(), + Some(Err(err)) => { + return Err(Error::Connector { + detail: anyhow::anyhow!("parsing internal: {err}"), + }); + } + }; + + let Some(validated) = response.validated else { + return Err(Error::Connector { + detail: anyhow::anyhow!("expected Validated but got {}", serde_json::to_string(&response).unwrap()), + }); + }; + let network_ports = internal.container.unwrap_or_default().network_ports; + + Ok((validated, network_ports)) +} diff --git a/crates/validation/src/errors.rs b/crates/validation/src/errors.rs index 40658e3872..7ba19b76a8 100644 --- a/crates/validation/src/errors.rs +++ b/crates/validation/src/errors.rs @@ -140,11 +140,11 @@ pub enum Error { }, #[error("transform {transform} is missing `shuffle`, which is now a required field (https://go.estuary.dev/LK19Py). If you're unsure of what shuffle to use, try `shuffle: any`")] ShuffleUnset { transform: String }, - #[error("error while extracting metadata from the connector image '{image}'")] - ImageInspectFailed { - image: String, + #[error("connector returned an invalid generated file URL {url:?}")] + InvalidGeneratedFileUrl { + url: String, #[source] - detail: anyhow::Error, + detail: url::ParseError, }, #[error(transparent)] Connector { diff --git a/crates/validation/src/image.rs b/crates/validation/src/image.rs deleted file mode 100644 index 70e6b54010..0000000000 --- a/crates/validation/src/image.rs +++ /dev/null @@ -1,154 +0,0 @@ -use super::{Connectors, Error, Scope}; -use anyhow::Context; -use proto_flow::flow::NetworkPort; -use std::collections::BTreeMap; - -pub struct Image { - image: String, - inspection: anyhow::Result>, -} - -/// Walks every docker image that is used by a validated specification, -/// attempting to inspect each one and memoizing success or an error. -/// The returned Vec is sorted on ascending image. -pub async fn walk_all_images( - connectors: &C, - captures: &[tables::Capture], - collections: &[tables::Collection], - materializations: &[tables::Materialization], -) -> Vec { - let mut used_images = Vec::new(); - - for capture in captures { - let models::CaptureEndpoint::Connector(config) = &capture.spec.endpoint; - used_images.push(config.image.clone()); - } - for collection in collections { - if let models::CollectionDef { - derive: - Some(models::Derivation { - using: models::DeriveUsing::Connector(config), - .. - }), - .. - } = &collection.spec - { - used_images.push(config.image.clone()); - } - } - for materialization in materializations { - if let models::MaterializationEndpoint::Connector(config) = &materialization.spec.endpoint { - used_images.push(config.image.clone()); - } - } - - used_images.sort(); - used_images.dedup(); - - let inspect_results = used_images.into_iter().map(|image| async move { - Image { - image: image.clone(), - inspection: connectors - .inspect_image(image) - .await - .and_then(parse_image_inspection), - } - }); - - futures::future::join_all(inspect_results).await -} - -pub fn walk_image_network_ports( - scope: Scope, - disabled: bool, - image: &str, - images: &[Image], - errors: &mut tables::Errors, -) -> Vec { - let index = images - .binary_search_by_key(&image, |i| &i.image) - .expect("all images were fetched and are in sorted order"); - match (disabled, &images[index].inspection) { - // When disabled, we ignore the outcome of image inspection. 
- // It may be disabled *because* the image is broken. - (true, _) => Vec::new(), - (false, Ok(ports)) => ports.clone(), - (false, Err(error)) => { - Error::ImageInspectFailed { - image: image.to_string(), - // `error` is not Clone and multiple specs could be using this image, - // so we must round-trip through a String encoding. - detail: anyhow::anyhow!("{error:#}"), - } - .push(scope, errors); - - Vec::new() - } - } -} - -fn parse_image_inspection(content: Vec) -> anyhow::Result> { - let deserialized: Vec = serde_json::from_slice(&content).with_context(|| { - format!( - "failed to parse `docker inspect` output: {}", - String::from_utf8_lossy(&content) - ) - })?; - - if deserialized.len() != 1 { - anyhow::bail!("expected 1 image, got {}", deserialized.len()); - } - - let mut ports = Vec::new(); - for (exposed_port, _) in deserialized[0].config.exposed_ports.iter() { - // We're unable to support UDP at this time. - if exposed_port.ends_with("/udp") { - continue; - } - // Technically, the ports are allowed to appear without the '/tcp' suffix, though - // I haven't actually observed that in practice. - let exposed_port = exposed_port.strip_suffix("/tcp").unwrap_or(exposed_port); - let number = exposed_port.parse::().with_context(|| { - format!("invalid key in inspected Config.ExposedPorts '{exposed_port}'") - })?; - - let protocol_label = format!("dev.estuary.port-proto.{number}"); - let protocol = deserialized[0].config.labels.get(&protocol_label).cloned(); - - let public_label = format!("dev.estuary.port-public.{number}"); - let public = deserialized[0] - .config - .labels - .get(&public_label) - .map(String::as_str) - .unwrap_or("false"); - let public = public.parse::() - .with_context(|| format!("invalid '{public_label}' label value: '{public}', must be either 'true' or 'false'"))?; - - ports.push(NetworkPort { - number: number as u32, - protocol: protocol.unwrap_or_default(), - public, - }); - } - - Ok(ports) -} - -#[derive(serde::Deserialize)] -#[serde(rename_all = "PascalCase")] -struct InspectConfig { - /// According to the [OCI spec](https://github.com/opencontainers/image-spec/blob/d60099175f88c47cd379c4738d158884749ed235/config.md?plain=1#L125) - /// `ExposedPorts` is a map where the keys are in the format `1234/tcp`, `456/udp`, or `789` (implicit default of tcp), and the values are - /// empty objects. The choice of `serde_json::Value` here is meant to convey that the actual values are irrelevant. - #[serde(default)] - exposed_ports: BTreeMap, - #[serde(default)] - labels: BTreeMap, -} - -#[derive(serde::Deserialize)] -#[serde(rename_all = "PascalCase")] -struct InspectJson { - config: InspectConfig, -} diff --git a/crates/validation/src/lib.rs b/crates/validation/src/lib.rs index 151db2e945..8e21c200b6 100644 --- a/crates/validation/src/lib.rs +++ b/crates/validation/src/lib.rs @@ -1,4 +1,4 @@ -use futures::future::LocalBoxFuture; +use futures::future::BoxFuture; use itertools::{EitherOrBoth, Itertools}; use sources::Scope; @@ -6,7 +6,6 @@ mod capture; mod collection; mod derivation; mod errors; -mod image; mod indexed; mod materialization; mod noop; @@ -16,33 +15,30 @@ mod storage_mapping; mod test_step; pub use errors::Error; -pub use noop::{NoOpControlPlane, NoOpDrivers}; +pub use noop::{NoOpConnectors, NoOpControlPlane}; /// Connectors is a delegated trait -- provided to validate -- through which -/// connector validation RPCs are dispatched. 
-pub trait Connectors { - fn inspect_image<'a>( - &'a self, - image: String, - ) -> LocalBoxFuture<'a, Result, anyhow::Error>>; - +/// connector validation RPCs are dispatched. Requests will always be Validate variants. +/// We do not pass a Validate directly to reserve the possibility of including additional +/// fields such as `internal`, but do not do so today. +pub trait Connectors: Send + Sync { fn validate_capture<'a>( &'a self, - request: proto_flow::capture::request::Validate, - ) -> LocalBoxFuture<'a, anyhow::Result>; + request: proto_flow::capture::Request, + ) -> BoxFuture<'a, anyhow::Result>; fn validate_derivation<'a>( &'a self, - request: proto_flow::derive::request::Validate, - ) -> LocalBoxFuture<'a, anyhow::Result>; + request: proto_flow::derive::Request, + ) -> BoxFuture<'a, anyhow::Result>; fn validate_materialization<'a>( &'a self, - request: proto_flow::materialize::request::Validate, - ) -> LocalBoxFuture<'a, anyhow::Result>; + request: proto_flow::materialize::Request, + ) -> BoxFuture<'a, anyhow::Result>; } -pub trait ControlPlane { +pub trait ControlPlane: Send + Sync { // Resolve a set of collection names into pre-built CollectionSpecs from // the control plane. Resolution is fuzzy: if there is a spec that's *close* // to a provided name, it will be returned so that a suitable spelling @@ -53,22 +49,23 @@ pub trait ControlPlane { collections: Vec, // These parameters are currently required, but can be removed once we're // actually resolving fuzzy pre-built CollectionSpecs from the control plane. - temp_build_config: &'b proto_flow::flow::build_api::Config, + temp_build_id: &'b str, temp_storage_mappings: &'b [tables::StorageMapping], - ) -> LocalBoxFuture<'a, anyhow::Result>>; + ) -> BoxFuture<'a, anyhow::Result>>; // TODO(johnny): this is a temporary helper which supports the transition // to the control-plane holding built specifications. fn temp_build_collection_helper( + &self, name: String, spec: models::CollectionDef, - build_config: &proto_flow::flow::build_api::Config, + build_id: &str, storage_mappings: &[tables::StorageMapping], ) -> anyhow::Result { let mut errors = tables::Errors::new(); if let Some(built_collection) = collection::walk_collection( - build_config, + build_id, &tables::Collection { scope: url::Url::parse("flow://control-plane").unwrap(), collection: models::Collection::new(name), @@ -84,10 +81,11 @@ pub trait ControlPlane { } } -pub async fn validate( - build_config: &proto_flow::flow::build_api::Config, - connectors: &C, - control_plane: &P, +pub async fn validate( + build_id: &str, + project_root: &url::Url, + connectors: &dyn Connectors, + control_plane: &dyn ControlPlane, captures: &[tables::Capture], collections: &[tables::Collection], fetches: &[tables::Fetch], @@ -110,7 +108,7 @@ pub async fn validate( // Build all local collections. let built_collections = - collection::walk_all_collections(build_config, collections, storage_mappings, &mut errors); + collection::walk_all_collections(build_id, collections, storage_mappings, &mut errors); // If we failed to build one or more collections then further validation // will generate lots of misleading "not found" errors. 
@@ -133,7 +131,7 @@ pub async fn validate( materializations, tests, ), - build_config, + build_id, storage_mappings, ) .await @@ -205,37 +203,32 @@ pub async fn validate( &mut errors, ); - let image_inspections = - image::walk_all_images(connectors, captures, collections, materializations).await; - let built_captures = capture::walk_all_captures( - build_config, + build_id, &built_collections, captures, connectors, - &image_inspections, storage_mappings, &mut errors, ); let mut derive_errors = tables::Errors::new(); let built_derivations = derivation::walk_all_derivations( - build_config, + build_id, &built_collections, collections, connectors, - &image_inspections, imports, + project_root, storage_mappings, &mut derive_errors, ); let mut materialize_errors = tables::Errors::new(); let built_materializations = materialization::walk_all_materializations( - build_config, + build_id, &built_collections, connectors, - &image_inspections, materializations, storage_mappings, &mut materialize_errors, diff --git a/crates/validation/src/materialization.rs b/crates/validation/src/materialization.rs index a91aa93f97..5a3afc9653 100644 --- a/crates/validation/src/materialization.rs +++ b/crates/validation/src/materialization.rs @@ -1,15 +1,14 @@ use super::{ - collection, image, indexed, reference, storage_mapping, Connectors, Error, NoOpDrivers, Scope, + collection, indexed, reference, storage_mapping, Connectors, Error, NoOpConnectors, Scope, }; use itertools::Itertools; use proto_flow::{flow, materialize}; use std::collections::{BTreeMap, HashMap}; -pub async fn walk_all_materializations( - build_config: &flow::build_api::Config, +pub async fn walk_all_materializations( + build_id: &str, built_collections: &[tables::BuiltCollection], - connectors: &C, - images: &[image::Image], + connectors: &dyn Connectors, materializations: &[tables::Materialization], storage_mappings: &[tables::StorageMapping], errors: &mut tables::Errors, @@ -20,7 +19,6 @@ pub async fn walk_all_materializations( let mut materialization_errors = tables::Errors::new(); let validation = walk_materialization_request( built_collections, - images, materialization, &mut materialization_errors, ); @@ -37,13 +35,16 @@ pub async fn walk_all_materializations( let validations = validations .into_iter() .map(|(materialization, request)| async move { - // If shards are disabled, then don't ask the connector to validate. Users may - // disable materializations in response to the target system being unreachable, and - // we wouldn't want a validation error for a disabled task to terminate the build. + let wrapped = materialize::Request { + validate: Some(request.clone()), + ..Default::default() + }; + // If shards are disabled, then don't ask the connector to validate. + // A broken but disabled endpoint should not cause a build to fail. 
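The hunks below also stop consuming the Validate message by value: it is destructured through `&mut request`, and owned fields are moved out with `std::mem::take`, which avoids extra clones while leaving `request` in a valid (defaulted) state. A standalone illustration of that pattern; the `Validate` struct here is a hypothetical stand-in:

/// Hypothetical stand-in for a request message with owned fields.
#[derive(Default, Debug)]
struct Validate {
    name: String,
    bindings: Vec<String>,
}

fn main() {
    let mut request = Validate {
        name: "example/task".to_string(),
        bindings: vec!["a".to_string(), "b".to_string()],
    };

    // Destructure by mutable reference, then move the owned fields out.
    // `std::mem::take` leaves Default values behind, so `request` stays usable.
    let Validate { name, bindings } = &mut request;
    let owned_bindings = std::mem::take(bindings);
    let owned_name = std::mem::take(name);

    assert_eq!(owned_name, "example/task");
    assert_eq!(owned_bindings, vec!["a", "b"]);
    assert!(request.name.is_empty() && request.bindings.is_empty());
}

The materialization code below applies the same technique to `binding_requests`, `config_json`, and `name` when assembling the built spec. (The shards-disabled check continues immediately below: a disabled endpoint is routed to NoOpConnectors rather than the real connector.)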
let response = if materialization.spec.shards.disable { - NoOpDrivers {}.validate_materialization(request.clone()) + NoOpConnectors.validate_materialization(wrapped) } else { - connectors.validate_materialization(request.clone()) + connectors.validate_materialization(wrapped) }; (materialization, request, response.await) }); @@ -51,12 +52,12 @@ pub async fn walk_all_materializations( let validations: Vec<( &tables::Materialization, materialize::request::Validate, - anyhow::Result, + anyhow::Result, )> = futures::future::join_all(validations).await; let mut built_materializations = tables::BuiltMaterializations::new(); - for (materialization, request, response) in validations { + for (materialization, mut request, response) in validations { let tables::Materialization { scope, materialization, @@ -69,13 +70,13 @@ pub async fn walk_all_materializations( } = materialization; let scope = Scope::new(scope); - // Unwrap |response| and continue if an Err. - let validated = match response { + // Unwrap `response` and bail out if it failed. + let (validated, network_ports) = match extract_validated(response) { Err(err) => { - Error::Connector { detail: err }.push(scope, errors); + err.push(scope, errors); continue; } - Ok(response) => response, + Ok(ok) => ok, }; let materialize::request::Validate { @@ -83,8 +84,7 @@ pub async fn walk_all_materializations( config_json, bindings: binding_requests, name, - network_ports, - } = request; + } = &mut request; let materialize::response::Validated { bindings: binding_responses, @@ -100,7 +100,7 @@ pub async fn walk_all_materializations( // Join requests and responses to produce tuples // of (binding index, built binding). - let built_bindings: Vec<_> = binding_requests + let built_bindings: Vec<_> = std::mem::take(binding_requests) .into_iter() .zip(binding_responses.into_iter()) .enumerate() @@ -217,17 +217,17 @@ pub async fn walk_all_materializations( let spec = flow::MaterializationSpec { name: name.clone(), - connector_type, - config_json, + connector_type: *connector_type, + config_json: std::mem::take(config_json), bindings: built_bindings, recovery_log_template: Some(assemble::recovery_log_template( - build_config, + build_id, &name, labels::TASK_TYPE_MATERIALIZATION, recovery_stores, )), shard_template: Some(assemble::shard_template( - build_config, + build_id, &name, labels::TASK_TYPE_MATERIALIZATION, shards, @@ -236,7 +236,7 @@ pub async fn walk_all_materializations( )), network_ports, }; - built_materializations.insert_row(scope.flatten(), name, validated, spec); + built_materializations.insert_row(scope.flatten(), std::mem::take(name), validated, spec); } built_materializations @@ -244,20 +244,15 @@ pub async fn walk_all_materializations( fn walk_materialization_request<'a>( built_collections: &'a [tables::BuiltCollection], - images: &[image::Image], materialization: &'a tables::Materialization, errors: &mut tables::Errors, ) -> Option<(&'a tables::Materialization, materialize::request::Validate)> { let tables::Materialization { scope, materialization: name, - spec: - models::MaterializationDef { - endpoint, - bindings, - shards, - .. - }, + spec: models::MaterializationDef { + endpoint, bindings, .. 
+ }, } = materialization; let scope = Scope::new(scope); @@ -270,25 +265,14 @@ fn walk_materialization_request<'a>( errors, ); - let (connector_type, config_json, network_ports) = match endpoint { + let (connector_type, config_json) = match endpoint { models::MaterializationEndpoint::Connector(config) => ( flow::materialization_spec::ConnectorType::Image as i32, serde_json::to_string(config).unwrap(), - image::walk_image_network_ports( - scope - .push_prop("endpoint") - .push_prop("connector") - .push_prop("image"), - shards.disable, - &config.image, - images, - errors, - ), ), models::MaterializationEndpoint::Sqlite(sqlite) => ( flow::materialization_spec::ConnectorType::Sqlite as i32, serde_json::to_string(sqlite).unwrap(), - Vec::new(), ), }; @@ -318,7 +302,6 @@ fn walk_materialization_request<'a>( connector_type, config_json, bindings, - network_ports, }; Some((materialization, request)) @@ -641,3 +624,31 @@ fn walk_materialization_response( field_config_json_map, } } + +fn extract_validated( + response: anyhow::Result, +) -> Result<(materialize::response::Validated, Vec), Error> { + let response = match response { + Ok(response) => response, + Err(err) => return Err(Error::Connector { detail: err }), + }; + + let internal = match response.get_internal() { + Some(Ok(internal)) => internal, + None => Default::default(), + Some(Err(err)) => { + return Err(Error::Connector { + detail: anyhow::anyhow!("parsing internal: {err}"), + }); + } + }; + + let Some(validated) = response.validated else { + return Err(Error::Connector { + detail: anyhow::anyhow!("expected Validated but got {}", serde_json::to_string(&response).unwrap()), + }); + }; + let network_ports = internal.container.unwrap_or_default().network_ports; + + Ok((validated, network_ports)) +} diff --git a/crates/validation/src/noop.rs b/crates/validation/src/noop.rs index dab6214295..b481a47af3 100644 --- a/crates/validation/src/noop.rs +++ b/crates/validation/src/noop.rs @@ -1,63 +1,69 @@ use super::{Connectors, ControlPlane}; -use futures::future::{FutureExt, LocalBoxFuture}; -use proto_flow::{capture, derive, materialize}; +use futures::future::{BoxFuture, FutureExt}; +use proto_flow::{capture, derive, flow, materialize}; use std::collections::BTreeMap; -/// NoOpDrivers are permissive placeholders for interaction with connectors, -/// that do not fail and return the right shape of response. -pub struct NoOpDrivers; +/// NoOpConnectors are permissive placeholders for interactions with connectors, +/// that never fail and return the right shape of response. 
+pub struct NoOpConnectors; -impl Connectors for NoOpDrivers { +impl Connectors for NoOpConnectors { fn validate_capture<'a>( &'a self, - request: capture::request::Validate, - ) -> LocalBoxFuture<'a, Result> { + request: capture::Request, + ) -> BoxFuture<'a, anyhow::Result> { + let capture::Request{validate: Some(mut request), ..} = request else { unreachable!() }; use capture::response::{validated::Binding, Validated}; Box::pin(async move { - let bindings = request - .bindings + let bindings = std::mem::take(&mut request.bindings) .into_iter() .enumerate() .map(|(i, _)| Binding { resource_path: vec![format!("binding-{}", i)], }) .collect::>(); - Ok(Validated { bindings }) + Ok(capture::Response { + validated: Some(Validated { bindings }), + ..Default::default() + }) }) } fn validate_derivation<'a>( &'a self, - request: derive::request::Validate, - ) -> LocalBoxFuture<'a, Result> { + request: derive::Request, + ) -> BoxFuture<'a, anyhow::Result> { + let derive::Request{validate: Some(mut request), ..} = request else { unreachable!() }; use derive::response::{validated::Transform, Validated}; Box::pin(async move { - let transforms = request - .transforms + let transforms = std::mem::take(&mut request.transforms) .into_iter() .map(|_| Transform { read_only: false }) .collect::>(); - Ok(Validated { - transforms, - generated_files: BTreeMap::new(), + Ok(derive::Response { + validated: Some(Validated { + transforms, + generated_files: BTreeMap::new(), + }), + ..Default::default() }) }) } fn validate_materialization<'a>( &'a self, - request: materialize::request::Validate, - ) -> LocalBoxFuture<'a, Result> { + request: materialize::Request, + ) -> BoxFuture<'a, anyhow::Result> { + let materialize::Request{validate: Some(mut request), ..} = request else { unreachable!() }; use materialize::response::{ validated::{constraint::Type, Binding, Constraint}, Validated, }; Box::pin(async move { - let response_bindings = request - .bindings + let response_bindings = std::mem::take(&mut request.bindings) .into_iter() .enumerate() .map(|(i, b)| { @@ -84,22 +90,14 @@ impl Connectors for NoOpDrivers { } }) .collect::>(); - Ok(Validated { - bindings: response_bindings, + Ok(materialize::Response { + validated: Some(Validated { + bindings: response_bindings, + }), + ..Default::default() }) }) } - - fn inspect_image<'a>( - &'a self, - _image: String, - ) -> LocalBoxFuture<'a, Result, anyhow::Error>> { - // Just return a constant value that matches the basic shape of the `docker inspect` output. - // The `source` property is just to make it obvious where this came from if looking at a build db. 
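With `inspect_image` removed from this trait (its stubbed `docker inspect` output is deleted just below), network ports instead travel with each connector's Validated response, inside the response's internal Container, and are unpacked by the `extract_validated` helpers added earlier in this diff. A self-contained sketch of that unpacking, using simplified stand-in types rather than the real proto-flow messages:

// Simplified stand-ins for the proto-flow shapes; illustrative only.
#[derive(Default)]
struct NetworkPort { number: u32 }

#[derive(Default)]
struct Container { network_ports: Vec<NetworkPort> }

#[derive(Default)]
struct Internal { container: Option<Container> }

struct Validated { transforms: usize }

#[derive(Default)]
struct Response { validated: Option<Validated>, internal: Internal }

fn extract(response: anyhow::Result<Response>) -> anyhow::Result<(Validated, Vec<NetworkPort>)> {
    // A connector-level failure is surfaced as-is.
    let response = response?;

    // The Validated variant is required; anything else is a protocol error.
    let Some(validated) = response.validated else {
        anyhow::bail!("expected a Validated response");
    };
    // Ports default to empty when the runtime attached no Container info.
    let network_ports = response.internal.container.unwrap_or_default().network_ports;

    Ok((validated, network_ports))
}

fn main() -> anyhow::Result<()> {
    let response = Response {
        validated: Some(Validated { transforms: 2 }),
        internal: Internal {
            container: Some(Container { network_ports: vec![NetworkPort { number: 8080 }] }),
        },
    };
    let (validated, ports) = extract(Ok(response))?;
    println!("transforms={} first_port={}", validated.transforms, ports[0].number);
    Ok(())
}

The real helpers additionally decode the `internal` extension from its protobuf Any via `get_internal()`, as shown in the derivation and materialization hunks above.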
- Box::pin(async move { - Ok(r#"[{"Config": {},"source": "flow no-op driver"}]"#.as_bytes().to_vec()) - }) - } } pub struct NoOpControlPlane; @@ -108,9 +106,9 @@ impl ControlPlane for NoOpControlPlane { fn resolve_collections<'a, 'b: 'a>( &'a self, _collections: Vec, - _temp_build_config: &'b proto_flow::flow::build_api::Config, + _temp_build_id: &'b str, _temp_storage_mappings: &'b [tables::StorageMapping], - ) -> LocalBoxFuture<'a, anyhow::Result>> { + ) -> BoxFuture<'a, anyhow::Result>> { async move { Ok(vec![]) }.boxed() } } diff --git a/crates/validation/tests/model.yaml b/crates/validation/tests/model.yaml index 78f110690e..8e2646d420 100644 --- a/crates/validation/tests/model.yaml +++ b/crates/validation/tests/model.yaml @@ -291,23 +291,6 @@ test://example/int-string-len.schema: type: integer driver: - imageInspections: - s3: - output: &imageInspect '[{ - "Id": "foo", - "Config":{ - "ExposedPorts": {"567/tcp":{}, "123/udp": {}, "789":{} }, - "Labels":{"dev.estuary.port-public.567":"true","dev.estuary.port-proto.789":"h2"} - } - }]' - database: - output: *imageInspect - # documenting the absolute minimal valid output from `docker inspect` that we're able to accept - database/image: - output: '[{"Config":{}}]' - webhook/connector: - output: *imageInspect - materializations: testing/webhook/deliveries: connectorType: IMAGE @@ -334,6 +317,9 @@ driver: Extra: { type: 2, reason: "location required" } Len: { type: 3, reason: "optional" } resourcePath: [targe+, two] + networkPorts: &networkPortsFixture + - { number: 567, public: true } + - { number: 789, protocol: h2 } testing/db-views: connectorType: IMAGE @@ -392,6 +378,7 @@ driver: bindings: - resourcePath: [bucke+, pr@fix] - resourcePath: [other-bucket] + networkPorts: *networkPortsFixture testing/db-cdc: connectorType: IMAGE @@ -402,6 +389,7 @@ driver: user: a-user bindings: - resourcePath: [schema, table] + networkPorts: *networkPortsFixture test://example/array-key.schema: # This schema models array additionalItems which provably exist, diff --git a/crates/validation/tests/scenario_tests.rs b/crates/validation/tests/scenario_tests.rs index 49b266a575..54e6a74fb7 100644 --- a/crates/validation/tests/scenario_tests.rs +++ b/crates/validation/tests/scenario_tests.rs @@ -1,6 +1,6 @@ -use futures::{future::LocalBoxFuture, FutureExt}; +use futures::{future::BoxFuture, FutureExt}; use lazy_static::lazy_static; -use proto_flow::{capture, derive, flow, materialize}; +use proto_flow::{capture, derive, flow, materialize, runtime::Container}; use serde_json::Value; use std::collections::BTreeMap; @@ -10,13 +10,68 @@ lazy_static! { #[test] fn test_golden_all_visits() { - let tables = run_test( - GOLDEN.clone(), - &flow::build_api::Config { - build_id: "a-build-id".to_string(), - ..Default::default() - }, - ); + let ( + ( + tables::Sources { + captures, + collections, + errors: _, + fetches, + imports, + materializations, + resources, + storage_mappings, + tests, + }, + tables::Validations { + built_captures, + built_collections, + built_materializations, + built_tests, + errors: _, + }, + ), + errors, + ) = run_test(GOLDEN.clone(), "a-build-id"); + + // NOTE(johnny): There used to be a tables::All which was removed. + // We re-constitute it here only to avoid churning this existing snapshot. + // We can remove this and separately update the snapshot of `sources` & `validations`. 
+ #[derive(Debug)] + #[allow(dead_code)] + struct All { + built_captures: tables::BuiltCaptures, + built_collections: tables::BuiltCollections, + built_materializations: tables::BuiltMaterializations, + built_tests: tables::BuiltTests, + captures: tables::Captures, + collections: tables::Collections, + errors: tables::Errors, + fetches: tables::Fetches, + imports: tables::Imports, + materializations: tables::Materializations, + meta: tables::Meta, + resources: tables::Resources, + storage_mappings: tables::StorageMappings, + tests: tables::Tests, + } + let tables = All { + built_captures, + built_collections, + built_materializations, + built_tests, + captures, + collections, + errors, + fetches, + imports, + materializations, + meta: tables::Meta::default(), + resources, + storage_mappings, + tests, + }; + insta::assert_debug_snapshot!(tables); } @@ -24,31 +79,27 @@ fn test_golden_all_visits() { fn connector_validation_is_skipped_when_shards_are_disabled() { let fixture = serde_yaml::from_slice(include_bytes!("validation_skipped_when_disabled.yaml")).unwrap(); - let tables = run_test( - fixture, - &flow::build_api::Config { - build_id: "validation-skipped-build-id".to_string(), - ..Default::default() - }, - ); + let ((_, validations), errors) = run_test(fixture, "validation-skipped-build-id"); + assert!(errors.is_empty(), "expected no errors, got: {errors:?}"); + let tables::Validations { + built_captures, + built_materializations, + .. + } = validations; + + assert_eq!(built_captures.len(), 1); assert!( - tables.errors.is_empty(), - "expected no errors, got: {:?}", - tables.errors - ); - assert_eq!(tables.built_captures.len(), 1); - assert!( - tables.built_captures[0] + built_captures[0] .spec .shard_template .as_ref() .unwrap() .disable, ); - assert_eq!(tables.built_materializations.len(), 1); + assert_eq!(built_materializations.len(), 1); assert!( - tables.built_materializations[0] + built_materializations[0] .spec .shard_template .as_ref() @@ -85,21 +136,15 @@ driver: "##, ) .unwrap(); - let tables = run_test( - fixture, - &flow::build_api::Config { - build_id: "collection-contains-flow-document-build-id".to_string(), - ..Default::default() - }, - ); + let ((_, validations), errors) = + run_test(fixture, "collection-contains-flow-document-build-id"); + assert!(errors.is_empty(), "expected no errors, got: {errors:?}"); - assert!( - tables.errors.is_empty(), - "expected no errors, got: {:?}", - tables.errors - ); + let tables::Validations { + built_collections, .. + } = validations; - let collection = &tables.built_collections[0]; + let collection = &built_collections[0]; assert!(!collection .spec .projections @@ -266,42 +311,35 @@ driver: "##; - let tables = run_test( - serde_yaml::from_str(models).unwrap(), - &flow::build_api::Config { - build_id: "disabled-bindings".to_string(), - ..Default::default() - }, - ); + let ((_, validations), errors) = + run_test(serde_yaml::from_str(models).unwrap(), "disabled-bindings"); + assert!(errors.is_empty(), "expected no errors, got: {errors:?}"); - assert!( - tables.errors.is_empty(), - "expected no errors, got: {:?}", - tables.errors - ); - assert_eq!(tables.built_captures.len(), 2); - let partly_disabled_cap = tables - .built_captures + let tables::Validations { + built_captures, + built_materializations, + .. 
+ } = validations; + + assert_eq!(built_captures.len(), 2); + let partly_disabled_cap = built_captures .iter() .find(|c| c.capture == "testing/partially-disabled-capture") .unwrap(); assert_eq!(1, partly_disabled_cap.spec.bindings.len()); - let fully_disabled_cap = tables - .built_captures + let fully_disabled_cap = built_captures .iter() .find(|c| c.capture == "testing/fully-disabled-capture") .unwrap(); assert_eq!(0, fully_disabled_cap.spec.bindings.len()); - let partly_disabled_mat = tables - .built_materializations + let partly_disabled_mat = built_materializations .iter() .find(|m| m.materialization == "testing/partially-disabled-materialization") .unwrap(); assert_eq!(1, partly_disabled_mat.spec.bindings.len()); - let fully_disabled_mat = tables - .built_materializations + let fully_disabled_mat = built_materializations .iter() .find(|m| m.materialization == "testing/fully-disabled-materialization") .unwrap(); @@ -310,22 +348,22 @@ driver: #[test] fn test_database_round_trip() { - let tables = run_test( - GOLDEN.clone(), - &flow::build_api::Config { - build_id: "a-build-id".to_string(), - ..Default::default() - }, - ); + let ((sources, validations), _) = run_test(GOLDEN.clone(), "a-build-id"); // Round-trip source and built tables through the database, verifying equality. let db = rusqlite::Connection::open(":memory:").unwrap(); - tables::persist_tables(&db, &tables.as_tables()).unwrap(); - let mut reload_tables = tables::All::default(); - tables::load_tables(&db, reload_tables.as_tables_mut().as_mut_slice()).unwrap(); - let original = format!("{:#?}", tables); - let recovered = format!("{:#?}", reload_tables); + tables::persist_tables(&db, &sources.as_tables()).unwrap(); + tables::persist_tables(&db, &validations.as_tables()).unwrap(); + + let mut reload_sources = tables::Sources::default(); + let mut reload_validations = tables::Validations::default(); + + tables::load_tables(&db, reload_sources.as_tables_mut().as_mut_slice()).unwrap(); + tables::load_tables(&db, reload_validations.as_tables_mut().as_mut_slice()).unwrap(); + + let original = format!("{sources:#?} {validations:#?}"); + let recovered = format!("{reload_sources:#?} {reload_validations:#?}"); if original != recovered { std::fs::write("ORIGINAL", original).unwrap(); @@ -1368,26 +1406,6 @@ test://example/int-halve: insta::assert_debug_snapshot!(errors); } -#[test] -fn test_image_inspection_is_malformed() { - let errors = run_test_errors( - &GOLDEN, - r#" -driver: - imageInspections: - s3: - output: '[{"Invalid": "Inspection"}]' - database: - output: '[{"whoops": "bad"}]' - database/image: - output: '{"also": "bad"}' - webhook/connector: - output: '{"me": "too"}' -"#, - ); - insta::assert_debug_snapshot!(errors); -} - #[test] fn test_derivation_not_before_after_ordering() { let errors = run_test_errors( @@ -1447,12 +1465,26 @@ test://example/int-string-tests: insta::assert_debug_snapshot!(errors); } +#[test] +fn test_invalid_generated_file_url() { + let errors = run_test_errors( + &GOLDEN, + r#" +driver: + derivations: + testing/from-array-key: + generatedFiles: + "this is not a URL! 
": generated content +"#, + ); + insta::assert_debug_snapshot!(errors); +} + #[derive(serde::Deserialize)] #[serde(rename_all = "camelCase")] struct MockDriverCalls { captures: BTreeMap, derivations: BTreeMap, - image_inspections: BTreeMap, materializations: BTreeMap, } @@ -1463,6 +1495,8 @@ struct MockCaptureValidateCall { config: serde_json::Value, bindings: Vec, #[serde(default)] + network_ports: Vec, + #[serde(default)] error: Option, } @@ -1475,6 +1509,8 @@ struct MockDeriveValidateCall { transforms: Vec, generated_files: BTreeMap, #[serde(default)] + network_ports: Vec, + #[serde(default)] error: Option, } @@ -1493,15 +1529,11 @@ struct MockMaterializationValidateCall { #[serde(default)] delta_updates: bool, #[serde(default)] + network_ports: Vec, + #[serde(default)] error: Option, } -#[derive(serde::Deserialize)] -#[serde(deny_unknown_fields, rename_all = "camelCase")] -struct MockImageInspectCall { - output: String, -} - #[derive(serde::Deserialize)] #[serde(deny_unknown_fields, rename_all = "camelCase")] struct MockDriverBinding { @@ -1519,8 +1551,10 @@ struct MockDriverBinding { impl validation::Connectors for MockDriverCalls { fn validate_capture<'a>( &'a self, - request: capture::request::Validate, - ) -> LocalBoxFuture<'a, Result> { + request: capture::Request, + ) -> BoxFuture<'a, anyhow::Result> { + let capture::Request{validate: Some(request), ..} = request else { unreachable!() }; + async move { let call = match self.captures.get(&request.name) { Some(call) => call, @@ -1560,15 +1594,30 @@ impl validation::Connectors for MockDriverCalls { }) .collect(); - return Ok(capture::response::Validated { bindings }); + let mut response = capture::Response { + validated: Some(capture::response::Validated { bindings }), + ..Default::default() + }; + response + .set_internal(&mut bytes::BytesMut::new(), |internal| { + internal.container = Some(Container { + ip_addr: "1.2.3.4".to_string(), + network_ports: call.network_ports.clone(), + }); + }) + .unwrap(); + + return Ok(response); } - .boxed_local() + .boxed() } fn validate_derivation<'a>( &'a self, - request: derive::request::Validate, - ) -> LocalBoxFuture<'a, Result> { + request: derive::Request, + ) -> BoxFuture<'a, anyhow::Result> { + let derive::Request{validate: Some(request), ..} = request else { unreachable!() }; + async move { let name = &request.collection.as_ref().unwrap().name; @@ -1621,18 +1670,33 @@ impl validation::Connectors for MockDriverCalls { }) .collect(); - return Ok(derive::response::Validated { - transforms, - generated_files: call.generated_files.clone(), - }); + let mut response = derive::Response { + validated: Some(derive::response::Validated { + transforms, + generated_files: call.generated_files.clone(), + }), + ..Default::default() + }; + response + .set_internal(&mut bytes::BytesMut::new(), |internal| { + internal.container = Some(Container { + ip_addr: "1.2.3.4".to_string(), + network_ports: call.network_ports.clone(), + }); + }) + .unwrap(); + + return Ok(response); } - .boxed_local() + .boxed() } fn validate_materialization<'a>( &'a self, - request: materialize::request::Validate, - ) -> LocalBoxFuture<'a, Result> { + request: materialize::Request, + ) -> BoxFuture<'a, anyhow::Result> { + let materialize::Request{validate: Some(request), ..} = request else { unreachable!() }; + async move { let call = match self.materializations.get(&request.name) { Some(call) => call, @@ -1686,29 +1750,29 @@ impl validation::Connectors for MockDriverCalls { }) .collect(); - return 
Ok(materialize::response::Validated { bindings }); - } - .boxed_local() - } + let mut response = materialize::Response { + validated: Some(materialize::response::Validated { bindings }), + ..Default::default() + }; + response + .set_internal(&mut bytes::BytesMut::new(), |internal| { + internal.container = Some(Container { + ip_addr: "1.2.3.4".to_string(), + network_ports: call.network_ports.clone(), + }); + }) + .unwrap(); - fn inspect_image<'a>( - &'a self, - image: String, - ) -> LocalBoxFuture<'a, Result, anyhow::Error>> { - async move { - if let Some(call) = self.image_inspections.get(&image) { - Ok(call.output.clone().into_bytes()) - } else { - Err(anyhow::anyhow!( - "driver fixture not found for image: '{image}'" - )) - } + return Ok(response); } - .boxed_local() + .boxed() } } -fn run_test(mut fixture: Value, config: &flow::build_api::Config) -> tables::All { +fn run_test( + mut fixture: Value, + build_id: &str, +) -> ((tables::Sources, tables::Validations), tables::Errors) { // Extract out driver mock call fixtures. let mock_calls = fixture .get_mut("driver") @@ -1722,52 +1786,33 @@ fn run_test(mut fixture: Value, config: &flow::build_api::Config) -> tables::All let tables::Sources { captures, collections, - mut errors, + errors: _, fetches, imports, materializations, - resources, + resources: _, storage_mappings, tests, - } = sources; + } = &sources; - let tables::Validations { - built_captures, - built_collections, - built_materializations, - built_tests, - errors: validation_errors, - } = futures::executor::block_on(validation::validate( - config, + let mut validations = futures::executor::block_on(validation::validate( + build_id, + &url::Url::parse("file:///project/root").unwrap(), &mock_calls, &validation::NoOpControlPlane, - &captures, - &collections, - &fetches, - &imports, - &materializations, - &storage_mappings, - &tests, - )); - - errors.extend(validation_errors.into_iter()); - - tables::All { - built_captures, - built_collections, - built_materializations, - built_tests, captures, collections, - errors, fetches, imports, materializations, - meta: tables::Meta::new(), - resources, storage_mappings, tests, - } + )); + + let mut errors = std::mem::take(&mut sources.errors); + errors.extend(std::mem::take(&mut validations.errors).into_iter()); + + ((sources, validations), errors) } #[must_use] @@ -1776,13 +1821,7 @@ fn run_test_errors(fixture: &Value, patch: &str) -> tables::Errors { let patch: Value = serde_yaml::from_str(patch).unwrap(); json_patch::merge(&mut fixture, &patch); - let tables::All { mut errors, .. } = run_test( - fixture, - &flow::build_api::Config { - build_id: "a-build-id".to_string(), - ..Default::default() - }, - ); + let (_, mut errors) = run_test(fixture, "a-build-id"); // Squelch expected fixture error. 
if matches!(errors.first(), Some(err) if err.scope.as_str() == "test://example/from-array-key#/collections/testing~1from-array-key/derive/using/sqlite/migrations/1") diff --git a/crates/validation/tests/snapshots/scenario_tests__image_inspection_is_malformed.snap b/crates/validation/tests/snapshots/scenario_tests__image_inspection_is_malformed.snap deleted file mode 100644 index a32176ab5b..0000000000 --- a/crates/validation/tests/snapshots/scenario_tests__image_inspection_is_malformed.snap +++ /dev/null @@ -1,22 +0,0 @@ ---- -source: crates/validation/tests/scenario_tests.rs -expression: errors ---- -[ - Error { - scope: test://example/int-string-captures#/captures/testing~1db-cdc/endpoint/connector/image, - error: error while extracting metadata from the connector image 'database': failed to parse `docker inspect` output: [{"whoops": "bad"}]: missing field `Config` at line 1 column 18, - }, - Error { - scope: test://example/int-string-captures#/captures/testing~1s3-source/endpoint/connector/image, - error: error while extracting metadata from the connector image 's3': failed to parse `docker inspect` output: [{"Invalid": "Inspection"}]: missing field `Config` at line 1 column 26, - }, - Error { - scope: test://example/db-views#/materializations/testing~1db-views/endpoint/connector/image, - error: error while extracting metadata from the connector image 'database/image': failed to parse `docker inspect` output: {"also": "bad"}: invalid type: map, expected a sequence at line 1 column 0, - }, - Error { - scope: test://example/webhook-deliveries#/materializations/testing~1webhook~1deliveries/endpoint/connector/image, - error: error while extracting metadata from the connector image 'webhook/connector': failed to parse `docker inspect` output: {"me": "too"}: invalid type: map, expected a sequence at line 1 column 0, - }, -] diff --git a/crates/validation/tests/snapshots/scenario_tests__invalid_generated_file_url.snap b/crates/validation/tests/snapshots/scenario_tests__invalid_generated_file_url.snap new file mode 100644 index 0000000000..3d22c0770f --- /dev/null +++ b/crates/validation/tests/snapshots/scenario_tests__invalid_generated_file_url.snap @@ -0,0 +1,10 @@ +--- +source: crates/validation/tests/scenario_tests.rs +expression: errors +--- +[ + Error { + scope: test://example/from-array-key#/collections/testing~1from-array-key, + error: connector returned an invalid generated file URL "this is not a URL! ": relative URL without a base, + }, +] diff --git a/go/bindings/build.go b/go/bindings/build.go index 8c5b2dc974..058ae3ac6b 100644 --- a/go/bindings/build.go +++ b/go/bindings/build.go @@ -3,43 +3,16 @@ package bindings // #include "../../crates/bindings/flow_bindings.h" import "C" import ( - "bytes" "context" "fmt" - "io" - "net/http" + "os" "os/exec" - "strings" + "path" - "github.com/estuary/flow/go/connector" - pc "github.com/estuary/flow/go/protocols/capture" + "github.com/estuary/flow/go/pkgbin" pf "github.com/estuary/flow/go/protocols/flow" - pm "github.com/estuary/flow/go/protocols/materialize" - "github.com/estuary/flow/go/protocols/ops" - log "github.com/sirupsen/logrus" - pb "go.gazette.dev/core/broker/protocol" ) -// CatalogJSONSchema returns the source catalog JSON schema understood by Flow. 
-func CatalogJSONSchema() string { - var publisher = ops.NewLocalPublisher(ops.ShardLabeling{}) - var svc, err = newBuildSvc(publisher) - if err != nil { - panic(err) - } - defer svc.destroy() - - svc.sendBytes(uint32(pf.BuildAPI_CATALOG_SCHEMA), nil) - - _, out, err := svc.poll() - if err != nil { - panic(err) - } else if len(out) != 1 { - panic("expected 1 output message") - } - return string(svc.arenaSlice(out[0])) -} - // BuildArgs are arguments of the BuildCatalog function. type BuildArgs struct { pf.BuildAPI_Config @@ -48,224 +21,47 @@ type BuildArgs struct { // Directory which roots fetched file:// resolutions. // Or empty, if file:// resolutions are disallowed. FileRoot string - // Publisher of operation logs and stats to use. - // If not set, a publisher will be created that logs to stderr. - OpsPublisher ops.Publisher } // BuildCatalog runs the configured build. func BuildCatalog(args BuildArgs) error { if err := args.BuildAPI_Config.Validate(); err != nil { return fmt.Errorf("validating configuration: %w", err) + } else if args.FileRoot == "" { + return fmt.Errorf("FileRoot is required") } - var transport = http.DefaultTransport.(*http.Transport).Clone() - var client = &http.Client{Transport: transport} - - if args.FileRoot != "" { - transport.RegisterProtocol("file", http.NewFileTransport(http.Dir(args.FileRoot))) - } - if args.OpsPublisher == nil { - args.OpsPublisher = ops.NewLocalPublisher(ops.ShardLabeling{ - Build: args.BuildId, - }) - } - - var svc, err = newBuildSvc(args.OpsPublisher) + var flowctl, err = pkgbin.Locate("flowctl") if err != nil { - return fmt.Errorf("creating build service: %w", err) + return fmt.Errorf("finding flowctl binary: %w", err) } - defer svc.destroy() - - if err := svc.sendMessage(uint32(pf.BuildAPI_BEGIN), &args.BuildAPI_Config); err != nil { - panic(err) // Cannot fail to marshal. - } - - var trampoline, resolvedCh = newTrampolineServer( - args.Context, - trampolineHandler{ - taskCode: uint32(pf.BuildAPI_TRAMPOLINE_FETCH), - decode: func(request []byte) (interface{}, error) { - var fetch = new(pf.BuildAPI_Fetch) - var err = fetch.Unmarshal(request) - return fetch, err - }, - exec: func(ctx context.Context, i interface{}) ([]byte, error) { - var fetch = i.(*pf.BuildAPI_Fetch) - - var body = bytes.NewBuffer(make([]byte, 4096)) - body.Truncate(taskResponseHeader) // Reserve. 
- - var req, err = http.NewRequestWithContext(ctx, "GET", fetch.ResourceUrl, nil) - var resp *http.Response - - if err == nil { - resp, err = client.Do(req) - } - if err == nil { - _, err = io.Copy(body, resp.Body) - } - if err == nil && resp.StatusCode != 200 && resp.StatusCode != 204 { - err = fmt.Errorf("unexpected status %d: %s", - resp.StatusCode, - strings.TrimSpace(body.String()[taskResponseHeader:]), - ) - } - return body.Bytes(), err - }, - }, - trampolineHandler{ - taskCode: uint32(pf.BuildAPI_TRAMPOLINE_VALIDATE_CAPTURE), - decode: func(request []byte) (interface{}, error) { - var m = new(pc.Request_Validate) - var err = m.Unmarshal(request) - return m, err - }, - exec: func(ctx context.Context, i interface{}) ([]byte, error) { - var request = i.(*pc.Request_Validate) - log.WithField("request", request).Debug("capture validation requested") - - var response, err = connector.Invoke[pc.Response]( - ctx, - &pc.Request{Validate: request}, - args.BuildAPI_Config.ConnectorNetwork, - args.OpsPublisher, - func(driver *connector.Driver) (pc.Connector_CaptureClient, error) { - return driver.CaptureClient().Capture(ctx) - }, - ) - if err != nil { - return nil, err - } - log.WithField("response", response).Debug("capture validation response") - var validated = response.Validated - - // Return marshaled response with a |taskResponseHeader| prefix. - var out = make([]byte, taskResponseHeader+validated.ProtoSize()) - if _, err = validated.MarshalTo(out[taskResponseHeader:]); err != nil { - return nil, fmt.Errorf("marshal response: %w", err) - } - return out, err - }, - }, - trampolineHandler{ - taskCode: uint32(pf.BuildAPI_TRAMPOLINE_VALIDATE_MATERIALIZATION), - decode: func(request []byte) (interface{}, error) { - var m = new(pm.Request_Validate) - var err = m.Unmarshal(request) - return m, err - }, - exec: func(ctx context.Context, i interface{}) ([]byte, error) { - var request = i.(*pm.Request_Validate) - log.WithField("request", request).Debug("materialize validation requested") - var response, err = connector.Invoke[pm.Response]( - ctx, - &pm.Request{Validate: request}, - args.BuildAPI_Config.ConnectorNetwork, - args.OpsPublisher, - func(driver *connector.Driver) (pm.Connector_MaterializeClient, error) { - // TODO(johnny): This is to make the gRPC loopback used by sqlite.InProcessServer - // work properly, and can be removed once that implementation is removed. - ctx = pb.WithDispatchDefault(ctx) - return driver.MaterializeClient().Materialize(ctx) - }, - ) - if err != nil { - return nil, err - } - log.WithField("response", response).Debug("materialize validation response") - var validated = response.Validated - - // Return marshaled response with a |taskResponseHeader| prefix. 
- var out = make([]byte, taskResponseHeader+validated.ProtoSize()) - if _, err = validated.MarshalTo(out[taskResponseHeader:]); err != nil { - return nil, fmt.Errorf("marshal response: %w", err) - } - return out, err - }, - }, - trampolineHandler{ - taskCode: uint32(pf.BuildAPI_TRAMPOLINE_DOCKER_INSPECT), - decode: func(request []byte) (interface{}, error) { - return string(request), nil - }, - exec: func(ctx context.Context, i interface{}) ([]byte, error) { - var image = i.(string) - // We first need to pull the image, since it may not be available locally - if err := connector.PullImage(ctx, image); err != nil { - return nil, fmt.Errorf("pulling image: '%s': %w", image, err) - } - - var cmd = exec.Command("docker", "inspect", image) - var result, err = cmd.Output() - if err != nil { - return nil, fmt.Errorf("invoking docker inspect: %w", err) - } - - var out = make([]byte, taskResponseHeader+len(result)) - copy(out[taskResponseHeader:], result) - return out, nil - }, - }, - ) - defer trampoline.stop() - - // mayPoll tracks whether we've resolved tasks since our last poll. - var mayPoll = true - - for { - var resolved []byte - - if !mayPoll { - resolved = <-resolvedCh // Must block. - } else { - select { - case resolved = <-resolvedCh: - default: // Don't block. - } - } - - if resolved != nil { - svc.sendBytes(uint32(pf.BuildAPI_TRAMPOLINE), resolved) - mayPoll = true - continue - } - - // Poll the service. - svc.sendBytes(uint32(pf.BuildAPI_POLL), nil) - var _, out, err = svc.poll() + // A number of existing Go tests use a relative FileRoot + // which must be evaluated against the current working directory + // to resolve to an absolute path, as required by `flowctl`. + if !path.IsAbs(args.FileRoot) { + cwd, err := os.Getwd() if err != nil { - return err + return fmt.Errorf("getting current working directory: %w", err) } - // We must resolve pending work before polling again. - mayPoll = false - - for _, o := range out { - switch pf.BuildAPI_Code(o.code) { - - case pf.BuildAPI_DONE, pf.BuildAPI_DONE_WITH_ERRORS: - return nil - - case pf.BuildAPI_TRAMPOLINE: - trampoline.startTask(svc.arenaSlice(o)) + args.FileRoot = path.Join(cwd, args.FileRoot) + } - default: - log.WithField("code", o.code).Panic("unexpected code from Rust bindings") - } - } + var v = []string{ + "raw", + "build", + "--build-id", args.BuildId, + "--db-path", args.BuildDb, + "--connector-network", args.ConnectorNetwork, + "--file-root", args.FileRoot, + "--source", args.Source, } -} + var cmd = exec.Command(flowctl, v...) 
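For illustration, with hypothetical paths and IDs, the argument vector assembled above produces an invocation of the form:

    flowctl raw build --build-id a-build-id --db-path /tmp/catalog/build.db --connector-network flow-network --file-root /workspace/project --source flow.yaml

Standard output and standard error of the child process are passed straight through, as wired up on the next line.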
+ cmd.Stdin, cmd.Stdout, cmd.Stderr = nil, os.Stdout, os.Stderr -func newBuildSvc(publisher ops.Publisher) (*service, error) { - return newService( - "build", - func(logFilter, logDest C.int32_t) *C.Channel { return C.build_create(logFilter, logDest) }, - func(ch *C.Channel, in C.In1) { C.build_invoke1(ch, in) }, - func(ch *C.Channel, in C.In4) { C.build_invoke4(ch, in) }, - func(ch *C.Channel, in C.In16) { C.build_invoke16(ch, in) }, - func(ch *C.Channel) { C.build_drop(ch) }, - publisher, - ) + if err = cmd.Run(); err != nil { + return fmt.Errorf("catalog build failed: %w", err) + } + return nil } diff --git a/go/bindings/build_test.go b/go/bindings/build_test.go index 9074bf7ccf..7fc4d2a2fd 100644 --- a/go/bindings/build_test.go +++ b/go/bindings/build_test.go @@ -79,8 +79,3 @@ func TestBuildCatalog(t *testing.T) { return nil })) } - -func TestCatalogSchema(t *testing.T) { - var schema = CatalogJSONSchema() - require.True(t, len(schema) > 100) -} diff --git a/go/bindings/task_service_test.go b/go/bindings/task_service_test.go index 0bca3847d0..f13c1ab05e 100644 --- a/go/bindings/task_service_test.go +++ b/go/bindings/task_service_test.go @@ -70,7 +70,7 @@ func TestSimpleDerive(t *testing.T) { StateJson: []byte("{}"), }, Internal: pr.ToAny(&pr.DeriveRequestExt{ - Open: &pr.DeriveRequestExt_Open{ + Labels: &ops.ShardLabeling{ LogLevel: ops.Log_debug, }, }), diff --git a/go/connector/driver.go b/go/connector/driver.go index 3403ce4026..2d2860aea7 100644 --- a/go/connector/driver.go +++ b/go/connector/driver.go @@ -35,7 +35,6 @@ type Driver struct { type imageSpec struct { Image string `json:"image"` Config json.RawMessage `json:"config"` - Ports json.RawMessage `json:"ports"` } // Validate returns an error if EndpointSpec is invalid. diff --git a/go/flowctl-go/cmd-json-schema.go b/go/flowctl-go/cmd-json-schema.go deleted file mode 100644 index a06e1da259..0000000000 --- a/go/flowctl-go/cmd-json-schema.go +++ /dev/null @@ -1,14 +0,0 @@ -package main - -import ( - "os" - - "github.com/estuary/flow/go/bindings" -) - -type cmdJSONSchema struct{} - -func (cmdJSONSchema) Execute(_ []string) error { - var _, err = os.Stdout.WriteString(bindings.CatalogJSONSchema()) - return err -} diff --git a/go/flowctl-go/main.go b/go/flowctl-go/main.go index 6af7848786..2fcbad03cd 100644 --- a/go/flowctl-go/main.go +++ b/go/flowctl-go/main.go @@ -16,12 +16,6 @@ func main() { Locally test a Flow catalog. `, &cmdTest{}) - addCmd(parser, "json-schema", "Print the catalog JSON schema", ` -Print the JSON schema specification of Flow catalogs, as understood by this -specific build of Flow. This JSON schema can be used to enable IDE support -and auto-completions. -`, &cmdJSONSchema{}) - addCmd(parser, "temp-data-plane", "Run an ephemeral, temporary local data plane", ` Run a local data plane by shelling out to start Etcd, Gazette, and the Flow consumer. A local data plane is intended for local development and testing, and doesn't persist diff --git a/go/protocols/capture/capture.pb.go b/go/protocols/capture/capture.pb.go index b7344c5f31..327de9360d 100644 --- a/go/protocols/capture/capture.pb.go +++ b/go/protocols/capture/capture.pb.go @@ -181,13 +181,11 @@ type Request_Validate struct { // Connector type addressed by this request. ConnectorType flow.CaptureSpec_ConnectorType `protobuf:"varint,2,opt,name=connector_type,json=connectorType,proto3,enum=flow.CaptureSpec_ConnectorType" json:"connector_type,omitempty"` // Connector configuration, as an encoded JSON object. 
- ConfigJson encoding_json.RawMessage `protobuf:"bytes,3,opt,name=config_json,json=config,proto3,casttype=encoding/json.RawMessage" json:"config_json,omitempty"` - Bindings []*Request_Validate_Binding `protobuf:"bytes,4,rep,name=bindings,proto3" json:"bindings,omitempty"` - // Network ports of this proposed capture. - NetworkPorts []*flow.NetworkPort `protobuf:"bytes,5,rep,name=network_ports,json=networkPorts,proto3" json:"network_ports,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` + ConfigJson encoding_json.RawMessage `protobuf:"bytes,3,opt,name=config_json,json=config,proto3,casttype=encoding/json.RawMessage" json:"config_json,omitempty"` + Bindings []*Request_Validate_Binding `protobuf:"bytes,4,rep,name=bindings,proto3" json:"bindings,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` } func (m *Request_Validate) Reset() { *m = Request_Validate{} } @@ -905,80 +903,78 @@ func init() { } var fileDescriptor_841a70e6e6288f13 = []byte{ - // 1162 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xcc, 0x56, 0xcb, 0x6e, 0x1c, 0x45, - 0x17, 0x4e, 0x7b, 0x6e, 0x3d, 0x67, 0x6c, 0xc7, 0x2e, 0xf9, 0xff, 0x69, 0x77, 0x2c, 0x3b, 0x24, - 0x41, 0xca, 0x8d, 0x9e, 0x30, 0x51, 0x42, 0xc2, 0x25, 0xc1, 0x76, 0x88, 0x44, 0x24, 0xe2, 0xa8, - 0xcc, 0x45, 0xb0, 0x19, 0xb5, 0xab, 0xcb, 0x3d, 0x8d, 0x7b, 0xaa, 0x9a, 0xee, 0x1e, 0x9b, 0x91, - 0x78, 0x01, 0x16, 0x2c, 0xd8, 0xb0, 0xe6, 0x05, 0x58, 0x23, 0xde, 0xc0, 0xec, 0x78, 0x02, 0x4b, - 0x24, 0x2f, 0x81, 0xb2, 0x42, 0x75, 0xeb, 0x69, 0xbb, 0xe3, 0x68, 0x88, 0xb2, 0x60, 0x63, 0x77, - 0xd5, 0xf9, 0xbe, 0x9a, 0x3a, 0xdf, 0xa9, 0xf3, 0x55, 0xc1, 0x85, 0x90, 0x77, 0x93, 0x94, 0xe7, - 0x9c, 0xf0, 0x38, 0xeb, 0x12, 0x3f, 0xc9, 0x47, 0x29, 0x35, 0xff, 0x3d, 0x19, 0x41, 0x2d, 0x3d, - 0x74, 0x57, 0x8e, 0x81, 0x77, 0x63, 0x7e, 0x20, 0xff, 0x28, 0x98, 0xbb, 0x14, 0xf2, 0x90, 0xcb, - 0xcf, 0xae, 0xf8, 0xd2, 0xb3, 0xcb, 0x21, 0xe7, 0x61, 0x4c, 0x15, 0x6f, 0x67, 0xb4, 0xdb, 0xf5, - 0xd9, 0x58, 0x85, 0x2e, 0xfc, 0xde, 0x81, 0x16, 0xa6, 0xdf, 0x8e, 0x68, 0x96, 0xa3, 0x2b, 0x50, - 0xcf, 0x12, 0x4a, 0x1c, 0xeb, 0xbc, 0x75, 0xb9, 0xd3, 0xfb, 0x9f, 0x67, 0x76, 0xa0, 0xe3, 0xde, - 0x76, 0x42, 0x09, 0x96, 0x10, 0x74, 0x0b, 0xec, 0x20, 0xca, 0x08, 0xdf, 0xa7, 0xa9, 0x33, 0x23, - 0xe1, 0xcb, 0x15, 0xf8, 0x03, 0x0d, 0xc0, 0x05, 0x54, 0xd0, 0xf6, 0xfd, 0x38, 0x0a, 0xfc, 0x9c, - 0x3a, 0xb5, 0x53, 0x68, 0x5f, 0x68, 0x00, 0x2e, 0xa0, 0xe8, 0x3a, 0x34, 0xfc, 0x24, 0x89, 0xc7, - 0x4e, 0x5d, 0x72, 0xfe, 0x5f, 0xe1, 0xac, 0x8b, 0x28, 0x56, 0x20, 0x91, 0x06, 0x4f, 0x28, 0x73, - 0x1a, 0xa7, 0xa4, 0xb1, 0x95, 0x50, 0x86, 0x25, 0x04, 0xdd, 0x83, 0x8e, 0x4f, 0xf6, 0x18, 0x3f, - 0x88, 0x69, 0x10, 0x52, 0xa7, 0x29, 0x19, 0x2b, 0xd5, 0xe5, 0x27, 0x18, 0x5c, 0x26, 0xa0, 0x1b, - 0x60, 0x47, 0x2c, 0xa7, 0x29, 0xf3, 0x63, 0x27, 0x90, 0xe4, 0x25, 0x4f, 0x69, 0xed, 0x19, 0xad, - 0xbd, 0x75, 0x36, 0xc6, 0x05, 0xca, 0xfd, 0xc1, 0x82, 0xba, 0xd0, 0x11, 0x3d, 0x84, 0x79, 0xc2, - 0x19, 0xa3, 0x24, 0xe7, 0x69, 0x3f, 0x1f, 0x27, 0x54, 0xca, 0x3e, 0xdf, 0x5b, 0xf3, 0x64, 0x39, - 0x37, 0xd5, 0x16, 0x04, 0xd4, 0xdb, 0x34, 0xb8, 0xcf, 0xc6, 0x09, 0xc5, 0x73, 0xa4, 0x3c, 0x44, - 0x77, 0xa1, 0x43, 0x38, 0xdb, 0x8d, 0xc2, 0xfe, 0x37, 0x19, 0x67, 0xb2, 0x18, 0xed, 0x8d, 0x95, - 0xe7, 0x47, 0x6b, 0x0e, 0x65, 0x84, 0x07, 0x11, 0x0b, 0xbb, 0x22, 0xe0, 0x61, 0xff, 0xe0, 0x53, - 0x9a, 0x65, 0x7e, 0x48, 0x71, 0x53, 0x11, 0xdc, 
0x1f, 0x2d, 0xb0, 0x4d, 0x91, 0xfe, 0x0b, 0xfb, - 0x79, 0x56, 0x03, 0xdb, 0x54, 0x1f, 0x7d, 0x02, 0x75, 0xe6, 0x0f, 0xd5, 0x2e, 0xda, 0x1b, 0xb7, - 0x9e, 0x1f, 0xad, 0xbd, 0x13, 0x46, 0xf9, 0x60, 0xb4, 0xe3, 0x11, 0x3e, 0xec, 0xd2, 0x2c, 0x1f, - 0xf9, 0xe9, 0x58, 0x9d, 0xff, 0x4a, 0x47, 0x98, 0xdd, 0x62, 0xb9, 0xc4, 0x0b, 0x52, 0x9b, 0x79, - 0x1d, 0xa9, 0xd5, 0xa6, 0x4f, 0x0d, 0x7d, 0x08, 0xf6, 0x4e, 0xc4, 0x04, 0x24, 0x73, 0xea, 0xe7, - 0x6b, 0x97, 0x3b, 0xbd, 0x37, 0x4f, 0x3d, 0xf8, 0xde, 0x86, 0x42, 0xe2, 0x82, 0x82, 0x6e, 0xc3, - 0x1c, 0xa3, 0xf9, 0x01, 0x4f, 0xf7, 0xfa, 0x09, 0x4f, 0xf3, 0xcc, 0x69, 0xc8, 0x35, 0x16, 0x55, - 0x02, 0x8f, 0x55, 0xe8, 0x09, 0x4f, 0x73, 0x3c, 0xcb, 0x26, 0x83, 0xcc, 0xfd, 0xc9, 0x82, 0x96, - 0x5e, 0x0d, 0x3d, 0x82, 0xa5, 0x94, 0x66, 0x7c, 0x94, 0x12, 0xda, 0x2f, 0xa7, 0x61, 0x4d, 0x91, - 0xc6, 0xbc, 0x61, 0x6e, 0xaa, 0x74, 0xde, 0x03, 0x20, 0x3c, 0x8e, 0x29, 0xc9, 0x23, 0x5d, 0x63, - 0x71, 0xf2, 0x95, 0x9a, 0xc5, 0xbc, 0x10, 0x74, 0xa3, 0x7e, 0x78, 0xb4, 0x76, 0x06, 0x97, 0xd0, - 0x6e, 0x08, 0x0d, 0xd9, 0xae, 0xe8, 0x1a, 0x18, 0x53, 0xd3, 0x8e, 0xb3, 0x58, 0xa9, 0x07, 0x36, - 0x08, 0xe4, 0x40, 0x6b, 0x9f, 0xa6, 0x99, 0xf9, 0xb9, 0x36, 0x36, 0x43, 0xf4, 0x06, 0xb4, 0x82, - 0x74, 0xdc, 0x4f, 0x47, 0xaa, 0x22, 0x36, 0x6e, 0x06, 0xe9, 0x18, 0x8f, 0x98, 0xfb, 0xab, 0x05, - 0x75, 0xd1, 0xeb, 0xaf, 0xeb, 0x87, 0xde, 0x82, 0x46, 0xea, 0xb3, 0xd0, 0x38, 0xd7, 0x59, 0xb5, - 0x08, 0x16, 0x53, 0x72, 0x09, 0x15, 0x45, 0xef, 0x02, 0x64, 0xb9, 0x9f, 0x53, 0xa5, 0x6e, 0x7d, - 0x0a, 0x75, 0x1b, 0x12, 0xef, 0x76, 0xa1, 0x53, 0x32, 0x1a, 0x74, 0x1e, 0x3a, 0x64, 0x40, 0xc9, - 0x5e, 0xc2, 0x23, 0x96, 0x67, 0x72, 0xe7, 0x73, 0xb8, 0x3c, 0x75, 0xe1, 0xb7, 0x0e, 0xd8, 0x98, - 0x66, 0x09, 0x67, 0x19, 0x45, 0x57, 0x8f, 0x99, 0x77, 0xd9, 0x22, 0x15, 0xa0, 0xec, 0xde, 0x1f, - 0x00, 0x18, 0x4b, 0xa6, 0x81, 0x2e, 0xdf, 0x4a, 0x95, 0xf1, 0xa0, 0xc0, 0xe0, 0x12, 0x1e, 0xdd, - 0x85, 0xb6, 0x71, 0xe6, 0x40, 0x6b, 0x71, 0xae, 0x4a, 0x36, 0xa7, 0x39, 0xc0, 0x13, 0x34, 0xba, - 0x09, 0x2d, 0xe1, 0xd1, 0x11, 0x0d, 0xb4, 0x95, 0x2f, 0x57, 0x89, 0xeb, 0x0a, 0x80, 0x0d, 0x12, - 0xdd, 0x80, 0xa6, 0x30, 0x6b, 0x1a, 0x68, 0x47, 0x77, 0xaa, 0x9c, 0x2d, 0x19, 0xc7, 0x1a, 0x87, - 0x6e, 0x83, 0xad, 0x21, 0x81, 0xf6, 0x74, 0xb7, 0xca, 0xd1, 0xd5, 0x0f, 0x70, 0x81, 0x15, 0xba, - 0x4c, 0xf4, 0x75, 0x5a, 0xa7, 0xe9, 0xb2, 0x59, 0x60, 0x70, 0x09, 0xff, 0x0a, 0x97, 0xc1, 0xcf, - 0x33, 0xfa, 0x32, 0x70, 0xc1, 0x36, 0x0e, 0xa6, 0x0b, 0x5d, 0x8c, 0xd1, 0x43, 0x40, 0xba, 0x5d, - 0x33, 0x32, 0xa0, 0x43, 0x7f, 0x7a, 0x5f, 0x9d, 0x55, 0xbc, 0x6d, 0x49, 0x43, 0x5f, 0xc2, 0xb9, - 0x93, 0xfd, 0x5f, 0x5e, 0x70, 0x1a, 0x37, 0x5b, 0x3a, 0x6e, 0x03, 0x7a, 0xe1, 0x6b, 0xb0, 0x18, - 0x70, 0x32, 0x1a, 0x52, 0x96, 0xfb, 0xa2, 0xc3, 0xfb, 0xa3, 0x34, 0x56, 0xe7, 0x1e, 0x2f, 0x1c, - 0x0b, 0x7c, 0x9e, 0xc6, 0xe8, 0x12, 0x34, 0xb9, 0x3f, 0xca, 0x07, 0x3d, 0x5d, 0xcc, 0x59, 0xd5, - 0x45, 0x5b, 0xeb, 0x62, 0x0e, 0xeb, 0x98, 0xfb, 0xc7, 0x0c, 0xc0, 0xe4, 0xf4, 0xa1, 0x8f, 0x4a, - 0xee, 0x69, 0x49, 0xe7, 0xbb, 0xf4, 0xb2, 0xd3, 0x5a, 0x35, 0x50, 0xf7, 0xef, 0x92, 0x11, 0x5e, - 0x81, 0x85, 0x94, 0x12, 0x3e, 0x1c, 0x52, 0x16, 0xd0, 0xa0, 0x3f, 0xb9, 0x65, 0xf0, 0xd9, 0xd2, - 0xfc, 0x63, 0x71, 0x73, 0x9c, 0xe6, 0x99, 0x33, 0xaf, 0xe0, 0x99, 0x8f, 0x60, 0xc9, 0xa8, 0xf1, - 0xaf, 0x85, 0x9f, 0x37, 0x4c, 0x2d, 0xf9, 0x02, 0xd4, 0xf6, 0xe8, 0x58, 0xde, 0x24, 0x6d, 0x2c, - 0x3e, 0x85, 0x6d, 0x05, 0x51, 0xe6, 0xef, 0xc4, 0x54, 0x0a, 0x6b, 0x63, 0x33, 0x74, 0xbf, 0x87, - 0x76, 0xd1, 0x8b, 0xe8, 0x7e, 0x45, 0xc9, 0x8b, 0x2f, 0x69, 0xdd, 0x17, 
0x08, 0xe9, 0x4d, 0x74, - 0xbc, 0x08, 0x73, 0x85, 0x38, 0x89, 0x9f, 0x0f, 0xe4, 0x82, 0x6d, 0x3c, 0x6b, 0x26, 0x9f, 0xf8, - 0xf9, 0xc0, 0xbd, 0x03, 0x2d, 0xdd, 0xd0, 0xe8, 0x6d, 0x40, 0xbe, 0xbc, 0x02, 0xfa, 0x01, 0xcd, - 0x48, 0x1a, 0x25, 0xf2, 0xf2, 0x50, 0xca, 0x2f, 0xaa, 0xc8, 0x83, 0x49, 0xc0, 0xfd, 0x18, 0x9a, - 0xaa, 0xad, 0xd1, 0xfb, 0xb0, 0x4c, 0xbf, 0x4b, 0xe2, 0x88, 0x44, 0x79, 0xbf, 0xf4, 0xfa, 0x12, - 0x7a, 0x28, 0x5f, 0xb4, 0xb1, 0x63, 0x00, 0xeb, 0x27, 0xe2, 0xee, 0x57, 0x60, 0x9b, 0x4e, 0x17, - 0x22, 0xe9, 0x44, 0x74, 0x97, 0x99, 0x21, 0xba, 0x09, 0x76, 0xc0, 0xc9, 0xf4, 0xc5, 0xad, 0x05, - 0x9c, 0xb8, 0x77, 0x00, 0x26, 0x56, 0x80, 0xae, 0x82, 0xf2, 0x71, 0xed, 0xc0, 0xc5, 0x75, 0xa8, - 0x5f, 0x10, 0xdb, 0x22, 0xa6, 0xad, 0xbe, 0x77, 0x1f, 0xda, 0x45, 0x00, 0xf5, 0xa0, 0xa5, 0x77, - 0x88, 0x16, 0x4e, 0x3e, 0x0a, 0xdc, 0xc5, 0x4a, 0x79, 0x2e, 0x5b, 0x37, 0xac, 0x8d, 0x7b, 0x87, - 0x7f, 0xad, 0x9e, 0x39, 0x7c, 0xba, 0x6a, 0xfd, 0xf9, 0x74, 0xd5, 0xfa, 0xe5, 0xd9, 0xaa, 0xf5, - 0xf5, 0xf5, 0xa9, 0x5e, 0x47, 0x7a, 0xb1, 0x9d, 0xa6, 0x9c, 0xba, 0xf9, 0x4f, 0x00, 0x00, 0x00, - 0xff, 0xff, 0x9a, 0x14, 0x9d, 0xf9, 0x7b, 0x0c, 0x00, 0x00, + // 1128 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xcc, 0x56, 0x4d, 0x6f, 0xdc, 0x44, + 0x18, 0xae, 0xb3, 0x9b, 0x5d, 0xef, 0xbb, 0x49, 0x9a, 0x8c, 0x02, 0x38, 0x6e, 0x94, 0x84, 0xb4, + 0x48, 0xe9, 0x07, 0xde, 0xb0, 0x51, 0xa1, 0xe5, 0xa3, 0x25, 0x1f, 0x54, 0xa2, 0x12, 0x04, 0x4d, + 0xf8, 0x10, 0x5c, 0x56, 0xce, 0x78, 0xe2, 0x35, 0xf1, 0x7a, 0x8c, 0x3f, 0x12, 0x56, 0xe2, 0xc2, + 0x91, 0x03, 0x07, 0x2e, 0x9c, 0xb9, 0x23, 0xce, 0xfc, 0x85, 0x70, 0xe3, 0x17, 0x44, 0xa2, 0xfc, + 0x09, 0xd4, 0x13, 0x9a, 0x2f, 0xaf, 0x13, 0x27, 0xd5, 0x52, 0xf5, 0xc0, 0x65, 0xd7, 0x33, 0xef, + 0xf3, 0x8c, 0xdf, 0x79, 0xde, 0x79, 0x9f, 0x31, 0xac, 0xfa, 0xac, 0x13, 0x27, 0x2c, 0x63, 0x84, + 0x85, 0x69, 0x87, 0xb8, 0x71, 0x96, 0x27, 0x54, 0xff, 0x3b, 0x22, 0x82, 0x9a, 0x6a, 0x68, 0x2f, + 0x9e, 0x01, 0x1f, 0x84, 0xec, 0x58, 0xfc, 0x48, 0x98, 0x3d, 0xef, 0x33, 0x9f, 0x89, 0xc7, 0x0e, + 0x7f, 0x52, 0xb3, 0x0b, 0x3e, 0x63, 0x7e, 0x48, 0x25, 0x6f, 0x3f, 0x3f, 0xe8, 0xb8, 0xd1, 0x50, + 0x86, 0x56, 0xbf, 0x6f, 0x43, 0x13, 0xd3, 0x6f, 0x72, 0x9a, 0x66, 0xe8, 0x26, 0xd4, 0xd3, 0x98, + 0x12, 0xcb, 0x58, 0x31, 0xd6, 0xda, 0xdd, 0x97, 0x1c, 0x9d, 0x81, 0x8a, 0x3b, 0x7b, 0x31, 0x25, + 0x58, 0x40, 0xd0, 0x5d, 0x30, 0xbd, 0x20, 0x25, 0xec, 0x88, 0x26, 0xd6, 0x84, 0x80, 0x2f, 0x54, + 0xe0, 0x3b, 0x0a, 0x80, 0x0b, 0x28, 0xa7, 0x1d, 0xb9, 0x61, 0xe0, 0xb9, 0x19, 0xb5, 0x6a, 0x97, + 0xd0, 0x3e, 0x57, 0x00, 0x5c, 0x40, 0xd1, 0x1d, 0x98, 0x74, 0xe3, 0x38, 0x1c, 0x5a, 0x75, 0xc1, + 0x79, 0xb9, 0xc2, 0xd9, 0xe4, 0x51, 0x2c, 0x41, 0x7c, 0x1b, 0x2c, 0xa6, 0x91, 0x35, 0x79, 0xc9, + 0x36, 0x76, 0x63, 0x1a, 0x61, 0x01, 0x41, 0x0f, 0xa0, 0xed, 0x92, 0xc3, 0x88, 0x1d, 0x87, 0xd4, + 0xf3, 0xa9, 0xd5, 0x10, 0x8c, 0xc5, 0xea, 0xf2, 0x23, 0x0c, 0x2e, 0x13, 0xd0, 0x3a, 0x98, 0x41, + 0x94, 0xd1, 0x24, 0x72, 0x43, 0xcb, 0x13, 0xe4, 0x79, 0x47, 0x6a, 0xed, 0x68, 0xad, 0x9d, 0xcd, + 0x68, 0x88, 0x0b, 0x94, 0xfd, 0x83, 0x01, 0x75, 0xae, 0x23, 0x7a, 0x04, 0x33, 0x84, 0x45, 0x11, + 0x25, 0x19, 0x4b, 0x7a, 0xd9, 0x30, 0xa6, 0x42, 0xf6, 0x99, 0xee, 0xb2, 0x23, 0xca, 0xb9, 0x2d, + 0x53, 0xe0, 0x50, 0x67, 0x5b, 0xe3, 0x3e, 0x1d, 0xc6, 0x14, 0x4f, 0x93, 0xf2, 0x10, 0xdd, 0x87, + 0x36, 0x61, 0xd1, 0x41, 0xe0, 0xf7, 0xbe, 0x4e, 0x59, 0x24, 0x8a, 0xd1, 0xda, 0x5a, 0x7c, 0x7a, + 0xba, 0x6c, 0xd1, 0x88, 0x30, 0x2f, 0x88, 0xfc, 0x0e, 0x0f, 0x38, 0xd8, 0x3d, 0xfe, 
0x88, 0xa6, + 0xa9, 0xeb, 0x53, 0xdc, 0x90, 0x04, 0xfb, 0x47, 0x03, 0x4c, 0x5d, 0xa4, 0xff, 0x43, 0x3e, 0xbf, + 0xd6, 0xc0, 0xd4, 0xd5, 0x47, 0x1f, 0x42, 0x3d, 0x72, 0x07, 0x32, 0x8b, 0xd6, 0xd6, 0xdd, 0xa7, + 0xa7, 0xcb, 0x6f, 0xf8, 0x41, 0xd6, 0xcf, 0xf7, 0x1d, 0xc2, 0x06, 0x1d, 0x9a, 0x66, 0xb9, 0x9b, + 0x0c, 0xe5, 0xf9, 0xaf, 0x74, 0x84, 0xce, 0x16, 0x8b, 0x25, 0x2e, 0xd8, 0xda, 0xc4, 0x8b, 0xd8, + 0x5a, 0x6d, 0xfc, 0xad, 0xa1, 0xf7, 0xc0, 0xdc, 0x0f, 0x22, 0x0e, 0x49, 0xad, 0xfa, 0x4a, 0x6d, + 0xad, 0xdd, 0x7d, 0xf5, 0xd2, 0x83, 0xef, 0x6c, 0x49, 0x24, 0x2e, 0x28, 0xf6, 0x4f, 0x06, 0x34, + 0xd5, 0x2c, 0x7a, 0x0c, 0xf3, 0x09, 0x4d, 0x59, 0x9e, 0x10, 0xda, 0x2b, 0xa7, 0x63, 0x8c, 0x91, + 0xce, 0x8c, 0x66, 0x6e, 0xcb, 0xb4, 0xde, 0x06, 0x20, 0x2c, 0x0c, 0x29, 0xc9, 0x02, 0x55, 0x2b, + 0x7e, 0x82, 0xa5, 0x2a, 0xc5, 0x3c, 0x17, 0x66, 0xab, 0x7e, 0x72, 0xba, 0x7c, 0x05, 0x97, 0xd0, + 0xb6, 0x0f, 0x93, 0xa2, 0xed, 0xd0, 0x6d, 0xd0, 0xe6, 0xa4, 0x9c, 0x63, 0xae, 0xa2, 0x2b, 0xd6, + 0x08, 0x64, 0x41, 0xf3, 0x88, 0x26, 0xa9, 0x7e, 0x5d, 0x0b, 0xeb, 0x21, 0x7a, 0x05, 0x9a, 0x5e, + 0x32, 0xec, 0x25, 0xb9, 0x54, 0xd6, 0xc4, 0x0d, 0x2f, 0x19, 0xe2, 0x3c, 0xb2, 0x7f, 0x33, 0xa0, + 0xce, 0x7b, 0xf6, 0x45, 0xbd, 0xe8, 0x35, 0x98, 0x4c, 0xdc, 0xc8, 0xd7, 0x0e, 0x74, 0x55, 0x2e, + 0x82, 0xf9, 0x94, 0x58, 0x42, 0x46, 0xd1, 0x5b, 0x00, 0x69, 0xe6, 0x66, 0x54, 0xaa, 0x5b, 0x1f, + 0x43, 0xdd, 0x49, 0x81, 0xb7, 0x3b, 0xd0, 0x2e, 0x19, 0x06, 0x5a, 0x81, 0x36, 0xe9, 0x53, 0x72, + 0x18, 0xb3, 0x20, 0xca, 0x52, 0x91, 0xf9, 0x34, 0x2e, 0x4f, 0xad, 0xfe, 0xde, 0x06, 0x13, 0xd3, + 0x34, 0x66, 0x51, 0x4a, 0xd1, 0xad, 0x33, 0x26, 0x5c, 0xb6, 0x3a, 0x09, 0x28, 0xbb, 0xf0, 0xbb, + 0x00, 0xda, 0x5a, 0xa9, 0xa7, 0xca, 0xb7, 0x58, 0x65, 0xec, 0x14, 0x18, 0x5c, 0xc2, 0xa3, 0xfb, + 0xd0, 0xd2, 0x0e, 0xeb, 0x29, 0x2d, 0xae, 0x55, 0xc9, 0xfa, 0x54, 0x7a, 0x78, 0x84, 0x46, 0x1b, + 0xd0, 0xe4, 0x5e, 0x1b, 0x50, 0x4f, 0x59, 0xf2, 0x42, 0x95, 0xb8, 0x29, 0x01, 0x58, 0x23, 0xd1, + 0x3a, 0x34, 0xb8, 0xe9, 0x52, 0x4f, 0x39, 0xb3, 0x55, 0xe5, 0xec, 0x8a, 0x38, 0x56, 0x38, 0xf4, + 0x26, 0x98, 0x0a, 0xe2, 0x29, 0x6f, 0xb6, 0xab, 0x1c, 0x55, 0x7d, 0x0f, 0x17, 0x58, 0xae, 0xcb, + 0x48, 0x5f, 0xab, 0x79, 0x99, 0x2e, 0xdb, 0x05, 0x06, 0x97, 0xf0, 0xcf, 0x61, 0xea, 0x3f, 0x4f, + 0x28, 0x53, 0xb7, 0xc1, 0xd4, 0x4e, 0xa4, 0x0a, 0x5d, 0x8c, 0xd1, 0x23, 0x40, 0xaa, 0x5d, 0x53, + 0xd2, 0xa7, 0x03, 0x77, 0x7c, 0x7f, 0x9c, 0x92, 0xbc, 0x3d, 0x41, 0x43, 0x5f, 0xc0, 0xb5, 0xf3, + 0xfd, 0x5f, 0x5e, 0x70, 0x1c, 0x57, 0x9a, 0x3f, 0x6b, 0x03, 0x6a, 0xe1, 0xdb, 0x30, 0xe7, 0x31, + 0x92, 0x0f, 0x68, 0x94, 0xb9, 0xbc, 0xc3, 0x7b, 0x79, 0x12, 0xca, 0x73, 0x8f, 0x67, 0xcf, 0x04, + 0x3e, 0x4b, 0x42, 0x74, 0x03, 0x1a, 0xcc, 0xcd, 0xb3, 0x7e, 0x57, 0x15, 0x73, 0x4a, 0x76, 0xd1, + 0xee, 0x26, 0x9f, 0xc3, 0x2a, 0x66, 0xff, 0x31, 0x01, 0x30, 0x3a, 0x7d, 0xe8, 0xfd, 0x92, 0x0b, + 0x1a, 0xc2, 0x05, 0x6f, 0x3c, 0xeb, 0xb4, 0x5e, 0x60, 0x84, 0xff, 0x94, 0x8c, 0xf0, 0x26, 0xcc, + 0x26, 0x94, 0xb0, 0xc1, 0x80, 0x46, 0x1e, 0xf5, 0x7a, 0xa3, 0xdb, 0x02, 0x5f, 0x2d, 0xcd, 0x7f, + 0xcc, 0x6f, 0x80, 0xcb, 0x3c, 0x73, 0xe2, 0x39, 0x3c, 0xf3, 0x31, 0xcc, 0x6b, 0x35, 0xfe, 0xb3, + 0xf0, 0x33, 0x9a, 0xa9, 0x24, 0x9f, 0x85, 0xda, 0x21, 0x1d, 0x8a, 0x1b, 0xa1, 0x85, 0xf9, 0x23, + 0xb7, 0x2d, 0x2f, 0x48, 0xdd, 0xfd, 0x90, 0x0a, 0x61, 0x4d, 0xac, 0x87, 0xf6, 0x77, 0xd0, 0x2a, + 0x7a, 0x11, 0x3d, 0xac, 0x28, 0x79, 0xfd, 0x19, 0xad, 0x7b, 0x81, 0x90, 0xce, 0x48, 0xc7, 0xeb, + 0x30, 0x5d, 0x88, 0x13, 0xbb, 0x59, 0x5f, 0x2c, 0xd8, 0xc2, 0x53, 0x7a, 0xf2, 0x13, 0x37, 0xeb, + 0xdb, 0xf7, 
0xa0, 0xa9, 0x1a, 0x1a, 0xbd, 0x0e, 0xc8, 0x15, 0x57, 0x40, 0xcf, 0xa3, 0x29, 0x49, + 0x82, 0x58, 0x5c, 0x1e, 0x52, 0xf9, 0x39, 0x19, 0xd9, 0x19, 0x05, 0xec, 0x0f, 0xa0, 0x21, 0xdb, + 0x1a, 0xbd, 0x03, 0x0b, 0xf4, 0xdb, 0x38, 0x0c, 0x48, 0x90, 0xf5, 0x4a, 0x5f, 0x51, 0x5c, 0x0f, + 0xe9, 0x8b, 0x26, 0xb6, 0x34, 0x60, 0xf3, 0x5c, 0xdc, 0xfe, 0x12, 0x4c, 0xdd, 0xe9, 0x5c, 0x24, + 0xb5, 0x11, 0xd5, 0x65, 0x7a, 0x88, 0x36, 0xc0, 0xf4, 0x18, 0x19, 0xbf, 0xb8, 0x35, 0x8f, 0x11, + 0xfb, 0x1e, 0xc0, 0xc8, 0x0a, 0xd0, 0x2d, 0x90, 0x3e, 0xae, 0x1c, 0xb8, 0xb8, 0x0e, 0xd5, 0x97, + 0xc0, 0x1e, 0x8f, 0x29, 0xab, 0xef, 0x3e, 0x84, 0x56, 0x11, 0x40, 0x5d, 0x68, 0xaa, 0x0c, 0xd1, + 0xec, 0xf9, 0xcb, 0xdd, 0x9e, 0xab, 0x94, 0x67, 0xcd, 0x58, 0x37, 0xb6, 0x1e, 0x9c, 0xfc, 0xb5, + 0x74, 0xe5, 0xe4, 0xc9, 0x92, 0xf1, 0xe7, 0x93, 0x25, 0xe3, 0x97, 0xbf, 0x97, 0x8c, 0xaf, 0xee, + 0x8c, 0xf5, 0x95, 0xa3, 0x16, 0xdb, 0x6f, 0x88, 0xa9, 0x8d, 0x7f, 0x03, 0x00, 0x00, 0xff, 0xff, + 0x1a, 0x5d, 0xe3, 0xed, 0x43, 0x0c, 0x00, 0x00, } // Reference imports to suppress errors if they are not otherwise used. @@ -1308,20 +1304,6 @@ func (m *Request_Validate) MarshalToSizedBuffer(dAtA []byte) (int, error) { i -= len(m.XXX_unrecognized) copy(dAtA[i:], m.XXX_unrecognized) } - if len(m.NetworkPorts) > 0 { - for iNdEx := len(m.NetworkPorts) - 1; iNdEx >= 0; iNdEx-- { - { - size, err := m.NetworkPorts[iNdEx].MarshalToSizedBuffer(dAtA[:i]) - if err != nil { - return 0, err - } - i -= size - i = encodeVarintCapture(dAtA, i, uint64(size)) - } - i-- - dAtA[i] = 0x2a - } - } if len(m.Bindings) > 0 { for iNdEx := len(m.Bindings) - 1; iNdEx >= 0; iNdEx-- { { @@ -2191,12 +2173,6 @@ func (m *Request_Validate) ProtoSize() (n int) { n += 1 + l + sovCapture(uint64(l)) } } - if len(m.NetworkPorts) > 0 { - for _, e := range m.NetworkPorts { - l = e.ProtoSize() - n += 1 + l + sovCapture(uint64(l)) - } - } if m.XXX_unrecognized != nil { n += len(m.XXX_unrecognized) } @@ -3174,40 +3150,6 @@ func (m *Request_Validate) Unmarshal(dAtA []byte) error { return err } iNdEx = postIndex - case 5: - if wireType != 2 { - return fmt.Errorf("proto: wrong wireType = %d for field NetworkPorts", wireType) - } - var msglen int - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowCapture - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - msglen |= int(b&0x7F) << shift - if b < 0x80 { - break - } - } - if msglen < 0 { - return ErrInvalidLengthCapture - } - postIndex := iNdEx + msglen - if postIndex < 0 { - return ErrInvalidLengthCapture - } - if postIndex > l { - return io.ErrUnexpectedEOF - } - m.NetworkPorts = append(m.NetworkPorts, &flow.NetworkPort{}) - if err := m.NetworkPorts[len(m.NetworkPorts)-1].Unmarshal(dAtA[iNdEx:postIndex]); err != nil { - return err - } - iNdEx = postIndex default: iNdEx = preIndex skippy, err := skipCapture(dAtA[iNdEx:]) diff --git a/go/protocols/capture/capture.proto b/go/protocols/capture/capture.proto index b9e7e82660..dbf2863e21 100644 --- a/go/protocols/capture/capture.proto +++ b/go/protocols/capture/capture.proto @@ -118,8 +118,6 @@ message Request { flow.CollectionSpec collection = 2 [ (gogoproto.nullable) = false ]; } repeated Binding bindings = 4; - // Network ports of this proposed capture. 
- repeated flow.NetworkPort network_ports = 5; } Validate validate = 3; diff --git a/go/protocols/derive/derive.pb.go b/go/protocols/derive/derive.pb.go index 21b3c5cf48..8062ff7138 100644 --- a/go/protocols/derive/derive.pb.go +++ b/go/protocols/derive/derive.pb.go @@ -152,12 +152,10 @@ type Request_Validate struct { // Connectors may use this for generating more helpful errors which are // framed to the user's filesystem, rather than the filesystem within // the connector. - ImportMap map[string]string `protobuf:"bytes,7,rep,name=import_map,json=importMap,proto3" json:"import_map,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` - // Network ports of this proposed derivation. - NetworkPorts []*flow.NetworkPort `protobuf:"bytes,8,rep,name=network_ports,json=networkPorts,proto3" json:"network_ports,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` + ImportMap map[string]string `protobuf:"bytes,7,rep,name=import_map,json=importMap,proto3" json:"import_map,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` } func (m *Request_Validate) Reset() { *m = Request_Validate{} } @@ -568,13 +566,20 @@ type Response_Spec struct { Protocol uint32 `protobuf:"varint,1,opt,name=protocol,proto3" json:"protocol,omitempty"` // JSON schema of the connector's configuration. ConfigSchemaJson encoding_json.RawMessage `protobuf:"bytes,2,opt,name=config_schema_json,json=configSchema,proto3,casttype=encoding/json.RawMessage" json:"config_schema_json,omitempty"` - // JSON schema of the connecor's lambda configuration. - LambdaConfigSchemaJson encoding_json.RawMessage `protobuf:"bytes,3,opt,name=lambda_config_schema_json,json=lambdaConfigSchema,proto3,casttype=encoding/json.RawMessage" json:"lambda_config_schema_json,omitempty"` + // JSON schema of the connector's lambda configuration. + // The "resource" terminology is used with Response.Spec + // only for symmetry with the capture and materialization protocols. + // NOTE(johnny): We can encapsulate separate lambda config vs + // shuffle lambda config schemas by encapsulating them as separate + // definitions within the resource config schema. + ResourceConfigSchemaJson encoding_json.RawMessage `protobuf:"bytes,3,opt,name=resource_config_schema_json,json=resourceConfigSchema,proto3,casttype=encoding/json.RawMessage" json:"resource_config_schema_json,omitempty"` // URL for connector's documention. - DocumentationUrl string `protobuf:"bytes,4,opt,name=documentation_url,json=documentationUrl,proto3" json:"documentation_url,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` + DocumentationUrl string `protobuf:"bytes,4,opt,name=documentation_url,json=documentationUrl,proto3" json:"documentation_url,omitempty"` + // Optional OAuth2 configuration. 
+ Oauth2 *flow.OAuth2 `protobuf:"bytes,5,opt,name=oauth2,proto3" json:"oauth2,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` } func (m *Response_Spec) Reset() { *m = Response_Spec{} } @@ -896,86 +901,86 @@ func init() { func init() { proto.RegisterFile("go/protocols/derive/derive.proto", fileDescriptor_4410d076c75e1e4f) } var fileDescriptor_4410d076c75e1e4f = []byte{ - // 1259 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xcc, 0x57, 0xcd, 0x6e, 0x1c, 0x45, - 0x10, 0xce, 0xd8, 0xfb, 0x37, 0xe5, 0xbf, 0xb8, 0x13, 0x60, 0x32, 0xb1, 0x1c, 0x13, 0x40, 0x18, - 0x2c, 0x66, 0x23, 0x27, 0x0a, 0x21, 0x04, 0x44, 0xec, 0xc4, 0x28, 0x82, 0x90, 0xa8, 0x1d, 0x83, - 0xc4, 0x65, 0x35, 0x9e, 0xe9, 0xdd, 0x9d, 0x78, 0xb6, 0x7b, 0xe8, 0xee, 0x71, 0xb4, 0x4f, 0x92, - 0x2b, 0x42, 0x3c, 0x03, 0x27, 0x1e, 0xc0, 0x47, 0xc4, 0x03, 0x44, 0x22, 0x5c, 0x79, 0x82, 0x9c, - 0x50, 0xff, 0xcc, 0xec, 0x8e, 0x77, 0x0d, 0x1b, 0x4e, 0x5c, 0xec, 0xae, 0xae, 0xaf, 0xaa, 0xab, - 0xab, 0xaa, 0xeb, 0x9b, 0x85, 0x8d, 0x1e, 0x6b, 0x67, 0x9c, 0x49, 0x16, 0xb1, 0x54, 0xb4, 0x63, - 0xc2, 0x93, 0x63, 0x62, 0xff, 0x05, 0x7a, 0x1f, 0x35, 0x8c, 0xe4, 0x6f, 0x44, 0x8c, 0x8a, 0x7c, - 0x40, 0x78, 0x89, 0x2f, 0x17, 0x06, 0xe9, 0xaf, 0x55, 0x7c, 0x75, 0x53, 0xf6, 0x4c, 0xff, 0xb1, - 0xda, 0x8b, 0x3d, 0xd6, 0x63, 0x7a, 0xd9, 0x56, 0x2b, 0xbb, 0x7b, 0xa9, 0xc7, 0x58, 0x2f, 0x25, - 0xc6, 0xee, 0x30, 0xef, 0xb6, 0x43, 0x3a, 0x34, 0xaa, 0xab, 0xbf, 0xae, 0x40, 0x13, 0x93, 0x1f, - 0x72, 0x22, 0x24, 0xda, 0x84, 0x9a, 0xc8, 0x48, 0xe4, 0x39, 0x1b, 0xce, 0xe6, 0xc2, 0xf6, 0xc5, - 0xc0, 0x46, 0x68, 0xd5, 0xc1, 0x7e, 0x46, 0x22, 0xac, 0x11, 0xe8, 0x06, 0xb4, 0x8e, 0xc3, 0x34, - 0x89, 0x43, 0x49, 0xbc, 0x39, 0x8d, 0xf6, 0x4e, 0xa3, 0xbf, 0xb5, 0x7a, 0x5c, 0x22, 0x95, 0x7f, - 0x96, 0x11, 0xea, 0xcd, 0x4f, 0xf7, 0xff, 0x28, 0x23, 0x14, 0x6b, 0x84, 0x42, 0x72, 0x12, 0xc6, - 0x5e, 0x6d, 0x3a, 0x12, 0x93, 0x30, 0xc6, 0x1a, 0x81, 0xb6, 0xa0, 0xde, 0x4d, 0x73, 0xd1, 0xf7, - 0xea, 0x1a, 0xfa, 0xc6, 0x69, 0xe8, 0x9e, 0x52, 0x62, 0x83, 0x41, 0x9f, 0xc3, 0xa2, 0x90, 0x21, - 0x97, 0x9d, 0x88, 0x0d, 0x06, 0x89, 0xf4, 0x1a, 0xda, 0xe6, 0xf2, 0xc4, 0x45, 0x15, 0x66, 0x57, - 0x43, 0xf0, 0x82, 0x18, 0x09, 0xea, 0x30, 0x4e, 0x04, 0x91, 0x5e, 0x73, 0xfa, 0x61, 0x58, 0x29, - 0xb1, 0xc1, 0xa0, 0x6b, 0xd0, 0x4a, 0xa8, 0x24, 0x9c, 0x86, 0xa9, 0x17, 0xdb, 0x7b, 0x98, 0x3a, - 0x04, 0x45, 0x1d, 0x82, 0xbb, 0x74, 0x88, 0x4b, 0x94, 0xff, 0xdc, 0x81, 0x9a, 0x4a, 0x32, 0x7a, - 0x02, 0xcb, 0x11, 0xa3, 0x94, 0x44, 0x92, 0xf1, 0x8e, 0x1c, 0x66, 0x44, 0x97, 0x64, 0x79, 0xfb, - 0xa3, 0x40, 0x97, 0x7a, 0x97, 0xa5, 0x29, 0x89, 0x64, 0xc2, 0xa8, 0x42, 0x07, 0xf7, 0x54, 0x10, - 0xa1, 0x12, 0x83, 0xdd, 0xc2, 0xea, 0xc9, 0x30, 0x23, 0x78, 0x29, 0x1a, 0x17, 0xd1, 0x27, 0xb0, - 0x10, 0x31, 0xda, 0x4d, 0x7a, 0x9d, 0xa7, 0x82, 0x51, 0x5d, 0x37, 0x77, 0x67, 0xed, 0xd5, 0x8b, - 0x2b, 0x1e, 0xa1, 0x11, 0x8b, 0x13, 0xda, 0x6b, 0x2b, 0x45, 0x80, 0xc3, 0x67, 0x0f, 0x89, 0x10, - 0x61, 0x8f, 0xe0, 0x86, 0x31, 0xf0, 0x7f, 0x6f, 0x40, 0xab, 0x28, 0xe8, 0xff, 0x2e, 0x3a, 0x74, - 0x1b, 0x20, 0x2a, 0x0f, 0x2d, 0xbb, 0x6b, 0x4a, 0x30, 0x3b, 0xb5, 0x93, 0x17, 0x57, 0xce, 0xe1, - 0x31, 0x34, 0xda, 0x01, 0x90, 0x3c, 0xa4, 0xa2, 0xcb, 0xf8, 0x40, 0x78, 0xb5, 0x8d, 0xf9, 0xcd, - 0x85, 0xed, 0xab, 0x67, 0xf5, 0x72, 0xf0, 0xa4, 0x80, 0xe2, 0x31, 0x2b, 0xf4, 0x1d, 0xac, 0x8a, - 0x7e, 0xde, 0xed, 0xa6, 0xa4, 0x73, 0x44, 0x86, 0x3a, 0x25, 0xc2, 0xab, 0x6f, 0xcc, 0x6f, 0x2e, - 0x6f, 0x6f, 0xfd, 
0x5b, 0x4e, 0xf6, 0x8d, 0xa1, 0xce, 0xc8, 0x8a, 0xf5, 0xf2, 0x15, 0x19, 0x2a, - 0x59, 0xa0, 0xb7, 0x61, 0x31, 0xe3, 0xec, 0x29, 0x89, 0x64, 0x87, 0x33, 0x66, 0xfa, 0xd5, 0xc5, - 0x0b, 0x76, 0x0f, 0x33, 0x26, 0xd1, 0x1e, 0x40, 0x32, 0xc8, 0x18, 0x97, 0x9d, 0x41, 0x98, 0x79, - 0x4d, 0x1d, 0xff, 0xfb, 0x67, 0xc6, 0xff, 0x40, 0x43, 0x1f, 0x86, 0xd9, 0x7d, 0x2a, 0xf9, 0x10, - 0xbb, 0x49, 0x21, 0xa3, 0x9b, 0xb0, 0x44, 0x89, 0x7c, 0xc6, 0xf8, 0x51, 0x47, 0x6d, 0x09, 0xaf, - 0xa5, 0x5d, 0xad, 0x9a, 0xf8, 0xbf, 0x31, 0xaa, 0xc7, 0x8c, 0x4b, 0xbc, 0x48, 0x47, 0x82, 0xf0, - 0x5f, 0x39, 0xe0, 0x96, 0x59, 0x41, 0x08, 0x6a, 0x34, 0x1c, 0x98, 0x86, 0x70, 0xb1, 0x5e, 0x9f, - 0xaa, 0xce, 0xdc, 0x6b, 0x55, 0xe7, 0x00, 0xfc, 0x22, 0xb3, 0x69, 0x38, 0x38, 0x8c, 0xc3, 0xce, - 0x78, 0x8f, 0xcc, 0xcf, 0xd0, 0x23, 0x17, 0xac, 0xfd, 0xd7, 0xda, 0x7c, 0xd7, 0x34, 0xcc, 0x1e, - 0xa0, 0x29, 0xee, 0x6a, 0x33, 0xb8, 0x5b, 0x4c, 0xc7, 0xfc, 0xf8, 0x77, 0x60, 0xb9, 0x9a, 0x51, - 0x74, 0x1e, 0xe6, 0x8f, 0xc8, 0xd0, 0xde, 0x5f, 0x2d, 0xd1, 0x45, 0xa8, 0x1f, 0x87, 0x69, 0x6e, - 0xe6, 0xa4, 0x8b, 0x8d, 0x70, 0x7b, 0xee, 0x96, 0xe3, 0xff, 0xe2, 0x40, 0x4d, 0xcd, 0x3c, 0x74, - 0xa3, 0x92, 0x21, 0xe7, 0xec, 0x0c, 0x55, 0x72, 0xe3, 0x41, 0xf3, 0x98, 0x70, 0x51, 0x24, 0xd5, - 0xc5, 0x85, 0x88, 0xde, 0x83, 0x3a, 0x0f, 0x69, 0x8f, 0xd8, 0xa7, 0xb0, 0x62, 0x5c, 0x61, 0xb5, - 0xa5, 0xbd, 0x18, 0x2d, 0xfa, 0x18, 0x40, 0xc8, 0x50, 0x12, 0x73, 0xfb, 0xfa, 0x0c, 0xb7, 0xaf, - 0x6b, 0xbc, 0xff, 0xd3, 0x1c, 0xd4, 0xd4, 0x08, 0x46, 0x6b, 0xe0, 0x96, 0xcf, 0x40, 0xc7, 0xbd, - 0x84, 0x47, 0x1b, 0xe8, 0x1d, 0xa8, 0xe5, 0x79, 0x12, 0xdb, 0x92, 0xdb, 0x28, 0x0e, 0x0e, 0x1e, - 0xdc, 0x7b, 0x1c, 0x72, 0x29, 0xb0, 0x56, 0xa2, 0x9b, 0xd0, 0xb4, 0x15, 0xb2, 0xd1, 0xae, 0x4d, - 0x1b, 0xf6, 0xc5, 0x33, 0xc1, 0x05, 0x18, 0x5d, 0x87, 0x56, 0xcc, 0xa2, 0xd9, 0x0b, 0x37, 0x1f, - 0xb3, 0xc8, 0x7f, 0x0a, 0xcd, 0xfd, 0x91, 0xbd, 0x7a, 0xab, 0xda, 0xde, 0x99, 0xc5, 0x5e, 0xd5, - 0xf2, 0x4d, 0x68, 0x64, 0x61, 0x74, 0x44, 0xcc, 0x9d, 0x16, 0xb1, 0x95, 0x54, 0xdb, 0xf7, 0x43, - 0xd1, 0xd7, 0x37, 0x58, 0xc2, 0x7a, 0xed, 0x37, 0xa1, 0xae, 0xb9, 0xc7, 0xc7, 0xb0, 0x30, 0x46, - 0x28, 0x68, 0x17, 0x10, 0xcf, 0xa9, 0x4c, 0x06, 0xa4, 0x13, 0xf5, 0x49, 0x74, 0x94, 0xb1, 0x84, - 0xca, 0xb2, 0xe8, 0x05, 0xfd, 0x07, 0xbb, 0xa5, 0x0e, 0xaf, 0x5a, 0xfc, 0x68, 0x4b, 0x39, 0xd7, - 0x5c, 0x73, 0xf5, 0xe7, 0x16, 0xb4, 0x30, 0x11, 0x19, 0xa3, 0x82, 0xa0, 0x0f, 0x2a, 0xfc, 0x3d, - 0xc6, 0x4e, 0x46, 0x3f, 0x4e, 0xe0, 0xb7, 0xc0, 0x2d, 0x68, 0xb9, 0x28, 0x90, 0x3f, 0x81, 0x2f, - 0xc6, 0x46, 0x8c, 0x47, 0x60, 0xd4, 0x86, 0x86, 0xa2, 0x68, 0x12, 0xdb, 0x7a, 0xbd, 0x35, 0x61, - 0xf6, 0x48, 0xab, 0xb1, 0x85, 0xa9, 0xa3, 0xb2, 0xfc, 0x30, 0x4d, 0x44, 0x9f, 0x14, 0x84, 0x3e, - 0x79, 0xd4, 0xe3, 0x02, 0x81, 0x47, 0x60, 0xb4, 0x0d, 0x4d, 0xcd, 0xdb, 0x24, 0xb6, 0xec, 0xee, - 0x4d, 0xd8, 0xed, 0x19, 0x3d, 0x2e, 0x80, 0xe8, 0x3e, 0x2c, 0x6b, 0xc6, 0x26, 0x71, 0x95, 0xe4, - 0xd7, 0x27, 0xb3, 0x61, 0x60, 0x96, 0xe7, 0x97, 0xc4, 0xb8, 0xf8, 0x1f, 0xc8, 0xfb, 0xaf, 0x82, - 0xbc, 0x7d, 0x68, 0x15, 0xdf, 0x67, 0xf6, 0x4d, 0x94, 0xb2, 0x1a, 0x3c, 0x76, 0xe2, 0x88, 0xa8, - 0x4f, 0x06, 0xe1, 0xec, 0x5c, 0xb7, 0x68, 0xec, 0xf6, 0xb5, 0x19, 0xda, 0x87, 0x4b, 0xd5, 0x01, - 0x36, 0xee, 0x6e, 0x96, 0xb1, 0x88, 0xc6, 0xe7, 0x98, 0x75, 0xba, 0x05, 0xab, 0x31, 0x8b, 0xf2, - 0x01, 0xa1, 0x52, 0x73, 0x53, 0x27, 0xe7, 0xa9, 0x79, 0x5b, 0xf8, 0x7c, 0x45, 0x71, 0xc0, 0x53, - 0xff, 0xf9, 0x1c, 0xb8, 0x65, 0x7f, 0xa0, 0x7b, 0x15, 0x16, 0x75, 0x34, 0x75, 0xbc, 0x7b, 0x76, - 0x3f, 0x9d, 0xc9, 0xa3, 0x2b, 0x3d, 0x42, 
0x09, 0x57, 0x90, 0x4e, 0x37, 0x49, 0x89, 0xf0, 0xe6, - 0xb4, 0xab, 0xe0, 0x1f, 0x5c, 0x7d, 0x59, 0x58, 0xec, 0x29, 0x03, 0xc3, 0x6b, 0xcb, 0xbd, 0xca, - 0xa6, 0xbf, 0x39, 0xce, 0x51, 0x97, 0xc1, 0x55, 0x5f, 0x8e, 0x1d, 0x46, 0x53, 0x33, 0xa8, 0x5b, - 0xb8, 0xa5, 0x36, 0x1e, 0xd1, 0x74, 0xe8, 0xdf, 0x85, 0x0b, 0x53, 0x1c, 0xbe, 0xd6, 0x58, 0x6f, - 0x41, 0xc3, 0xbc, 0x00, 0xff, 0x0b, 0x70, 0xcb, 0xbe, 0xae, 0x0c, 0x2c, 0x67, 0xd6, 0x81, 0xe5, - 0x42, 0xd3, 0x76, 0xb8, 0xff, 0x29, 0x2c, 0x55, 0x3a, 0x16, 0x7d, 0x08, 0x66, 0x1c, 0x9f, 0x26, - 0x0c, 0xfb, 0x51, 0xb5, 0xaf, 0x74, 0x76, 0x62, 0x6f, 0xdf, 0x01, 0xb7, 0x54, 0xa8, 0x17, 0xac, - 0x3f, 0x40, 0x08, 0x5a, 0x39, 0x35, 0x6b, 0xfd, 0xf3, 0xa7, 0x13, 0xbd, 0xe9, 0x5c, 0x73, 0x76, - 0x3e, 0x3b, 0xf9, 0x63, 0xfd, 0xdc, 0xc9, 0xcb, 0x75, 0xe7, 0xb7, 0x97, 0xeb, 0xce, 0x8f, 0x7f, - 0xae, 0x3b, 0xdf, 0x6f, 0xf5, 0x12, 0xd9, 0xcf, 0x0f, 0x83, 0x88, 0x0d, 0xda, 0x44, 0xc8, 0x3c, - 0xe4, 0x43, 0xf3, 0x53, 0x64, 0xca, 0x0f, 0x9d, 0xc3, 0x86, 0xde, 0xb9, 0xfe, 0x77, 0x00, 0x00, - 0x00, 0xff, 0xff, 0xec, 0x9f, 0x9d, 0xb8, 0x06, 0x0d, 0x00, 0x00, + // 1255 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xcc, 0x57, 0xdd, 0x6e, 0x1b, 0xc5, + 0x17, 0xef, 0x3a, 0xfe, 0xda, 0x93, 0xaf, 0x76, 0x9a, 0xff, 0x9f, 0x65, 0x1b, 0xa5, 0xa1, 0x14, + 0x11, 0x88, 0x58, 0x57, 0x6e, 0x05, 0xa5, 0x14, 0x44, 0xe3, 0x36, 0xa8, 0x82, 0x2a, 0xd5, 0xa4, + 0xa1, 0x12, 0x37, 0xd6, 0x64, 0x77, 0x6c, 0x6f, 0xb3, 0x9e, 0x59, 0x76, 0x66, 0x53, 0xf9, 0x9e, + 0x67, 0xa0, 0x12, 0x57, 0x88, 0x87, 0xe0, 0x19, 0x72, 0x85, 0x78, 0x82, 0x4a, 0x94, 0xb7, 0xe8, + 0x15, 0x9a, 0x8f, 0xdd, 0xd8, 0xb1, 0x03, 0x2e, 0x57, 0xdc, 0x24, 0x73, 0xce, 0xf9, 0x9d, 0x33, + 0x33, 0xe7, 0x9c, 0x39, 0xbf, 0x35, 0x6c, 0xf6, 0x79, 0x2b, 0xcd, 0xb8, 0xe4, 0x21, 0x4f, 0x44, + 0x2b, 0xa2, 0x59, 0x7c, 0x4c, 0xed, 0xbf, 0x40, 0xeb, 0x51, 0xdd, 0x48, 0xfe, 0x66, 0xc8, 0x99, + 0xc8, 0x87, 0x34, 0x2b, 0xf1, 0xe5, 0xc2, 0x20, 0xfd, 0xf5, 0x89, 0x58, 0xbd, 0x84, 0x3f, 0xd7, + 0x7f, 0xac, 0x75, 0xad, 0xcf, 0xfb, 0x5c, 0x2f, 0x5b, 0x6a, 0x65, 0xb5, 0x6f, 0xf7, 0x39, 0xef, + 0x27, 0xd4, 0xf8, 0x1d, 0xe6, 0xbd, 0x16, 0x61, 0x23, 0x63, 0xba, 0xf6, 0xc3, 0x2a, 0x34, 0x30, + 0xfd, 0x3e, 0xa7, 0x42, 0xa2, 0x2d, 0xa8, 0x8a, 0x94, 0x86, 0x9e, 0xb3, 0xe9, 0x6c, 0x2d, 0xb6, + 0xd7, 0x02, 0x7b, 0x42, 0x6b, 0x0e, 0xf6, 0x53, 0x1a, 0x62, 0x8d, 0x40, 0xb7, 0xa0, 0x79, 0x4c, + 0x92, 0x38, 0x22, 0x92, 0x7a, 0x15, 0x8d, 0xf6, 0xce, 0xa2, 0xbf, 0xb5, 0x76, 0x5c, 0x22, 0x55, + 0x7c, 0x9e, 0x52, 0xe6, 0x2d, 0xcc, 0x8e, 0xbf, 0x97, 0x52, 0x86, 0x35, 0x42, 0x21, 0x33, 0x4a, + 0x22, 0xaf, 0x3a, 0x1b, 0x89, 0x29, 0x89, 0xb0, 0x46, 0xa0, 0x6d, 0xa8, 0xf5, 0x92, 0x5c, 0x0c, + 0xbc, 0x9a, 0x86, 0xfe, 0xef, 0x2c, 0x74, 0x57, 0x19, 0xb1, 0xc1, 0xa0, 0x2f, 0x60, 0x49, 0x48, + 0x92, 0xc9, 0x6e, 0xc8, 0x87, 0xc3, 0x58, 0x7a, 0x75, 0xed, 0x73, 0x65, 0xea, 0xa2, 0x0a, 0xd3, + 0xd1, 0x10, 0xbc, 0x28, 0x4e, 0x05, 0xb5, 0x59, 0x46, 0x05, 0x95, 0x5e, 0x63, 0xf6, 0x66, 0x58, + 0x19, 0xb1, 0xc1, 0xa0, 0x1b, 0xd0, 0x8c, 0x99, 0xa4, 0x19, 0x23, 0x89, 0x17, 0xd9, 0x7b, 0x98, + 0x3a, 0x04, 0x45, 0x1d, 0x82, 0x7b, 0x6c, 0x84, 0x4b, 0x94, 0xff, 0xc2, 0x81, 0xaa, 0x4a, 0x32, + 0x7a, 0x02, 0x2b, 0x21, 0x67, 0x8c, 0x86, 0x92, 0x67, 0x5d, 0x39, 0x4a, 0xa9, 0x2e, 0xc9, 0x4a, + 0xfb, 0xa3, 0x40, 0x97, 0xba, 0xc3, 0x93, 0x84, 0x86, 0x32, 0xe6, 0x4c, 0xa1, 0x83, 0xfb, 0xea, + 0x10, 0x44, 0x89, 0x41, 0xa7, 0xf0, 0x7a, 0x32, 0x4a, 0x29, 0x5e, 0x0e, 0xc7, 0x45, 0xf4, 0x29, + 0x2c, 0x86, 0x9c, 0xf5, 0xe2, 0x7e, 0xf7, 0x99, 
0xe0, 0x4c, 0xd7, 0xcd, 0xdd, 0x59, 0x7f, 0xfd, + 0xf2, 0xaa, 0x47, 0x59, 0xc8, 0xa3, 0x98, 0xf5, 0x5b, 0xca, 0x10, 0x60, 0xf2, 0xfc, 0x11, 0x15, + 0x82, 0xf4, 0x29, 0xae, 0x1b, 0x07, 0xff, 0xa7, 0x3a, 0x34, 0x8b, 0x82, 0xfe, 0xe7, 0x4e, 0x87, + 0xee, 0x00, 0x84, 0xe5, 0xa6, 0x65, 0x77, 0xcd, 0x38, 0xcc, 0x4e, 0xf5, 0xe4, 0xe5, 0xd5, 0x0b, + 0x78, 0x0c, 0x8d, 0x76, 0x00, 0x64, 0x46, 0x98, 0xe8, 0xf1, 0x6c, 0x28, 0xbc, 0xea, 0xe6, 0xc2, + 0xd6, 0x62, 0xfb, 0xda, 0x79, 0xbd, 0x1c, 0x3c, 0x29, 0xa0, 0x78, 0xcc, 0x0b, 0x3d, 0x85, 0x4b, + 0x62, 0x90, 0xf7, 0x7a, 0x09, 0xed, 0x1e, 0xd1, 0x91, 0x4e, 0x89, 0xf0, 0x6a, 0x9b, 0x0b, 0x5b, + 0x2b, 0xed, 0xed, 0x7f, 0xca, 0xc9, 0xbe, 0x71, 0xd4, 0x19, 0x59, 0xb5, 0x51, 0xbe, 0xa6, 0x23, + 0x25, 0x0b, 0xf4, 0x0e, 0x2c, 0xa5, 0x19, 0x7f, 0x46, 0x43, 0xd9, 0xcd, 0x38, 0x37, 0xfd, 0xea, + 0xe2, 0x45, 0xab, 0xc3, 0x9c, 0x4b, 0xb4, 0x0b, 0x10, 0x0f, 0x53, 0x9e, 0xc9, 0xee, 0x90, 0xa4, + 0x5e, 0x43, 0x9f, 0xff, 0xfd, 0x73, 0xcf, 0xff, 0x50, 0x43, 0x1f, 0x91, 0xf4, 0x01, 0x93, 0xd9, + 0x08, 0xbb, 0x71, 0x21, 0xfb, 0xaf, 0x1d, 0x70, 0xcb, 0xdb, 0x21, 0x04, 0x55, 0x46, 0x86, 0xa6, + 0xb0, 0x2e, 0xd6, 0xeb, 0x33, 0x59, 0xae, 0xbc, 0x51, 0x96, 0x0f, 0xc0, 0x2f, 0x32, 0x94, 0x90, + 0xe1, 0x61, 0x44, 0xba, 0xe3, 0xb5, 0x5e, 0x98, 0xa3, 0xd6, 0x97, 0xad, 0xff, 0x37, 0xda, 0xbd, + 0x63, 0x0a, 0xbf, 0x0b, 0x68, 0x46, 0xb8, 0xea, 0x1c, 0xe1, 0x96, 0x92, 0xb1, 0x38, 0xfe, 0x5d, + 0x58, 0x99, 0xcc, 0x0c, 0xba, 0x08, 0x0b, 0x47, 0x74, 0x64, 0xef, 0xaf, 0x96, 0x68, 0x0d, 0x6a, + 0xc7, 0x24, 0xc9, 0xcd, 0xbc, 0x73, 0xb1, 0x11, 0xee, 0x54, 0x6e, 0x3b, 0xfe, 0xaf, 0x0e, 0x54, + 0xd5, 0xec, 0x42, 0xb7, 0x26, 0x32, 0xe4, 0x9c, 0x9f, 0xa1, 0x89, 0xdc, 0x78, 0xd0, 0x38, 0xa6, + 0x99, 0x28, 0x92, 0xea, 0xe2, 0x42, 0x44, 0xef, 0x41, 0x2d, 0x23, 0xac, 0x4f, 0x6d, 0x4b, 0xaf, + 0x9a, 0x50, 0x58, 0xa9, 0x74, 0x14, 0x63, 0x45, 0x9f, 0x00, 0x08, 0x49, 0x24, 0x35, 0xb7, 0xaf, + 0xcd, 0x71, 0xfb, 0x9a, 0xc6, 0xfb, 0xbf, 0x54, 0xa0, 0xaa, 0x46, 0x29, 0x5a, 0x07, 0xb7, 0x6c, + 0x67, 0x7d, 0xee, 0x65, 0x7c, 0xaa, 0x40, 0xef, 0x42, 0x35, 0xcf, 0xe3, 0xc8, 0x96, 0xdc, 0x9e, + 0xe2, 0xe0, 0xe0, 0xe1, 0xfd, 0xc7, 0x24, 0x93, 0x02, 0x6b, 0x23, 0xfa, 0x18, 0x1a, 0xb6, 0x42, + 0xf6, 0xb4, 0xeb, 0xb3, 0x86, 0x76, 0xd1, 0xee, 0xb8, 0x00, 0xa3, 0x9b, 0xd0, 0x8c, 0x78, 0x38, + 0x7f, 0xe1, 0x16, 0x22, 0x1e, 0xfa, 0xcf, 0xa0, 0xb1, 0x7f, 0xea, 0xaf, 0xde, 0x9c, 0xf6, 0x77, + 0xe6, 0xf1, 0x57, 0xb5, 0xfc, 0x3f, 0xd4, 0x53, 0x12, 0x1e, 0x51, 0x73, 0xa7, 0x25, 0x6c, 0x25, + 0xd5, 0xf6, 0x03, 0x22, 0x06, 0xfa, 0x06, 0xcb, 0x58, 0xaf, 0xfd, 0x06, 0xd4, 0x34, 0x87, 0xf8, + 0x18, 0x16, 0xc7, 0x88, 0x01, 0x75, 0x00, 0x65, 0x39, 0x93, 0xf1, 0x90, 0x76, 0xc3, 0x01, 0x0d, + 0x8f, 0x52, 0x1e, 0x33, 0x59, 0x16, 0xbd, 0xa0, 0xf1, 0xa0, 0x53, 0xda, 0xf0, 0x25, 0x8b, 0x3f, + 0x55, 0xa9, 0xe0, 0x9a, 0x33, 0xae, 0xfd, 0xd6, 0x84, 0x26, 0xa6, 0x22, 0xe5, 0x4c, 0x50, 0xf4, + 0xc1, 0x04, 0x0f, 0x8f, 0xb1, 0x8c, 0xb1, 0x8f, 0x13, 0xf1, 0x6d, 0x70, 0x0b, 0x7a, 0x2d, 0x0a, + 0xe4, 0x4f, 0xe1, 0x8b, 0xe7, 0x1f, 0xe1, 0x53, 0x30, 0x6a, 0x41, 0x5d, 0x51, 0x2d, 0x8d, 0x6c, + 0xbd, 0xde, 0x9a, 0x72, 0xdb, 0xd3, 0x66, 0x6c, 0x61, 0x6a, 0xab, 0x34, 0x3f, 0x4c, 0x62, 0x31, + 0xa0, 0x05, 0x31, 0x4f, 0x6f, 0xf5, 0xb8, 0x40, 0xe0, 0x53, 0x30, 0x6a, 0x43, 0x43, 0xf3, 0x2f, + 0x8d, 0x2c, 0x4b, 0x7b, 0x53, 0x7e, 0xbb, 0xc6, 0x8e, 0x0b, 0x20, 0x7a, 0x00, 0x2b, 0x9a, 0x79, + 0x69, 0x34, 0x49, 0xd6, 0x1b, 0xd3, 0xd9, 0x30, 0x30, 0xcb, 0xd7, 0xcb, 0x62, 0x5c, 0xfc, 0x17, + 0x24, 0xfc, 0x63, 0xc5, 0x92, 0xb0, 0x0f, 0xcd, 0xe2, 0x3b, 0xcb, 0xbe, 
0x89, 0x52, 0x56, 0x83, + 0xc7, 0x4e, 0x1c, 0x11, 0x0e, 0xe8, 0x90, 0xcc, 0xcf, 0x59, 0x4b, 0xc6, 0x6f, 0x5f, 0xbb, 0xa1, + 0xa7, 0x70, 0x25, 0xa3, 0x82, 0xe7, 0x59, 0x48, 0xbb, 0x33, 0x02, 0xce, 0x33, 0x18, 0xd7, 0x8a, + 0x00, 0x9d, 0xf1, 0xc0, 0xdb, 0x70, 0x29, 0xe2, 0x61, 0x3e, 0xa4, 0x4c, 0x6a, 0x9e, 0xe9, 0xe6, + 0x59, 0x62, 0xde, 0x17, 0xbe, 0x38, 0x61, 0x38, 0xc8, 0x12, 0x74, 0x1d, 0xea, 0x9c, 0xe4, 0x72, + 0xd0, 0xb6, 0xe5, 0x59, 0x32, 0x4f, 0x7c, 0xef, 0x9e, 0xd2, 0x61, 0x6b, 0xf3, 0x5f, 0x54, 0xc0, + 0x2d, 0x3b, 0x09, 0xdd, 0x9f, 0xe0, 0x4d, 0x47, 0xf3, 0xce, 0xf5, 0xf3, 0x3b, 0xef, 0x5c, 0xe6, + 0x5c, 0xed, 0x53, 0x46, 0x33, 0x05, 0xe9, 0xf6, 0xe2, 0x84, 0x0a, 0xaf, 0xa2, 0x43, 0x05, 0x7f, + 0x13, 0xea, 0xab, 0xc2, 0x63, 0x57, 0x39, 0x18, 0x26, 0x5b, 0xe9, 0x4f, 0x28, 0xfd, 0xad, 0x71, + 0x36, 0xbb, 0x02, 0xae, 0xfa, 0x56, 0xec, 0x72, 0x96, 0x98, 0x91, 0xde, 0xc4, 0x4d, 0xa5, 0xd8, + 0x63, 0xc9, 0xc8, 0xbf, 0x07, 0x97, 0x67, 0x04, 0x7c, 0x23, 0x02, 0x68, 0x42, 0xdd, 0xbc, 0x15, + 0xff, 0x4b, 0x70, 0xcb, 0x17, 0x30, 0x31, 0xda, 0x9c, 0x79, 0x47, 0x9b, 0x0b, 0x0d, 0xfb, 0x16, + 0xfc, 0xcf, 0x60, 0x79, 0xa2, 0xb7, 0xd1, 0x87, 0x60, 0x06, 0xf7, 0x59, 0x6a, 0xb1, 0x9f, 0x51, + 0xfb, 0xca, 0x66, 0x67, 0x7b, 0xfb, 0x2e, 0xb8, 0xa5, 0x41, 0xbd, 0x75, 0xfd, 0xc9, 0x41, 0xd1, + 0xea, 0x99, 0xa9, 0xec, 0x5f, 0x3c, 0x9b, 0xe8, 0x2d, 0xe7, 0x86, 0xb3, 0xf3, 0xf9, 0xc9, 0x1f, + 0x1b, 0x17, 0x4e, 0x5e, 0x6d, 0x38, 0xbf, 0xbf, 0xda, 0x70, 0x7e, 0xfe, 0x73, 0xc3, 0xf9, 0x6e, + 0xbb, 0x1f, 0xcb, 0x41, 0x7e, 0x18, 0x84, 0x7c, 0xd8, 0xa2, 0x42, 0xe6, 0x24, 0x1b, 0x99, 0x1f, + 0x1f, 0x33, 0x7e, 0xda, 0x1c, 0xd6, 0xb5, 0xe6, 0xe6, 0x5f, 0x01, 0x00, 0x00, 0xff, 0xff, 0x84, + 0x6a, 0x99, 0x88, 0xf8, 0x0c, 0x00, 0x00, } // Reference imports to suppress errors if they are not otherwise used. 
@@ -1278,20 +1283,6 @@ func (m *Request_Validate) MarshalToSizedBuffer(dAtA []byte) (int, error) { i -= len(m.XXX_unrecognized) copy(dAtA[i:], m.XXX_unrecognized) } - if len(m.NetworkPorts) > 0 { - for iNdEx := len(m.NetworkPorts) - 1; iNdEx >= 0; iNdEx-- { - { - size, err := m.NetworkPorts[iNdEx].MarshalToSizedBuffer(dAtA[:i]) - if err != nil { - return 0, err - } - i -= size - i = encodeVarintDerive(dAtA, i, uint64(size)) - } - i-- - dAtA[i] = 0x42 - } - } if len(m.ImportMap) > 0 { for k := range m.ImportMap { v := m.ImportMap[k] @@ -1837,6 +1828,18 @@ func (m *Response_Spec) MarshalToSizedBuffer(dAtA []byte) (int, error) { i -= len(m.XXX_unrecognized) copy(dAtA[i:], m.XXX_unrecognized) } + if m.Oauth2 != nil { + { + size, err := m.Oauth2.MarshalToSizedBuffer(dAtA[:i]) + if err != nil { + return 0, err + } + i -= size + i = encodeVarintDerive(dAtA, i, uint64(size)) + } + i-- + dAtA[i] = 0x2a + } if len(m.DocumentationUrl) > 0 { i -= len(m.DocumentationUrl) copy(dAtA[i:], m.DocumentationUrl) @@ -1844,10 +1847,10 @@ func (m *Response_Spec) MarshalToSizedBuffer(dAtA []byte) (int, error) { i-- dAtA[i] = 0x22 } - if len(m.LambdaConfigSchemaJson) > 0 { - i -= len(m.LambdaConfigSchemaJson) - copy(dAtA[i:], m.LambdaConfigSchemaJson) - i = encodeVarintDerive(dAtA, i, uint64(len(m.LambdaConfigSchemaJson))) + if len(m.ResourceConfigSchemaJson) > 0 { + i -= len(m.ResourceConfigSchemaJson) + copy(dAtA[i:], m.ResourceConfigSchemaJson) + i = encodeVarintDerive(dAtA, i, uint64(len(m.ResourceConfigSchemaJson))) i-- dAtA[i] = 0x1a } @@ -2204,12 +2207,6 @@ func (m *Request_Validate) ProtoSize() (n int) { n += mapEntrySize + 1 + sovDerive(uint64(mapEntrySize)) } } - if len(m.NetworkPorts) > 0 { - for _, e := range m.NetworkPorts { - l = e.ProtoSize() - n += 1 + l + sovDerive(uint64(l)) - } - } if m.XXX_unrecognized != nil { n += len(m.XXX_unrecognized) } @@ -2413,7 +2410,7 @@ func (m *Response_Spec) ProtoSize() (n int) { if l > 0 { n += 1 + l + sovDerive(uint64(l)) } - l = len(m.LambdaConfigSchemaJson) + l = len(m.ResourceConfigSchemaJson) if l > 0 { n += 1 + l + sovDerive(uint64(l)) } @@ -2421,6 +2418,10 @@ func (m *Response_Spec) ProtoSize() (n int) { if l > 0 { n += 1 + l + sovDerive(uint64(l)) } + if m.Oauth2 != nil { + l = m.Oauth2.ProtoSize() + n += 1 + l + sovDerive(uint64(l)) + } if m.XXX_unrecognized != nil { n += len(m.XXX_unrecognized) } @@ -3346,40 +3347,6 @@ func (m *Request_Validate) Unmarshal(dAtA []byte) error { } m.ImportMap[mapkey] = mapvalue iNdEx = postIndex - case 8: - if wireType != 2 { - return fmt.Errorf("proto: wrong wireType = %d for field NetworkPorts", wireType) - } - var msglen int - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowDerive - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - msglen |= int(b&0x7F) << shift - if b < 0x80 { - break - } - } - if msglen < 0 { - return ErrInvalidLengthDerive - } - postIndex := iNdEx + msglen - if postIndex < 0 { - return ErrInvalidLengthDerive - } - if postIndex > l { - return io.ErrUnexpectedEOF - } - m.NetworkPorts = append(m.NetworkPorts, &flow.NetworkPort{}) - if err := m.NetworkPorts[len(m.NetworkPorts)-1].Unmarshal(dAtA[iNdEx:postIndex]); err != nil { - return err - } - iNdEx = postIndex default: iNdEx = preIndex skippy, err := skipDerive(dAtA[iNdEx:]) @@ -4653,7 +4620,7 @@ func (m *Response_Spec) Unmarshal(dAtA []byte) error { iNdEx = postIndex case 3: if wireType != 2 { - return fmt.Errorf("proto: wrong wireType = %d for field LambdaConfigSchemaJson", wireType) + 
return fmt.Errorf("proto: wrong wireType = %d for field ResourceConfigSchemaJson", wireType) } var stringLen uint64 for shift := uint(0); ; shift += 7 { @@ -4681,7 +4648,7 @@ func (m *Response_Spec) Unmarshal(dAtA []byte) error { if postIndex > l { return io.ErrUnexpectedEOF } - m.LambdaConfigSchemaJson = encoding_json.RawMessage(dAtA[iNdEx:postIndex]) + m.ResourceConfigSchemaJson = encoding_json.RawMessage(dAtA[iNdEx:postIndex]) iNdEx = postIndex case 4: if wireType != 2 { @@ -4715,6 +4682,42 @@ func (m *Response_Spec) Unmarshal(dAtA []byte) error { } m.DocumentationUrl = string(dAtA[iNdEx:postIndex]) iNdEx = postIndex + case 5: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field Oauth2", wireType) + } + var msglen int + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowDerive + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + msglen |= int(b&0x7F) << shift + if b < 0x80 { + break + } + } + if msglen < 0 { + return ErrInvalidLengthDerive + } + postIndex := iNdEx + msglen + if postIndex < 0 { + return ErrInvalidLengthDerive + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + if m.Oauth2 == nil { + m.Oauth2 = &flow.OAuth2{} + } + if err := m.Oauth2.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { + return err + } + iNdEx = postIndex default: iNdEx = preIndex skippy, err := skipDerive(dAtA[iNdEx:]) diff --git a/go/protocols/derive/derive.proto b/go/protocols/derive/derive.proto index cc1352451e..c0df1bf1cf 100644 --- a/go/protocols/derive/derive.proto +++ b/go/protocols/derive/derive.proto @@ -81,8 +81,6 @@ message Request { // framed to the user's filesystem, rather than the filesystem within // the connector. map<string, string> import_map = 7; - // Network ports of this proposed derivation. - repeated flow.NetworkPort network_ports = 8; } Validate validate = 2; @@ -169,13 +167,20 @@ message Response { (gogoproto.casttype) = "encoding/json.RawMessage", json_name = "configSchema" ]; - // JSON schema of the connecor's lambda configuration. - string lambda_config_schema_json = 3 [ + // JSON schema of the connector's lambda configuration. + // The "resource" terminology is used with Response.Spec + // only for symmetry with the capture and materialization protocols. + // NOTE(johnny): We can encapsulate separate lambda config vs + // shuffle lambda config schemas by encapsulating them as separate + // definitions within the resource config schema. + string resource_config_schema_json = 3 [ (gogoproto.casttype) = "encoding/json.RawMessage", - json_name = "lambdaConfigSchema" + json_name = "resourceConfigSchema" ]; // URL for connector's documention. string documentation_url = 4; + // Optional OAuth2 configuration. + flow.OAuth2 oauth2 = 5; } Spec spec = 1; diff --git a/go/protocols/flow/flow.pb.go b/go/protocols/flow/flow.pb.go index 2e6c0a0d6c..95a68f800b 100644 --- a/go/protocols/flow/flow.pb.go +++ b/go/protocols/flow/flow.pb.go @@ -356,67 +356,6 @@ func (CombineAPI_Code) EnumDescriptor() ([]byte, []int) { return fileDescriptor_d0677502142fec31, []int{14, 0} } -// Code labels message codes passed over the CGO bridge. -type BuildAPI_Code int32 - -const ( - // Begin a build with a Config (Go -> Rust). - BuildAPI_BEGIN BuildAPI_Code = 0 - // Poll the build after completing one or more trampoline tasks (Go -> - // Rust). - BuildAPI_POLL BuildAPI_Code = 1 - // Trampoline task start or completion (Rust <-> Go). - BuildAPI_TRAMPOLINE BuildAPI_Code = 2 - // Trampoline sub-type: Start fetch of a resource.
- BuildAPI_TRAMPOLINE_FETCH BuildAPI_Code = 3 - // Trampoline sub-type: Start validation of a capture. - BuildAPI_TRAMPOLINE_VALIDATE_CAPTURE BuildAPI_Code = 4 - // Trampoline sub-type: Start validation of a materialization. - BuildAPI_TRAMPOLINE_VALIDATE_MATERIALIZATION BuildAPI_Code = 5 - // Build completed successfully (Rust -> Go). - BuildAPI_DONE BuildAPI_Code = 6 - // Build completed with errors (Rust -> Go). - BuildAPI_DONE_WITH_ERRORS BuildAPI_Code = 7 - // Trampoline sub-type: start docker ispect of an image - BuildAPI_TRAMPOLINE_DOCKER_INSPECT BuildAPI_Code = 8 - // Generate catalog specification JSON schema (Go <-> Rust) - BuildAPI_CATALOG_SCHEMA BuildAPI_Code = 100 -) - -var BuildAPI_Code_name = map[int32]string{ - 0: "BEGIN", - 1: "POLL", - 2: "TRAMPOLINE", - 3: "TRAMPOLINE_FETCH", - 4: "TRAMPOLINE_VALIDATE_CAPTURE", - 5: "TRAMPOLINE_VALIDATE_MATERIALIZATION", - 6: "DONE", - 7: "DONE_WITH_ERRORS", - 8: "TRAMPOLINE_DOCKER_INSPECT", - 100: "CATALOG_SCHEMA", -} - -var BuildAPI_Code_value = map[string]int32{ - "BEGIN": 0, - "POLL": 1, - "TRAMPOLINE": 2, - "TRAMPOLINE_FETCH": 3, - "TRAMPOLINE_VALIDATE_CAPTURE": 4, - "TRAMPOLINE_VALIDATE_MATERIALIZATION": 5, - "DONE": 6, - "DONE_WITH_ERRORS": 7, - "TRAMPOLINE_DOCKER_INSPECT": 8, - "CATALOG_SCHEMA": 100, -} - -func (x BuildAPI_Code) String() string { - return proto.EnumName(BuildAPI_Code_name, int32(x)) -} - -func (BuildAPI_Code) EnumDescriptor() ([]byte, []int) { - return fileDescriptor_d0677502142fec31, []int{15, 0} -} - // Status represents the high-level response to an Open request. If OK, then // the connection may proceed. Any other status indicates the reason for refusal. // This enum is a superset of the consumer.Status enum used by the Shards service, @@ -1127,7 +1066,7 @@ type MaterializationSpec struct { ShardTemplate *protocol1.ShardSpec `protobuf:"bytes,5,opt,name=shard_template,json=shardTemplate,proto3" json:"shard_template,omitempty"` // Template for recovery logs of shards of this materialization. RecoveryLogTemplate *protocol.JournalSpec `protobuf:"bytes,6,opt,name=recovery_log_template,json=recoveryLogTemplate,proto3" json:"recovery_log_template,omitempty"` - // Network ports of this capture. + // Network ports of this materialization. NetworkPorts []*NetworkPort `protobuf:"bytes,7,rep,name=network_ports,json=networkPorts,proto3" json:"network_ports,omitempty"` XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` @@ -1782,8 +1721,9 @@ func (m *CombineAPI_Stats) XXX_DiscardUnknown() { var xxx_messageInfo_CombineAPI_Stats proto.InternalMessageInfo -// BuildAPI is a meta-message which name spaces messages of the Build API -// bridge. +// BuildAPI is deprecated and will be removed. +// We're currently keeping Config around only to +// avoid churning various Go snapshot tests. 
type BuildAPI struct { XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` @@ -1878,47 +1818,6 @@ func (m *BuildAPI_Config) XXX_DiscardUnknown() { var xxx_messageInfo_BuildAPI_Config proto.InternalMessageInfo -type BuildAPI_Fetch struct { - ResourceUrl string `protobuf:"bytes,1,opt,name=resource_url,json=resourceUrl,proto3" json:"resource_url,omitempty"` - ContentType ContentType `protobuf:"varint,2,opt,name=content_type,json=contentType,proto3,enum=flow.ContentType" json:"content_type,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` -} - -func (m *BuildAPI_Fetch) Reset() { *m = BuildAPI_Fetch{} } -func (m *BuildAPI_Fetch) String() string { return proto.CompactTextString(m) } -func (*BuildAPI_Fetch) ProtoMessage() {} -func (*BuildAPI_Fetch) Descriptor() ([]byte, []int) { - return fileDescriptor_d0677502142fec31, []int{15, 1} -} -func (m *BuildAPI_Fetch) XXX_Unmarshal(b []byte) error { - return m.Unmarshal(b) -} -func (m *BuildAPI_Fetch) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { - if deterministic { - return xxx_messageInfo_BuildAPI_Fetch.Marshal(b, m, deterministic) - } else { - b = b[:cap(b)] - n, err := m.MarshalToSizedBuffer(b) - if err != nil { - return nil, err - } - return b[:n], nil - } -} -func (m *BuildAPI_Fetch) XXX_Merge(src proto.Message) { - xxx_messageInfo_BuildAPI_Fetch.Merge(m, src) -} -func (m *BuildAPI_Fetch) XXX_Size() int { - return m.ProtoSize() -} -func (m *BuildAPI_Fetch) XXX_DiscardUnknown() { - xxx_messageInfo_BuildAPI_Fetch.DiscardUnknown(m) -} - -var xxx_messageInfo_BuildAPI_Fetch proto.InternalMessageInfo - // ResetStateRequest is the request of the Testing.ResetState RPC. type ResetStateRequest struct { XXX_NoUnkeyedLiteral struct{} `json:"-"` @@ -2392,7 +2291,6 @@ func init() { proto.RegisterEnum("flow.TestSpec_Step_Type", TestSpec_Step_Type_name, TestSpec_Step_Type_value) proto.RegisterEnum("flow.ExtractAPI_Code", ExtractAPI_Code_name, ExtractAPI_Code_value) proto.RegisterEnum("flow.CombineAPI_Code", CombineAPI_Code_name, CombineAPI_Code_value) - proto.RegisterEnum("flow.BuildAPI_Code", BuildAPI_Code_name, BuildAPI_Code_value) proto.RegisterEnum("flow.TaskNetworkProxyResponse_Status", TaskNetworkProxyResponse_Status_name, TaskNetworkProxyResponse_Status_value) proto.RegisterType((*Slice)(nil), "flow.Slice") proto.RegisterType((*UUIDParts)(nil), "flow.UUIDParts") @@ -2426,7 +2324,6 @@ func init() { proto.RegisterType((*CombineAPI_Stats)(nil), "flow.CombineAPI.Stats") proto.RegisterType((*BuildAPI)(nil), "flow.BuildAPI") proto.RegisterType((*BuildAPI_Config)(nil), "flow.BuildAPI.Config") - proto.RegisterType((*BuildAPI_Fetch)(nil), "flow.BuildAPI.Fetch") proto.RegisterType((*ResetStateRequest)(nil), "flow.ResetStateRequest") proto.RegisterType((*ResetStateResponse)(nil), "flow.ResetStateResponse") proto.RegisterType((*AdvanceTimeRequest)(nil), "flow.AdvanceTimeRequest") @@ -2444,248 +2341,238 @@ func init() { func init() { proto.RegisterFile("go/protocols/flow/flow.proto", fileDescriptor_d0677502142fec31) } var fileDescriptor_d0677502142fec31 = []byte{ - // 3852 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xd4, 0x5a, 0x4d, 0x8c, 0x23, 0x59, - 0x52, 0xae, 0xf4, 0xbf, 0xc3, 0x55, 0xae, 0xac, 0x57, 0xd5, 0xdd, 0x6e, 0xcf, 0x74, 0xb9, 0xc6, - 0xb3, 0x4b, 0xd7, 0xcc, 0xec, 0xb8, 0x97, 0x6a, 0x76, 0x99, 0xe9, 0x56, 0xb3, 0xf2, 0x4f, 0x56, - 0xb7, 0xbb, 0x5d, 0xb6, 0x49, 0x67, 0xed, 0x6c, 0x8f, 
0x84, 0x92, 0xac, 0xcc, 0x67, 0x57, 0x4e, - 0xa5, 0x33, 0x4d, 0xe6, 0x73, 0x77, 0x7b, 0x2f, 0x20, 0x40, 0x42, 0x1a, 0x81, 0xb4, 0x07, 0x56, - 0xbb, 0x37, 0x86, 0x95, 0x90, 0x10, 0x17, 0x2e, 0x08, 0x09, 0xb8, 0x20, 0x71, 0x19, 0xfe, 0xc4, - 0x0a, 0xc4, 0x85, 0x43, 0x8f, 0x58, 0x0e, 0xec, 0x89, 0x13, 0xa7, 0x3e, 0xa1, 0xf7, 0x93, 0xe9, - 0x74, 0xb9, 0xba, 0x7e, 0x76, 0x97, 0xc3, 0x5e, 0xac, 0x7c, 0xf1, 0x22, 0x22, 0xdf, 0x8b, 0x88, - 0x17, 0xf1, 0x65, 0x3c, 0xc3, 0x9b, 0x23, 0xef, 0xce, 0xc4, 0xf7, 0x88, 0x67, 0x7a, 0x4e, 0x70, - 0x67, 0xe8, 0x78, 0xcf, 0xd9, 0x4f, 0x8d, 0xd1, 0x50, 0x8a, 0x3e, 0x97, 0xb7, 0x8f, 0x7c, 0xef, - 0x04, 0xfb, 0x11, 0x5f, 0xf4, 0xc0, 0xb9, 0xca, 0x3b, 0xa6, 0xe7, 0x06, 0xd3, 0xf1, 0x39, 0x1c, - 0x5b, 0x23, 0x6f, 0xe4, 0xb1, 0xc7, 0x3b, 0xf4, 0x49, 0x50, 0x2b, 0x23, 0xcf, 0x1b, 0x39, 0x98, - 0x33, 0x1f, 0x4d, 0x87, 0x77, 0x88, 0x3d, 0xc6, 0x01, 0x31, 0xc6, 0x13, 0xce, 0x50, 0x7d, 0x00, - 0xe9, 0x81, 0x63, 0x9b, 0x18, 0x6d, 0x41, 0xfa, 0x08, 0x8f, 0x6c, 0xb7, 0x24, 0xed, 0x48, 0xbb, - 0x6b, 0x2a, 0x1f, 0x20, 0x19, 0x92, 0xd8, 0xb5, 0x4a, 0x09, 0x46, 0xa3, 0x8f, 0xf7, 0x56, 0x7f, - 0xf8, 0xc7, 0x95, 0x95, 0xef, 0xfc, 0xa0, 0xb2, 0xf2, 0xfd, 0x1f, 0x54, 0x56, 0xaa, 0x2e, 0xe4, - 0x0f, 0x0f, 0xdb, 0xad, 0xbe, 0xe1, 0x93, 0x00, 0x21, 0x48, 0xb9, 0x9e, 0x85, 0x99, 0x86, 0x8c, - 0xca, 0x9e, 0xd1, 0x7d, 0x48, 0x9b, 0x8e, 0x67, 0x9e, 0x30, 0x15, 0x99, 0xc6, 0x97, 0x5f, 0xbd, - 0xac, 0xbc, 0x35, 0xf2, 0x6a, 0x23, 0xe3, 0xdb, 0x98, 0x10, 0x5c, 0xb3, 0xf0, 0xb3, 0x3b, 0xa6, - 0xe7, 0xe3, 0x3b, 0x63, 0x1c, 0x04, 0xc6, 0x08, 0xd7, 0x9a, 0x94, 0x59, 0xe5, 0x32, 0xf7, 0xe4, - 0x1f, 0x7f, 0x56, 0x91, 0x16, 0xde, 0xf7, 0xef, 0x12, 0x40, 0xdf, 0xf7, 0x3e, 0xc1, 0x26, 0xb1, - 0x3d, 0xb6, 0xbc, 0x09, 0xf1, 0xd9, 0x0b, 0xf3, 0x2a, 0x7d, 0xa4, 0xdb, 0x18, 0xda, 0xd8, 0xe1, - 0x4b, 0xce, 0xab, 0x7c, 0x80, 0xca, 0x90, 0xc3, 0x2f, 0x26, 0x8e, 0x6d, 0xda, 0xa4, 0x94, 0xdc, - 0x91, 0x76, 0x73, 0x6a, 0x34, 0x46, 0xbb, 0x20, 0xdb, 0x81, 0x3e, 0x31, 0x7c, 0x62, 0x53, 0x9d, - 0xfa, 0x09, 0x9e, 0x95, 0x52, 0x8c, 0xa7, 0x68, 0x07, 0xfd, 0x90, 0xfc, 0x04, 0xcf, 0xd0, 0x97, - 0xa0, 0x48, 0x39, 0x7d, 0x7b, 0x6c, 0xf8, 0x33, 0xc6, 0x97, 0x66, 0x7c, 0xab, 0x76, 0xd0, 0xe7, - 0x44, 0xca, 0x75, 0x17, 0xf2, 0xb6, 0x3b, 0xc4, 0x3e, 0x76, 0x4d, 0x5c, 0xca, 0xec, 0x48, 0xbb, - 0x85, 0xbd, 0xf5, 0x1a, 0x73, 0x78, 0x3b, 0x24, 0x37, 0x52, 0x9f, 0xbf, 0xac, 0xac, 0xa8, 0x73, - 0xbe, 0xea, 0xff, 0x26, 0x21, 0x1f, 0x4d, 0xd3, 0x4d, 0x90, 0xd9, 0x04, 0x07, 0x25, 0x69, 0x27, - 0x49, 0x37, 0xc1, 0x06, 0xa8, 0x06, 0x99, 0x80, 0xf8, 0xb6, 0x3b, 0x62, 0x5b, 0x28, 0xec, 0x5d, - 0x3f, 0xa5, 0xb5, 0x36, 0x60, 0xb3, 0xaa, 0xe0, 0x62, 0x5a, 0x6c, 0xe2, 0x60, 0xb6, 0x1b, 0xaa, - 0x85, 0x0e, 0xd0, 0x0e, 0x14, 0x2c, 0x1c, 0x98, 0xbe, 0x3d, 0xa1, 0xdb, 0x62, 0x3b, 0xc8, 0xab, - 0x71, 0x12, 0xba, 0x0f, 0xab, 0x16, 0x1e, 0x1a, 0x53, 0x87, 0xe8, 0x9f, 0x04, 0x9e, 0xcb, 0xf6, - 0x90, 0x6f, 0xbc, 0xf9, 0xea, 0x65, 0xa5, 0x84, 0x5d, 0xd3, 0xb3, 0x6c, 0x77, 0x74, 0x87, 0x4e, - 0xd4, 0x54, 0xe3, 0xf9, 0x01, 0x77, 0x9b, 0x9a, 0x15, 0x12, 0xe8, 0x3a, 0x64, 0x02, 0x6c, 0xfa, - 0x98, 0x94, 0xb2, 0xcc, 0x36, 0x62, 0x44, 0x17, 0x8f, 0x5f, 0xd8, 0x01, 0x09, 0x4a, 0xb9, 0x1d, - 0x69, 0xb7, 0xb8, 0xbc, 0x78, 0x85, 0xcd, 0xaa, 0x82, 0xab, 0xfc, 0x47, 0x12, 0x64, 0xf8, 0x7e, - 0xd0, 0x5b, 0xb0, 0x6a, 0x7a, 0x2e, 0xc1, 0x2e, 0xd1, 0xa9, 0x21, 0xd8, 0xee, 0xf3, 0x6a, 0x41, - 0xd0, 0xb4, 0xd9, 0x04, 0xd3, 0xb7, 0x0e, 0x3d, 0x7f, 0x6c, 0x10, 0xb1, 0x57, 0x31, 0x42, 0xef, - 0x80, 0x1c, 0x8a, 0x86, 0x4b, 0x67, 0xeb, 0xca, 0xab, 0xeb, 0x82, 0xae, 0x08, 
0x32, 0xba, 0x05, - 0x30, 0x36, 0x5e, 0xe8, 0x0e, 0x76, 0x47, 0xe4, 0x98, 0xed, 0x79, 0x4d, 0xcd, 0x8f, 0x8d, 0x17, - 0x1d, 0x46, 0x78, 0x9c, 0xca, 0x49, 0x72, 0xe2, 0x71, 0x2a, 0x97, 0x90, 0x93, 0x8f, 0x53, 0xb9, - 0xb4, 0x9c, 0xa9, 0x36, 0x20, 0xc3, 0xd7, 0x8c, 0x0a, 0x90, 0x6d, 0x77, 0xbf, 0x59, 0xef, 0xb4, - 0x5b, 0xf2, 0x0a, 0xca, 0x41, 0xea, 0xe0, 0x70, 0xa0, 0xc9, 0x12, 0xca, 0x42, 0xf2, 0xa0, 0xfe, - 0x54, 0x4e, 0xa0, 0x55, 0xc8, 0xb5, 0x0f, 0xfa, 0x9d, 0x76, 0xb3, 0xad, 0xc9, 0x49, 0x04, 0x90, - 0x69, 0xd6, 0xbb, 0xdd, 0x9e, 0x26, 0xa7, 0xaa, 0x4f, 0xa1, 0xd0, 0xc5, 0xe4, 0xb9, 0xe7, 0x9f, - 0xf4, 0x3d, 0x9f, 0x19, 0xcf, 0x9d, 0x8e, 0x8f, 0xb0, 0x2f, 0x0e, 0xa1, 0x18, 0xd1, 0xf0, 0x0d, - 0x4f, 0xbb, 0x88, 0xeb, 0x68, 0x4c, 0x65, 0x26, 0xd3, 0x23, 0xc7, 0x36, 0x45, 0x60, 0x8b, 0x51, - 0xf5, 0x5f, 0x8a, 0x50, 0x6c, 0x7a, 0x8e, 0xc3, 0x4f, 0xca, 0x60, 0x82, 0x4d, 0x54, 0x85, 0x94, - 0x6b, 0x8c, 0xf9, 0xf9, 0xcc, 0x37, 0x8a, 0xaf, 0x5e, 0x56, 0x60, 0xce, 0xa1, 0xb2, 0x39, 0xd4, - 0x82, 0x8d, 0xe7, 0xbe, 0x4d, 0xb0, 0x1e, 0x98, 0xc7, 0x78, 0x6c, 0xf0, 0x08, 0xc8, 0x5d, 0x22, - 0x02, 0x0a, 0x4c, 0x6c, 0xc0, 0xa4, 0x50, 0x03, 0x64, 0x1f, 0x1b, 0xd6, 0x82, 0x92, 0xc2, 0x25, - 0x94, 0x00, 0x95, 0x12, 0x3a, 0x64, 0x48, 0xd2, 0x23, 0x96, 0x64, 0x47, 0x80, 0x3e, 0xa2, 0x9b, - 0x90, 0x9b, 0x4e, 0x6d, 0x4b, 0xa7, 0x47, 0x9e, 0xfb, 0x39, 0x4b, 0xc7, 0x7d, 0xe2, 0x53, 0x47, - 0xcf, 0x4f, 0x30, 0x3b, 0xf3, 0x41, 0x29, 0xcd, 0x24, 0xd7, 0x23, 0xfa, 0x3e, 0x23, 0xa3, 0x0f, - 0xa0, 0x30, 0x89, 0x32, 0x48, 0x50, 0xca, 0xec, 0x24, 0x77, 0x0b, 0x7b, 0x32, 0x0f, 0xc7, 0x79, - 0x6a, 0x11, 0x47, 0x34, 0xce, 0x4a, 0x6d, 0x63, 0x98, 0x27, 0x3a, 0xc1, 0xe3, 0x89, 0x63, 0x10, - 0xcc, 0xb7, 0x95, 0xbd, 0x8c, 0x6d, 0x0c, 0xf3, 0x44, 0x13, 0x52, 0xa8, 0x05, 0x68, 0xbe, 0xd4, - 0x50, 0x57, 0x29, 0xcf, 0x8e, 0xf4, 0xb5, 0x5a, 0x94, 0xd5, 0x1f, 0x7b, 0x53, 0xdf, 0x35, 0x1c, - 0xea, 0x38, 0x75, 0x23, 0x12, 0x88, 0xb4, 0x7c, 0x03, 0xc0, 0xc2, 0xbe, 0xfd, 0xcc, 0x60, 0xa7, - 0x78, 0x95, 0x49, 0x57, 0xf8, 0x26, 0x16, 0xbd, 0x5e, 0x6b, 0x45, 0x6c, 0x6a, 0x4c, 0xa4, 0xfc, - 0xe7, 0x00, 0x30, 0x9f, 0x42, 0x1a, 0x14, 0x4d, 0xcf, 0x75, 0xb1, 0x49, 0x3c, 0x9f, 0x1f, 0x33, - 0x89, 0x9d, 0xd3, 0xf7, 0x2f, 0xd0, 0x59, 0x6b, 0x86, 0x52, 0xf4, 0x20, 0xaa, 0x6b, 0x66, 0x7c, - 0x88, 0x3e, 0x04, 0x7a, 0x4c, 0x87, 0xf6, 0x88, 0xdb, 0x2a, 0x71, 0x09, 0x5b, 0x65, 0xb8, 0x00, - 0xea, 0x01, 0x10, 0xdf, 0x70, 0x03, 0x7a, 0x92, 0x03, 0x16, 0x05, 0x85, 0xbd, 0x77, 0x2e, 0x5a, - 0x8c, 0x16, 0x4a, 0x08, 0xf7, 0xc5, 0x54, 0xa0, 0x8f, 0x60, 0x23, 0x38, 0x9e, 0x0e, 0x87, 0x0e, - 0xa6, 0xa9, 0x5b, 0xe7, 0x09, 0x36, 0xb5, 0x93, 0xdc, 0x2d, 0xee, 0xbd, 0x77, 0x91, 0xde, 0x01, - 0x17, 0x64, 0x5b, 0x5c, 0x17, 0x5a, 0x9e, 0xe0, 0x99, 0xc6, 0xf2, 0xf2, 0x3d, 0x28, 0x06, 0xc7, - 0x86, 0x6f, 0xcd, 0x9d, 0x99, 0x66, 0xee, 0xd8, 0xac, 0x85, 0x45, 0xbb, 0x36, 0xa0, 0xf3, 0xcc, - 0x95, 0x6b, 0x8c, 0x35, 0x72, 0x63, 0x1b, 0xae, 0xf9, 0xd8, 0xf4, 0x9e, 0x61, 0x7f, 0xa6, 0x3b, - 0xde, 0x68, 0xae, 0x22, 0x73, 0x5e, 0x3c, 0x6c, 0x86, 0x32, 0x1d, 0x6f, 0x14, 0xa9, 0xfa, 0x3a, - 0xac, 0xb9, 0x3c, 0x97, 0xe8, 0x13, 0xcf, 0x27, 0x41, 0x29, 0xcb, 0x6c, 0xb6, 0xc1, 0xf7, 0x16, - 0x4b, 0x33, 0xea, 0xaa, 0x3b, 0x1f, 0x04, 0xe5, 0xdf, 0x49, 0x43, 0x3e, 0xb2, 0x1b, 0x7a, 0x6b, - 0x21, 0x47, 0xac, 0xbd, 0x7a, 0x59, 0x99, 0x4f, 0x8a, 0x14, 0x71, 0x0f, 0xc0, 0x8c, 0x2c, 0xc5, - 0x7c, 0x5a, 0xd8, 0xdb, 0x3a, 0xcb, 0x82, 0xa1, 0x13, 0xe6, 0xdc, 0xa8, 0x13, 0x0f, 0xfe, 0x00, - 0x3b, 0x2c, 0x54, 0x44, 0x3d, 0xbb, 0x31, 0xdf, 0x6c, 0xc7, 0x38, 0xc2, 0xce, 0x40, 0x4c, 0x0b, - 0x35, 
0xf3, 0x43, 0x10, 0x4e, 0xf0, 0xbc, 0x68, 0x7b, 0xbe, 0x4d, 0x78, 0xc9, 0x5e, 0x53, 0xa3, - 0x31, 0xfa, 0x0a, 0x20, 0x96, 0x82, 0x2c, 0xec, 0x18, 0x33, 0x3d, 0xc0, 0xa6, 0xe7, 0xb2, 0x9c, - 0x40, 0xb9, 0x58, 0x72, 0x6a, 0xd1, 0x89, 0x01, 0xa7, 0xa3, 0x0a, 0x14, 0x62, 0xc1, 0xc1, 0x92, - 0x42, 0x5e, 0x85, 0xb9, 0xa7, 0xd1, 0x21, 0x94, 0x43, 0x06, 0xc7, 0x18, 0x1f, 0x59, 0x86, 0x1e, - 0x0f, 0xec, 0xcb, 0x24, 0x81, 0x4d, 0x21, 0xdf, 0x61, 0xe2, 0x4d, 0x1e, 0xe5, 0xfb, 0x80, 0xce, - 0x50, 0x77, 0x99, 0x7c, 0xbb, 0xea, 0xc4, 0xf5, 0xbc, 0x01, 0x79, 0xb6, 0x5b, 0xcf, 0x75, 0x66, - 0x2c, 0x97, 0xe4, 0xd4, 0x1c, 0x25, 0xf4, 0x5c, 0x67, 0x86, 0x6a, 0xb0, 0xf9, 0x09, 0x8f, 0x1e, - 0x9d, 0x67, 0xe5, 0xe9, 0x70, 0x68, 0xbf, 0x28, 0x01, 0x4b, 0xa1, 0x1b, 0x62, 0x4a, 0xa5, 0x99, - 0x97, 0x4d, 0xa0, 0x0f, 0x01, 0x5c, 0x8f, 0xe8, 0x47, 0x78, 0xe8, 0xf9, 0x98, 0xe5, 0xed, 0xc2, - 0x5e, 0xb9, 0xc6, 0x91, 0x64, 0x2d, 0x44, 0x92, 0x35, 0x2d, 0x44, 0x92, 0x6a, 0xde, 0xf5, 0x48, - 0x83, 0x31, 0xa3, 0x5f, 0x06, 0x3a, 0xd0, 0x8d, 0x21, 0xc1, 0xbe, 0xc8, 0x4a, 0xe7, 0x49, 0xe6, - 0x5c, 0x8f, 0xd4, 0x29, 0x6f, 0x55, 0x85, 0xb5, 0x85, 0x4c, 0x82, 0xca, 0x70, 0x5d, 0x14, 0x55, - 0xbd, 0xd9, 0xeb, 0x76, 0x95, 0xa6, 0xd6, 0x53, 0x75, 0xed, 0x69, 0x5f, 0x91, 0x57, 0x68, 0x09, - 0x1d, 0xfc, 0x6a, 0xa7, 0xad, 0x29, 0xb2, 0x84, 0x8a, 0x00, 0x94, 0x3a, 0x68, 0xaa, 0xed, 0xbe, - 0x26, 0x27, 0x50, 0x1e, 0xd2, 0xed, 0x83, 0xfa, 0x43, 0x45, 0x4e, 0x56, 0x0f, 0xa0, 0x10, 0x3b, - 0xb8, 0xa8, 0x04, 0x5b, 0xa1, 0xc6, 0xc1, 0xa3, 0xc3, 0xfd, 0xfd, 0x8e, 0x12, 0xea, 0x2b, 0x40, - 0xb6, 0xd1, 0xeb, 0x75, 0x94, 0x7a, 0x57, 0x96, 0x78, 0x35, 0xd7, 0x94, 0x87, 0x8a, 0x2a, 0x27, - 0xd8, 0x9b, 0x34, 0xb5, 0xdd, 0x7d, 0x28, 0x27, 0xab, 0xdf, 0x4b, 0x40, 0x91, 0xd5, 0x10, 0x1e, - 0x7f, 0x34, 0x9c, 0x11, 0xa4, 0x4e, 0xf0, 0x2c, 0xc4, 0x69, 0xec, 0x99, 0x16, 0xe4, 0x67, 0x86, - 0x33, 0xc5, 0x41, 0x29, 0xc1, 0xa8, 0x62, 0x44, 0x83, 0xd5, 0xf2, 0xcc, 0xe9, 0x18, 0xbb, 0x44, - 0x40, 0x98, 0x68, 0x8c, 0x9e, 0xc1, 0x35, 0x56, 0xb4, 0xe2, 0x51, 0xa0, 0x8f, 0x8d, 0x09, 0xcb, - 0x4f, 0x85, 0xbd, 0xaf, 0xf0, 0xd3, 0xb5, 0xf8, 0x72, 0x3e, 0xe4, 0xfe, 0x7f, 0x1c, 0x78, 0xee, - 0x81, 0x31, 0x51, 0x5c, 0xe2, 0xcf, 0x1a, 0x6f, 0x7e, 0xfa, 0xc5, 0x79, 0xb5, 0x68, 0x38, 0x17, - 0x2b, 0x2b, 0x70, 0xe3, 0x35, 0x5a, 0xc2, 0xf2, 0x2b, 0xa0, 0x35, 0x2d, 0xbf, 0x5b, 0x90, 0x66, - 0x5b, 0x09, 0xa1, 0x35, 0x1b, 0xdc, 0x4b, 0x7c, 0x20, 0x55, 0xff, 0x22, 0x0d, 0x85, 0xa6, 0x31, - 0x21, 0x53, 0x1f, 0x33, 0xa0, 0x51, 0x59, 0x48, 0x22, 0x85, 0x57, 0x2f, 0x2b, 0x59, 0x31, 0x2d, - 0x52, 0xc8, 0xfe, 0x52, 0xb5, 0x49, 0xb0, 0x6a, 0x13, 0x56, 0xb0, 0xb9, 0xae, 0x2b, 0xd5, 0x97, - 0xe4, 0x15, 0xea, 0xcb, 0xd7, 0x20, 0x77, 0x64, 0xbb, 0x94, 0x25, 0x10, 0x56, 0xbe, 0xb9, 0xfc, - 0xf2, 0x06, 0xe7, 0x50, 0x23, 0x56, 0x0a, 0x34, 0x6c, 0x97, 0x60, 0xff, 0x99, 0xe1, 0x9c, 0x4a, - 0x2a, 0xeb, 0x21, 0x3d, 0xcc, 0x29, 0xcb, 0x75, 0x21, 0xf3, 0xd3, 0xd7, 0x85, 0xec, 0x4f, 0x5f, - 0x17, 0x72, 0x97, 0xab, 0x0b, 0x7f, 0x29, 0x41, 0x56, 0xec, 0x1f, 0x3d, 0x86, 0x2d, 0x1f, 0x07, - 0xde, 0xd4, 0x37, 0xf1, 0x42, 0xa2, 0x92, 0x2e, 0x61, 0xf0, 0x62, 0x28, 0x29, 0x52, 0xd5, 0xdb, - 0xb0, 0x16, 0xe9, 0x9a, 0x18, 0xe4, 0x58, 0x1c, 0x93, 0xd5, 0x90, 0xd8, 0x37, 0xc8, 0xf1, 0xa9, - 0x1a, 0x93, 0xbc, 0x4a, 0x8d, 0xa9, 0xde, 0x3e, 0x9d, 0x4a, 0x16, 0xf0, 0x79, 0x94, 0x1f, 0xb2, - 0xd5, 0x3f, 0x04, 0xd8, 0x3c, 0x30, 0x08, 0xf6, 0x6d, 0xc3, 0xb1, 0xbf, 0x6d, 0x44, 0x38, 0xf9, - 0xf6, 0x42, 0xf8, 0x6e, 0xbe, 0x7a, 0x59, 0x59, 0x3f, 0xc5, 0x26, 0xc2, 0xb8, 0xfb, 0x9a, 0x30, - 0xbe, 0xcd, 0x57, 0x7a, 0x86, 
0xee, 0xff, 0xb7, 0x70, 0x7e, 0xb0, 0x14, 0xce, 0x6f, 0xbd, 0x7e, - 0x11, 0xcb, 0x61, 0xfd, 0x73, 0x8e, 0x61, 0xbe, 0x9b, 0xf9, 0xf9, 0x8c, 0xd5, 0xd7, 0xe0, 0xa1, - 0xec, 0xcf, 0x00, 0x0f, 0xe5, 0x4f, 0xe1, 0xa1, 0x26, 0xac, 0xf3, 0x12, 0x13, 0x84, 0x05, 0x84, - 0x41, 0xa6, 0x68, 0xa9, 0x8b, 0xc5, 0x45, 0xbc, 0xa3, 0x38, 0x5c, 0xac, 0x77, 0x6f, 0xc3, 0x9a, - 0x85, 0x1d, 0x62, 0xe8, 0xd3, 0x89, 0x65, 0x10, 0x1c, 0x84, 0x0d, 0x10, 0x46, 0x3c, 0xe4, 0x34, - 0x74, 0x04, 0xc8, 0xc2, 0x13, 0x1f, 0x9b, 0x06, 0xc1, 0x96, 0x2e, 0x50, 0x8f, 0x08, 0x86, 0xbb, - 0x17, 0x06, 0x65, 0xad, 0x15, 0xc9, 0x8a, 0xe2, 0xad, 0x6e, 0x58, 0xa7, 0x49, 0xaf, 0x83, 0x34, - 0xb9, 0xcb, 0x41, 0x1a, 0xf8, 0x89, 0x21, 0x4d, 0xe1, 0xf2, 0x90, 0xa6, 0xfc, 0x5b, 0x12, 0x6c, - 0x2c, 0x6d, 0x06, 0xdd, 0x02, 0x18, 0xf9, 0xde, 0x74, 0xa2, 0xcf, 0x53, 0x8c, 0x9a, 0x67, 0x94, - 0x2e, 0x4d, 0x29, 0x3f, 0x53, 0x80, 0x5c, 0xbd, 0x7b, 0x6e, 0x2a, 0x9c, 0xc3, 0xa8, 0x18, 0x6c, - 0xca, 0x55, 0xff, 0x27, 0x0f, 0x99, 0x5e, 0x7d, 0x4a, 0x8e, 0xf7, 0x44, 0xe3, 0xe1, 0x99, 0x6d, - 0xe1, 0xb0, 0xc9, 0x16, 0x8d, 0xd1, 0xbb, 0xb0, 0x61, 0x4c, 0xc9, 0xb1, 0x3e, 0xf5, 0x9d, 0xf9, - 0x91, 0xe7, 0xd0, 0x60, 0x9d, 0x4e, 0x1c, 0xfa, 0x4e, 0x74, 0xae, 0x3f, 0x84, 0x9b, 0x86, 0x69, - 0xe2, 0x20, 0xd0, 0x89, 0x77, 0x82, 0xdd, 0x45, 0x19, 0x0e, 0x86, 0xae, 0x73, 0x06, 0x8d, 0xce, - 0xc7, 0x45, 0x6b, 0xb0, 0xb9, 0x20, 0x3a, 0xc6, 0xe4, 0xd8, 0xb3, 0x78, 0x37, 0x41, 0xdd, 0x88, - 0x09, 0x1d, 0xb0, 0x09, 0xb6, 0xac, 0x38, 0xff, 0x91, 0x67, 0xcd, 0x44, 0xb7, 0x60, 0x3d, 0xc6, - 0xdd, 0xf0, 0xac, 0x19, 0xfa, 0x5d, 0x09, 0x6e, 0x2d, 0x30, 0x1f, 0x63, 0xc3, 0xc2, 0x7e, 0x30, - 0xc7, 0x5f, 0x69, 0x96, 0x7f, 0xc4, 0xf7, 0x21, 0x37, 0x4a, 0xad, 0x3e, 0xd7, 0xf2, 0x88, 0xf3, - 0x5f, 0x01, 0x7e, 0x21, 0x63, 0x49, 0x1a, 0x7d, 0x2a, 0xc1, 0xf6, 0xc2, 0x32, 0x7c, 0x1c, 0x4c, - 0x3c, 0x37, 0xc0, 0xf3, 0x75, 0x64, 0xe2, 0x38, 0x70, 0x79, 0x1d, 0xaa, 0x10, 0xb8, 0xc2, 0x42, - 0xe2, 0xf6, 0x0e, 0xc5, 0x0f, 0x8c, 0x09, 0xba, 0x0f, 0x65, 0x1f, 0x0f, 0x7d, 0x1c, 0x1c, 0x9f, - 0xe5, 0x2b, 0xde, 0x3c, 0xbb, 0x21, 0x38, 0x96, 0x9c, 0xf5, 0x55, 0x9a, 0x7b, 0xe3, 0xc2, 0xc2, - 0x5b, 0xab, 0x4c, 0x0c, 0xc5, 0xc5, 0x84, 0xbb, 0xd8, 0x67, 0x5a, 0x5c, 0x82, 0xf9, 0x8b, 0x9f, - 0x63, 0x39, 0xce, 0xcf, 0x1c, 0xf6, 0x7b, 0x12, 0x6c, 0x2f, 0xb2, 0x2f, 0x79, 0x2c, 0x7f, 0x86, - 0xa5, 0xd4, 0x98, 0x9e, 0xab, 0xbb, 0x6c, 0xd3, 0x5f, 0x16, 0x47, 0x7f, 0x20, 0x41, 0x65, 0x71, - 0x25, 0xcb, 0x4e, 0x03, 0xb6, 0x94, 0xf7, 0x5f, 0xbb, 0x94, 0x9f, 0xc0, 0x6b, 0x0b, 0x96, 0x8f, - 0xb9, 0xad, 0xdc, 0x81, 0xed, 0xf3, 0xe3, 0xf2, 0x2a, 0x80, 0xbe, 0x7c, 0x00, 0x95, 0x0b, 0xa2, - 0xeb, 0xaa, 0xea, 0x2e, 0x70, 0xc1, 0x95, 0xd4, 0x75, 0x61, 0xe7, 0x22, 0x33, 0x5e, 0xe9, 0xf3, - 0xe5, 0xaf, 0x92, 0x90, 0xd3, 0x70, 0x40, 0x18, 0xf8, 0x43, 0x71, 0xf0, 0x27, 0x70, 0xde, 0x1d, - 0x48, 0x07, 0x04, 0x4f, 0xf8, 0x17, 0x1d, 0x05, 0x45, 0xcc, 0xa3, 0xa1, 0x48, 0x6d, 0x40, 0xf0, - 0x44, 0xe4, 0x60, 0xce, 0x57, 0xfe, 0xef, 0x04, 0xa4, 0x28, 0x15, 0x7d, 0x0d, 0xf2, 0x94, 0x12, - 0xef, 0xa8, 0x95, 0xce, 0x90, 0xae, 0x31, 0x34, 0x98, 0xa3, 0xac, 0x2c, 0x4d, 0xdf, 0x02, 0x60, - 0x62, 0xb6, 0x6b, 0xe1, 0x17, 0xe2, 0xf6, 0x85, 0x29, 0x6a, 0x53, 0xc2, 0xe9, 0x1e, 0x7e, 0x72, - 0xb9, 0x87, 0x1f, 0x2a, 0x08, 0x4c, 0x6f, 0x12, 0x5e, 0x00, 0x30, 0x05, 0x03, 0x4a, 0x40, 0xb5, - 0x05, 0xc8, 0x92, 0x3e, 0xb3, 0x1f, 0x1c, 0x87, 0x29, 0x1f, 0xc2, 0x9a, 0xe5, 0x99, 0xe2, 0x94, - 0x3d, 0xc3, 0x26, 0x6f, 0x90, 0x5c, 0x00, 0xa6, 0x52, 0x54, 0x04, 0x3d, 0x00, 0x88, 0xea, 0x52, - 0x70, 0x39, 0x64, 0x13, 0x13, 0xa8, 0x6e, 0x43, 0x8a, 
0x59, 0x04, 0x20, 0xd3, 0xee, 0x3e, 0x54, - 0x06, 0x1a, 0xaf, 0x5b, 0xdf, 0x54, 0xd4, 0xf6, 0xfe, 0x53, 0x59, 0xaa, 0x7e, 0x4f, 0x82, 0xbc, - 0x6a, 0xb8, 0x23, 0xfe, 0xe1, 0xf9, 0x06, 0xe4, 0x4f, 0xf0, 0x4c, 0xe7, 0x17, 0x59, 0xd4, 0x6c, - 0x59, 0x35, 0x77, 0x82, 0x67, 0x0d, 0x76, 0x97, 0x75, 0x03, 0xb2, 0x74, 0x12, 0xbb, 0x16, 0xb3, - 0x58, 0x56, 0xcd, 0x9c, 0xe0, 0x99, 0xe2, 0x5a, 0xa8, 0x0a, 0x6b, 0xbe, 0xce, 0x6e, 0x9c, 0x84, - 0x64, 0x8a, 0x4d, 0x17, 0x7c, 0x76, 0x19, 0xc5, 0x85, 0xb7, 0xa1, 0x10, 0xf2, 0x50, 0x05, 0x69, - 0xc6, 0x91, 0xe7, 0x1c, 0x8a, 0x6b, 0xdd, 0x93, 0xbf, 0xff, 0x59, 0x65, 0xe5, 0xd4, 0xd5, 0x58, - 0x31, 0xaa, 0xbd, 0x03, 0x42, 0xb3, 0xe3, 0x7d, 0x58, 0xe5, 0xb8, 0xc9, 0xba, 0x3c, 0x22, 0xcd, - 0x0a, 0x09, 0x54, 0x81, 0xc2, 0x18, 0xfb, 0x23, 0x86, 0x43, 0xcd, 0x63, 0xb6, 0xb9, 0x9c, 0x0a, - 0x8c, 0xd4, 0xa7, 0x94, 0xea, 0x9f, 0x24, 0x00, 0x94, 0x17, 0xc4, 0x37, 0x4c, 0x52, 0xef, 0xb7, - 0xcb, 0x7f, 0x23, 0x41, 0x46, 0xa0, 0xd8, 0x78, 0xdf, 0x5c, 0x5a, 0xec, 0x9b, 0x3f, 0x80, 0x42, - 0xbc, 0x47, 0x7f, 0x99, 0x06, 0x2d, 0x70, 0x01, 0x7a, 0xda, 0x68, 0x98, 0x71, 0x50, 0x39, 0x21, - 0x7e, 0x20, 0x5a, 0xf5, 0x79, 0x46, 0xe9, 0x13, 0x7f, 0xa9, 0xd5, 0x9e, 0xba, 0x74, 0xab, 0xbd, - 0xfa, 0x14, 0x52, 0x4d, 0xcf, 0x3a, 0x85, 0x57, 0xd6, 0x20, 0xdf, 0xec, 0x75, 0xf7, 0xdb, 0x0f, - 0x0f, 0x55, 0x85, 0x37, 0x6a, 0x94, 0x6f, 0x69, 0x6a, 0xbd, 0xa9, 0xc9, 0x09, 0x84, 0xa0, 0x28, - 0x06, 0x4a, 0x4b, 0x3f, 0x3c, 0x6c, 0xb7, 0xe4, 0x24, 0xda, 0x02, 0x79, 0x4e, 0xdb, 0x6f, 0x2b, - 0x9d, 0xd6, 0x40, 0x4e, 0x55, 0x3f, 0x4b, 0x03, 0x34, 0xbd, 0xf1, 0x91, 0xed, 0x62, 0x66, 0xa7, - 0x44, 0x64, 0xa7, 0x53, 0xc6, 0x90, 0xae, 0x68, 0x8c, 0x9b, 0x40, 0x63, 0x8d, 0x9b, 0x82, 0x7f, - 0x27, 0xd0, 0x78, 0x63, 0x86, 0xb8, 0x0e, 0x19, 0x71, 0x29, 0xc1, 0x6d, 0x24, 0x46, 0xb4, 0x5e, - 0x72, 0xcf, 0x38, 0x86, 0x89, 0x8f, 0x3d, 0xc7, 0xc2, 0x7e, 0xec, 0x76, 0x03, 0x31, 0x2f, 0xcd, - 0xa7, 0xa8, 0xc3, 0x4e, 0x99, 0x34, 0x7d, 0xf9, 0xdb, 0x8b, 0xdb, 0xb0, 0x3e, 0x3f, 0xd1, 0x1c, - 0x7d, 0xb2, 0x9b, 0x3d, 0xb5, 0x38, 0x27, 0x33, 0x08, 0xfa, 0x2e, 0x6c, 0xb0, 0x8b, 0xc9, 0x85, - 0xdb, 0x1b, 0x71, 0x6b, 0xc6, 0x26, 0x06, 0xd1, 0x9e, 0xcb, 0xbf, 0x09, 0x69, 0x1a, 0xdb, 0x01, - 0xfa, 0x05, 0x48, 0x39, 0x78, 0x48, 0x98, 0xd1, 0x0a, 0x7b, 0x88, 0x2f, 0xa8, 0xe5, 0x99, 0x41, - 0xdd, 0xb5, 0x1a, 0x33, 0x82, 0x03, 0x95, 0xcd, 0xa3, 0x5d, 0x48, 0xfb, 0xf6, 0xe8, 0x98, 0x88, - 0xbe, 0xf1, 0x59, 0x8c, 0x9c, 0x01, 0x7d, 0x09, 0x92, 0xde, 0x94, 0x08, 0xe8, 0x7b, 0x16, 0x1f, - 0x9d, 0xae, 0xfe, 0xab, 0x74, 0x89, 0x48, 0x59, 0x87, 0x82, 0xaa, 0xb4, 0x0e, 0x9b, 0x8a, 0xde, - 0x51, 0xf6, 0x69, 0xb4, 0x6c, 0xc0, 0x5a, 0xb3, 0x77, 0xd0, 0x68, 0x77, 0x15, 0x5d, 0x6d, 0x3f, - 0x7c, 0xa4, 0xc9, 0x49, 0x24, 0x43, 0xa1, 0xa5, 0xd6, 0xdb, 0x5d, 0xbd, 0xf9, 0xe8, 0xb0, 0xfb, - 0x44, 0xfe, 0x5c, 0x42, 0xdb, 0x70, 0x93, 0x51, 0x94, 0x96, 0x2e, 0x98, 0x5b, 0x7a, 0xab, 0xd7, - 0x3c, 0x3c, 0x50, 0xba, 0x9a, 0xfc, 0xf7, 0x12, 0xba, 0x05, 0xa5, 0x70, 0x9e, 0x6b, 0x8f, 0x4d, - 0xff, 0x83, 0x14, 0x29, 0x54, 0x5a, 0xfa, 0x13, 0xe5, 0xa9, 0xfc, 0x8f, 0x12, 0xda, 0x84, 0x62, - 0x48, 0x11, 0xd1, 0xf8, 0x4f, 0x12, 0x42, 0xb0, 0x16, 0x12, 0x07, 0x5a, 0x5d, 0x1b, 0xc8, 0xff, - 0x2c, 0x55, 0xff, 0x2c, 0x05, 0xb9, 0xc6, 0xd4, 0x76, 0x2c, 0x1a, 0xa0, 0xff, 0xb1, 0x70, 0x90, - 0x8f, 0x28, 0x59, 0xb7, 0xad, 0xf0, 0x20, 0xb3, 0x71, 0xdb, 0x9a, 0x4f, 0x59, 0x47, 0xa2, 0xc0, - 0xf1, 0xa9, 0xd6, 0x11, 0xbb, 0x92, 0x65, 0x5f, 0xab, 0xa2, 0x50, 0x88, 0x11, 0xda, 0x83, 0x82, - 0xf8, 0xb4, 0x65, 0xd5, 0x29, 0xc5, 0xaa, 0xd3, 0x46, 0xf8, 0xe1, 0x1a, 0x5d, 
0xae, 0xaa, 0xc0, - 0xb9, 0x58, 0x1a, 0x7e, 0x0f, 0x36, 0xe6, 0x1d, 0x0f, 0xf1, 0x09, 0x2e, 0xee, 0x90, 0xe5, 0x68, - 0x42, 0x7c, 0xa7, 0xa3, 0xb7, 0x60, 0x55, 0x04, 0xa0, 0xee, 0x7b, 0x1e, 0x11, 0xe1, 0x16, 0x06, - 0xa5, 0xea, 0x79, 0xa4, 0xfc, 0xeb, 0x90, 0xde, 0xc7, 0xc4, 0x3c, 0xa6, 0xbc, 0xd1, 0x97, 0xf6, - 0xd4, 0x77, 0xc4, 0xf6, 0x0a, 0x21, 0xed, 0xd0, 0x77, 0xd0, 0x2f, 0x9d, 0xba, 0x07, 0x4e, 0xbc, - 0x6e, 0xc1, 0xf1, 0xab, 0xe1, 0xea, 0xcb, 0x30, 0x40, 0xf2, 0x90, 0x6e, 0x28, 0x0f, 0xdb, 0x5d, - 0x7e, 0x47, 0xdb, 0xef, 0x75, 0x3a, 0xa2, 0x7b, 0xac, 0xd6, 0x0f, 0xfa, 0xbd, 0x4e, 0xbb, 0x4b, - 0x3f, 0x83, 0xb6, 0x40, 0x9e, 0x8f, 0xf5, 0x7d, 0x45, 0x6b, 0x3e, 0x92, 0x93, 0xa8, 0x02, 0x6f, - 0xc4, 0xa8, 0x2c, 0xc8, 0xea, 0x9a, 0xa2, 0x37, 0xeb, 0x7d, 0x8d, 0x06, 0x58, 0x0a, 0xdd, 0x86, - 0xb7, 0xcf, 0x62, 0x38, 0xa8, 0x6b, 0x8a, 0xda, 0xae, 0x77, 0xda, 0x1f, 0xd7, 0xb5, 0x76, 0xaf, - 0x2b, 0xa7, 0xe9, 0x9b, 0x5b, 0xbd, 0xae, 0x22, 0x67, 0xe8, 0x9b, 0xe8, 0x93, 0xfe, 0x51, 0x5b, - 0x7b, 0xa4, 0x2b, 0xaa, 0xda, 0x53, 0x07, 0x72, 0x16, 0xdd, 0x82, 0x9b, 0x31, 0x45, 0xad, 0x5e, - 0xf3, 0x89, 0xa2, 0xea, 0xed, 0xee, 0xa0, 0xaf, 0x34, 0x35, 0x39, 0x47, 0xb3, 0x5c, 0xb3, 0xae, - 0xd5, 0x3b, 0xbd, 0x87, 0xfa, 0xa0, 0xf9, 0x48, 0x39, 0xa8, 0xcb, 0x56, 0x75, 0x13, 0x36, 0x54, - 0x1c, 0x60, 0xc2, 0x6a, 0x8c, 0x8a, 0x7f, 0x63, 0x8a, 0x03, 0x52, 0xdd, 0x02, 0x14, 0x27, 0x72, - 0x80, 0x54, 0x7d, 0x00, 0xa8, 0x6e, 0x3d, 0x33, 0x5c, 0x13, 0xd3, 0xef, 0x55, 0xc1, 0x4b, 0x13, - 0x83, 0xc1, 0xa9, 0x51, 0x47, 0x93, 0x5a, 0x3f, 0xa5, 0x16, 0x05, 0x59, 0x34, 0x34, 0xab, 0xd7, - 0x60, 0x73, 0x41, 0x5c, 0x68, 0xfd, 0x00, 0x56, 0xe3, 0xe7, 0x92, 0x22, 0x28, 0x5a, 0xf9, 0xc5, - 0x1d, 0x36, 0x47, 0x01, 0x5b, 0x90, 0x3e, 0xa2, 0x93, 0xcc, 0x69, 0x29, 0x95, 0x0f, 0xaa, 0xdf, - 0x95, 0x60, 0xad, 0xed, 0x8e, 0x70, 0x40, 0xc2, 0xb5, 0x2c, 0x02, 0x13, 0xe9, 0x42, 0x60, 0x12, - 0x3f, 0x11, 0x89, 0xc5, 0x13, 0xb1, 0x84, 0x59, 0x92, 0x97, 0xc5, 0x2c, 0xd5, 0x7f, 0x4b, 0x40, - 0x31, 0x5c, 0x17, 0xdf, 0x24, 0xfa, 0x6b, 0x69, 0xde, 0x71, 0xe0, 0x17, 0xe4, 0xf4, 0xcb, 0x83, - 0x77, 0xfe, 0xa3, 0x0f, 0xc4, 0x45, 0x99, 0xb0, 0xdd, 0xf5, 0x11, 0x65, 0xa7, 0x98, 0x37, 0xe0, - 0x08, 0xff, 0xd7, 0x7e, 0xfb, 0x8b, 0xca, 0x7b, 0x67, 0xfd, 0x07, 0xe6, 0xd4, 0x7f, 0x80, 0x42, - 0xf9, 0x4f, 0xbf, 0xa8, 0xbc, 0x7b, 0x19, 0xf6, 0xde, 0x70, 0x18, 0x60, 0x12, 0x75, 0x3f, 0xe6, - 0xaf, 0x45, 0xbf, 0x02, 0xab, 0xe1, 0xda, 0x31, 0x31, 0x2d, 0x91, 0x7b, 0x63, 0x8d, 0x39, 0x8e, - 0xc8, 0x6b, 0x0a, 0x31, 0xad, 0xb0, 0x74, 0x08, 0x01, 0x4a, 0x2a, 0xb7, 0xe0, 0xfa, 0xd9, 0x7b, - 0xb9, 0x08, 0x66, 0x27, 0xe3, 0x30, 0xfb, 0x4f, 0x13, 0x70, 0x43, 0x33, 0x82, 0x93, 0xb0, 0x8d, - 0xe7, 0x7b, 0x2f, 0x66, 0xa1, 0xdf, 0xbf, 0x0e, 0x29, 0x6f, 0x82, 0x5d, 0x51, 0x3e, 0xaa, 0x02, - 0x22, 0x9f, 0xcd, 0x5c, 0xeb, 0x4d, 0xb0, 0xab, 0x32, 0x7e, 0x16, 0x6b, 0x06, 0x31, 0xd8, 0xcb, - 0x56, 0x55, 0xf6, 0x5c, 0xfe, 0x5b, 0x09, 0x52, 0x94, 0x05, 0xed, 0x42, 0x86, 0x7f, 0x1e, 0x0a, - 0xb5, 0xf2, 0xe9, 0x0d, 0xab, 0x62, 0x1e, 0x75, 0x20, 0xc7, 0xdb, 0x9f, 0x61, 0x18, 0x35, 0x7e, - 0xf1, 0xd5, 0xcb, 0xca, 0xfb, 0x67, 0x59, 0x7d, 0xe9, 0x8f, 0x58, 0xbc, 0x31, 0xda, 0x6e, 0xa9, - 0x59, 0xa6, 0xa2, 0xcd, 0xa0, 0x1a, 0x31, 0xfc, 0x11, 0x26, 0xac, 0x89, 0xc9, 0xb2, 0xee, 0x9a, - 0x0a, 0x9c, 0xc4, 0xfe, 0xe7, 0x51, 0x81, 0x82, 0xe9, 0xd8, 0x34, 0x91, 0x19, 0x96, 0x15, 0x56, - 0x7b, 0xe0, 0xa4, 0xba, 0x65, 0xf9, 0xd5, 0xdf, 0x4f, 0x42, 0x69, 0x79, 0xf7, 0x22, 0x14, 0xfb, - 0xb0, 0x46, 0xf7, 0x1e, 0x7d, 0x70, 0x8a, 0xdd, 0xbd, 0xf7, 0x3a, 0xa3, 0x89, 0x68, 0x64, 0x56, - 0x13, 
0x03, 0x75, 0xd5, 0x8b, 0x8d, 0xce, 0xb4, 0xe2, 0x73, 0x58, 0x8d, 0x4b, 0xa0, 0x07, 0x90, - 0x09, 0x88, 0x41, 0xa6, 0x81, 0xf8, 0x8c, 0xf9, 0xf2, 0x05, 0xaf, 0x1b, 0x30, 0x66, 0x55, 0x08, - 0xc5, 0x7c, 0x91, 0x38, 0xdf, 0x17, 0xd5, 0xef, 0xb0, 0x7f, 0xfe, 0x30, 0xa1, 0x0c, 0x24, 0x7a, - 0x4f, 0xe4, 0x15, 0xb4, 0x09, 0xeb, 0x83, 0x47, 0x75, 0xb5, 0xa5, 0x77, 0x7b, 0x9a, 0xbe, 0xdf, - 0x3b, 0xec, 0xb6, 0x64, 0x89, 0x26, 0xd0, 0x6e, 0x4f, 0xe7, 0xf4, 0xbe, 0xda, 0x3e, 0xa8, 0xab, - 0x4f, 0xe5, 0x04, 0xba, 0x06, 0x1b, 0x94, 0x69, 0x91, 0x9c, 0xa4, 0x89, 0xb3, 0xdd, 0xd5, 0x14, - 0xb5, 0x5b, 0xef, 0xf0, 0x64, 0x2b, 0xa7, 0x28, 0x08, 0xe0, 0x6c, 0x03, 0xad, 0xd7, 0xef, 0x2b, - 0x2d, 0x39, 0x8d, 0xae, 0x81, 0xdc, 0xef, 0xa9, 0x1a, 0x7b, 0x4f, 0xbd, 0xd3, 0xe9, 0x7d, 0xa4, - 0xb4, 0xe4, 0x1f, 0x67, 0xdf, 0xed, 0x42, 0x21, 0x56, 0x5f, 0x28, 0xd4, 0x10, 0x59, 0x58, 0x5e, - 0xa1, 0xd8, 0xe2, 0xf1, 0xa0, 0xd7, 0x0d, 0xf3, 0xb1, 0xc4, 0xfe, 0xdf, 0xc3, 0xb0, 0x87, 0x9c, - 0xa2, 0xab, 0x09, 0x21, 0xc1, 0x40, 0xdf, 0x6f, 0x7f, 0x8b, 0x95, 0x8b, 0xf4, 0xde, 0xdf, 0x49, - 0x90, 0xa5, 0xdf, 0x7f, 0xb6, 0x3b, 0x42, 0xdf, 0x00, 0x98, 0x67, 0x6a, 0x74, 0x83, 0x5b, 0x75, - 0x29, 0xa1, 0x97, 0x4b, 0xcb, 0x13, 0xc2, 0x31, 0x0d, 0x28, 0xc4, 0xb2, 0x32, 0x12, 0x8c, 0xcb, - 0x79, 0xbe, 0x7c, 0xf3, 0x8c, 0x19, 0xa1, 0xe3, 0x2e, 0x64, 0x78, 0xee, 0x42, 0x9b, 0x8b, 0x99, - 0x8c, 0x4b, 0x6e, 0x9d, 0x95, 0xde, 0xf6, 0x3e, 0x86, 0xd5, 0xb8, 0xe7, 0xd1, 0x63, 0x48, 0xf3, - 0x87, 0x5b, 0xe7, 0x1e, 0xdf, 0xf2, 0xf6, 0xf9, 0x91, 0xb3, 0x2b, 0x7d, 0x55, 0x6a, 0xdc, 0xff, - 0xfc, 0x3f, 0xb7, 0x57, 0x3e, 0xff, 0xd1, 0xb6, 0xf4, 0xc3, 0x1f, 0x6d, 0x4b, 0x9f, 0xfd, 0xd7, - 0xb6, 0xf4, 0xf1, 0x3b, 0x23, 0x9b, 0x1c, 0x4f, 0x8f, 0x6a, 0xa6, 0x37, 0xbe, 0x83, 0x03, 0x32, - 0x35, 0xfc, 0x19, 0xff, 0x2f, 0xe5, 0xd2, 0xbf, 0x2b, 0x8f, 0x32, 0x6c, 0x7c, 0xf7, 0xff, 0x02, - 0x00, 0x00, 0xff, 0xff, 0x51, 0xf0, 0x25, 0x5d, 0x79, 0x29, 0x00, 0x00, + // 3686 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xd4, 0x3a, 0x4d, 0x6c, 0x1b, 0xd9, + 0x79, 0x1a, 0xfe, 0xf3, 0xe3, 0x8f, 0x46, 0x4f, 0xb2, 0x4d, 0x33, 0x6b, 0x51, 0x66, 0x92, 0xae, + 0x76, 0x37, 0x4b, 0xa7, 0x32, 0x92, 0xee, 0xda, 0x70, 0x03, 0xfe, 0xc9, 0xa6, 0x4d, 0x91, 0xec, + 0x90, 0xca, 0xc6, 0x01, 0x8a, 0xc1, 0x70, 0xe6, 0x91, 0x9a, 0xd5, 0x70, 0x86, 0x9d, 0x79, 0x94, + 0xcd, 0x5c, 0x5a, 0xb4, 0x05, 0x0a, 0x2c, 0x5a, 0x20, 0x87, 0x06, 0xc9, 0xad, 0xdb, 0x00, 0x05, + 0x7a, 0xeb, 0xa5, 0x28, 0xd0, 0xf6, 0x52, 0xa0, 0x97, 0xed, 0x1f, 0x1a, 0xb4, 0xe8, 0xa5, 0x07, + 0x2f, 0x9a, 0x1e, 0x92, 0x53, 0x4f, 0x3d, 0xf9, 0x54, 0xbc, 0x9f, 0x19, 0x0e, 0x45, 0x59, 0xa2, + 0x92, 0xf4, 0x90, 0x0b, 0x31, 0xef, 0xfb, 0x9b, 0xf7, 0xbe, 0xff, 0xf9, 0x1e, 0xe1, 0xad, 0xb1, + 0x73, 0x6f, 0xea, 0x3a, 0xc4, 0xd1, 0x1d, 0xcb, 0xbb, 0x37, 0xb2, 0x9c, 0x17, 0xec, 0xa7, 0xc2, + 0x60, 0x28, 0x46, 0x9f, 0x8b, 0xbb, 0x43, 0xd7, 0x39, 0xc5, 0x6e, 0x40, 0x17, 0x3c, 0x70, 0xaa, + 0xe2, 0x9e, 0xee, 0xd8, 0xde, 0x6c, 0x72, 0x09, 0xc5, 0xce, 0xd8, 0x19, 0x3b, 0xec, 0xf1, 0x1e, + 0x7d, 0x12, 0xd0, 0xd2, 0xd8, 0x71, 0xc6, 0x16, 0xe6, 0xc4, 0xc3, 0xd9, 0xe8, 0x1e, 0x31, 0x27, + 0xd8, 0x23, 0xda, 0x64, 0xca, 0x09, 0xca, 0x8f, 0x20, 0xde, 0xb7, 0x4c, 0x1d, 0xa3, 0x1d, 0x88, + 0x0f, 0xf1, 0xd8, 0xb4, 0x0b, 0xd2, 0x9e, 0xb4, 0x9f, 0x53, 0xf8, 0x02, 0xc9, 0x10, 0xc5, 0xb6, + 0x51, 0x88, 0x30, 0x18, 0x7d, 0x7c, 0x90, 0xfd, 0xd1, 0x9f, 0x96, 0x36, 0xbe, 0xfb, 0xc3, 0xd2, + 0xc6, 0x0f, 0x7e, 0x58, 0xda, 0x28, 0xdb, 0x90, 0x3e, 0x3e, 0x6e, 0x35, 0x7a, 0x9a, 0x4b, 0x3c, + 0x84, 
0x20, 0x66, 0x3b, 0x06, 0x66, 0x12, 0x12, 0x0a, 0x7b, 0x46, 0x0f, 0x21, 0xae, 0x5b, 0x8e, + 0x7e, 0xca, 0x44, 0x24, 0x6a, 0x5f, 0x7e, 0xfd, 0xaa, 0x74, 0x77, 0xec, 0x54, 0xc6, 0xda, 0x77, + 0x30, 0x21, 0xb8, 0x62, 0xe0, 0xb3, 0x7b, 0xba, 0xe3, 0xe2, 0x7b, 0x13, 0xec, 0x79, 0xda, 0x18, + 0x57, 0xea, 0x94, 0x58, 0xe1, 0x3c, 0x0f, 0xe4, 0x9f, 0x7e, 0x5a, 0x92, 0x96, 0xde, 0xf7, 0x1f, + 0x12, 0x40, 0xcf, 0x75, 0x3e, 0xc6, 0x3a, 0x31, 0x1d, 0xb6, 0xbd, 0x29, 0x71, 0xd9, 0x0b, 0xd3, + 0x0a, 0x7d, 0xa4, 0xc7, 0x18, 0x99, 0xd8, 0xe2, 0x5b, 0x4e, 0x2b, 0x7c, 0x81, 0x8a, 0x90, 0xc2, + 0x2f, 0xa7, 0x96, 0xa9, 0x9b, 0xa4, 0x10, 0xdd, 0x93, 0xf6, 0x53, 0x4a, 0xb0, 0x46, 0xfb, 0x20, + 0x9b, 0x9e, 0x3a, 0xd5, 0x5c, 0x62, 0x52, 0x99, 0xea, 0x29, 0x9e, 0x17, 0x62, 0x8c, 0x26, 0x6f, + 0x7a, 0x3d, 0x1f, 0xfc, 0x0c, 0xcf, 0xd1, 0x97, 0x20, 0x4f, 0x29, 0x5d, 0x73, 0xa2, 0xb9, 0x73, + 0x46, 0x17, 0x67, 0x74, 0x59, 0xd3, 0xeb, 0x71, 0x20, 0xa5, 0xba, 0x0f, 0x69, 0xd3, 0x1e, 0x61, + 0x17, 0xdb, 0x3a, 0x2e, 0x24, 0xf6, 0xa4, 0xfd, 0xcc, 0xc1, 0x66, 0x85, 0x19, 0xbc, 0xe5, 0x83, + 0x6b, 0xb1, 0xcf, 0x5e, 0x95, 0x36, 0x94, 0x05, 0x5d, 0xf9, 0x7f, 0xa3, 0x90, 0x0e, 0xd0, 0xf4, + 0x10, 0x64, 0x3e, 0xc5, 0x5e, 0x41, 0xda, 0x8b, 0xd2, 0x43, 0xb0, 0x05, 0xaa, 0x40, 0xc2, 0x23, + 0xae, 0x69, 0x8f, 0xd9, 0x11, 0x32, 0x07, 0x37, 0xcf, 0x49, 0xad, 0xf4, 0x19, 0x56, 0x11, 0x54, + 0x4c, 0x8a, 0x49, 0x2c, 0xcc, 0x4e, 0x43, 0xa5, 0xd0, 0x05, 0xda, 0x83, 0x8c, 0x81, 0x3d, 0xdd, + 0x35, 0xa7, 0xf4, 0x58, 0xec, 0x04, 0x69, 0x25, 0x0c, 0x42, 0x0f, 0x21, 0x6b, 0xe0, 0x91, 0x36, + 0xb3, 0x88, 0xfa, 0xb1, 0xe7, 0xd8, 0xec, 0x0c, 0xe9, 0xda, 0x5b, 0xaf, 0x5f, 0x95, 0x0a, 0xd8, + 0xd6, 0x1d, 0xc3, 0xb4, 0xc7, 0xf7, 0x28, 0xa2, 0xa2, 0x68, 0x2f, 0x8e, 0xb8, 0xd9, 0x94, 0xa4, + 0xe0, 0x40, 0x37, 0x21, 0xe1, 0x61, 0xdd, 0xc5, 0xa4, 0x90, 0x64, 0xba, 0x11, 0x2b, 0xba, 0x79, + 0xfc, 0xd2, 0xf4, 0x88, 0x57, 0x48, 0xed, 0x49, 0xfb, 0xf9, 0xd5, 0xcd, 0x37, 0x19, 0x56, 0x11, + 0x54, 0xc5, 0x3f, 0x91, 0x20, 0xc1, 0xcf, 0x83, 0xee, 0x42, 0x56, 0x77, 0x6c, 0x82, 0x6d, 0xa2, + 0x52, 0x45, 0xb0, 0xd3, 0xa7, 0x95, 0x8c, 0x80, 0x0d, 0xe6, 0x53, 0x4c, 0xdf, 0x3a, 0x72, 0xdc, + 0x89, 0x46, 0xc4, 0x59, 0xc5, 0x0a, 0xbd, 0x03, 0xb2, 0xcf, 0xea, 0x6f, 0x9d, 0xed, 0x2b, 0xad, + 0x6c, 0x0a, 0x78, 0x53, 0x80, 0xd1, 0x1d, 0x80, 0x89, 0xf6, 0x52, 0xb5, 0xb0, 0x3d, 0x26, 0x27, + 0xec, 0xcc, 0x39, 0x25, 0x3d, 0xd1, 0x5e, 0xb6, 0x19, 0xe0, 0x69, 0x2c, 0x25, 0xc9, 0x91, 0xa7, + 0xb1, 0x54, 0x44, 0x8e, 0x3e, 0x8d, 0xa5, 0xe2, 0x72, 0xa2, 0x5c, 0x83, 0x04, 0xdf, 0x33, 0xca, + 0x40, 0xb2, 0xd5, 0xf9, 0x66, 0xb5, 0xdd, 0x6a, 0xc8, 0x1b, 0x28, 0x05, 0xb1, 0xa3, 0xe3, 0xfe, + 0x40, 0x96, 0x50, 0x12, 0xa2, 0x47, 0xd5, 0xe7, 0x72, 0x04, 0x65, 0x21, 0xd5, 0x3a, 0xea, 0xb5, + 0x5b, 0xf5, 0xd6, 0x40, 0x8e, 0x22, 0x80, 0x44, 0xbd, 0xda, 0xe9, 0x74, 0x07, 0x72, 0xac, 0xfc, + 0x1c, 0x32, 0x1d, 0x4c, 0x5e, 0x38, 0xee, 0x69, 0xcf, 0x71, 0x99, 0xf2, 0xec, 0xd9, 0x64, 0x88, + 0x5d, 0x11, 0x84, 0x62, 0x45, 0xdd, 0xd7, 0x8f, 0x76, 0xe1, 0xd7, 0xc1, 0x9a, 0xf2, 0x4c, 0x67, + 0x43, 0xcb, 0xd4, 0x85, 0x63, 0x8b, 0x55, 0xf9, 0x5f, 0xf3, 0x90, 0xaf, 0x3b, 0x96, 0xc5, 0x23, + 0xa5, 0x3f, 0xc5, 0x3a, 0x2a, 0x43, 0xcc, 0xd6, 0x26, 0x3c, 0x3e, 0xd3, 0xb5, 0xfc, 0xeb, 0x57, + 0x25, 0x58, 0x50, 0x28, 0x0c, 0x87, 0x1a, 0xb0, 0xf5, 0xc2, 0x35, 0x09, 0x56, 0x3d, 0xfd, 0x04, + 0x4f, 0x34, 0xee, 0x01, 0xa9, 0x35, 0x3c, 0x20, 0xc3, 0xd8, 0xfa, 0x8c, 0x0b, 0xd5, 0x40, 0x76, + 0xb1, 0x66, 0x2c, 0x09, 0xc9, 0xac, 0x21, 0x04, 0x28, 0x97, 0x90, 0x21, 0x43, 0x94, 0x86, 0x58, + 0x94, 0x85, 0x00, 0x7d, 0x44, 
0xb7, 0x21, 0x35, 0x9b, 0x99, 0x86, 0x4a, 0x43, 0x9e, 0xdb, 0x39, + 0x49, 0xd7, 0x3d, 0xe2, 0x52, 0x43, 0x2f, 0x22, 0x98, 0xc5, 0xbc, 0x57, 0x88, 0x33, 0xce, 0xcd, + 0x00, 0x7e, 0xc8, 0xc0, 0xe8, 0x03, 0xc8, 0x4c, 0x83, 0x0c, 0xe2, 0x15, 0x12, 0x7b, 0xd1, 0xfd, + 0xcc, 0x81, 0xcc, 0xdd, 0x71, 0x91, 0x5a, 0x44, 0x88, 0x86, 0x49, 0xa9, 0x6e, 0x34, 0xfd, 0x54, + 0x25, 0x78, 0x32, 0xb5, 0x34, 0x82, 0xf9, 0xb1, 0x92, 0xeb, 0xe8, 0x46, 0xd3, 0x4f, 0x07, 0x82, + 0x0b, 0x35, 0x00, 0x2d, 0xb6, 0xea, 0xcb, 0x2a, 0xa4, 0x59, 0x48, 0xdf, 0xa8, 0x04, 0x59, 0xfd, + 0xa9, 0x33, 0x73, 0x6d, 0xcd, 0xa2, 0x86, 0x53, 0xb6, 0x02, 0x86, 0x40, 0xca, 0x37, 0x00, 0x0c, + 0xec, 0x9a, 0x67, 0x1a, 0x8b, 0xe2, 0x2c, 0xe3, 0x2e, 0xf1, 0x43, 0x2c, 0x5b, 0xbd, 0xd2, 0x08, + 0xc8, 0x94, 0x10, 0x4b, 0xf1, 0x2f, 0x00, 0x60, 0x81, 0x42, 0x03, 0xc8, 0xeb, 0x8e, 0x6d, 0x63, + 0x9d, 0x38, 0x2e, 0x0f, 0x33, 0x89, 0xc5, 0xe9, 0xfb, 0x57, 0xc8, 0xac, 0xd4, 0x7d, 0x2e, 0x1a, + 0x88, 0x4a, 0x4e, 0x0f, 0x2f, 0xd1, 0x87, 0x40, 0xc3, 0x74, 0x64, 0x8e, 0xb9, 0xae, 0x22, 0x6b, + 0xe8, 0x2a, 0xc1, 0x19, 0x50, 0x17, 0x80, 0xb8, 0x9a, 0xed, 0xd1, 0x48, 0xf6, 0x98, 0x17, 0x64, + 0x0e, 0xde, 0xb9, 0x6a, 0x33, 0x03, 0x9f, 0x43, 0x98, 0x2f, 0x24, 0x02, 0x7d, 0x04, 0x5b, 0xde, + 0xc9, 0x6c, 0x34, 0xb2, 0x30, 0x4d, 0xdd, 0x2a, 0x4f, 0xb0, 0xb1, 0xbd, 0xe8, 0x7e, 0xfe, 0xe0, + 0xbd, 0xab, 0xe4, 0xf6, 0x39, 0x23, 0x3b, 0xe2, 0xa6, 0x90, 0xf2, 0x0c, 0xcf, 0x07, 0x2c, 0x2f, + 0x3f, 0x80, 0xbc, 0x77, 0xa2, 0xb9, 0xc6, 0xc2, 0x98, 0x71, 0x66, 0x8e, 0xed, 0x8a, 0x5f, 0xb4, + 0x2b, 0x7d, 0x8a, 0x67, 0xa6, 0xcc, 0x31, 0xd2, 0xc0, 0x8c, 0x2d, 0xb8, 0xe1, 0x62, 0xdd, 0x39, + 0xc3, 0xee, 0x5c, 0xb5, 0x9c, 0xf1, 0x42, 0x44, 0xe2, 0x32, 0x7f, 0xd8, 0xf6, 0x79, 0xda, 0xce, + 0x38, 0x10, 0xf5, 0x75, 0xc8, 0xd9, 0x3c, 0x97, 0xa8, 0x53, 0xc7, 0x25, 0x5e, 0x21, 0xc9, 0x74, + 0xb6, 0xc5, 0xcf, 0x16, 0x4a, 0x33, 0x4a, 0xd6, 0x5e, 0x2c, 0xbc, 0xe2, 0xef, 0xc5, 0x21, 0x1d, + 0xe8, 0x0d, 0xdd, 0x5d, 0xca, 0x11, 0xb9, 0xd7, 0xaf, 0x4a, 0x0b, 0xa4, 0x48, 0x11, 0x0f, 0x00, + 0xf4, 0x40, 0x53, 0xcc, 0xa6, 0x99, 0x83, 0x9d, 0x8b, 0x34, 0xe8, 0x1b, 0x61, 0x41, 0x8d, 0xda, + 0x61, 0xe7, 0xf7, 0xb0, 0xc5, 0x5c, 0x45, 0xd4, 0xb3, 0x5b, 0x8b, 0xc3, 0xb6, 0xb5, 0x21, 0xb6, + 0xfa, 0x02, 0x2d, 0xc4, 0x2c, 0x82, 0xc0, 0x47, 0xf0, 0xbc, 0x68, 0x3a, 0xae, 0x49, 0x78, 0xc9, + 0xce, 0x29, 0xc1, 0x1a, 0x7d, 0x05, 0x10, 0x4b, 0x41, 0x06, 0xb6, 0xb4, 0xb9, 0xea, 0x61, 0xdd, + 0xb1, 0x59, 0x4e, 0xa0, 0x54, 0x2c, 0x39, 0x35, 0x28, 0xa2, 0xcf, 0xe1, 0xa8, 0x04, 0x99, 0x90, + 0x73, 0xb0, 0xa4, 0x90, 0x56, 0x60, 0x61, 0x69, 0x74, 0x0c, 0x45, 0x9f, 0xc0, 0xd2, 0x26, 0x43, + 0x43, 0x53, 0xc3, 0x8e, 0xbd, 0x4e, 0x12, 0xd8, 0x16, 0xfc, 0x6d, 0xc6, 0x5e, 0xe7, 0x5e, 0x7e, + 0x08, 0xe8, 0x02, 0x71, 0xeb, 0xe4, 0xdb, 0xac, 0x15, 0x96, 0xf3, 0x05, 0x48, 0xb3, 0xd3, 0x3a, + 0xb6, 0x35, 0x67, 0xb9, 0x24, 0xa5, 0xa4, 0x28, 0xa0, 0x6b, 0x5b, 0x73, 0x54, 0x81, 0xed, 0x8f, + 0xb9, 0xf7, 0xa8, 0x3c, 0x2b, 0xcf, 0x46, 0x23, 0xf3, 0x65, 0x01, 0x58, 0x0a, 0xdd, 0x12, 0x28, + 0x85, 0x66, 0x5e, 0x86, 0x40, 0x1f, 0x02, 0xd8, 0x0e, 0x51, 0x87, 0x78, 0xe4, 0xb8, 0x98, 0xe5, + 0xed, 0xcc, 0x41, 0xb1, 0xc2, 0x3b, 0xc9, 0x8a, 0xdf, 0x49, 0x56, 0x06, 0x7e, 0x27, 0xa9, 0xa4, + 0x6d, 0x87, 0xd4, 0x18, 0x31, 0xfa, 0x35, 0xa0, 0x0b, 0x55, 0x1b, 0x11, 0xec, 0x8a, 0xac, 0x74, + 0x19, 0x67, 0xca, 0x76, 0x48, 0x95, 0xd2, 0x96, 0x15, 0xc8, 0x2d, 0x65, 0x12, 0x54, 0x84, 0x9b, + 0xa2, 0xa8, 0xaa, 0xf5, 0x6e, 0xa7, 0xd3, 0xac, 0x0f, 0xba, 0x8a, 0x3a, 0x78, 0xde, 0x6b, 0xca, + 0x1b, 0xb4, 0x84, 0xf6, 0x7f, 0xa3, 0xdd, 0x1a, 0x34, 
0x65, 0x09, 0xe5, 0x01, 0x28, 0xb4, 0x5f, + 0x57, 0x5a, 0xbd, 0x81, 0x1c, 0x41, 0x69, 0x88, 0xb7, 0x8e, 0xaa, 0x8f, 0x9b, 0x72, 0xb4, 0x7c, + 0x04, 0x99, 0x50, 0xe0, 0xa2, 0x02, 0xec, 0xf8, 0x12, 0xfb, 0x4f, 0x8e, 0x0f, 0x0f, 0xdb, 0x4d, + 0x5f, 0x5e, 0x06, 0x92, 0xb5, 0x6e, 0xb7, 0xdd, 0xac, 0x76, 0x64, 0x89, 0x57, 0xf3, 0x41, 0xf3, + 0x71, 0x53, 0x91, 0x23, 0xec, 0x4d, 0x03, 0xa5, 0xd5, 0x79, 0x2c, 0x47, 0xcb, 0xdf, 0x8f, 0x40, + 0x9e, 0xd5, 0x10, 0xee, 0x7f, 0xd4, 0x9d, 0x11, 0xc4, 0x4e, 0xf1, 0xdc, 0xef, 0xd3, 0xd8, 0x33, + 0x2d, 0xc8, 0x67, 0x9a, 0x35, 0xc3, 0x5e, 0x21, 0xc2, 0xa0, 0x62, 0x45, 0x9d, 0xd5, 0x70, 0xf4, + 0xd9, 0x04, 0xdb, 0x44, 0xb4, 0x30, 0xc1, 0x1a, 0x9d, 0xc1, 0x0d, 0x56, 0xb4, 0xc2, 0x5e, 0xa0, + 0x4e, 0xb4, 0x29, 0xcb, 0x4f, 0x99, 0x83, 0xaf, 0xf0, 0xe8, 0x5a, 0x7e, 0x39, 0x5f, 0x72, 0xfb, + 0x3f, 0xf5, 0x1c, 0xfb, 0x48, 0x9b, 0x36, 0x6d, 0xe2, 0xce, 0x6b, 0x6f, 0x7d, 0xf2, 0xf9, 0x65, + 0xb5, 0x68, 0xb4, 0x60, 0x2b, 0x36, 0xe1, 0xd6, 0x1b, 0xa4, 0xf8, 0xe5, 0x57, 0xb4, 0xd6, 0xb4, + 0xfc, 0xee, 0x40, 0x9c, 0x1d, 0xc5, 0x6f, 0xad, 0xd9, 0xe2, 0x41, 0xe4, 0x03, 0xa9, 0xfc, 0x97, + 0x71, 0xc8, 0xd4, 0xb5, 0x29, 0x99, 0xb9, 0x98, 0x35, 0x1a, 0xa5, 0xa5, 0x24, 0x92, 0x79, 0xfd, + 0xaa, 0x94, 0x14, 0x68, 0x91, 0x42, 0x0e, 0x57, 0xaa, 0x4d, 0x84, 0x55, 0x1b, 0xbf, 0x82, 0x2d, + 0x64, 0x5d, 0xab, 0xbe, 0x44, 0xaf, 0x51, 0x5f, 0xbe, 0x06, 0xa9, 0xa1, 0x69, 0x53, 0x12, 0x4f, + 0x68, 0xf9, 0xf6, 0xea, 0xcb, 0x6b, 0x9c, 0x42, 0x09, 0x48, 0x69, 0xa3, 0x61, 0xda, 0x04, 0xbb, + 0x67, 0x9a, 0x75, 0x2e, 0xa9, 0x6c, 0xfa, 0x70, 0x3f, 0xa7, 0xac, 0xd6, 0x85, 0xc4, 0xcf, 0x5f, + 0x17, 0x92, 0x3f, 0x7f, 0x5d, 0x48, 0xad, 0x57, 0x17, 0xfe, 0x4a, 0x82, 0xa4, 0x38, 0x3f, 0x7a, + 0x0a, 0x3b, 0x2e, 0xf6, 0x9c, 0x99, 0xab, 0xe3, 0xa5, 0x44, 0x25, 0xad, 0xa1, 0xf0, 0xbc, 0xcf, + 0x29, 0x52, 0xd5, 0x17, 0x21, 0x17, 0xc8, 0x9a, 0x6a, 0xe4, 0x44, 0x84, 0x49, 0xd6, 0x07, 0xf6, + 0x34, 0x72, 0x72, 0xae, 0xc6, 0x44, 0xaf, 0x53, 0x63, 0xca, 0x6f, 0x9f, 0x4f, 0x25, 0x4b, 0xfd, + 0x79, 0x90, 0x1f, 0x92, 0xe5, 0x3f, 0x06, 0xd8, 0x3e, 0xd2, 0x08, 0x76, 0x4d, 0xcd, 0x32, 0xbf, + 0xa3, 0x05, 0x7d, 0xf2, 0xdb, 0x4b, 0xee, 0xbb, 0xfd, 0xfa, 0x55, 0x69, 0xf3, 0x1c, 0x99, 0x70, + 0xe3, 0xce, 0x1b, 0xdc, 0xf8, 0x6d, 0xbe, 0xd3, 0x0b, 0x64, 0xff, 0xbf, 0xb9, 0xf3, 0xa3, 0x15, + 0x77, 0xbe, 0xfb, 0xe6, 0x4d, 0xac, 0xba, 0xf5, 0x2f, 0x79, 0x0f, 0xf3, 0xbd, 0xc4, 0x2f, 0xa7, + 0xaf, 0xbe, 0xa1, 0x1f, 0x4a, 0xfe, 0x02, 0xfa, 0xa1, 0xf4, 0xb9, 0x7e, 0xa8, 0x0e, 0x9b, 0xbc, + 0xc4, 0x78, 0x7e, 0x01, 0x61, 0x2d, 0x53, 0xb0, 0xd5, 0xe5, 0xe2, 0x22, 0xde, 0x91, 0x1f, 0x2d, + 0xd7, 0xbb, 0x2f, 0x42, 0xce, 0xc0, 0x16, 0xd1, 0xd4, 0xd9, 0xd4, 0xd0, 0x08, 0xf6, 0xfc, 0x01, + 0x08, 0x03, 0x1e, 0x73, 0x18, 0x1a, 0x02, 0x32, 0xf0, 0xd4, 0xc5, 0xba, 0x46, 0xb0, 0xa1, 0x8a, + 0xae, 0x47, 0x38, 0xc3, 0xfd, 0x2b, 0x9d, 0xb2, 0xd2, 0x08, 0x78, 0x45, 0xf1, 0x56, 0xb6, 0x8c, + 0xf3, 0xa0, 0x37, 0xb5, 0x34, 0xa9, 0xf5, 0x5a, 0x1a, 0xf8, 0x99, 0x5b, 0x9a, 0xcc, 0xfa, 0x2d, + 0x4d, 0xf1, 0x77, 0x24, 0xd8, 0x5a, 0x39, 0x0c, 0xba, 0x03, 0x30, 0x76, 0x9d, 0xd9, 0x54, 0x5d, + 0xa4, 0x18, 0x25, 0xcd, 0x20, 0x1d, 0x9a, 0x52, 0x7e, 0xa1, 0x0d, 0x72, 0xf9, 0xfe, 0xa5, 0xa9, + 0x70, 0xd1, 0x46, 0x85, 0xda, 0xa6, 0x54, 0xf9, 0x7f, 0xd2, 0x90, 0xe8, 0x56, 0x67, 0xe4, 0xe4, + 0x40, 0x0c, 0x1e, 0xce, 0x4c, 0x03, 0xfb, 0x43, 0xb6, 0x60, 0x8d, 0xde, 0x85, 0x2d, 0x6d, 0x46, + 0x4e, 0xd4, 0x99, 0x6b, 0x2d, 0x42, 0x9e, 0xb7, 0x06, 0x9b, 0x14, 0x71, 0xec, 0x5a, 0x41, 0x5c, + 0x7f, 0x08, 0xb7, 0x35, 0x5d, 0xc7, 0x9e, 0xa7, 0x12, 0xe7, 0x14, 0xdb, 0xcb, 
0x3c, 0xbc, 0x19, + 0xba, 0xc9, 0x09, 0x06, 0x14, 0x1f, 0x66, 0xad, 0xc0, 0xf6, 0x12, 0xeb, 0x04, 0x93, 0x13, 0xc7, + 0xe0, 0xd3, 0x04, 0x65, 0x2b, 0xc4, 0x74, 0xc4, 0x10, 0x6c, 0x5b, 0x61, 0xfa, 0xa1, 0x63, 0xcc, + 0xc5, 0xb4, 0x60, 0x33, 0x44, 0x5d, 0x73, 0x8c, 0x39, 0xfa, 0x7d, 0x09, 0xee, 0x2c, 0x11, 0x9f, + 0x60, 0xcd, 0xc0, 0xae, 0xb7, 0xe8, 0xbf, 0xe2, 0x2c, 0xff, 0x88, 0xef, 0x43, 0xae, 0x94, 0x4a, + 0x75, 0x21, 0xe5, 0x09, 0xa7, 0xbf, 0x46, 0xfb, 0x85, 0xb4, 0x15, 0x6e, 0xf4, 0x89, 0x04, 0xbb, + 0x4b, 0xdb, 0x70, 0xb1, 0x37, 0x75, 0x6c, 0x0f, 0x2f, 0xf6, 0x91, 0x08, 0xf7, 0x81, 0xab, 0xfb, + 0x50, 0x04, 0xc3, 0x35, 0x36, 0x12, 0xd6, 0xb7, 0xcf, 0x7e, 0xa4, 0x4d, 0xd1, 0x43, 0x28, 0xba, + 0x78, 0xe4, 0x62, 0xef, 0xe4, 0x22, 0x5b, 0xf1, 0xe1, 0xd9, 0x2d, 0x41, 0xb1, 0x62, 0xac, 0xaf, + 0xd2, 0xdc, 0x1b, 0x66, 0x16, 0xd6, 0xca, 0x32, 0x36, 0x14, 0x66, 0x13, 0xe6, 0x62, 0x9f, 0x69, + 0x61, 0x0e, 0x66, 0x2f, 0x1e, 0xc7, 0x72, 0x98, 0x9e, 0x19, 0xec, 0x0f, 0x24, 0xd8, 0x5d, 0x26, + 0x5f, 0xb1, 0x58, 0xfa, 0x02, 0x4d, 0x29, 0x21, 0x39, 0xd7, 0x37, 0xd9, 0xb6, 0xbb, 0xca, 0x8e, + 0xfe, 0x48, 0x82, 0xd2, 0xf2, 0x4e, 0x56, 0x8d, 0x06, 0x6c, 0x2b, 0xef, 0xbf, 0x71, 0x2b, 0x3f, + 0x83, 0xd5, 0x96, 0x34, 0x1f, 0x32, 0x5b, 0xb1, 0x0d, 0xbb, 0x97, 0xfb, 0xe5, 0x75, 0x1a, 0xfa, + 0xe2, 0x11, 0x94, 0xae, 0xf0, 0xae, 0xeb, 0x8a, 0xbb, 0xc2, 0x04, 0xd7, 0x12, 0xd7, 0x81, 0xbd, + 0xab, 0xd4, 0x78, 0xad, 0xcf, 0x97, 0xbf, 0x8e, 0x42, 0x6a, 0x80, 0x3d, 0xc2, 0x9a, 0x3f, 0x14, + 0x6e, 0xfe, 0x44, 0x9f, 0x77, 0x0f, 0xe2, 0x1e, 0xc1, 0x53, 0xfe, 0x45, 0x47, 0x9b, 0x22, 0x66, + 0x51, 0x9f, 0xa5, 0xd2, 0x27, 0x78, 0x2a, 0x72, 0x30, 0xa7, 0x2b, 0xfe, 0x24, 0x02, 0x31, 0x0a, + 0x45, 0x5f, 0x83, 0x34, 0x85, 0x84, 0x27, 0x6a, 0x85, 0x0b, 0xb8, 0x2b, 0xac, 0x1b, 0x4c, 0x51, + 0x52, 0x96, 0xa6, 0xef, 0x00, 0x30, 0x36, 0xd3, 0x36, 0xf0, 0x4b, 0x71, 0xfb, 0xc2, 0x04, 0xb5, + 0x28, 0xe0, 0xfc, 0x0c, 0x3f, 0xba, 0x3a, 0xc3, 0xf7, 0x05, 0x78, 0xba, 0x33, 0xf5, 0x2f, 0x00, + 0x98, 0x80, 0x3e, 0x05, 0xa0, 0xca, 0x52, 0xcb, 0x12, 0xbf, 0x70, 0x1e, 0x1c, 0x6e, 0x53, 0x3e, + 0x84, 0x9c, 0xe1, 0xe8, 0x22, 0xca, 0xce, 0xb0, 0xce, 0x07, 0x24, 0x57, 0x34, 0x53, 0x31, 0xca, + 0x82, 0x1e, 0x01, 0x04, 0x75, 0xc9, 0x5b, 0xaf, 0xb3, 0x09, 0x31, 0x94, 0x77, 0x21, 0xc6, 0x34, + 0x02, 0x90, 0x68, 0x75, 0x1e, 0x37, 0xfb, 0x03, 0x5e, 0xb7, 0xbe, 0xd9, 0x54, 0x5a, 0x87, 0xcf, + 0x65, 0xa9, 0xfc, 0x7d, 0x09, 0xd2, 0x8a, 0x66, 0x8f, 0xf9, 0x87, 0xe7, 0x17, 0x20, 0x7d, 0x8a, + 0xe7, 0x2a, 0xbf, 0xc8, 0xa2, 0x6a, 0x4b, 0x2a, 0xa9, 0x53, 0x3c, 0xaf, 0xb1, 0xbb, 0xac, 0x5b, + 0x90, 0xa4, 0x48, 0x6c, 0x1b, 0x4c, 0x63, 0x49, 0x25, 0x71, 0x8a, 0xe7, 0x4d, 0xdb, 0x40, 0x65, + 0xc8, 0xb9, 0x2a, 0xbb, 0x71, 0x12, 0x9c, 0x31, 0x86, 0xce, 0xb8, 0xec, 0x32, 0x8a, 0x33, 0xef, + 0x42, 0xc6, 0xa7, 0xa1, 0x02, 0xe2, 0x8c, 0x22, 0xcd, 0x29, 0x9a, 0xb6, 0xf1, 0x40, 0xfe, 0xc1, + 0xa7, 0xa5, 0x8d, 0x73, 0x57, 0x63, 0xf9, 0xa0, 0xf6, 0xf6, 0x09, 0xcd, 0x8e, 0x0f, 0x21, 0xcb, + 0xfb, 0x26, 0x63, 0xfd, 0x8e, 0x34, 0x29, 0x38, 0x50, 0x09, 0x32, 0x13, 0xec, 0x8e, 0x59, 0x1f, + 0xaa, 0x9f, 0xb0, 0xc3, 0xa5, 0x14, 0x60, 0xa0, 0x1e, 0x85, 0x94, 0xff, 0x2c, 0x02, 0xd0, 0x7c, + 0x49, 0x5c, 0x4d, 0x27, 0xd5, 0x5e, 0xab, 0xf8, 0xb7, 0x12, 0x24, 0x44, 0x17, 0x1b, 0x9e, 0x9b, + 0x4b, 0xcb, 0x73, 0xf3, 0x47, 0x90, 0x09, 0xcf, 0xe8, 0xd7, 0x19, 0xd0, 0x02, 0x67, 0xa0, 0xd1, + 0x46, 0xdd, 0x8c, 0x37, 0x95, 0x53, 0xe2, 0x7a, 0x62, 0x54, 0x9f, 0x66, 0x90, 0x1e, 0x71, 0x57, + 0x46, 0xed, 0xb1, 0xb5, 0x47, 0xed, 0xe5, 0xe7, 0x10, 0xab, 0x3b, 0xc6, 0xb9, 0x7e, 0x25, 0x07, + 0xe9, 
0x7a, 0xb7, 0x73, 0xd8, 0x7a, 0x7c, 0xac, 0x34, 0xf9, 0xa0, 0xa6, 0xf9, 0xad, 0x81, 0x52, + 0xad, 0x0f, 0xe4, 0x08, 0x42, 0x90, 0x17, 0x8b, 0x66, 0x43, 0x3d, 0x3e, 0x6e, 0x35, 0xe4, 0x28, + 0xda, 0x01, 0x79, 0x01, 0x3b, 0x6c, 0x35, 0xdb, 0x8d, 0xbe, 0x1c, 0x2b, 0x7f, 0x1a, 0x07, 0xa8, + 0x3b, 0x93, 0xa1, 0x69, 0x63, 0xa6, 0xa7, 0x48, 0xa0, 0xa7, 0x73, 0xca, 0x90, 0xae, 0xa9, 0x8c, + 0xdb, 0x40, 0x7d, 0x8d, 0xab, 0x82, 0x7f, 0x27, 0x50, 0x7f, 0x63, 0x8a, 0xb8, 0x09, 0x09, 0x71, + 0x29, 0xc1, 0x75, 0x24, 0x56, 0xb4, 0x5e, 0x72, 0xcb, 0x58, 0x9a, 0x8e, 0x4f, 0x1c, 0xcb, 0xc0, + 0x6e, 0xe8, 0x76, 0x03, 0x31, 0x2b, 0x2d, 0x50, 0xd4, 0x60, 0xe7, 0x54, 0x1a, 0x5f, 0xff, 0xf6, + 0xe2, 0x6d, 0xd8, 0x5c, 0x44, 0x34, 0xef, 0x3e, 0xd9, 0xcd, 0x9e, 0x92, 0x5f, 0x80, 0x59, 0x0b, + 0xfa, 0x2e, 0x6c, 0xb1, 0x8b, 0xc9, 0xa5, 0xdb, 0x1b, 0x71, 0x6b, 0xc6, 0x10, 0xfd, 0xe0, 0xcc, + 0xc5, 0xdf, 0x86, 0x38, 0xf5, 0x6d, 0x0f, 0xfd, 0x0a, 0xc4, 0x2c, 0x3c, 0x22, 0x4c, 0x69, 0x99, + 0x03, 0xc4, 0x37, 0xd4, 0x70, 0x74, 0xaf, 0x6a, 0x1b, 0xb5, 0x39, 0xc1, 0x9e, 0xc2, 0xf0, 0x68, + 0x1f, 0xe2, 0xae, 0x39, 0x3e, 0x21, 0x62, 0x6e, 0x7c, 0x11, 0x21, 0x27, 0x40, 0x5f, 0x82, 0xa8, + 0x33, 0x23, 0xa2, 0xf5, 0xbd, 0x88, 0x8e, 0xa2, 0xcb, 0xff, 0x26, 0xad, 0xe1, 0x29, 0x9b, 0x90, + 0x51, 0x9a, 0x8d, 0xe3, 0x7a, 0x53, 0x6d, 0x37, 0x0f, 0xa9, 0xb7, 0x6c, 0x41, 0xae, 0xde, 0x3d, + 0xaa, 0xb5, 0x3a, 0x4d, 0x55, 0x69, 0x3d, 0x7e, 0x32, 0x90, 0xa3, 0x48, 0x86, 0x4c, 0x43, 0xa9, + 0xb6, 0x3a, 0x6a, 0xfd, 0xc9, 0x71, 0xe7, 0x99, 0xfc, 0x99, 0x84, 0x76, 0xe1, 0x36, 0x83, 0x34, + 0x1b, 0xaa, 0x20, 0x6e, 0xa8, 0x8d, 0x6e, 0xfd, 0xf8, 0xa8, 0xd9, 0x19, 0xc8, 0xff, 0x20, 0xa1, + 0x3b, 0x50, 0xf0, 0xf1, 0x5c, 0x7a, 0x08, 0xfd, 0x8f, 0x52, 0x20, 0xb0, 0xd9, 0x50, 0x9f, 0x35, + 0x9f, 0xcb, 0xff, 0x24, 0xa1, 0x6d, 0xc8, 0xfb, 0x10, 0xe1, 0x8d, 0xff, 0x2c, 0x21, 0x04, 0x39, + 0x1f, 0xd8, 0x1f, 0x54, 0x07, 0x7d, 0xf9, 0x5f, 0xa4, 0xf2, 0x4f, 0x24, 0x48, 0xd5, 0x66, 0xa6, + 0x65, 0x50, 0x07, 0xfd, 0xcf, 0xa5, 0x40, 0x1e, 0x52, 0xb0, 0x6a, 0x1a, 0x7e, 0x20, 0xb3, 0x75, + 0xcb, 0x58, 0xa0, 0x8c, 0xa1, 0x28, 0x70, 0x1c, 0xd5, 0x18, 0xb2, 0x2b, 0x59, 0xf6, 0xb5, 0x2a, + 0x0a, 0x85, 0x58, 0xa1, 0x03, 0xc8, 0x88, 0x4f, 0x5b, 0x56, 0x9d, 0x62, 0xac, 0x3a, 0x6d, 0xf9, + 0x1f, 0xae, 0xc1, 0xe5, 0xaa, 0x02, 0x9c, 0x8a, 0xa5, 0xe1, 0xf7, 0x60, 0x6b, 0x31, 0xf1, 0x10, + 0x9f, 0xe0, 0xe2, 0x0e, 0x59, 0x0e, 0x10, 0xe2, 0x3b, 0x1d, 0xdd, 0x85, 0xac, 0x70, 0x40, 0xd5, + 0x75, 0x1c, 0x22, 0xdc, 0xcd, 0x77, 0x4a, 0xc5, 0x71, 0x48, 0x79, 0x1b, 0xb6, 0x14, 0xec, 0x61, + 0xc2, 0x12, 0xa4, 0x82, 0x7f, 0x6b, 0x86, 0x3d, 0x52, 0xde, 0x01, 0x14, 0x06, 0xf2, 0xea, 0x5e, + 0x7e, 0x04, 0xa8, 0x6a, 0x9c, 0x69, 0xb6, 0x8e, 0xe9, 0xc7, 0x96, 0xa0, 0xa5, 0x5e, 0xad, 0x71, + 0x68, 0x30, 0x8e, 0xa3, 0x9a, 0x89, 0x29, 0x79, 0x01, 0x16, 0xd3, 0xb8, 0xf2, 0x0d, 0xd8, 0x5e, + 0x62, 0x17, 0x52, 0x3f, 0x80, 0x6c, 0xd8, 0xa9, 0x68, 0xf9, 0xa7, 0x65, 0x4b, 0x5c, 0xc0, 0xf2, + 0x12, 0xb6, 0x03, 0xf1, 0x21, 0x45, 0x32, 0xc5, 0xc6, 0x14, 0xbe, 0x28, 0x7f, 0x4f, 0x82, 0x5c, + 0xcb, 0x1e, 0x63, 0x8f, 0xf8, 0x7b, 0x59, 0xae, 0xaa, 0xd2, 0x95, 0x55, 0x35, 0x6c, 0xce, 0xc8, + 0xb2, 0x39, 0x57, 0x0a, 0x6e, 0x74, 0xdd, 0x82, 0x5b, 0xfe, 0xf7, 0x08, 0xe4, 0xfd, 0x7d, 0xf1, + 0x43, 0xa2, 0xbf, 0x91, 0x16, 0x9f, 0xcb, 0xfc, 0x76, 0x97, 0xb6, 0xcd, 0x7c, 0x6c, 0x1d, 0x7c, + 0xdd, 0x2c, 0xf3, 0xf8, 0xb3, 0x9a, 0x8f, 0x28, 0x39, 0x6d, 0xd8, 0x3c, 0xde, 0x9e, 0xfe, 0xe6, + 0xef, 0x7e, 0x5e, 0x7a, 0xef, 0xa2, 0x3f, 0x70, 0x9c, 0xfb, 0x03, 0x8b, 0xcf, 0xff, 0xc9, 0xe7, + 0xa5, 0x77, 0xd7, 0x21, 0xef, 
0x8e, 0x46, 0x1e, 0x26, 0xc1, 0xa7, 0xfb, 0xe2, 0xb5, 0xe8, 0xd7, + 0x21, 0xeb, 0xef, 0x1d, 0x13, 0xdd, 0x10, 0x89, 0x23, 0x34, 0x55, 0xe2, 0xed, 0x64, 0xa5, 0x49, + 0x74, 0xc3, 0xcf, 0x7b, 0x82, 0x81, 0x82, 0x8a, 0x0d, 0xb8, 0x79, 0xf1, 0x59, 0xae, 0xea, 0x11, + 0xa3, 0xe1, 0x1e, 0xf1, 0xcf, 0x23, 0x70, 0x6b, 0xa0, 0x79, 0xa7, 0xfe, 0x0c, 0xca, 0x75, 0x5e, + 0xce, 0x7d, 0xbb, 0x7f, 0x1d, 0x62, 0xce, 0x14, 0xdb, 0x22, 0xf7, 0x95, 0x45, 0x7f, 0x77, 0x31, + 0x71, 0xa5, 0x3b, 0xc5, 0xb6, 0xc2, 0xe8, 0x99, 0xaf, 0x69, 0x44, 0x63, 0x2f, 0xcb, 0x2a, 0xec, + 0xb9, 0xf8, 0x77, 0x12, 0xc4, 0x28, 0x09, 0xda, 0x87, 0x04, 0xff, 0xb6, 0x11, 0x62, 0xe5, 0xf3, + 0x07, 0x56, 0x04, 0x1e, 0xb5, 0x21, 0xc5, 0x67, 0x77, 0xbe, 0x1b, 0xd5, 0x7e, 0xf5, 0xf5, 0xab, + 0xd2, 0xfb, 0x17, 0x69, 0x7d, 0xe5, 0x5f, 0x44, 0x7c, 0xaa, 0xd7, 0x6a, 0x28, 0x49, 0x26, 0xa2, + 0xc5, 0xfa, 0x0c, 0xa2, 0xb9, 0x63, 0x4c, 0xd8, 0x04, 0x8e, 0xa5, 0x8c, 0x9c, 0x02, 0x1c, 0xc4, + 0xfe, 0xa4, 0x50, 0x82, 0x8c, 0x6e, 0x99, 0xd8, 0x26, 0xaa, 0x66, 0x18, 0x7e, 0xa9, 0x02, 0x0e, + 0xaa, 0x1a, 0x86, 0x5b, 0xfe, 0xc3, 0x28, 0x14, 0x56, 0x4f, 0x2f, 0x5c, 0xb1, 0x07, 0x39, 0x7a, + 0xf6, 0xe0, 0x6b, 0x49, 0x9c, 0xee, 0xbd, 0x37, 0x29, 0x4d, 0x78, 0x23, 0xd3, 0x9a, 0x58, 0x28, + 0x59, 0x27, 0xb4, 0xba, 0x50, 0x8b, 0x2f, 0x20, 0x1b, 0xe6, 0x40, 0x8f, 0x20, 0xe1, 0x11, 0x8d, + 0xcc, 0x3c, 0xd1, 0x83, 0x7f, 0xf9, 0x8a, 0xd7, 0xf5, 0x19, 0xb1, 0x22, 0x98, 0x42, 0xb6, 0x88, + 0x5c, 0x6e, 0x8b, 0xf2, 0x77, 0xd9, 0xdf, 0x56, 0x18, 0x53, 0x02, 0x22, 0xdd, 0x67, 0xf2, 0x06, + 0xda, 0x86, 0xcd, 0xfe, 0x93, 0xaa, 0xd2, 0x50, 0x3b, 0xdd, 0x81, 0x7a, 0xd8, 0x3d, 0xee, 0x34, + 0x64, 0x89, 0xb6, 0x26, 0x9d, 0xae, 0xca, 0xe1, 0x3d, 0xa5, 0x75, 0x54, 0x55, 0x9e, 0xcb, 0x11, + 0x74, 0x03, 0xb6, 0x28, 0xd1, 0x32, 0x38, 0x4a, 0x7b, 0x9b, 0x56, 0x67, 0xd0, 0x54, 0x3a, 0xd5, + 0xb6, 0xda, 0x54, 0x94, 0xae, 0x22, 0xc7, 0x68, 0x05, 0xe3, 0x64, 0xfd, 0x41, 0xb7, 0xd7, 0x6b, + 0x36, 0xe4, 0x38, 0xba, 0x01, 0x72, 0xaf, 0xab, 0x0c, 0xd8, 0x7b, 0xaa, 0xed, 0x76, 0xf7, 0xa3, + 0x66, 0x43, 0xfe, 0x69, 0xf2, 0xdd, 0x0e, 0x64, 0x42, 0xd9, 0x9c, 0xd6, 0xc9, 0x7a, 0x75, 0x50, + 0x6d, 0x77, 0x1f, 0xcb, 0x1b, 0xb4, 0x30, 0x3e, 0xed, 0x77, 0x3b, 0x6a, 0xbf, 0xfe, 0xa4, 0x79, + 0x54, 0x95, 0x25, 0xf6, 0xe7, 0x14, 0x56, 0x38, 0xe5, 0x18, 0xdd, 0x8d, 0x5f, 0xcf, 0xfa, 0xea, + 0x61, 0xeb, 0x5b, 0x03, 0x5a, 0x4c, 0xe3, 0x07, 0x7f, 0x2f, 0x41, 0x92, 0x7e, 0xbc, 0x98, 0xf6, + 0x18, 0x7d, 0x03, 0x60, 0x91, 0xa9, 0xd1, 0x2d, 0xae, 0xd5, 0x95, 0x84, 0x5e, 0x2c, 0xac, 0x22, + 0x84, 0x61, 0x6a, 0x90, 0x09, 0x65, 0x65, 0x24, 0x08, 0x57, 0xf3, 0x7c, 0xf1, 0xf6, 0x05, 0x18, + 0x21, 0xe3, 0x3e, 0x24, 0x78, 0xee, 0x42, 0xdb, 0xcb, 0x99, 0x8c, 0x73, 0xee, 0x5c, 0x94, 0xde, + 0x0e, 0xbe, 0x0d, 0xd9, 0xb0, 0xe5, 0xd1, 0x53, 0x88, 0xf3, 0x87, 0x3b, 0x97, 0x86, 0x6f, 0x71, + 0xf7, 0x72, 0xcf, 0xd9, 0x97, 0xbe, 0x2a, 0xd5, 0x1e, 0x7e, 0xf6, 0x5f, 0xbb, 0x1b, 0x9f, 0xfd, + 0x78, 0x57, 0xfa, 0xd1, 0x8f, 0x77, 0xa5, 0x4f, 0xff, 0x7b, 0x57, 0xfa, 0xf6, 0x3b, 0x63, 0x93, + 0x9c, 0xcc, 0x86, 0x15, 0xdd, 0x99, 0xdc, 0xc3, 0x1e, 0x99, 0x69, 0xee, 0x9c, 0xff, 0x11, 0x70, + 0xe5, 0xaf, 0x81, 0xc3, 0x04, 0x5b, 0xdf, 0xff, 0xbf, 0x00, 0x00, 0x00, 0xff, 0xff, 0xda, 0x5f, + 0x89, 0x90, 0x36, 0x28, 0x00, 0x00, } func (this *UUIDParts) Equal(that interface{}) bool { @@ -4899,45 +4786,6 @@ func (m *BuildAPI_Config) MarshalToSizedBuffer(dAtA []byte) (int, error) { return len(dAtA) - i, nil } -func (m *BuildAPI_Fetch) Marshal() (dAtA []byte, err error) { - size := m.ProtoSize() - dAtA = make([]byte, size) - n, err := 
m.MarshalToSizedBuffer(dAtA[:size]) - if err != nil { - return nil, err - } - return dAtA[:n], nil -} - -func (m *BuildAPI_Fetch) MarshalTo(dAtA []byte) (int, error) { - size := m.ProtoSize() - return m.MarshalToSizedBuffer(dAtA[:size]) -} - -func (m *BuildAPI_Fetch) MarshalToSizedBuffer(dAtA []byte) (int, error) { - i := len(dAtA) - _ = i - var l int - _ = l - if m.XXX_unrecognized != nil { - i -= len(m.XXX_unrecognized) - copy(dAtA[i:], m.XXX_unrecognized) - } - if m.ContentType != 0 { - i = encodeVarintFlow(dAtA, i, uint64(m.ContentType)) - i-- - dAtA[i] = 0x10 - } - if len(m.ResourceUrl) > 0 { - i -= len(m.ResourceUrl) - copy(dAtA[i:], m.ResourceUrl) - i = encodeVarintFlow(dAtA, i, uint64(len(m.ResourceUrl))) - i-- - dAtA[i] = 0xa - } - return len(dAtA) - i, nil -} - func (m *ResetStateRequest) Marshal() (dAtA []byte, err error) { size := m.ProtoSize() dAtA = make([]byte, size) @@ -6272,25 +6120,6 @@ func (m *BuildAPI_Config) ProtoSize() (n int) { return n } -func (m *BuildAPI_Fetch) ProtoSize() (n int) { - if m == nil { - return 0 - } - var l int - _ = l - l = len(m.ResourceUrl) - if l > 0 { - n += 1 + l + sovFlow(uint64(l)) - } - if m.ContentType != 0 { - n += 1 + sovFlow(uint64(m.ContentType)) - } - if m.XXX_unrecognized != nil { - n += len(m.XXX_unrecognized) - } - return n -} - func (m *ResetStateRequest) ProtoSize() (n int) { if m == nil { return 0 @@ -12380,108 +12209,6 @@ func (m *BuildAPI_Config) Unmarshal(dAtA []byte) error { } return nil } -func (m *BuildAPI_Fetch) Unmarshal(dAtA []byte) error { - l := len(dAtA) - iNdEx := 0 - for iNdEx < l { - preIndex := iNdEx - var wire uint64 - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowFlow - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - wire |= uint64(b&0x7F) << shift - if b < 0x80 { - break - } - } - fieldNum := int32(wire >> 3) - wireType := int(wire & 0x7) - if wireType == 4 { - return fmt.Errorf("proto: Fetch: wiretype end group for non-group") - } - if fieldNum <= 0 { - return fmt.Errorf("proto: Fetch: illegal tag %d (wire type %d)", fieldNum, wire) - } - switch fieldNum { - case 1: - if wireType != 2 { - return fmt.Errorf("proto: wrong wireType = %d for field ResourceUrl", wireType) - } - var stringLen uint64 - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowFlow - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - stringLen |= uint64(b&0x7F) << shift - if b < 0x80 { - break - } - } - intStringLen := int(stringLen) - if intStringLen < 0 { - return ErrInvalidLengthFlow - } - postIndex := iNdEx + intStringLen - if postIndex < 0 { - return ErrInvalidLengthFlow - } - if postIndex > l { - return io.ErrUnexpectedEOF - } - m.ResourceUrl = string(dAtA[iNdEx:postIndex]) - iNdEx = postIndex - case 2: - if wireType != 0 { - return fmt.Errorf("proto: wrong wireType = %d for field ContentType", wireType) - } - m.ContentType = 0 - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowFlow - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - m.ContentType |= ContentType(b&0x7F) << shift - if b < 0x80 { - break - } - } - default: - iNdEx = preIndex - skippy, err := skipFlow(dAtA[iNdEx:]) - if err != nil { - return err - } - if (skippy < 0) || (iNdEx+skippy) < 0 { - return ErrInvalidLengthFlow - } - if (iNdEx + skippy) > l { - return io.ErrUnexpectedEOF - } - m.XXX_unrecognized = append(m.XXX_unrecognized, dAtA[iNdEx:iNdEx+skippy]...) 
- iNdEx += skippy - } - } - - if iNdEx > l { - return io.ErrUnexpectedEOF - } - return nil -} func (m *ResetStateRequest) Unmarshal(dAtA []byte) error { l := len(dAtA) iNdEx := 0 diff --git a/go/protocols/flow/flow.proto b/go/protocols/flow/flow.proto index 9c1387bfdd..cc0f699ef0 100644 --- a/go/protocols/flow/flow.proto +++ b/go/protocols/flow/flow.proto @@ -366,7 +366,7 @@ message MaterializationSpec { consumer.ShardSpec shard_template = 5; // Template for recovery logs of shards of this materialization. protocol.JournalSpec recovery_log_template = 6; - // Network ports of this capture. + // Network ports of this materialization. repeated NetworkPort network_ports = 7; } @@ -634,34 +634,10 @@ enum ContentType { DOCUMENTS_FIXTURE = 5; } -// BuildAPI is a meta-message which name spaces messages of the Build API -// bridge. +// BuildAPI is deprecated and will be removed. +// We're currently keeping Config around only to +// avoid churning various Go snapshot tests. message BuildAPI { - // Code labels message codes passed over the CGO bridge. - enum Code { - // Begin a build with a Config (Go -> Rust). - BEGIN = 0; - // Poll the build after completing one or more trampoline tasks (Go -> - // Rust). - POLL = 1; - // Trampoline task start or completion (Rust <-> Go). - TRAMPOLINE = 2; - // Trampoline sub-type: Start fetch of a resource. - TRAMPOLINE_FETCH = 3; - // Trampoline sub-type: Start validation of a capture. - TRAMPOLINE_VALIDATE_CAPTURE = 4; - // Trampoline sub-type: Start validation of a materialization. - TRAMPOLINE_VALIDATE_MATERIALIZATION = 5; - // Build completed successfully (Rust -> Go). - DONE = 6; - // Build completed with errors (Rust -> Go). - DONE_WITH_ERRORS = 7; - // Trampoline sub-type: start docker ispect of an image - TRAMPOLINE_DOCKER_INSPECT = 8; - // Generate catalog specification JSON schema (Go <-> Rust) - CATALOG_SCHEMA = 100; - }; - message Config { // Identifier of this build. // The path of the output database is determined by joining the @@ -680,11 +656,6 @@ message BuildAPI { // URL which roots the Flow project. string project_root = 6; } - - message Fetch { - string resource_url = 1; - ContentType content_type = 2; - } } // ResetStateRequest is the request of the Testing.ResetState RPC. diff --git a/go/protocols/materialize/materialize.pb.go b/go/protocols/materialize/materialize.pb.go index 3834cbf466..4314f9d491 100644 --- a/go/protocols/materialize/materialize.pb.go +++ b/go/protocols/materialize/materialize.pb.go @@ -190,13 +190,11 @@ type Request_Validate struct { // Connector type addressed by this request. ConnectorType flow.MaterializationSpec_ConnectorType `protobuf:"varint,2,opt,name=connector_type,json=connectorType,proto3,enum=flow.MaterializationSpec_ConnectorType" json:"connector_type,omitempty"` // Connector configuration, as an encoded JSON object. - ConfigJson encoding_json.RawMessage `protobuf:"bytes,3,opt,name=config_json,json=config,proto3,casttype=encoding/json.RawMessage" json:"config_json,omitempty"` - Bindings []*Request_Validate_Binding `protobuf:"bytes,4,rep,name=bindings,proto3" json:"bindings,omitempty"` - // Network ports of this proposed materialization. 
- NetworkPorts []*flow.NetworkPort `protobuf:"bytes,5,rep,name=network_ports,json=networkPorts,proto3" json:"network_ports,omitempty"` - XXX_NoUnkeyedLiteral struct{} `json:"-"` - XXX_unrecognized []byte `json:"-"` - XXX_sizecache int32 `json:"-"` + ConfigJson encoding_json.RawMessage `protobuf:"bytes,3,opt,name=config_json,json=config,proto3,casttype=encoding/json.RawMessage" json:"config_json,omitempty"` + Bindings []*Request_Validate_Binding `protobuf:"bytes,4,rep,name=bindings,proto3" json:"bindings,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` } func (m *Request_Validate) Reset() { *m = Request_Validate{} } @@ -1315,113 +1313,111 @@ func init() { } var fileDescriptor_3e8b62b327f34bc6 = []byte{ - // 1686 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xbc, 0x57, 0x4f, 0x6f, 0x1b, 0xc7, - 0x15, 0xf7, 0x52, 0xfc, 0xfb, 0x48, 0x4a, 0xd4, 0x94, 0x6e, 0x36, 0x5b, 0xc7, 0x56, 0x94, 0x06, - 0x31, 0x5c, 0x64, 0x65, 0xc8, 0x45, 0x63, 0x27, 0x70, 0x51, 0x92, 0xa2, 0x00, 0xb9, 0xfa, 0x97, - 0x91, 0x9d, 0x00, 0xb9, 0x10, 0xe3, 0xdd, 0x11, 0xb5, 0xd6, 0x72, 0x67, 0xbb, 0x33, 0xb4, 0xc3, - 0x5e, 0xda, 0x4b, 0x51, 0xa0, 0xb7, 0x02, 0x45, 0x2f, 0xbd, 0x14, 0xfd, 0x0e, 0xfd, 0x04, 0x45, - 0x01, 0x1d, 0xdb, 0x2f, 0xe0, 0xa2, 0x6e, 0xbf, 0x40, 0xaf, 0x39, 0x15, 0xf3, 0x67, 0x97, 0x4b, - 0x59, 0xa4, 0x89, 0x22, 0xf5, 0x85, 0xd8, 0x79, 0xf3, 0xfb, 0x3d, 0xbe, 0x79, 0xfb, 0xe6, 0xbd, - 0xdf, 0xc2, 0x9d, 0x21, 0xdb, 0x8a, 0x13, 0x26, 0x98, 0xc7, 0x42, 0xbe, 0x35, 0x22, 0x82, 0x26, - 0x01, 0x09, 0x83, 0x9f, 0xd3, 0xfc, 0xb3, 0xab, 0x10, 0xa8, 0x9e, 0x33, 0x39, 0x1b, 0x1e, 0x8b, - 0xf8, 0x78, 0x44, 0x93, 0x8c, 0x9e, 0x3d, 0x68, 0xb8, 0x73, 0x63, 0xc6, 0xf5, 0x69, 0xc8, 0x5e, - 0xa8, 0x1f, 0xb3, 0xdb, 0x1e, 0xb2, 0x21, 0x53, 0x8f, 0x5b, 0xf2, 0xc9, 0x58, 0xdf, 0x1d, 0x32, - 0x36, 0x0c, 0xa9, 0xe6, 0x3d, 0x1d, 0x9f, 0x6e, 0x91, 0x68, 0xa2, 0xb7, 0x36, 0xff, 0xbe, 0x0e, - 0x15, 0x4c, 0x7f, 0x36, 0xa6, 0x5c, 0xa0, 0x8f, 0xa1, 0xc8, 0x63, 0xea, 0xd9, 0xd6, 0x86, 0x75, - 0xbb, 0xbe, 0xfd, 0xae, 0x9b, 0x8f, 0xd5, 0x60, 0xdc, 0x93, 0x98, 0x7a, 0x58, 0xc1, 0xd0, 0x03, - 0xa8, 0x3e, 0x27, 0x61, 0xe0, 0x13, 0x41, 0xed, 0x82, 0xa2, 0xbc, 0x77, 0x25, 0xe5, 0x0b, 0x03, - 0xc2, 0x19, 0x1c, 0xdd, 0x85, 0x12, 0x89, 0xe3, 0x70, 0x62, 0xaf, 0x28, 0x9e, 0x73, 0x25, 0xaf, - 0x23, 0x11, 0x58, 0x03, 0x65, 0x6c, 0x2c, 0xa6, 0x91, 0x5d, 0x5c, 0x10, 0xdb, 0x51, 0x4c, 0x23, - 0xac, 0x60, 0x12, 0x1e, 0x32, 0xe2, 0xdb, 0xa5, 0x05, 0xf0, 0x7d, 0x46, 0x7c, 0xac, 0x60, 0x32, - 0x9e, 0xd3, 0x70, 0xcc, 0xcf, 0xec, 0xf2, 0x82, 0x78, 0x76, 0x25, 0x02, 0x6b, 0xa0, 0x64, 0x70, - 0xc1, 0x12, 0x6a, 0x57, 0x16, 0x30, 0x4e, 0x24, 0x02, 0x6b, 0x20, 0xea, 0x41, 0x83, 0x0b, 0x92, - 0x88, 0x81, 0xc7, 0x46, 0xa3, 0x40, 0xd8, 0x55, 0x45, 0xdc, 0x98, 0x43, 0x24, 0x89, 0xe8, 0x29, - 0x1c, 0xae, 0xf3, 0xe9, 0x02, 0x75, 0xa1, 0x4e, 0xbc, 0xf3, 0x88, 0xbd, 0x08, 0xa9, 0x3f, 0xa4, - 0x76, 0x6d, 0x81, 0x8f, 0xce, 0x14, 0x87, 0xf3, 0x24, 0x74, 0x17, 0xaa, 0x41, 0x24, 0x68, 0x12, - 0x91, 0xd0, 0xf6, 0x95, 0x83, 0xb6, 0xab, 0x0b, 0xc4, 0x4d, 0x0b, 0xc4, 0xed, 0x44, 0x13, 0x9c, - 0xa1, 0x9c, 0xdf, 0x5a, 0x50, 0x94, 0x2f, 0x1e, 0x1d, 0xc2, 0xaa, 0xc7, 0xa2, 0x88, 0x7a, 0x82, - 0x25, 0x03, 0x31, 0x89, 0xa9, 0xaa, 0x95, 0xd5, 0xed, 0x8f, 0x5c, 0x55, 0x83, 0x07, 0x59, 0x18, - 0x44, 0x04, 0x2c, 0x92, 0x14, 0xb7, 0x97, 0xe2, 0x1f, 0x4f, 0x62, 0x8a, 0x9b, 0x5e, 0x7e, 0x89, - 0x1e, 0x40, 0xdd, 0x63, 0xd1, 0x69, 0x30, 0x1c, 0x3c, 0xe3, 0x2c, 
0x52, 0x55, 0x54, 0xeb, 0xde, - 0xf8, 0xe6, 0xe5, 0x2d, 0x9b, 0x46, 0x1e, 0xf3, 0x83, 0x68, 0xb8, 0x25, 0x37, 0x5c, 0x4c, 0x5e, - 0x1c, 0x50, 0xce, 0xc9, 0x90, 0xe2, 0xb2, 0x26, 0x38, 0x17, 0x25, 0xa8, 0xa6, 0x95, 0x85, 0x3e, - 0x87, 0x62, 0x44, 0x46, 0x3a, 0x9a, 0x5a, 0xf7, 0xe1, 0x37, 0x2f, 0x6f, 0x3d, 0x18, 0x06, 0xe2, - 0x6c, 0xfc, 0xd4, 0xf5, 0xd8, 0x68, 0x8b, 0x72, 0x31, 0x26, 0xc9, 0x44, 0x5f, 0x96, 0xd7, 0xae, - 0xcf, 0xe5, 0xa8, 0xb1, 0x72, 0x75, 0xc5, 0x51, 0x0b, 0xdf, 0xe6, 0x51, 0x57, 0x96, 0x3f, 0x2a, - 0xea, 0x40, 0xf5, 0x69, 0x10, 0x49, 0x08, 0xb7, 0x8b, 0x1b, 0x2b, 0xb7, 0xeb, 0xdb, 0x1f, 0x2e, - 0xbc, 0x68, 0x6e, 0x57, 0xa3, 0x71, 0x46, 0x43, 0x3f, 0x82, 0x66, 0x44, 0xc5, 0x0b, 0x96, 0x9c, - 0x0f, 0x62, 0x96, 0x08, 0x6e, 0x97, 0x94, 0x9f, 0x75, 0x7d, 0x98, 0x43, 0xbd, 0x75, 0xcc, 0x12, - 0x81, 0x1b, 0xd1, 0x74, 0xc1, 0x9d, 0x57, 0x05, 0xa8, 0x18, 0x6f, 0xe8, 0x11, 0xb4, 0x13, 0xca, - 0xd9, 0x38, 0xf1, 0xe8, 0x20, 0x7f, 0x14, 0x6b, 0x89, 0xa3, 0xac, 0xa6, 0xcc, 0x9e, 0x3e, 0xd2, - 0xa7, 0x00, 0x1e, 0x0b, 0x43, 0xea, 0xc9, 0xe4, 0x99, 0xee, 0xd1, 0xd6, 0xc1, 0xf4, 0x32, 0xbb, - 0x4c, 0x6a, 0xb7, 0x78, 0xf1, 0xf2, 0xd6, 0x35, 0x9c, 0x43, 0xa3, 0x5f, 0x5b, 0x70, 0xfd, 0x34, - 0xa0, 0xa1, 0x9f, 0x8f, 0x62, 0x30, 0x22, 0xb1, 0xbd, 0xa2, 0x0e, 0xf5, 0x70, 0xa9, 0xe4, 0xb8, - 0xbb, 0xd2, 0x85, 0x0e, 0xe7, 0x11, 0x67, 0xd1, 0x01, 0x89, 0xfb, 0x91, 0x48, 0x26, 0xdd, 0x1b, - 0xbf, 0xf9, 0xc7, 0x82, 0x83, 0xd4, 0x4f, 0xa7, 0x34, 0xa7, 0x0f, 0xef, 0xcc, 0xf1, 0x82, 0x5a, - 0xb0, 0x72, 0x4e, 0x27, 0x3a, 0x37, 0x58, 0x3e, 0xa2, 0x36, 0x94, 0x9e, 0x93, 0x70, 0xac, 0xeb, - 0xa8, 0x86, 0xf5, 0xe2, 0xd3, 0xc2, 0x7d, 0xcb, 0xf9, 0x05, 0x94, 0x54, 0xaf, 0x43, 0x3d, 0x58, - 0x1b, 0xcd, 0xd6, 0x55, 0xd6, 0x8b, 0xe7, 0x15, 0x1d, 0xbe, 0xcc, 0x40, 0x36, 0x54, 0x9e, 0xd3, - 0x84, 0xa7, 0x79, 0xad, 0xe1, 0x74, 0x89, 0xde, 0x81, 0x8a, 0x9f, 0x4c, 0x06, 0xc9, 0x58, 0x97, - 0x5f, 0x15, 0x97, 0xfd, 0x64, 0x82, 0xc7, 0x91, 0xf3, 0x57, 0x0b, 0x8a, 0xb2, 0x79, 0xfe, 0xbf, - 0x03, 0xf8, 0x10, 0x4a, 0x09, 0x89, 0x86, 0xd4, 0xb4, 0xfd, 0x35, 0xed, 0x14, 0x4b, 0x93, 0x72, - 0xa5, 0x77, 0xd1, 0x27, 0x00, 0x5c, 0x10, 0x41, 0x75, 0x79, 0x15, 0x97, 0x28, 0xaf, 0x92, 0xc2, - 0x3b, 0x02, 0x8a, 0xb2, 0xa9, 0xcb, 0x08, 0x4c, 0xe5, 0xab, 0xf0, 0x9b, 0x38, 0x5d, 0xa2, 0x7b, - 0x50, 0x3d, 0xa7, 0x93, 0xe5, 0xbb, 0x8d, 0x7a, 0x73, 0xef, 0x01, 0x48, 0x52, 0x4c, 0xbc, 0x73, - 0xea, 0xab, 0xd8, 0x1b, 0xb8, 0x76, 0x4e, 0x27, 0xc7, 0xca, 0xe0, 0x54, 0xa0, 0xa4, 0x46, 0x83, - 0xf3, 0xa7, 0x02, 0x94, 0x54, 0xcb, 0x7f, 0xbb, 0x01, 0xc8, 0xd6, 0xa2, 0x8a, 0x89, 0x2f, 0x9f, - 0xb0, 0xb2, 0x26, 0xa0, 0x0f, 0xa0, 0x69, 0xa8, 0xc6, 0x79, 0x49, 0x39, 0x6f, 0x68, 0xa3, 0xf1, - 0x7f, 0x0f, 0xaa, 0x3e, 0xf3, 0xb4, 0xf3, 0xf2, 0x32, 0x31, 0xfb, 0xcc, 0x43, 0xdf, 0x85, 0x32, - 0xfd, 0x3a, 0xe0, 0x82, 0xab, 0x09, 0x59, 0xc5, 0x66, 0xe5, 0x60, 0xa8, 0xe7, 0xa6, 0x1b, 0xea, - 0x01, 0x4a, 0xc6, 0x91, 0x08, 0x46, 0x74, 0xe0, 0x9d, 0x51, 0xef, 0x3c, 0x66, 0x41, 0x24, 0x4c, - 0xd1, 0xb5, 0xdd, 0x54, 0x0d, 0xb9, 0xbd, 0x6c, 0x0f, 0xaf, 0x1b, 0xfc, 0xd4, 0xe4, 0x34, 0xa1, - 0x9e, 0x9b, 0x76, 0x9b, 0xff, 0x6e, 0x42, 0x15, 0x53, 0x1e, 0xb3, 0x88, 0x53, 0xe4, 0xce, 0x88, - 0x9a, 0xcb, 0x73, 0x5a, 0x83, 0xf2, 0xaa, 0xe6, 0x21, 0xd4, 0x52, 0x99, 0xe2, 0x9b, 0xc6, 0x74, - 0xeb, 0x6a, 0x52, 0xda, 0x51, 0x7c, 0x3c, 0x65, 0xa0, 0x4f, 0xa0, 0x22, 0x05, 0x4b, 0x60, 0xde, - 0xd3, 0xeb, 0x9a, 0xc8, 0x90, 0x3b, 0x1a, 0x84, 0x53, 0x34, 0xfa, 0x21, 0x94, 0xa5, 0x72, 0xa1, - 0xbe, 0x91, 0x38, 0x37, 0xae, 0xe6, 0x1d, 0x29, 0x0c, 0x36, 0x58, 0xc9, 0x92, 0x02, 0x86, 
0xa6, - 0x4a, 0x67, 0x0e, 0x6b, 0x5f, 0x61, 0xb0, 0xc1, 0xca, 0x20, 0x95, 0x8a, 0xa1, 0xbe, 0x11, 0x3c, - 0x73, 0x82, 0xdc, 0xd5, 0x20, 0x9c, 0xa2, 0xd1, 0x23, 0x58, 0x55, 0x6a, 0x84, 0xfa, 0xa9, 0x8a, - 0xd1, 0xf2, 0xe7, 0x83, 0x39, 0x69, 0xd5, 0x58, 0x23, 0x64, 0x9a, 0x3c, 0xbf, 0x44, 0xbb, 0xd0, - 0xc8, 0xa9, 0x12, 0xdf, 0xe8, 0xa1, 0xcd, 0x39, 0xe9, 0xca, 0x21, 0xf1, 0x0c, 0xef, 0x7f, 0x90, - 0x33, 0xbf, 0x2f, 0x18, 0x39, 0xe3, 0x40, 0x35, 0xd5, 0x02, 0xe6, 0x9e, 0x66, 0x6b, 0xb4, 0x0b, - 0xc8, 0x8c, 0x17, 0xee, 0x9d, 0xd1, 0x11, 0x59, 0xfe, 0xca, 0x36, 0x34, 0xef, 0x44, 0xd1, 0xd0, - 0x97, 0xf0, 0xbd, 0xcb, 0x53, 0x33, 0xef, 0x70, 0x19, 0x1d, 0xd0, 0x9e, 0x1d, 0x9e, 0xc6, 0xf1, - 0x0f, 0x60, 0xdd, 0x67, 0xde, 0x78, 0x44, 0x23, 0xa1, 0xfa, 0xee, 0x60, 0x9c, 0x84, 0xfa, 0xee, - 0xe3, 0xd6, 0xcc, 0xc6, 0x93, 0x24, 0x44, 0xdf, 0x87, 0x32, 0x23, 0x63, 0x71, 0xb6, 0x6d, 0xea, - 0xa4, 0xa1, 0x5b, 0xef, 0x51, 0x47, 0xda, 0xb0, 0xd9, 0x73, 0xfe, 0x53, 0x84, 0x5a, 0x56, 0xd5, - 0xa8, 0x97, 0x93, 0x1d, 0x96, 0x9a, 0xac, 0x1f, 0xbd, 0xe1, 0x22, 0xbc, 0x2e, 0x3c, 0x9c, 0x5f, - 0x16, 0x00, 0x7a, 0x2c, 0xe2, 0x22, 0x21, 0x41, 0x24, 0xaf, 0x7b, 0x31, 0xa7, 0xa5, 0xb6, 0xde, - 0xe4, 0x6f, 0xca, 0x74, 0x95, 0xa6, 0x52, 0x64, 0xd9, 0x5a, 0x12, 0x4a, 0xb2, 0xec, 0x61, 0xb3, - 0xda, 0xfc, 0x9d, 0x05, 0x45, 0xa5, 0xb5, 0xea, 0x50, 0xd9, 0x3b, 0xfc, 0xa2, 0xb3, 0xbf, 0xb7, - 0xd3, 0xba, 0x86, 0x10, 0xac, 0xee, 0xee, 0xf5, 0xf7, 0x77, 0x06, 0xb8, 0xff, 0xf9, 0x93, 0x3d, - 0xdc, 0xdf, 0x69, 0x59, 0xe8, 0x3a, 0xac, 0xef, 0x1f, 0xf5, 0x3a, 0x8f, 0xf7, 0x8e, 0x0e, 0xa7, - 0xe6, 0x02, 0xb2, 0xa1, 0x9d, 0x33, 0xf7, 0x8e, 0x0e, 0x0e, 0xfa, 0x87, 0x3b, 0xfd, 0x9d, 0xd6, - 0xca, 0xd4, 0xc9, 0xd1, 0xb1, 0xdc, 0xed, 0xec, 0xb7, 0x8a, 0xe8, 0x3b, 0xb0, 0xa6, 0x6d, 0xbb, - 0x47, 0xb8, 0xbb, 0xb7, 0xb3, 0xd3, 0x3f, 0x6c, 0x95, 0xd0, 0x3a, 0x34, 0x9f, 0x1c, 0x9e, 0x74, - 0x1e, 0xef, 0x9d, 0xec, 0xee, 0x75, 0xba, 0xfb, 0xfd, 0x56, 0xd9, 0xf9, 0x43, 0x4e, 0x43, 0x7d, - 0xa5, 0x54, 0xa0, 0x39, 0x53, 0x9a, 0xd6, 0xfb, 0x4b, 0xa6, 0x35, 0x97, 0x0e, 0xae, 0x54, 0x06, - 0xce, 0x3b, 0x93, 0xbd, 0x3c, 0xab, 0xb4, 0x98, 0x88, 0x33, 0xbb, 0xb0, 0xb1, 0x72, 0xbb, 0x86, - 0x1b, 0xa9, 0xf1, 0x98, 0x88, 0x33, 0x09, 0xf2, 0x69, 0x28, 0xc8, 0x60, 0x1c, 0x4b, 0xdf, 0xdc, - 0x28, 0x81, 0x86, 0x32, 0x3e, 0xd1, 0x36, 0xe7, 0x19, 0xb4, 0x2e, 0xff, 0xd5, 0x15, 0x82, 0xe6, - 0x27, 0x79, 0x41, 0x53, 0xdf, 0xbe, 0xb3, 0xfc, 0xcb, 0xcc, 0x8b, 0x9f, 0xfb, 0x50, 0x31, 0xbd, - 0x10, 0x7d, 0x0c, 0x88, 0x28, 0x89, 0x37, 0xf0, 0x29, 0xf7, 0x92, 0x20, 0xce, 0x04, 0x48, 0x0d, - 0xaf, 0xeb, 0x9d, 0x9d, 0xe9, 0x86, 0x73, 0x00, 0x65, 0xdd, 0x0d, 0xbf, 0x9d, 0x21, 0xf2, 0x25, - 0x94, 0x75, 0x9b, 0x5c, 0x3c, 0xbd, 0xb3, 0x49, 0x58, 0x58, 0x72, 0x12, 0x3a, 0x35, 0xa8, 0x98, - 0x46, 0xea, 0x7c, 0x06, 0xcd, 0x99, 0x9e, 0x88, 0xee, 0x80, 0x96, 0x2e, 0x59, 0xb0, 0x46, 0x02, - 0x9b, 0x2f, 0x87, 0x13, 0xb9, 0x97, 0xaa, 0x9b, 0x55, 0x68, 0xe4, 0xdb, 0xe0, 0xe6, 0xaf, 0x8a, - 0x50, 0xea, 0x7f, 0x2d, 0x12, 0xe2, 0xfc, 0xc5, 0x82, 0xf7, 0xd3, 0x3c, 0xf7, 0xe5, 0x98, 0x0d, - 0xa2, 0xe1, 0x71, 0xc2, 0x9e, 0x69, 0xc1, 0x9c, 0x7e, 0xde, 0xef, 0x43, 0x8b, 0x9a, 0xcd, 0x41, - 0xfe, 0x7c, 0xf5, 0xed, 0xf7, 0xe7, 0x7f, 0xd3, 0xa4, 0x37, 0x7a, 0x2d, 0xa5, 0xa6, 0x95, 0x7c, - 0x0c, 0xad, 0x38, 0x61, 0x31, 0xe3, 0xd4, 0xcf, 0xbc, 0xe9, 0x42, 0x58, 0xf2, 0xe3, 0x64, 0x2d, - 0xa5, 0x1b, 0x83, 0xf3, 0xe7, 0xc2, 0xf4, 0x14, 0xc6, 0xd6, 0x19, 0x92, 0x20, 0xe2, 0x22, 0x57, - 0x8c, 0xe8, 0xb3, 0xd9, 0x97, 0xb3, 0x54, 0xf0, 0xd9, 0xfb, 0x1b, 0xce, 0x5e, 0xbf, 0x82, 0xba, - 0x7e, 0xfd, 0x99, 
0x78, 0x55, 0x46, 0xdd, 0x37, 0xc6, 0xb1, 0xf8, 0x2e, 0xbe, 0xcd, 0x1b, 0xb4, - 0xfd, 0x53, 0xa8, 0x65, 0x05, 0x83, 0x7e, 0x0c, 0xf5, 0x69, 0x26, 0x28, 0x6a, 0x5f, 0xf5, 0x2e, - 0x9c, 0xeb, 0x57, 0xfe, 0xd1, 0x6d, 0xeb, 0xae, 0xd5, 0xed, 0x5e, 0xfc, 0xf3, 0xe6, 0xb5, 0x8b, - 0x57, 0x37, 0xad, 0xbf, 0xbd, 0xba, 0x69, 0xfd, 0xf1, 0x5f, 0x37, 0xad, 0xaf, 0xee, 0x2e, 0xf5, - 0x25, 0x9d, 0x73, 0xf8, 0xb4, 0xac, 0xcc, 0xf7, 0xfe, 0x1b, 0x00, 0x00, 0xff, 0xff, 0x21, 0x3e, - 0xf0, 0x87, 0x06, 0x13, 0x00, 0x00, + // 1655 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xbc, 0x57, 0xdd, 0x6e, 0xe3, 0xc6, + 0x15, 0x5e, 0xca, 0xfa, 0x3d, 0x92, 0x6c, 0x79, 0xaa, 0x6d, 0x18, 0x76, 0x63, 0x3b, 0x4e, 0x83, + 0x18, 0x5b, 0x84, 0x36, 0xbc, 0x05, 0xb2, 0x9b, 0x60, 0x8b, 0xea, 0x17, 0xd0, 0x56, 0xb6, 0x9c, + 0xf1, 0x3a, 0x01, 0x72, 0x23, 0x8c, 0xc9, 0xb1, 0xcc, 0x35, 0xc5, 0x61, 0x49, 0x6a, 0x37, 0xea, + 0x4d, 0x7b, 0x53, 0x14, 0xe8, 0x55, 0x0b, 0x14, 0x05, 0x8a, 0xde, 0x14, 0x7d, 0x87, 0x3e, 0x41, + 0x51, 0x60, 0x2f, 0xfb, 0x04, 0x5b, 0x74, 0xdb, 0x17, 0xe8, 0x6d, 0xae, 0x8a, 0xf9, 0x21, 0x45, + 0x39, 0x92, 0x56, 0x28, 0xd2, 0xdc, 0x08, 0x9c, 0x33, 0xdf, 0x77, 0x74, 0xe6, 0xf0, 0xcc, 0x39, + 0x1f, 0xe1, 0xfe, 0x88, 0x1d, 0xfa, 0x01, 0x8b, 0x98, 0xc5, 0xdc, 0xf0, 0x70, 0x4c, 0x22, 0x1a, + 0x38, 0xc4, 0x75, 0x7e, 0x46, 0xd3, 0xcf, 0xa6, 0x40, 0xa0, 0x72, 0xca, 0x64, 0xec, 0x59, 0xcc, + 0x0b, 0x27, 0x63, 0x1a, 0x24, 0xf4, 0xe4, 0x41, 0xc2, 0x8d, 0x7b, 0x73, 0xae, 0xaf, 0x5c, 0xf6, + 0x42, 0xfc, 0xa8, 0xdd, 0xfa, 0x88, 0x8d, 0x98, 0x78, 0x3c, 0xe4, 0x4f, 0xca, 0xfa, 0xf6, 0x88, + 0xb1, 0x91, 0x4b, 0x25, 0xef, 0x72, 0x72, 0x75, 0x48, 0xbc, 0xa9, 0xdc, 0xda, 0xff, 0xc3, 0x36, + 0x14, 0x30, 0xfd, 0xe9, 0x84, 0x86, 0x11, 0xfa, 0x10, 0xb2, 0xa1, 0x4f, 0x2d, 0x5d, 0xdb, 0xd3, + 0x0e, 0xca, 0xc7, 0x6f, 0x9b, 0xe9, 0x58, 0x15, 0xc6, 0x3c, 0xf7, 0xa9, 0x85, 0x05, 0x0c, 0x3d, + 0x82, 0xe2, 0x73, 0xe2, 0x3a, 0x36, 0x89, 0xa8, 0x9e, 0x11, 0x94, 0x77, 0x16, 0x52, 0x3e, 0x53, + 0x20, 0x9c, 0xc0, 0xd1, 0x11, 0xe4, 0x88, 0xef, 0xbb, 0x53, 0x7d, 0x43, 0xf0, 0x8c, 0x85, 0xbc, + 0x06, 0x47, 0x60, 0x09, 0xe4, 0xb1, 0x31, 0x9f, 0x7a, 0x7a, 0x76, 0x45, 0x6c, 0x03, 0x9f, 0x7a, + 0x58, 0xc0, 0x38, 0xdc, 0x65, 0xc4, 0xd6, 0x73, 0x2b, 0xe0, 0x7d, 0x46, 0x6c, 0x2c, 0x60, 0x3c, + 0x9e, 0x2b, 0x77, 0x12, 0x5e, 0xeb, 0xf9, 0x15, 0xf1, 0x74, 0x39, 0x02, 0x4b, 0x20, 0x67, 0x84, + 0x11, 0x0b, 0xa8, 0x5e, 0x58, 0xc1, 0x38, 0xe7, 0x08, 0x2c, 0x81, 0xa8, 0x05, 0x95, 0x30, 0x22, + 0x41, 0x34, 0xb4, 0xd8, 0x78, 0xec, 0x44, 0x7a, 0x51, 0x10, 0xf7, 0x96, 0x10, 0x49, 0x10, 0xb5, + 0x04, 0x0e, 0x97, 0xc3, 0xd9, 0x02, 0x35, 0xa1, 0x4c, 0xac, 0x1b, 0x8f, 0xbd, 0x70, 0xa9, 0x3d, + 0xa2, 0x7a, 0x69, 0x85, 0x8f, 0xc6, 0x0c, 0x87, 0xd3, 0x24, 0x74, 0x04, 0x45, 0xc7, 0x8b, 0x68, + 0xe0, 0x11, 0x57, 0xb7, 0x85, 0x83, 0xba, 0x29, 0x0b, 0xc4, 0x8c, 0x0b, 0xc4, 0x6c, 0x78, 0x53, + 0x9c, 0xa0, 0x8c, 0xdf, 0x6a, 0x90, 0xe5, 0x2f, 0x1e, 0x9d, 0xc2, 0xa6, 0xc5, 0x3c, 0x8f, 0x5a, + 0x11, 0x0b, 0x86, 0xd1, 0xd4, 0xa7, 0xa2, 0x56, 0x36, 0x8f, 0x3f, 0x30, 0x45, 0x0d, 0x9e, 0x24, + 0x61, 0x90, 0xc8, 0x61, 0x1e, 0xa7, 0x98, 0xad, 0x18, 0xff, 0x74, 0xea, 0x53, 0x5c, 0xb5, 0xd2, + 0x4b, 0xf4, 0x08, 0xca, 0x16, 0xf3, 0xae, 0x9c, 0xd1, 0xf0, 0x59, 0xc8, 0x3c, 0x51, 0x45, 0xa5, + 0xe6, 0xbd, 0xaf, 0x5e, 0xed, 0xea, 0xd4, 0xb3, 0x98, 0xed, 0x78, 0xa3, 0x43, 0xbe, 0x61, 0x62, + 0xf2, 0xe2, 0x84, 0x86, 0x21, 0x19, 0x51, 0x9c, 0x97, 0x04, 0xe3, 0x37, 0x39, 0x28, 0xc6, 0x95, + 0x85, 0x3e, 0x85, 0xac, 0x47, 0xc6, 0x32, 0x9a, 0x52, 
0xf3, 0xf1, 0x57, 0xaf, 0x76, 0x1f, 0x8d, + 0x9c, 0xe8, 0x7a, 0x72, 0x69, 0x5a, 0x6c, 0x7c, 0x48, 0xc3, 0x68, 0x42, 0x82, 0xa9, 0xbc, 0x2c, + 0x5f, 0xbb, 0x3e, 0xb7, 0xa3, 0xc6, 0xc2, 0xd5, 0x82, 0xa3, 0x66, 0xbe, 0xc9, 0xa3, 0x6e, 0xac, + 0x7f, 0x54, 0xd4, 0x80, 0xe2, 0xa5, 0xe3, 0x71, 0x48, 0xa8, 0x67, 0xf7, 0x36, 0x0e, 0xca, 0xc7, + 0xef, 0xaf, 0xbc, 0x68, 0x66, 0x53, 0xa2, 0x71, 0x42, 0x33, 0x5e, 0x67, 0xa0, 0xa0, 0xac, 0xe8, + 0x09, 0xd4, 0x03, 0x1a, 0xb2, 0x49, 0x60, 0xd1, 0x61, 0x3a, 0x24, 0x6d, 0x8d, 0x90, 0x36, 0x63, + 0x66, 0x4b, 0x86, 0xf6, 0x31, 0x80, 0xc5, 0x5c, 0x97, 0x5a, 0x3c, 0x09, 0xaa, 0x0b, 0xd4, 0x65, + 0x86, 0x5a, 0x89, 0x9d, 0x27, 0xa7, 0x99, 0x7d, 0xf9, 0x6a, 0xf7, 0x0e, 0x4e, 0xa1, 0xd1, 0xaf, + 0x34, 0xb8, 0x7b, 0xe5, 0x50, 0xd7, 0x4e, 0x47, 0x31, 0x1c, 0x13, 0x5f, 0xdf, 0x10, 0x87, 0x7c, + 0xbc, 0xd6, 0x21, 0xcd, 0x2e, 0x77, 0x21, 0xc3, 0x79, 0x12, 0x32, 0xef, 0x84, 0xf8, 0x1d, 0x2f, + 0x0a, 0xa6, 0xcd, 0x7b, 0xbf, 0xfe, 0xc7, 0x8a, 0x83, 0x94, 0xaf, 0x66, 0x34, 0xa3, 0x03, 0x6f, + 0x2d, 0xf1, 0x82, 0x6a, 0xb0, 0x71, 0x43, 0xa7, 0x32, 0x37, 0x98, 0x3f, 0xa2, 0x3a, 0xe4, 0x9e, + 0x13, 0x77, 0x22, 0xeb, 0xa1, 0x84, 0xe5, 0xe2, 0xe3, 0xcc, 0x43, 0xcd, 0xf8, 0x39, 0xe4, 0x44, + 0xcf, 0x42, 0x2d, 0xd8, 0x1a, 0xcf, 0xd7, 0x47, 0xd2, 0x53, 0x97, 0x15, 0x0f, 0xbe, 0xcd, 0x40, + 0x3a, 0x14, 0x9e, 0xd3, 0x20, 0x8c, 0xf3, 0x5a, 0xc2, 0xf1, 0x12, 0xbd, 0x05, 0x05, 0x3b, 0x98, + 0x0e, 0x83, 0x89, 0x2c, 0xa3, 0x22, 0xce, 0xdb, 0xc1, 0x14, 0x4f, 0x3c, 0xe3, 0x6f, 0x1a, 0x64, + 0x79, 0x13, 0xfc, 0x7f, 0x07, 0xf0, 0x3e, 0xe4, 0x02, 0xe2, 0x8d, 0xa8, 0x6a, 0xdf, 0x5b, 0xd2, + 0x29, 0xe6, 0x26, 0xe1, 0x4a, 0xee, 0xa2, 0x8f, 0x00, 0xc2, 0x88, 0x44, 0x54, 0x96, 0x57, 0x76, + 0x8d, 0xf2, 0xca, 0x09, 0xbc, 0x11, 0x41, 0x96, 0x37, 0x67, 0x1e, 0x81, 0xaa, 0x60, 0x11, 0x7e, + 0x15, 0xc7, 0x4b, 0xf4, 0x00, 0x8a, 0x37, 0x74, 0xba, 0x7e, 0xd7, 0x10, 0x6f, 0xee, 0x1d, 0x00, + 0x4e, 0xf2, 0x89, 0x75, 0x43, 0x6d, 0x11, 0x7b, 0x05, 0x97, 0x6e, 0xe8, 0xf4, 0x4c, 0x18, 0x8c, + 0x02, 0xe4, 0x44, 0x8b, 0x37, 0xfe, 0x9c, 0x81, 0x9c, 0x68, 0xdd, 0xdf, 0x6e, 0x00, 0xbc, 0x45, + 0x88, 0x62, 0x0a, 0xd7, 0x4f, 0x58, 0x5e, 0x12, 0xd0, 0x7b, 0x50, 0x55, 0x54, 0xe5, 0x3c, 0x27, + 0x9c, 0x57, 0xa4, 0x51, 0xf9, 0x7f, 0x00, 0x45, 0x9b, 0x59, 0xd2, 0x79, 0x7e, 0x9d, 0x98, 0x6d, + 0x66, 0xa1, 0xef, 0x42, 0x9e, 0x7e, 0xe9, 0x84, 0x51, 0x28, 0x26, 0x5d, 0x11, 0xab, 0x95, 0x81, + 0xa1, 0x9c, 0x9a, 0x52, 0xa8, 0x05, 0x28, 0x98, 0x78, 0x91, 0x33, 0xa6, 0x43, 0xeb, 0x9a, 0x5a, + 0x37, 0x3e, 0x73, 0xbc, 0x48, 0x15, 0x5d, 0xdd, 0x8c, 0x55, 0x8d, 0xd9, 0x4a, 0xf6, 0xf0, 0xb6, + 0xc2, 0xcf, 0x4c, 0x46, 0x15, 0xca, 0xa9, 0xa9, 0xb5, 0xff, 0xef, 0x2a, 0x14, 0x31, 0x0d, 0x7d, + 0xe6, 0x85, 0x14, 0x99, 0x73, 0xe2, 0xe4, 0xf6, 0xbc, 0x95, 0xa0, 0xb4, 0x3a, 0x79, 0x0c, 0xa5, + 0x58, 0x6e, 0xd8, 0xaa, 0x31, 0xed, 0x2e, 0x26, 0xc5, 0x1d, 0xc5, 0xc6, 0x33, 0x06, 0xfa, 0x08, + 0x0a, 0x5c, 0x78, 0x38, 0xea, 0x3d, 0x7d, 0x5d, 0xdb, 0x28, 0x72, 0x43, 0x82, 0x70, 0x8c, 0x46, + 0x3f, 0x84, 0x3c, 0x57, 0x20, 0xd4, 0x56, 0x52, 0xe5, 0xde, 0x62, 0xde, 0x40, 0x60, 0xb0, 0xc2, + 0x72, 0x16, 0x17, 0x22, 0x34, 0x56, 0x2c, 0x4b, 0x58, 0x7d, 0x81, 0xc1, 0x0a, 0xcb, 0x83, 0x14, + 0x6a, 0x84, 0xda, 0x4a, 0xb8, 0x2c, 0x09, 0xb2, 0x2b, 0x41, 0x38, 0x46, 0xa3, 0x27, 0xb0, 0x29, + 0x54, 0x05, 0xb5, 0x63, 0x35, 0x22, 0x65, 0xcc, 0x7b, 0x4b, 0xd2, 0x2a, 0xb1, 0x4a, 0x90, 0x54, + 0xc3, 0xf4, 0x12, 0x75, 0xa1, 0x92, 0x52, 0x17, 0xb6, 0xd2, 0x35, 0xfb, 0x4b, 0xd2, 0x95, 0x42, + 0xe2, 0x39, 0xde, 0xff, 0x20, 0x4b, 0x7e, 0x9f, 0x51, 0xb2, 0xc4, 0x80, 0x62, 
0x3c, 0xd3, 0xd5, + 0x3d, 0x4d, 0xd6, 0xa8, 0x0b, 0x48, 0x8d, 0x97, 0xd0, 0xba, 0xa6, 0x63, 0xb2, 0xfe, 0x95, 0xad, + 0x48, 0xde, 0xb9, 0xa0, 0xa1, 0xcf, 0xe1, 0x7b, 0xb7, 0xa7, 0x66, 0xda, 0xe1, 0x3a, 0xf3, 0xbc, + 0x3e, 0x3f, 0x3c, 0x95, 0xe3, 0x1f, 0xc0, 0xb6, 0xcd, 0xac, 0xc9, 0x98, 0x7a, 0x91, 0xe8, 0xbb, + 0xc3, 0x49, 0xe0, 0xca, 0xbb, 0x8f, 0x6b, 0x73, 0x1b, 0x17, 0x81, 0x8b, 0xbe, 0x0f, 0x79, 0x46, + 0x26, 0xd1, 0xf5, 0xb1, 0xaa, 0x93, 0x8a, 0x6c, 0xbd, 0x83, 0x06, 0xb7, 0x61, 0xb5, 0x67, 0xfc, + 0x27, 0x0b, 0xa5, 0xa4, 0xaa, 0x51, 0x2b, 0x25, 0x1f, 0x34, 0x31, 0x59, 0x3f, 0x78, 0xc3, 0x45, + 0x58, 0x20, 0x20, 0x7e, 0x91, 0x01, 0x68, 0x31, 0x2f, 0x8c, 0x02, 0xe2, 0x78, 0xfc, 0xba, 0x67, + 0x53, 0x9a, 0xe8, 0xf0, 0x4d, 0xfe, 0x66, 0x4c, 0x53, 0x68, 0x23, 0x41, 0xe6, 0xad, 0x25, 0xa0, + 0x24, 0xc9, 0x1e, 0x56, 0xab, 0xfd, 0xdf, 0x69, 0x90, 0x15, 0x9a, 0xa9, 0x0c, 0x85, 0xde, 0xe9, + 0x67, 0x8d, 0x7e, 0xaf, 0x5d, 0xbb, 0x83, 0x10, 0x6c, 0x76, 0x7b, 0x9d, 0x7e, 0x7b, 0x88, 0x3b, + 0x9f, 0x5e, 0xf4, 0x70, 0xa7, 0x5d, 0xd3, 0xd0, 0x5d, 0xd8, 0xee, 0x0f, 0x5a, 0x8d, 0xa7, 0xbd, + 0xc1, 0xe9, 0xcc, 0x9c, 0x41, 0x3a, 0xd4, 0x53, 0xe6, 0xd6, 0xe0, 0xe4, 0xa4, 0x73, 0xda, 0xee, + 0xb4, 0x6b, 0x1b, 0x33, 0x27, 0x83, 0x33, 0xbe, 0xdb, 0xe8, 0xd7, 0xb2, 0xe8, 0x3b, 0xb0, 0x25, + 0x6d, 0xdd, 0x01, 0x6e, 0xf6, 0xda, 0xed, 0xce, 0x69, 0x2d, 0x87, 0xb6, 0xa1, 0x7a, 0x71, 0x7a, + 0xde, 0x78, 0xda, 0x3b, 0xef, 0xf6, 0x1a, 0xcd, 0x7e, 0xa7, 0x96, 0x37, 0xfe, 0x98, 0xd2, 0x50, + 0x5f, 0x08, 0x35, 0xa7, 0xce, 0x14, 0xa7, 0xf5, 0xe1, 0x9a, 0x69, 0x4d, 0xa5, 0x23, 0x14, 0x2a, + 0x03, 0xa7, 0x9d, 0xf1, 0x5e, 0x9e, 0x54, 0x9a, 0x4f, 0xa2, 0x6b, 0x3d, 0xb3, 0xb7, 0x71, 0x50, + 0xc2, 0x95, 0xd8, 0x78, 0x46, 0xa2, 0x6b, 0x0e, 0xb2, 0xa9, 0x1b, 0x91, 0xe1, 0xc4, 0xe7, 0xbe, + 0x43, 0xa5, 0x04, 0x2a, 0xc2, 0x78, 0x21, 0x6d, 0xc6, 0x33, 0xa8, 0xdd, 0xfe, 0xab, 0x05, 0x82, + 0xe6, 0xc7, 0x69, 0x41, 0x53, 0x3e, 0xbe, 0xbf, 0xfe, 0xcb, 0x4c, 0x8b, 0x9f, 0x87, 0x50, 0x50, + 0xbd, 0x10, 0x7d, 0x08, 0x88, 0x08, 0x89, 0x37, 0xb4, 0x69, 0x68, 0x05, 0x8e, 0x9f, 0x08, 0x90, + 0x12, 0xde, 0x96, 0x3b, 0xed, 0xd9, 0x86, 0x71, 0x02, 0x79, 0xd9, 0x0d, 0xbf, 0x99, 0x21, 0xf2, + 0x39, 0xe4, 0x65, 0x9b, 0x5c, 0x3d, 0xbd, 0x93, 0x49, 0x98, 0x59, 0x73, 0x12, 0x1a, 0x25, 0x28, + 0xa8, 0x46, 0x6a, 0x7c, 0x02, 0xd5, 0xb9, 0x9e, 0x88, 0xee, 0x83, 0x94, 0x2e, 0x49, 0xb0, 0x4a, + 0x02, 0xab, 0x2f, 0x80, 0x73, 0xbe, 0x17, 0xab, 0x9b, 0x4d, 0xa8, 0xa4, 0xdb, 0xe0, 0xfe, 0x2f, + 0xb3, 0x90, 0xeb, 0x7c, 0x19, 0x05, 0xc4, 0xf8, 0xab, 0x06, 0xef, 0xc6, 0x79, 0xee, 0xf0, 0x31, + 0xeb, 0x78, 0xa3, 0xb3, 0x80, 0x3d, 0x93, 0x82, 0x39, 0xfe, 0x4c, 0xef, 0x43, 0x8d, 0xaa, 0xcd, + 0x61, 0xfa, 0x7c, 0xe5, 0xe3, 0x77, 0x97, 0x7f, 0x9b, 0xc4, 0x37, 0x7a, 0x2b, 0xa6, 0xc6, 0x95, + 0x7c, 0x06, 0x35, 0x3f, 0x60, 0x3e, 0x0b, 0xa9, 0x9d, 0x78, 0x93, 0x85, 0xb0, 0xe6, 0x47, 0xc6, + 0x56, 0x4c, 0x57, 0x06, 0xe3, 0x2f, 0x99, 0xd9, 0x29, 0x94, 0xad, 0x31, 0x22, 0x8e, 0x17, 0x46, + 0xa9, 0x62, 0x44, 0x9f, 0xcc, 0xbf, 0x9c, 0xb5, 0x82, 0x4f, 0xde, 0xdf, 0x68, 0xfe, 0xfa, 0x65, + 0xc4, 0xf5, 0xeb, 0xcc, 0xc5, 0x2b, 0x32, 0x6a, 0xbe, 0x31, 0x8e, 0xd5, 0x77, 0xf1, 0xdb, 0xbc, + 0x41, 0xc7, 0x3f, 0x81, 0x52, 0x52, 0x30, 0xe8, 0x47, 0x50, 0x9e, 0x65, 0x82, 0xa2, 0xfa, 0xa2, + 0x77, 0x61, 0xdc, 0x5d, 0xf8, 0x47, 0x07, 0xda, 0x91, 0xd6, 0x6c, 0xbe, 0xfc, 0xe7, 0xce, 0x9d, + 0x97, 0xaf, 0x77, 0xb4, 0xbf, 0xbf, 0xde, 0xd1, 0xfe, 0xf4, 0xaf, 0x1d, 0xed, 0x8b, 0xa3, 0xb5, + 0xbe, 0x88, 0x53, 0x0e, 0x2f, 0xf3, 0xc2, 0xfc, 0xe0, 0xbf, 0x01, 0x00, 0x00, 0xff, 0xff, 0x60, + 0xa9, 
0x4e, 0xd7, 0xce, 0x12, 0x00, 0x00, } // Reference imports to suppress errors if they are not otherwise used. @@ -1748,20 +1744,6 @@ func (m *Request_Validate) MarshalToSizedBuffer(dAtA []byte) (int, error) { i -= len(m.XXX_unrecognized) copy(dAtA[i:], m.XXX_unrecognized) } - if len(m.NetworkPorts) > 0 { - for iNdEx := len(m.NetworkPorts) - 1; iNdEx >= 0; iNdEx-- { - { - size, err := m.NetworkPorts[iNdEx].MarshalToSizedBuffer(dAtA[:i]) - if err != nil { - return 0, err - } - i -= size - i = encodeVarintMaterialize(dAtA, i, uint64(size)) - } - i-- - dAtA[i] = 0x2a - } - } if len(m.Bindings) > 0 { for iNdEx := len(m.Bindings) - 1; iNdEx >= 0; iNdEx-- { { @@ -3005,12 +2987,6 @@ func (m *Request_Validate) ProtoSize() (n int) { n += 1 + l + sovMaterialize(uint64(l)) } } - if len(m.NetworkPorts) > 0 { - for _, e := range m.NetworkPorts { - l = e.ProtoSize() - n += 1 + l + sovMaterialize(uint64(l)) - } - } if m.XXX_unrecognized != nil { n += len(m.XXX_unrecognized) } @@ -4162,40 +4138,6 @@ func (m *Request_Validate) Unmarshal(dAtA []byte) error { return err } iNdEx = postIndex - case 5: - if wireType != 2 { - return fmt.Errorf("proto: wrong wireType = %d for field NetworkPorts", wireType) - } - var msglen int - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowMaterialize - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - msglen |= int(b&0x7F) << shift - if b < 0x80 { - break - } - } - if msglen < 0 { - return ErrInvalidLengthMaterialize - } - postIndex := iNdEx + msglen - if postIndex < 0 { - return ErrInvalidLengthMaterialize - } - if postIndex > l { - return io.ErrUnexpectedEOF - } - m.NetworkPorts = append(m.NetworkPorts, &flow.NetworkPort{}) - if err := m.NetworkPorts[len(m.NetworkPorts)-1].Unmarshal(dAtA[iNdEx:postIndex]); err != nil { - return err - } - iNdEx = postIndex default: iNdEx = preIndex skippy, err := skipMaterialize(dAtA[iNdEx:]) diff --git a/go/protocols/materialize/materialize.proto b/go/protocols/materialize/materialize.proto index a5ba13c1f5..43c090d1de 100644 --- a/go/protocols/materialize/materialize.proto +++ b/go/protocols/materialize/materialize.proto @@ -69,8 +69,6 @@ message Request { ]; } repeated Binding bindings = 4; - // Network ports of this proposed materialization. - repeated flow.NetworkPort network_ports = 5; } Validate validate = 2; diff --git a/go/protocols/runtime/runtime.pb.go b/go/protocols/runtime/runtime.pb.go index 2c7ef19790..b9aefc3a63 100644 --- a/go/protocols/runtime/runtime.pb.go +++ b/go/protocols/runtime/runtime.pb.go @@ -39,6 +39,7 @@ type TaskServiceConfig struct { LogFileFd int32 `protobuf:"varint,1,opt,name=log_file_fd,json=logFileFd,proto3" json:"log_file_fd,omitempty"` TaskName string `protobuf:"bytes,2,opt,name=task_name,json=taskName,proto3" json:"task_name,omitempty"` UdsPath string `protobuf:"bytes,3,opt,name=uds_path,json=udsPath,proto3" json:"uds_path,omitempty"` + ContainerNetwork string `protobuf:"bytes,4,opt,name=container_network,json=containerNetwork,proto3" json:"container_network,omitempty"` XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` XXX_sizecache int32 `json:"-"` @@ -273,8 +274,131 @@ func (m *RocksDBDescriptor) XXX_DiscardUnknown() { var xxx_messageInfo_RocksDBDescriptor proto.InternalMessageInfo +// Container is a description of a running connector container. 
+type Container struct { + IpAddr string `protobuf:"bytes,1,opt,name=ip_addr,json=ipAddr,proto3" json:"ip_addr,omitempty"` + NetworkPorts []*flow.NetworkPort `protobuf:"bytes,2,rep,name=network_ports,json=networkPorts,proto3" json:"network_ports,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *Container) Reset() { *m = Container{} } +func (m *Container) String() string { return proto.CompactTextString(m) } +func (*Container) ProtoMessage() {} +func (*Container) Descriptor() ([]byte, []int) { + return fileDescriptor_73af6e0737ce390c, []int{4} +} +func (m *Container) XXX_Unmarshal(b []byte) error { + return m.Unmarshal(b) +} +func (m *Container) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + if deterministic { + return xxx_messageInfo_Container.Marshal(b, m, deterministic) + } else { + b = b[:cap(b)] + n, err := m.MarshalToSizedBuffer(b) + if err != nil { + return nil, err + } + return b[:n], nil + } +} +func (m *Container) XXX_Merge(src proto.Message) { + xxx_messageInfo_Container.Merge(m, src) +} +func (m *Container) XXX_Size() int { + return m.ProtoSize() +} +func (m *Container) XXX_DiscardUnknown() { + xxx_messageInfo_Container.DiscardUnknown(m) +} + +var xxx_messageInfo_Container proto.InternalMessageInfo + +type CaptureRequestExt struct { + Labels *ops.ShardLabeling `protobuf:"bytes,1,opt,name=labels,proto3" json:"labels,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CaptureRequestExt) Reset() { *m = CaptureRequestExt{} } +func (m *CaptureRequestExt) String() string { return proto.CompactTextString(m) } +func (*CaptureRequestExt) ProtoMessage() {} +func (*CaptureRequestExt) Descriptor() ([]byte, []int) { + return fileDescriptor_73af6e0737ce390c, []int{5} +} +func (m *CaptureRequestExt) XXX_Unmarshal(b []byte) error { + return m.Unmarshal(b) +} +func (m *CaptureRequestExt) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + if deterministic { + return xxx_messageInfo_CaptureRequestExt.Marshal(b, m, deterministic) + } else { + b = b[:cap(b)] + n, err := m.MarshalToSizedBuffer(b) + if err != nil { + return nil, err + } + return b[:n], nil + } +} +func (m *CaptureRequestExt) XXX_Merge(src proto.Message) { + xxx_messageInfo_CaptureRequestExt.Merge(m, src) +} +func (m *CaptureRequestExt) XXX_Size() int { + return m.ProtoSize() +} +func (m *CaptureRequestExt) XXX_DiscardUnknown() { + xxx_messageInfo_CaptureRequestExt.DiscardUnknown(m) +} + +var xxx_messageInfo_CaptureRequestExt proto.InternalMessageInfo + +type CaptureResponseExt struct { + Container *Container `protobuf:"bytes,1,opt,name=container,proto3" json:"container,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *CaptureResponseExt) Reset() { *m = CaptureResponseExt{} } +func (m *CaptureResponseExt) String() string { return proto.CompactTextString(m) } +func (*CaptureResponseExt) ProtoMessage() {} +func (*CaptureResponseExt) Descriptor() ([]byte, []int) { + return fileDescriptor_73af6e0737ce390c, []int{6} +} +func (m *CaptureResponseExt) XXX_Unmarshal(b []byte) error { + return m.Unmarshal(b) +} +func (m *CaptureResponseExt) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + if deterministic { + return xxx_messageInfo_CaptureResponseExt.Marshal(b, m, deterministic) + } else { + b = b[:cap(b)] + n, err := m.MarshalToSizedBuffer(b) 
+ if err != nil { + return nil, err + } + return b[:n], nil + } +} +func (m *CaptureResponseExt) XXX_Merge(src proto.Message) { + xxx_messageInfo_CaptureResponseExt.Merge(m, src) +} +func (m *CaptureResponseExt) XXX_Size() int { + return m.ProtoSize() +} +func (m *CaptureResponseExt) XXX_DiscardUnknown() { + xxx_messageInfo_CaptureResponseExt.DiscardUnknown(m) +} + +var xxx_messageInfo_CaptureResponseExt proto.InternalMessageInfo + type DeriveRequestExt struct { - Open *DeriveRequestExt_Open `protobuf:"bytes,1,opt,name=open,proto3" json:"open,omitempty"` + Labels *ops.ShardLabeling `protobuf:"bytes,1,opt,name=labels,proto3" json:"labels,omitempty"` + Open *DeriveRequestExt_Open `protobuf:"bytes,2,opt,name=open,proto3" json:"open,omitempty"` XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` XXX_sizecache int32 `json:"-"` @@ -284,7 +408,7 @@ func (m *DeriveRequestExt) Reset() { *m = DeriveRequestExt{} } func (m *DeriveRequestExt) String() string { return proto.CompactTextString(m) } func (*DeriveRequestExt) ProtoMessage() {} func (*DeriveRequestExt) Descriptor() ([]byte, []int) { - return fileDescriptor_73af6e0737ce390c, []int{4} + return fileDescriptor_73af6e0737ce390c, []int{7} } func (m *DeriveRequestExt) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -314,12 +438,10 @@ func (m *DeriveRequestExt) XXX_DiscardUnknown() { var xxx_messageInfo_DeriveRequestExt proto.InternalMessageInfo type DeriveRequestExt_Open struct { - // Configured log level. - LogLevel ops.Log_Level `protobuf:"varint,1,opt,name=log_level,json=logLevel,proto3,enum=ops.Log_Level" json:"log_level,omitempty"` // URL with a registered SQLite VFS which should be opened. - SqliteVfsUri string `protobuf:"bytes,2,opt,name=sqlite_vfs_uri,json=sqliteVfsUri,proto3" json:"sqlite_vfs_uri,omitempty"` + SqliteVfsUri string `protobuf:"bytes,1,opt,name=sqlite_vfs_uri,json=sqliteVfsUri,proto3" json:"sqlite_vfs_uri,omitempty"` // RocksDB descriptor which should be opened. 
- RocksdbDescriptor *RocksDBDescriptor `protobuf:"bytes,3,opt,name=rocksdb_descriptor,json=rocksdbDescriptor,proto3" json:"rocksdb_descriptor,omitempty"` + RocksdbDescriptor *RocksDBDescriptor `protobuf:"bytes,2,opt,name=rocksdb_descriptor,json=rocksdbDescriptor,proto3" json:"rocksdb_descriptor,omitempty"` XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` XXX_sizecache int32 `json:"-"` @@ -329,7 +451,7 @@ func (m *DeriveRequestExt_Open) Reset() { *m = DeriveRequestExt_Open{} } func (m *DeriveRequestExt_Open) String() string { return proto.CompactTextString(m) } func (*DeriveRequestExt_Open) ProtoMessage() {} func (*DeriveRequestExt_Open) Descriptor() ([]byte, []int) { - return fileDescriptor_73af6e0737ce390c, []int{4, 0} + return fileDescriptor_73af6e0737ce390c, []int{7, 0} } func (m *DeriveRequestExt_Open) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -359,9 +481,10 @@ func (m *DeriveRequestExt_Open) XXX_DiscardUnknown() { var xxx_messageInfo_DeriveRequestExt_Open proto.InternalMessageInfo type DeriveResponseExt struct { - Opened *DeriveResponseExt_Opened `protobuf:"bytes,1,opt,name=opened,proto3" json:"opened,omitempty"` - Published *DeriveResponseExt_Published `protobuf:"bytes,2,opt,name=published,proto3" json:"published,omitempty"` - Flushed *DeriveResponseExt_Flushed `protobuf:"bytes,3,opt,name=flushed,proto3" json:"flushed,omitempty"` + Container *Container `protobuf:"bytes,1,opt,name=container,proto3" json:"container,omitempty"` + Opened *DeriveResponseExt_Opened `protobuf:"bytes,2,opt,name=opened,proto3" json:"opened,omitempty"` + Published *DeriveResponseExt_Published `protobuf:"bytes,3,opt,name=published,proto3" json:"published,omitempty"` + Flushed *DeriveResponseExt_Flushed `protobuf:"bytes,4,opt,name=flushed,proto3" json:"flushed,omitempty"` XXX_NoUnkeyedLiteral struct{} `json:"-"` XXX_unrecognized []byte `json:"-"` XXX_sizecache int32 `json:"-"` @@ -371,7 +494,7 @@ func (m *DeriveResponseExt) Reset() { *m = DeriveResponseExt{} } func (m *DeriveResponseExt) String() string { return proto.CompactTextString(m) } func (*DeriveResponseExt) ProtoMessage() {} func (*DeriveResponseExt) Descriptor() ([]byte, []int) { - return fileDescriptor_73af6e0737ce390c, []int{5} + return fileDescriptor_73af6e0737ce390c, []int{8} } func (m *DeriveResponseExt) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -411,7 +534,7 @@ func (m *DeriveResponseExt_Opened) Reset() { *m = DeriveResponseExt_Open func (m *DeriveResponseExt_Opened) String() string { return proto.CompactTextString(m) } func (*DeriveResponseExt_Opened) ProtoMessage() {} func (*DeriveResponseExt_Opened) Descriptor() ([]byte, []int) { - return fileDescriptor_73af6e0737ce390c, []int{5, 0} + return fileDescriptor_73af6e0737ce390c, []int{8, 0} } func (m *DeriveResponseExt_Opened) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -456,7 +579,7 @@ func (m *DeriveResponseExt_Published) Reset() { *m = DeriveResponseExt_P func (m *DeriveResponseExt_Published) String() string { return proto.CompactTextString(m) } func (*DeriveResponseExt_Published) ProtoMessage() {} func (*DeriveResponseExt_Published) Descriptor() ([]byte, []int) { - return fileDescriptor_73af6e0737ce390c, []int{5, 1} + return fileDescriptor_73af6e0737ce390c, []int{8, 1} } func (m *DeriveResponseExt_Published) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -496,7 +619,7 @@ func (m *DeriveResponseExt_Flushed) Reset() { *m = DeriveResponseExt_Flu func (m *DeriveResponseExt_Flushed) String() string { return 
proto.CompactTextString(m) } func (*DeriveResponseExt_Flushed) ProtoMessage() {} func (*DeriveResponseExt_Flushed) Descriptor() ([]byte, []int) { - return fileDescriptor_73af6e0737ce390c, []int{5, 2} + return fileDescriptor_73af6e0737ce390c, []int{8, 2} } func (m *DeriveResponseExt_Flushed) XXX_Unmarshal(b []byte) error { return m.Unmarshal(b) @@ -525,17 +648,102 @@ func (m *DeriveResponseExt_Flushed) XXX_DiscardUnknown() { var xxx_messageInfo_DeriveResponseExt_Flushed proto.InternalMessageInfo +type MaterializeRequestExt struct { + Labels *ops.ShardLabeling `protobuf:"bytes,1,opt,name=labels,proto3" json:"labels,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MaterializeRequestExt) Reset() { *m = MaterializeRequestExt{} } +func (m *MaterializeRequestExt) String() string { return proto.CompactTextString(m) } +func (*MaterializeRequestExt) ProtoMessage() {} +func (*MaterializeRequestExt) Descriptor() ([]byte, []int) { + return fileDescriptor_73af6e0737ce390c, []int{9} +} +func (m *MaterializeRequestExt) XXX_Unmarshal(b []byte) error { + return m.Unmarshal(b) +} +func (m *MaterializeRequestExt) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + if deterministic { + return xxx_messageInfo_MaterializeRequestExt.Marshal(b, m, deterministic) + } else { + b = b[:cap(b)] + n, err := m.MarshalToSizedBuffer(b) + if err != nil { + return nil, err + } + return b[:n], nil + } +} +func (m *MaterializeRequestExt) XXX_Merge(src proto.Message) { + xxx_messageInfo_MaterializeRequestExt.Merge(m, src) +} +func (m *MaterializeRequestExt) XXX_Size() int { + return m.ProtoSize() +} +func (m *MaterializeRequestExt) XXX_DiscardUnknown() { + xxx_messageInfo_MaterializeRequestExt.DiscardUnknown(m) +} + +var xxx_messageInfo_MaterializeRequestExt proto.InternalMessageInfo + +type MaterializeResponseExt struct { + Container *Container `protobuf:"bytes,1,opt,name=container,proto3" json:"container,omitempty"` + XXX_NoUnkeyedLiteral struct{} `json:"-"` + XXX_unrecognized []byte `json:"-"` + XXX_sizecache int32 `json:"-"` +} + +func (m *MaterializeResponseExt) Reset() { *m = MaterializeResponseExt{} } +func (m *MaterializeResponseExt) String() string { return proto.CompactTextString(m) } +func (*MaterializeResponseExt) ProtoMessage() {} +func (*MaterializeResponseExt) Descriptor() ([]byte, []int) { + return fileDescriptor_73af6e0737ce390c, []int{10} +} +func (m *MaterializeResponseExt) XXX_Unmarshal(b []byte) error { + return m.Unmarshal(b) +} +func (m *MaterializeResponseExt) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) { + if deterministic { + return xxx_messageInfo_MaterializeResponseExt.Marshal(b, m, deterministic) + } else { + b = b[:cap(b)] + n, err := m.MarshalToSizedBuffer(b) + if err != nil { + return nil, err + } + return b[:n], nil + } +} +func (m *MaterializeResponseExt) XXX_Merge(src proto.Message) { + xxx_messageInfo_MaterializeResponseExt.Merge(m, src) +} +func (m *MaterializeResponseExt) XXX_Size() int { + return m.ProtoSize() +} +func (m *MaterializeResponseExt) XXX_DiscardUnknown() { + xxx_messageInfo_MaterializeResponseExt.DiscardUnknown(m) +} + +var xxx_messageInfo_MaterializeResponseExt proto.InternalMessageInfo + func init() { proto.RegisterType((*TaskServiceConfig)(nil), "runtime.TaskServiceConfig") proto.RegisterType((*ShuffleRequest)(nil), "runtime.ShuffleRequest") proto.RegisterType((*ShuffleResponse)(nil), "runtime.ShuffleResponse") proto.RegisterType((*RocksDBDescriptor)(nil), 
"runtime.RocksDBDescriptor") + proto.RegisterType((*Container)(nil), "runtime.Container") + proto.RegisterType((*CaptureRequestExt)(nil), "runtime.CaptureRequestExt") + proto.RegisterType((*CaptureResponseExt)(nil), "runtime.CaptureResponseExt") proto.RegisterType((*DeriveRequestExt)(nil), "runtime.DeriveRequestExt") proto.RegisterType((*DeriveRequestExt_Open)(nil), "runtime.DeriveRequestExt.Open") proto.RegisterType((*DeriveResponseExt)(nil), "runtime.DeriveResponseExt") proto.RegisterType((*DeriveResponseExt_Opened)(nil), "runtime.DeriveResponseExt.Opened") proto.RegisterType((*DeriveResponseExt_Published)(nil), "runtime.DeriveResponseExt.Published") proto.RegisterType((*DeriveResponseExt_Flushed)(nil), "runtime.DeriveResponseExt.Flushed") + proto.RegisterType((*MaterializeRequestExt)(nil), "runtime.MaterializeRequestExt") + proto.RegisterType((*MaterializeResponseExt)(nil), "runtime.MaterializeResponseExt") } func init() { @@ -543,79 +751,87 @@ func init() { } var fileDescriptor_73af6e0737ce390c = []byte{ - // 1151 bytes of a gzipped FileDescriptorProto - 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x9c, 0x56, 0xcd, 0x6e, 0xdb, 0x46, - 0x17, 0x8d, 0x3e, 0xcb, 0xfa, 0xb9, 0x72, 0x1c, 0x7b, 0x10, 0x7c, 0x55, 0xd4, 0x56, 0x56, 0xd4, - 0x04, 0x10, 0xea, 0x94, 0x4a, 0x95, 0x6e, 0x0a, 0x14, 0x01, 0x2a, 0x3b, 0x46, 0x94, 0xc6, 0x8d, - 0x3b, 0x4a, 0xba, 0xe8, 0x86, 0x18, 0x73, 0xae, 0x28, 0x56, 0x14, 0x87, 0x99, 0x21, 0x15, 0x3b, - 0x4f, 0xd3, 0x45, 0x9f, 0xa4, 0xab, 0x2c, 0x8b, 0x3e, 0x80, 0x81, 0xa6, 0xeb, 0x3e, 0x40, 0xbd, - 0x2a, 0xe6, 0x87, 0x72, 0x1c, 0xa7, 0x41, 0xe0, 0x85, 0xed, 0xe1, 0xb9, 0xe7, 0xdc, 0xb9, 0x77, - 0x78, 0xe6, 0xd2, 0xd0, 0x0d, 0x45, 0x3f, 0x95, 0x22, 0x13, 0x81, 0x88, 0x55, 0x5f, 0xe6, 0x49, - 0x16, 0xcd, 0xb1, 0xf8, 0xeb, 0x99, 0x08, 0xa9, 0xba, 0xc7, 0x56, 0xfb, 0x50, 0x8a, 0x19, 0xca, - 0xa5, 0x60, 0xb9, 0xb0, 0xc4, 0x56, 0x27, 0x10, 0x89, 0xca, 0xe7, 0xef, 0x61, 0x7c, 0x72, 0x6e, - 0xbb, 0x49, 0x2c, 0x5e, 0x98, 0x5f, 0x2e, 0xda, 0x3a, 0x17, 0x15, 0xa9, 0xf9, 0x71, 0xb1, 0xeb, - 0xa1, 0x08, 0x85, 0x59, 0xf6, 0xf5, 0xca, 0xa2, 0xdd, 0x19, 0x6c, 0x3e, 0x65, 0x6a, 0x36, 0x46, - 0xb9, 0x88, 0x02, 0xdc, 0x11, 0xc9, 0x24, 0x0a, 0x49, 0x1b, 0x1a, 0xb1, 0x08, 0xfd, 0x49, 0x14, - 0xa3, 0x3f, 0xe1, 0xcd, 0x52, 0xa7, 0xd4, 0x5b, 0xa5, 0xf5, 0x58, 0x84, 0x7b, 0x51, 0x8c, 0x7b, - 0x9c, 0x7c, 0x0c, 0xf5, 0x8c, 0xa9, 0x99, 0x9f, 0xb0, 0x39, 0x36, 0xff, 0xd7, 0x29, 0xf5, 0xea, - 0xb4, 0xa6, 0x81, 0xef, 0xd9, 0x1c, 0xc9, 0x0d, 0xa8, 0xe5, 0x5c, 0xf9, 0x29, 0xcb, 0xa6, 0xcd, - 0x15, 0x13, 0xab, 0xe6, 0x5c, 0x1d, 0xb0, 0x6c, 0xda, 0xfd, 0xa7, 0x0c, 0xeb, 0xe3, 0x69, 0x3e, - 0x99, 0xc4, 0x48, 0xf1, 0x79, 0x8e, 0x2a, 0x23, 0x23, 0xa8, 0xfe, 0x2c, 0x72, 0x99, 0xb0, 0xd8, - 0x6c, 0x53, 0x1f, 0xf6, 0x4f, 0x4f, 0xb6, 0xb6, 0x43, 0xe1, 0x85, 0xec, 0x25, 0x66, 0x19, 0x7a, - 0x1c, 0x17, 0xfd, 0x40, 0x48, 0xec, 0xbf, 0x75, 0x74, 0xde, 0x23, 0x2b, 0xa3, 0x85, 0x9e, 0xfc, - 0x1f, 0x2a, 0x12, 0xd3, 0x98, 0x1d, 0x9b, 0x92, 0x6a, 0xd4, 0x3d, 0xe9, 0x82, 0x0e, 0xf3, 0x28, - 0xe6, 0x7e, 0xc4, 0x8b, 0x82, 0xcc, 0xf3, 0x88, 0x93, 0x3d, 0xa8, 0x88, 0xc9, 0x44, 0x61, 0xd6, - 0x2c, 0x77, 0x4a, 0xbd, 0x95, 0xa1, 0x77, 0x7a, 0xb2, 0xf5, 0xf9, 0x87, 0x6c, 0xfe, 0xc4, 0xa8, - 0xa8, 0x53, 0x93, 0x7d, 0x00, 0x4c, 0xb8, 0xef, 0x72, 0xad, 0x5e, 0x2a, 0x57, 0x1d, 0x13, 0x6e, - 0x97, 0x64, 0x1b, 0x56, 0x25, 0x4b, 0x42, 0x6c, 0x56, 0x3a, 0xa5, 0x5e, 0x63, 0x70, 0xcd, 0x33, - 0xaf, 0x98, 0x6a, 0x68, 0x9c, 0x62, 0x30, 0x2c, 0xbf, 0x3a, 0xd9, 0xba, 0x42, 0x2d, 0x87, 0x8c, - 0xa1, 0x11, 0x08, 0x21, 0x79, 
0x94, 0xb0, 0x4c, 0xc8, 0x66, 0xd5, 0x9c, 0xe2, 0x97, 0xa7, 0x27, - 0x5b, 0x5f, 0xbc, 0x6b, 0xf3, 0x0b, 0x06, 0xf3, 0xc6, 0x53, 0x26, 0xf9, 0x68, 0x97, 0xbe, 0x99, - 0x85, 0xdc, 0x05, 0x90, 0xa8, 0x44, 0x9c, 0x67, 0x91, 0x48, 0x9a, 0x35, 0x53, 0xc6, 0x86, 0xb7, - 0xd4, 0x3c, 0x44, 0xc6, 0x51, 0xd2, 0x37, 0x38, 0xe4, 0x33, 0xb8, 0xaa, 0xec, 0xab, 0xf5, 0xa3, - 0x84, 0xe3, 0x51, 0xb3, 0xde, 0x29, 0xf5, 0xae, 0xd2, 0x35, 0x07, 0x8e, 0x34, 0x46, 0xbe, 0x02, - 0xe0, 0x28, 0xa3, 0x05, 0x33, 0x69, 0xc1, 0xa4, 0xbd, 0x6e, 0xbb, 0xdb, 0x11, 0x71, 0x8c, 0x81, - 0xc6, 0x75, 0x8b, 0xf4, 0x0d, 0x1e, 0xd9, 0x81, 0x6b, 0x73, 0x96, 0xa1, 0x8c, 0x58, 0x1c, 0xbd, - 0xb4, 0xd2, 0x86, 0x91, 0xde, 0xb0, 0xd2, 0xfd, 0xf3, 0x41, 0xa3, 0x7f, 0x5b, 0xd1, 0xfd, 0xa3, - 0x0c, 0xd7, 0x96, 0xde, 0x53, 0xa9, 0x48, 0x14, 0x92, 0x1e, 0x54, 0x54, 0xc6, 0xb2, 0x5c, 0x19, - 0xef, 0xad, 0x0f, 0x36, 0xbc, 0xe2, 0x78, 0xbc, 0xb1, 0xc1, 0xa9, 0x8b, 0x6b, 0xe6, 0xd4, 0xf4, - 0x6c, 0xbc, 0xf5, 0xae, 0xb3, 0x70, 0x71, 0x72, 0x1b, 0xd6, 0x33, 0x94, 0xf3, 0x28, 0x61, 0xb1, - 0x8f, 0x52, 0x0a, 0xe9, 0x3c, 0x77, 0xb5, 0x40, 0x1f, 0x68, 0x90, 0xfc, 0x00, 0x6b, 0x12, 0x19, - 0xf7, 0xb3, 0xa9, 0x14, 0x79, 0x38, 0xbd, 0xa4, 0xff, 0x1a, 0x3a, 0xc7, 0x53, 0x9b, 0x42, 0x9b, - 0xf0, 0x85, 0x8c, 0x32, 0xf4, 0x75, 0x25, 0x97, 0x35, 0xa1, 0xc9, 0xa0, 0x5b, 0x22, 0x23, 0x58, - 0x65, 0x12, 0x13, 0x66, 0x4c, 0xb8, 0x36, 0xbc, 0x77, 0x7a, 0xb2, 0xd5, 0x0f, 0xa3, 0x6c, 0x9a, - 0x1f, 0x7a, 0x81, 0x98, 0xf7, 0x51, 0x65, 0x39, 0x93, 0xc7, 0x76, 0x04, 0x5d, 0x18, 0x4a, 0xde, - 0xb7, 0x5a, 0x4a, 0x6d, 0x06, 0x72, 0x1b, 0xca, 0x5c, 0x04, 0xaa, 0x59, 0xed, 0xac, 0xf4, 0x1a, - 0x83, 0x86, 0x7d, 0x6b, 0xe3, 0x38, 0x0a, 0xd0, 0x59, 0xd9, 0x84, 0xc9, 0x43, 0xa8, 0xda, 0x1b, - 0xa4, 0x9a, 0xb5, 0xce, 0xca, 0x25, 0xaa, 0x2f, 0xe4, 0xda, 0x67, 0x79, 0x1e, 0x71, 0x3f, 0x65, - 0x32, 0x53, 0xcd, 0xba, 0xd9, 0xd6, 0xdd, 0xa2, 0x67, 0xcf, 0x46, 0xbb, 0x07, 0x1a, 0x76, 0x5b, - 0xd7, 0x35, 0xd1, 0x00, 0xda, 0xf4, 0x29, 0x0b, 0x66, 0xc8, 0xfd, 0x19, 0x1e, 0x37, 0xe1, 0xbf, - 0x8a, 0xad, 0x5b, 0xd2, 0x77, 0x78, 0xdc, 0xe5, 0xb0, 0x49, 0x45, 0x30, 0x53, 0xbb, 0xc3, 0x5d, - 0x54, 0x81, 0x8c, 0x52, 0x7d, 0x77, 0xee, 0x00, 0x91, 0x1a, 0xe4, 0x87, 0x3e, 0x26, 0x0b, 0x7f, - 0x8e, 0xf3, 0x34, 0x93, 0xc6, 0x61, 0x15, 0xba, 0xe1, 0x22, 0x0f, 0x92, 0xc5, 0xbe, 0xc1, 0xc9, - 0x4d, 0x58, 0x2b, 0xd8, 0x66, 0x64, 0xda, 0x71, 0xda, 0x70, 0x98, 0x19, 0x9b, 0x7f, 0x97, 0x60, - 0x63, 0x57, 0x5f, 0x87, 0x62, 0x6a, 0x3e, 0x38, 0xca, 0xc8, 0x00, 0xca, 0x22, 0xc5, 0xc4, 0xe4, - 0x6d, 0x0c, 0xda, 0x5e, 0xf1, 0xc5, 0x79, 0x9b, 0xe8, 0x3d, 0x49, 0x31, 0xa1, 0x86, 0xdb, 0xfa, - 0xb5, 0x04, 0x65, 0xfd, 0x48, 0xb6, 0x41, 0x4f, 0x73, 0x3f, 0xc6, 0x05, 0xc6, 0xce, 0xfb, 0xeb, - 0x9e, 0xfe, 0x54, 0x3c, 0x16, 0xa1, 0xf7, 0x58, 0xa3, 0xb4, 0x16, 0x8b, 0xd0, 0xac, 0xc8, 0x2d, - 0x58, 0x57, 0xcf, 0x63, 0x6d, 0xac, 0xc5, 0x44, 0xf9, 0xb9, 0x8c, 0x5c, 0x8d, 0x6b, 0x16, 0xfd, - 0x71, 0xa2, 0x9e, 0xc9, 0x88, 0x8c, 0xce, 0xba, 0xe6, 0xcb, 0xb3, 0x30, 0xde, 0x6f, 0x0c, 0x5a, - 0xcb, 0xea, 0x2e, 0x9c, 0x16, 0xdd, 0x74, 0xaa, 0x33, 0xa8, 0xfb, 0xdb, 0x0a, 0x6c, 0x16, 0x6d, - 0xd8, 0x9b, 0xaa, 0x1b, 0xfe, 0x1a, 0x2a, 0xba, 0x09, 0xe4, 0xae, 0xe5, 0x9b, 0x17, 0x5a, 0x5e, - 0x72, 0x4d, 0xcf, 0xc8, 0xa9, 0x13, 0x90, 0x21, 0xd4, 0xd3, 0xfc, 0x30, 0x8e, 0xd4, 0x14, 0xb9, - 0xbb, 0xc0, 0xb7, 0xde, 0xa3, 0x3e, 0x28, 0xb8, 0xf4, 0x4c, 0x46, 0xbe, 0x81, 0xea, 0x24, 0xce, - 0x4d, 0x06, 0xdb, 0x54, 0xf7, 0x3d, 0x19, 0xf6, 0x2c, 0x93, 0x16, 0x92, 0xd6, 0x3e, 0x54, 0x6c, - 0x4d, 0x64, 0x07, 0x88, 0xd3, 0xf9, 0xc1, 0x14, 0x83, 
0x59, 0x2a, 0xa2, 0x24, 0x73, 0x2d, 0x5d, - 0x3f, 0x9b, 0x3f, 0x3b, 0xcb, 0x18, 0xdd, 0x74, 0xfc, 0x33, 0xa8, 0x95, 0x41, 0x7d, 0x59, 0xa4, - 0xfe, 0x1a, 0xcf, 0xd9, 0x91, 0x1f, 0xc4, 0x22, 0x98, 0x39, 0x9b, 0xd5, 0xe6, 0xec, 0x68, 0x47, - 0x3f, 0x93, 0x4f, 0x01, 0x66, 0x78, 0xec, 0x5b, 0xcb, 0x9a, 0xde, 0xd7, 0x68, 0x7d, 0x86, 0xc7, - 0x07, 0x06, 0x20, 0xdb, 0xb0, 0xa9, 0xef, 0x48, 0xa4, 0x47, 0xa4, 0x2a, 0x58, 0x2b, 0x86, 0xb5, - 0x71, 0x16, 0xb0, 0xe4, 0xd6, 0x36, 0x54, 0x5d, 0x63, 0xa4, 0x03, 0xab, 0x7a, 0x32, 0x2a, 0x57, - 0x38, 0x18, 0xf3, 0xe8, 0x99, 0xa9, 0xa8, 0x0d, 0x0c, 0x1e, 0x41, 0xcd, 0x8d, 0x5b, 0x49, 0xee, - 0x43, 0xd5, 0xad, 0xc9, 0x47, 0xcb, 0x53, 0x3b, 0xff, 0x8f, 0x40, 0xab, 0x79, 0x31, 0x60, 0xcf, - 0xf3, 0x6e, 0x69, 0x78, 0xff, 0xd5, 0x9f, 0xed, 0x2b, 0xaf, 0x5e, 0xb7, 0x4b, 0xbf, 0xbf, 0x6e, - 0x97, 0x7e, 0xf9, 0xab, 0x5d, 0xfa, 0xe9, 0xce, 0x07, 0x4d, 0x22, 0x97, 0xf1, 0xb0, 0x62, 0xa0, - 0x7b, 0xff, 0x06, 0x00, 0x00, 0xff, 0xff, 0xd9, 0x53, 0xd4, 0x2f, 0xac, 0x09, 0x00, 0x00, + // 1267 bytes of a gzipped FileDescriptorProto + 0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xa4, 0x56, 0x4f, 0x6f, 0x13, 0x47, + 0x14, 0xc7, 0xc4, 0xb1, 0xb3, 0xcf, 0x49, 0x88, 0x47, 0x14, 0x8c, 0xdb, 0x26, 0xc6, 0x05, 0x29, + 0x22, 0x74, 0x4d, 0x43, 0x55, 0xa9, 0x52, 0x45, 0x45, 0x1c, 0x22, 0x4c, 0x1b, 0x48, 0xc7, 0xd0, + 0x43, 0x55, 0x69, 0x35, 0xde, 0x19, 0xdb, 0x53, 0xaf, 0x77, 0x96, 0xd9, 0xd9, 0xfc, 0xe1, 0x93, + 0xf4, 0xd8, 0x8f, 0xc3, 0xb1, 0xea, 0x07, 0x88, 0x54, 0x7a, 0xec, 0xb5, 0x97, 0xe6, 0x54, 0xcd, + 0x9f, 0x5d, 0x13, 0x42, 0x11, 0x0a, 0x87, 0xc4, 0x3b, 0xbf, 0xf7, 0x7e, 0xbf, 0x79, 0x6f, 0xf6, + 0xbd, 0x37, 0x0b, 0xed, 0x91, 0xe8, 0x24, 0x52, 0x28, 0x11, 0x8a, 0x28, 0xed, 0xc8, 0x2c, 0x56, + 0x7c, 0xca, 0xf2, 0x5f, 0xdf, 0x58, 0x50, 0xd5, 0x2d, 0x9b, 0xab, 0x03, 0x29, 0x26, 0x4c, 0x16, + 0x84, 0xe2, 0xc1, 0x3a, 0x36, 0x5b, 0xa1, 0x88, 0xd3, 0x6c, 0xfa, 0x0e, 0x8f, 0x4f, 0x4e, 0x6d, + 0x37, 0x8c, 0xc4, 0x81, 0xf9, 0xe7, 0xac, 0xcd, 0x53, 0x56, 0x91, 0x98, 0x3f, 0x67, 0xbb, 0x3c, + 0x12, 0x23, 0x61, 0x1e, 0x3b, 0xfa, 0xc9, 0xa2, 0xed, 0x5f, 0x4b, 0x50, 0x7f, 0x4a, 0xd2, 0x49, + 0x9f, 0xc9, 0x7d, 0x1e, 0xb2, 0xae, 0x88, 0x87, 0x7c, 0x84, 0x56, 0xa1, 0x16, 0x89, 0x51, 0x30, + 0xe4, 0x11, 0x0b, 0x86, 0xb4, 0x51, 0x6a, 0x95, 0xd6, 0xe7, 0xb1, 0x17, 0x89, 0xd1, 0x0e, 0x8f, + 0xd8, 0x0e, 0x45, 0x1f, 0x83, 0xa7, 0x48, 0x3a, 0x09, 0x62, 0x32, 0x65, 0x8d, 0x8b, 0xad, 0xd2, + 0xba, 0x87, 0x17, 0x34, 0xf0, 0x98, 0x4c, 0x19, 0xba, 0x06, 0x0b, 0x19, 0x4d, 0x83, 0x84, 0xa8, + 0x71, 0x63, 0xce, 0xd8, 0xaa, 0x19, 0x4d, 0xf7, 0x88, 0x1a, 0xa3, 0x0d, 0xa8, 0x87, 0x22, 0x56, + 0x84, 0xc7, 0x4c, 0x06, 0x31, 0x53, 0x07, 0x42, 0x4e, 0x1a, 0x65, 0xe3, 0xb3, 0x52, 0x18, 0x1e, + 0x5b, 0xbc, 0xfd, 0x6f, 0x19, 0x96, 0xfb, 0xe3, 0x6c, 0x38, 0x8c, 0x18, 0x66, 0xcf, 0x33, 0x96, + 0x2a, 0xd4, 0x83, 0xea, 0x2f, 0x22, 0x93, 0x31, 0x89, 0x4c, 0x4c, 0xde, 0x56, 0xe7, 0xe4, 0x78, + 0x6d, 0x63, 0x24, 0xfc, 0x11, 0x79, 0xc1, 0x94, 0x62, 0x3e, 0x65, 0xfb, 0x9d, 0x50, 0x48, 0xd6, + 0x79, 0xe3, 0xa0, 0xfd, 0x47, 0x96, 0x86, 0x73, 0x3e, 0xba, 0x02, 0x15, 0xc9, 0x92, 0x88, 0x1c, + 0x99, 0xf8, 0x17, 0xb0, 0x5b, 0xe9, 0xe8, 0x07, 0x19, 0x8f, 0x68, 0xc0, 0x69, 0x1e, 0xbd, 0x59, + 0xf7, 0x28, 0xda, 0x81, 0x8a, 0x18, 0x0e, 0x53, 0xa6, 0x4c, 0xc8, 0x73, 0x5b, 0xfe, 0xc9, 0xf1, + 0xda, 0xad, 0xf7, 0xd9, 0xfc, 0x89, 0x61, 0x61, 0xc7, 0x46, 0xbb, 0x00, 0x2c, 0xa6, 0x81, 0xd3, + 0x9a, 0x3f, 0x97, 0x96, 0xc7, 0x62, 0x6a, 0x1f, 0xd1, 0x06, 0xcc, 0x4b, 0x12, 0x8f, 0x58, 0xa3, + 0xd2, 0x2a, 0xad, 0xd7, 0x36, 0x2f, 
0xf9, 0xa6, 0x20, 0xb0, 0x86, 0xfa, 0x09, 0x0b, 0xb7, 0xca, + 0x2f, 0x8f, 0xd7, 0x2e, 0x60, 0xeb, 0x83, 0xfa, 0x50, 0x0b, 0x85, 0x90, 0x94, 0xc7, 0x44, 0x09, + 0xd9, 0xa8, 0x9a, 0x53, 0xfc, 0xe2, 0xe4, 0x78, 0xed, 0xf3, 0xb7, 0x6d, 0x7e, 0xa6, 0x1c, 0xfd, + 0xfe, 0x98, 0x48, 0xda, 0xdb, 0xc6, 0xaf, 0xab, 0xa0, 0x3b, 0x00, 0x92, 0xa5, 0x22, 0xca, 0x14, + 0x17, 0x71, 0x63, 0xc1, 0x84, 0xb1, 0xe2, 0x17, 0x9c, 0x87, 0x8c, 0x50, 0x26, 0xf1, 0x6b, 0x3e, + 0xe8, 0x33, 0x58, 0x4a, 0xed, 0xab, 0x0d, 0x78, 0x4c, 0xd9, 0x61, 0xc3, 0x6b, 0x95, 0xd6, 0x97, + 0xf0, 0xa2, 0x03, 0x7b, 0x1a, 0x43, 0x5f, 0x02, 0x50, 0x26, 0xf9, 0x3e, 0x31, 0xb2, 0x60, 0x64, + 0x2f, 0xdb, 0xec, 0xba, 0x22, 0x8a, 0x58, 0xa8, 0x71, 0x9d, 0x22, 0x7e, 0xcd, 0x0f, 0x75, 0xe1, + 0xd2, 0x94, 0x28, 0x26, 0x39, 0x89, 0xf8, 0x0b, 0x4b, 0xad, 0x19, 0xea, 0x35, 0x4b, 0xdd, 0x3d, + 0x6d, 0x34, 0xfc, 0x37, 0x19, 0xed, 0x3f, 0xca, 0x70, 0xa9, 0xa8, 0xbd, 0x34, 0x11, 0x71, 0xca, + 0xd0, 0x3a, 0x54, 0x52, 0x45, 0x54, 0x96, 0x9a, 0xda, 0x5b, 0xde, 0x5c, 0xf1, 0xf3, 0xe3, 0xf1, + 0xfb, 0x06, 0xc7, 0xce, 0xae, 0x3d, 0xc7, 0x26, 0x67, 0x53, 0x5b, 0x6f, 0x3b, 0x0b, 0x67, 0x47, + 0x37, 0x61, 0x59, 0x31, 0x39, 0xe5, 0x31, 0x89, 0x02, 0x26, 0xa5, 0x90, 0xae, 0xe6, 0x96, 0x72, + 0xf4, 0x81, 0x06, 0xd1, 0x0f, 0xb0, 0x28, 0x19, 0xa1, 0x81, 0x1a, 0x4b, 0x91, 0x8d, 0xc6, 0xe7, + 0xac, 0xbf, 0x9a, 0xd6, 0x78, 0x6a, 0x25, 0x74, 0x11, 0x1e, 0x48, 0xae, 0x58, 0xa0, 0x23, 0x39, + 0x6f, 0x11, 0x1a, 0x05, 0x9d, 0x12, 0xea, 0xc1, 0x3c, 0x91, 0x2c, 0x26, 0xa6, 0x08, 0x17, 0xb7, + 0xee, 0x9e, 0x1c, 0xaf, 0x75, 0x46, 0x5c, 0x8d, 0xb3, 0x81, 0x1f, 0x8a, 0x69, 0x87, 0xa5, 0x2a, + 0x23, 0xf2, 0xc8, 0x0e, 0xac, 0x33, 0x23, 0xcc, 0xbf, 0xaf, 0xa9, 0xd8, 0x2a, 0xa0, 0x9b, 0x50, + 0xa6, 0x22, 0x4c, 0x1b, 0xd5, 0xd6, 0xdc, 0x7a, 0x6d, 0xb3, 0x66, 0xdf, 0x5a, 0x3f, 0xe2, 0x21, + 0x73, 0xa5, 0x6c, 0xcc, 0xe8, 0x21, 0x54, 0x6d, 0x07, 0xa5, 0x8d, 0x85, 0xd6, 0xdc, 0x39, 0xa2, + 0xcf, 0xe9, 0xba, 0xce, 0xb2, 0x8c, 0xd3, 0x20, 0x21, 0x52, 0xa5, 0x0d, 0xcf, 0x6c, 0xeb, 0xba, + 0xe8, 0xd9, 0xb3, 0xde, 0xf6, 0x9e, 0x86, 0xdd, 0xd6, 0x9e, 0x76, 0x34, 0x80, 0x2e, 0xfa, 0x84, + 0x84, 0x13, 0x46, 0x83, 0x09, 0x3b, 0x6a, 0xc0, 0xff, 0x05, 0xeb, 0x59, 0xa7, 0xef, 0xd8, 0x51, + 0x9b, 0x42, 0x1d, 0x8b, 0x70, 0x92, 0x6e, 0x6f, 0x6d, 0xb3, 0x34, 0x94, 0x3c, 0xd1, 0xbd, 0x73, + 0x1b, 0x90, 0xd4, 0x20, 0x1d, 0x04, 0x2c, 0xde, 0x0f, 0xa6, 0x6c, 0x9a, 0x28, 0x69, 0x2a, 0xac, + 0x82, 0x57, 0x9c, 0xe5, 0x41, 0xbc, 0xbf, 0x6b, 0x70, 0x74, 0x1d, 0x16, 0x73, 0x6f, 0x33, 0x5f, + 0xed, 0xec, 0xad, 0x39, 0x4c, 0xcf, 0xd8, 0xf6, 0xcf, 0xe0, 0x75, 0xf3, 0x51, 0x8a, 0xae, 0x42, + 0x95, 0x27, 0x01, 0xa1, 0xd4, 0x4a, 0x7a, 0xb8, 0xc2, 0x93, 0xfb, 0x94, 0x4a, 0xf4, 0x15, 0x2c, + 0xb9, 0xf9, 0x1b, 0x24, 0x42, 0xa7, 0x7d, 0xd1, 0x24, 0x50, 0xb7, 0x09, 0xb8, 0x11, 0xbc, 0x27, + 0xa4, 0xc2, 0x8b, 0xf1, 0x6c, 0x91, 0xb6, 0xbf, 0x85, 0x7a, 0x97, 0x24, 0x2a, 0x93, 0xf9, 0x4c, + 0x7e, 0x70, 0xa8, 0xd0, 0x2d, 0xa8, 0x44, 0x64, 0xc0, 0x22, 0xdb, 0x19, 0xb5, 0x4d, 0xe4, 0xeb, + 0x6b, 0xc7, 0x8c, 0x8a, 0xef, 0x35, 0xce, 0xe3, 0x11, 0x76, 0x1e, 0xed, 0x1d, 0x40, 0x85, 0x80, + 0x6d, 0x2c, 0xad, 0x70, 0x07, 0xbc, 0x62, 0xfe, 0x17, 0x22, 0xf9, 0x25, 0x5a, 0xa4, 0x83, 0x67, + 0x4e, 0xed, 0xbf, 0x4b, 0xb0, 0xb2, 0xad, 0xbb, 0xfe, 0x9c, 0x81, 0xa0, 0x4d, 0x28, 0x8b, 0x84, + 0xc5, 0xae, 0x45, 0x57, 0x8b, 0xdd, 0xde, 0x14, 0xf5, 0x9f, 0x24, 0x2c, 0xc6, 0xc6, 0xb7, 0x79, + 0x00, 0x65, 0xbd, 0x42, 0x37, 0x60, 0x39, 0x7d, 0x1e, 0xe9, 0xee, 0xd9, 0x1f, 0xa6, 0x41, 0x26, + 0xb9, 0x3b, 0xdd, 0x45, 0x8b, 0xfe, 0x38, 0x4c, 0x9f, 0x49, 
0x8e, 0x7a, 0xb3, 0x57, 0x4b, 0x8b, + 0x17, 0xee, 0xf6, 0x6b, 0x16, 0xfb, 0x9d, 0x29, 0x09, 0x5c, 0x77, 0xac, 0x19, 0xd4, 0xfe, 0x67, + 0x0e, 0xea, 0x79, 0x60, 0x1f, 0x70, 0x6a, 0xe8, 0x6b, 0xa8, 0xe8, 0x44, 0x18, 0x75, 0x61, 0x5c, + 0x3f, 0x93, 0x76, 0xa1, 0x6e, 0xf2, 0x66, 0x14, 0x3b, 0x02, 0xda, 0x02, 0x2f, 0xc9, 0x06, 0x11, + 0x4f, 0xc7, 0xcc, 0xde, 0x8c, 0xb5, 0xcd, 0x1b, 0xef, 0x60, 0xef, 0xe5, 0xbe, 0x78, 0x46, 0x43, + 0xdf, 0x40, 0x75, 0x18, 0x65, 0x46, 0xa1, 0x6c, 0x14, 0xda, 0xef, 0x50, 0xd8, 0xb1, 0x9e, 0x38, + 0xa7, 0x34, 0x77, 0xa1, 0x62, 0x63, 0x42, 0x5d, 0x40, 0x8e, 0x17, 0x84, 0x63, 0x16, 0x4e, 0x12, + 0xc1, 0x63, 0xe5, 0x4e, 0xe0, 0xf2, 0x6c, 0x2c, 0x77, 0x0b, 0x1b, 0xae, 0x3b, 0xff, 0x19, 0xd4, + 0x54, 0xe0, 0x15, 0x41, 0xea, 0x2f, 0x9a, 0x29, 0x39, 0x0c, 0xc2, 0x48, 0x84, 0x13, 0xd7, 0x7d, + 0x0b, 0x53, 0x72, 0xd8, 0xd5, 0x6b, 0xf4, 0x29, 0xc0, 0x84, 0x1d, 0x05, 0xb6, 0x93, 0xcd, 0xc9, + 0x2d, 0x62, 0x6f, 0xc2, 0x8e, 0xf6, 0x0c, 0xa0, 0xbf, 0x6a, 0xf4, 0xe8, 0xe0, 0xfa, 0xe6, 0x48, + 0x73, 0xaf, 0x39, 0xe3, 0xb5, 0x32, 0x33, 0x58, 0xe7, 0xe6, 0x06, 0x54, 0x5d, 0x62, 0xa8, 0x05, + 0xf3, 0xfa, 0xc2, 0xc8, 0x8b, 0x15, 0x6c, 0xb1, 0x6a, 0x04, 0x5b, 0x43, 0xbb, 0x0b, 0x1f, 0xcd, + 0xae, 0xab, 0xf3, 0x76, 0xdc, 0x23, 0xb8, 0x72, 0x4a, 0xe4, 0x03, 0xea, 0x67, 0xf3, 0x11, 0x2c, + 0xb8, 0x6b, 0x51, 0xa2, 0x7b, 0x50, 0x75, 0xcf, 0xe8, 0x6a, 0xc1, 0x3a, 0xfd, 0xc1, 0xd6, 0x6c, + 0x9c, 0x35, 0xd8, 0xed, 0xef, 0x94, 0xb6, 0xee, 0xbd, 0xfc, 0x73, 0xf5, 0xc2, 0xcb, 0x57, 0xab, + 0xa5, 0xdf, 0x5f, 0xad, 0x96, 0x7e, 0xfb, 0x6b, 0xb5, 0xf4, 0xd3, 0xed, 0xf7, 0xba, 0x31, 0x9c, + 0xe2, 0xa0, 0x62, 0xa0, 0xbb, 0xff, 0x05, 0x00, 0x00, 0xff, 0xff, 0xb9, 0x8a, 0xa5, 0x5c, 0x82, + 0x0b, 0x00, 0x00, } // Reference imports to suppress errors if they are not otherwise used. 
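// ---- Editor's illustrative sketch (not part of the generated file or this diff) ----
// The surrounding hunks register a new Container message and thread it through the
// CaptureResponseExt, DeriveResponseExt, and MaterializeResponseExt envelopes, so the
// runtime reports a started connector's IP address and network ports via these extension
// messages rather than on materialize Request.Validate (whose network_ports field is
// removed earlier in this diff). A minimal round trip through the generated
// Marshal/Unmarshal code could look like the function below; the import paths and the
// example values are assumptions for illustration, not taken from this change.

package runtime_example

import (
	"fmt"

	pf "github.com/estuary/flow/go/protocols/flow"    // assumed import path
	pr "github.com/estuary/flow/go/protocols/runtime" // assumed import path
)

func exampleContainerRoundTrip() error {
	// Build a response extension describing a running connector container,
	// using only fields introduced by this diff.
	ext := &pr.CaptureResponseExt{
		Container: &pr.Container{
			IpAddr:       "172.17.0.2",          // hypothetical container address
			NetworkPorts: []*pf.NetworkPort{{}}, // ports advertised by the connector image
		},
	}

	// Encode with the generated gogo-proto Marshal ...
	b, err := ext.Marshal()
	if err != nil {
		return err
	}

	// ... and decode it back, as the runtime's peer would.
	var out pr.CaptureResponseExt
	if err := out.Unmarshal(b); err != nil {
		return err
	}
	fmt.Println(out.Container.IpAddr) // prints the reported container address
	return nil
}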
@@ -749,6 +965,13 @@ func (m *TaskServiceConfig) MarshalToSizedBuffer(dAtA []byte) (int, error) { i -= len(m.XXX_unrecognized) copy(dAtA[i:], m.XXX_unrecognized) } + if len(m.ContainerNetwork) > 0 { + i -= len(m.ContainerNetwork) + copy(dAtA[i:], m.ContainerNetwork) + i = encodeVarintRuntime(dAtA, i, uint64(len(m.ContainerNetwork))) + i-- + dAtA[i] = 0x22 + } if len(m.UdsPath) > 0 { i -= len(m.UdsPath) copy(dAtA[i:], m.UdsPath) @@ -1059,7 +1282,7 @@ func (m *RocksDBDescriptor) MarshalToSizedBuffer(dAtA []byte) (int, error) { return len(dAtA) - i, nil } -func (m *DeriveRequestExt) Marshal() (dAtA []byte, err error) { +func (m *Container) Marshal() (dAtA []byte, err error) { size := m.ProtoSize() dAtA = make([]byte, size) n, err := m.MarshalToSizedBuffer(dAtA[:size]) @@ -1069,12 +1292,12 @@ func (m *DeriveRequestExt) Marshal() (dAtA []byte, err error) { return dAtA[:n], nil } -func (m *DeriveRequestExt) MarshalTo(dAtA []byte) (int, error) { +func (m *Container) MarshalTo(dAtA []byte) (int, error) { size := m.ProtoSize() return m.MarshalToSizedBuffer(dAtA[:size]) } -func (m *DeriveRequestExt) MarshalToSizedBuffer(dAtA []byte) (int, error) { +func (m *Container) MarshalToSizedBuffer(dAtA []byte) (int, error) { i := len(dAtA) _ = i var l int @@ -1083,22 +1306,31 @@ func (m *DeriveRequestExt) MarshalToSizedBuffer(dAtA []byte) (int, error) { i -= len(m.XXX_unrecognized) copy(dAtA[i:], m.XXX_unrecognized) } - if m.Open != nil { - { - size, err := m.Open.MarshalToSizedBuffer(dAtA[:i]) - if err != nil { - return 0, err + if len(m.NetworkPorts) > 0 { + for iNdEx := len(m.NetworkPorts) - 1; iNdEx >= 0; iNdEx-- { + { + size, err := m.NetworkPorts[iNdEx].MarshalToSizedBuffer(dAtA[:i]) + if err != nil { + return 0, err + } + i -= size + i = encodeVarintRuntime(dAtA, i, uint64(size)) } - i -= size - i = encodeVarintRuntime(dAtA, i, uint64(size)) + i-- + dAtA[i] = 0x12 } + } + if len(m.IpAddr) > 0 { + i -= len(m.IpAddr) + copy(dAtA[i:], m.IpAddr) + i = encodeVarintRuntime(dAtA, i, uint64(len(m.IpAddr))) i-- dAtA[i] = 0xa } return len(dAtA) - i, nil } -func (m *DeriveRequestExt_Open) Marshal() (dAtA []byte, err error) { +func (m *CaptureRequestExt) Marshal() (dAtA []byte, err error) { size := m.ProtoSize() dAtA = make([]byte, size) n, err := m.MarshalToSizedBuffer(dAtA[:size]) @@ -1108,12 +1340,12 @@ func (m *DeriveRequestExt_Open) Marshal() (dAtA []byte, err error) { return dAtA[:n], nil } -func (m *DeriveRequestExt_Open) MarshalTo(dAtA []byte) (int, error) { +func (m *CaptureRequestExt) MarshalTo(dAtA []byte) (int, error) { size := m.ProtoSize() return m.MarshalToSizedBuffer(dAtA[:size]) } -func (m *DeriveRequestExt_Open) MarshalToSizedBuffer(dAtA []byte) (int, error) { +func (m *CaptureRequestExt) MarshalToSizedBuffer(dAtA []byte) (int, error) { i := len(dAtA) _ = i var l int @@ -1122,9 +1354,9 @@ func (m *DeriveRequestExt_Open) MarshalToSizedBuffer(dAtA []byte) (int, error) { i -= len(m.XXX_unrecognized) copy(dAtA[i:], m.XXX_unrecognized) } - if m.RocksdbDescriptor != nil { + if m.Labels != nil { { - size, err := m.RocksdbDescriptor.MarshalToSizedBuffer(dAtA[:i]) + size, err := m.Labels.MarshalToSizedBuffer(dAtA[:i]) if err != nil { return 0, err } @@ -1132,24 +1364,12 @@ func (m *DeriveRequestExt_Open) MarshalToSizedBuffer(dAtA []byte) (int, error) { i = encodeVarintRuntime(dAtA, i, uint64(size)) } i-- - dAtA[i] = 0x1a - } - if len(m.SqliteVfsUri) > 0 { - i -= len(m.SqliteVfsUri) - copy(dAtA[i:], m.SqliteVfsUri) - i = encodeVarintRuntime(dAtA, i, uint64(len(m.SqliteVfsUri))) - i-- - dAtA[i] = 0x12 - } 
- if m.LogLevel != 0 { - i = encodeVarintRuntime(dAtA, i, uint64(m.LogLevel)) - i-- - dAtA[i] = 0x8 + dAtA[i] = 0xa } return len(dAtA) - i, nil } -func (m *DeriveResponseExt) Marshal() (dAtA []byte, err error) { +func (m *CaptureResponseExt) Marshal() (dAtA []byte, err error) { size := m.ProtoSize() dAtA = make([]byte, size) n, err := m.MarshalToSizedBuffer(dAtA[:size]) @@ -1159,12 +1379,12 @@ func (m *DeriveResponseExt) Marshal() (dAtA []byte, err error) { return dAtA[:n], nil } -func (m *DeriveResponseExt) MarshalTo(dAtA []byte) (int, error) { +func (m *CaptureResponseExt) MarshalTo(dAtA []byte) (int, error) { size := m.ProtoSize() return m.MarshalToSizedBuffer(dAtA[:size]) } -func (m *DeriveResponseExt) MarshalToSizedBuffer(dAtA []byte) (int, error) { +func (m *CaptureResponseExt) MarshalToSizedBuffer(dAtA []byte) (int, error) { i := len(dAtA) _ = i var l int @@ -1173,33 +1393,9 @@ func (m *DeriveResponseExt) MarshalToSizedBuffer(dAtA []byte) (int, error) { i -= len(m.XXX_unrecognized) copy(dAtA[i:], m.XXX_unrecognized) } - if m.Flushed != nil { - { - size, err := m.Flushed.MarshalToSizedBuffer(dAtA[:i]) - if err != nil { - return 0, err - } - i -= size - i = encodeVarintRuntime(dAtA, i, uint64(size)) - } - i-- - dAtA[i] = 0x1a - } - if m.Published != nil { - { - size, err := m.Published.MarshalToSizedBuffer(dAtA[:i]) - if err != nil { - return 0, err - } - i -= size - i = encodeVarintRuntime(dAtA, i, uint64(size)) - } - i-- - dAtA[i] = 0x12 - } - if m.Opened != nil { + if m.Container != nil { { - size, err := m.Opened.MarshalToSizedBuffer(dAtA[:i]) + size, err := m.Container.MarshalToSizedBuffer(dAtA[:i]) if err != nil { return 0, err } @@ -1212,7 +1408,7 @@ func (m *DeriveResponseExt) MarshalToSizedBuffer(dAtA []byte) (int, error) { return len(dAtA) - i, nil } -func (m *DeriveResponseExt_Opened) Marshal() (dAtA []byte, err error) { +func (m *DeriveRequestExt) Marshal() (dAtA []byte, err error) { size := m.ProtoSize() dAtA = make([]byte, size) n, err := m.MarshalToSizedBuffer(dAtA[:size]) @@ -1222,12 +1418,12 @@ func (m *DeriveResponseExt_Opened) Marshal() (dAtA []byte, err error) { return dAtA[:n], nil } -func (m *DeriveResponseExt_Opened) MarshalTo(dAtA []byte) (int, error) { +func (m *DeriveRequestExt) MarshalTo(dAtA []byte) (int, error) { size := m.ProtoSize() return m.MarshalToSizedBuffer(dAtA[:size]) } -func (m *DeriveResponseExt_Opened) MarshalToSizedBuffer(dAtA []byte) (int, error) { +func (m *DeriveRequestExt) MarshalToSizedBuffer(dAtA []byte) (int, error) { i := len(dAtA) _ = i var l int @@ -1236,9 +1432,21 @@ func (m *DeriveResponseExt_Opened) MarshalToSizedBuffer(dAtA []byte) (int, error i -= len(m.XXX_unrecognized) copy(dAtA[i:], m.XXX_unrecognized) } - if m.RuntimeCheckpoint != nil { + if m.Open != nil { { - size, err := m.RuntimeCheckpoint.MarshalToSizedBuffer(dAtA[:i]) + size, err := m.Open.MarshalToSizedBuffer(dAtA[:i]) + if err != nil { + return 0, err + } + i -= size + i = encodeVarintRuntime(dAtA, i, uint64(size)) + } + i-- + dAtA[i] = 0x12 + } + if m.Labels != nil { + { + size, err := m.Labels.MarshalToSizedBuffer(dAtA[:i]) if err != nil { return 0, err } @@ -1251,7 +1459,7 @@ func (m *DeriveResponseExt_Opened) MarshalToSizedBuffer(dAtA []byte) (int, error return len(dAtA) - i, nil } -func (m *DeriveResponseExt_Published) Marshal() (dAtA []byte, err error) { +func (m *DeriveRequestExt_Open) Marshal() (dAtA []byte, err error) { size := m.ProtoSize() dAtA = make([]byte, size) n, err := m.MarshalToSizedBuffer(dAtA[:size]) @@ -1261,12 +1469,12 @@ func (m 
*DeriveResponseExt_Published) Marshal() (dAtA []byte, err error) { return dAtA[:n], nil } -func (m *DeriveResponseExt_Published) MarshalTo(dAtA []byte) (int, error) { +func (m *DeriveRequestExt_Open) MarshalTo(dAtA []byte) (int, error) { size := m.ProtoSize() return m.MarshalToSizedBuffer(dAtA[:size]) } -func (m *DeriveResponseExt_Published) MarshalToSizedBuffer(dAtA []byte) (int, error) { +func (m *DeriveRequestExt_Open) MarshalToSizedBuffer(dAtA []byte) (int, error) { i := len(dAtA) _ = i var l int @@ -1275,30 +1483,29 @@ func (m *DeriveResponseExt_Published) MarshalToSizedBuffer(dAtA []byte) (int, er i -= len(m.XXX_unrecognized) copy(dAtA[i:], m.XXX_unrecognized) } - if len(m.PartitionsPacked) > 0 { - i -= len(m.PartitionsPacked) - copy(dAtA[i:], m.PartitionsPacked) - i = encodeVarintRuntime(dAtA, i, uint64(len(m.PartitionsPacked))) - i-- - dAtA[i] = 0x1a - } - if len(m.KeyPacked) > 0 { - i -= len(m.KeyPacked) - copy(dAtA[i:], m.KeyPacked) - i = encodeVarintRuntime(dAtA, i, uint64(len(m.KeyPacked))) + if m.RocksdbDescriptor != nil { + { + size, err := m.RocksdbDescriptor.MarshalToSizedBuffer(dAtA[:i]) + if err != nil { + return 0, err + } + i -= size + i = encodeVarintRuntime(dAtA, i, uint64(size)) + } i-- dAtA[i] = 0x12 } - if m.MaxClock != 0 { - i -= 8 - encoding_binary.LittleEndian.PutUint64(dAtA[i:], uint64(m.MaxClock)) + if len(m.SqliteVfsUri) > 0 { + i -= len(m.SqliteVfsUri) + copy(dAtA[i:], m.SqliteVfsUri) + i = encodeVarintRuntime(dAtA, i, uint64(len(m.SqliteVfsUri))) i-- - dAtA[i] = 0x9 + dAtA[i] = 0xa } return len(dAtA) - i, nil } -func (m *DeriveResponseExt_Flushed) Marshal() (dAtA []byte, err error) { +func (m *DeriveResponseExt) Marshal() (dAtA []byte, err error) { size := m.ProtoSize() dAtA = make([]byte, size) n, err := m.MarshalToSizedBuffer(dAtA[:size]) @@ -1308,12 +1515,12 @@ func (m *DeriveResponseExt_Flushed) Marshal() (dAtA []byte, err error) { return dAtA[:n], nil } -func (m *DeriveResponseExt_Flushed) MarshalTo(dAtA []byte) (int, error) { +func (m *DeriveResponseExt) MarshalTo(dAtA []byte) (int, error) { size := m.ProtoSize() return m.MarshalToSizedBuffer(dAtA[:size]) } -func (m *DeriveResponseExt_Flushed) MarshalToSizedBuffer(dAtA []byte) (int, error) { +func (m *DeriveResponseExt) MarshalToSizedBuffer(dAtA []byte) (int, error) { i := len(dAtA) _ = i var l int @@ -1322,9 +1529,9 @@ func (m *DeriveResponseExt_Flushed) MarshalToSizedBuffer(dAtA []byte) (int, erro i -= len(m.XXX_unrecognized) copy(dAtA[i:], m.XXX_unrecognized) } - if m.Stats != nil { + if m.Flushed != nil { { - size, err := m.Stats.MarshalToSizedBuffer(dAtA[:i]) + size, err := m.Flushed.MarshalToSizedBuffer(dAtA[:i]) if err != nil { return 0, err } @@ -1332,11 +1539,250 @@ func (m *DeriveResponseExt_Flushed) MarshalToSizedBuffer(dAtA []byte) (int, erro i = encodeVarintRuntime(dAtA, i, uint64(size)) } i-- - dAtA[i] = 0xa + dAtA[i] = 0x22 } - return len(dAtA) - i, nil -} - + if m.Published != nil { + { + size, err := m.Published.MarshalToSizedBuffer(dAtA[:i]) + if err != nil { + return 0, err + } + i -= size + i = encodeVarintRuntime(dAtA, i, uint64(size)) + } + i-- + dAtA[i] = 0x1a + } + if m.Opened != nil { + { + size, err := m.Opened.MarshalToSizedBuffer(dAtA[:i]) + if err != nil { + return 0, err + } + i -= size + i = encodeVarintRuntime(dAtA, i, uint64(size)) + } + i-- + dAtA[i] = 0x12 + } + if m.Container != nil { + { + size, err := m.Container.MarshalToSizedBuffer(dAtA[:i]) + if err != nil { + return 0, err + } + i -= size + i = encodeVarintRuntime(dAtA, i, uint64(size)) + } + i-- + dAtA[i] = 
0xa + } + return len(dAtA) - i, nil +} + +func (m *DeriveResponseExt_Opened) Marshal() (dAtA []byte, err error) { + size := m.ProtoSize() + dAtA = make([]byte, size) + n, err := m.MarshalToSizedBuffer(dAtA[:size]) + if err != nil { + return nil, err + } + return dAtA[:n], nil +} + +func (m *DeriveResponseExt_Opened) MarshalTo(dAtA []byte) (int, error) { + size := m.ProtoSize() + return m.MarshalToSizedBuffer(dAtA[:size]) +} + +func (m *DeriveResponseExt_Opened) MarshalToSizedBuffer(dAtA []byte) (int, error) { + i := len(dAtA) + _ = i + var l int + _ = l + if m.XXX_unrecognized != nil { + i -= len(m.XXX_unrecognized) + copy(dAtA[i:], m.XXX_unrecognized) + } + if m.RuntimeCheckpoint != nil { + { + size, err := m.RuntimeCheckpoint.MarshalToSizedBuffer(dAtA[:i]) + if err != nil { + return 0, err + } + i -= size + i = encodeVarintRuntime(dAtA, i, uint64(size)) + } + i-- + dAtA[i] = 0xa + } + return len(dAtA) - i, nil +} + +func (m *DeriveResponseExt_Published) Marshal() (dAtA []byte, err error) { + size := m.ProtoSize() + dAtA = make([]byte, size) + n, err := m.MarshalToSizedBuffer(dAtA[:size]) + if err != nil { + return nil, err + } + return dAtA[:n], nil +} + +func (m *DeriveResponseExt_Published) MarshalTo(dAtA []byte) (int, error) { + size := m.ProtoSize() + return m.MarshalToSizedBuffer(dAtA[:size]) +} + +func (m *DeriveResponseExt_Published) MarshalToSizedBuffer(dAtA []byte) (int, error) { + i := len(dAtA) + _ = i + var l int + _ = l + if m.XXX_unrecognized != nil { + i -= len(m.XXX_unrecognized) + copy(dAtA[i:], m.XXX_unrecognized) + } + if len(m.PartitionsPacked) > 0 { + i -= len(m.PartitionsPacked) + copy(dAtA[i:], m.PartitionsPacked) + i = encodeVarintRuntime(dAtA, i, uint64(len(m.PartitionsPacked))) + i-- + dAtA[i] = 0x1a + } + if len(m.KeyPacked) > 0 { + i -= len(m.KeyPacked) + copy(dAtA[i:], m.KeyPacked) + i = encodeVarintRuntime(dAtA, i, uint64(len(m.KeyPacked))) + i-- + dAtA[i] = 0x12 + } + if m.MaxClock != 0 { + i -= 8 + encoding_binary.LittleEndian.PutUint64(dAtA[i:], uint64(m.MaxClock)) + i-- + dAtA[i] = 0x9 + } + return len(dAtA) - i, nil +} + +func (m *DeriveResponseExt_Flushed) Marshal() (dAtA []byte, err error) { + size := m.ProtoSize() + dAtA = make([]byte, size) + n, err := m.MarshalToSizedBuffer(dAtA[:size]) + if err != nil { + return nil, err + } + return dAtA[:n], nil +} + +func (m *DeriveResponseExt_Flushed) MarshalTo(dAtA []byte) (int, error) { + size := m.ProtoSize() + return m.MarshalToSizedBuffer(dAtA[:size]) +} + +func (m *DeriveResponseExt_Flushed) MarshalToSizedBuffer(dAtA []byte) (int, error) { + i := len(dAtA) + _ = i + var l int + _ = l + if m.XXX_unrecognized != nil { + i -= len(m.XXX_unrecognized) + copy(dAtA[i:], m.XXX_unrecognized) + } + if m.Stats != nil { + { + size, err := m.Stats.MarshalToSizedBuffer(dAtA[:i]) + if err != nil { + return 0, err + } + i -= size + i = encodeVarintRuntime(dAtA, i, uint64(size)) + } + i-- + dAtA[i] = 0xa + } + return len(dAtA) - i, nil +} + +func (m *MaterializeRequestExt) Marshal() (dAtA []byte, err error) { + size := m.ProtoSize() + dAtA = make([]byte, size) + n, err := m.MarshalToSizedBuffer(dAtA[:size]) + if err != nil { + return nil, err + } + return dAtA[:n], nil +} + +func (m *MaterializeRequestExt) MarshalTo(dAtA []byte) (int, error) { + size := m.ProtoSize() + return m.MarshalToSizedBuffer(dAtA[:size]) +} + +func (m *MaterializeRequestExt) MarshalToSizedBuffer(dAtA []byte) (int, error) { + i := len(dAtA) + _ = i + var l int + _ = l + if m.XXX_unrecognized != nil { + i -= len(m.XXX_unrecognized) + copy(dAtA[i:], 
m.XXX_unrecognized) + } + if m.Labels != nil { + { + size, err := m.Labels.MarshalToSizedBuffer(dAtA[:i]) + if err != nil { + return 0, err + } + i -= size + i = encodeVarintRuntime(dAtA, i, uint64(size)) + } + i-- + dAtA[i] = 0xa + } + return len(dAtA) - i, nil +} + +func (m *MaterializeResponseExt) Marshal() (dAtA []byte, err error) { + size := m.ProtoSize() + dAtA = make([]byte, size) + n, err := m.MarshalToSizedBuffer(dAtA[:size]) + if err != nil { + return nil, err + } + return dAtA[:n], nil +} + +func (m *MaterializeResponseExt) MarshalTo(dAtA []byte) (int, error) { + size := m.ProtoSize() + return m.MarshalToSizedBuffer(dAtA[:size]) +} + +func (m *MaterializeResponseExt) MarshalToSizedBuffer(dAtA []byte) (int, error) { + i := len(dAtA) + _ = i + var l int + _ = l + if m.XXX_unrecognized != nil { + i -= len(m.XXX_unrecognized) + copy(dAtA[i:], m.XXX_unrecognized) + } + if m.Container != nil { + { + size, err := m.Container.MarshalToSizedBuffer(dAtA[:i]) + if err != nil { + return 0, err + } + i -= size + i = encodeVarintRuntime(dAtA, i, uint64(size)) + } + i-- + dAtA[i] = 0xa + } + return len(dAtA) - i, nil +} + func encodeVarintRuntime(dAtA []byte, offset int, v uint64) int { offset -= sovRuntime(v) base := offset @@ -1365,6 +1811,10 @@ func (m *TaskServiceConfig) ProtoSize() (n int) { if l > 0 { n += 1 + l + sovRuntime(uint64(l)) } + l = len(m.ContainerNetwork) + if l > 0 { + n += 1 + l + sovRuntime(uint64(l)) + } if m.XXX_unrecognized != nil { n += len(m.XXX_unrecognized) } @@ -1498,37 +1948,36 @@ func (m *RocksDBDescriptor) ProtoSize() (n int) { return n } -func (m *DeriveRequestExt) ProtoSize() (n int) { +func (m *Container) ProtoSize() (n int) { if m == nil { return 0 } var l int _ = l - if m.Open != nil { - l = m.Open.ProtoSize() + l = len(m.IpAddr) + if l > 0 { n += 1 + l + sovRuntime(uint64(l)) } + if len(m.NetworkPorts) > 0 { + for _, e := range m.NetworkPorts { + l = e.ProtoSize() + n += 1 + l + sovRuntime(uint64(l)) + } + } if m.XXX_unrecognized != nil { n += len(m.XXX_unrecognized) } return n } -func (m *DeriveRequestExt_Open) ProtoSize() (n int) { +func (m *CaptureRequestExt) ProtoSize() (n int) { if m == nil { return 0 } var l int _ = l - if m.LogLevel != 0 { - n += 1 + sovRuntime(uint64(m.LogLevel)) - } - l = len(m.SqliteVfsUri) - if l > 0 { - n += 1 + l + sovRuntime(uint64(l)) - } - if m.RocksdbDescriptor != nil { - l = m.RocksdbDescriptor.ProtoSize() + if m.Labels != nil { + l = m.Labels.ProtoSize() n += 1 + l + sovRuntime(uint64(l)) } if m.XXX_unrecognized != nil { @@ -1537,18 +1986,78 @@ func (m *DeriveRequestExt_Open) ProtoSize() (n int) { return n } -func (m *DeriveResponseExt) ProtoSize() (n int) { +func (m *CaptureResponseExt) ProtoSize() (n int) { if m == nil { return 0 } var l int _ = l - if m.Opened != nil { - l = m.Opened.ProtoSize() - n += 1 + l + sovRuntime(uint64(l)) - } - if m.Published != nil { - l = m.Published.ProtoSize() + if m.Container != nil { + l = m.Container.ProtoSize() + n += 1 + l + sovRuntime(uint64(l)) + } + if m.XXX_unrecognized != nil { + n += len(m.XXX_unrecognized) + } + return n +} + +func (m *DeriveRequestExt) ProtoSize() (n int) { + if m == nil { + return 0 + } + var l int + _ = l + if m.Labels != nil { + l = m.Labels.ProtoSize() + n += 1 + l + sovRuntime(uint64(l)) + } + if m.Open != nil { + l = m.Open.ProtoSize() + n += 1 + l + sovRuntime(uint64(l)) + } + if m.XXX_unrecognized != nil { + n += len(m.XXX_unrecognized) + } + return n +} + +func (m *DeriveRequestExt_Open) ProtoSize() (n int) { + if m == nil { + return 0 + } + var l 
int + _ = l + l = len(m.SqliteVfsUri) + if l > 0 { + n += 1 + l + sovRuntime(uint64(l)) + } + if m.RocksdbDescriptor != nil { + l = m.RocksdbDescriptor.ProtoSize() + n += 1 + l + sovRuntime(uint64(l)) + } + if m.XXX_unrecognized != nil { + n += len(m.XXX_unrecognized) + } + return n +} + +func (m *DeriveResponseExt) ProtoSize() (n int) { + if m == nil { + return 0 + } + var l int + _ = l + if m.Container != nil { + l = m.Container.ProtoSize() + n += 1 + l + sovRuntime(uint64(l)) + } + if m.Opened != nil { + l = m.Opened.ProtoSize() + n += 1 + l + sovRuntime(uint64(l)) + } + if m.Published != nil { + l = m.Published.ProtoSize() n += 1 + l + sovRuntime(uint64(l)) } if m.Flushed != nil { @@ -1616,6 +2125,38 @@ func (m *DeriveResponseExt_Flushed) ProtoSize() (n int) { return n } +func (m *MaterializeRequestExt) ProtoSize() (n int) { + if m == nil { + return 0 + } + var l int + _ = l + if m.Labels != nil { + l = m.Labels.ProtoSize() + n += 1 + l + sovRuntime(uint64(l)) + } + if m.XXX_unrecognized != nil { + n += len(m.XXX_unrecognized) + } + return n +} + +func (m *MaterializeResponseExt) ProtoSize() (n int) { + if m == nil { + return 0 + } + var l int + _ = l + if m.Container != nil { + l = m.Container.ProtoSize() + n += 1 + l + sovRuntime(uint64(l)) + } + if m.XXX_unrecognized != nil { + n += len(m.XXX_unrecognized) + } + return n +} + func sovRuntime(x uint64) (n int) { return (math_bits.Len64(x|1) + 6) / 7 } @@ -1734,6 +2275,38 @@ func (m *TaskServiceConfig) Unmarshal(dAtA []byte) error { } m.UdsPath = string(dAtA[iNdEx:postIndex]) iNdEx = postIndex + case 4: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field ContainerNetwork", wireType) + } + var stringLen uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowRuntime + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + stringLen |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + intStringLen := int(stringLen) + if intStringLen < 0 { + return ErrInvalidLengthRuntime + } + postIndex := iNdEx + intStringLen + if postIndex < 0 { + return ErrInvalidLengthRuntime + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + m.ContainerNetwork = string(dAtA[iNdEx:postIndex]) + iNdEx = postIndex default: iNdEx = preIndex skippy, err := skipRuntime(dAtA[iNdEx:]) @@ -2567,18 +3140,309 @@ func (m *RocksDBDescriptor) Unmarshal(dAtA []byte) error { break } } - intStringLen := int(stringLen) - if intStringLen < 0 { + intStringLen := int(stringLen) + if intStringLen < 0 { + return ErrInvalidLengthRuntime + } + postIndex := iNdEx + intStringLen + if postIndex < 0 { + return ErrInvalidLengthRuntime + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + m.RocksdbPath = string(dAtA[iNdEx:postIndex]) + iNdEx = postIndex + default: + iNdEx = preIndex + skippy, err := skipRuntime(dAtA[iNdEx:]) + if err != nil { + return err + } + if (skippy < 0) || (iNdEx+skippy) < 0 { + return ErrInvalidLengthRuntime + } + if (iNdEx + skippy) > l { + return io.ErrUnexpectedEOF + } + m.XXX_unrecognized = append(m.XXX_unrecognized, dAtA[iNdEx:iNdEx+skippy]...) 
+ iNdEx += skippy + } + } + + if iNdEx > l { + return io.ErrUnexpectedEOF + } + return nil +} +func (m *Container) Unmarshal(dAtA []byte) error { + l := len(dAtA) + iNdEx := 0 + for iNdEx < l { + preIndex := iNdEx + var wire uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowRuntime + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + wire |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + fieldNum := int32(wire >> 3) + wireType := int(wire & 0x7) + if wireType == 4 { + return fmt.Errorf("proto: Container: wiretype end group for non-group") + } + if fieldNum <= 0 { + return fmt.Errorf("proto: Container: illegal tag %d (wire type %d)", fieldNum, wire) + } + switch fieldNum { + case 1: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field IpAddr", wireType) + } + var stringLen uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowRuntime + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + stringLen |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + intStringLen := int(stringLen) + if intStringLen < 0 { + return ErrInvalidLengthRuntime + } + postIndex := iNdEx + intStringLen + if postIndex < 0 { + return ErrInvalidLengthRuntime + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + m.IpAddr = string(dAtA[iNdEx:postIndex]) + iNdEx = postIndex + case 2: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field NetworkPorts", wireType) + } + var msglen int + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowRuntime + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + msglen |= int(b&0x7F) << shift + if b < 0x80 { + break + } + } + if msglen < 0 { + return ErrInvalidLengthRuntime + } + postIndex := iNdEx + msglen + if postIndex < 0 { + return ErrInvalidLengthRuntime + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + m.NetworkPorts = append(m.NetworkPorts, &flow.NetworkPort{}) + if err := m.NetworkPorts[len(m.NetworkPorts)-1].Unmarshal(dAtA[iNdEx:postIndex]); err != nil { + return err + } + iNdEx = postIndex + default: + iNdEx = preIndex + skippy, err := skipRuntime(dAtA[iNdEx:]) + if err != nil { + return err + } + if (skippy < 0) || (iNdEx+skippy) < 0 { + return ErrInvalidLengthRuntime + } + if (iNdEx + skippy) > l { + return io.ErrUnexpectedEOF + } + m.XXX_unrecognized = append(m.XXX_unrecognized, dAtA[iNdEx:iNdEx+skippy]...) 
+ iNdEx += skippy + } + } + + if iNdEx > l { + return io.ErrUnexpectedEOF + } + return nil +} +func (m *CaptureRequestExt) Unmarshal(dAtA []byte) error { + l := len(dAtA) + iNdEx := 0 + for iNdEx < l { + preIndex := iNdEx + var wire uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowRuntime + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + wire |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + fieldNum := int32(wire >> 3) + wireType := int(wire & 0x7) + if wireType == 4 { + return fmt.Errorf("proto: CaptureRequestExt: wiretype end group for non-group") + } + if fieldNum <= 0 { + return fmt.Errorf("proto: CaptureRequestExt: illegal tag %d (wire type %d)", fieldNum, wire) + } + switch fieldNum { + case 1: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field Labels", wireType) + } + var msglen int + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowRuntime + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + msglen |= int(b&0x7F) << shift + if b < 0x80 { + break + } + } + if msglen < 0 { + return ErrInvalidLengthRuntime + } + postIndex := iNdEx + msglen + if postIndex < 0 { + return ErrInvalidLengthRuntime + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + if m.Labels == nil { + m.Labels = &ops.ShardLabeling{} + } + if err := m.Labels.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { + return err + } + iNdEx = postIndex + default: + iNdEx = preIndex + skippy, err := skipRuntime(dAtA[iNdEx:]) + if err != nil { + return err + } + if (skippy < 0) || (iNdEx+skippy) < 0 { + return ErrInvalidLengthRuntime + } + if (iNdEx + skippy) > l { + return io.ErrUnexpectedEOF + } + m.XXX_unrecognized = append(m.XXX_unrecognized, dAtA[iNdEx:iNdEx+skippy]...) 
+ iNdEx += skippy + } + } + + if iNdEx > l { + return io.ErrUnexpectedEOF + } + return nil +} +func (m *CaptureResponseExt) Unmarshal(dAtA []byte) error { + l := len(dAtA) + iNdEx := 0 + for iNdEx < l { + preIndex := iNdEx + var wire uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowRuntime + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + wire |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + fieldNum := int32(wire >> 3) + wireType := int(wire & 0x7) + if wireType == 4 { + return fmt.Errorf("proto: CaptureResponseExt: wiretype end group for non-group") + } + if fieldNum <= 0 { + return fmt.Errorf("proto: CaptureResponseExt: illegal tag %d (wire type %d)", fieldNum, wire) + } + switch fieldNum { + case 1: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field Container", wireType) + } + var msglen int + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowRuntime + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + msglen |= int(b&0x7F) << shift + if b < 0x80 { + break + } + } + if msglen < 0 { return ErrInvalidLengthRuntime } - postIndex := iNdEx + intStringLen + postIndex := iNdEx + msglen if postIndex < 0 { return ErrInvalidLengthRuntime } if postIndex > l { return io.ErrUnexpectedEOF } - m.RocksdbPath = string(dAtA[iNdEx:postIndex]) + if m.Container == nil { + m.Container = &Container{} + } + if err := m.Container.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { + return err + } iNdEx = postIndex default: iNdEx = preIndex @@ -2632,6 +3496,42 @@ func (m *DeriveRequestExt) Unmarshal(dAtA []byte) error { } switch fieldNum { case 1: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field Labels", wireType) + } + var msglen int + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowRuntime + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + msglen |= int(b&0x7F) << shift + if b < 0x80 { + break + } + } + if msglen < 0 { + return ErrInvalidLengthRuntime + } + postIndex := iNdEx + msglen + if postIndex < 0 { + return ErrInvalidLengthRuntime + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + if m.Labels == nil { + m.Labels = &ops.ShardLabeling{} + } + if err := m.Labels.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { + return err + } + iNdEx = postIndex + case 2: if wireType != 2 { return fmt.Errorf("proto: wrong wireType = %d for field Open", wireType) } @@ -2719,25 +3619,6 @@ func (m *DeriveRequestExt_Open) Unmarshal(dAtA []byte) error { } switch fieldNum { case 1: - if wireType != 0 { - return fmt.Errorf("proto: wrong wireType = %d for field LogLevel", wireType) - } - m.LogLevel = 0 - for shift := uint(0); ; shift += 7 { - if shift >= 64 { - return ErrIntOverflowRuntime - } - if iNdEx >= l { - return io.ErrUnexpectedEOF - } - b := dAtA[iNdEx] - iNdEx++ - m.LogLevel |= ops.Log_Level(b&0x7F) << shift - if b < 0x80 { - break - } - } - case 2: if wireType != 2 { return fmt.Errorf("proto: wrong wireType = %d for field SqliteVfsUri", wireType) } @@ -2769,7 +3650,7 @@ func (m *DeriveRequestExt_Open) Unmarshal(dAtA []byte) error { } m.SqliteVfsUri = string(dAtA[iNdEx:postIndex]) iNdEx = postIndex - case 3: + case 2: if wireType != 2 { return fmt.Errorf("proto: wrong wireType = %d for field RocksdbDescriptor", wireType) } @@ -2857,6 +3738,42 @@ func (m *DeriveResponseExt) Unmarshal(dAtA []byte) error { } switch fieldNum { case 1: + if wireType != 
2 { + return fmt.Errorf("proto: wrong wireType = %d for field Container", wireType) + } + var msglen int + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowRuntime + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + msglen |= int(b&0x7F) << shift + if b < 0x80 { + break + } + } + if msglen < 0 { + return ErrInvalidLengthRuntime + } + postIndex := iNdEx + msglen + if postIndex < 0 { + return ErrInvalidLengthRuntime + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + if m.Container == nil { + m.Container = &Container{} + } + if err := m.Container.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { + return err + } + iNdEx = postIndex + case 2: if wireType != 2 { return fmt.Errorf("proto: wrong wireType = %d for field Opened", wireType) } @@ -2892,7 +3809,7 @@ func (m *DeriveResponseExt) Unmarshal(dAtA []byte) error { return err } iNdEx = postIndex - case 2: + case 3: if wireType != 2 { return fmt.Errorf("proto: wrong wireType = %d for field Published", wireType) } @@ -2928,7 +3845,7 @@ func (m *DeriveResponseExt) Unmarshal(dAtA []byte) error { return err } iNdEx = postIndex - case 3: + case 4: if wireType != 2 { return fmt.Errorf("proto: wrong wireType = %d for field Flushed", wireType) } @@ -3289,6 +4206,180 @@ func (m *DeriveResponseExt_Flushed) Unmarshal(dAtA []byte) error { } return nil } +func (m *MaterializeRequestExt) Unmarshal(dAtA []byte) error { + l := len(dAtA) + iNdEx := 0 + for iNdEx < l { + preIndex := iNdEx + var wire uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowRuntime + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + wire |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + fieldNum := int32(wire >> 3) + wireType := int(wire & 0x7) + if wireType == 4 { + return fmt.Errorf("proto: MaterializeRequestExt: wiretype end group for non-group") + } + if fieldNum <= 0 { + return fmt.Errorf("proto: MaterializeRequestExt: illegal tag %d (wire type %d)", fieldNum, wire) + } + switch fieldNum { + case 1: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field Labels", wireType) + } + var msglen int + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowRuntime + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + msglen |= int(b&0x7F) << shift + if b < 0x80 { + break + } + } + if msglen < 0 { + return ErrInvalidLengthRuntime + } + postIndex := iNdEx + msglen + if postIndex < 0 { + return ErrInvalidLengthRuntime + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + if m.Labels == nil { + m.Labels = &ops.ShardLabeling{} + } + if err := m.Labels.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { + return err + } + iNdEx = postIndex + default: + iNdEx = preIndex + skippy, err := skipRuntime(dAtA[iNdEx:]) + if err != nil { + return err + } + if (skippy < 0) || (iNdEx+skippy) < 0 { + return ErrInvalidLengthRuntime + } + if (iNdEx + skippy) > l { + return io.ErrUnexpectedEOF + } + m.XXX_unrecognized = append(m.XXX_unrecognized, dAtA[iNdEx:iNdEx+skippy]...) 
+ iNdEx += skippy + } + } + + if iNdEx > l { + return io.ErrUnexpectedEOF + } + return nil +} +func (m *MaterializeResponseExt) Unmarshal(dAtA []byte) error { + l := len(dAtA) + iNdEx := 0 + for iNdEx < l { + preIndex := iNdEx + var wire uint64 + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowRuntime + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + wire |= uint64(b&0x7F) << shift + if b < 0x80 { + break + } + } + fieldNum := int32(wire >> 3) + wireType := int(wire & 0x7) + if wireType == 4 { + return fmt.Errorf("proto: MaterializeResponseExt: wiretype end group for non-group") + } + if fieldNum <= 0 { + return fmt.Errorf("proto: MaterializeResponseExt: illegal tag %d (wire type %d)", fieldNum, wire) + } + switch fieldNum { + case 1: + if wireType != 2 { + return fmt.Errorf("proto: wrong wireType = %d for field Container", wireType) + } + var msglen int + for shift := uint(0); ; shift += 7 { + if shift >= 64 { + return ErrIntOverflowRuntime + } + if iNdEx >= l { + return io.ErrUnexpectedEOF + } + b := dAtA[iNdEx] + iNdEx++ + msglen |= int(b&0x7F) << shift + if b < 0x80 { + break + } + } + if msglen < 0 { + return ErrInvalidLengthRuntime + } + postIndex := iNdEx + msglen + if postIndex < 0 { + return ErrInvalidLengthRuntime + } + if postIndex > l { + return io.ErrUnexpectedEOF + } + if m.Container == nil { + m.Container = &Container{} + } + if err := m.Container.Unmarshal(dAtA[iNdEx:postIndex]); err != nil { + return err + } + iNdEx = postIndex + default: + iNdEx = preIndex + skippy, err := skipRuntime(dAtA[iNdEx:]) + if err != nil { + return err + } + if (skippy < 0) || (iNdEx+skippy) < 0 { + return ErrInvalidLengthRuntime + } + if (iNdEx + skippy) > l { + return io.ErrUnexpectedEOF + } + m.XXX_unrecognized = append(m.XXX_unrecognized, dAtA[iNdEx:iNdEx+skippy]...) + iNdEx += skippy + } + } + + if iNdEx > l { + return io.ErrUnexpectedEOF + } + return nil +} func skipRuntime(dAtA []byte) (n int, err error) { l := len(dAtA) iNdEx := 0 diff --git a/go/protocols/runtime/runtime.proto b/go/protocols/runtime/runtime.proto index cd27a34768..f5a775ebdf 100644 --- a/go/protocols/runtime/runtime.proto +++ b/go/protocols/runtime/runtime.proto @@ -18,6 +18,7 @@ message TaskServiceConfig { int32 log_file_fd = 1; string task_name = 2; string uds_path = 3; + string container_network = 4; } // ShuffleRequest is the request message of a Shuffle RPC. @@ -122,23 +123,39 @@ message RocksDBDescriptor { string rocksdb_path = 2; } +// Container is a description of a running connector container. +message Container { + string ip_addr = 1; + repeated flow.NetworkPort network_ports = 2; +} + +message CaptureRequestExt { + ops.ShardLabeling labels = 1; +} + +message CaptureResponseExt { + Container container = 1; +} + message DeriveRequestExt { + ops.ShardLabeling labels = 1; + message Open { - // Configured log level. - ops.Log.Level log_level = 1; // URL with a registered SQLite VFS which should be opened. - string sqlite_vfs_uri = 2; + string sqlite_vfs_uri = 1; // RocksDB descriptor which should be opened. - RocksDBDescriptor rocksdb_descriptor = 3; + RocksDBDescriptor rocksdb_descriptor = 2; } - Open open = 1; + Open open = 2; } message DeriveResponseExt { + Container container = 1; + message Opened { consumer.Checkpoint runtime_checkpoint = 1; } - Opened opened = 1; + Opened opened = 2; message Published { // Maximum UUID Clock of sourced document clocks which lead to this published document. 
@@ -148,10 +165,18 @@ message DeriveResponseExt { // Packed partition values extracted from the published document. bytes partitions_packed = 3; } - Published published = 2; + Published published = 3; message Flushed { ops.Stats stats = 1; } - Flushed flushed = 3; + Flushed flushed = 4; +} + +message MaterializeRequestExt { + ops.ShardLabeling labels = 1; +} + +message MaterializeResponseExt { + Container container = 1; } \ No newline at end of file diff --git a/go/runtime/derive.go b/go/runtime/derive.go index 83e03dab8d..65708e6c3b 100644 --- a/go/runtime/derive.go +++ b/go/runtime/derive.go @@ -114,9 +114,8 @@ func (d *Derive) RestoreCheckpoint(shard consumer.Shard) (cp pf.Checkpoint, err } var requestExt = &pr.DeriveRequestExt{ - Open: &pr.DeriveRequestExt_Open{ - LogLevel: d.labels.LogLevel, - }, + Labels: &d.labels, + Open: &pr.DeriveRequestExt_Open{}, } if d.client != nil {
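For context on how the reworked extension messages are used: the request extensions (CaptureRequestExt, DeriveRequestExt, MaterializeRequestExt) now carry the shard's ops.ShardLabeling rather than a bare log level, and the response extensions surface the new Container description of the started connector. The following is a minimal sketch of a round-trip through these messages, assuming the gogo-generated Marshal methods accompany the Unmarshal methods in this diff; the import paths and aliases (pf, ops, pr), the sample IP address, and the empty NetworkPort are illustrative assumptions rather than values taken from the change.

package main

import (
	"fmt"

	pf "github.com/estuary/flow/go/protocols/flow"
	"github.com/estuary/flow/go/protocols/ops"
	pr "github.com/estuary/flow/go/protocols/runtime"
)

func main() {
	// Request side: shard labeling rides in the request extension (field 1),
	// replacing the removed DeriveRequestExt_Open.LogLevel.
	var reqExt = &pr.CaptureRequestExt{
		Labels: &ops.ShardLabeling{}, // populated from the shard's labels in practice
	}
	reqBytes, err := reqExt.Marshal()
	if err != nil {
		panic(err)
	}

	// Response side: the runtime reports the connector container it started.
	var respExt = &pr.CaptureResponseExt{
		Container: &pr.Container{
			IpAddr:       "172.17.0.2",          // illustrative address
			NetworkPorts: []*pf.NetworkPort{{}}, // ports advertised by the connector image
		},
	}
	respBytes, err := respExt.Marshal()
	if err != nil {
		panic(err)
	}

	// Decoding exercises the Unmarshal paths generated above.
	var decoded pr.CaptureResponseExt
	if err := decoded.Unmarshal(respBytes); err != nil {
		panic(err)
	}
	fmt.Println(len(reqBytes), decoded.Container.IpAddr)
}

Because the labeling now lives in the top-level request extension, go/runtime/derive.go can pass &d.labels directly instead of copying individual fields such as LogLevel into Open, as the hunk above shows.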