From bde7d431235f8093791180a72fba5839ecab1c8a Mon Sep 17 00:00:00 2001
From: Michael Uti
Date: Fri, 6 Sep 2024 05:42:57 +0100
Subject: [PATCH] chore: add private endpoint support in bigquery example

---
 .../databases/bigquery/datastream_corp/app.js | 11 ++-
 .../databases/bigquery/datastream_corp/run.sh |  8 +-
 .../databases/bigquery/metrics_corp/run.sh    | 74 ++++++++++++++++---
 .../bigquery/metrics_corp/run_ockam.sh        |  5 +-
 .../command/portals/databases/bigquery/run.sh |  3 +
 5 files changed, 82 insertions(+), 19 deletions(-)

diff --git a/examples/command/portals/databases/bigquery/datastream_corp/app.js b/examples/command/portals/databases/bigquery/datastream_corp/app.js
index 2fdaa5c49b7..810d4b344a2 100644
--- a/examples/command/portals/databases/bigquery/datastream_corp/app.js
+++ b/examples/command/portals/databases/bigquery/datastream_corp/app.js
@@ -2,7 +2,6 @@
 const axios = require('axios');
 const { JWT } = require('google-auth-library');
 const { BigQuery } = require('@google-cloud/bigquery');
-
 const projectId = process.env.GOOGLE_CLOUD_PROJECT;
 if (!projectId) {
   console.error('GOOGLE_CLOUD_PROJECT environment variable must be set.');
@@ -15,6 +14,12 @@ if (!credentials_base64) {
   process.exit(1);
 }
 
+const private_endpoint_name = process.env.PRIVATE_ENDPOINT_NAME;
+if (!private_endpoint_name) {
+  console.error('PRIVATE_ENDPOINT_NAME environment variable must be set.');
+  process.exit(1);
+}
+
 const credentials_json = Buffer.from(credentials_base64, 'base64').toString('utf-8');
 const credentials = JSON.parse(credentials_json);
 
@@ -55,7 +60,7 @@ class CustomBigQueryClient extends BigQuery {
         ...reqOpts.headers,
         'Authorization': `Bearer ${token}`,
         'Content-Type': 'application/json',
-        'Host': 'bigquery.googleapis.com',
+        'Host': `bigquery-${private_endpoint_name}.p.googleapis.com`,
       },
       data: body,
     };
@@ -68,7 +73,7 @@ class CustomBigQueryClient extends BigQuery {
   }
 }
 
-const bigQueryClient = new CustomBigQueryClient('effortless-cat-433609-h1');
+const bigQueryClient = new CustomBigQueryClient(projectId);
 
 async function createDataset(datasetId) {
   console.log(`Creating Dataset ${datasetId}`);
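The Host override above is the crux of the change: the SDK keeps producing ordinary BigQuery REST calls, but they are now addressed to the private endpoint's hostname instead of bigquery.googleapis.com. Roughly what the patched client sends, expressed as a raw request through the portal inlet; a sketch only: the inlet address 127.0.0.1:8080 comes from datastream_corp/run.sh below, "ockamendpoint" is the default name exported by run.sh, and fetching a token with gcloud is purely for illustration.

    # List datasets via the BigQuery REST API, addressed to the private
    # endpoint hostname but sent through the local portal inlet.
    TOKEN="$(gcloud auth print-access-token)"
    curl --silent \
      -H "Host: bigquery-ockamendpoint.p.googleapis.com" \
      -H "Authorization: Bearer ${TOKEN}" \
      -H "Content-Type: application/json" \
      "http://127.0.0.1:8080/bigquery/v2/projects/${GOOGLE_CLOUD_PROJECT}/datasets"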
diff --git a/examples/command/portals/databases/bigquery/datastream_corp/run.sh b/examples/command/portals/databases/bigquery/datastream_corp/run.sh
index c7a70f03160..90e8784a654 100755
--- a/examples/command/portals/databases/bigquery/datastream_corp/run.sh
+++ b/examples/command/portals/databases/bigquery/datastream_corp/run.sh
@@ -58,13 +58,13 @@ run() {
     until scp -o StrictHostKeyChecking=no -i ./key.pem ./app.js "ec2-user@$ip:app.js"; do sleep 10; done
     ssh -o StrictHostKeyChecking=no -i ./key.pem "ec2-user@$ip" \
         'bash -s' << EOS
+      # Wait for private endpoint to be up.
+      while ! curl -H "Host: bigquery-${PRIVATE_ENDPOINT_NAME}.p.googleapis.com" http://127.0.0.1:8080/discovery/v1/apis/bigquery/v2/rest --connect-timeout 2 --max-time 5 --silent > /dev/null; do sleep 5 && echo "private endpoint not up yet... retrying"; done
       export GOOGLE_CLOUD_PROJECT="$GOOGLE_CLOUD_PROJECT_ID"
       export GOOGLE_APPLICATION_CREDENTIALS_BASE64="$GOOGLE_APPLICATION_CREDENTIALS_BASE64"
       sudo yum update -y && sudo yum install nodejs -y
-      npm install @google-cloud/bigquery
-      npm install google-auth-library
-      npm install axios
-      node app.js
+      npm install @google-cloud/bigquery google-auth-library axios
+      PRIVATE_ENDPOINT_NAME="$PRIVATE_ENDPOINT_NAME" node app.js
 EOS
 
 }
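The readiness loop added above can also be factored into a small helper, which reads a little more easily in a startup script. A sketch under the same assumptions (an inlet listening on 127.0.0.1:8080 and PRIVATE_ENDPOINT_NAME exported into the environment):

    wait_for_private_endpoint() {
      # Probe the BigQuery discovery document through the inlet until the
      # outlet, relay, and PSC endpoint are all reachable end to end.
      until curl --silent --connect-timeout 2 --max-time 5 \
          -H "Host: bigquery-${PRIVATE_ENDPOINT_NAME}.p.googleapis.com" \
          "http://127.0.0.1:8080/discovery/v1/apis/bigquery/v2/rest" > /dev/null; do
        echo "private endpoint not up yet... retrying"
        sleep 5
      done
    }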
diff --git a/examples/command/portals/databases/bigquery/metrics_corp/run.sh b/examples/command/portals/databases/bigquery/metrics_corp/run.sh
index c6f6d52b059..300c78a3787 100755
--- a/examples/command/portals/databases/bigquery/metrics_corp/run.sh
+++ b/examples/command/portals/databases/bigquery/metrics_corp/run.sh
@@ -1,20 +1,57 @@
 #!/usr/bin/env bash
+
 run() {
     enrollment_ticket="$1"
 
+    # Internal IP address to reserve for the private endpoint.
+    private_endpoint_address="10.200.0.5"
+
     # ----------------------------------------------------------------------------------------------------------------
-    # CREATE INSTANCE AND START RELAY
+    # CREATE PRIVATE GOOGLE API ENDPOINT (PRIVATE SERVICE CONNECT)
+
+    # Create a new VPC.
+    gcloud compute networks create "${name}-vpc" --subnet-mode=custom --project="$GOOGLE_CLOUD_PROJECT_ID"
+
+    # Create a subnet in the VPC.
+    gcloud compute networks subnets create "${name}-subnet" --network="${name}-vpc" --project="$GOOGLE_CLOUD_PROJECT_ID" \
+        --range=10.0.0.0/24 --region=us-central1
+
+    # Enable Private Google Access for the subnet.
+    gcloud compute networks subnets update "${name}-subnet" --project="$GOOGLE_CLOUD_PROJECT_ID" \
+        --region=us-central1 --enable-private-ip-google-access
+
+    # Reserve an internal IP address for the Private Service Connect (PSC) endpoint.
+    gcloud compute addresses create "${name}-psc-address" --global --project="$GOOGLE_CLOUD_PROJECT_ID" \
+        --purpose=PRIVATE_SERVICE_CONNECT --addresses="$private_endpoint_address" --network="${name}-vpc"
+
+    # Create a forwarding rule to connect to BigQuery using the reserved IP address.
+    gcloud compute forwarding-rules create "$PRIVATE_ENDPOINT_NAME" --global --project="$GOOGLE_CLOUD_PROJECT_ID" \
+        --network="${name}-vpc" --address="${name}-psc-address" --target-google-apis-bundle=all-apis
+
+    # Allow inbound SSH to instances tagged allow-internal.
+    gcloud compute firewall-rules create allow-ssh --project="$GOOGLE_CLOUD_PROJECT_ID" \
+        --network="${name}-vpc" \
+        --allow=tcp:22 \
+        --source-ranges=0.0.0.0/0 \
+        --target-tags=allow-internal
+
+    gcloud compute firewall-rules create allow-all-egress --project="$GOOGLE_CLOUD_PROJECT_ID" \
+        --network="${name}-vpc" --allow=all --direction=EGRESS --priority=1000 --destination-ranges=0.0.0.0/0 --target-tags=allow-egress
+
+    # ----------------------------------------------------------------------------------------------------------------
+    # CREATE INSTANCE USING THE PRIVATE GOOGLE API ENDPOINT
 
     sed "s/\$ENROLLMENT_TICKET/${enrollment_ticket}/g" run_ockam.sh > user_data1.sh
-    sed "s/\$OCKAM_VERSION/${OCKAM_VERSION}/g" user_data1.sh > user_data.sh
+    sed "s/\$OCKAM_VERSION/${OCKAM_VERSION}/g" user_data1.sh > user_data2.sh
+    sed "s/\$PRIVATE_ENDPOINT_NAME/${PRIVATE_ENDPOINT_NAME}/g" user_data2.sh > user_data.sh
 
-    gcloud compute instances create "${name}-key" \
+    gcloud compute instances create "${name}-vm-instance" \
         --project="$GOOGLE_CLOUD_PROJECT_ID" \
-        --zone="us-central1-c" \
-        --create-disk=auto-delete=yes,boot=yes,device-name="${name}-key",image=projects/debian-cloud/global/images/debian-12-bookworm-v20240815,mode=rw,size=10,type=pd-balanced \
+        --zone="us-central1-a" \
+        --create-disk=auto-delete=yes,boot=yes,device-name="${name}-vm-instance",image=projects/debian-cloud/global/images/debian-12-bookworm-v20240815,mode=rw,size=10,type=pd-balanced \
         --machine-type=e2-medium \
-        --network-interface=network-tier=PREMIUM,stack-type=IPV4_ONLY,subnet=default \
-        --tags="${name}-key" \
+        --subnet="${name}-subnet" \
+        --tags=allow-egress,allow-internal \
         --metadata-from-file=startup-script=user_data.sh
 
     rm -rf user_data*.sh
@@ -22,8 +59,27 @@
 
 cleanup() {
     # ----------------------------------------------------------------------------------------------------------------
-    # DELETE INSTANCE
-    gcloud compute instances delete "${name}-key" --zone="us-central1-c" --project="$GOOGLE_CLOUD_PROJECT_ID" --quiet || true
+    # DELETE PRIVATE ENDPOINT RESOURCES
+
+    # Delete the forwarding rule.
+    gcloud compute forwarding-rules delete "$PRIVATE_ENDPOINT_NAME" --global --project="$GOOGLE_CLOUD_PROJECT_ID" --quiet || true
+
+    # Delete the reserved endpoint address.
+    gcloud compute addresses delete "${name}-psc-address" --global --project="$GOOGLE_CLOUD_PROJECT_ID" --quiet || true
+
+    gcloud compute firewall-rules delete allow-all-egress --project="$GOOGLE_CLOUD_PROJECT_ID" --quiet || true
+
+    gcloud compute firewall-rules delete allow-ssh --project="$GOOGLE_CLOUD_PROJECT_ID" --quiet || true
+
+    # ----------------------------------------------------------------------------------------------------------------
+    # DELETE INSTANCE AND NETWORK RESOURCES
+    gcloud compute instances delete "${name}-vm-instance" --zone="us-central1-a" --project="$GOOGLE_CLOUD_PROJECT_ID" --quiet || true
+
+    # Delete the subnet.
+    gcloud compute networks subnets delete "${name}-subnet" --region=us-central1 --project="$GOOGLE_CLOUD_PROJECT_ID" --quiet || true
+
+    # Delete the VPC.
+    gcloud compute networks delete "${name}-vpc" --project="$GOOGLE_CLOUD_PROJECT_ID" --quiet || true
 
     rm -rf user_data*.sh
 }
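Once run() completes, the private endpoint can be checked before involving Ockam at all. A sketch; the resource names match those created above, and since --resolve pins the p.googleapis.com hostname to the reserved address without DNS changes, the curl must run from a VM inside ${name}-vpc:

    # Confirm the forwarding rule exists and report its reserved address.
    gcloud compute forwarding-rules describe "$PRIVATE_ENDPOINT_NAME" \
      --global --project="$GOOGLE_CLOUD_PROJECT_ID" --format='value(IPAddress)'

    # From inside the VPC: fetch the discovery document via the endpoint.
    curl --silent \
      --resolve "bigquery-${PRIVATE_ENDPOINT_NAME}.p.googleapis.com:443:10.200.0.5" \
      "https://bigquery-${PRIVATE_ENDPOINT_NAME}.p.googleapis.com/discovery/v1/apis/bigquery/v2/rest" \
      > /dev/null && echo "private endpoint reachable"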
diff --git a/examples/command/portals/databases/bigquery/metrics_corp/run_ockam.sh b/examples/command/portals/databases/bigquery/metrics_corp/run_ockam.sh
index 02b4fa1fa3d..63276c615e8 100755
--- a/examples/command/portals/databases/bigquery/metrics_corp/run_ockam.sh
+++ b/examples/command/portals/databases/bigquery/metrics_corp/run_ockam.sh
@@ -39,14 +39,13 @@ ockam project enroll "$ENROLLMENT_TICKET"
 
 # Create a TCP Portal Outlet to the BigQuery API at bigquery.googleapis.com:443.
 cat << EOF > outlet.yaml
 tcp-outlet:
-  to: bigquery.googleapis.com:443
+  to: bigquery-$PRIVATE_ENDPOINT_NAME.p.googleapis.com:443
   tls: true
   allow: '(= subject.bigquery-inlet "true")'
   relay: bigquery
 EOF
+cat outlet.yaml
 
 ockam node create outlet.yaml
-rm outlet.yaml
-
 EOS
diff --git a/examples/command/portals/databases/bigquery/run.sh b/examples/command/portals/databases/bigquery/run.sh
index ec70cbd9be1..e02e3c2a552 100755
--- a/examples/command/portals/databases/bigquery/run.sh
+++ b/examples/command/portals/databases/bigquery/run.sh
@@ -92,6 +92,9 @@ for c in aws curl; do
   if ! type "$c" &>/dev/null; then echo "ERROR: Please install: $c" && exit 1; fi
 done
 
+# The private endpoint name must contain only alphanumeric characters.
+export PRIVATE_ENDPOINT_NAME="ockamendpoint"
+
 # Check if the first argument is "cleanup"
 # If it is, call the cleanup function. If not, call the run function.
 if [ "$1" = "cleanup" ]; then cleanup; else run; fi
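Since cleanup() now tears down considerably more than a single instance, a quick audit after ./run.sh cleanup is worthwhile. A sketch; the filters mirror the names this example creates:

    # Both lists should come back empty once cleanup has finished.
    gcloud compute forwarding-rules list --global --project="$GOOGLE_CLOUD_PROJECT_ID" \
      --filter="name=${PRIVATE_ENDPOINT_NAME}"
    gcloud compute networks list --project="$GOOGLE_CLOUD_PROJECT_ID" --filter="name~-vpc$"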