Skip to content

Commit

Permalink
And again
Browse files Browse the repository at this point in the history
  • Loading branch information
i-am-tom committed Jan 15, 2024
1 parent cb62c40 commit 82f7d81
Show file tree
Hide file tree
Showing 46 changed files with 46 additions and 46 deletions.
4 changes: 2 additions & 2 deletions .github/workflows/benchmarks.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@ name: Postgres NDC component benchmarks

permissions:
contents: write
ndc_metadatas: write
deployment: write

jobs:
benchmark:
Expand Down Expand Up @@ -34,7 +34,7 @@ jobs:
cd benchmarks/component
docker compose up --detach --wait postgres grafana
- name: Generate the ndc_metadata configuration 🚧
- name: Generate the NDC metadata configuration 🚧
run: |
set -e -u -o pipefail
cd benchmarks/component
Expand Down
4 changes: 2 additions & 2 deletions .github/workflows/cargo-test.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -122,11 +122,11 @@ jobs:
version: "25.x"
repo-token: ${{ secrets.GITHUB_TOKEN }}

- name: setup ndc_metadata
- name: setup NDC metadata
env:
AURORA_CONNECTION_STRING: ${{ secrets.AURORA_CONNECTION_STRING }}
run: |
# take connection string from env, create ndc_metadata file with it
# take connection string from env, create NDC metadata file with it
cat static/aurora/v1-chinook-ndc-metadata-template.json \
| jq '.connectionUri={"uri":{"value":(env | .AURORA_CONNECTION_STRING)}}' \
> static/aurora/v1-chinook-ndc-metadata.json
Expand Down
2 changes: 1 addition & 1 deletion benchmarks/component/README.md
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,7 @@ Running `run.sh` with a benchmark name as an argument will:

1. build the PostgreSQL data connector Docker image,
2. start the database with Chinook data,
3. start the agent using an associated ndc_metadata, and
3. start the agent using an associated NDC metadata file, and
4. run a benchmark using k6.

Running without arguments will list available benchmarks.
Expand Down
2 changes: 1 addition & 1 deletion benchmarks/component/start.sh
Original file line number Diff line number Diff line change
Expand Up @@ -26,7 +26,7 @@ info 'Starting the dependencies'
docker compose up --wait postgres grafana
POSTGRESQL_SOCKET="$(docker compose port postgres 5432)"

info 'Generating the ndc_metadata configuration'
info 'Generating the NDC metadata configuration'
mkdir -p generated
cargo run -p ndc-postgres --quiet --release -- configuration serve &
AGENT_PID=$!
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -311,7 +311,7 @@ pub async fn validate_raw_configuration(
Ok(config)
}

/// Construct the ndc_metadata configuration by introspecting the database.
/// Construct the NDC metadata configuration by introspecting the database.
pub async fn configure(
args: RawConfiguration,
) -> Result<RawConfiguration, connector::UpdateConfigurationError> {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -243,7 +243,7 @@ pub async fn validate_raw_configuration(
Ok(config)
}

/// Construct the ndc_metadata configuration by introspecting the database.
/// Construct the NDC metadata configuration by introspecting the database.
pub async fn configure(
args: RawConfiguration,
) -> Result<RawConfiguration, connector::UpdateConfigurationError> {
Expand Down
2 changes: 1 addition & 1 deletion crates/tests/databases-tests/src/aurora/common.rs
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,7 @@ pub fn get_connection_string() -> String {
env::var("AURORA_CONNECTION_STRING").unwrap()
}

/// Creates a router with a fresh state from the test ndc_metadata.
/// Creates a router with a fresh state from the test NDC metadata.
pub async fn create_router() -> axum::Router {
tests_common::router::create_router(CHINOOK_NDC_METADATA_PATH).await
}
2 changes: 1 addition & 1 deletion crates/tests/databases-tests/src/citus/common.rs
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,7 @@ pub const CHINOOK_NDC_METADATA_PATH: &str = "static/citus/v2-chinook-ndc-metadat
pub const CONNECTION_STRING: &str =
"postgresql://postgres:password@localhost:64004?sslmode=disable";

/// Creates a router with a fresh state from the test ndc_metadata.
/// Creates a router with a fresh state from the test NDC metadata.
pub async fn create_router() -> axum::Router {
tests_common::router::create_router(CHINOOK_NDC_METADATA_PATH).await
}
2 changes: 1 addition & 1 deletion crates/tests/databases-tests/src/cockroach/common.rs
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@ pub const CHINOOK_NDC_METADATA_PATH: &str = "static/cockroach/v2-chinook-ndc-met

pub const CONNECTION_STRING: &str = "postgresql://postgres:password@localhost:64003/defaultdb";

/// Creates a router with a fresh state from the test ndc_metadata.
/// Creates a router with a fresh state from the test NDC metadata.
pub async fn create_router() -> axum::Router {
tests_common::router::create_router(CHINOOK_NDC_METADATA_PATH).await
}
Original file line number Diff line number Diff line change
Expand Up @@ -4,9 +4,9 @@ mod ndc_metadata_snapshots {
use std::fs;
use std::path::PathBuf;

// each time we run `just generate-chinook-configuration` we save the old
// Postgres ndc_metadata in `static/ndc-metadata-snapshots`. This test parses each snapshot to
// ensure we are still able to understand old versions
// each time we run `just generate-chinook-configuration` we save the old Postgres NDC metadata
// file in `static/ndc-metadata-snapshots`. This test parses each snapshot to ensure we are
// still able to understand old versions.
#[test_each::path(glob = "static/ndc-metadata-snapshots/*.json", name(segments = 2))]
fn test_snapshot(ndc_metadata_path: PathBuf) {
let file = fs::File::open(ndc_metadata_path).expect("fs::File::open");
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,7 @@
//! The github CI setup runs these tests subject to the filtering logic in
//! '.github/test-configuration.json'. Naming a test with the prefix 'postgres_current_only` will
//! ensure they only run on the latest version of postgres being tested. This is necessary because
//! they rely on supporting data (the chinook ndc_metadata configuration) which we maintain only for
//! they rely on supporting data (the chinook NDC metadata configuration) which we maintain only for
//! the latest version.
#[cfg(test)]
Expand Down
2 changes: 1 addition & 1 deletion crates/tests/databases-tests/src/yugabyte/common.rs
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@
pub const CHINOOK_NDC_METADATA_PATH: &str = "static/yugabyte/v2-chinook-ndc-metadata.json";

/// Creates a router with a fresh state from the test ndc_metadata.
/// Creates a router with a fresh state from the test NDC metadata.
pub async fn create_router() -> axum::Router {
tests_common::router::create_router(CHINOOK_NDC_METADATA_PATH).await
}
Original file line number Diff line number Diff line change
Expand Up @@ -9,8 +9,8 @@ use crate::schemas::check_value_conforms_to_schema;

// Tests that configuration generation has not changed.
//
// This test does not use insta snapshots because it checks the ndc_metadata file that is shared with
// other tests.
// This test does not use insta snapshots because it checks the NDC metadata file that is shared
// with other tests.
//
// If you have changed it intentionally, run `just generate-chinook-configuration`.
pub async fn configure_is_idempotent(
Expand Down Expand Up @@ -61,7 +61,7 @@ fn read_configuration(chinook_ndc_metadata_path: impl AsRef<Path>) -> serde_json
let mut multi_version: serde_json::Value =
serde_json::from_reader(file).expect("serde_json::from_reader");

// We assume the stored ndc_metadata file to be in the newest version, so to be able to make
// We assume the stored NDC metadata file to be in the newest version, so to be able to make
// assertions on its serialization behavior we remove the version discriminator field.
multi_version.as_object_mut().unwrap().remove("version");

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,7 @@ use crate::schemas::check_value_conforms_to_schema;

// Tests that configuration generation has not changed.
//
// This test does not use insta snapshots because it checks the ndc_metadata file that is shared with
// This test does not use insta snapshots because it checks the NDC metadata file that is shared with
// other tests.
//
// If you have changed it intentionally, run `just generate-chinook-configuration`.
Expand Down Expand Up @@ -61,7 +61,7 @@ fn read_configuration(chinook_ndc_metadata_path: impl AsRef<Path>) -> serde_json
let mut multi_version: serde_json::Value =
serde_json::from_reader(file).expect("serde_json::from_reader");

// We assume the stored ndc_metadata file to be in the newest version, so to be able to make
// We assume the stored NDC metadata file to be in the newest version, so to be able to make
// assertions on its serialization behavior we remove the version discriminator field.
multi_version.as_object_mut().unwrap().remove("version");

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -10,7 +10,7 @@ use ndc_postgres::configuration::RawConfiguration;

use super::helpers::get_path_from_project_root;

/// Load ndc_metadata at `main_ndc_metadata_path`
/// Load NDC metadata at `main_ndc_metadata_path`
/// replace url with `new_postgres_url`
/// save at `new_ndc_metadata_path`
pub fn copy_ndc_metadata_with_new_postgres_url(
Expand All @@ -31,7 +31,7 @@ pub fn copy_ndc_metadata_with_new_postgres_url(
Ok(())
}

/// Erase test ndc_metadata file created at `ndc_metadata_path`
/// Erase test NDC metadata file created at `ndc_metadata_path`
pub fn delete_ndc_metadata(ndc_metadata_path: impl AsRef<Path>) -> io::Result<()> {
let absolute_path = get_path_from_project_root(ndc_metadata_path);

Expand Down
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
//! Helpers for creating fresh ndc_metadatas for testing . Use `create_fresh_ndc_metadata` to set up a new database and
//! ndc_metadata file, and `clean_up_ndc_metadata` to remove it again afterwards.
//! It would be great to find a way of implementing these in a more Bracket-esq pattern that
//! automatically takes care of clean up in future.
//! Helpers for creating fresh NDC metadata files for testing. Use `create_fresh_ndc_metadata` to
//! set up a new database and NDC metadata file, and `clean_up_ndc_metadata` to remove it again
//! afterwards. It would be great to find a way of implementing these in a more Bracket-esque pattern
//! that automatically takes care of clean up in future.
mod configuration;
mod database;
Expand All @@ -16,7 +16,7 @@ pub struct FreshDeployment {
pub admin_connection_string: String, // for dropping after
}

/// Create a new ndc_metadata, pointing to a fresh copy of the database
/// Create a new NDC metadata, pointing to a fresh copy of the database
pub async fn create_fresh_ndc_metadata(
connection_uri: &str,
ndc_metadata_path: impl AsRef<Path>,
Expand All @@ -38,7 +38,7 @@ pub async fn create_fresh_ndc_metadata(
})
}

/// Remove database created for fresh ndc_metadata
/// Remove database created for fresh NDC metadata
pub async fn clean_up_ndc_metadata(ndc_metadata: FreshDeployment) -> io::Result<()> {
database::drop_database(&ndc_metadata.admin_connection_string, &ndc_metadata.db_name).await;
configuration::delete_ndc_metadata(&ndc_metadata.ndc_metadata_path)
Expand Down
8 changes: 4 additions & 4 deletions crates/tests/tests-common/src/router.rs
Original file line number Diff line number Diff line change
@@ -1,10 +1,10 @@
use std::path::Path;

/// Creates a router with a fresh state from the test ndc_metadata.
/// Creates a router with a fresh state from the test NDC metadata.
pub async fn create_router(chinook_ndc_metadata_path: impl AsRef<Path>) -> axum::Router {
let _ = env_logger::builder().is_test(true).try_init();

// work out where the ndc_metadata configs live
// work out where the NDC metadata configs live
let test_ndc_metadata_file =
super::ndc_metadata::helpers::get_path_from_project_root(chinook_ndc_metadata_path);

Expand All @@ -17,11 +17,11 @@ pub async fn create_router(chinook_ndc_metadata_path: impl AsRef<Path>) -> axum:
ndc_sdk::default_main::create_router(state, None)
}

/// Creates a router with a fresh state from a ndc_metadata file path
/// Creates a router with a fresh state from a NDC metadata file path
pub async fn create_router_from_ndc_metadata(ndc_metadata_path: impl AsRef<Path>) -> axum::Router {
let _ = env_logger::builder().is_test(true).try_init();

// work out where the ndc_metadata configs live
// work out where the NDC metadata configs live
let test_ndc_metadata_file =
super::ndc_metadata::helpers::get_path_from_project_root(ndc_metadata_path);

Expand Down
4 changes: 2 additions & 2 deletions docs/deployment.md
Original file line number Diff line number Diff line change
Expand Up @@ -28,7 +28,7 @@ _Note: Mention any dependencies for these components, any other requirements, or
Listens to the config changes in the file `ddn-cloud/images/<environment>/kustomization.yaml` and auto-deploys it. Access the ArgoCD dashboard [here](https://argocd.hasura-app.io/), search by `<component>-<environment>-region`, e.g., postgres-ndc-config-server-prod-asia-south1.

- **k8s**
Workloads are deployed in the Kubernetes environment, and `ndc-postgres-config-server` is deployed in the data-plane. Its ndc_metadata and service files are found in [here](https://github.com/hasura/ddn-cloud/tree/main/components/region/ndc-config-server).
Workloads are deployed in the Kubernetes environment, and `ndc-postgres-config-server` is deployed in the data-plane. Its NDC metadata and service files are found [here](https://github.com/hasura/ddn-cloud/tree/main/components/region/ndc-config-server).

## Deployment Workflow

Expand All @@ -40,4 +40,4 @@ _Note: Update the checkpoints for any dependency._

## Rollback

Deployment updates are sent in the Slack channels configured. Check out the doc [here](https://docs-internal.hasura-app.io/books/hasura-v3-ndc-metadata-operations/page/v3-ndc_metadatas-slack-notification-configuration). To rollback, under the notification sent, look for the tag under _Component's Image Tag_ and update the tag in `ddn/cloud/images/<environment>/kustomization.yaml` to rollback.
Deployment updates are sent in the Slack channels configured. Check out the doc [here](https://docs-internal.hasura-app.io/books/hasura-v3-deployment-operations/page/v3-deployments-slack-notification-configuration). To rollback, under the notification sent, look for the tag under _Component's Image Tag_ and update the tag in `ddn/cloud/images/<environment>/kustomization.yaml` to rollback.
16 changes: 8 additions & 8 deletions justfile
Original file line number Diff line number Diff line change
Expand Up @@ -142,7 +142,7 @@ document-openapi:
RUST_LOG=INFO cargo run --bin openapi-generator

# Run postgres, testing against external DBs like Aurora
test-other-dbs: create-aurora-ndc_metadata start-dependencies
test-other-dbs: create-aurora-ndc-metadata start-dependencies
RUST_LOG=INFO \
OTLP_ENDPOINT=http://localhost:4317 \
OTEL_SERVICE_NAME=ndc-postgres \
Expand Down Expand Up @@ -183,7 +183,7 @@ doc:
cargo doc --lib --no-deps --open

# run all tests
test *args: start-dependencies create-aurora-ndc_metadata
test *args: start-dependencies create-aurora-ndc-metadata
#!/usr/bin/env bash
# choose a test runner
Expand Down Expand Up @@ -221,14 +221,14 @@ test *args: start-dependencies create-aurora-ndc_metadata
echo "$(tput bold)${TEST_COMMAND[*]}$(tput sgr0)"
RUST_LOG=DEBUG "${TEST_COMMAND[@]}"

# re-generate the ndc_metadata configuration file
# re-generate the ndc-metadata configuration file
generate-chinook-configuration: build start-dependencies
./scripts/archive-old-ndc_metadata.sh '{{POSTGRES_V1_CHINOOK_NDC_METADATA}}'
./scripts/archive-old-ndc-metadata.sh '{{POSTGRES_V1_CHINOOK_NDC_METADATA}}'
./scripts/generate-chinook-configuration.sh 'ndc-postgres' '{{POSTGRESQL_CONNECTION_STRING}}' '{{POSTGRES_V1_CHINOOK_NDC_METADATA}}'
./scripts/generate-chinook-configuration.sh 'ndc-postgres' '{{CITUS_CONNECTION_STRING}}' '{{CITUS_V1_CHINOOK_NDC_METADATA}}'
./scripts/generate-chinook-configuration.sh 'ndc-postgres' '{{COCKROACH_CONNECTION_STRING}}' '{{COCKROACH_V1_CHINOOK_NDC_METADATA}}'

./scripts/archive-old-ndc_metadata.sh '{{POSTGRES_V2_CHINOOK_NDC_METADATA}}'
./scripts/archive-old-ndc-metadata.sh '{{POSTGRES_V2_CHINOOK_NDC_METADATA}}'
./scripts/generate-chinook-configuration.sh 'ndc-postgres' '{{POSTGRESQL_CONNECTION_STRING}}' '{{POSTGRES_V2_CHINOOK_NDC_METADATA}}'
./scripts/generate-chinook-configuration.sh 'ndc-postgres' '{{CITUS_CONNECTION_STRING}}' '{{CITUS_V2_CHINOOK_NDC_METADATA}}'
./scripts/generate-chinook-configuration.sh 'ndc-postgres' '{{COCKROACH_CONNECTION_STRING}}' '{{COCKROACH_V2_CHINOOK_NDC_METADATA}}'
Expand All @@ -246,7 +246,7 @@ generate-chinook-configuration: build start-dependencies
./scripts/generate-chinook-configuration.sh "ndc-postgres" '{{AURORA_CONNECTION_STRING}}' '{{AURORA_V1_CHINOOK_NDC_METADATA_TEMPLATE}}'; \
echo "$(tput bold)./scripts/generate-chinook-configuration.sh 'ndc-postgres' '{{AURORA_CONNECTION_STRING}}' '{{AURORA_V2_CHINOOK_NDC_METADATA_TEMPLATE}}'$(tput sgr0)"; \
./scripts/generate-chinook-configuration.sh "ndc-postgres" '{{AURORA_CONNECTION_STRING}}' '{{AURORA_V2_CHINOOK_NDC_METADATA_TEMPLATE}}'; \
just create-aurora-ndc_metadata; \
just create-aurora-ndc-metadata; \
else \
echo "$(tput bold)$(tput setaf 3)WARNING:$(tput sgr0) Not updating the Aurora configuration because the connection string is unset."; \
fi
Expand All @@ -268,8 +268,8 @@ start-dependencies:
echo "$(tput bold)${COMMAND[*]}$(tput sgr0)"
"${COMMAND[@]}"

# injects the Aurora connection string into a ndc_metadata configuration template
create-aurora-ndc_metadata:
# injects the Aurora connection string into a NDC metadata configuration template
create-aurora-ndc-metadata:
cat {{ AURORA_V2_CHINOOK_NDC_METADATA_TEMPLATE }} \
| jq '.connectionUri.uri.value = (env | .AURORA_CONNECTION_STRING)' \
| prettier --parser=json \
Expand Down
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
#!/usr/bin/env bash
set -e -u -o pipefail

# before we generate a new ndc_metadata, save the current one in
# before we generate a new NDC metadata, save the current one in
# `/static/ndc-metadata-snapshots` so we can ensure they can all be read over
# time

Expand All @@ -17,5 +17,5 @@ mkdir -p "$SNAPSHOT_DIR"
# create filename from hash of contents
NEW_FILENAME="$(sha256sum "${CHINOOK_NDC_METADATA}" | cut -f1 -d' ').json"

# copy current ndc_metadata to new filename
# copy current NDC metadata to new filename
cp "${CHINOOK_NDC_METADATA}" "${SNAPSHOT_DIR}/${NEW_FILENAME}"
2 changes: 1 addition & 1 deletion scripts/generate-chinook-configuration.sh
Original file line number Diff line number Diff line change
Expand Up @@ -28,7 +28,7 @@ if ! kill -0 "$CONFIGURATION_SERVER_PID"; then
exit 1
fi

# We want to preserve the connectionUri unchanged in the ndc_metadata file, for secrets templating purposes
# We want to preserve the connectionUri unchanged in the NDC metadata file, for secrets templating purposes
PRESERVED_DATA="$(jq '{"connectionUri": .connectionUri}' "$CHINOOK_NDC_METADATA")"

# Native queries should inform the initial configuration call
Expand Down
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.
File renamed without changes.

0 comments on commit 82f7d81

Please sign in to comment.