Skip to content

Commit eed4c26

Browse files
authored
Chore: GCP Postgres integration tests (#5143)
1 parent 9afc728 commit eed4c26

File tree

7 files changed

+48
-7
lines changed

7 files changed

+48
-7
lines changed

.circleci/continue_config.yml

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -246,6 +246,7 @@ jobs:
246246
echo "export SNOWFLAKE_DATABASE='$TEST_DB_NAME'" >> "$BASH_ENV"
247247
echo "export DATABRICKS_CATALOG='$TEST_DB_NAME'" >> "$BASH_ENV"
248248
echo "export REDSHIFT_DATABASE='$TEST_DB_NAME'" >> "$BASH_ENV"
249+
echo "export GCP_POSTGRES_DATABASE='$TEST_DB_NAME'" >> "$BASH_ENV"
249250
- run:
250251
name: Create test database
251252
command: ./.circleci/manage-test-db.sh << parameters.engine >> "$TEST_DB_NAME" up
@@ -303,6 +304,7 @@ workflows:
303304
- bigquery
304305
- clickhouse-cloud
305306
- athena
307+
- gcp-postgres
306308
filters:
307309
branches:
308310
only:

.circleci/manage-test-db.sh

Lines changed: 26 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -109,6 +109,32 @@ clickhouse-cloud_init() {
109109
echo "Clickhouse Cloud instance $CLICKHOUSE_CLOUD_HOST is up and running"
110110
}
111111

# GCP Postgres
#
# Connections are made through the Cloud SQL Auth Proxy: download a pinned
# proxy release, authenticate with the service-account keyfile provided via
# $GCP_POSTGRES_KEYFILE_JSON, then run all SQL against the local proxy
# endpoint on 127.0.0.1.
gcp-postgres_init() {
  # Download and start Cloud SQL Proxy (pinned version for reproducible CI)
  curl -fsSL -o cloud-sql-proxy https://storage.googleapis.com/cloud-sql-connectors/cloud-sql-proxy/v2.18.0/cloud-sql-proxy.linux.amd64
  chmod +x cloud-sql-proxy
  echo "$GCP_POSTGRES_KEYFILE_JSON" > /tmp/keyfile.json
  ./cloud-sql-proxy --credentials-file /tmp/keyfile.json "$GCP_POSTGRES_INSTANCE_CONNECTION_STRING" &

  # Wait for the proxy to actually accept connections instead of a fixed
  # `sleep 5` (which races proxy startup and hides a failed launch):
  # poll with a trivial query, giving up after ~30s.
  for _ in $(seq 1 30); do
    if gcp-postgres_exec "select 1" >/dev/null 2>&1; then
      echo "Cloud SQL Proxy for $GCP_POSTGRES_INSTANCE_CONNECTION_STRING is up and running"
      return 0
    fi
    sleep 1
  done
  echo "Cloud SQL Proxy did not become ready in time" >&2
  return 1
}

# Run a single SQL statement ($1) against the 'postgres' maintenance
# database through the local proxy.
gcp-postgres_exec() {
  PGPASSWORD=$GCP_POSTGRES_PASSWORD psql -h 127.0.0.1 -U "$GCP_POSTGRES_USER" -c "$1" postgres
}

# Create the per-run test database ($1 is the database name).
gcp-postgres_up() {
  gcp-postgres_exec "create database $1"
}

# Drop the per-run test database. 'if exists' keeps teardown idempotent on
# retries, and 'with (force)' (Postgres 13+; supported by Cloud SQL)
# terminates lingering test connections so the drop cannot flake with
# "database is being accessed by other users".
gcp-postgres_down() {
  gcp-postgres_exec "drop database if exists $1 with (force)"
}
137+
112138
INIT_FUNC="${ENGINE}_init"
113139
UP_FUNC="${ENGINE}_up"
114140
DOWN_FUNC="${ENGINE}_down"

Makefile

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -173,6 +173,9 @@ clickhouse-cloud-test: guard-CLICKHOUSE_CLOUD_HOST guard-CLICKHOUSE_CLOUD_USERNA
173173
athena-test: guard-AWS_ACCESS_KEY_ID guard-AWS_SECRET_ACCESS_KEY guard-ATHENA_S3_WAREHOUSE_LOCATION engine-athena-install
174174
pytest -n auto -m "athena" --retries 3 --junitxml=test-results/junit-athena.xml
175175

# Run the GCP Postgres engine integration tests (marker: gcp_postgres).
# The guard-* prerequisites fail fast when the Cloud SQL connection details
# or the service-account keyfile are missing from the environment;
# engine-gcppostgres-install provides the engine's Python dependencies.
gcp-postgres-test: guard-GCP_POSTGRES_INSTANCE_CONNECTION_STRING guard-GCP_POSTGRES_USER guard-GCP_POSTGRES_PASSWORD guard-GCP_POSTGRES_KEYFILE_JSON engine-gcppostgres-install
	pytest -n auto -m "gcp_postgres" --retries 3 --junitxml=test-results/junit-gcp-postgres.xml
176179
vscode_settings:
177180
mkdir -p .vscode
178181
cp -r ./tooling/vscode/*.json .vscode/

sqlmesh/core/config/connection.py

Lines changed: 0 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -1210,12 +1210,6 @@ def _validate_auth_method(cls, data: t.Any) -> t.Any:
12101210
password = data.get("password")
12111211
enable_iam_auth = data.get("enable_iam_auth")
12121212

1213-
if password and enable_iam_auth:
1214-
raise ConfigError(
1215-
"Invalid GCP Postgres connection configuration - both password and"
1216-
" enable_iam_auth set. Use password when connecting to a postgres"
1217-
" user and enable_iam_auth 'True' when connecting to an IAM user."
1218-
)
12191213
if not password and not enable_iam_auth:
12201214
raise ConfigError(
12211215
"GCP Postgres connection configuration requires either password set"

tests/core/engine_adapter/integration/__init__.py

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -82,6 +82,7 @@ def pytest_marks(self) -> t.List[MarkDecorator]:
8282
IntegrationTestEngine("bigquery", native_dataframe_type="bigframe", cloud=True),
8383
IntegrationTestEngine("databricks", native_dataframe_type="pyspark", cloud=True),
8484
IntegrationTestEngine("snowflake", native_dataframe_type="snowpark", cloud=True),
85+
IntegrationTestEngine("gcp_postgres", cloud=True),
8586
]
8687

8788
ENGINES_BY_NAME = {e.engine: e for e in ENGINES}

tests/core/engine_adapter/integration/config.yaml

Lines changed: 11 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -186,5 +186,16 @@ gateways:
186186
state_connection:
187187
type: duckdb
188188

189+
inttest_gcp_postgres:
190+
connection:
191+
type: gcp_postgres
192+
instance_connection_string: {{ env_var("GCP_POSTGRES_INSTANCE_CONNECTION_STRING") }}
193+
user: {{ env_var("GCP_POSTGRES_USER") }}
194+
password: {{ env_var("GCP_POSTGRES_PASSWORD") }}
195+
keyfile_json: {{ env_var("GCP_POSTGRES_KEYFILE_JSON", "") }}
196+
db: {{ env_var("GCP_POSTGRES_DATABASE") }}
197+
enable_iam_auth: true
198+
check_import: false
199+
189200
model_defaults:
190201
dialect: duckdb

tests/core/engine_adapter/integration/test_integration.py

Lines changed: 5 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1587,7 +1587,11 @@ def _normalize_snowflake(name: str, prefix_regex: str = "(sqlmesh__)(.*)"):
15871587
k: [_normalize_snowflake(name) for name in v] for k, v in object_names.items()
15881588
}
15891589

1590-
init_example_project(tmp_path, ctx.mark.split("_")[0], schema_name=schema_name)
1590+
if ctx.mark.startswith("gcp_postgres"):
1591+
engine_type = "gcp_postgres"
1592+
else:
1593+
engine_type = ctx.mark.split("_")[0]
1594+
init_example_project(tmp_path, engine_type, schema_name=schema_name)
15911595

15921596
config = load_config_from_paths(
15931597
Config,

0 commit comments

Comments
 (0)