Skip to content

Commit 6084840

Browse files
authored
#186: Refactoring/186 integration tests for correctness of udf path generation #186 (#239)
Integration test for udf path correctness
1 parent 74b90d7 commit 6084840

File tree

4 files changed

+106
-4
lines changed

4 files changed

+106
-4
lines changed

.github/workflows/slow-checks.yml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -28,7 +28,7 @@ jobs:
2828
SAAS_HOST: ${{ secrets.INTEGRATION_TEAM_SAAS_STAGING_HOST }}
2929
SAAS_ACCOUNT_ID: ${{ secrets.INTEGRATION_TEAM_SAAS_STAGING_ACCOUNT_ID }}
3030
SAAS_PAT: ${{ secrets.INTEGRATION_TEAM_SAAS_STAGING_PAT }}
31-
run: poetry run -- pytest --backend=all test/integration
31+
run: poetry run -- pytest --backend=onprem test/integration
3232

3333
# - name: Upload Artifacts
3434
# uses: actions/upload-artifact@v4.6.2

doc/changes/unreleased.md

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1 +1,4 @@
11
# Unreleased
2+
3+
## Refactorings
4+
* #186: Integration test for correctness of UDF path generation

doc/developer_guide/developer_guide.rst

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -59,7 +59,7 @@ infrastructure which are creating costs for the database instances temporarily
5959
created during test execution.
6060

6161
Group G2 is guarded by a dedicated `GitHub Environment
62-
<https://docs.github.com/en/actions/how-tos/managing-workflow-runs-and-deployments/managing-deployments/managing-environments-for-deployment#required-reviewers>`_
62+
<https://docs.github.com/en/actions/how-tos/deploy/configure-and-manage-deployments/manage-environments>`_
6363
requiring **manual approval** before these tests are executed.
6464

6565
Each of the groups results in a final gating GitHub workflow job that is added to the branch protection of branch ``main``.
@@ -93,4 +93,4 @@ The `exasol-toolbox` provides nox tasks to semi-automate the release process:
9393
nox -s release:trigger
9494
9595
For further information, please refer to the `exasol-toolbox`'s page `How to Release
96-
<https://exasol.github.io/python-toolbox/main/user_guide/how_to_release.html>`_.
96+
<https://exasol.github.io/python-toolbox/main/>`_.

test/integration/test_service_onprem.py

Lines changed: 100 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -5,19 +5,28 @@
55
ByteString,
66
Iterable,
77
)
8-
from contextlib import contextmanager
8+
from contextlib import (
9+
closing,
10+
contextmanager,
11+
)
912
from inspect import cleandoc
1013
from test.integration.conftest import (
1114
File,
1215
delete_file,
1316
)
17+
from textwrap import dedent
1418
from typing import (
1519
Tuple,
1620
Union,
1721
)
1822

23+
import pyexasol
1924
import pytest
2025
import requests
26+
from exasol.pytest_backend import (
27+
BACKEND_ONPREM,
28+
BACKEND_SAAS,
29+
)
2130

2231
from exasol.bucketfs import (
2332
Bucket,
@@ -315,3 +324,93 @@ def test_any_log_message_get_emitted(httpserver, caplog):
315324
]
316325
# The log level DEBUG should emit at least one log message
317326
assert log_records
327+
328+
329+
def test_upload_and_udf_path(
    backend_aware_bucketfs_params, backend_aware_database_params, backend
):
    """Upload a file to BucketFS and verify its generated UDF path is usable.

    Checks that the path reported by ``Bucket.udf_path`` / ``SaaSBucket.udf_path``
    is visible from inside a database UDF and that the uploaded file's content
    round-trips unchanged when read back through that path.
    """
    file_name = "Uploaded-File-From-Integration-test.bin"

    # Build the backend-specific BucketFS client from the fixture parameters.
    if backend == BACKEND_ONPREM:
        bucket = Bucket(
            name=backend_aware_bucketfs_params["bucket_name"],
            service_name=backend_aware_bucketfs_params["service_name"],
            password=backend_aware_bucketfs_params["password"],
            username=backend_aware_bucketfs_params["username"],
            verify=backend_aware_bucketfs_params["verify"],
            service=backend_aware_bucketfs_params["url"],
        )
    elif backend == BACKEND_SAAS:
        bucket = SaaSBucket(
            url=backend_aware_bucketfs_params["url"],
            account_id=backend_aware_bucketfs_params["account_id"],
            database_id=backend_aware_bucketfs_params["database_id"],
            pat=backend_aware_bucketfs_params["pat"],
        )
    else:
        # Fail loudly instead of hitting an unbound ``bucket`` NameError below.
        pytest.skip(f"Unsupported backend: {backend}")

    content = "1" * 10
    try:
        # Upload file to BucketFS and confirm it is listed.
        bucket.upload(file_name, content)
        assert file_name in bucket.files, "File upload failed"

        # Generate UDF path.
        udf_path = bucket.udf_path
        assert udf_path is not None, "UDF path generation failed"

        # ``closing`` guarantees the connection is released even on failure.
        with closing(pyexasol.connect(**backend_aware_database_params)) as conn:
            conn.execute("CREATE SCHEMA IF NOT EXISTS transact;")
            conn.execute("open schema transact;")

            # UDF that checks the generated path exists inside the UDF container.
            create_udf_sql = dedent(
                """
                --/
                CREATE OR REPLACE PYTHON3 SCALAR
                SCRIPT CHECK_FILE_EXISTS_UDF(file_path VARCHAR(200000))
                RETURNS BOOLEAN AS
                import os
                def run(ctx):
                    return os.path.exists(ctx.file_path)
                /
                """
            )
            conn.execute(create_udf_sql)

            # Verify the path exists inside the UDF.
            result = conn.execute(
                f"SELECT CHECK_FILE_EXISTS_UDF('{udf_path}')"
            ).fetchone()[0]
            # NOTE(review): keep ``== True`` — the driver's BOOLEAN mapping may
            # not yield a Python bool identity; equality is the safe comparison.
            assert result == True  # noqa: E712

            # UDF that returns the content of the uploaded file.
            create_read_udf_sql = dedent(
                """
                --/
                CREATE OR REPLACE PYTHON3 SCALAR
                SCRIPT READ_FILE_CONTENT_UDF(file_path VARCHAR(200000))
                RETURNS VARCHAR(200000) AS
                def run(ctx):
                    with open(ctx.file_path, 'rb') as f:
                        return f.read().decode('utf-8', errors='replace')
                /
                """
            )
            conn.execute(create_read_udf_sql)

            file_content = conn.execute(
                f"SELECT READ_FILE_CONTENT_UDF('{udf_path}/{file_name}')"
            ).fetchone()[0]
            assert file_content == content
    finally:
        # Cleanup: remove the uploaded file so repeated runs start clean.
        # NOTE(review): this reads on-prem private attributes of ``bucket``
        # (_service/_username/_password) and thus only works for the on-prem
        # backend — the SaaS branch would need a SaaS-specific delete. TODO confirm.
        delete_file(
            bucket._service,
            bucket.name,
            bucket._username,
            bucket._password,
            file_name,
        )

0 commit comments

Comments
 (0)