|
5 | 5 | ByteString, |
6 | 6 | Iterable, |
7 | 7 | ) |
8 | | -from contextlib import contextmanager |
| 8 | +from contextlib import ( |
| 9 | + closing, |
| 10 | + contextmanager, |
| 11 | +) |
9 | 12 | from inspect import cleandoc |
10 | 13 | from test.integration.conftest import ( |
11 | 14 | File, |
12 | 15 | delete_file, |
13 | 16 | ) |
| 17 | +from textwrap import dedent |
14 | 18 | from typing import ( |
15 | 19 | Tuple, |
16 | 20 | Union, |
17 | 21 | ) |
18 | 22 |
|
| 23 | +import pyexasol |
19 | 24 | import pytest |
20 | 25 | import requests |
| 26 | +from exasol.pytest_backend import ( |
| 27 | + BACKEND_ONPREM, |
| 28 | + BACKEND_SAAS, |
| 29 | +) |
21 | 30 |
|
22 | 31 | from exasol.bucketfs import ( |
23 | 32 | Bucket, |
@@ -315,3 +324,93 @@ def test_any_log_message_get_emitted(httpserver, caplog): |
315 | 324 | ] |
316 | 325 | # The log level DEBUG should emit at least one log message |
317 | 326 | assert log_records |
| 327 | + |
| 328 | + |
def test_upload_and_udf_path(
    backend_aware_bucketfs_params, backend_aware_database_params, backend
):
    """Upload a file to BucketFS and verify it is visible from inside a UDF.

    Steps:
      1. Upload a small file to the backend-appropriate bucket.
      2. Resolve the bucket's UDF path.
      3. From within Python UDFs, check the path exists and read the file
         back, asserting the content round-trips unchanged.

    Args (pytest fixtures):
        backend_aware_bucketfs_params: connection parameters for the bucket,
            keyed differently per backend (on-prem vs. SaaS).
        backend_aware_database_params: kwargs for ``pyexasol.connect``.
        backend: one of BACKEND_ONPREM / BACKEND_SAAS.
    """
    file_name = "Uploaded-File-From-Integration-test.bin"

    if backend == BACKEND_ONPREM:
        bucket = Bucket(
            name=backend_aware_bucketfs_params["bucket_name"],
            service_name=backend_aware_bucketfs_params["service_name"],
            password=backend_aware_bucketfs_params["password"],
            username=backend_aware_bucketfs_params["username"],
            verify=backend_aware_bucketfs_params["verify"],
            service=backend_aware_bucketfs_params["url"],
        )
    elif backend == BACKEND_SAAS:
        bucket = SaaSBucket(
            url=backend_aware_bucketfs_params["url"],
            account_id=backend_aware_bucketfs_params["account_id"],
            database_id=backend_aware_bucketfs_params["database_id"],
            pat=backend_aware_bucketfs_params["pat"],
        )
    else:
        # Without this guard, `bucket` would be unbound and the `finally`
        # block below would raise NameError for an unknown backend.
        pytest.skip(f"Unsupported backend: {backend}")

    content = "1" * 10
    try:
        bucket.upload(file_name, content)
        assert file_name in bucket.files, "File upload failed"

        # Generate UDF path
        udf_path = bucket.udf_path
        assert udf_path is not None, "UDF path generation failed"

        # NOTE: no `except` here on purpose — the previous version swallowed
        # every exception (including failed asserts) with `print(e)`, which
        # made the test pass unconditionally.
        with closing(pyexasol.connect(**backend_aware_database_params)) as conn:
            conn.execute("CREATE SCHEMA IF NOT EXISTS transact;")
            conn.execute("open schema transact;")

            # UDF that checks the path exists inside the UDF container.
            create_udf_sql = dedent(
                f"""
                --/
                CREATE OR REPLACE PYTHON3 SCALAR
                SCRIPT CHECK_FILE_EXISTS_UDF(file_path VARCHAR(200000))
                RETURNS BOOLEAN AS
                import os
                def run(ctx):
                    return os.path.exists(ctx.file_path)
                /
                """
            )
            conn.execute(create_udf_sql)
            # Verify the path exists inside the UDF
            result = conn.execute(
                f"SELECT CHECK_FILE_EXISTS_UDF('{udf_path}')"
            ).fetchone()[0]
            assert result is True

            # UDF that returns the content of the uploaded file.
            create_read_udf_sql = dedent(
                f"""
                --/
                CREATE OR REPLACE PYTHON3 SCALAR
                SCRIPT READ_FILE_CONTENT_UDF(file_path VARCHAR(200000))
                RETURNS VARCHAR(200000) AS
                def run(ctx):
                    with open(ctx.file_path, 'rb') as f:
                        return f.read().decode('utf-8', errors='replace')
                /
                """
            )
            conn.execute(create_read_udf_sql)

            file_content = conn.execute(
                f"SELECT READ_FILE_CONTENT_UDF('{udf_path}/{file_name}')"
            ).fetchone()[0]
            assert file_content == content
    finally:
        # Cleanup is on-prem only: `delete_file` takes the on-prem private
        # attributes (_service, _username, _password), which a SaaSBucket
        # does not have — calling it unconditionally crashed the `finally`
        # block on SaaS. NOTE(review): add SaaS cleanup once the conftest
        # helper supports it.
        if backend == BACKEND_ONPREM:
            delete_file(
                bucket._service,
                bucket.name,
                bucket._username,
                bucket._password,
                file_name,
            )
0 commit comments