From 6e4dd75922249da744db19afd63b1182ed833f84 Mon Sep 17 00:00:00 2001 From: Alexey Mironov Date: Wed, 1 Oct 2025 14:58:07 +0300 Subject: [PATCH 1/3] Add AlchemyRandomValuesStorage --- .../random_values/alchemy/__init__.py | 4 + .../random_values/alchemy/alchemy.py | 234 ++++++++++++++++++ tests/persistence/__init__.py | 0 tests/persistence/random_values/__init__.py | 0 .../random_values/alchemy/__init__.py | 0 .../random_values/alchemy/alchemy_test.py | 197 +++++++++++++++ .../random_values/sqlite/__init__.py | 0 .../random_values/sqlite/sqlite_test.py | 197 +++++++++++++++ 8 files changed, 632 insertions(+) create mode 100644 pysatl_experiment/persistence/random_values/alchemy/__init__.py create mode 100644 pysatl_experiment/persistence/random_values/alchemy/alchemy.py create mode 100644 tests/persistence/__init__.py create mode 100644 tests/persistence/random_values/__init__.py create mode 100644 tests/persistence/random_values/alchemy/__init__.py create mode 100644 tests/persistence/random_values/alchemy/alchemy_test.py create mode 100644 tests/persistence/random_values/sqlite/__init__.py create mode 100644 tests/persistence/random_values/sqlite/sqlite_test.py diff --git a/pysatl_experiment/persistence/random_values/alchemy/__init__.py b/pysatl_experiment/persistence/random_values/alchemy/__init__.py new file mode 100644 index 0000000..1a30973 --- /dev/null +++ b/pysatl_experiment/persistence/random_values/alchemy/__init__.py @@ -0,0 +1,4 @@ +from .alchemy import AlchemyRandomValuesStorage + + +__all__ = ["AlchemyRandomValuesStorage"] diff --git a/pysatl_experiment/persistence/random_values/alchemy/alchemy.py b/pysatl_experiment/persistence/random_values/alchemy/alchemy.py new file mode 100644 index 0000000..148e045 --- /dev/null +++ b/pysatl_experiment/persistence/random_values/alchemy/alchemy.py @@ -0,0 +1,234 @@ +from __future__ import annotations + +import json +from typing import ClassVar + +from sqlalchemy import Integer, String, UniqueConstraint +from 
sqlalchemy.orm import Mapped, mapped_column +from typing_extensions import override + +from pysatl_experiment.persistence.db_store.base import ModelBase, SessionType +from pysatl_experiment.persistence.db_store.model import AbstractDbStore +from pysatl_experiment.persistence.model.random_values.random_values import ( + IRandomValuesStorage, + RandomValuesAllModel, + RandomValuesAllQuery, + RandomValuesCountQuery, + RandomValuesModel, + RandomValuesQuery, +) + + +class AlchemyRandomValues(ModelBase): + __tablename__ = "random_values" + + id: Mapped[int] = mapped_column(Integer, primary_key=True) # type: ignore + generator_name: Mapped[str] = mapped_column(String, nullable=False, index=True) # type: ignore + generator_parameters: Mapped[str] = mapped_column(String, nullable=False, index=True) # type: ignore + sample_size: Mapped[int] = mapped_column(Integer, nullable=False, index=True) # type: ignore + sample_num: Mapped[int] = mapped_column(Integer, nullable=False) # type: ignore + data: Mapped[str] = mapped_column(String, nullable=False) # type: ignore + + __table_args__ = ( + UniqueConstraint( + "generator_name", + "generator_parameters", + "sample_size", + "sample_num", + name="uq_random_values_unique", + ), + ) + + +class AlchemyRandomValuesStorage(AbstractDbStore, IRandomValuesStorage): + session: ClassVar[SessionType] + + def __init__(self, db_url: str): + super().__init__(db_url=db_url) + self._initialized: bool = False + + @override + def init(self) -> None: + # Initialize engine and scoped session via AbstractDbStore + super().init() + self._initialized = True + + def _get_session(self) -> SessionType: + if not getattr(self, "_initialized", False): + raise RuntimeError("Storage not initialized. 
Call init() first.") + # Access class attribute defined by AbstractDbStore after init() + return AlchemyRandomValuesStorage.session + + @override + def get_data(self, query: RandomValuesQuery) -> RandomValuesModel | None: + params_json = json.dumps(query.generator_parameters) + row: AlchemyRandomValues | None = ( + self._get_session() + .query(AlchemyRandomValues) + .filter( + AlchemyRandomValues.generator_name == query.generator_name, + AlchemyRandomValues.generator_parameters == params_json, + AlchemyRandomValues.sample_size == int(query.sample_size), + AlchemyRandomValues.sample_num == int(query.sample_num), + ) + .one_or_none() + ) + if row is None: + return None + return RandomValuesModel( + generator_name=query.generator_name, + generator_parameters=query.generator_parameters, + sample_size=query.sample_size, + sample_num=query.sample_num, + data=json.loads(row.data), + ) + + @override + def insert_data(self, data: RandomValuesModel) -> None: + params_json = json.dumps(data.generator_parameters) + entity = AlchemyRandomValues( + generator_name=data.generator_name, + generator_parameters=params_json, + sample_size=int(data.sample_size), + sample_num=int(data.sample_num), + data=json.dumps(data.data), + ) + existing: AlchemyRandomValues | None = ( + self._get_session() + .query(AlchemyRandomValues) + .filter( + AlchemyRandomValues.generator_name == entity.generator_name, + AlchemyRandomValues.generator_parameters == entity.generator_parameters, + AlchemyRandomValues.sample_size == entity.sample_size, + AlchemyRandomValues.sample_num == entity.sample_num, + ) + .one_or_none() + ) + if existing is None: + self._get_session().add(entity) + else: + existing.data = entity.data + self._get_session().commit() + + @override + def delete_data(self, query: RandomValuesQuery) -> None: + params_json = json.dumps(query.generator_parameters) + ( + self._get_session() + .query(AlchemyRandomValues) + .filter( + AlchemyRandomValues.generator_name == query.generator_name, + 
AlchemyRandomValues.generator_parameters == params_json, + AlchemyRandomValues.sample_size == int(query.sample_size), + AlchemyRandomValues.sample_num == int(query.sample_num), + ) + .delete() + ) + self._get_session().commit() + + @override + def get_rvs_count(self, query: RandomValuesAllQuery) -> int: + params_json = json.dumps(query.generator_parameters) + return ( + self._get_session() + .query(AlchemyRandomValues) + .filter( + AlchemyRandomValues.generator_name == query.generator_name, + AlchemyRandomValues.generator_parameters == params_json, + AlchemyRandomValues.sample_size == int(query.sample_size), + ) + .count() + ) + + @override + def insert_all_data(self, query: RandomValuesAllModel) -> None: + params_json = json.dumps(query.generator_parameters) + # delete existing + ( + self._get_session() + .query(AlchemyRandomValues) + .filter( + AlchemyRandomValues.generator_name == query.generator_name, + AlchemyRandomValues.generator_parameters == params_json, + AlchemyRandomValues.sample_size == int(query.sample_size), + ) + .delete() + ) + # insert new + for i, sample in enumerate(query.data, start=1): + self._get_session().add( + AlchemyRandomValues( + generator_name=query.generator_name, + generator_parameters=params_json, + sample_size=int(query.sample_size), + sample_num=i, + data=json.dumps(sample), + ) + ) + self._get_session().commit() + + @override + def get_all_data(self, query: RandomValuesAllQuery) -> list[RandomValuesModel] | None: + params_json = json.dumps(query.generator_parameters) + rows: list[AlchemyRandomValues] = ( + self._get_session() + .query(AlchemyRandomValues) + .filter( + AlchemyRandomValues.generator_name == query.generator_name, + AlchemyRandomValues.generator_parameters == params_json, + AlchemyRandomValues.sample_size == int(query.sample_size), + ) + .order_by(AlchemyRandomValues.sample_num) + .all() + ) + return [ + RandomValuesModel( + generator_name=query.generator_name, + generator_parameters=query.generator_parameters, + 
sample_size=query.sample_size, + sample_num=row.sample_num, + data=json.loads(row.data), + ) + for row in rows + ] + + @override + def delete_all_data(self, query: RandomValuesAllQuery) -> None: + params_json = json.dumps(query.generator_parameters) + ( + self._get_session() + .query(AlchemyRandomValues) + .filter( + AlchemyRandomValues.generator_name == query.generator_name, + AlchemyRandomValues.generator_parameters == params_json, + AlchemyRandomValues.sample_size == int(query.sample_size), + ) + .delete() + ) + self._get_session().commit() + + @override + def get_count_data(self, query: RandomValuesCountQuery) -> list[RandomValuesModel] | None: + params_json = json.dumps(query.generator_parameters) + rows: list[AlchemyRandomValues] = ( + self._get_session() + .query(AlchemyRandomValues) + .filter( + AlchemyRandomValues.generator_name == query.generator_name, + AlchemyRandomValues.generator_parameters == params_json, + AlchemyRandomValues.sample_size == int(query.sample_size), + ) + .order_by(AlchemyRandomValues.sample_num) + .limit(int(query.count)) + .all() + ) + return [ + RandomValuesModel( + generator_name=query.generator_name, + generator_parameters=query.generator_parameters, + sample_size=query.sample_size, + sample_num=row.sample_num, + data=json.loads(row.data), + ) + for row in rows + ] diff --git a/tests/persistence/__init__.py b/tests/persistence/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/tests/persistence/random_values/__init__.py b/tests/persistence/random_values/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/tests/persistence/random_values/alchemy/__init__.py b/tests/persistence/random_values/alchemy/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/tests/persistence/random_values/alchemy/alchemy_test.py b/tests/persistence/random_values/alchemy/alchemy_test.py new file mode 100644 index 0000000..3505c8f --- /dev/null +++ b/tests/persistence/random_values/alchemy/alchemy_test.py 
@@ -0,0 +1,197 @@ +from __future__ import annotations + +from pathlib import Path + +import pytest + +from pysatl_experiment.persistence.model.random_values.random_values import ( + RandomValuesAllModel, + RandomValuesAllQuery, + RandomValuesCountQuery, + RandomValuesModel, + RandomValuesQuery, +) +from pysatl_experiment.persistence.random_values.alchemy.alchemy import AlchemyRandomValuesStorage + + +@pytest.fixture() +def db_path(tmp_path: Path) -> Path: + return tmp_path / "rvs.sqlite" + + +@pytest.fixture() +def storage(db_path: Path) -> AlchemyRandomValuesStorage: + store = AlchemyRandomValuesStorage(db_url="sqlite:///:memory:") + store.init() + return store + + +def test_guard_requires_init(db_path: Path) -> None: + store = AlchemyRandomValuesStorage(str(db_path)) + with pytest.raises(RuntimeError): + _ = store.get_data( + RandomValuesQuery( + generator_name="gen", + generator_parameters=[0.5], + sample_size=10, + sample_num=1, + ) + ) + + +def test_get_data_empty_returns_none(storage: AlchemyRandomValuesStorage) -> None: + query = RandomValuesQuery( + generator_name="gen_A", + generator_parameters=[0.1, 0.2], + sample_size=20, + sample_num=1, + ) + assert storage.get_data(query) is None + + +def test_insert_and_get_single_sample(storage: AlchemyRandomValuesStorage) -> None: + model = RandomValuesModel( + generator_name="gen_A", + generator_parameters=[0.1, 0.2], + sample_size=20, + sample_num=1, + data=[0.11, 0.22, 0.33], + ) + storage.insert_data(model) + + got = storage.get_data( + RandomValuesQuery( + generator_name="gen_A", + generator_parameters=[0.1, 0.2], + sample_size=20, + sample_num=1, + ) + ) + + assert got is not None + assert got.generator_name == model.generator_name + assert got.generator_parameters == model.generator_parameters + assert got.sample_size == model.sample_size + assert got.sample_num == model.sample_num + assert got.data == model.data + + +def test_delete_single_sample(storage: AlchemyRandomValuesStorage) -> None: + model = 
RandomValuesModel( + generator_name="gen_B", + generator_parameters=[0.3], + sample_size=5, + sample_num=2, + data=[1.0, 2.0], + ) + storage.insert_data(model) + + storage.delete_data( + RandomValuesQuery( + generator_name="gen_B", + generator_parameters=[0.3], + sample_size=5, + sample_num=2, + ) + ) + + assert ( + storage.get_data( + RandomValuesQuery( + generator_name="gen_B", + generator_parameters=[0.3], + sample_size=5, + sample_num=2, + ) + ) + is None + ) + + +def test_insert_all_and_get_all_and_count(storage: AlchemyRandomValuesStorage) -> None: + all_model = RandomValuesAllModel( + generator_name="gen_C", + generator_parameters=[0.7, 0.9], + sample_size=4, + data=[[0.1, 0.2], [0.3, 0.4], [0.5, 0.6]], + ) + storage.insert_all_data(all_model) + + count = storage.get_rvs_count( + RandomValuesAllQuery( + generator_name="gen_C", + generator_parameters=[0.7, 0.9], + sample_size=4, + ) + ) + assert count == 3 + + all_data = storage.get_all_data( + RandomValuesAllQuery( + generator_name="gen_C", + generator_parameters=[0.7, 0.9], + sample_size=4, + ) + ) + + assert isinstance(all_data, list) + assert [m.sample_num for m in all_data] == [1, 2, 3] + assert [m.data for m in all_data] == [[0.1, 0.2], [0.3, 0.4], [0.5, 0.6]] + + +def test_get_count_data_limits(storage: AlchemyRandomValuesStorage) -> None: + all_model = RandomValuesAllModel( + generator_name="gen_D", + generator_parameters=[1.1], + sample_size=3, + data=[[1], [2], [3], [4]], + ) + storage.insert_all_data(all_model) + + limited = storage.get_count_data( + RandomValuesCountQuery( + generator_name="gen_D", + generator_parameters=[1.1], + sample_size=3, + count=2, + ) + ) + + assert [m.sample_num for m in limited] == [1, 2] + assert [m.data for m in limited] == [[1], [2]] + + +def test_delete_all_data(storage: AlchemyRandomValuesStorage) -> None: + all_model = RandomValuesAllModel( + generator_name="gen_E", + generator_parameters=[2.2], + sample_size=8, + data=[[10, 20], [30, 40]], + ) + 
storage.insert_all_data(all_model) + + storage.delete_all_data( + RandomValuesAllQuery( + generator_name="gen_E", + generator_parameters=[2.2], + sample_size=8, + ) + ) + + count_after = storage.get_rvs_count( + RandomValuesAllQuery( + generator_name="gen_E", + generator_parameters=[2.2], + sample_size=8, + ) + ) + assert count_after == 0 + + all_data_after = storage.get_all_data( + RandomValuesAllQuery( + generator_name="gen_E", + generator_parameters=[2.2], + sample_size=8, + ) + ) + assert all_data_after == [] diff --git a/tests/persistence/random_values/sqlite/__init__.py b/tests/persistence/random_values/sqlite/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/tests/persistence/random_values/sqlite/sqlite_test.py b/tests/persistence/random_values/sqlite/sqlite_test.py new file mode 100644 index 0000000..0ff2340 --- /dev/null +++ b/tests/persistence/random_values/sqlite/sqlite_test.py @@ -0,0 +1,197 @@ +from __future__ import annotations + +from pathlib import Path + +import pytest + +from pysatl_experiment.persistence.model.random_values.random_values import ( + RandomValuesAllModel, + RandomValuesAllQuery, + RandomValuesCountQuery, + RandomValuesModel, + RandomValuesQuery, +) +from pysatl_experiment.persistence.random_values.sqlite.sqlite import SQLiteRandomValuesStorage + + +@pytest.fixture() +def db_path(tmp_path: Path) -> Path: + return tmp_path / "rvs.sqlite" + + +@pytest.fixture() +def storage(db_path: Path) -> SQLiteRandomValuesStorage: + store = SQLiteRandomValuesStorage(str(db_path)) + store.init() + return store + + +def test_guard_requires_init(db_path: Path) -> None: + store = SQLiteRandomValuesStorage(str(db_path)) + with pytest.raises(RuntimeError): + _ = store.get_data( + RandomValuesQuery( + generator_name="gen", + generator_parameters=[0.5], + sample_size=10, + sample_num=1, + ) + ) + + +def test_get_data_empty_returns_none(storage: SQLiteRandomValuesStorage) -> None: + query = RandomValuesQuery( + generator_name="gen_A", + 
generator_parameters=[0.1, 0.2], + sample_size=20, + sample_num=1, + ) + assert storage.get_data(query) is None + + +def test_insert_and_get_single_sample(storage: SQLiteRandomValuesStorage) -> None: + model = RandomValuesModel( + generator_name="gen_A", + generator_parameters=[0.1, 0.2], + sample_size=20, + sample_num=1, + data=[0.11, 0.22, 0.33], + ) + storage.insert_data(model) + + got = storage.get_data( + RandomValuesQuery( + generator_name="gen_A", + generator_parameters=[0.1, 0.2], + sample_size=20, + sample_num=1, + ) + ) + + assert got is not None + assert got.generator_name == model.generator_name + assert got.generator_parameters == model.generator_parameters + assert got.sample_size == model.sample_size + assert got.sample_num == model.sample_num + assert got.data == model.data + + +def test_delete_single_sample(storage: SQLiteRandomValuesStorage) -> None: + model = RandomValuesModel( + generator_name="gen_B", + generator_parameters=[0.3], + sample_size=5, + sample_num=2, + data=[1.0, 2.0], + ) + storage.insert_data(model) + + storage.delete_data( + RandomValuesQuery( + generator_name="gen_B", + generator_parameters=[0.3], + sample_size=5, + sample_num=2, + ) + ) + + assert ( + storage.get_data( + RandomValuesQuery( + generator_name="gen_B", + generator_parameters=[0.3], + sample_size=5, + sample_num=2, + ) + ) + is None + ) + + +def test_insert_all_and_get_all_and_count(storage: SQLiteRandomValuesStorage) -> None: + all_model = RandomValuesAllModel( + generator_name="gen_C", + generator_parameters=[0.7, 0.9], + sample_size=4, + data=[[0.1, 0.2], [0.3, 0.4], [0.5, 0.6]], + ) + storage.insert_all_data(all_model) + + count = storage.get_rvs_count( + RandomValuesAllQuery( + generator_name="gen_C", + generator_parameters=[0.7, 0.9], + sample_size=4, + ) + ) + assert count == 3 + + all_data = storage.get_all_data( + RandomValuesAllQuery( + generator_name="gen_C", + generator_parameters=[0.7, 0.9], + sample_size=4, + ) + ) + + assert isinstance(all_data, 
list) + assert [m.sample_num for m in all_data] == [1, 2, 3] + assert [m.data for m in all_data] == [[0.1, 0.2], [0.3, 0.4], [0.5, 0.6]] + + +def test_get_count_data_limits(storage: SQLiteRandomValuesStorage) -> None: + all_model = RandomValuesAllModel( + generator_name="gen_D", + generator_parameters=[1.1], + sample_size=3, + data=[[1], [2], [3], [4]], + ) + storage.insert_all_data(all_model) + + limited = storage.get_count_data( + RandomValuesCountQuery( + generator_name="gen_D", + generator_parameters=[1.1], + sample_size=3, + count=2, + ) + ) + + assert [m.sample_num for m in limited] == [1, 2] + assert [m.data for m in limited] == [[1], [2]] + + +def test_delete_all_data(storage: SQLiteRandomValuesStorage) -> None: + all_model = RandomValuesAllModel( + generator_name="gen_E", + generator_parameters=[2.2], + sample_size=8, + data=[[10, 20], [30, 40]], + ) + storage.insert_all_data(all_model) + + storage.delete_all_data( + RandomValuesAllQuery( + generator_name="gen_E", + generator_parameters=[2.2], + sample_size=8, + ) + ) + + count_after = storage.get_rvs_count( + RandomValuesAllQuery( + generator_name="gen_E", + generator_parameters=[2.2], + sample_size=8, + ) + ) + assert count_after == 0 + + all_data_after = storage.get_all_data( + RandomValuesAllQuery( + generator_name="gen_E", + generator_parameters=[2.2], + sample_size=8, + ) + ) + assert all_data_after == [] From ddf4d4861f6bfff478c08b49a8f4c7b7a540ca9a Mon Sep 17 00:00:00 2001 From: Alexey Mironov Date: Wed, 1 Oct 2025 15:07:15 +0300 Subject: [PATCH 2/3] Add AlchemyTimeComplexityStorage --- .../time_complexity/alchemy/alchemy.py | 127 ++++++++++++++++++ .../time_complexity/alchemy/alchemy_test.py | 105 +++++++++++++++ .../time_complexity/sqlite/sqlite_test.py | 105 +++++++++++++++ 3 files changed, 337 insertions(+) create mode 100644 pysatl_experiment/persistence/time_complexity/alchemy/alchemy.py create mode 100644 tests/persistence/time_complexity/alchemy/alchemy_test.py create mode 100644 
tests/persistence/time_complexity/sqlite/sqlite_test.py diff --git a/pysatl_experiment/persistence/time_complexity/alchemy/alchemy.py b/pysatl_experiment/persistence/time_complexity/alchemy/alchemy.py new file mode 100644 index 0000000..ce77c45 --- /dev/null +++ b/pysatl_experiment/persistence/time_complexity/alchemy/alchemy.py @@ -0,0 +1,127 @@ +from __future__ import annotations + +import json +from typing import ClassVar + +from sqlalchemy import Integer, String, UniqueConstraint +from sqlalchemy.orm import Mapped, mapped_column +from typing_extensions import override + +from pysatl_experiment.persistence.db_store.base import ModelBase, SessionType +from pysatl_experiment.persistence.db_store.model import AbstractDbStore +from pysatl_experiment.persistence.model.time_complexity.time_complexity import ( + ITimeComplexityStorage, + TimeComplexityModel, + TimeComplexityQuery, +) + + +class AlchemyTimeComplexity(ModelBase): + __tablename__ = "time_complexity" + + id: Mapped[int] = mapped_column(Integer, primary_key=True) # type: ignore + criterion_code: Mapped[str] = mapped_column(String, nullable=False, index=True) # type: ignore + criterion_parameters: Mapped[str] = mapped_column(String, nullable=False, index=True) # type: ignore + sample_size: Mapped[int] = mapped_column(Integer, nullable=False, index=True) # type: ignore + monte_carlo_count: Mapped[int] = mapped_column(Integer, nullable=False, index=True) # type: ignore + experiment_id: Mapped[int] = mapped_column(Integer, nullable=False) # type: ignore + results_times: Mapped[str] = mapped_column(String, nullable=False) # type: ignore + + __table_args__ = ( + UniqueConstraint( + "criterion_code", + "criterion_parameters", + "sample_size", + "monte_carlo_count", + name="uq_time_complexity_unique", + ), + ) + + +class AlchemyTimeComplexityStorage(AbstractDbStore, ITimeComplexityStorage): + session: ClassVar[SessionType] + + def __init__(self, db_url: str): + super().__init__(db_url=db_url) + self._initialized: 
bool = False + + @override + def init(self) -> None: + super().init() + self._initialized = True + + def _get_session(self) -> SessionType: + if not getattr(self, "_initialized", False): + raise RuntimeError("Storage not initialized. Call init() first.") + return AlchemyTimeComplexityStorage.session + + @override + def get_data(self, query: TimeComplexityQuery) -> TimeComplexityModel | None: + params_json = json.dumps(query.criterion_parameters) + row: AlchemyTimeComplexity | None = ( + self._get_session() + .query(AlchemyTimeComplexity) + .filter( + AlchemyTimeComplexity.criterion_code == query.criterion_code, + AlchemyTimeComplexity.criterion_parameters == params_json, + AlchemyTimeComplexity.sample_size == int(query.sample_size), + AlchemyTimeComplexity.monte_carlo_count == int(query.monte_carlo_count), + ) + .one_or_none() + ) + if row is None: + return None + return TimeComplexityModel( + experiment_id=int(row.experiment_id), + criterion_code=query.criterion_code, + criterion_parameters=query.criterion_parameters, + sample_size=query.sample_size, + monte_carlo_count=query.monte_carlo_count, + results_times=json.loads(row.results_times), + ) + + @override + def insert_data(self, data: TimeComplexityModel) -> None: + params_json = json.dumps(data.criterion_parameters) + existing: AlchemyTimeComplexity | None = ( + self._get_session() + .query(AlchemyTimeComplexity) + .filter( + AlchemyTimeComplexity.criterion_code == data.criterion_code, + AlchemyTimeComplexity.criterion_parameters == params_json, + AlchemyTimeComplexity.sample_size == int(data.sample_size), + AlchemyTimeComplexity.monte_carlo_count == int(data.monte_carlo_count), + ) + .one_or_none() + ) + if existing is None: + entity = AlchemyTimeComplexity( + criterion_code=data.criterion_code, + criterion_parameters=params_json, + sample_size=int(data.sample_size), + monte_carlo_count=int(data.monte_carlo_count), + experiment_id=int(data.experiment_id), + results_times=json.dumps(data.results_times), + ) + 
self._get_session().add(entity) + else: + # replace experiment_id and results_times for the unique key + existing.experiment_id = int(data.experiment_id) + existing.results_times = json.dumps(data.results_times) + self._get_session().commit() + + @override + def delete_data(self, query: TimeComplexityQuery) -> None: + params_json = json.dumps(query.criterion_parameters) + ( + self._get_session() + .query(AlchemyTimeComplexity) + .filter( + AlchemyTimeComplexity.criterion_code == query.criterion_code, + AlchemyTimeComplexity.criterion_parameters == params_json, + AlchemyTimeComplexity.sample_size == int(query.sample_size), + AlchemyTimeComplexity.monte_carlo_count == int(query.monte_carlo_count), + ) + .delete() + ) + self._get_session().commit() diff --git a/tests/persistence/time_complexity/alchemy/alchemy_test.py b/tests/persistence/time_complexity/alchemy/alchemy_test.py new file mode 100644 index 0000000..7607fd4 --- /dev/null +++ b/tests/persistence/time_complexity/alchemy/alchemy_test.py @@ -0,0 +1,105 @@ +from __future__ import annotations + +from pathlib import Path + +import pytest + +from pysatl_experiment.persistence.model.time_complexity.time_complexity import TimeComplexityModel, TimeComplexityQuery +from pysatl_experiment.persistence.time_complexity.alchemy.alchemy import AlchemyTimeComplexityStorage + + +@pytest.fixture() +def db_path(tmp_path: Path) -> Path: + return tmp_path / "time_complexity.sqlite" + + +@pytest.fixture() +def storage(db_path: Path) -> AlchemyTimeComplexityStorage: + store = AlchemyTimeComplexityStorage(db_url="sqlite:///:memory:") + store.init() + return store + + +def test_guard_requires_init(db_path: Path) -> None: + store = AlchemyTimeComplexityStorage(str(db_path)) + with pytest.raises(RuntimeError): + _ = store.get_data( + TimeComplexityQuery( + criterion_code="crit_A", + criterion_parameters=[0.1, 0.2], + sample_size=10, + monte_carlo_count=100, + ) + ) + + +def test_get_data_empty_returns_none(storage: 
AlchemyTimeComplexityStorage) -> None: + query = TimeComplexityQuery( + criterion_code="crit_A", + criterion_parameters=[0.1, 0.2], + sample_size=10, + monte_carlo_count=100, + ) + assert storage.get_data(query) is None + + +def test_insert_and_get(storage: AlchemyTimeComplexityStorage) -> None: + model = TimeComplexityModel( + experiment_id=123, + criterion_code="crit_A", + criterion_parameters=[0.1, 0.2], + sample_size=10, + monte_carlo_count=100, + results_times=[1.0, 2.0, 3.0], + ) + storage.insert_data(model) + + got = storage.get_data( + TimeComplexityQuery( + criterion_code="crit_A", + criterion_parameters=[0.1, 0.2], + sample_size=10, + monte_carlo_count=100, + ) + ) + + assert got is not None + assert got.experiment_id == model.experiment_id + assert got.criterion_code == model.criterion_code + assert got.criterion_parameters == model.criterion_parameters + assert got.sample_size == model.sample_size + assert got.monte_carlo_count == model.monte_carlo_count + assert got.results_times == model.results_times + + +def test_delete_data(storage: AlchemyTimeComplexityStorage) -> None: + model = TimeComplexityModel( + experiment_id=7, + criterion_code="crit_B", + criterion_parameters=[0.3], + sample_size=5, + monte_carlo_count=50, + results_times=[0.5, 0.6], + ) + storage.insert_data(model) + + storage.delete_data( + TimeComplexityQuery( + criterion_code="crit_B", + criterion_parameters=[0.3], + sample_size=5, + monte_carlo_count=50, + ) + ) + + assert ( + storage.get_data( + TimeComplexityQuery( + criterion_code="crit_B", + criterion_parameters=[0.3], + sample_size=5, + monte_carlo_count=50, + ) + ) + is None + ) diff --git a/tests/persistence/time_complexity/sqlite/sqlite_test.py b/tests/persistence/time_complexity/sqlite/sqlite_test.py new file mode 100644 index 0000000..3eef03b --- /dev/null +++ b/tests/persistence/time_complexity/sqlite/sqlite_test.py @@ -0,0 +1,105 @@ +from __future__ import annotations + +from pathlib import Path + +import pytest + +from 
pysatl_experiment.persistence.model.time_complexity.time_complexity import TimeComplexityModel, TimeComplexityQuery +from pysatl_experiment.persistence.time_complexity.sqlite.sqlite import SQLiteTimeComplexityStorage + + +@pytest.fixture() +def db_path(tmp_path: Path) -> Path: + return tmp_path / "time_complexity.sqlite" + + +@pytest.fixture() +def storage(db_path: Path) -> SQLiteTimeComplexityStorage: + store = SQLiteTimeComplexityStorage(str(db_path)) + store.init() + return store + + +def test_guard_requires_init(db_path: Path) -> None: + store = SQLiteTimeComplexityStorage(str(db_path)) + with pytest.raises(RuntimeError): + _ = store.get_data( + TimeComplexityQuery( + criterion_code="crit_A", + criterion_parameters=[0.1, 0.2], + sample_size=10, + monte_carlo_count=100, + ) + ) + + +def test_get_data_empty_returns_none(storage: SQLiteTimeComplexityStorage) -> None: + query = TimeComplexityQuery( + criterion_code="crit_A", + criterion_parameters=[0.1, 0.2], + sample_size=10, + monte_carlo_count=100, + ) + assert storage.get_data(query) is None + + +def test_insert_and_get(storage: SQLiteTimeComplexityStorage) -> None: + model = TimeComplexityModel( + experiment_id=123, + criterion_code="crit_A", + criterion_parameters=[0.1, 0.2], + sample_size=10, + monte_carlo_count=100, + results_times=[1.0, 2.0, 3.0], + ) + storage.insert_data(model) + + got = storage.get_data( + TimeComplexityQuery( + criterion_code="crit_A", + criterion_parameters=[0.1, 0.2], + sample_size=10, + monte_carlo_count=100, + ) + ) + + assert got is not None + assert got.experiment_id == model.experiment_id + assert got.criterion_code == model.criterion_code + assert got.criterion_parameters == model.criterion_parameters + assert got.sample_size == model.sample_size + assert got.monte_carlo_count == model.monte_carlo_count + assert got.results_times == model.results_times + + +def test_delete_data(storage: SQLiteTimeComplexityStorage) -> None: + model = TimeComplexityModel( + experiment_id=7, + 
criterion_code="crit_B", + criterion_parameters=[0.3], + sample_size=5, + monte_carlo_count=50, + results_times=[0.5, 0.6], + ) + storage.insert_data(model) + + storage.delete_data( + TimeComplexityQuery( + criterion_code="crit_B", + criterion_parameters=[0.3], + sample_size=5, + monte_carlo_count=50, + ) + ) + + assert ( + storage.get_data( + TimeComplexityQuery( + criterion_code="crit_B", + criterion_parameters=[0.3], + sample_size=5, + monte_carlo_count=50, + ) + ) + is None + ) From 1fe589949e69e50fa9e15bd1eb44693112b021e2 Mon Sep 17 00:00:00 2001 From: Alexey Mironov Date: Wed, 1 Oct 2025 16:37:31 +0300 Subject: [PATCH 3/3] Add AlchemyPowerStorage --- .../persistence/power/alchemy/alchemy.py | 146 ++++++++++++++++++ .../power/alchemy/test_power_alchemy.py | 128 +++++++++++++++ .../power/sqlite/test_power_sqlite.py | 129 ++++++++++++++++ 3 files changed, 403 insertions(+) create mode 100644 pysatl_experiment/persistence/power/alchemy/alchemy.py create mode 100644 tests/persistence/power/alchemy/test_power_alchemy.py create mode 100644 tests/persistence/power/sqlite/test_power_sqlite.py diff --git a/pysatl_experiment/persistence/power/alchemy/alchemy.py b/pysatl_experiment/persistence/power/alchemy/alchemy.py new file mode 100644 index 0000000..499daec --- /dev/null +++ b/pysatl_experiment/persistence/power/alchemy/alchemy.py @@ -0,0 +1,146 @@ +from __future__ import annotations + +import json +from typing import ClassVar + +from sqlalchemy import Float, Integer, String, UniqueConstraint +from sqlalchemy.orm import Mapped, mapped_column +from typing_extensions import override + +from pysatl_experiment.persistence.db_store.base import ModelBase, SessionType +from pysatl_experiment.persistence.db_store.model import AbstractDbStore +from pysatl_experiment.persistence.model.power.power import IPowerStorage, PowerModel, PowerQuery + + +class AlchemyPower(ModelBase): + __tablename__ = "power" + + id: Mapped[int] = mapped_column(Integer, primary_key=True) # type: 
ignore + experiment_id: Mapped[int] = mapped_column(Integer, nullable=False) # type: ignore + criterion_code: Mapped[str] = mapped_column(String, nullable=False, index=True) # type: ignore + criterion_parameters: Mapped[str] = mapped_column(String, nullable=False, index=True) # type: ignore + sample_size: Mapped[int] = mapped_column(Integer, nullable=False, index=True) # type: ignore + alternative_code: Mapped[str] = mapped_column(String, nullable=False, index=True) # type: ignore + alternative_parameters: Mapped[str] = mapped_column(String, nullable=False, index=True) # type: ignore + monte_carlo_count: Mapped[int] = mapped_column(Integer, nullable=False, index=True) # type: ignore + significance_level: Mapped[float] = mapped_column(Float, nullable=False, index=True) # type: ignore + results_criteria: Mapped[str] = mapped_column(String, nullable=False) # type: ignore + + __table_args__ = ( + UniqueConstraint( + "criterion_code", + "criterion_parameters", + "sample_size", + "alternative_code", + "alternative_parameters", + "monte_carlo_count", + "significance_level", + name="uq_power_unique", + ), + ) + + +class AlchemyPowerStorage(AbstractDbStore, IPowerStorage): + session: ClassVar[SessionType] + + def __init__(self, db_url: str): + super().__init__(db_url=db_url) + self._initialized: bool = False + + @override + def init(self) -> None: + super().init() + self._initialized = True + + def _get_session(self) -> SessionType: + if not getattr(self, "_initialized", False): + raise RuntimeError("Storage not initialized. 
Call init() first.") + return AlchemyPowerStorage.session + + @override + def get_data(self, query: PowerQuery) -> PowerModel | None: + params_json = json.dumps(query.criterion_parameters) + alt_params_json = json.dumps(query.alternative_parameters) + row: AlchemyPower | None = ( + self._get_session() + .query(AlchemyPower) + .filter( + AlchemyPower.criterion_code == query.criterion_code, + AlchemyPower.criterion_parameters == params_json, + AlchemyPower.sample_size == int(query.sample_size), + AlchemyPower.alternative_code == query.alternative_code, + AlchemyPower.alternative_parameters == alt_params_json, + AlchemyPower.monte_carlo_count == int(query.monte_carlo_count), + AlchemyPower.significance_level == float(query.significance_level), + ) + .one_or_none() + ) + if row is None: + return None + return PowerModel( + experiment_id=int(row.experiment_id), + criterion_code=query.criterion_code, + criterion_parameters=query.criterion_parameters, + sample_size=query.sample_size, + alternative_code=query.alternative_code, + alternative_parameters=query.alternative_parameters, + monte_carlo_count=query.monte_carlo_count, + significance_level=query.significance_level, + results_criteria=json.loads(row.results_criteria), + ) + + @override + def insert_data(self, data: PowerModel) -> None: + params_json = json.dumps(data.criterion_parameters) + alt_params_json = json.dumps(data.alternative_parameters) + existing: AlchemyPower | None = ( + self._get_session() + .query(AlchemyPower) + .filter( + AlchemyPower.criterion_code == data.criterion_code, + AlchemyPower.criterion_parameters == params_json, + AlchemyPower.sample_size == int(data.sample_size), + AlchemyPower.alternative_code == data.alternative_code, + AlchemyPower.alternative_parameters == alt_params_json, + AlchemyPower.monte_carlo_count == int(data.monte_carlo_count), + AlchemyPower.significance_level == float(data.significance_level), + ) + .one_or_none() + ) + if existing is None: + entity = AlchemyPower( + 
experiment_id=int(data.experiment_id), + criterion_code=data.criterion_code, + criterion_parameters=params_json, + sample_size=int(data.sample_size), + alternative_code=data.alternative_code, + alternative_parameters=alt_params_json, + monte_carlo_count=int(data.monte_carlo_count), + significance_level=float(data.significance_level), + results_criteria=json.dumps(data.results_criteria), + ) + self._get_session().add(entity) + else: + existing.experiment_id = int(data.experiment_id) + existing.results_criteria = json.dumps(data.results_criteria) + self._get_session().commit() + + @override + def delete_data(self, query: PowerQuery) -> None: + params_json = json.dumps(query.criterion_parameters) + alt_params_json = json.dumps(query.alternative_parameters) + ( + self._get_session() + .query(AlchemyPower) + .filter( + AlchemyPower.criterion_code == query.criterion_code, + AlchemyPower.criterion_parameters == params_json, + AlchemyPower.sample_size == int(query.sample_size), + AlchemyPower.alternative_code == query.alternative_code, + AlchemyPower.alternative_parameters == alt_params_json, + AlchemyPower.monte_carlo_count == int(query.monte_carlo_count), + AlchemyPower.significance_level == float(query.significance_level), + ) + .delete() + ) + self._get_session().commit() diff --git a/tests/persistence/power/alchemy/test_power_alchemy.py b/tests/persistence/power/alchemy/test_power_alchemy.py new file mode 100644 index 0000000..f255a36 --- /dev/null +++ b/tests/persistence/power/alchemy/test_power_alchemy.py @@ -0,0 +1,128 @@ +from __future__ import annotations + +import pytest + +from pysatl_experiment.persistence.model.power.power import PowerModel, PowerQuery +from pysatl_experiment.persistence.power.alchemy.alchemy import AlchemyPowerStorage + + +@pytest.fixture() +def db_url() -> str: + # Use in-memory SQLite with StaticPool as configured by init_db + return "sqlite://" + + +@pytest.fixture() +def storage(db_url: str) -> AlchemyPowerStorage: + store = 
from __future__ import annotations

import pytest

from pysatl_experiment.persistence.model.power.power import PowerModel, PowerQuery
from pysatl_experiment.persistence.power.alchemy.alchemy import AlchemyPowerStorage


# Keyword bundles for the two scenarios exercised below; the storage never
# mutates them, so sharing module-level dicts is safe.
_KEY_A = dict(
    criterion_code="crit_A",
    criterion_parameters=[0.1, 0.2],
    sample_size=10,
    alternative_code="alt_A",
    alternative_parameters=[1.0],
    monte_carlo_count=100,
    significance_level=0.05,
)
_KEY_B = dict(
    criterion_code="crit_B",
    criterion_parameters=[0.3],
    sample_size=5,
    alternative_code="alt_B",
    alternative_parameters=[2.0, 3.0],
    monte_carlo_count=50,
    significance_level=0.1,
)


@pytest.fixture()
def db_url() -> str:
    # In-memory SQLite; init_db configures a StaticPool for this URL.
    return "sqlite://"


@pytest.fixture()
def storage(db_url: str) -> AlchemyPowerStorage:
    store = AlchemyPowerStorage(db_url)
    store.init()
    return store


def test_guard_requires_init(db_url: str) -> None:
    uninitialized = AlchemyPowerStorage(db_url)
    with pytest.raises(RuntimeError):
        uninitialized.get_data(PowerQuery(**_KEY_A))


def test_get_data_empty_returns_none(storage: AlchemyPowerStorage) -> None:
    assert storage.get_data(PowerQuery(**_KEY_A)) is None


def test_insert_and_get(storage: AlchemyPowerStorage) -> None:
    inserted = PowerModel(experiment_id=123, results_criteria=[True, False, True], **_KEY_A)
    storage.insert_data(inserted)

    fetched = storage.get_data(PowerQuery(**_KEY_A))

    assert fetched is not None
    # Round-trip must preserve every field of the model.
    for field in (
        "experiment_id",
        "criterion_code",
        "criterion_parameters",
        "sample_size",
        "alternative_code",
        "alternative_parameters",
        "monte_carlo_count",
        "significance_level",
        "results_criteria",
    ):
        assert getattr(fetched, field) == getattr(inserted, field)


def test_delete_data(storage: AlchemyPowerStorage) -> None:
    storage.insert_data(PowerModel(experiment_id=7, results_criteria=[False, False], **_KEY_B))

    storage.delete_data(PowerQuery(**_KEY_B))

    assert storage.get_data(PowerQuery(**_KEY_B)) is None
PowerQuery( + criterion_code="crit_A", + criterion_parameters=[0.1, 0.2], + sample_size=10, + alternative_code="alt_A", + alternative_parameters=[1.0], + monte_carlo_count=100, + significance_level=0.05, + ) + assert storage.get_data(query) is None + + +def test_insert_and_get(storage: SQLitePowerStorage) -> None: + model = PowerModel( + experiment_id=123, + criterion_code="crit_A", + criterion_parameters=[0.1, 0.2], + sample_size=10, + alternative_code="alt_A", + alternative_parameters=[1.0], + monte_carlo_count=100, + significance_level=0.05, + results_criteria=[True, False, True], + ) + storage.insert_data(model) + + got = storage.get_data( + PowerQuery( + criterion_code="crit_A", + criterion_parameters=[0.1, 0.2], + sample_size=10, + alternative_code="alt_A", + alternative_parameters=[1.0], + monte_carlo_count=100, + significance_level=0.05, + ) + ) + + assert got is not None + assert got.experiment_id == model.experiment_id + assert got.criterion_code == model.criterion_code + assert got.criterion_parameters == model.criterion_parameters + assert got.sample_size == model.sample_size + assert got.alternative_code == model.alternative_code + assert got.alternative_parameters == model.alternative_parameters + assert got.monte_carlo_count == model.monte_carlo_count + assert got.significance_level == model.significance_level + assert got.results_criteria == model.results_criteria + + +def test_delete_data(storage: SQLitePowerStorage) -> None: + model = PowerModel( + experiment_id=7, + criterion_code="crit_B", + criterion_parameters=[0.3], + sample_size=5, + alternative_code="alt_B", + alternative_parameters=[2.0, 3.0], + monte_carlo_count=50, + significance_level=0.1, + results_criteria=[False, False], + ) + storage.insert_data(model) + + storage.delete_data( + PowerQuery( + criterion_code="crit_B", + criterion_parameters=[0.3], + sample_size=5, + alternative_code="alt_B", + alternative_parameters=[2.0, 3.0], + monte_carlo_count=50, + significance_level=0.1, + ) + 
) + + assert ( + storage.get_data( + PowerQuery( + criterion_code="crit_B", + criterion_parameters=[0.3], + sample_size=5, + alternative_code="alt_B", + alternative_parameters=[2.0, 3.0], + monte_carlo_count=50, + significance_level=0.1, + ) + ) + is None + )